Merge 4.9.1 into 4.10.0 (#358)
* Merge 4.9.1 into 4.10.0 (#358)

---------

Signed-off-by: Álex Ruiz <[email protected]>
AlexRuiz7 committed Nov 18, 2024
1 parent 791e91a commit b36fff8
Showing 32 changed files with 1,737 additions and 119 deletions.
@@ -59,7 +59,7 @@ SendSIGKILL=no
 SuccessExitStatus=143

 # Allow a slow startup before the systemd notifier module kicks in to extend the timeout
-TimeoutStartSec=75
+TimeoutStartSec=180

 [Install]
 WantedBy=multi-user.target
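The raised `TimeoutStartSec` gives the JVM extra headroom on slow hosts before systemd aborts the unit startup. As a quick sanity check on an installed system, the effective value can be read back with `systemctl` (output illustrative; 180 s renders as 3 min):

```console
systemctl show wazuh-indexer --property=TimeoutStartUSec
TimeoutStartUSec=3min
```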
2 changes: 1 addition & 1 deletion distribution/packages/src/deb/debian/postinst
@@ -17,7 +17,7 @@ product_dir=/usr/share/wazuh-indexer
 config_dir=/etc/wazuh-indexer
 data_dir=/var/lib/wazuh-indexer
 log_dir=/var/log/wazuh-indexer
-pid_dir=/var/run/wazuh-indexer
+pid_dir=/run/wazuh-indexer
 tmp_dir=/var/log/wazuh-indexer/tmp


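On systemd-based distributions `/var/run` is a legacy symlink to `/run`, so pointing `pid_dir` at `/run` silences the deprecation warning without changing behavior. A minimal sketch of how the runtime directory is typically provisioned at boot via tmpfiles.d (the exact entry shipped by the package may differ):

```console
readlink -f /var/run        # resolves to /run on systemd systems
cat /usr/lib/tmpfiles.d/wazuh-indexer.conf
d /run/wazuh-indexer 0750 wazuh-indexer wazuh-indexer - -
```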
2 changes: 1 addition & 1 deletion distribution/packages/src/rpm/init.d/wazuh-indexer
@@ -47,7 +47,7 @@ if [ -f "$OPENSEARCH_ENV_FILE" ]; then
   . "$OPENSEARCH_ENV_FILE"
 fi

-exec="$OPENSEARCH_HOME/bin/wazuh-indexer"
+exec="$OPENSEARCH_HOME/bin/opensearch"
 prog="wazuh-indexer"
 pidfile="$PID_DIR/${prog}.pid"

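The SysV init script now launches the stock OpenSearch entry point, since the distribution ships the upstream launcher under its own home directory rather than a renamed binary. A quick post-install check (path assumes the default `OPENSEARCH_HOME`):

```console
test -x /usr/share/wazuh-indexer/bin/opensearch && echo "launcher OK"
```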
6 changes: 4 additions & 2 deletions distribution/packages/src/rpm/wazuh-indexer.cicd.spec
@@ -695,7 +695,9 @@ rm -fr %{buildroot}


 %changelog
-* Thu Mar 28 2024 support <[email protected]> - 4.9.0
+* Thu Aug 15 2024 support <[email protected]> - 4.9.1
+- More info: https://documentation.wazuh.com/current/release-notes/release-4-9-1.html
+* Thu Aug 15 2024 support <[email protected]> - 4.9.0
 - More info: https://documentation.wazuh.com/current/release-notes/release-4-9-0.html
 * Tue Jan 30 2024 support <[email protected]> - 4.8.1
 - More info: https://documentation.wazuh.com/current/release-notes/release-4-8-1.html
@@ -750,4 +752,4 @@ rm -fr %{buildroot}
 * Wed May 18 2022 support <[email protected]> - 4.3.1
 - More info: https://documentation.wazuh.com/current/release-notes/release-4-3-1.html
 * Thu May 05 2022 support <[email protected]> - 4.3.0
-- More info: https://documentation.wazuh.com/current/release-notes/release-4-3-0.html
\ No newline at end of file
+- More info: https://documentation.wazuh.com/current/release-notes/release-4-3-0.html
16 changes: 10 additions & 6 deletions distribution/packages/src/rpm/wazuh-indexer.rpm.spec
@@ -108,11 +108,13 @@ set -- "$@" "%%dir /usr/lib/systemd/system"
 set -- "$@" "%%dir /usr/lib/tmpfiles.d"
 set -- "$@" "%%dir /usr/share"
 set -- "$@" "%%dir /var"
-set -- "$@" "%%dir /var/run"
-set -- "$@" "%%dir /var/run/%{name}"
+set -- "$@" "%%dir /run"
 set -- "$@" "%%dir /var/lib"
 set -- "$@" "%%dir /var/log"
 set -- "$@" "%%dir /usr/lib/sysctl.d"
 set -- "$@" "%%dir /usr/lib/systemd"
 set -- "$@" "%%dir /usr/lib/systemd"
 set -- "$@" "%{_sysconfdir}/sysconfig/%{name}"
 set -- "$@" "%{config_dir}/log4j2.properties"
 set -- "$@" "%{config_dir}/jvm.options"
@@ -174,8 +176,8 @@ exit 0

 %post
 set -e
-chown -R %{name}.%{name} %{config_dir}
-chown -R %{name}.%{name} %{log_dir}
+chown -R %{name}:%{name} %{config_dir}
+chown -R %{name}:%{name} %{log_dir}

 # Apply PerformanceAnalyzer Settings
 chmod a+rw /tmp
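The `user.group` spelling of `chown` is a legacy BSD form that GNU coreutils has deprecated; the `:` separator is the portable one and stays unambiguous when a user name itself contains a dot. For example:

```console
chown -R wazuh-indexer:wazuh-indexer /etc/wazuh-indexer
```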
@@ -232,7 +234,7 @@ exit 0
 # Service files
 %attr(0644, root, root) %{_prefix}/lib/systemd/system/%{name}.service
 %attr(0644, root, root) %{_prefix}/lib/systemd/system/%{name}-performance-analyzer.service
-%attr(0644, root, root) %{_sysconfdir}/init.d/%{name}
+%attr(0750, root, root) %{_sysconfdir}/init.d/%{name}
 %attr(0644, root, root) %config(noreplace) %{_prefix}/lib/sysctl.d/%{name}.conf
 %attr(0644, root, root) %config(noreplace) %{_prefix}/lib/tmpfiles.d/%{name}.conf

@@ -263,9 +265,11 @@ exit 0
 %attr(750, %{name}, %{name}) %{product_dir}/performance-analyzer-rca/bin/*

 %changelog
-* Wed Jun 19 2024 support <[email protected]> - 4.10.0
+* Tue Aug 20 2024 support <[email protected]> - 4.10.0
 - More info: https://documentation.wazuh.com/current/release-notes/release-4-10-0.html
-* Thu Mar 28 2024 support <[email protected]> - 4.9.0
+* Thu Aug 15 2024 support <[email protected]> - 4.9.1
+- More info: https://documentation.wazuh.com/current/release-notes/release-4-9-1.html
+* Thu Aug 15 2024 support <[email protected]> - 4.9.0
 - More info: https://documentation.wazuh.com/current/release-notes/release-4-9-0.html
 * Tue Jan 30 2024 support <[email protected]> - 4.8.1
 - More info: https://documentation.wazuh.com/current/release-notes/release-4-8-1.html
2 changes: 1 addition & 1 deletion docker/README.md
@@ -91,4 +91,4 @@ Then, start a container with:

 ```console
 docker run -it --rm wazuh-indexer:4.10.0
-```
\ No newline at end of file
+```
2 changes: 1 addition & 1 deletion docker/dev/images/Dockerfile
@@ -1,4 +1,4 @@
-FROM gradle:jdk21-alpine AS builder
+FROM gradle:8.7.0-jdk21-alpine AS builder
 USER gradle
 WORKDIR /home/wazuh-indexer
 COPY --chown=gradle:gradle . /home/wazuh-indexer
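Pinning the full Gradle version instead of the floating `jdk21-alpine` tag keeps the builder stage reproducible, since unpinned tags get repointed to new Gradle releases over time. One way to check what the pinned tag currently resolves to (digest output will vary):

```console
docker pull gradle:8.7.0-jdk21-alpine
docker image inspect gradle:8.7.0-jdk21-alpine --format '{{index .RepoDigests 0}}'
```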
3 changes: 2 additions & 1 deletion integrations/.gitignore
@@ -1,2 +1,3 @@
 external
-docker/certs
\ No newline at end of file
+docker/certs
+docker/config
11 changes: 5 additions & 6 deletions integrations/README.md
@@ -14,14 +14,13 @@ and combines security data from AWS and a broad range of enterprise security dat

 Refer to these documents for more information about this integration:

-* [User Guide](./amazon-security-lake/README.md).
-* [Developer Guide](./amazon-security-lake/CONTRIBUTING.md).
-
+- [User Guide](./amazon-security-lake/README.md).
+- [Developer Guide](./amazon-security-lake/CONTRIBUTING.md).

 ### Other integrations

 We host development environments to support the following integrations:

-* [Splunk](./splunk/README.md).
-* [Elasticsearch](./elastic/README.md).
-* [OpenSearch](./opensearch/README.md).
+- [Splunk](./splunk/README.md).
+- [Elasticsearch](./elastic/README.md).
+- [OpenSearch](./opensearch/README.md).
20 changes: 8 additions & 12 deletions integrations/amazon-security-lake/CONTRIBUTING.md
@@ -5,41 +5,38 @@
 A demo of the integration can be started using the content of this folder and Docker. Open a terminal in the `wazuh-indexer/integrations` folder and start the environment.

 ```console
-docker compose -f ./docker/amazon-security-lake.yml up -d
+docker compose -f ./docker/compose.amazon-security-lake.yml up -d
 ```

 This Docker Compose project will bring up these services:

 - a _wazuh-indexer_ node
 - a _wazuh-dashboard_ node
 - a _logstash_ node
-- our [events generator](./tools/events-generator/README.md)
+- our [events generator](../tools/events-generator/README.md)
 - an AWS Lambda Python container.

-On the one hand, the event generator will push events constantly to the indexer, to the `wazuh-alerts-4.x-sample` index by default (refer to the [events generator](./tools/events-generator/README.md) documentation for customization options). On the other hand, Logstash will query for new data and deliver it to output configured in the pipeline, which can be one of `indexer-to-s3` or `indexer-to-file`.
+On the one hand, the event generator will push events constantly to the indexer, to the `wazuh-alerts-4.x-sample` index by default (refer to the [events generator](../tools/events-generator/README.md) documentation for customization options). On the other hand, Logstash will query for new data and deliver it to output configured in the pipeline `indexer-to-s3`. This pipeline delivers the data to an S3 bucket, from which the data is processed using a Lambda function, to finally be sent to the Amazon Security Lake bucket in Parquet format.

-The `indexer-to-s3` pipeline is the method used by the integration. This pipeline delivers the data to an S3 bucket, from which the data is processed using a Lambda function, to finally be sent to the Amazon Security Lake bucket in Parquet format.
-
-
-Attach a terminal to the container and start the integration by starting Logstash, as follows:
+The pipeline starts automatically, but if you need to start it manually, attach a terminal to the Logstash container and start the integration using the command below:

 ```console
-/usr/share/logstash/bin/logstash -f /usr/share/logstash/pipeline/indexer-to-s3.conf --path.settings /etc/logstash
+/usr/share/logstash/bin/logstash -f /usr/share/logstash/pipeline/indexer-to-s3.conf
 ```

 After 5 minutes, the first batch of data will show up in http://localhost:9444/ui/wazuh-aws-security-lake-raw. You'll need to invoke the Lambda function manually, selecting the log file to process.

 ```bash
-bash amazon-security-lake/src/invoke-lambda.sh <file>
+bash amazon-security-lake/invoke-lambda.sh <file>
 ```

-Processed data will be uploaded to http://localhost:9444/ui/wazuh-aws-security-lake-parquet. Click on any file to download it, and check it's content using `parquet-tools`. Just make sure of installing the virtual environment first, through [requirements.txt](./amazon-security-lake/).
+Processed data will be uploaded to http://localhost:9444/ui/wazuh-aws-security-lake-parquet. Click on any file to download it, and check it's content using `parquet-tools`. Just make sure of installing the virtual environment first, through [requirements.txt](./requirements.txt).

 ```bash
 parquet-tools show <parquet-file>
 ```

-If the `S3_BUCKET_OCSF` variable is set in the container running the AWS Lambda function, intermediate data in OCSF and JSON format will be written to a dedicated bucket. This is enabled by default, writing to the `wazuh-aws-security-lake-ocsf` bucket. Bucket names and additional environment variables can be configured editing the [amazon-security-lake.yml](./docker/amazon-security-lake.yml) file.
+If the `S3_BUCKET_OCSF` variable is set in the container running the AWS Lambda function, intermediate data in OCSF and JSON format will be written to a dedicated bucket. This is enabled by default, writing to the `wazuh-aws-security-lake-ocsf` bucket. Bucket names and additional environment variables can be configured editing the [compose.amazon-security-lake.yml](../docker/compose.amazon-security-lake.yml) file.

 For development or debugging purposes, you may want to enable hot-reload, test or debug on these files, by using the `--config.reload.automatic`, `--config.test_and_exit` or `--debug` flags, respectively.

@@ -56,4 +53,3 @@ See [README.md](README.md). The instructions on that section have been based on
 **Docker is required**.

 The [Makefile](./Makefile) in this folder automates the generation of a zip deployment package containing the source code and the required dependencies for the AWS Lambda function. Simply run `make` and it will generate the `wazuh_to_amazon_security_lake.zip` file. The main target runs a Docker container to install the Python3 dependencies locally, and zips the source code and the dependencies together.
-
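For reference, `invoke-lambda.sh` works by POSTing a synthetic S3 event to the Lambda Runtime Interface Emulator that the container exposes on port 9000. A trimmed sketch of the equivalent request (bucket and key values are illustrative):

```console
curl -X POST "http://localhost:9000/2015-03-31/functions/function/invocations" \
  -H "Content-Type: application/json" \
  -d '{"Records":[{"s3":{"bucket":{"name":"wazuh-aws-security-lake-raw"},"object":{"key":"20240820/ls.s3.sample.txt.gz"}}}]}'
```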
53 changes: 12 additions & 41 deletions integrations/amazon-security-lake/Dockerfile
@@ -1,46 +1,17 @@
-# MULTI-STAGE build
 # docker build --platform linux/amd64 --no-cache -f aws-lambda.dockerfile -t docker-image:test .
 # docker run --platform linux/amd64 -p 9000:8080 docker-image:test

-FROM python:3.9 as builder
-# Create a virtualenv for dependencies. This isolates these packages from
-# system-level packages.
-RUN python3 -m venv /env
-# Setting these environment variables are the same as running
-# source /env/bin/activate.
-ENV VIRTUAL_ENV /env
-ENV PATH /env/bin:$PATH
-# Copy the application's requirements.txt and run pip to install all
-# dependencies into the virtualenv.
-COPY requirements.txt /app/requirements.txt
-RUN pip install -r /app/requirements.txt
+# FROM public.ecr.aws/lambda/python:3.9
+FROM amazon/aws-lambda-python:3.12
+
+# Copy requirements.txt
+COPY requirements.aws.txt ${LAMBDA_TASK_ROOT}

-FROM python:3.9
-ENV LOGSTASH_KEYSTORE_PASS="SecretPassword"
-# Add the application source code.
-COPY --chown=logstash:logstash ./src /home/app
-# Add execution persmissions.
-RUN chmod a+x /home/app/lambda_function.py
-# Copy the application's dependencies.
-COPY --from=builder /env /env
+# Install the specified packages
+RUN pip install -r requirements.aws.txt

-# Install Logstash
-RUN apt-get update && apt-get install -y iputils-ping wget gpg apt-transport-https
-RUN wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | gpg --dearmor -o /usr/share/keyrings/elastic-keyring.gpg && \
-  echo "deb [signed-by=/usr/share/keyrings/elastic-keyring.gpg] https://artifacts.elastic.co/packages/8.x/apt stable main" | tee -a /etc/apt/sources.list.d/elastic-8.x.list && \
-  apt-get update && apt install -y logstash
-# Install logstash-input-opensearch plugin.
-RUN /usr/share/logstash/bin/logstash-plugin install logstash-input-opensearch
-# Copy the Logstash's ingestion pipelines.
-COPY --chown=logstash:logstash logstash/pipeline /usr/share/logstash/pipeline
-# Grant logstash ownership over its files
-RUN chown --recursive logstash:logstash /usr/share/logstash /etc/logstash /var/log/logstash /var/lib/logstash
+# Copy function code
+COPY src ${LAMBDA_TASK_ROOT}
+
-USER logstash
-# Copy and run the setup.sh script to create and configure a keystore for Logstash.
-COPY --chown=logstash:logstash logstash/setup.sh /usr/share/logstash/bin/setup.sh
-RUN bash /usr/share/logstash/bin/setup.sh
-
-# Disable ECS compatibility
-RUN `echo "pipeline.ecs_compatibility: disabled" >> /etc/logstash/logstash.yml`
-
-WORKDIR /home/app
+# Set the CMD to your handler (could also be done as a parameter override outside of the Dockerfile)
+CMD [ "lambda_function.lambda_handler" ]
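The rewritten image drops the embedded Logstash entirely and targets the AWS Lambda Python base image, which bundles the Runtime Interface Emulator. A sketch for exercising the handler locally, following the comments at the top of the Dockerfile (image tag is illustrative):

```console
docker build --platform linux/amd64 -t wazuh-to-asl:test .
docker run --platform linux/amd64 -p 9000:8080 wazuh-to-asl:test
```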
4 changes: 2 additions & 2 deletions integrations/amazon-security-lake/README.md
@@ -90,7 +90,7 @@ Follow the [official documentation](https://docs.aws.amazon.com/lambda/latest/dg
 - Configure the runtime to have 512 MB of memory and 30 seconds timeout.
 - Configure a trigger so every object with `.txt` extension uploaded to the S3 bucket created previously invokes the Lambda.
   ![AWS Lambda trigger](./images/asl-lambda-trigger.jpeg)
-- Use the [Makefile](./Makefile) to generate the zip package `wazuh_to_amazon_security_lake.zip`, and upload it to the S3 bucket created previously as per [these instructions](https://docs.aws.amazon.com/lambda/latest/dg/gettingstarted-package.html#gettingstarted-package-zip). See [CONTRIBUTING](./CONTRIBUTING.md) for details about the Makefile.
+- Use the [Makefile](./Makefile) to generate the zip package `wazuh_to_amazon_security_lake.zip`, and upload it to the S3 bucket created previously as per [these instructions](https://docs.aws.amazon.com/lambda/latest/dg/gettingstarted-package.html#gettingstarted-package-zip). See [CONTRIBUTING](./CONTRIBUTING.md) for details about the Makefile.
 - Configure the Lambda with the at least the required _Environment Variables_ below:

 | Environment variable | Required | Value |
@@ -234,7 +234,7 @@ The tables below represent how the Wazuh Security Events are mapped into the OCS
 | type_uid | Long | 200101 |
 | metadata.product.name | String | "Wazuh" |
 | metadata.product.vendor_name | String | "Wazuh, Inc." |
-| metadata.product.version | String | "4.9.0" |
+| metadata.product.version | String | "4.9.1" |
 | metadata.product.lang | String | "en" |
 | metadata.log_name | String | "Security events" |
 | metadata.log_provider | String | "Wazuh" |
2 changes: 1 addition & 1 deletion integrations/amazon-security-lake/invoke-lambda.sh
@@ -39,4 +39,4 @@ curl -X POST "http://localhost:9000/2015-03-31/functions/function/invocations" -
       }
     }
   ]
-}'
\ No newline at end of file
+}'
@@ -27,19 +27,27 @@ output {
   s3 {
     id => "output.s3"
     access_key_id => "${AWS_ACCESS_KEY_ID}"
-    secret_access_key => "${AWS_SECRET_ACCESS_KEY}"
-    region => "${AWS_REGION}"
-    endpoint => "${AWS_ENDPOINT}"
     bucket => "${S3_BUCKET_RAW}"
     codec => "json_lines"
-    retry_count => 0
-    validate_credentials_on_root_bucket => false
+    encoding => "gzip"
+    endpoint => "${AWS_ENDPOINT}"
+    prefix => "%{+YYYY}%{+MM}%{+dd}"
+    region => "${AWS_REGION}"
+    retry_count => 0
+    secret_access_key => "${AWS_SECRET_ACCESS_KEY}"
+    server_side_encryption => true
+    server_side_encryption_algorithm => "AES256"
+    time_file => 5
+    validate_credentials_on_root_bucket => false
     additional_settings => {
       "force_path_style" => true
     }
-    time_file => 5
   }
+  file {
+    id => "output.file"
+    path => "/usr/share/logstash/logs/indexer-to-file-%{+YYYY-MM-dd-HH}.log"
+    file_mode => 0644
+    codec => json_lines
+    flush_interval => 30
+  }
 }
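The `s3` output settings are now alphabetized and gain gzip compression, date-based key prefixes, server-side encryption, and a five-minute rotation, while the new `file` output keeps a local JSON-lines copy for debugging; `force_path_style` keeps the bucket name in the URL path, which non-AWS S3-compatible endpoints typically require. With these settings, uploaded objects land under keys such as (illustrative):

```console
s3://wazuh-aws-security-lake-raw/20240820/ls.s3.<uuid>.part0.txt.gz
```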
3 changes: 2 additions & 1 deletion integrations/amazon-security-lake/src/lambda_function.py
@@ -2,6 +2,7 @@
 import os
 import urllib.parse
 import json
+import gzip
 import boto3
 import pyarrow as pa
 import pyarrow.parquet as pq
@@ -31,7 +32,7 @@ def get_events(bucket: str, key: str) -> list:
     logger.info(f"Reading {key}.")
     try:
         response = s3_client.get_object(Bucket=bucket, Key=key)
-        data = response['Body'].read().decode('utf-8')
+        data = gzip.decompress(response['Body'].read()).decode('utf-8')
         return data.splitlines()
     except ClientError as e:
         logger.error(
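This pairs with the Logstash change above: objects are now uploaded with `encoding => "gzip"`, so the Lambda must decompress the body before splitting it into JSON lines. A quick way to inspect one of the raw objects by hand (bucket, key, and endpoint are illustrative):

```console
aws s3 cp s3://wazuh-aws-security-lake-raw/20240820/sample.txt.gz - \
  --endpoint-url "$AWS_ENDPOINT" | gunzip | head -n 1
```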
25 changes: 20 additions & 5 deletions integrations/docker/.env
@@ -4,9 +4,6 @@ ELASTIC_PASSWORD=elastic
 # Password for the 'kibana_system' user (at least 6 characters)
 KIBANA_PASSWORD=elastic

-# Version of Elastic products
-STACK_VERSION=8.6.2
-
 # Set the cluster name
 CLUSTER_NAME=elastic

@@ -22,8 +19,26 @@ KIBANA_PORT=5602
 # Increase or decrease based on the available host memory (in bytes)
 MEM_LIMIT=1073741824

+# Wazuh version
+WAZUH_VERSION=4.8.1
+
+# Wazuh Indexer version (Provisionally using OpenSearch)
+WAZUH_INDEXER_VERSION=2.14.0
+
+# Wazuh Dashboard version (Provisionally using OpenSearch Dashboards)
+WAZUH_DASHBOARD_VERSION=2.14.0
+
+# Wazuh certs generator version
+WAZUH_CERTS_GENERATOR_VERSION=0.0.1
+
 # OpenSearch destination cluster version
 OS_VERSION=2.14.0

-# Wazuh version
-WAZUH_VERSION=4.7.5
+# Logstash version:
+LOGSTASH_OSS_VERSION=8.9.0
+
+# Splunk version:
+SPLUNK_VERSION=9.1.4
+
+# Version of Elastic products
+STACK_VERSION=8.14.3
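With every component version now pinned in `.env`, the values the compose files will actually use can be checked without starting anything (compose file name taken from this integration's docker folder):

```console
docker compose -f ./docker/compose.amazon-security-lake.yml config | grep 'image:'
```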