Skip to content

Commit

Permalink
Add first functional lambda function
Browse files Browse the repository at this point in the history
Tested on local environment, using S3 Ninja and a Lambda container
  • Loading branch information
AlexRuiz7 committed Apr 15, 2024
1 parent 5858a23 commit 0955d1b
Show file tree
Hide file tree
Showing 4 changed files with 125 additions and 1 deletion.
14 changes: 14 additions & 0 deletions integrations/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,20 @@ For production usage, follow the instructions in our documentation page about th

As a last note, we would like to point out that we also use this Docker environment for development.

###### Integration through an AWS Lambda function

Start the integration by sending log data to an S3 bucket.

```console
/usr/share/logstash/bin/logstash -f /usr/share/logstash/pipeline/indexer-to-s3.conf --path.settings /etc/logstash
```

Once there is data in the source bucket, you can invoke the Lambda function manually with an HTTP API request, as follows:

```console
curl -X POST "http://localhost:9000/2015-03-31/functions/function/invocations" -d '{"Resources":"hello world!"}'
```

### Other integrations

TBD
40 changes: 40 additions & 0 deletions integrations/amazon-security-lake/src/invoke-lambda.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
#!/bin/bash

# Manually invoke the Amazon Security Lake Lambda function running in the
# local Lambda runtime container (listening on localhost:9000).
#
# The payload mimics the "ObjectCreated:Put" S3 event notification that AWS
# would deliver when a log file lands in the source bucket, so the handler
# exercises its full read-transform-upload path. The object "key" below must
# reference an object that actually exists in the named bucket for the
# invocation to succeed end to end.
curl -X POST "http://localhost:9000/2015-03-31/functions/function/invocations" -d '{
  "Records": [
    {
      "eventVersion": "2.0",
      "eventSource": "aws:s3",
      "awsRegion": "us-east-1",
      "eventTime": "1970-01-01T00:00:00.000Z",
      "eventName": "ObjectCreated:Put",
      "userIdentity": {
        "principalId": "AIDAJDPLRKLG7UEXAMPLE"
      },
      "requestParameters":{
        "sourceIPAddress":"127.0.0.1"
      },
      "responseElements":{
        "x-amz-request-id":"C3D13FE58DE4C810",
        "x-amz-id-2":"FMyUVURIY8/IgAtTv8xRjskZQpcIZ9KG4V5Wp6S7S/JRWeUWerMUE5JgHvANOjpD"
      },
      "s3": {
        "s3SchemaVersion": "1.0",
        "configurationId": "testConfigRule",
        "bucket": {
          "name": "wazuh-indexer-amazon-security-lake-bucket",
          "ownerIdentity": {
            "principalId":"A3NL1KOZZKExample"
          },
          "arn": "arn:aws:s3:::wazuh-indexer-amazon-security-lake-bucket"
        },
        "object": {
          "key": "2024/04/11/ls.s3.f6e2a1b2-4ea5-47b6-be32-3a6746a48187.2024-04-11T17.10.part14.txt",
          "size": 1024,
          "eTag":"d41d8cd98f00b204e9800998ecf8427e",
          "versionId":"096fKKXTRTtl3on89fVO.nfljtsv6qko"
        }
      }
    }
  ]
}'
67 changes: 66 additions & 1 deletion integrations/amazon-security-lake/src/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,8 @@
import logging
import boto3
from botocore.exceptions import ClientError
import urllib.parse
import json

# NOTE work in progress
def upload_file(table, file_name, bucket, object_name=None):
Expand Down Expand Up @@ -122,4 +124,67 @@ def _test():
# _test()

def lambda_handler(event, context):
    """AWS Lambda entry point: convert an uploaded log file to OCSF Parquet.

    Triggered by an S3 ``ObjectCreated`` event notification. Reads the raw
    log file from the source bucket, transforms each line into an OCSF
    detection finding, serializes the batch as Parquet, and uploads it to
    the destination bucket under the same key with a ``.parquet`` suffix.

    Parameters
    ----------
    event : dict
        S3 event notification payload; only ``Records[0]`` is processed.
    context : object
        Lambda context object (only logged).

    Returns
    -------
    str
        JSON string with the number of raw lines read and the S3
        ``put_object`` response.
    """
    logging.basicConfig(filename='lambda.log', encoding='utf-8', level=logging.DEBUG)
    logging.debug("Event: %s", event)
    logging.debug("Context: %s", context)

    # Destination bucket is hard-coded for the local test environment.
    # TODO(review): make this configurable via an environment variable.
    dst_bucket = "final-bucket"

    # Source bucket and object key come from the S3 event notification.
    # From https://docs.aws.amazon.com/lambda/latest/dg/with-s3-example.html#with-s3-example-create-function
    src_bucket = event['Records'][0]['s3']['bucket']['name']
    key = urllib.parse.unquote_plus(event['Records'][0]['s3']['object']['key'], encoding='utf-8')
    logging.info(f"Lambda function invoked due to {key}.")
    logging.info(f"Source bucket name is {src_bucket}. Destination bucket is {dst_bucket}.")

    # boto3 client setup. The endpoint_url points at the local S3 Ninja
    # container; remove it (or make it configurable) for real AWS usage.
    logging.info("Initializing boto3 client.")
    client = boto3.client(
        service_name='s3',
        aws_access_key_id=os.environ['AWS_ACCESS_KEY_ID'],
        aws_secret_access_key=os.environ['AWS_SECRET_ACCESS_KEY'],
        region_name=os.environ['AWS_REGION'],
        endpoint_url='http://s3.ninja:9000',
    )
    logging.info("boto3 client initialized.")

    # Read the raw log file from the source bucket (one JSON event per line).
    logging.info(f"Reading {key}.")
    response = client.get_object(
        Bucket=src_bucket,
        Key=key
    )
    data = response['Body'].read().decode('utf-8')
    raw_events = data.splitlines()

    # Transform each line into an OCSF detection finding. Use a loop variable
    # distinct from `event` so the handler's parameter is not shadowed (the
    # original code shadowed it, so the error branch logged the wrong object).
    logging.info("Transforming data.")
    ocsf_events = []
    for line in raw_events:
        try:
            wazuh_event = transform.converter.from_json(line)
            ocsf_event = transform.converter.to_detection_finding(wazuh_event)
            ocsf_events.append(ocsf_event.model_dump())

            # Temporary on-disk copy for debugging. Lambda's filesystem is
            # read-only except for /tmp, so write there.
            with open('/tmp/ocsf-events.json', "a") as fd:
                fd.write(str(ocsf_event) + "\n")
        except AttributeError as e:
            logging.error("Error transforming line to OCSF: %s", e)
            logging.error("Offending line: %s", line)

    # Serialize the whole batch to a Parquet file under /tmp.
    table = pa.Table.from_pylist(ocsf_events)
    parquet_path = '/tmp/tmp.parquet'
    pq.write_table(table, parquet_path)

    # Upload to the destination bucket, mirroring the source key with a
    # .parquet extension. The context manager closes the file handle that
    # the original code leaked.
    logging.info(f"Uploading data to {dst_bucket}.")
    with open(parquet_path, 'rb') as parquet_fd:
        response = client.put_object(
            Bucket=dst_bucket,
            Key=key.replace('.txt', '.parquet'),
            Body=parquet_fd
        )

    return json.dumps({'size': len(raw_events), 'response': response})
5 changes: 5 additions & 0 deletions integrations/docker/amazon-security-lake.yml
Original file line number Diff line number Diff line change
Expand Up @@ -112,6 +112,11 @@ services:
dockerfile: ../amazon-security-lake/aws-lambda.dockerfile
container_name: wazuh.integration.security.lake.aws.lambda
hostname: wazuh.integration.security.lake.aws.lambda
environment:
AWS_ACCESS_KEY_ID: "AKIAIOSFODNN7EXAMPLE"
AWS_SECRET_ACCESS_KEY: "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY"
AWS_REGION: "us-east-1"
AWS_BUCKET: "wazuh-indexer-amazon-security-lake-bucket"
volumes:
- ../amazon-security-lake/src:/var/task
ports:
Expand Down

0 comments on commit 0955d1b

Please sign in to comment.