From b0bdb8ce4b64003fc136581f4231f69ed2e77556 Mon Sep 17 00:00:00 2001 From: Taro Matsuzawa Date: Wed, 18 Dec 2019 16:26:49 +0900 Subject: [PATCH 01/48] WIP: for python3 --- Dockerfile | 6 +++--- clamav.py | 2 +- scan.py | 4 ++-- scripts/run-scan-lambda | 2 +- scripts/run-update-lambda | 2 +- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/Dockerfile b/Dockerfile index 2805b357..871a62d2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -12,11 +12,11 @@ COPY requirements.txt /opt/app/requirements.txt # Install packages RUN yum update -y -RUN yum install -y cpio python2-pip yum-utils zip unzip less +RUN yum install -y cpio python3-pip yum-utils zip unzip less RUN yum install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm # This had --no-cache-dir, tracing through multiple tickets led to a problem in wheel -RUN pip install -r requirements.txt +RUN pip3 install -r requirements.txt RUN rm -rf /root/.cache/pip # Download libraries we need to run in lambda @@ -39,7 +39,7 @@ RUN echo "CompressLocalDatabase yes" >> /opt/app/bin/freshclam.conf WORKDIR /opt/app RUN zip -r9 --exclude="*test*" /opt/app/build/lambda.zip *.py bin -WORKDIR /usr/lib/python2.7/site-packages +WORKDIR /usr/lib/python3.7/site-packages RUN zip -r9 /opt/app/build/lambda.zip * WORKDIR /opt/app diff --git a/clamav.py b/clamav.py index ea83b62a..73fbe16e 100644 --- a/clamav.py +++ b/clamav.py @@ -42,7 +42,7 @@ def current_library_search_path(): - ld_verbose = subprocess.check_output(["ld", "--verbose"]) + ld_verbose = subprocess.check_output(["ld", "--verbose"]).decode('utf-8') rd_ld = re.compile(RE_SEARCH_DIR) return rd_ld.findall(ld_verbose) diff --git a/scan.py b/scan.py index de505b76..c55716d4 100644 --- a/scan.py +++ b/scan.py @@ -16,7 +16,7 @@ import copy import json import os -import urllib +from urllib.parse import unquote_plus from distutils.util import strtobool import boto3 @@ -66,7 +66,7 @@ def event_object(event, event_source="s3"): key_name = 
s3_obj["object"].get("key", None) if key_name: - key_name = urllib.unquote_plus(key_name.encode("utf8")) + key_name = unquote_plus(key_name) # Ensure both bucket and key exist if (not bucket_name) or (not key_name): diff --git a/scripts/run-scan-lambda b/scripts/run-scan-lambda index 2d6f21b0..c70e1e41 100755 --- a/scripts/run-scan-lambda +++ b/scripts/run-scan-lambda @@ -49,4 +49,4 @@ docker run --rm \ --memory-swap="${MEM}" \ --cpus="${CPUS}" \ --name="${NAME}" \ - lambci/lambda:python2.7 scan.lambda_handler "${EVENT}" + lambci/lambda:python3.7 scan.lambda_handler "${EVENT}" diff --git a/scripts/run-update-lambda b/scripts/run-update-lambda index 66706a89..3d24defa 100755 --- a/scripts/run-update-lambda +++ b/scripts/run-update-lambda @@ -26,4 +26,4 @@ docker run --rm \ --memory-swap="${MEM}" \ --cpus="${CPUS}" \ --name="${NAME}" \ - lambci/lambda:python2.7 update.lambda_handler + lambci/lambda:python3.7 update.lambda_handler From 62c3ea40f861c16b0bad5770cb0a6042f2881590 Mon Sep 17 00:00:00 2001 From: Taro Matsuzawa Date: Wed, 18 Dec 2019 17:03:08 +0900 Subject: [PATCH 02/48] fixes focus for operator --- scan_test.py | 40 ++++++++++++++++++++-------------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/scan_test.py b/scan_test.py index ffa024ae..42084c07 100644 --- a/scan_test.py +++ b/scan_test.py @@ -97,10 +97,10 @@ def test_s3_event_object_bucket_key_missing(self): event = {"Records": [{"s3": {"bucket": {}, "object": {}}}]} with self.assertRaises(Exception) as cm: event_object(event) - self.assertEquals( - cm.exception.message, - "Unable to retrieve object from event.\n{}".format(event), - ) + self.assertEquals( + cm.exception.message, + "Unable to retrieve object from event.\n{}".format(event), + ) def test_s3_event_object_no_records(self): event = {"Records": []} @@ -165,10 +165,10 @@ def test_verify_s3_object_versioning_not_enabled(self): with self.assertRaises(Exception) as cm: with s3_stubber_resource: 
verify_s3_object_version(self.s3, s3_obj) - self.assertEquals( - cm.exception.message, - "Object versioning is not enabled in bucket {}".format(self.s3_bucket_name), - ) + self.assertEquals( + cm.exception.message, + "Object versioning is not enabled in bucket {}".format(self.s3_bucket_name), + ) def test_verify_s3_object_version_multiple_versions(self): s3_obj = self.s3.Object(self.s3_bucket_name, self.s3_key_name) @@ -218,12 +218,12 @@ def test_verify_s3_object_version_multiple_versions(self): with self.assertRaises(Exception) as cm: with s3_stubber_resource: verify_s3_object_version(self.s3, s3_obj) - self.assertEquals( - cm.exception.message, - "Detected multiple object versions in {}.{}, aborting processing".format( - self.s3_bucket_name, self.s3_key_name - ), - ) + self.assertEquals( + cm.exception.message, + "Detected multiple object versions in {}.{}, aborting processing".format( + self.s3_bucket_name, self.s3_key_name + ), + ) def test_sns_start_scan(self): sns_stubber = Stubber(self.sns_client) @@ -422,9 +422,9 @@ def test_delete_s3_object_exception(self): with s3_stubber: s3_obj = self.s3.Object(self.s3_bucket_name, self.s3_key_name) delete_s3_object(s3_obj) - self.assertEquals( - cm.exception.message, - "Failed to delete infected file: {}.{}".format( - self.s3_bucket_name, self.s3_key_name - ), - ) + self.assertEquals( + cm.exception.message, + "Failed to delete infected file: {}.{}".format( + self.s3_bucket_name, self.s3_key_name + ), + ) From 3b80b73f67dc00f5f885c4d19ff8e60b34ef88f9 Mon Sep 17 00:00:00 2001 From: Taro Matsuzawa Date: Wed, 18 Dec 2019 17:05:11 +0900 Subject: [PATCH 03/48] fixes focus for operator --- scan_test.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scan_test.py b/scan_test.py index 42084c07..e7ffaa7a 100644 --- a/scan_test.py +++ b/scan_test.py @@ -85,13 +85,13 @@ def test_s3_event_object_missing_bucket(self): event = {"Records": [{"s3": {"object": {"key": self.s3_key_name}}}]} with 
self.assertRaises(Exception) as cm: event_object(event) - self.assertEquals(cm.exception.message, "No bucket found in event!") + self.assertEquals(cm.exception.message, "No bucket found in event!") def test_s3_event_object_missing_key(self): event = {"Records": [{"s3": {"bucket": {"name": self.s3_bucket_name}}}]} with self.assertRaises(Exception) as cm: event_object(event) - self.assertEquals(cm.exception.message, "No key found in event!") + self.assertEquals(cm.exception.message, "No key found in event!") def test_s3_event_object_bucket_key_missing(self): event = {"Records": [{"s3": {"bucket": {}, "object": {}}}]} @@ -106,7 +106,7 @@ def test_s3_event_object_no_records(self): event = {"Records": []} with self.assertRaises(Exception) as cm: event_object(event) - self.assertEquals(cm.exception.message, "No records found in event!") + self.assertEquals(cm.exception.message, "No records found in event!") def test_verify_s3_object_version(self): s3_obj = self.s3.Object(self.s3_bucket_name, self.s3_key_name) From 023dbd53cd5967580756c3188506d495ee488e9d Mon Sep 17 00:00:00 2001 From: Taro Matsuzawa Date: Wed, 18 Dec 2019 17:15:46 +0900 Subject: [PATCH 04/48] fixes README for python3 --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index b3f7c04e..b5ab084d 100644 --- a/README.md +++ b/README.md @@ -78,7 +78,7 @@ this every 3 hours to stay protected from the latest threats. 3. Choose **Author from scratch** on the *Create function* page 4. Name your function `bucket-antivirus-update` when prompted on the *Configure function* step. -5. Set *Runtime* to `Python 2.7` +5. Set *Runtime* to `Python 3.7` 6. 
Create a new role name `bucket-antivirus-update` that uses the following policy document @@ -363,6 +363,7 @@ The python tests in this repository use `unittest` and are run via the `nose` ut to install the developer resources and then run the tests: ```sh +pip install -r requirements.txt pip install -r requirements-dev.txt make test ``` From 512db7ea4d903aa71f6e3828fbad4db2bbd280c6 Mon Sep 17 00:00:00 2001 From: Taro Matsuzawa Date: Wed, 18 Dec 2019 17:21:05 +0900 Subject: [PATCH 05/48] fixes README for python3 --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index b5ab084d..e0ce5cb6 100644 --- a/README.md +++ b/README.md @@ -144,7 +144,7 @@ the default provided. 2. From the AWS Lambda Dashboard, click **Create function** 3. Choose **Author from scratch** on the *Create function* page 4. Name your function `bucket-antivirus-function` -5. Set *Runtime* to `Python 2.7` +5. Set *Runtime* to `Python 3.7` 6. Create a new role name `bucket-antivirus-function` that uses the following policy document From e83d913f82d193e2118f893d577a24331905c8d1 Mon Sep 17 00:00:00 2001 From: Taro Matsuzawa Date: Fri, 20 Dec 2019 13:15:09 +0900 Subject: [PATCH 06/48] fixes site-packages path --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 871a62d2..a59b66e3 100644 --- a/Dockerfile +++ b/Dockerfile @@ -39,7 +39,7 @@ RUN echo "CompressLocalDatabase yes" >> /opt/app/bin/freshclam.conf WORKDIR /opt/app RUN zip -r9 --exclude="*test*" /opt/app/build/lambda.zip *.py bin -WORKDIR /usr/lib/python3.7/site-packages +WORKDIR /usr/local/lib/python3.7/site-packages RUN zip -r9 /opt/app/build/lambda.zip * WORKDIR /opt/app From d48c47c5ad0672d7c96ed46590c3d025798fa4d1 Mon Sep 17 00:00:00 2001 From: Taro Matsuzawa Date: Fri, 20 Dec 2019 13:56:23 +0900 Subject: [PATCH 07/48] process result return as bytes, fixes bytes to string before processing --- clamav.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/clamav.py b/clamav.py index 73fbe16e..f46a5509 100644 --- a/clamav.py +++ b/clamav.py @@ -194,7 +194,7 @@ def scan_file(path): stdout=subprocess.PIPE, env=av_env, ) - output = av_proc.communicate()[0] + output = av_proc.communicate()[0].decode() print("clamscan output:\n%s" % output) # Turn the output into a data source we can read From fb7866326d5d107587784dc3a7adec49abe99427 Mon Sep 17 00:00:00 2001 From: Taro Matsuzawa Date: Fri, 20 Dec 2019 14:16:21 +0900 Subject: [PATCH 08/48] use python 3.7 for circleci test --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 28801ceb..477a8e7b 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -15,7 +15,7 @@ executors: - image: docker:17.05.0-ce python_test: docker: - - image: circleci/python:2.7-stretch + - image: circleci/python:3.7-stretch pre_commit_test: docker: - image: circleci/python:3.7-stretch From a57787d3d2b1359957a81d18d65a6123830e08ea Mon Sep 17 00:00:00 2001 From: Taro Matsuzawa Date: Fri, 20 Dec 2019 14:41:52 +0900 Subject: [PATCH 09/48] fixes code format by black --- clamav.py | 2 +- scan_test.py | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/clamav.py b/clamav.py index f46a5509..8042e739 100644 --- a/clamav.py +++ b/clamav.py @@ -42,7 +42,7 @@ def current_library_search_path(): - ld_verbose = subprocess.check_output(["ld", "--verbose"]).decode('utf-8') + ld_verbose = subprocess.check_output(["ld", "--verbose"]).decode("utf-8") rd_ld = re.compile(RE_SEARCH_DIR) return rd_ld.findall(ld_verbose) diff --git a/scan_test.py b/scan_test.py index e7ffaa7a..0a18132a 100644 --- a/scan_test.py +++ b/scan_test.py @@ -167,7 +167,9 @@ def test_verify_s3_object_versioning_not_enabled(self): verify_s3_object_version(self.s3, s3_obj) self.assertEquals( cm.exception.message, - "Object versioning is not enabled in bucket {}".format(self.s3_bucket_name), + 
"Object versioning is not enabled in bucket {}".format( + self.s3_bucket_name + ), ) def test_verify_s3_object_version_multiple_versions(self): From c09a62da25267b13af05375288b9ffe0aacd77e1 Mon Sep 17 00:00:00 2001 From: Avi Pinto Date: Sun, 7 Jun 2020 14:14:44 +0300 Subject: [PATCH 10/48] Without this change, at the logs of the update function you get: freshclam output: b'./bin/freshclam: error while loading shared libraries: libprelude.so.28: cannot open shared object file: No such file or directory\n' Unexpected exit code from freshclam: 127. This is the recommended fix by https://github.com/gmirsky from issue #125 ( https://github.com/upsidetravel/bucket-antivirus-function/issues/125 ) that solved this error also --- Dockerfile | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index a59b66e3..4d86948d 100644 --- a/Dockerfile +++ b/Dockerfile @@ -21,12 +21,17 @@ RUN rm -rf /root/.cache/pip # Download libraries we need to run in lambda WORKDIR /tmp -RUN yumdownloader -x \*i686 --archlist=x86_64 clamav clamav-lib clamav-update json-c pcre2 +RUN yumdownloader -x \*i686 --archlist=x86_64 clamav clamav-lib clamav-update json-c pcre2 libprelude gnutls libtasn1 lib64nettle nettle RUN rpm2cpio clamav-0*.rpm | cpio -idmv RUN rpm2cpio clamav-lib*.rpm | cpio -idmv RUN rpm2cpio clamav-update*.rpm | cpio -idmv RUN rpm2cpio json-c*.rpm | cpio -idmv RUN rpm2cpio pcre*.rpm | cpio -idmv +RUN rpm2cpio gnutls* | cpio -idmv +RUN rpm2cpio nettle* | cpio -idmv +RUN rpm2cpio lib* | cpio -idmv +RUN rpm2cpio *.rpm | cpio -idmv +RUN rpm2cpio libtasn1* | cpio -idmv # Copy over the binaries and libraries RUN cp /tmp/usr/bin/clamscan /tmp/usr/bin/freshclam /tmp/usr/lib64/* /opt/app/bin/ From 824f0991585b9405069191122c1bb288bd0068c7 Mon Sep 17 00:00:00 2001 From: Jeremy Deppen Date: Sun, 2 May 2021 23:43:51 -0400 Subject: [PATCH 11/48] added cloudformation template to bootstrap infra creation --- deploy/cloudformation.yaml | 297 
+++++++++++++++++++++ 1 file changed, 297 insertions(+) create mode 100644 deploy/cloudformation.yaml diff --git a/deploy/cloudformation.yaml b/deploy/cloudformation.yaml new file mode 100644 index 00000000..3d9131e6 --- /dev/null +++ b/deploy/cloudformation.yaml @@ -0,0 +1,297 @@ +--- + AWSTemplateFormatVersion: '2010-09-09' + + Description: Bucket Antivirus Quickstart Template + + Parameters: + + AVBucketType: + Type: String + Description: Specifies if the bucket to hold the AV definitions should be "public" or "private". Only choose "public" if other accounts need to access this bucket. + Default: "private" + AllowedValues: + - "public" + - "private" + + SourceBucket: + Type: String + Description: Name of the source bucket whose objects will be scanned. If more than one source bucket, the others will have to be manually added to the AV Scanner Policy after creation. + Default: "" + AllowedPattern : ".+" + + Conditions: + publicBucket: !Equals [ !Ref AVBucketType, "public" ] + + Resources: + + S3BucketAVDefinitions: + Type: AWS::S3::Bucket + Properties: + BucketName: !Join # Append the CloudFormation StackId for unique bucket naming + - "-" + - - "antivirus-definitions" + - !Select + - 0 + - !Split + - "-" + - !Select + - 2 + - !Split + - "/" + - !Ref "AWS::StackId" + BucketEncryption: + ServerSideEncryptionConfiguration: + - ServerSideEncryptionByDefault: + SSEAlgorithm: AES256 + AccessControl: BucketOwnerFullControl + PublicAccessBlockConfiguration: + BlockPublicAcls: !If [ publicBucket, false, true ] + BlockPublicPolicy: !If [ publicBucket, false, true ] + IgnorePublicAcls: !If [ publicBucket, false, true ] + RestrictPublicBuckets: !If [ publicBucket, false, true ] + Tags: + - Key: Service + Value: bucket-antivirus + VersioningConfiguration: + Status: Suspended + + S3BucketPolicyAVDefinitions: + Type: AWS::S3::BucketPolicy + Condition: publicBucket + Properties: + Bucket: !Ref S3BucketAVDefinitions + PolicyDocument: + Statement: + - Sid: 
AllowPublic + Action: + - s3:GetObject + - s3:GetObjectTagging + Effect: Allow + Principal: + AWS: + - "*" + Resource: + - !Sub [ "arn:aws:s3:::${BucketName}/*", { BucketName: !Ref S3BucketAVDefinitions } ] + + IamRoleAVDefinitions: + Type: 'AWS::IAM::Role' + Properties: + RoleName: AVDefinitionsLambdaRole + AssumeRolePolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Principal: + Service: + - lambda.amazonaws.com + Action: + - 'sts:AssumeRole' + Tags: + - Key: Service + Value: bucket-antivirus + + IamRoleAVScanner: + Type: 'AWS::IAM::Role' + Properties: + RoleName: AVScannerLambdaRole + AssumeRolePolicyDocument: + Version: "2012-10-17" + Statement: + - Effect: Allow + Principal: + Service: + - lambda.amazonaws.com + Action: + - 'sts:AssumeRole' + Tags: + - Key: Service + Value: bucket-antivirus + + IamPolicyAVDefinitions: + Type: AWS::IAM::Policy + DependsOn: + - S3BucketAVDefinitions + - IamRoleAVDefinitions + Properties: + PolicyName: AVDefinitionsLambdaPolicy + Roles: + - !Ref IamRoleAVDefinitions + PolicyDocument: + Version: "2012-10-17" + Statement: + - Sid: WriteCloudWatchLogs + Effect: Allow + Action: + - "logs:CreateLogGroup" + - "logs:CreateLogStream" + - "logs:PutLogEvents" + Resource: "*" + - Sid: S3GetAndPutWithTagging + Effect: Allow + Action: + - "s3:GetObject" + - "s3:GetObjectTagging" + - "s3:PutObject" + - "s3:PutObjectTagging" + - "s3:PutObjectVersionTagging" + Resource: + - !Sub [ "arn:aws:s3:::${BucketName}/*", { BucketName: !Ref S3BucketAVDefinitions } ] + - Sid: S3HeadObject + Effect: Allow + Action: + - "s3:ListBucket" + Resource: + - !Sub [ "arn:aws:s3:::${BucketName}/*", { BucketName: !Ref S3BucketAVDefinitions } ] + - !Sub [ "arn:aws:s3:::${BucketName}", { BucketName: !Ref S3BucketAVDefinitions } ] + + IamPolicyAVScanner: + Type: AWS::IAM::Policy + DependsOn: + - S3BucketAVDefinitions + - IamRoleAVScanner + Properties: + PolicyName: AVScannerLambdaPolicy + Roles: + - !Ref IamRoleAVScanner + PolicyDocument: + Version: 
"2012-10-17" + Statement: + - Sid: WriteCloudWatchLogs + Effect: Allow + Action: + - "logs:CreateLogGroup" + - "logs:CreateLogStream" + - "logs:PutLogEvents" + Resource: "*" + - Sid: S3AVScan + Effect: Allow + Action: + - "s3:GetObject" + - "s3:GetObjectTagging" + - "s3:GetObjectVersion" + - "s3:PutObjectTagging" + - "s3:PutObjectVersionTagging" + Resource: + - !Sub [ "arn:aws:s3:::${SourceBucketName}/*", { SourceBucketName: !Ref SourceBucket } ] + - Sid: S3AVDefinitions + Effect: Allow + Action: + - "s3:GetObject" + - "s3:GetObjectTagging" + Resource: + - !Sub [ "arn:aws:s3:::${BucketName}/*", { BucketName: !Ref S3BucketAVDefinitions } ] + - Sid: KmsDecrypt + Effect: Allow + Action: + - "kms:Decrypt" + Resource: + - !Sub [ "arn:aws:s3:::${SourceBucketName}/*", { SourceBucketName: !Ref SourceBucket } ] + - Sid: SNSPublic + Effect: Allow + Action: + - "sns:Publish" + Resource: + - "arn:aws:sns:::" + - "arn:aws:sns:::" + - Sid: S3HeadObject + Effect: Allow + Action: + - "s3:ListBucket" + Resource: + - !Sub [ "arn:aws:s3:::${BucketName}/*", { BucketName: !Ref S3BucketAVDefinitions } ] + - !Sub [ "arn:aws:s3:::${BucketName}", { BucketName: !Ref S3BucketAVDefinitions } ] + + LambdaAVUpdateDefinitions: + Type: AWS::Lambda::Function + DependsOn: + - S3BucketAVDefinitions + Properties: + FunctionName: avUpdateDefinitions + Description: LambdaFunction to update the AntiVirus definitions in the AV Definitions bucket. 
+ Runtime: python3.7 + Code: + ZipFile: | + import json + def lambda_handler(event, context): + return { + 'statusCode': 200, 'body': json.dumps('Hello from Lambda!') + } + Handler: "update.lambda_handler" + MemorySize: 1024 + Timeout: 300 + Role: !GetAtt [ IamRoleAVDefinitions, Arn ] + Environment: + Variables: + AV_DEFINITION_S3_BUCKET: !Ref S3BucketAVDefinitions + Tags: + - Key: Service + Value: bucket-antivirus + + LambdaAVUpdateDefinitionsSchedule: + Type: "AWS::Events::Rule" + DependsOn: + - LambdaAVUpdateDefinitions + Properties: + Name: LambdaAVUpdateDefinitionsSchedule + Description: A schedule for the AV Update Definitions Lambda function. + ScheduleExpression: rate(3 hours) + State: ENABLED + Targets: + - Arn: !Sub ${LambdaAVUpdateDefinitions.Arn} + Id: LambdaAVUpdateDefinitionsSchedule + + LambdaAVUpdateDefinitionsSchedulePermission: + Type: "AWS::Lambda::Permission" + DependsOn: + - LambdaAVUpdateDefinitionsSchedule + Properties: + Action: 'lambda:InvokeFunction' + FunctionName: !Sub ${LambdaAVUpdateDefinitions.Arn} + Principal: 'events.amazonaws.com' + SourceArn: !Sub ${LambdaAVUpdateDefinitionsSchedule.Arn} + + LambdaAVScanner: + Type: AWS::Lambda::Function + Properties: + FunctionName: avScanner + Description: LambdaFunction to scan newly uploaded objects in S3. 
+ Runtime: python3.7 + Code: + ZipFile: | + import json + def lambda_handler(event, context): + return { + 'statusCode': 200, 'body': json.dumps('Hello from Lambda!') + } + Handler: "scan.lambda_handler" + MemorySize: 1400 + Timeout: 300 + Role: !GetAtt [ IamRoleAVScanner, Arn ] + Environment: + Variables: + AV_DEFINITION_S3_BUCKET: !Ref S3BucketAVDefinitions + Tags: + - Key: Service + Value: bucket-antivirus + + + + Outputs: + + S3BucketAvDefinitions: + Value: !Ref S3BucketAVDefinitions + Description: S3 Bucket for the AV Definitions + + LambdaAVUpdateDefinitions: + Value: !Ref LambdaAVUpdateDefinitions + Description: Lambda function to update the Antivirus Definitions in its respective bucket + + LambdaAVScanner: + Value: !Ref LambdaAVScanner + Description: Lambda function to scan newly created S3 objects + + IamRoleAVScanner: + Value: !Ref IamRoleAVScanner + Description: IAM Role used by the Lambda Scanner function. Edit its policy to add/change source S3 buckets, and also to enable SNS functionality if desired + + \ No newline at end of file From a97b50de7335d334f6af33c7388d6a5204c52f9b Mon Sep 17 00:00:00 2001 From: Jeremy Deppen Date: Mon, 3 May 2021 08:33:55 -0400 Subject: [PATCH 12/48] bumping av-scanner lambda memory up --- deploy/cloudformation.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deploy/cloudformation.yaml b/deploy/cloudformation.yaml index 3d9131e6..70fc4d6c 100644 --- a/deploy/cloudformation.yaml +++ b/deploy/cloudformation.yaml @@ -264,7 +264,7 @@ 'statusCode': 200, 'body': json.dumps('Hello from Lambda!') } Handler: "scan.lambda_handler" - MemorySize: 1400 + MemorySize: 1500 Timeout: 300 Role: !GetAtt [ IamRoleAVScanner, Arn ] Environment: From bfd645d12b081a78652f6a31ca94b1d15b87173f Mon Sep 17 00:00:00 2001 From: Jeremy Deppen Date: Mon, 3 May 2021 11:33:46 -0400 Subject: [PATCH 13/48] updated readme for cloudformation create --- README.md | 210 +++++------------------------------------------------- 1 file changed, 
16 insertions(+), 194 deletions(-) diff --git a/README.md b/README.md index e0ce5cb6..05e9e668 100644 --- a/README.md +++ b/README.md @@ -34,207 +34,29 @@ or INFECTED, along with the date and time of the scan. ### Build from Source -To build the archive to upload to AWS Lambda, run `make`. The build process is completed using +To build the archive to upload to AWS Lambda, run `make all`. The build process is completed using the [amazonlinux](https://hub.docker.com/_/amazonlinux/) [Docker](https://www.docker.com) image. The resulting archive will be built at `build/lambda.zip`. This file will be uploaded to AWS for both Lambda functions below. -### AV Definition Bucket +### Create Relevant AWS Infra via CloudFormation +Use CloudFormation with the `cloudformation.yaml` located in the `deploy/` directory to quickly spin up the AWS infra needed to run this project. CloudFormation will create: +* An S3 bucket that will store AntiVirus definitions. +* A Lambda Function called `avUpdateDefinitions` that will update the AV Definitions in the S3 Bucket every 3 hours. +This function accesses the user’s above S3 Bucket to download updated definitions using `freshclam`. +* A Lambda Function called `avScanner` that is triggered on each new S3 object creation which scans the object and tags it appropriately. It is created with `1600mb` of memory which should be enough, however if you start to see timeouts, this memory may have to be bumped up. In the past, we recommended using `1024mb`, but that has started causing Lambda timeouts and bumping this memory has resolved it. -Create an s3 bucket to store current antivirus definitions. This -provides the fastest download speeds for the scanner. This bucket can -be kept as private. +Running CloudFormation, it will ask for 2 inputs for this stack: +1. BucketType: `private` (default) or `public`. This is applied to the S3 bucket that stores the AntiVirus definitions. 
We recommend to only use `public` when other AWS accounts need access to this bucket. +2. SourceBucket: [a non-empty string]. The name (do not include `s3://`) of the S3 bucket that will have its objects scanned. _Note - this is just used to create the IAM Policy, you can add/change source buckets later via the IAM Policy that CloudFormation outputs_ -To allow public access, useful for other accounts, -add the following policy to the bucket. +After the Stack has successfully created, there are 3 manual processes that still have to be done: +1. Upload the `build/lambda.zip` file that was created by running `make all` to the `avUpdateDefinitions` and `avScanner` Lambda functions via the Lambda Console. +2. To trigger the Scanner function on new S3 objects, go to the `avScanner` Lambda function console, navigate to `Configuration` -> `Trigger` -> `Add Trigger` -> Search for S3, and choose your bucket(s) and select `All object create events`, then click `Add`. _Note - if you chose more than 1 bucket as the source, or chose a different bucket than the Source Bucket in the CloudFormation parameter, you will have to also edit the IAM Role to reflect these new buckets (see next section)_ +3. Navigate to the `avUpdateDefinitions` Lambda function and manually trigger the function to get the initial Clam definitions in the bucket (instead of waiting for the 3 hour trigger to happen). Do this by clicking the `Test` section, and then clicking the orange `test` button. The function should take a few seconds to execute, and when finished you should see the `clam_defs` in the `av-definitions` S3 bucket. -```json -{ - "Version": "2012-10-17", - "Statement": [ - { - "Sid": "AllowPublic", - "Effect": "Allow", - "Principal": "*", - "Action": [ - "s3:GetObject", - "s3:GetObjectTagging" - ], - "Resource": "arn:aws:s3:::/*" - } - ] -} -``` - -### Definition Update Lambda - -This function accesses the user’s ClamAV instance to download -updated definitions using `freshclam`. 
It is recommended to run -this every 3 hours to stay protected from the latest threats. - -1. Create the archive using the method in the - [Build from Source](#build-from-source) section. -2. From the AWS Lambda Dashboard, click **Create function** -3. Choose **Author from scratch** on the *Create function* page -4. Name your function `bucket-antivirus-update` when prompted on the -*Configure function* step. -5. Set *Runtime* to `Python 3.7` -6. Create a new role name `bucket-antivirus-update` that uses the -following policy document - - ```json - { - "Version":"2012-10-17", - "Statement":[ - { - "Sid":"WriteCloudWatchLogs", - "Effect":"Allow", - "Action":[ - "logs:CreateLogGroup", - "logs:CreateLogStream", - "logs:PutLogEvents" - ], - "Resource":"*" - }, - { - "Sid":"s3GetAndPutWithTagging", - "Action":[ - "s3:GetObject", - "s3:GetObjectTagging", - "s3:PutObject", - "s3:PutObjectTagging", - "s3:PutObjectVersionTagging" - ], - "Effect":"Allow", - "Resource":[ - "arn:aws:s3:::/*" - ] - }, - { - "Sid": "s3HeadObject", - "Effect": "Allow", - "Action": "s3:ListBucket", - "Resource": [ - "arn:aws:s3:::/*", - "arn:aws:s3:::" - ] - } - ] - } - ``` - -7. Click next to go to the Configuration page -8. Add a trigger from the left of **CloudWatch Event** using `rate(3 hours)` -for the **Schedule expression**. Be sure to check **Enable trigger** -9. Choose **Upload a ZIP file** for *Code entry type* and select the archive -downloaded in step 1. -10. Add a single environment variable named `AV_DEFINITION_S3_BUCKET` -and set its value to the name of the bucket created to store your AV -definitions. -11. Set *Lambda handler* to `update.lambda_handler` -12. Under *Basic Settings*, set *Timeout* to **5 minutes** and *Memory* to -**1024** -13. Save and test your function. If prompted for test data, just use -the default provided. - -### AV Scanner Lambda - -1. Create the archive using the method in the - [Build from Source](#build-from-source) section. -2. 
From the AWS Lambda Dashboard, click **Create function** -3. Choose **Author from scratch** on the *Create function* page -4. Name your function `bucket-antivirus-function` -5. Set *Runtime* to `Python 3.7` -6. Create a new role name `bucket-antivirus-function` that uses the -following policy document - - ```json - { - "Version":"2012-10-17", - "Statement":[ - { - "Sid":"WriteCloudWatchLogs", - "Effect":"Allow", - "Action":[ - "logs:CreateLogGroup", - "logs:CreateLogStream", - "logs:PutLogEvents" - ], - "Resource":"*" - }, - { - "Sid":"s3AntiVirusScan", - "Action":[ - "s3:GetObject", - "s3:GetObjectTagging", - "s3:GetObjectVersion", - "s3:PutObjectTagging", - "s3:PutObjectVersionTagging" - ], - "Effect":"Allow", - "Resource": [ - "arn:aws:s3:::/*", - "arn:aws:s3:::/*" - ] - }, - { - "Sid":"s3AntiVirusDefinitions", - "Action":[ - "s3:GetObject", - "s3:GetObjectTagging" - ], - "Effect":"Allow", - "Resource": [ - "arn:aws:s3:::/*" - ] - }, - { - "Sid":"kmsDecrypt", - "Action":[ - "kms:Decrypt" - ], - "Effect":"Allow", - "Resource": [ - "arn:aws:s3:::/*", - "arn:aws:s3:::/*" - ] - }, - { - "Sid":"snsPublish", - "Action": [ - "sns:Publish" - ], - "Effect":"Allow", - "Resource": [ - "arn:aws:sns:::", - "arn:aws:sns:::" - ] - }, - { - "Sid":"s3HeadObject", - "Effect":"Allow", - "Action":"s3:ListBucket", - "Resource":[ - "arn:aws:s3:::/*", - "arn:aws:s3:::" - ] - } - ] - } - ``` - -7. Click *next* to head to the Configuration page -8. Add a new trigger of type **S3 Event** using `ObjectCreate(all)`. -9. Choose **Upload a ZIP file** for *Code entry type* and select the archive -created in step 1. -10. Set *Lambda handler* to `scan.lambda_handler` -11. Add a single environment variable named `AV_DEFINITION_S3_BUCKET` -and set its value to the name of the bucket created to store your AV -definitions. If your bucket is `s3://my-bucket`, the value should be `my-bucket`. -12. Under *Basic settings*, set *Timeout* to **5 minutes** and *Memory* to -**1024** -13. Save the function. 
Testing is easiest performed by uploading a -file to the bucket configured as the trigger in step 4. +#### Adding or Changing Source Buckets +Changing or adding Source Buckets is done by editing the `AVScannerLambdaRole` IAM Role. More specifically, the `S3AVScan` and `KmsDecrypt` parts of that IAM Role's policy. ### S3 Events From a494d9a619dd9e4a643cd596bc4934601c0a47d0 Mon Sep 17 00:00:00 2001 From: Jeremy Deppen Date: Mon, 3 May 2021 11:35:08 -0400 Subject: [PATCH 14/48] updated readme for cloudformation create --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 05e9e668..33cf68e8 100644 --- a/README.md +++ b/README.md @@ -44,7 +44,7 @@ Use CloudFormation with the `cloudformation.yaml` located in the `deploy/` direc * An S3 bucket that will store AntiVirus definitions. * A Lambda Function called `avUpdateDefinitions` that will update the AV Definitions in the S3 Bucket every 3 hours. This function accesses the user’s above S3 Bucket to download updated definitions using `freshclam`. -* A Lambda Function called `avScanner` that is triggered on each new S3 object creation which scans the object and tags it appropriately. It is created with `1600mb` of memory which should be enough, however if you start to see timeouts, this memory may have to be bumped up. In the past, we recommended using `1024mb`, but that has started causing Lambda timeouts and bumping this memory has resolved it. +* A Lambda Function called `avScanner` that is triggered on each new S3 object creation which scans the object and tags it appropriately. It is created with `1600mb` of memory which should be enough, however if you start to see function timeouts, this memory may have to be bumped up. In the past, we recommended using `1024mb`, but that has started causing Lambda timeouts and bumping this memory has resolved it. Running CloudFormation, it will ask for 2 inputs for this stack: 1. BucketType: `private` (default) or `public`. 
This is applied to the S3 bucket that stores the AntiVirus definitions. We recommend to only use `public` when other AWS accounts need access to this bucket. From a689f3a936b4d39d80c9c44fe3db9ba5fefc4451 Mon Sep 17 00:00:00 2001 From: Jeremy Deppen Date: Mon, 3 May 2021 11:38:45 -0400 Subject: [PATCH 15/48] updated readme for cloudformation create --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 33cf68e8..dfc75afe 100644 --- a/README.md +++ b/README.md @@ -52,7 +52,7 @@ Running CloudFormation, it will ask for 2 inputs for this stack: After the Stack has successfully created, there are 3 manual processes that still have to be done: 1. Upload the `build/lambda.zip` file that was created by running `make all` to the `avUpdateDefinitions` and `avScanner` Lambda functions via the Lambda Console. -2. To trigger the Scanner function on new S3 objects, go to the `avScanner` Lambda function console, navigate to `Configuration` -> `Trigger` -> `Add Trigger` -> Search for S3, and choose your bucket(s) and select `All object create events`, then click `Add`. _Note - if you chose more than 1 bucket as the source, or chose a different bucket than the Source Bucket in the CloudFormation parameter, you will have to also edit the IAM Role to reflect these new buckets (see next section)_ +2. To trigger the Scanner function on new S3 objects, go to the `avScanner` Lambda function console, navigate to `Configuration` -> `Trigger` -> `Add Trigger` -> Search for S3, and choose your bucket(s) and select `All object create events`, then click `Add`. _Note - if you chose more than 1 bucket as the source, or chose a different bucket than the Source Bucket in the CloudFormation parameter, you will have to also edit the IAM Role to reflect these new buckets (see "Adding or Changing Source Buckets")_ 3. 
Navigate to the `avUpdateDefinitions` Lambda function and manually trigger the function to get the initial Clam definitions in the bucket (instead of waiting for the 3 hour trigger to happen). Do this by clicking the `Test` section, and then clicking the orange `test` button. The function should take a few seconds to execute, and when finished you should see the `clam_defs` in the `av-definitions` S3 bucket. #### Adding or Changing Source Buckets From 26f45eb02c15914236fcd91e97a00b6c6a36d69e Mon Sep 17 00:00:00 2001 From: Jeremy Deppen Date: Mon, 3 May 2021 12:21:34 -0400 Subject: [PATCH 16/48] updated pre commit hooks for cloudformation --- .pre-commit-config.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 806c3de2..bd15c799 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -13,10 +13,13 @@ repos: - repo: git://github.com/pre-commit/pre-commit-hooks rev: v2.2.3 hooks: + - id: cfn-python-lint + files: deploy/ - id: check-ast - id: check-json - id: check-merge-conflict - id: check-yaml + exlude: deploy/ - id: debug-statements - id: detect-private-key - id: fix-encoding-pragma From 34425a0066c81317597694dcb8e7302d5a6e3b9b Mon Sep 17 00:00:00 2001 From: Jeremy Deppen Date: Mon, 3 May 2021 12:25:39 -0400 Subject: [PATCH 17/48] updated pre commit hooks for cloudformation --- .pre-commit-config.yaml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index bd15c799..d85a21fe 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -13,8 +13,6 @@ repos: - repo: git://github.com/pre-commit/pre-commit-hooks rev: v2.2.3 hooks: - - id: cfn-python-lint - files: deploy/ - id: check-ast - id: check-json - id: check-merge-conflict @@ -31,3 +29,9 @@ repos: hooks: - id: markdownlint entry: markdownlint --ignore .github/*.md + + - repo: github.com/aws-cloudformation/cfn-python-lint + rev: stable + hooks: + - id: 
cfn-python-lint + files: deploy/ From a984c6b96bd1a3ac4355e1898f3cbd5345aab56d Mon Sep 17 00:00:00 2001 From: Jeremy Deppen Date: Mon, 3 May 2021 12:26:57 -0400 Subject: [PATCH 18/48] updated pre commit hooks for cloudformation --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d85a21fe..7f70e533 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -30,7 +30,7 @@ repos: - id: markdownlint entry: markdownlint --ignore .github/*.md - - repo: github.com/aws-cloudformation/cfn-python-lint + - repo: git://github.com/aws-cloudformation/cfn-python-lint rev: stable hooks: - id: cfn-python-lint From 58110becaf9ba2f85776a5004e6c4506a15c5826 Mon Sep 17 00:00:00 2001 From: Jeremy Deppen Date: Mon, 3 May 2021 12:35:38 -0400 Subject: [PATCH 19/48] bumping pre-commit version to latest --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index a527b641..564f9829 100644 --- a/Makefile +++ b/Makefile @@ -41,7 +41,7 @@ archive: clean ## Create the archive for AWS lambda .PHONY: pre_commit_install ## Ensure that pre-commit hook is installed and kept up to date pre_commit_install: .git/hooks/pre-commit ## Ensure pre-commit is installed .git/hooks/pre-commit: /usr/local/bin/pre-commit - pip install pre-commit==1.18.3 + pip install pre-commit==1.12.1 pre-commit install pre-commit install-hooks From 7583865b062918cda26b4ba42d5982435219e9e9 Mon Sep 17 00:00:00 2001 From: Jeremy Deppen Date: Mon, 3 May 2021 12:36:30 -0400 Subject: [PATCH 20/48] bumping pre-commit version to latest --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 564f9829..b65c4275 100644 --- a/Makefile +++ b/Makefile @@ -41,7 +41,7 @@ archive: clean ## Create the archive for AWS lambda .PHONY: pre_commit_install ## Ensure that pre-commit hook is installed and kept up to date pre_commit_install: 
.git/hooks/pre-commit ## Ensure pre-commit is installed .git/hooks/pre-commit: /usr/local/bin/pre-commit - pip install pre-commit==1.12.1 + pip install pre-commit==2.12.1 pre-commit install pre-commit install-hooks From b773c9eb9495f07ba89acfc540f003c8df41e54d Mon Sep 17 00:00:00 2001 From: Jeremy Deppen Date: Mon, 3 May 2021 12:37:49 -0400 Subject: [PATCH 21/48] bumping pre-commit version to latest --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 477a8e7b..1f677cf9 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -47,7 +47,7 @@ jobs: - restore_cache: keys: - pre-commit-dot-cache-{{ checksum ".pre-commit-config.yaml" }} - - run: sudo pip install pre-commit==1.18.3 + - run: sudo pip install pre-commit==2.12.1 - run: pre-commit install-hooks - save_cache: From 4a3b0e59e5ac53cd100dffce76b88e140fc17359 Mon Sep 17 00:00:00 2001 From: Jeremy Deppen Date: Mon, 3 May 2021 12:40:38 -0400 Subject: [PATCH 22/48] bumping pre-commit version to latest --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7f70e533..a16c8614 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -17,7 +17,7 @@ repos: - id: check-json - id: check-merge-conflict - id: check-yaml - exlude: deploy/ + exclude: deploy/ - id: debug-statements - id: detect-private-key - id: fix-encoding-pragma From bee01c16c0fd2b871afc4cd31a95dc67e6e1f101 Mon Sep 17 00:00:00 2001 From: Jeremy Deppen Date: Mon, 3 May 2021 12:45:18 -0400 Subject: [PATCH 23/48] bumping pre-commit version to latest --- .pre-commit-config.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a16c8614..4bb31e83 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -31,7 +31,6 @@ repos: entry: markdownlint --ignore .github/*.md - repo: 
git://github.com/aws-cloudformation/cfn-python-lint - rev: stable hooks: - id: cfn-python-lint files: deploy/ From 6751dc5189613de14124eadcd48395e40ec2e8d5 Mon Sep 17 00:00:00 2001 From: Jeremy Deppen Date: Mon, 3 May 2021 12:47:12 -0400 Subject: [PATCH 24/48] modifying python black version --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4bb31e83..49e71444 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,7 +1,7 @@ repos: - repo: https://github.com/ambv/black - rev: stable + rev: 19.3b0 hooks: - id: black language_version: python3.7 From 4cfab739f0303f70168d7a4f974947476e5b33e5 Mon Sep 17 00:00:00 2001 From: Jeremy Deppen Date: Mon, 3 May 2021 12:49:18 -0400 Subject: [PATCH 25/48] modifying cloudformation lint version --- .pre-commit-config.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 49e71444..a6cdf395 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -31,6 +31,7 @@ repos: entry: markdownlint --ignore .github/*.md - repo: git://github.com/aws-cloudformation/cfn-python-lint + rev: v0.49.0 hooks: - id: cfn-python-lint files: deploy/ From 015a0fc30b9bfe7be7da18e248feffb82f223e3c Mon Sep 17 00:00:00 2001 From: Jeremy Deppen Date: Mon, 3 May 2021 12:58:08 -0400 Subject: [PATCH 26/48] linted cloudformation template --- deploy/cloudformation.yaml | 8 -------- 1 file changed, 8 deletions(-) diff --git a/deploy/cloudformation.yaml b/deploy/cloudformation.yaml index 70fc4d6c..fccda579 100644 --- a/deploy/cloudformation.yaml +++ b/deploy/cloudformation.yaml @@ -109,9 +109,6 @@ IamPolicyAVDefinitions: Type: AWS::IAM::Policy - DependsOn: - - S3BucketAVDefinitions - - IamRoleAVDefinitions Properties: PolicyName: AVDefinitionsLambdaPolicy Roles: @@ -146,9 +143,6 @@ IamPolicyAVScanner: Type: AWS::IAM::Policy - DependsOn: - - S3BucketAVDefinitions - - IamRoleAVScanner 
Properties: PolicyName: AVScannerLambdaPolicy Roles: @@ -203,8 +197,6 @@ LambdaAVUpdateDefinitions: Type: AWS::Lambda::Function - DependsOn: - - S3BucketAVDefinitions Properties: FunctionName: avUpdateDefinitions Description: LambdaFunction to update the AntiVirus definitions in the AV Definitions bucket. From d6ace6cf2fc55601c4e50c49418c5996002d2462 Mon Sep 17 00:00:00 2001 From: Jeremy Deppen Date: Mon, 3 May 2021 13:14:56 -0400 Subject: [PATCH 27/48] linted cloudformation template --- README.md | 11 ++++++++--- deploy/cloudformation.yaml | 4 +--- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index dfc75afe..97a07104 100644 --- a/README.md +++ b/README.md @@ -40,22 +40,27 @@ the [amazonlinux](https://hub.docker.com/_/amazonlinux/) [Docker](https://www.do uploaded to AWS for both Lambda functions below. ### Create Relevant AWS Infra via CloudFormation + Use CloudFormation with the `cloudformation.yaml` located in the `deploy/` directory to quickly spin up the AWS infra needed to run this project. CloudFormation will create: -* An S3 bucket that will store AntiVirus definitions. -* A Lambda Function called `avUpdateDefinitions` that will update the AV Definitions in the S3 Bucket every 3 hours. + +- An S3 bucket that will store AntiVirus definitions. +- A Lambda Function called `avUpdateDefinitions` that will update the AV Definitions in the S3 Bucket every 3 hours. This function accesses the user’s above S3 Bucket to download updated definitions using `freshclam`. -* A Lambda Function called `avScanner` that is triggered on each new S3 object creation which scans the object and tags it appropriately. It is created with `1600mb` of memory which should be enough, however if you start to see function timeouts, this memory may have to be bumped up. In the past, we recommended using `1024mb`, but that has started causing Lambda timeouts and bumping this memory has resolved it. 
+- A Lambda Function called `avScanner` that is triggered on each new S3 object creation which scans the object and tags it appropriately. It is created with `1600mb` of memory which should be enough, however if you start to see function timeouts, this memory may have to be bumped up. In the past, we recommended using `1024mb`, but that has started causing Lambda timeouts and bumping this memory has resolved it. Running CloudFormation, it will ask for 2 inputs for this stack: + 1. BucketType: `private` (default) or `public`. This is applied to the S3 bucket that stores the AntiVirus definitions. We recommend to only use `public` when other AWS accounts need access to this bucket. 2. SourceBucket: [a non-empty string]. The name (do not include `s3://`) of the S3 bucket that will have its objects scanned. _Note - this is just used to create the IAM Policy, you can add/change source buckets later via the IAM Policy that CloudFormation outputs_ After the Stack has successfully created, there are 3 manual processes that still have to be done: + 1. Upload the `build/lambda.zip` file that was created by running `make all` to the `avUpdateDefinitions` and `avScanner` Lambda functions via the Lambda Console. 2. To trigger the Scanner function on new S3 objects, go to the `avScanner` Lambda function console, navigate to `Configuration` -> `Trigger` -> `Add Trigger` -> Search for S3, and choose your bucket(s) and select `All object create events`, then click `Add`. _Note - if you chose more than 1 bucket as the source, or chose a different bucket than the Source Bucket in the CloudFormation parameter, you will have to also edit the IAM Role to reflect these new buckets (see "Adding or Changing Source Buckets")_ 3. Navigate to the `avUpdateDefinitions` Lambda function and manually trigger the function to get the initial Clam definitions in the bucket (instead of waiting for the 3 hour trigger to happen). 
Do this by clicking the `Test` section, and then clicking the orange `test` button. The function should take a few seconds to execute, and when finished you should see the `clam_defs` in the `av-definitions` S3 bucket. #### Adding or Changing Source Buckets + Changing or adding Source Buckets is done by editing the `AVScannerLambdaRole` IAM Role. More specifically, the `S3AVScan` and `KmsDecrypt` parts of that IAM Role's policy. ### S3 Events diff --git a/deploy/cloudformation.yaml b/deploy/cloudformation.yaml index fccda579..6c3fd1a3 100644 --- a/deploy/cloudformation.yaml +++ b/deploy/cloudformation.yaml @@ -284,6 +284,4 @@ IamRoleAVScanner: Value: !Ref IamRoleAVScanner - Description: IAM Role used by the Lambda Scanner function. Edit its policy to add/change source S3 buckets, and also to enable SNS functionality if desired - - \ No newline at end of file + Description: IAM Role used by the Lambda Scanner function. Edit its policy to add/change source S3 buckets, and also to enable SNS functionality if desired \ No newline at end of file From d56d97eb482dd3000d2f434950018dcf9b5a5950 Mon Sep 17 00:00:00 2001 From: Jeremy Deppen Date: Mon, 3 May 2021 13:17:46 -0400 Subject: [PATCH 28/48] linted precommit file --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a6cdf395..a881ee5d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -34,4 +34,4 @@ repos: rev: v0.49.0 hooks: - id: cfn-python-lint - files: deploy/ + files: deploy/ \ No newline at end of file From ee9b5a1bbee7a67b1f0e9e1fe0ea1927f386aa10 Mon Sep 17 00:00:00 2001 From: Jeremy Deppen Date: Mon, 3 May 2021 13:21:24 -0400 Subject: [PATCH 29/48] linted precommit file --- .circleci/config.yml | 2 +- Makefile | 2 +- README.md | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 1f677cf9..85f04ad2 100644 --- a/.circleci/config.yml 
+++ b/.circleci/config.yml @@ -93,4 +93,4 @@ workflows: - build: requires: - pre_commit_test - - test + - test \ No newline at end of file diff --git a/Makefile b/Makefile index b65c4275..1c839af0 100644 --- a/Makefile +++ b/Makefile @@ -63,4 +63,4 @@ scan: ./build/lambda.zip ## Run scan function locally .PHONY: update update: ./build/lambda.zip ## Run update function locally - scripts/run-update-lambda + scripts/run-update-lambda \ No newline at end of file diff --git a/README.md b/README.md index 97a07104..afcb9503 100644 --- a/README.md +++ b/README.md @@ -240,4 +240,4 @@ limitations under the License. ClamAV is released under the [GPL Version 2 License](https://github.com/vrtadmin/clamav-devel/blob/master/COPYING) and all [source for ClamAV](https://github.com/vrtadmin/clamav-devel) is available -for download on Github. +for download on Github. \ No newline at end of file From 2abe9d2033ccae9be7ec50b213bb5b44bfe534eb Mon Sep 17 00:00:00 2001 From: Jeremy Deppen Date: Mon, 3 May 2021 13:25:52 -0400 Subject: [PATCH 30/48] linted readme file --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index afcb9503..97a07104 100644 --- a/README.md +++ b/README.md @@ -240,4 +240,4 @@ limitations under the License. ClamAV is released under the [GPL Version 2 License](https://github.com/vrtadmin/clamav-devel/blob/master/COPYING) and all [source for ClamAV](https://github.com/vrtadmin/clamav-devel) is available -for download on Github. \ No newline at end of file +for download on Github. 
From 1b087b268c776c2e1c9688fd855b9dfe91cd0769 Mon Sep 17 00:00:00 2001 From: Jeremy Deppen Date: Mon, 3 May 2021 13:32:22 -0400 Subject: [PATCH 31/48] linted files --- .pre-commit-config.yaml | 2 +- README.md | 4 +-- deploy/cloudformation.yaml | 68 +++++++++++++++++++------------------- 3 files changed, 37 insertions(+), 37 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a881ee5d..ea714cac 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,7 +4,7 @@ repos: rev: 19.3b0 hooks: - id: black - language_version: python3.7 + language_version: python3.8 exclude: > (?x)^( scripts/gen-docs-index| diff --git a/README.md b/README.md index 97a07104..23a60b59 100644 --- a/README.md +++ b/README.md @@ -44,8 +44,8 @@ the [amazonlinux](https://hub.docker.com/_/amazonlinux/) [Docker](https://www.do Use CloudFormation with the `cloudformation.yaml` located in the `deploy/` directory to quickly spin up the AWS infra needed to run this project. CloudFormation will create: - An S3 bucket that will store AntiVirus definitions. -- A Lambda Function called `avUpdateDefinitions` that will update the AV Definitions in the S3 Bucket every 3 hours. -This function accesses the user’s above S3 Bucket to download updated definitions using `freshclam`. +- A Lambda Function called `avUpdateDefinitions` that will update the AV Definitions in the S3 Bucket every 3 hours. +This function accesses the user’s above S3 Bucket to download updated definitions using `freshclam`. - A Lambda Function called `avScanner` that is triggered on each new S3 object creation which scans the object and tags it appropriately. It is created with `1600mb` of memory which should be enough, however if you start to see function timeouts, this memory may have to be bumped up. In the past, we recommended using `1024mb`, but that has started causing Lambda timeouts and bumping this memory has resolved it. 
Running CloudFormation, it will ask for 2 inputs for this stack: diff --git a/deploy/cloudformation.yaml b/deploy/cloudformation.yaml index 6c3fd1a3..91fe1457 100644 --- a/deploy/cloudformation.yaml +++ b/deploy/cloudformation.yaml @@ -1,10 +1,10 @@ --- AWSTemplateFormatVersion: '2010-09-09' - + Description: Bucket Antivirus Quickstart Template - + Parameters: - + AVBucketType: Type: String Description: Specifies if the bucket to hold the AV deinitions should be "public" or "private". Only choose "public" if other accounts need to access this bucket." @@ -23,7 +23,7 @@ publicBucket: !Equals [ !Ref AVBucketType, "public" ] Resources: - + S3BucketAVDefinitions: Type: AWS::S3::Bucket Properties: @@ -54,7 +54,7 @@ Value: bucket-antivirus VersioningConfiguration: Status: Suspended - + S3BucketPolicyAVDefinitions: Type: AWS::S3::BucketPolicy Condition: publicBucket @@ -70,7 +70,7 @@ Principal: AWS: - "*" - Resource: + Resource: - !Sub [ "arn:aws:s3:::${BucketName}/*", { BucketName: !Ref S3BucketAVDefinitions } ] IamRoleAVDefinitions: @@ -109,86 +109,86 @@ IamPolicyAVDefinitions: Type: AWS::IAM::Policy - Properties: + Properties: PolicyName: AVDefinitionsLambdaPolicy Roles: - !Ref IamRoleAVDefinitions - PolicyDocument: + PolicyDocument: Version: "2012-10-17" Statement: - Sid: WriteCloudWatchLogs - Effect: Allow - Action: + Effect: Allow + Action: - "logs:CreateLogGroup" - "logs:CreateLogStream" - "logs:PutLogEvents" Resource: "*" - Sid: S3GetAndPutWithTagging - Effect: Allow - Action: + Effect: Allow + Action: - "s3:GetObject" - "s3:GetObjectTagging" - "s3:PutObject" - "s3:PutObjectTagging" - "s3:PutObjectVersionTagging" - Resource: + Resource: - !Sub [ "arn:aws:s3:::${BucketName}/*", { BucketName: !Ref S3BucketAVDefinitions } ] - Sid: S3HeadObject - Effect: Allow - Action: + Effect: Allow + Action: - "s3:ListBucket" - Resource: + Resource: - !Sub [ "arn:aws:s3:::${BucketName}/*", { BucketName: !Ref S3BucketAVDefinitions } ] - !Sub [ "arn:aws:s3:::${BucketName}", { 
BucketName: !Ref S3BucketAVDefinitions } ] IamPolicyAVScanner: Type: AWS::IAM::Policy - Properties: + Properties: PolicyName: AVScannerLambdaPolicy Roles: - !Ref IamRoleAVScanner - PolicyDocument: + PolicyDocument: Version: "2012-10-17" Statement: - Sid: WriteCloudWatchLogs - Effect: Allow - Action: + Effect: Allow + Action: - "logs:CreateLogGroup" - "logs:CreateLogStream" - "logs:PutLogEvents" Resource: "*" - Sid: S3AVScan - Effect: Allow - Action: + Effect: Allow + Action: - "s3:GetObject" - "s3:GetObjectTagging" - "s3:GetObjectVersion" - "s3:PutObjectTagging" - "s3:PutObjectVersionTagging" - Resource: + Resource: - !Sub [ "arn:aws:s3:::${SourceBucketName}/*", { SourceBucketName: !Ref SourceBucket } ] - - Sid: S3AVDefinitions + - Sid: S3AVDefinitions Effect: Allow Action: - "s3:GetObject" - "s3:GetObjectTagging" Resource: - !Sub [ "arn:aws:s3:::${BucketName}/*", { BucketName: !Ref S3BucketAVDefinitions } ] - - Sid: KmsDecrypt + - Sid: KmsDecrypt Effect: Allow Action: - "kms:Decrypt" Resource: - !Sub [ "arn:aws:s3:::${SourceBucketName}/*", { SourceBucketName: !Ref SourceBucket } ] - - Sid: SNSPublic - Effect: Allow + - Sid: SNSPublic + Effect: Allow Action: - "sns:Publish" Resource: - "arn:aws:sns:::" - "arn:aws:sns:::" - Sid: S3HeadObject - Effect: Allow + Effect: Allow Action: - "s3:ListBucket" Resource: @@ -231,7 +231,7 @@ Targets: - Arn: !Sub ${LambdaAVUpdateDefinitions.Arn} Id: LambdaAVUpdateDefinitionsSchedule - + LambdaAVUpdateDefinitionsSchedulePermission: Type: "AWS::Lambda::Permission" DependsOn: @@ -265,11 +265,11 @@ Tags: - Key: Service Value: bucket-antivirus - - - + + + Outputs: - + S3BucketAvDefinitions: Value: !Ref S3BucketAVDefinitions Description: S3 Bucket for the AV Definitions @@ -280,7 +280,7 @@ LambdaAVScanner: Value: !Ref LambdaAVScanner - Description: Lambda function to scan newly created S3 objects + Description: Lambda function to scan newly created S3 objects IamRoleAVScanner: Value: !Ref IamRoleAVScanner From 
edc2f5224895ab0a822c7322576c63ac5901d5fe Mon Sep 17 00:00:00 2001 From: Jeremy Deppen Date: Mon, 3 May 2021 13:32:42 -0400 Subject: [PATCH 32/48] python version --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ea714cac..a881ee5d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,7 +4,7 @@ repos: rev: 19.3b0 hooks: - id: black - language_version: python3.8 + language_version: python3.7 exclude: > (?x)^( scripts/gen-docs-index| From 20d5391197a31044232941a3e73c4357c1fd6b91 Mon Sep 17 00:00:00 2001 From: dmarkey Date: Thu, 20 May 2021 19:44:16 +0100 Subject: [PATCH 33/48] Add endpoint urls for AWS services --- clamav.py | 4 ++-- common.py | 3 +++ display_infected.py | 4 ++-- scan.py | 10 ++++++---- scan_bucket.py | 7 ++++--- update.py | 7 +++++-- 6 files changed, 22 insertions(+), 13 deletions(-) diff --git a/clamav.py b/clamav.py index 8042e739..a44ab3a2 100644 --- a/clamav.py +++ b/clamav.py @@ -24,7 +24,7 @@ import botocore from pytz import utc -from common import AV_DEFINITION_S3_PREFIX +from common import AV_DEFINITION_S3_PREFIX, S3_ENDPOINT from common import AV_DEFINITION_PATH from common import AV_DEFINITION_FILE_PREFIXES from common import AV_DEFINITION_FILE_SUFFIXES @@ -90,7 +90,7 @@ def upload_defs_to_s3(s3_client, bucket, prefix, local_path): "Uploading %s to s3://%s" % (local_file_path, os.path.join(bucket, prefix, filename)) ) - s3 = boto3.resource("s3") + s3 = boto3.resource("s3", endpoint_url=S3_ENDPOINT) s3_object = s3.Object(bucket, os.path.join(prefix, filename)) s3_object.upload_file(os.path.join(local_path, filename)) s3_client.put_object_tagging( diff --git a/common.py b/common.py index 9e95af96..bb953fca 100644 --- a/common.py +++ b/common.py @@ -43,6 +43,9 @@ AV_DEFINITION_FILE_PREFIXES = ["main", "daily", "bytecode"] AV_DEFINITION_FILE_SUFFIXES = ["cld", "cvd"] +SNS_ENDPOINT = os.getenv("SNS_ENDPOINT", None) +S3_ENDPOINT = 
os.getenv("S3_ENDPOINT", None) +LAMBDA_ENDPOINT = os.getenv("LAMBDA_ENDPOINT", None) def create_dir(path): diff --git a/display_infected.py b/display_infected.py index 0c40bc98..b80e1347 100755 --- a/display_infected.py +++ b/display_infected.py @@ -20,7 +20,7 @@ import boto3 -from common import AV_SIGNATURE_METADATA +from common import AV_SIGNATURE_METADATA, S3_ENDPOINT from common import AV_SIGNATURE_OK from common import AV_SIGNATURE_UNKNOWN from common import AV_STATUS_METADATA @@ -78,7 +78,7 @@ def object_infected(s3_client, s3_bucket_name, key_name): def main(s3_bucket_name): # Verify the S3 bucket exists - s3_client = boto3.client("s3") + s3_client = boto3.client("s3", endpoint_url=S3_ENDPOINT) try: s3_client.head_bucket(Bucket=s3_bucket_name) except Exception: diff --git a/scan.py b/scan.py index c55716d4..48545a06 100644 --- a/scan.py +++ b/scan.py @@ -37,6 +37,8 @@ from common import AV_STATUS_SNS_PUBLISH_CLEAN from common import AV_STATUS_SNS_PUBLISH_INFECTED from common import AV_TIMESTAMP_METADATA +from common import SNS_ENDPOINT +from common import S3_ENDPOINT from common import create_dir from common import get_timestamp @@ -73,7 +75,7 @@ def event_object(event, event_source="s3"): raise Exception("Unable to retrieve object from event.\n{}".format(event)) # Create and return the object - s3 = boto3.resource("s3") + s3 = boto3.resource("s3", endpoint_url=S3_ENDPOINT) return s3.Object(bucket_name, key_name) @@ -199,9 +201,9 @@ def sns_scan_results( def lambda_handler(event, context): - s3 = boto3.resource("s3") - s3_client = boto3.client("s3") - sns_client = boto3.client("sns") + s3 = boto3.resource("s3", endpoint_url=S3_ENDPOINT) + s3_client = boto3.client("s3", endpoint_url=S3_ENDPOINT) + sns_client = boto3.client("sns", endpoint_url=SNS_ENDPOINT) # Get some environment variables ENV = os.getenv("ENV", "") diff --git a/scan_bucket.py b/scan_bucket.py index 6043ffb0..7c6367fb 100755 --- a/scan_bucket.py +++ b/scan_bucket.py @@ -21,8 +21,9 @@ import 
boto3 -from common import AV_STATUS_METADATA +from common import AV_STATUS_METADATA, LAMBDA_ENDPOINT from common import AV_TIMESTAMP_METADATA +from common import S3_ENDPOINT # Get all objects in an S3 bucket that have not been previously scanned @@ -87,7 +88,7 @@ def format_s3_event(s3_bucket_name, key_name): def main(lambda_function_name, s3_bucket_name, limit): # Verify the lambda exists - lambda_client = boto3.client("lambda") + lambda_client = boto3.client("lambda", endpoint_url=LAMBDA_ENDPOINT) try: lambda_client.get_function(FunctionName=lambda_function_name) except Exception: @@ -95,7 +96,7 @@ def main(lambda_function_name, s3_bucket_name, limit): sys.exit(1) # Verify the S3 bucket exists - s3_client = boto3.client("s3") + s3_client = boto3.client("s3", endpoint_url=S3_ENDPOINT) try: s3_client.head_bucket(Bucket=s3_bucket_name) except Exception: diff --git a/update.py b/update.py index 9730f230..95d4aedb 100644 --- a/update.py +++ b/update.py @@ -22,12 +22,15 @@ from common import AV_DEFINITION_S3_BUCKET from common import AV_DEFINITION_S3_PREFIX from common import CLAMAVLIB_PATH +from common import CLAMAVLIB_PATH +from common import S3_ENDPOINT +from common import SNS_ENDPOINT from common import get_timestamp def lambda_handler(event, context): - s3 = boto3.resource("s3") - s3_client = boto3.client("s3") + s3 = boto3.resource("s3", endpoint_url=S3_ENDPOINT) + s3_client = boto3.client("s3", endpoint_url=S3_ENDPOINT) print("Script starting at %s\n" % (get_timestamp())) to_download = clamav.update_defs_from_s3( From 50de02b3ba4b87020c9fa91972fff92692db2e1e Mon Sep 17 00:00:00 2001 From: dmarkey Date: Thu, 20 May 2021 19:45:23 +0100 Subject: [PATCH 34/48] Take out unused imports --- update.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/update.py b/update.py index 95d4aedb..80aa46d1 100644 --- a/update.py +++ b/update.py @@ -22,9 +22,7 @@ from common import AV_DEFINITION_S3_BUCKET from common import AV_DEFINITION_S3_PREFIX from common import 
CLAMAVLIB_PATH -from common import CLAMAVLIB_PATH from common import S3_ENDPOINT -from common import SNS_ENDPOINT from common import get_timestamp From 5ef73fca499b096a8b4b4f02bfea0dccb79f5c26 Mon Sep 17 00:00:00 2001 From: dmarkey Date: Sun, 23 May 2021 17:02:16 +0100 Subject: [PATCH 35/48] Update README --- README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/README.md b/README.md index 23a60b59..802727b5 100644 --- a/README.md +++ b/README.md @@ -103,6 +103,9 @@ the table below for reference. | AV_PROCESS_ORIGINAL_VERSION_ONLY | Controls that only original version of an S3 key is processed (if bucket versioning is enabled) | False | No | | AV_DELETE_INFECTED_FILES | Controls whether infected files should be automatically deleted | False | No | | EVENT_SOURCE | The source of antivirus scan event "S3" or "SNS" (optional) | S3 | No | +| S3_ENDPOINT | The Endpoint to use when interacting wth S3 | None | No | +| SNS_ENDPOINT | The Endpoint to use when interacting wth SNS | None | No | +| LAMBDA_ENDPOINT | The Endpoint to use when interacting wth SNS | None | No | ## S3 Bucket Policy Examples From c7a53ed55f546b51010f09e4a7911e1852a3e23a Mon Sep 17 00:00:00 2001 From: dmarkey Date: Sun, 23 May 2021 19:49:56 +0100 Subject: [PATCH 36/48] Fix typo --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 802727b5..6d911919 100644 --- a/README.md +++ b/README.md @@ -105,7 +105,7 @@ the table below for reference. 
| EVENT_SOURCE | The source of antivirus scan event "S3" or "SNS" (optional) | S3 | No | | S3_ENDPOINT | The Endpoint to use when interacting wth S3 | None | No | | SNS_ENDPOINT | The Endpoint to use when interacting wth SNS | None | No | -| LAMBDA_ENDPOINT | The Endpoint to use when interacting wth SNS | None | No | +| LAMBDA_ENDPOINT | The Endpoint to use when interacting wth Lambda | None | No | ## S3 Bucket Policy Examples From f5303794f46d56de8606e7510f0d1fc34b4057b1 Mon Sep 17 00:00:00 2001 From: Taro Matsuzawa Date: Fri, 5 Jan 2024 12:50:52 +0900 Subject: [PATCH 37/48] #2 bump all packages, remove nose and add pytest, fixes warnings in test --- Makefile | 4 ++-- clamav_test.py | 12 ++++++------ requirements-dev.txt | 5 +++-- requirements.txt | 18 +++++++++--------- scan_bucket_test.py | 2 +- scan_test.py | 20 ++++++++++---------- 6 files changed, 31 insertions(+), 30 deletions(-) diff --git a/Makefile b/Makefile index 1c839af0..10c50731 100644 --- a/Makefile +++ b/Makefile @@ -51,11 +51,11 @@ pre_commit_tests: ## Run pre-commit tests .PHONY: test test: clean ## Run python tests - nosetests + pytest --no-cov .PHONY: coverage coverage: clean ## Run python tests with coverage - nosetests --with-coverage + pytest .PHONY: scan scan: ./build/lambda.zip ## Run scan function locally diff --git a/clamav_test.py b/clamav_test.py index 0ffbbf96..277a4831 100644 --- a/clamav_test.py +++ b/clamav_test.py @@ -132,7 +132,7 @@ def test_md5_from_s3_tags_no_md5(self): md5_hash = md5_from_s3_tags( self.s3_client, self.s3_bucket_name, self.s3_key_name ) - self.assertEquals("", md5_hash) + self.assertEqual("", md5_hash) def test_md5_from_s3_tags_has_md5(self): expected_md5_hash = "d41d8cd98f00b204e9800998ecf8427e" @@ -153,7 +153,7 @@ def test_md5_from_s3_tags_has_md5(self): md5_hash = md5_from_s3_tags( self.s3_client, self.s3_bucket_name, self.s3_key_name ) - self.assertEquals(expected_md5_hash, md5_hash) + self.assertEqual(expected_md5_hash, md5_hash) def 
test_time_from_s3(self): @@ -172,7 +172,7 @@ def test_time_from_s3(self): s3_time = time_from_s3( self.s3_client, self.s3_bucket_name, self.s3_key_name ) - self.assertEquals(expected_s3_time, s3_time) + self.assertEqual(expected_s3_time, s3_time) @mock.patch("clamav.md5_from_file") @mock.patch("common.os.path.exists") @@ -234,7 +234,7 @@ def test_update_defs_from_s3(self, mock_exists, mock_md5_from_file): to_download = update_defs_from_s3( self.s3_client, self.s3_bucket_name, AV_DEFINITION_S3_PREFIX ) - self.assertEquals(expected_to_download, to_download) + self.assertEqual(expected_to_download, to_download) @mock.patch("clamav.md5_from_file") @mock.patch("common.os.path.exists") @@ -283,7 +283,7 @@ def test_update_defs_from_s3_same_hash(self, mock_exists, mock_md5_from_file): to_download = update_defs_from_s3( self.s3_client, self.s3_bucket_name, AV_DEFINITION_S3_PREFIX ) - self.assertEquals(expected_to_download, to_download) + self.assertEqual(expected_to_download, to_download) @mock.patch("clamav.md5_from_file") @mock.patch("common.os.path.exists") @@ -349,4 +349,4 @@ def test_update_defs_from_s3_old_files(self, mock_exists, mock_md5_from_file): to_download = update_defs_from_s3( self.s3_client, self.s3_bucket_name, AV_DEFINITION_S3_PREFIX ) - self.assertEquals(expected_to_download, to_download) + self.assertEqual(expected_to_download, to_download) diff --git a/requirements-dev.txt b/requirements-dev.txt index b6f1ae9a..24cd24e8 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -3,5 +3,6 @@ boto3 # Test requirements coverage -mock==3.0.5 -nose +mock==5.1.0 +pytest +pytest-cov diff --git a/requirements.txt b/requirements.txt index 01c63d97..1bae7fd9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,9 +1,9 @@ -certifi==2018.11.29 -chardet==3.0.4 -datadog==0.26.0 -decorator==4.3 -idna==2.8 -requests==2.21 -simplejson==3.16 -urllib3==1.24.2 -pytz==2019.3 +certifi==2023.11.17 +chardet==5.2.0 +datadog==0.47.0 +decorator==5.1.1 +idna==3.6 
+requests==2.31.0 +simplejson==3.19.2 +urllib3==2.1.0 +pytz==2023.3.post1 diff --git a/scan_bucket_test.py b/scan_bucket_test.py index d191e87e..28efb634 100644 --- a/scan_bucket_test.py +++ b/scan_bucket_test.py @@ -128,4 +128,4 @@ def test_format_s3_event(self): } ] } - self.assertEquals(s3_event, expected_s3_event) + self.assertEqual(s3_event, expected_s3_event) diff --git a/scan_test.py b/scan_test.py index 0a18132a..d24e25e8 100644 --- a/scan_test.py +++ b/scan_test.py @@ -64,7 +64,7 @@ def test_sns_event_object(self): sns_event = {"Records": [{"Sns": {"Message": json.dumps(event)}}]} s3_obj = event_object(sns_event, event_source="sns") expected_s3_object = self.s3.Object(self.s3_bucket_name, self.s3_key_name) - self.assertEquals(s3_obj, expected_s3_object) + self.assertEqual(s3_obj, expected_s3_object) def test_s3_event_object(self): event = { @@ -79,25 +79,25 @@ def test_s3_event_object(self): } s3_obj = event_object(event) expected_s3_object = self.s3.Object(self.s3_bucket_name, self.s3_key_name) - self.assertEquals(s3_obj, expected_s3_object) + self.assertEqual(s3_obj, expected_s3_object) def test_s3_event_object_missing_bucket(self): event = {"Records": [{"s3": {"object": {"key": self.s3_key_name}}}]} with self.assertRaises(Exception) as cm: event_object(event) - self.assertEquals(cm.exception.message, "No bucket found in event!") + self.assertEqual(cm.exception.message, "No bucket found in event!") def test_s3_event_object_missing_key(self): event = {"Records": [{"s3": {"bucket": {"name": self.s3_bucket_name}}}]} with self.assertRaises(Exception) as cm: event_object(event) - self.assertEquals(cm.exception.message, "No key found in event!") + self.assertEqual(cm.exception.message, "No key found in event!") def test_s3_event_object_bucket_key_missing(self): event = {"Records": [{"s3": {"bucket": {}, "object": {}}}]} with self.assertRaises(Exception) as cm: event_object(event) - self.assertEquals( + self.assertEqual( cm.exception.message, "Unable to 
retrieve object from event.\n{}".format(event), ) @@ -106,7 +106,7 @@ def test_s3_event_object_no_records(self): event = {"Records": []} with self.assertRaises(Exception) as cm: event_object(event) - self.assertEquals(cm.exception.message, "No records found in event!") + self.assertEqual(cm.exception.message, "No records found in event!") def test_verify_s3_object_version(self): s3_obj = self.s3.Object(self.s3_bucket_name, self.s3_key_name) @@ -165,7 +165,7 @@ def test_verify_s3_object_versioning_not_enabled(self): with self.assertRaises(Exception) as cm: with s3_stubber_resource: verify_s3_object_version(self.s3, s3_obj) - self.assertEquals( + self.assertEqual( cm.exception.message, "Object versioning is not enabled in bucket {}".format( self.s3_bucket_name @@ -220,7 +220,7 @@ def test_verify_s3_object_version_multiple_versions(self): with self.assertRaises(Exception) as cm: with s3_stubber_resource: verify_s3_object_version(self.s3, s3_obj) - self.assertEquals( + self.assertEqual( cm.exception.message, "Detected multiple object versions in {}.{}, aborting processing".format( self.s3_bucket_name, self.s3_key_name @@ -267,7 +267,7 @@ def test_get_local_path(self): s3_obj = self.s3.Object(self.s3_bucket_name, self.s3_key_name) file_path = get_local_path(s3_obj, local_prefix) expected_file_path = "/tmp/test_bucket/test_key" - self.assertEquals(file_path, expected_file_path) + self.assertEqual(file_path, expected_file_path) def test_set_av_metadata(self): scan_result = "CLEAN" @@ -424,7 +424,7 @@ def test_delete_s3_object_exception(self): with s3_stubber: s3_obj = self.s3.Object(self.s3_bucket_name, self.s3_key_name) delete_s3_object(s3_obj) - self.assertEquals( + self.assertEqual( cm.exception.message, "Failed to delete infected file: {}.{}".format( self.s3_bucket_name, self.s3_key_name From b19204349071eb7a43ed55b3290f3694a3aeb259 Mon Sep 17 00:00:00 2001 From: Taro Matsuzawa Date: Wed, 10 Jan 2024 14:36:18 +0900 Subject: [PATCH 38/48] #2 downgrade urllib3 to 
supports python3.7 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 1bae7fd9..40464b12 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,5 +5,5 @@ decorator==5.1.1 idna==3.6 requests==2.31.0 simplejson==3.19.2 -urllib3==2.1.0 +urllib3==1.26.18 pytz==2023.3.post1 From 1f4ad2898c72f5ef21e6b67e9d293a9a0e2691ef Mon Sep 17 00:00:00 2001 From: Taro Matsuzawa Date: Wed, 10 Jan 2024 15:53:07 +0900 Subject: [PATCH 39/48] #2 fix: find command raise error --- Makefile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 10c50731..af85fe04 100644 --- a/Makefile +++ b/Makefile @@ -29,7 +29,8 @@ clean: ## Clean build artifacts rm -rf build/ rm -rf tmp/ rm -f .coverage - find ./ -type d -name '__pycache__' -delete + rm -rf .pytest_cache/ + find ./ -type d -name '__pycache__' -exec rm -r {} \; find ./ -type f -name '*.pyc' -delete .PHONY: archive From 473bce568a37a70ff94934aa5f1d7956542fcce0 Mon Sep 17 00:00:00 2001 From: Taro Matsuzawa Date: Wed, 17 Jan 2024 14:39:25 +0900 Subject: [PATCH 40/48] fixes missing 'ld' --- clamav.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/clamav.py b/clamav.py index a44ab3a2..6575e367 100644 --- a/clamav.py +++ b/clamav.py @@ -112,7 +112,7 @@ def update_defs_from_freshclam(path, library_path=""): fc_env = os.environ.copy() if library_path: fc_env["LD_LIBRARY_PATH"] = "%s:%s" % ( - ":".join(current_library_search_path()), + fc_env["LD_LIBRARY_PATH"], CLAMAVLIB_PATH, ) print("Starting freshclam with defs in %s." 
% path) From cdbed44579fb0588e22d2374f7b5a917d9826490 Mon Sep 17 00:00:00 2001 From: Taro Matsuzawa Date: Fri, 19 Jan 2024 10:04:56 +0900 Subject: [PATCH 41/48] target: python 3.12, add localstack for testing --- .gitignore | 3 ++ Dockerfile | 42 ++++++++++++++++++---------- README.md | 65 +++++++++++++++++++++++++++++++++----------- common.py | 2 +- docker-compose.yml | 13 +++++++++ requirements-dev.txt | 3 ++ requirements.txt | 2 +- scan.py | 15 +++++++++- 8 files changed, 112 insertions(+), 33 deletions(-) create mode 100644 docker-compose.yml diff --git a/.gitignore b/.gitignore index da9084e7..117a509a 100644 --- a/.gitignore +++ b/.gitignore @@ -117,3 +117,6 @@ tmp/ # EICAR Files *eicar* + +# response.json +response.json \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 4d86948d..d0ba7c3b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,37 +1,51 @@ -FROM amazonlinux:2 +FROM amazonlinux:2023 as builder + +# Set up working directories +RUN mkdir -p /opt/python + +# Install packages +RUN dnf update -y +RUN dnf install -y gcc openssl-devel bzip2-devel libffi-devel zlib-devel wget make tar xz + +# Download and install Python 3.12 +WORKDIR /opt +RUN wget https://www.python.org/ftp/python/3.12.1/Python-3.12.1.tar.xz +RUN tar xvf Python-3.12.1.tar.xz +WORKDIR /opt/Python-3.12.1 +RUN ./configure --enable-optimizations --prefix=/opt/python +RUN make -j +RUN make install + +FROM amazonlinux:2023 # Set up working directories RUN mkdir -p /opt/app RUN mkdir -p /opt/app/build RUN mkdir -p /opt/app/bin/ +# Copy over the python binaries +COPY --from=builder /opt/python /opt/python + # Copy in the lambda source WORKDIR /opt/app COPY ./*.py /opt/app/ COPY requirements.txt /opt/app/requirements.txt # Install packages -RUN yum update -y -RUN yum install -y cpio python3-pip yum-utils zip unzip less -RUN yum install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm +RUN dnf update -y +RUN dnf install -y cpio openssl bzip2 libffi yum-utils 
zip unzip less # This had --no-cache-dir, tracing through multiple tickets led to a problem in wheel -RUN pip3 install -r requirements.txt +RUN /opt/python/bin/pip3 install -r requirements.txt RUN rm -rf /root/.cache/pip # Download libraries we need to run in lambda WORKDIR /tmp -RUN yumdownloader -x \*i686 --archlist=x86_64 clamav clamav-lib clamav-update json-c pcre2 libprelude gnutls libtasn1 lib64nettle nettle +RUN yumdownloader -x \*i686 --archlist=x86_64 clamav clamav-lib clamav-update libtool-ltdl RUN rpm2cpio clamav-0*.rpm | cpio -idmv RUN rpm2cpio clamav-lib*.rpm | cpio -idmv RUN rpm2cpio clamav-update*.rpm | cpio -idmv -RUN rpm2cpio json-c*.rpm | cpio -idmv -RUN rpm2cpio pcre*.rpm | cpio -idmv -RUN rpm2cpio gnutls* | cpio -idmv -RUN rpm2cpio nettle* | cpio -idmv -RUN rpm2cpio lib* | cpio -idmv -RUN rpm2cpio *.rpm | cpio -idmv -RUN rpm2cpio libtasn1* | cpio -idmv +RUN rpm2cpio libtool-ltdl* | cpio -idmv # Copy over the binaries and libraries RUN cp /tmp/usr/bin/clamscan /tmp/usr/bin/freshclam /tmp/usr/lib64/* /opt/app/bin/ @@ -44,7 +58,7 @@ RUN echo "CompressLocalDatabase yes" >> /opt/app/bin/freshclam.conf WORKDIR /opt/app RUN zip -r9 --exclude="*test*" /opt/app/build/lambda.zip *.py bin -WORKDIR /usr/local/lib/python3.7/site-packages +WORKDIR /opt/python/lib/python3.12/site-packages RUN zip -r9 /opt/app/build/lambda.zip * WORKDIR /opt/app diff --git a/README.md b/README.md index 6d911919..81130064 100644 --- a/README.md +++ b/README.md @@ -198,30 +198,63 @@ pip install -r requirements-dev.txt make test ``` -### Local lambdas +## Testing with Localstack -You can run the lambdas locally to test out what they are doing without deploying to AWS. This is accomplished -by using docker containers that act similarly to lambda. You will need to have set up some local variables in your -`.envrc.local` file and modify them appropriately first before running `direnv allow`. If you do not have `direnv` -it can be installed with `brew install direnv`. 
+You can test the lambda functions locally using [localstack](https://www.localstack.cloud/). This will run the lambda functions in docker containers. -For the Scan lambda you will need a test file uploaded to S3 and the variables `TEST_BUCKET` and `TEST_KEY` -set in your `.envrc.local` file. Then you can run: +To get started you will need to install [Docker](https://docs.docker.com/install/) and [Docker Compose](https://docs.docker.com/compose/install/). + +Then you can run: ```sh -direnv allow -make archive scan +make archive +docker compose up localstack -d # start localstack +aws s3 mb s3://antivirus-definitions --profile localstack # bucket name must match AV_DEFINITION_S3_BUCKET +aws s3 mb s3://test-bucket --profile localstack # bucket name must match TEST_BUCKET +wget https://secure.eicar.org/eicar_com.zip +aws s3 cp eicar_com.zip s3://test-bucket/eicar_com.zip --profile localstack +aws --endpoint-url=http://localhost:4566 lambda create-function \ + --function-name update-clamav \ + --runtime python3.12 \ + --handler update.lambda_handler \ + --role arn:aws:iam::123456789012:role/lambda-role \ + --zip-file fileb://./build/lambda.zip \ + --timeout 120 \ + --profile localstack \ + --environment "Variables={AV_DEFINITION_S3_BUCKET=antivirus-definitions}" +aws --endpoint-url=http://localhost:4566 lambda invoke \ + --function-name update-clamav --profile localstack \ + --invocation-type RequestResponse \ + --log-type Tail \ + --payload '{}' \ + response.json \ + --query 'LogResult' | tr -d '"' | base64 -d +aws --endpoint-url=http://localhost:4566 lambda create-function \ + --function-name scan-clamav \ + --runtime python3.12 \ + --handler scan.lambda_handler \ + --role arn:aws:iam::123456789012:role/lambda-role \ + --zip-file fileb://./build/lambda.zip \ + --timeout 120 \ + --profile localstack \ + --environment "Variables={AV_DEFINITION_S3_BUCKET=antivirus-definitions,AV_DELETE_INFECTED_FILES=True}" +aws --endpoint-url=http://localhost:4566 lambda invoke \ + 
--function-name scan-clamav --profile localstack \ + --invocation-type RequestResponse \ + --log-type Tail \ + --payload '{"Records": [{"s3": {"bucket": {"name": "test-bucket"}, "object": {"key": "eicar_com.zip"}}}]}' \ + response.json \ + --query 'LogResult' | tr -d '"' | base64 -d +aws s3 ls s3://test-bucket --profile localstack # should be empty ``` -If you want a file that will be recognized as a virus you can download a test file from the [EICAR](https://www.eicar.org/?page_id=3950) -website and uploaded to your bucket. +Note1: The `--profile localstack` is only needed if you have a profile named `localstack` in your `~/.aws/config` and `~/.aws/credentials` file. See [localstack docs](https://docs.localstack.cloud/user-guide/integrations/aws-cli/#aws-cli) for more info. -For the Update lambda you can run: +Note2: The `--endpoint-url` is only needed if you are not running localstack on the default port of `4566`. -```sh -direnv allow -make archive update -``` +Note3: The `--query 'LogResult' | tr -d '"' | base64 -d` is only needed if you want to see the logs from the lambda function. + +Note4: localstack will drop all file when it is stopped. If you want to keep the files you will need to copy them to a real s3 bucket. 
## License diff --git a/common.py b/common.py index bb953fca..eeacb156 100644 --- a/common.py +++ b/common.py @@ -59,4 +59,4 @@ def create_dir(path): def get_timestamp(): - return datetime.datetime.utcnow().strftime("%Y/%m/%d %H:%M:%S UTC") + return datetime.datetime.now(datetime.UTC).strftime("%Y/%m/%d %H:%M:%S UTC") diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 00000000..d1ec2b70 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,13 @@ +version: "3.8" + +services: + localstack: + container_name: localstack + image: localstack/localstack + ports: + - "127.0.0.1:4566:4566" + - "127.0.0.1:4510-4559:4510-4559" + environment: + - DOCKER_HOST=unix:///var/run/docker.sock + volumes: + - "/var/run/docker.sock:/var/run/docker.sock" \ No newline at end of file diff --git a/requirements-dev.txt b/requirements-dev.txt index 24cd24e8..82c4c5aa 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,6 +1,9 @@ # boto3 available by default in AWS but not locally boto3 +# awscli available by default in AWS but not locally +awscli + # Test requirements coverage mock==5.1.0 diff --git a/requirements.txt b/requirements.txt index 40464b12..345c2e8b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,5 +5,5 @@ decorator==5.1.1 idna==3.6 requests==2.31.0 simplejson==3.19.2 -urllib3==1.26.18 +urllib3==2.0.7 pytz==2023.3.post1 diff --git a/scan.py b/scan.py index 48545a06..fadb8c49 100644 --- a/scan.py +++ b/scan.py @@ -17,7 +17,6 @@ import json import os from urllib.parse import unquote_plus -from distutils.util import strtobool import boto3 @@ -274,3 +273,17 @@ def lambda_handler(event, context): def str_to_bool(s): return bool(strtobool(str(s))) + +def strtobool(val): + """Convert a string representation of truth to true (1) or false (0). + True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values + are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if + 'val' is anything else. 
+ """ + val = val.lower() + if val in ('y', 'yes', 't', 'true', 'on', '1'): + return 1 + elif val in ('n', 'no', 'f', 'false', 'off', '0'): + return 0 + else: + raise ValueError("invalid truth value %r" % (val,)) \ No newline at end of file From 4391036b117ddd1364ff204fe786c8c09be435b2 Mon Sep 17 00:00:00 2001 From: Taro Matsuzawa Date: Fri, 19 Jan 2024 10:26:15 +0900 Subject: [PATCH 42/48] remove coverage report directory from git --- .coveragerc | 3 +++ .gitignore | 5 ++++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/.coveragerc b/.coveragerc index 023d3bb6..bd4102d1 100644 --- a/.coveragerc +++ b/.coveragerc @@ -6,3 +6,6 @@ omit = */python?.?/* */site-packages/nose/* show_missing = true + +[html] +directory = coverage_html_report \ No newline at end of file diff --git a/.gitignore b/.gitignore index 117a509a..22505e42 100644 --- a/.gitignore +++ b/.gitignore @@ -119,4 +119,7 @@ tmp/ *eicar* # response.json -response.json \ No newline at end of file +response.json + +# coverage report +coverage_html_report/ \ No newline at end of file From 11118e2019e8576abd2fc3633b0f64803d209db5 Mon Sep 17 00:00:00 2001 From: Taro Matsuzawa Date: Fri, 19 Jan 2024 10:28:57 +0900 Subject: [PATCH 43/48] remove local script because can't run lambda by os module doesn't work --- Makefile | 16 +++--------- scripts/run-scan-lambda | 52 --------------------------------------- scripts/run-update-lambda | 29 ---------------------- 3 files changed, 4 insertions(+), 93 deletions(-) delete mode 100755 scripts/run-scan-lambda delete mode 100755 scripts/run-update-lambda diff --git a/Makefile b/Makefile index af85fe04..6e0d0997 100644 --- a/Makefile +++ b/Makefile @@ -30,7 +30,7 @@ clean: ## Clean build artifacts rm -rf tmp/ rm -f .coverage rm -rf .pytest_cache/ - find ./ -type d -name '__pycache__' -exec rm -r {} \; + find ./ -type d -name '__pycache__' | xargs rm -rf find ./ -type f -name '*.pyc' -delete .PHONY: archive @@ -41,8 +41,8 @@ archive: clean ## Create the archive 
for AWS lambda .PHONY: pre_commit_install ## Ensure that pre-commit hook is installed and kept up to date pre_commit_install: .git/hooks/pre-commit ## Ensure pre-commit is installed -.git/hooks/pre-commit: /usr/local/bin/pre-commit - pip install pre-commit==2.12.1 +.git/hooks/pre-commit: venv/bin/pre-commit + pip install pre-commit pre-commit install pre-commit install-hooks @@ -56,12 +56,4 @@ test: clean ## Run python tests .PHONY: coverage coverage: clean ## Run python tests with coverage - pytest - -.PHONY: scan -scan: ./build/lambda.zip ## Run scan function locally - scripts/run-scan-lambda $(TEST_BUCKET) $(TEST_KEY) - -.PHONY: update -update: ./build/lambda.zip ## Run update function locally - scripts/run-update-lambda \ No newline at end of file + pytest --cov=. --cov-report html diff --git a/scripts/run-scan-lambda b/scripts/run-scan-lambda deleted file mode 100755 index c70e1e41..00000000 --- a/scripts/run-scan-lambda +++ /dev/null @@ -1,52 +0,0 @@ -#! /usr/bin/env bash - -set -eu -o pipefail - -# -# Run the scan.lambda_handler locally in a docker container -# - -if [ $# -lt 2 ]; then - echo 1>&2 "$0: not enough arguments. 
Please provide BUCKET and KEY" - exit 1 -fi - -BUCKET=$1 -KEY=$2 -EVENT="{\"Records\": [{\"s3\": {\"bucket\": {\"name\": \"${BUCKET}\"}, \"object\": {\"key\": \"${KEY}\"}}}]}" -echo "Sending S3 event: ${EVENT}" - -# Verify that the file exists first -aws s3 ls "s3://${BUCKET}/${KEY}" - -rm -rf tmp/ -unzip -qq -d ./tmp build/lambda.zip - -NAME="antivirus-scan" - -docker run --rm \ - -v "$(pwd)/tmp/:/var/task" \ - -e AV_DEFINITION_S3_BUCKET \ - -e AV_DEFINITION_S3_PREFIX \ - -e AV_DELETE_INFECTED_FILES \ - -e AV_PROCESS_ORIGINAL_VERSION_ONLY \ - -e AV_SCAN_START_METADATA \ - -e AV_SCAN_START_SNS_ARN \ - -e AV_SIGNATURE_METADATA \ - -e AV_STATUS_CLEAN \ - -e AV_STATUS_INFECTED \ - -e AV_STATUS_METADATA \ - -e AV_STATUS_SNS_ARN \ - -e AV_STATUS_SNS_PUBLISH_CLEAN \ - -e AV_STATUS_SNS_PUBLISH_INFECTED \ - -e AV_TIMESTAMP_METADATA \ - -e AWS_ACCESS_KEY_ID \ - -e AWS_DEFAULT_REGION \ - -e AWS_REGION \ - -e AWS_SECRET_ACCESS_KEY \ - -e AWS_SESSION_TOKEN \ - --memory="${MEM}" \ - --memory-swap="${MEM}" \ - --cpus="${CPUS}" \ - --name="${NAME}" \ - lambci/lambda:python3.7 scan.lambda_handler "${EVENT}" diff --git a/scripts/run-update-lambda b/scripts/run-update-lambda deleted file mode 100755 index 3d24defa..00000000 --- a/scripts/run-update-lambda +++ /dev/null @@ -1,29 +0,0 @@ -#! 
/usr/bin/env bash - -set -eu -o pipefail - -# -# Run the update.lambda_handler locally in a docker container -# - -rm -rf tmp/ -unzip -qq -d ./tmp build/lambda.zip - -NAME="antivirus-update" - -docker run --rm \ - -v "$(pwd)/tmp/:/var/task" \ - -e AV_DEFINITION_PATH \ - -e AV_DEFINITION_S3_BUCKET \ - -e AV_DEFINITION_S3_PREFIX \ - -e AWS_ACCESS_KEY_ID \ - -e AWS_DEFAULT_REGION \ - -e AWS_REGION \ - -e AWS_SECRET_ACCESS_KEY \ - -e AWS_SESSION_TOKEN \ - -e CLAMAVLIB_PATH \ - --memory="${MEM}" \ - --memory-swap="${MEM}" \ - --cpus="${CPUS}" \ - --name="${NAME}" \ - lambci/lambda:python3.7 update.lambda_handler From 1e201e152aea3c2cc17aebc4ff816e83ac36eda9 Mon Sep 17 00:00:00 2001 From: Taro Matsuzawa Date: Fri, 19 Jan 2024 11:50:49 +0900 Subject: [PATCH 44/48] enable to pre-commit with venv --- .pre-commit-config.yaml | 22 +++++++++++++--------- Makefile | 2 +- clamav_test.py | 1 - deploy/cloudformation.yaml | 1 - display_infected.py | 4 +--- display_infected_test.py | 2 -- docker-compose.yml | 2 +- scan.py | 8 ++++---- scan_bucket.py | 2 -- scan_bucket_test.py | 3 --- 10 files changed, 20 insertions(+), 27 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a881ee5d..13d6cf07 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,17 +1,17 @@ repos: - repo: https://github.com/ambv/black - rev: 19.3b0 + rev: 23.12.1 hooks: - id: black - language_version: python3.7 + language_version: python3.12 exclude: > (?x)^( scripts/gen-docs-index| )$ - - repo: git://github.com/pre-commit/pre-commit-hooks - rev: v2.2.3 + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 hooks: - id: check-ast - id: check-json @@ -21,17 +21,21 @@ repos: - id: debug-statements - id: detect-private-key - id: fix-encoding-pragma - - id: flake8 - id: trailing-whitespace - - repo: git://github.com/igorshubovych/markdownlint-cli - rev: v0.17.0 + - repo: https://github.com/pycqa/flake8 + rev: 7.0.0 + hooks: + - id: flake8 + + - repo: 
https://github.com/igorshubovych/markdownlint-cli + rev: v0.38.0 hooks: - id: markdownlint entry: markdownlint --ignore .github/*.md - - repo: git://github.com/aws-cloudformation/cfn-python-lint - rev: v0.49.0 + - repo: https://github.com/aws-cloudformation/cfn-python-lint + rev: v0.84.0 hooks: - id: cfn-python-lint files: deploy/ \ No newline at end of file diff --git a/Makefile b/Makefile index 6e0d0997..cb69f6d8 100644 --- a/Makefile +++ b/Makefile @@ -41,7 +41,7 @@ archive: clean ## Create the archive for AWS lambda .PHONY: pre_commit_install ## Ensure that pre-commit hook is installed and kept up to date pre_commit_install: .git/hooks/pre-commit ## Ensure pre-commit is installed -.git/hooks/pre-commit: venv/bin/pre-commit +.git/hooks/pre-commit: venv ## Ensure venv is created first pip install pre-commit pre-commit install pre-commit install-hooks diff --git a/clamav_test.py b/clamav_test.py index 277a4831..ee8027cb 100644 --- a/clamav_test.py +++ b/clamav_test.py @@ -156,7 +156,6 @@ def test_md5_from_s3_tags_has_md5(self): self.assertEqual(expected_md5_hash, md5_hash) def test_time_from_s3(self): - expected_s3_time = datetime.datetime(2019, 1, 1) s3_stubber = Stubber(self.s3_client) diff --git a/deploy/cloudformation.yaml b/deploy/cloudformation.yaml index 91fe1457..a29e33ae 100644 --- a/deploy/cloudformation.yaml +++ b/deploy/cloudformation.yaml @@ -43,7 +43,6 @@ ServerSideEncryptionConfiguration: - ServerSideEncryptionByDefault: SSEAlgorithm: AES256 - AccessControl: BucketOwnerFullControl PublicAccessBlockConfiguration: BlockPublicAcls: !If [ publicBucket, false, true ] BlockPublicPolicy: !If [ publicBucket, false, true ] diff --git a/display_infected.py b/display_infected.py index b80e1347..dfb07077 100755 --- a/display_infected.py +++ b/display_infected.py @@ -30,7 +30,6 @@ # Get all objects in an S3 bucket that are infected def get_objects_and_sigs(s3_client, s3_bucket_name): - s3_object_list = [] s3_list_objects_result = {"IsTruncated": True} @@ -76,7 
+75,6 @@ def object_infected(s3_client, s3_bucket_name, key_name): def main(s3_bucket_name): - # Verify the S3 bucket exists s3_client = boto3.client("s3", endpoint_url=S3_ENDPOINT) try: @@ -87,7 +85,7 @@ def main(s3_bucket_name): # Scan the objects in the bucket s3_object_and_sigs_list = get_objects_and_sigs(s3_client, s3_bucket_name) - for (key_name, av_signature) in s3_object_and_sigs_list: + for key_name, av_signature in s3_object_and_sigs_list: print("Infected: {}/{}, {}".format(s3_bucket_name, key_name, av_signature)) diff --git a/display_infected_test.py b/display_infected_test.py index 86328c14..cd99ca1b 100644 --- a/display_infected_test.py +++ b/display_infected_test.py @@ -129,7 +129,6 @@ def test_get_objects_and_sigs_infected_with_sig_ok(self): self.assertEqual(s3_object_list, expected_object_list) def test_get_objects_and_sigs_clean(self): - get_object_tagging_response = { "VersionId": "abc123", "TagSet": [{"Key": AV_STATUS_METADATA, "Value": AV_STATUS_CLEAN}], @@ -150,7 +149,6 @@ def test_get_objects_and_sigs_clean(self): self.assertEqual(s3_object_list, expected_object_list) def test_get_objects_and_sigs_unscanned(self): - get_object_tagging_response = {"VersionId": "abc123", "TagSet": []} get_object_tagging_expected_params = { "Bucket": self.s3_bucket_name, diff --git a/docker-compose.yml b/docker-compose.yml index d1ec2b70..4f98214a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -5,7 +5,7 @@ services: container_name: localstack image: localstack/localstack ports: - - "127.0.0.1:4566:4566" + - "127.0.0.1:4566:4566" - "127.0.0.1:4510-4559:4510-4559" environment: - DOCKER_HOST=unix:///var/run/docker.sock diff --git a/scan.py b/scan.py index fadb8c49..cb9ba3a8 100644 --- a/scan.py +++ b/scan.py @@ -43,7 +43,6 @@ def event_object(event, event_source="s3"): - # SNS events are slightly different if event_source.upper() == "SNS": event = json.loads(event["Records"][0]["Sns"]["Message"]) @@ -274,6 +273,7 @@ def lambda_handler(event, context): 
def str_to_bool(s): return bool(strtobool(str(s))) + def strtobool(val): """Convert a string representation of truth to true (1) or false (0). True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values @@ -281,9 +281,9 @@ def strtobool(val): 'val' is anything else. """ val = val.lower() - if val in ('y', 'yes', 't', 'true', 'on', '1'): + if val in ("y", "yes", "t", "true", "on", "1"): return 1 - elif val in ('n', 'no', 'f', 'false', 'off', '0'): + elif val in ("n", "no", "f", "false", "off", "0"): return 0 else: - raise ValueError("invalid truth value %r" % (val,)) \ No newline at end of file + raise ValueError("invalid truth value %r" % (val,)) diff --git a/scan_bucket.py b/scan_bucket.py index 7c6367fb..d0cb7632 100755 --- a/scan_bucket.py +++ b/scan_bucket.py @@ -28,7 +28,6 @@ # Get all objects in an S3 bucket that have not been previously scanned def get_objects(s3_client, s3_bucket_name): - s3_object_list = [] s3_list_objects_result = {"IsTruncated": True} @@ -63,7 +62,6 @@ def object_previously_scanned(s3_client, s3_bucket_name, key_name): # Scan an S3 object for viruses by invoking the lambda function # Skip any objects that have already been scanned def scan_object(lambda_client, lambda_function_name, s3_bucket_name, key_name): - print("Scanning: {}/{}".format(s3_bucket_name, key_name)) s3_event = format_s3_event(s3_bucket_name, key_name) lambda_invoke_result = lambda_client.invoke( diff --git a/scan_bucket_test.py b/scan_bucket_test.py index 28efb634..33b3e85b 100644 --- a/scan_bucket_test.py +++ b/scan_bucket_test.py @@ -56,7 +56,6 @@ def setUp(self): ) def test_get_objects_previously_scanned_status(self): - get_object_tagging_response = { "VersionId": "abc123", "TagSet": [{"Key": AV_STATUS_METADATA, "Value": AV_STATUS_INFECTED}], @@ -77,7 +76,6 @@ def test_get_objects_previously_scanned_status(self): self.assertEqual(s3_object_list, expected_object_list) def test_get_objects_previously_scanned_timestamp(self): - get_object_tagging_response = { 
"VersionId": "abc123", "TagSet": [{"Key": AV_TIMESTAMP_METADATA, "Value": get_timestamp()}], @@ -98,7 +96,6 @@ def test_get_objects_previously_scanned_timestamp(self): self.assertEqual(s3_object_list, expected_object_list) def test_get_objects_unscanned(self): - get_object_tagging_response = {"VersionId": "abc123", "TagSet": []} get_object_tagging_expected_params = { "Bucket": self.s3_bucket_name, From 3ade62658f389e94f8146beeaace5bd8af28a7b8 Mon Sep 17 00:00:00 2001 From: Taro Matsuzawa Date: Fri, 19 Jan 2024 14:21:16 +0900 Subject: [PATCH 45/48] add localstack test --- .github/workflows/ci.yaml | 61 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 61 insertions(+) create mode 100644 .github/workflows/ci.yaml diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 00000000..706d32db --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,61 @@ +name: CI + +on: [push, pull_request] + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Install dependencies + run: sudo apt update && sudo apt install -y make wget + - name: build docker image + run: make archive + - name: Start LocalStack + uses: LocalStack/setup-localstack@main + with: + image-tag: 'latest' + install-awslocal: 'true' + - name: Run tests against localstack + run: | + awslocal s3 mb s3://antivirus-definitions + awslocal s3 mb s3://bucket-test + wget https://secure.eicar.org/eicar_com.zip + awslocal s3 cp eicar_com.zip s3://bucket-test/eicar_com.zip + awslocal lambda create-function \ + --function-name update-clamav \ + --runtime python3.12 \ + --handler update.lambda_handler \ + --role arn:aws:iam::123456789012:role/lambda-role \ + --zip-file fileb://./build/lambda.zip \ + --timeout 120 \ + --environment "Variables={AV_DEFINITION_S3_BUCKET=antivirus-definitions}" + awslocal lambda invoke \ + --function-name update-clamav \ + --invocation-type RequestResponse \ + --log-type Tail \ + --payload '{}' \ + response.json \ + 
--query 'LogResult' | tr -d '"' | base64 -d + awslocal lambda create-function \ + --function-name scan-clamav \ + --runtime python3.12 \ + --handler scan.lambda_handler \ + --role arn:aws:iam::123456789012:role/lambda-role \ + --zip-file fileb://./build/lambda.zip \ + --timeout 120 \ + --environment "Variables={AV_DEFINITION_S3_BUCKET=antivirus-definitions,AV_DELETE_INFECTED_FILES=True}" + awslocal lambda invoke \ + --function-name scan-clamav \ + --invocation-type RequestResponse \ + --log-type Tail \ + --payload '{"Records": [{"s3": {"bucket": {"name": "test-bucket"}, "object": {"key": "eicar_com.zip"}}}]}' \ + response.json \ + --query 'LogResult' | tr -d '"' | base64 -d + result=$(awslocal s3 ls s3://test-bucket) + if [ -z "$result" ]; then + echo "Bucket is empty" + else + echo "Bucket is not empty" + exit 1 + fi From f9737987b6f3fddc8cfa1b117a309b65be1aafc4 Mon Sep 17 00:00:00 2001 From: Taro Matsuzawa Date: Fri, 19 Jan 2024 14:59:20 +0900 Subject: [PATCH 46/48] added workflow, add sleep in lambda --- .github/workflows/ci.yaml | 68 ++++++++----------------------- .github/workflows/lambda.yaml | 63 ++++++++++++++++++++++++++++ .github/workflows/pre-commit.yaml | 26 ++++++++++++ 3 files changed, 107 insertions(+), 50 deletions(-) create mode 100644 .github/workflows/lambda.yaml create mode 100644 .github/workflows/pre-commit.yaml diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 706d32db..3b0b8e86 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -7,55 +7,23 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - name: Install dependencies - run: sudo apt update && sudo apt install -y make wget - - name: build docker image - run: make archive - - name: Start LocalStack - uses: LocalStack/setup-localstack@main + - name: Setup Python + uses: actions/setup-python@v2 with: - image-tag: 'latest' - install-awslocal: 'true' - - name: Run tests against localstack + python-version: '3.12' + - name: Create 
virtualenv + run: | + python -m venv venv + source venv/bin/activate + - name: Install dependencies + run: | + pip install -r requirements.txt + pip install -r requirements-dev.txt + - name: Run pytest + run: | + pytest -v + - name: Run pre-commit run: | - awslocal s3 mb s3://antivirus-definitions - awslocal s3 mb s3://bucket-test - wget https://secure.eicar.org/eicar_com.zip - awslocal s3 cp eicar_com.zip s3://bucket-test/eicar_com.zip - awslocal lambda create-function \ - --function-name update-clamav \ - --runtime python3.12 \ - --handler update.lambda_handler \ - --role arn:aws:iam::123456789012:role/lambda-role \ - --zip-file fileb://./build/lambda.zip \ - --timeout 120 \ - --environment "Variables={AV_DEFINITION_S3_BUCKET=antivirus-definitions}" - awslocal lambda invoke \ - --function-name update-clamav \ - --invocation-type RequestResponse \ - --log-type Tail \ - --payload '{}' \ - response.json \ - --query 'LogResult' | tr -d '"' | base64 -d - awslocal lambda create-function \ - --function-name scan-clamav \ - --runtime python3.12 \ - --handler scan.lambda_handler \ - --role arn:aws:iam::123456789012:role/lambda-role \ - --zip-file fileb://./build/lambda.zip \ - --timeout 120 \ - --environment "Variables={AV_DEFINITION_S3_BUCKET=antivirus-definitions,AV_DELETE_INFECTED_FILES=True}" - awslocal lambda invoke \ - --function-name scan-clamav \ - --invocation-type RequestResponse \ - --log-type Tail \ - --payload '{"Records": [{"s3": {"bucket": {"name": "test-bucket"}, "object": {"key": "eicar_com.zip"}}}]}' \ - response.json \ - --query 'LogResult' | tr -d '"' | base64 -d - result=$(awslocal s3 ls s3://test-bucket) - if [ -z "$result" ]; then - echo "Bucket is empty" - else - echo "Bucket is not empty" - exit 1 - fi + pip install pre-commit + pre-commit install-hooks + pre-commit run --all-files \ No newline at end of file diff --git a/.github/workflows/lambda.yaml b/.github/workflows/lambda.yaml new file mode 100644 index 00000000..d3e7b6fe --- /dev/null +++ 
b/.github/workflows/lambda.yaml @@ -0,0 +1,63 @@ +name: Test with LocalStack + +on: [push, pull_request] + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Install dependencies + run: sudo apt update && sudo apt install -y make wget + - name: build docker image + run: make archive + - name: Start LocalStack + uses: LocalStack/setup-localstack@main + with: + image-tag: 'latest' + install-awslocal: 'true' + - name: Run tests against localstack + run: | + awslocal s3 mb s3://antivirus-definitions + awslocal s3 mb s3://bucket-test + wget https://secure.eicar.org/eicar_com.zip + awslocal s3 cp eicar_com.zip s3://bucket-test/eicar_com.zip + awslocal lambda create-function \ + --function-name update-clamav \ + --runtime python3.12 \ + --handler update.lambda_handler \ + --role arn:aws:iam::123456789012:role/lambda-role \ + --zip-file fileb://./build/lambda.zip \ + --timeout 120 \ + --environment "Variables={AV_DEFINITION_S3_BUCKET=antivirus-definitions}" + sleep 30 + awslocal lambda invoke \ + --function-name update-clamav \ + --invocation-type RequestResponse \ + --log-type Tail \ + --payload '{}' \ + response.json \ + --query 'LogResult' | tr -d '"' | base64 -d + awslocal lambda create-function \ + --function-name scan-clamav \ + --runtime python3.12 \ + --handler scan.lambda_handler \ + --role arn:aws:iam::123456789012:role/lambda-role \ + --zip-file fileb://./build/lambda.zip \ + --timeout 120 \ + --environment "Variables={AV_DEFINITION_S3_BUCKET=antivirus-definitions,AV_DELETE_INFECTED_FILES=True}" + sleep 30 + awslocal lambda invoke \ + --function-name scan-clamav \ + --invocation-type RequestResponse \ + --log-type Tail \ + --payload '{"Records": [{"s3": {"bucket": {"name": "test-bucket"}, "object": {"key": "eicar_com.zip"}}}]}' \ + response.json \ + --query 'LogResult' | tr -d '"' | base64 -d + result=$(awslocal s3 ls s3://test-bucket) + if [ -z "$result" ]; then + echo "Bucket is empty" + else + echo "Bucket is not empty" 
+ exit 1 + fi diff --git a/.github/workflows/pre-commit.yaml b/.github/workflows/pre-commit.yaml new file mode 100644 index 00000000..ddb599dd --- /dev/null +++ b/.github/workflows/pre-commit.yaml @@ -0,0 +1,26 @@ +name: pre-commit + +on: [push, pull_request] + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: '3.12' + - name: Create virtualenv + run: | + python -m venv venv + source venv/bin/activate + - name: Install dependencies + run: | + pip install -r requirements.txt + pip install -r requirements-dev.txt + - name: Run pre-commit + run: | + pip install pre-commit + pre-commit install-hooks + pre-commit run --all-files \ No newline at end of file From 5b070f4f1e51d62e4fb3ea4d94ff30ced5c32b19 Mon Sep 17 00:00:00 2001 From: Taro Matsuzawa Date: Fri, 19 Jan 2024 15:13:03 +0900 Subject: [PATCH 47/48] fix typo, run on push only --- .github/workflows/ci.yaml | 2 +- .github/workflows/lambda.yaml | 6 +++--- .github/workflows/pre-commit.yaml | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 3b0b8e86..3619caab 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -1,6 +1,6 @@ name: CI -on: [push, pull_request] +on: [push] jobs: build: diff --git a/.github/workflows/lambda.yaml b/.github/workflows/lambda.yaml index d3e7b6fe..57acf3a2 100644 --- a/.github/workflows/lambda.yaml +++ b/.github/workflows/lambda.yaml @@ -1,6 +1,6 @@ name: Test with LocalStack -on: [push, pull_request] +on: [push] jobs: build: @@ -19,9 +19,9 @@ jobs: - name: Run tests against localstack run: | awslocal s3 mb s3://antivirus-definitions - awslocal s3 mb s3://bucket-test + awslocal s3 mb s3://test-bucket wget https://secure.eicar.org/eicar_com.zip - awslocal s3 cp eicar_com.zip s3://bucket-test/eicar_com.zip + awslocal s3 cp eicar_com.zip s3://test-bucket/eicar_com.zip awslocal lambda 
create-function \ --function-name update-clamav \ --runtime python3.12 \ diff --git a/.github/workflows/pre-commit.yaml b/.github/workflows/pre-commit.yaml index ddb599dd..cd32dcbf 100644 --- a/.github/workflows/pre-commit.yaml +++ b/.github/workflows/pre-commit.yaml @@ -1,6 +1,6 @@ name: pre-commit -on: [push, pull_request] +on: [push] jobs: build: From adeba9dc680bcd9cf25283c5bc615a0b5d971c34 Mon Sep 17 00:00:00 2001 From: Taro Matsuzawa Date: Fri, 19 Jan 2024 15:41:15 +0900 Subject: [PATCH 48/48] clean up --- .circleci/config.yml | 96 -------------------------------------------- .github/CODEOWNERS | 1 - README.md | 4 +- 3 files changed, 2 insertions(+), 99 deletions(-) delete mode 100644 .circleci/config.yml delete mode 100644 .github/CODEOWNERS diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index 85f04ad2..00000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,96 +0,0 @@ -############ -# -# Caches -# -# Caches may require a prefix, since caches in CircleCI 2.0 are immutable. -# A prefix provides an easy way to invalidate a cache. 
See https://circleci.com/docs/2.0/caching/#clearing-cache -# -############ - -version: "2.1" - -executors: - primary: - docker: - - image: docker:17.05.0-ce - python_test: - docker: - - image: circleci/python:3.7-stretch - pre_commit_test: - docker: - - image: circleci/python:3.7-stretch - -jobs: - - # `build` is used for building the archive - build: - executor: primary - working_directory: ~/src - steps: - - setup_remote_docker: - reusable: true # default - false - exclusive: true # default - true - - run: - name: Dependencies - command: apk add --no-cache make git curl openssh - - checkout - - run: - name: Build - command: make archive - - # `pre_commit_deps` is used for cache pre-commit sources - pre_commit_deps: - executor: pre_commit_test - steps: - - checkout - - - restore_cache: - keys: - - pre-commit-dot-cache-{{ checksum ".pre-commit-config.yaml" }} - - run: sudo pip install pre-commit==2.12.1 - - run: pre-commit install-hooks - - - save_cache: - key: pre-commit-dot-cache-{{ checksum ".pre-commit-config.yaml" }} - paths: - - ~/.cache/pre-commit - - # `pre_commit_test` is used to run pre-commit hooks on all files - pre_commit_test: - executor: pre_commit_test - steps: - - checkout - - restore_cache: - keys: - - pre-commit-dot-cache-{{ checksum ".pre-commit-config.yaml" }} - - run: sudo pip install pre-commit==1.18.3 - - run: - name: Run pre-commit tests - command: pre-commit run --all-files - - # `test` is used to run python tests - test: - executor: python_test - steps: - - checkout - - restore_cache: - keys: - - pre-commit-dot-cache-{{ checksum ".pre-commit-config.yaml" }} - - run: sudo pip install -r requirements.txt - - run: sudo pip install -r requirements-dev.txt - - run: nosetests - -workflows: - version: 2 - - main: - jobs: - - pre_commit_deps - - pre_commit_test: - requires: - - pre_commit_deps - - test - - build: - requires: - - pre_commit_test - - test \ No newline at end of file diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS deleted file 
mode 100644 index 6a8d16fe..00000000 --- a/.github/CODEOWNERS +++ /dev/null @@ -1 +0,0 @@ -* @UpsideTravel/full-access diff --git a/README.md b/README.md index 81130064..8fd09279 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,8 @@ # bucket-antivirus-function -[![CircleCI](https://circleci.com/gh/upsidetravel/bucket-antivirus-function.svg?style=svg)](https://circleci.com/gh/upsidetravel/bucket-antivirus-function) +[![CI](https://github.com/Georepublic/bucket-antivirus-function/actions/workflows/ci.yaml/badge.svg)](https://github.com/Georepublic/bucket-antivirus-function/actions/workflows/ci.yaml) [![pre-commit](https://github.com/Georepublic/bucket-antivirus-function/actions/workflows/pre-commit.yaml/badge.svg)](https://github.com/Georepublic/bucket-antivirus-function/actions/workflows/pre-commit.yaml) [![Test with LocalStack](https://github.com/Georepublic/bucket-antivirus-function/actions/workflows/lambda.yaml/badge.svg)](https://github.com/Georepublic/bucket-antivirus-function/actions/workflows/lambda.yaml) -Scan new objects added to any s3 bucket using AWS Lambda. [more details in this post](https://engineering.upside.com/s3-antivirus-scanning-with-lambda-and-clamav-7d33f9c5092e) +Scan new objects added to any s3 bucket using AWS Lambda. ## Features