updated zip file, function app code
ashwinvenkatesha committed Oct 16, 2024
1 parent 1a5d2a3 commit f50c361
Showing 12 changed files with 209 additions and 154 deletions.
@@ -0,0 +1 @@
+### This is to treat this as a package ###
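The new one-line file is presumably an __init__.py (the filename is not rendered in this view); its only job is to mark the directory as a Python package so the package-relative imports introduced below (from .constants import ..., from ..CommonCode.constants import ...) resolve. A sketch of the assumed layout, with only the Data Connectors path and CommonCode module confirmed by the diffs below:

Solutions/IllumioSaaS/Data Connectors/
    CommonCode/
        __init__.py    # assumed name; contains only the marker comment above
        constants.py
        helper.py      # assumed home of skip_processing_file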
@@ -19,15 +19,15 @@
PORT = int(os.environ.get("PCE_PORT", 443))
ORG_ID = os.environ["ORG_ID"]
MAX_WORKLOADS = os.environ.get("MAX_WORKLOADS", 100000)
-LOGS_TO_CONSUME = os.environ.get("logTypes", "all")
-NETWORK_TRAFFIC_TO_CONSUME = os.environ.get("networkTrafficLogTypes", "All")
-FLOW_EVENTS = "FLow Summaries"
+LOGS_TO_CONSUME = os.environ.get("logTypes", "all").lower()
+NETWORK_TRAFFIC_TO_CONSUME = os.environ.get("networkTrafficLogTypes", "All").lower()
+FLOW_EVENTS = "Flow Summaries"
AUDIT_EVENTS = "Auditable Events"
ALLOWED_TRAFFIC = "allowed"
POTENTIALLY_BLOCKED_TRAFFIC = "potentially_blocked"
BLOCKED_TRAFFIC = "blocked"
UNKNOWN_TRAFFIC = "unknown"
ALL_TRAFFIC = "All"
ALL_TRAFFIC = "all"

# Azure config
AZURE_TENANT_ID = os.environ["AZURE_TENANT_ID"]
@@ -42,3 +42,7 @@
WORKSPACE_ID = os.environ["WORKSPACE_ID"]
AZURE_STORAGE_CONNECTION_STRING = os.environ["AzureWebJobsStorage"]
MAX_QUEUE_MESSAGES_MAIN_QUEUE = int(os.environ.get("MAX_QUEUE_MESSAGES_MAIN_QUEUE", 80))

+# Azure Storage Queue
+AZURE_STORAGE_PRIMARY_QUEUE = "python-queue-items"
+AZURE_STORAGE_BACKLOG_QUEUE = "python-queue-items-backlog"
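The .lower() calls above make customer-supplied settings compare cleanly against the lowercase constants; a minimal standalone sketch (the environment value is hypothetical):

import os

# Hypothetical customer value with unexpected casing
os.environ["logTypes"] = "All"

LOGS_TO_CONSUME = os.environ.get("logTypes", "all").lower()
ALL_TRAFFIC = "all"

print(LOGS_TO_CONSUME == ALL_TRAFFIC)  # True; matching no longer depends on casing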
@@ -1,11 +1,12 @@
-import constants
+from .constants import (
+    LOGS_TO_CONSUME,
+    FLOW_EVENTS,
+    AUDIT_EVENTS,
+    MAX_SCRIPT_EXEC_TIME_MINUTES,
+    ALL_TRAFFIC,
+)
import time

-LOGS_TO_CONSUME = constants.LOGS_TO_CONSUME.downcase
-FLOW_EVENTS = constants.FLOW_EVENTS
-AUDIT_EVENTS = constants.AUDIT_EVENTS
-MAX_SCRIPT_EXEC_TIME_MINUTES = constants.MAX_SCRIPT_EXEC_TIME_MINUTES


def skip_processing_file(file_path):
    """
@@ -17,7 +18,7 @@ def skip_processing_file(file_path):
    So if LOGS_TO_CONSUME is set to All by customer, then all logs are consumed by default and the method returns False
    Else, either audit or traffic events are consumed
    """
-    if LOGS_TO_CONSUME == "All":
+    if LOGS_TO_CONSUME == ALL_TRAFFIC:
        return False

    if "auditable" in file_path:
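Per the docstring, once the customer sets logTypes to all (any casing, thanks to constants.py lowering it), every file passes through; a hedged sketch exercising that branch, with invented S3-style keys:

# Invented keys, shaped like the auditable/flow-summary paths the connector reads
audit_key = "illumio/auditable/2024/10/16/events.gz"
flow_key = "illumio/flow_summaries/2024/10/16/flows.gz"

# With LOGS_TO_CONSUME == ALL_TRAFFIC ("all"), the early return fires for both
assert skip_processing_file(audit_key) is False
assert skip_processing_file(flow_key) is False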
Binary file modified Solutions/IllumioSaaS/Data Connectors/IllumioEventsConn.zip
Binary file modified Solutions/IllumioSaaS/Data Connectors/IllumioQueueTrigger.zip
@@ -2,23 +2,32 @@
import time
import logging
import azure.functions as func
-from ..azure_storage_queue import AzureStorageQueueHelper
-from .. import constants
-from ..helper import check_if_script_runs_too_long
+from ..CommonCode.azure_storage_queue import AzureStorageQueueHelper
+from ..CommonCode.constants import (
+    MAX_SCRIPT_EXEC_TIME_MINUTES,
+    AZURE_STORAGE_CONNECTION_STRING,
+    MAX_QUEUE_MESSAGES_MAIN_QUEUE,
+    AZURE_STORAGE_PRIMARY_QUEUE,
+    AZURE_STORAGE_BACKLOG_QUEUE,
+)

-MAX_SCRIPT_EXEC_TIME_MINUTES = constants.MAX_SCRIPT_EXEC_TIME_MINUTES
-AZURE_STORAGE_CONNECTION_STRING = constants.AZURE_STORAGE_CONNECTION_STRING
-MAX_QUEUE_MESSAGES_MAIN_QUEUE = constants.MAX_QUEUE_MESSAGES_MAIN_QUEUE

+def check_if_script_runs_too_long(percentage, script_start_time):
+    now = int(time.time())
+    duration = now - script_start_time
+    max_duration = int(MAX_SCRIPT_EXEC_TIME_MINUTES * 60 * percentage)
+    return duration > max_duration


async def main(mytimer: func.TimerRequest):
    script_start_time = int(time.time())
    mainQueueHelper = AzureStorageQueueHelper(
-        connectionString=AZURE_STORAGE_CONNECTION_STRING, queueName="python-queue-items"
+        connectionString=AZURE_STORAGE_CONNECTION_STRING,
+        queueName=AZURE_STORAGE_PRIMARY_QUEUE,
    )
    backlogQueueHelper = AzureStorageQueueHelper(
        connectionString=AZURE_STORAGE_CONNECTION_STRING,
-        queueName="python-queue-items-backlog",
+        queueName=AZURE_STORAGE_BACKLOG_QUEUE,
    )

    backlogQueueCount = backlogQueueHelper.get_queue_current_count()
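check_if_script_runs_too_long is a simple time-budget guard: it returns True once the given fraction of MAX_SCRIPT_EXEC_TIME_MINUTES has elapsed since script_start_time. A hedged usage sketch (process_one_queue_item is a hypothetical stand-in for the real queue work):

script_start_time = int(time.time())

# Keep working, but bail out once ~90% of the allowed runtime is spent
while not check_if_script_runs_too_long(0.90, script_start_time):
    process_one_queue_item()  # hypothetical helper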
@@ -7,31 +7,24 @@
import logging
import azure.functions as func
import urllib.parse
-from .. import constants
-from ..sentinel_connector import AzureSentinelConnectorAsync
-from ..helper import skip_processing_file
-
-# Azure config
-AZURE_TENANT_ID = constants.AZURE_TENANT_ID
-AZURE_CLIENT_ID = constants.AZURE_CLIENT_ID
-AZURE_CLIENT_SECRET = constants.AZURE_CLIENT_SECRET
-DCE_ENDPOINT = constants.DCE_ENDPOINT
-DCR_ID = constants.DCR_ID
-LOG_ANALYTICS_URI = constants.LOG_ANALYTICS_URI
-WORKSPACE_ID = constants.WORKSPACE_ID
-FLOW_LOGS_CUSTOM_TABLE = constants.FLOW_LOGS_CUSTOM_TABLE
-AUDIT_LOGS_CUSTOM_TABLE = constants.AUDIT_LOGS_CUSTOM_TABLE
-LOGS_TO_CONSUME = constants.LOGS_TO_CONSUME
-FLOW_EVENTS = constants.FLOW_EVENTS
-AUDIT_EVENTS = constants.AUDIT_EVENTS
-
-# AWS config
-AWS_KEY = constants.AWS_KEY
-AWS_SECRET = constants.AWS_SECRET
-AWS_REGION_NAME = constants.AWS_REGION_NAME
-VISIBILITY_TIMEOUT = 1800
-LINE_SEPARATOR = constants.LINE_SEPARATOR
-MAX_SCRIPT_EXEC_TIME_MINUTES = constants.MAX_SCRIPT_EXEC_TIME_MINUTES
+from ..CommonCode.sentinel_connector import AzureSentinelConnectorAsync
+from ..CommonCode.constants import (
+    AZURE_TENANT_ID,
+    AZURE_CLIENT_ID,
+    AZURE_CLIENT_SECRET,
+    DCE_ENDPOINT,
+    DCR_ID,
+    FLOW_LOGS_CUSTOM_TABLE,
+    AUDIT_LOGS_CUSTOM_TABLE,
+    LOGS_TO_CONSUME,
+    AWS_KEY,
+    AWS_SECRET,
+    AWS_REGION_NAME,
+    LINE_SEPARATOR,
+    ALL_TRAFFIC,
+    FLOW_EVENTS,
+    AUDIT_EVENTS,
+)


# Defining the S3 Client object based on AWS Credentials
@@ -52,14 +45,24 @@ def _create_s3_client():
    )


+def fileToBeFiltered(file_path):
+    if LOGS_TO_CONSUME == ALL_TRAFFIC:
+        return False
+
+    if "auditable" in file_path:
+        return FLOW_EVENTS in LOGS_TO_CONSUME
+    else:
+        return AUDIT_EVENTS in LOGS_TO_CONSUME


async def _generate_sentinel_connectors(session):
    stream_names = []
    sentinel_connectors = {}
-    if LOGS_TO_CONSUME == "All":
+    if LOGS_TO_CONSUME == ALL_TRAFFIC:
        stream_names.append(FLOW_LOGS_CUSTOM_TABLE)
        stream_names.append(AUDIT_LOGS_CUSTOM_TABLE)

-    elif LOGS_TO_CONSUME == "Auditable Events":
+    elif LOGS_TO_CONSUME == AUDIT_EVENTS:
        stream_names.append(AUDIT_LOGS_CUSTOM_TABLE)
    else:
        stream_names.append(FLOW_LOGS_CUSTOM_TABLE)
@@ -104,7 +107,7 @@ async def main(msg: func.QueueMessage):
        file_size = obj.get("file_size", 0)
        accumulated_file_size += file_size

-        if skip_processing_file(link):
+        if fileToBeFiltered(link):
            continue

        sqs_ids_seen_so_far += 1
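Net effect in the queue trigger: each queue message is screened by the locally defined fileToBeFiltered before its events are shipped. A hedged sketch of that flow (the link and file_size fields follow the obj.get calls above; everything else is illustrative):

# Hypothetical queue message body
obj = {"link": "illumio/auditable/2024/10/16/events.gz", "file_size": 2048}

link = obj.get("link")
if not fileToBeFiltered(link):
    # download the file from S3 and send its events to the matching Sentinel table
    process_file(link)  # hypothetical helper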