diff --git a/.github/workflows/ci-test-py.yml b/.github/workflows/ci-test-py.yml
deleted file mode 100644
index ebe43d2..0000000
--- a/.github/workflows/ci-test-py.yml
+++ /dev/null
@@ -1,45 +0,0 @@
-name: ci-test-py
-on:
-  push:
-    branches: [main]
-  pull_request:
-    branches: [main]
-
-jobs:
-  py-pip-ai-sentryflow:
-    runs-on: ubuntu-20.04
-    steps:
-      - uses: actions/checkout@v3
-
-      - uses: actions/setup-python@v4
-        with:
-          python-version: '3.11'
-          cache: 'pip'
-
-      - name: check Python pip3
-      - run: pip install -r requirements.txt
-      - run: pip test
-        working-directory: ai-engine
-
-  py-lint-ai-sentryflow:
-    runs-on: ubuntu-20.04
-    steps:
-      - uses: actions/checkout@v3
-
-      - uses: actions/setup-python@v4
-        with:
-          python-version: '3.11'
-          cache: 'pip'
-
-      - name: Install dependencies
-        run: |
-          python -m pip install --upgrade pip
-          pip install -r requirements.txt
-        working-directory: ai-engine
-
-      - name: Lint with Ruff
-        run: |
-          pip install ruff
-          ruff --output-format=github .
-        continue-on-error: true
-        working-directory: ai-engine
diff --git a/ai-engine/.dockerignore b/ai-engine/.dockerignore
deleted file mode 100644
index 23ca759..0000000
--- a/ai-engine/.dockerignore
+++ /dev/null
@@ -1,6 +0,0 @@
-.idea
-.git
-.gitignore
-protobuf
-Dockerfile
-__pycache__/
\ No newline at end of file
diff --git a/ai-engine/.gitignore b/ai-engine/.gitignore
deleted file mode 100644
index 533d889..0000000
--- a/ai-engine/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-.idea/
-__pycache__/
-protobuf/
\ No newline at end of file
diff --git a/ai-engine/Dockerfile b/ai-engine/Dockerfile
index 1e40850..f2141b8 100644
--- a/ai-engine/Dockerfile
+++ b/ai-engine/Dockerfile
@@ -3,26 +3,14 @@
 # Dockerfile
 FROM ubuntu:latest
 
-RUN apt-get update && apt-get -y install python3 python3-pip wget git
+RUN apt-get update && apt-get -y install python3 python3-pip wget git
 
-RUN git clone https://github.com/isu-kim/stringlifier.git
-WORKDIR ./stringlifier
-RUN pip install .
+RUN git clone https://github.com/adobe/stringlifier
 
-RUN mkdir /app
-WORKDIR /app
-COPY /ai-engine .
-
-# Build protobuf for Python
-RUN pip install grpcio grpcio-tools
-RUN mkdir protobuf/
-COPY /protobuf ./protobuf
-
-# Due to python import bugs, we have to compile protoc using this command
-# Refer to https://github.com/protocolbuffers/protobuf/issues/1491#issuecomment-261621112 for more information on this
-RUN python3 -m grpc_tools.protoc --python_out=. --pyi_out=. --grpc_python_out=. -I=. protobuf/sentryflow_metrics.proto
+RUN pip3 install ./stringlifier pymongo Flask
 
 WORKDIR /app
-RUN pip install -r requirements.txt
+
+COPY . .
 
 CMD ["python3", "ai-engine.py"]
diff --git a/ai-engine/Makefile b/ai-engine/Makefile
index c42b031..9be3361 100644
--- a/ai-engine/Makefile
+++ b/ai-engine/Makefile
@@ -1,9 +1,9 @@
 # SPDX-License-Identifier: Apache-2.0
 
-IMAGE_NAME = 5gsec/sentryflow-ai-engine
-TAG = v0.0.1
+IMAGE_NAME = 5GSEC/sentryflow-ai-engine
+TAG = v0.1
 
 .PHONY: build
 build:
-	docker build -t $(IMAGE_NAME):$(TAG) -f ./Dockerfile ../
+	docker build -t $(IMAGE_NAME):$(TAG) -f ./Dockerfile
 
diff --git a/ai-engine/ai-engine.py b/ai-engine/ai-engine.py
index 8f1be34..9f025ce 100644
--- a/ai-engine/ai-engine.py
+++ b/ai-engine/ai-engine.py
@@ -1,94 +1,37 @@
-import os
-import grpc
-
+from pymongo import MongoClient
 from stringlifier.api import Stringlifier
-from concurrent import futures
-
-from protobuf import sentryflow_metrics_pb2_grpc
-from protobuf import sentryflow_metrics_pb2
-
-
-class HandlerServer:
-    """
-    Class for gRPC Servers
-    """
-    def __init__(self):
-        try:
-            self.listen_addr = os.environ["AI_ENGINE_ADDRESS"]
-        except KeyError:
-            self.listen_addr = "0.0.0.0:5000"
-
-        self.server = None
-        self.grpc_servers = list()
-
-    def init_grpc_servers(self):
-        """
-        init_grpc_servers method that initializes and registers gRPC servers
-        :return: None
-        """
-        self.server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
-        self.grpc_servers.append(APIClassificationServer())  # @todo: make this configurable
-
-        grpc_server: GRPCServer
-        for grpc_server in self.grpc_servers:
-            grpc_server.register(self.server)
-
-    def serve(self):
-        """
-        serve method that starts serving gRPC servers, this is blocking function.
-        :return: None
-        """
-        self.server.add_insecure_port(self.listen_addr)
-
-        print("[INFO] Starting to serve on {}".format(self.listen_addr))
-        self.server.start()
-        self.server.wait_for_termination()
+from flask import Flask
+
+app = Flask(__name__)
+s = Stringlifier()
 
 
-class GRPCServer:
-    """
-    Abstract class for an individual gRPC Server
-    """
-    def register(self, server):
-        """
-        register method that registers gRPC service to target server
-        :param server: The server
-        :return: None
-        """
-        pass
+@app.route('/api_metrics')
+def api_metrics():
+    # Connect to MongoDB
+    client = MongoClient('mongodb://mongo:27017')
+
+    # Access the numbat database
+    db = client.numbat
 
-
-class APIClassificationServer(sentryflow_metrics_pb2_grpc.SentryFlowMetricsServicer, GRPCServer):
-    """
-    Class for API Classification Server using Stringlifier
-    """
+    # Access the access-logs collection
+    collection = db['access-logs']
 
-    def __init__(self):
-        self.stringlifier = Stringlifier()
-        print("[Init] Successfully initialized APIClassificationServer")
+    # Retrieve all documents from the collection
+    logs = list(collection.find({}))
 
-    def register(self, server):
-        sentryflow_metrics_pb2_grpc.add_SentryFlowMetricsServicer_to_server(self, server)
+    # Close the MongoDB connection
+    client.close()
 
-    def GetAPIClassification(self, request_iterator, context):
-        """
-        GetAPIClassification method that runs multiple API ML Classification at once
-        :param request_iterator: The requests
-        :param context: The context
-        :return: The results
-        """
+    paths = list()
 
-        for req in request_iterator:
-            paths = req.paths
-            ml_results = self.stringlifier(paths)
-            print("{} -> {}".format(paths, ml_results))
+    # Print out all entries
+    for log in logs:
+        paths.append(log["path"])
 
-            results = [sentryflow_metrics_pb2.APIClassificationSingleResponse(merged=ml_result, fields=[]) for ml_result
-                       in ml_results]
-            yield sentryflow_metrics_pb2.APIClassificationResponse(response=results)
-
+    parsed = s(paths)
+    print(set(parsed))
+    return str(set(parsed))
 
 if __name__ == '__main__':
-    hs = HandlerServer()
-    hs.init_grpc_servers()
-    hs.serve()
+    app.run(host='0.0.0.0', port=5000)
diff --git a/ai-engine/client.py b/ai-engine/client.py
deleted file mode 100644
index 8921127..0000000
--- a/ai-engine/client.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import os
-import uuid
-
-import grpc
-
-from protobuf import sentryflow_metrics_pb2_grpc
-from protobuf import sentryflow_metrics_pb2
-
-if __name__ == "__main__":
-    try:
-        listen_addr = os.environ["AI_ENGINE_ADDRESS"]
-    except KeyError:
-        listen_addr = "0.0.0.0:5000"
-
-    with grpc.insecure_channel(listen_addr) as channel:
-        stub = sentryflow_metrics_pb2_grpc.SentryFlowMetricsStub(channel)
-        req = sentryflow_metrics_pb2.APIClassificationRequest(paths=["/api/test", "/api/test/" + str(uuid.uuid4())])
-
-        try:
-            response_stream = stub.GetAPIClassification(req)
-            for response in response_stream:
-                print("Response: ", str(response))
-        except grpc.RpcError as e:
-            print("Error occurred during RPC:", e)
\ No newline at end of file
diff --git a/ai-engine/requirements.txt b/ai-engine/requirements.txt
deleted file mode 100644
index 7c37043..0000000
Binary files a/ai-engine/requirements.txt and /dev/null differ
diff --git a/deployments/sentryflow.yaml b/deployments/sentryflow.yaml
index 923b1ea..3284d75 100644
--- a/deployments/sentryflow.yaml
+++ b/deployments/sentryflow.yaml
@@ -8,43 +8,6 @@ metadata:
     pod-security.kubernetes.io/enforce: privileged
     pod-security.kubernetes.io/warn: privileged
 ---
-apiVersion: apps/v1
-kind: Deployment
-metadata:
-  name: ai-engine
-  namespace: sentryflow
-spec:
-  replicas: 1
-  selector:
-    matchLabels:
-      app: ai-engine
-  template:
-    metadata:
-      labels:
-        app: ai-engine
-    spec:
-      containers:
-        - name: sentryflow
-          image: 5gsec/sentryflow-ai-engine:v0.0.1
-          ports:
-            - containerPort: 5000
-              protocol: TCP
-              name: grpc-sentryflow
----
-apiVersion: v1
-kind: Service
-metadata:
-  name: ai-engine
-  namespace: sentryflow
-spec:
-  selector:
-    app: ai-engine
-  ports:
-    - protocol: TCP
-      port: 5000
-      targetPort: 5000
-      name: grpc-sentryflow
----
 apiVersion: v1
 kind: ServiceAccount
 metadata:
diff --git a/protobuf/Makefile b/protobuf/Makefile
index 8811773..eea513e 100644
--- a/protobuf/Makefile
+++ b/protobuf/Makefile
@@ -1,4 +1,4 @@
-PROTO:=sentryflow.proto sentryflow_metrics.proto
+PROTO:=sentryflow.proto
 PBGO:=$(PROTO:.proto=.pb.go)
 
 .PHONY: build
diff --git a/protobuf/sentryflow_metrics.proto b/protobuf/sentryflow_metrics.proto
deleted file mode 100644
index 46c3e1f..0000000
--- a/protobuf/sentryflow_metrics.proto
+++ /dev/null
@@ -1,18 +0,0 @@
-syntax = "proto3";
-
-package protobuf;
-
-option go_package = "sentryflow/protobuf";
-
-message APIClassificationRequest {
-  string path = 1;
-}
-
-message APIClassificationResponse {
-  string merged = 1;
-  repeated string fields = 2;
-}
-
-service SentryFlowMetrics {
-  rpc GetAPIClassification(stream APIClassificationRequest) returns (stream APIClassificationResponse);
-}
diff --git a/sentryflow/config/config.go b/sentryflow/config/config.go
index db5cacd..dd2ac60 100644
--- a/sentryflow/config/config.go
+++ b/sentryflow/config/config.go
@@ -23,9 +23,6 @@ type NumbatConfig struct {
 	PatchNamespace          bool // Enable/Disable patching namespace for Istio injection
 	PatchRestartDeployments bool // Enable/Disable restarting deployments after patching
 
-	AIEngineService   string
-	AIEngineBatchSize int
-
 	Debug bool // Enable/Disable SentryFlow debug mode
 }
 
@@ -40,8 +37,6 @@ const (
 	CustomExportListenPort  string = "customExportListenPort"
 	PatchNamespace          string = "patchNamespace"
 	PatchRestartDeployments string = "patchRestartDeployments"
-	AIEngineService         string = "AIEngineService"
-	AIEngineBatchSize       string = "AIEngineBatchSize"
 	Debug                   string = "debug"
 )
 
@@ -52,8 +47,6 @@ func readCmdLineParams() {
 	customExportListenPortStr := flag.String(CustomExportListenPort, "8080", "Custom export gRPC server listen port")
 	patchNamespaceB := flag.Bool(PatchNamespace, false, "Enable/Disable patching Istio injection to all namespaces")
 	patchRestartDeploymentsB := flag.Bool(PatchRestartDeployments, false, "Enable/Disable restarting deployments in all namespaces")
-	AIEngineServiceStr := flag.String(AIEngineService, "ai-engine.sentryflow.svc.cluster.local", "Service address for SentryFlow AI Engine")
-	AIEngineBatchSizeInt := flag.Int(AIEngineBatchSize, 5, "Batch size fo SentryFlow AI Engine")
 	configDebugB := flag.Bool(Debug, false, "Enable/Disable debugging mode using logs")
 
 	var flags []string
@@ -71,8 +64,6 @@ func readCmdLineParams() {
 	viper.SetDefault(CustomExportListenPort, *customExportListenPortStr)
 	viper.SetDefault(PatchNamespace, *patchNamespaceB)
 	viper.SetDefault(PatchRestartDeployments, *patchRestartDeploymentsB)
-	viper.SetDefault(AIEngineService, *AIEngineServiceStr)
-	viper.SetDefault(AIEngineBatchSize, *AIEngineBatchSizeInt)
 	viper.SetDefault(Debug, *configDebugB)
 }
 
@@ -85,7 +76,7 @@ func LoadConfig() error {
 	viper.AutomaticEnv()
 
 	// todo: read configuration from config file
-	_ = os.Getenv("SENTRYFLOW_CFG")
+	_ = os.Getenv("NUMBAT_CFG")
 
 	GlobalCfg.OtelGRPCListenAddr = viper.GetString(OtelGRPCListenAddr)
 	GlobalCfg.OtelGRPCListenPort = viper.GetString(OtelGRPCListenPort)
@@ -93,8 +84,6 @@ func LoadConfig() error {
 	GlobalCfg.CustomExportListenPort = viper.GetString(CustomExportListenPort)
 	GlobalCfg.PatchNamespace = viper.GetBool(PatchNamespace)
 	GlobalCfg.PatchRestartDeployments = viper.GetBool(PatchRestartDeployments)
-	GlobalCfg.AIEngineService = viper.GetString(AIEngineService)
-	GlobalCfg.AIEngineBatchSize = viper.GetInt(AIEngineBatchSize)
 	GlobalCfg.Debug = viper.GetBool(Debug)
 
 	log.Printf("Configuration [%+v]", GlobalCfg)
diff --git a/sentryflow/metrics/api/apiAnalyzer.go b/sentryflow/metrics/api/apiAnalyzer.go
index 78a2ff7..0798746 100644
--- a/sentryflow/metrics/api/apiAnalyzer.go
+++ b/sentryflow/metrics/api/apiAnalyzer.go
@@ -19,9 +19,6 @@ type Analyzer struct {
 	perAPICount     map[string]uint64
 	perAPICountLock sync.Mutex // @todo perhaps combine those two?
 
-	curBatchCount  int
-	batchCountLock sync.Mutex
-
 	stopChan chan struct{}
 	apiJob   chan string
 }