diff --git a/.github/workflows/production.yml b/.github/workflows/production.yml new file mode 100644 index 0000000..0323ea0 --- /dev/null +++ b/.github/workflows/production.yml @@ -0,0 +1,34 @@ +name: Production + +on: + push: + branches: + - 'production' + release: + types: + - created + +jobs: + docker: + runs-on: ubuntu-latest + steps: + - + name: Set up QEMU + uses: docker/setup-qemu-action@v3 + - + name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - + name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - + name: Build and push + uses: docker/build-push-action@v5 + with: + push: true + tags: | + runbooksolutions/agent:latest + ${{ github.event_name == 'release' && format('runbooksolutions/agent:{0}', github.event.release.tag_name) || '' }} \ No newline at end of file diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml new file mode 100644 index 0000000..c083533 --- /dev/null +++ b/.github/workflows/staging.yml @@ -0,0 +1,29 @@ +name: Dev + +on: + push: + branches: + - 'staging' + +jobs: + docker: + runs-on: ubuntu-latest + steps: + - + name: Set up QEMU + uses: docker/setup-qemu-action@v3 + - + name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - + name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - + name: Build and push + uses: docker/build-push-action@v5 + with: + push: true + tags: runbooksolutions/agent:dev \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..92d77a0 --- /dev/null +++ b/.gitignore @@ -0,0 +1,6 @@ +kerberos +run +OLD +hosts +test.py +__pycache__ \ No newline at end of file diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..f352406 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,26 @@ +# Use an official Python runtime as a parent 
image +FROM python:latest + +# Install Required System Tools +RUN apt-get update && \ + apt-get install nmap gcc libkrb5-dev libssl-dev krb5-user -y + +# Set the working directory to /app +WORKDIR /app + +RUN mkdir plugins stores + +COPY _docker/start.sh /start.sh +RUN chmod +x /start.sh + +# Copy the current directory contents into the container at /app +COPY requirements.txt /app + +# Install any needed packages specified in requirements.txt +RUN pip install -r requirements.txt + +COPY app.py /app +COPY runbooksolutions /app/runbooksolutions + +# Define the command to run your application +CMD [ "/start.sh" ] \ No newline at end of file diff --git a/README.md b/README.md index a0f87e9..5ef8657 100644 --- a/README.md +++ b/README.md @@ -1 +1,83 @@ # RunbookSolution Network Agent + +This codebase comprises the core of the Network Agent for RunbookSolutions. + +## Installation + +### Prebuilt Docker Image + +```sh +mkdir agent +cd agent +mkdir plugins stores kerberos +wget https://raw.githubusercontent.com/RunbookSolutions/agent/staging/config.ini + +docker run \ + --name RunbookSolutions_Agent \ + -v $(pwd)/config.ini:/app/config.ini \ + -v $(pwd)/plugins:/app/plugins \ + -v $(pwd)/stores:/app/stores \ + -v $(pwd)/kerberos/krb5.conf:/etc/krb5.conf \ + -v $(pwd)/kerberos:/keytabs \ + -d \ + --restart unless-stopped \ + runbooksolutions/agent:latest + +``` + +### Extending the Default Image + +The default image includes the following Python libraries by default. To include additional libraries for your custom plugins, simply extend our default image and use it instead. 
+ +```Dockerfile +FROM runbooksolutions/agent:latest +# Using a requirements.txt (Recommended) +COPY requirements.txt /app/custom_requirements.txt +RUN pip install -r custom_requirements.txt +# OR Individually +RUN pip install some_package +``` + + +### From Source +```sh +git clone https://github.com/RunbookSolutions/agent.git +cd agent +./run +``` + +## Configuration +Configuration is maintained in a simple 'config.ini' file consisting of the 'server_url' of the backend and the 'client_id' for device authentication. + +```ini +[agent] +server_url=http://192.168.1.197 # Note: Do NOT include a trailing slash on the server_url +client_id=9ab55261-bfb7-4bb3-ad29-a6dbdbf8a5af # Device Code Grant client_id provided by the server +auth=True # To disable auth when not using with RunbookSolutions. +``` + +## Expected Server Responses +Due to the agent's nature, it can easily be used by others outside of RunbookSolutions. +To implement a backend for this agent, you will need to provide the following endpoints. + +`GET /api/agent` for the agent to load information about itself. This endpoint also provides the agent with a list of PLUGIN_IDs that it needs to load. + +`GET /api/agent/plugin/{PLUGIN_ID}` for the agent to download plugins. This endpoint also provides details about commands the plugin provides. + +`GET /api/agent/tasks` for the agent to load tasks that it needs to run. Tasks include scheduled and one-off tasks to run and will always present tasks until they are removed from the backend. This allows the agent to restart without skipping task execution. + +Additional details can be found on the [Expected Server Responses](/docs/Responses.md) page. + +## Creating a Keytab File + +Some plugins may require authentication against your windows domain. 
 + +The simplest way to accomplish this is by using the [Docker Kerberos Keytab Generator](https://github.com/simplesteph/docker-kerberos-get-keytab): + +```sh +cd agent +docker run -it --rm \ + -v $(pwd)/kerberos:/output \ + -e PRINCIPAL= \ + simplesteph/docker-kerberos-get-keytab +``` \ No newline at end of file diff --git a/_docker/start.sh b/_docker/start.sh new file mode 100644 index 0000000..fee62ae --- /dev/null +++ b/_docker/start.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +keytab_directory="/keytabs" + +for keytab_file in "$keytab_directory"/*.keytab; do + identity=$(basename "$keytab_file" .keytab) + kinit -kt "$keytab_file" "$identity" +done + +python app.py \ No newline at end of file diff --git a/app.py b/app.py new file mode 100644 index 0000000..50afdb5 --- /dev/null +++ b/app.py @@ -0,0 +1,22 @@ + +import logging +from runbooksolutions.logging_setup import setup_logging +setup_logging() + + +from runbooksolutions.store.Store import Store +# Set the Data Store(s) Password +Store.set_encryption_key(b"Store_Encryption_Password") +from runbooksolutions.agent.Agent import Agent + +async def main(): + agent = Agent(num_threads=3) + try: + await agent.start() + except KeyboardInterrupt: + logging.info("Received CTRL+C. 
Stopping gracefully.") + agent.stop() + +import asyncio +if __name__ == "__main__": + asyncio.run(main()) \ No newline at end of file diff --git a/config.ini b/config.ini new file mode 100644 index 0000000..55be0c2 --- /dev/null +++ b/config.ini @@ -0,0 +1,7 @@ +[agent] +# Note: Do NOT include a trailing slash on the server_url +server_url=http://192.168.1.197 +# Device Code Grant client_id provided by the server +client_id=9ab55261-bfb7-4bb3-ad29-a6dbdbf8a5af +# If we are required to perform Device Code Authentication +auth=True \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..5ab7912 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,10 @@ +version: '3' +services: + network-agent: + build: + context: . + dockerfile: Dockerfile + volumes: + - .:/app + environment: + - DEBUG=true diff --git a/docs/Responses.md b/docs/Responses.md new file mode 100644 index 0000000..e4b4af0 --- /dev/null +++ b/docs/Responses.md @@ -0,0 +1,88 @@ +# Responses From Backend + +> Note: The backend server is expected to identify the agent making the request without any additional parameters being sent. By default RunbookSolutions achieves this using the OAuth Device Code process to retrieve an Access Token for authentication. +>> In non-NAT'ed environments, the backend server could use the IP address from the request to identify agents. + +## `GET /api/agent` + +This endpoint provides the agent with basic information about itself along with a list of PLUGIN_ID's that the agent should have loaded. + +- The Team ID provided here is used to identify and group agents to specific groups on the backend. It is **required** even if not used. 
+ + +```json +{ + "data": { + "id": "9ab55db0-9bfa-45a6-8280-bb94c6b0fe8d", + "name": "Test Agent", + "team_id": "9ab55261-b6b0-4fb4-85e5-a3491a72f720", + "plugins": [ + "9ab56426-f429-4c4b-9755-40c92449f0be" + ] + } +} +``` + +## `GET /api/agent/plugin/{plugin_id}` +This endpoint provides the agent with individual plugins for the agent along with the corresponding commands the plugin makes available. + +> Note: It is important to note that two different versions of a plugin may be loaded by an agent. Commands are prefixed with the PLUGIN_ID to avoid collisions. + +- The `script` variable contains the code the agent will execute when required. +- The `hash` variable is the `SHA512` hash of the script; the agent will verify both the script variable as well as the file it creates to store the plugin. +- The `commands` variable contains the details of what function in the program to run for which command is provided. + +> Important: Both the `script` variable and the file written to disk must match the provided hash for the plugin to be loaded and run. 
+ +```json +{ + "data": { + "id": "9ab56426-f429-4c4b-9755-40c92449f0be", + "name": "Test Plugin", + "version": 0, + "description": null, + "script": "class Plugin:\n def __init__(self):\n pass\n def greet(self):\n print(\"Hello from the Test Plugin!\")\n def square(self, number):\n result = number ** 2\n print(f\"The square of {number} is {result}\")", + "hash": "809c167b7b1e7fb9504dc136af3c2dc1c17545355a9aaec28c3792e54bc540943db236b6af547a732161b5d717c9b14a7c508ab49b3f06e128997de06b3abfd3", + "commands": { + "9ab56426-f429-4c4b-9755-40c92449f0be.greet": { + "id": "9ab5659c-271d-40d5-be37-3a3847b92aab", + "name": "9ab56426-f429-4c4b-9755-40c92449f0be.greet", + "function": "greet" + }, + "9ab56426-f429-4c4b-9755-40c92449f0be.square": { + "id": "9ab565fc-1036-4afb-ac7e-ec92e0db6985", + "name": "9ab56426-f429-4c4b-9755-40c92449f0be.square", + "function": "square" + } + } + } +} +``` + +## `GET /api/agent/tasks` +The following endpoint provides the agent with details of what commands need to be run, when (if scheduled), and any arguments for said command. + +- The `command` variable must match one of the keys provided by the `plugin.commands` variable when downloading plugins. +- The `cron` variable is the cron formatted schedule for when the task runs, or `null` if it should only be run once. +- The `arguments` variable should be a JSON encoded string containing the argument name and values for the function to run. + +> Note: The agent uses the `task.id` to ensure tasks are not being duplicated into the schedule and queue. 
+ +```json +{ + "data": [ + { + "id": "9ab581cf-546d-405a-afaf-474cc631ed5c", + "command": "9ab56426-f429-4c4b-9755-40c92449f0be.greet", + "cron": null, + "arguments": "{}" + }, + { + "id": "9ab582f7-9e64-4fad-b6b9-369633776ae4", + "command": "9ab56426-f429-4c4b-9755-40c92449f0be.square", + "cron": "* * * * *", + "arguments": "{\"number\":2}" + } + ] +} +``` \ No newline at end of file diff --git a/plugins/.gitignore b/plugins/.gitignore new file mode 100644 index 0000000..c96a04f --- /dev/null +++ b/plugins/.gitignore @@ -0,0 +1,2 @@ +* +!.gitignore \ No newline at end of file diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..c2a0c7c --- /dev/null +++ b/requirements.txt @@ -0,0 +1,4 @@ +requests +cryptography +colorlog +croniter \ No newline at end of file diff --git a/run b/run new file mode 100644 index 0000000..c8acd9a --- /dev/null +++ b/run @@ -0,0 +1,7 @@ +docker run \ + -v ./config.ini:/app/config.ini \ + -v ./plugins:/app/plugins \ + -v ./stores:/app/stores \ + -v $(pwd)/kerberos/krb5.conf:/etc/krb5.conf \ + -v $(pwd)/kerberos:/keytabs \ + -it $(docker build -q .) 
diff --git a/runbooksolutions/__init__.py b/runbooksolutions/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/runbooksolutions/agent/API.py b/runbooksolutions/agent/API.py new file mode 100644 index 0000000..b8ba4c4 --- /dev/null +++ b/runbooksolutions/agent/API.py @@ -0,0 +1,68 @@ +from runbooksolutions.auth.Auth import Auth +from runbooksolutions.agent.Task import Task +import requests +import logging + +class AgentDetails: + id: str + name: str + team_id: str + plugins = list + def __init__(self, response: dict) -> None: + response = response.get('data') + self.id = response.get('id') + self.name = response.get('name') + self.team_id = response.get('team_id') + if response.get('plugins') == None: + self.plugins = [] + else: + self.plugins = response.get('plugins') + +class AgentTasks: + tasks: [Task] + + def __init__(self, response: dict) -> None: + self.tasks = [] + response = response.get('data') + for task in response: + self.tasks.append(Task(task)) + + def getTasks(self): + return self.tasks + +class API: + url: str = None + auth: Auth = None + + def __init__(self, auth: Auth, url: str) -> None: + self.auth = auth + self.url = url + "/api" + + def getAgentDetails(self) -> AgentDetails: + return AgentDetails(self.sendRequest('/agent', 'GET')) + + def getAgentTasks(self) -> AgentTasks: + return AgentTasks(self.sendRequest('/agent/tasks', 'GET')) + + def sendRequest(self, url, method, data=None): + url = self.url + url + headers = self.auth.getHeaders() # Assuming Auth has a method to get authentication headers + method = method.upper() + + # Choose the appropriate requests method based on the provided 'method' + if method.upper() == 'GET': + response = requests.get(url, headers=headers) + elif method.upper() == 'POST': + response = requests.post(url, headers=headers, json=data) + elif method.upper() == 'PUT': + response = requests.put(url, headers=headers, json=data) + elif method.upper() == 'DELETE': + response = requests.delete(url, 
headers=headers) + else: + raise ValueError("Invalid HTTP method. Supported methods are GET, POST, PUT, and DELETE.") + + # You might want to handle response status codes and raise exceptions if needed + if response.status_code != 200: + raise Exception(f"Request failed with status code {response.status_code}. Response content: {response.text}") + + return response.json() # Assuming the response is in JSON format \ No newline at end of file diff --git a/runbooksolutions/agent/Agent.py b/runbooksolutions/agent/Agent.py new file mode 100644 index 0000000..33c884c --- /dev/null +++ b/runbooksolutions/agent/Agent.py @@ -0,0 +1,79 @@ +from runbooksolutions.auth.Auth import Auth +from runbooksolutions.schedule.Schedule import Schedule +from runbooksolutions.queue.Queue import Queue +from runbooksolutions.agent.API import API +from runbooksolutions.agent.API import AgentDetails +from runbooksolutions.agent.PluginManager import PluginManager +import asyncio +import configparser +import logging + +class Agent: + auth: Auth = None + schedule: Schedule = None + queue: Queue = None + api: API = None + pluginManager: PluginManager = None + + agentDetails: AgentDetails = None + + def __init__(self, num_threads: int = 1) -> None: + self.agentConfig = self.loadConfig() + self.auth = Auth( + url=self.agentConfig.get('server_url'), + client_id=self.agentConfig.get('client_id'), + enabled=self.agentConfig.get('auth') + ) + self.api = API(auth=self.auth, url=self.agentConfig.get('server_url')) + self.pluginManager = PluginManager(self.api) + self.queue = Queue(num_threads, self.pluginManager) + self.schedule = Schedule(self.queue) + + def loadConfig(self): + config = configparser.ConfigParser() + try: + config.read('config.ini') + return config['agent'] + except FileNotFoundError: + logging.critical("config.ini file not found!") + exit() + except configparser.Error: + logging.critical("Error reading config file!") + exit() + + async def syncAgent(self): + while True: + 
self.agentDetails = self.api.getAgentDetails() + self.pluginManager.syncPlugins(self.agentDetails.plugins) + + tasks = self.api.getAgentTasks().getTasks() + for task in tasks: + if task.shouldSchedule(): + self.schedule.add_task(task=task, cron_expression=task.cron) + else: + await self.queue.enqueue_task(task) + + # Sleep for 60 seconds + await asyncio.sleep(60) + + async def start(self) -> None: + agent_task = asyncio.create_task(self.syncAgent()) + queue_task = asyncio.create_task(self.queue.start()) + schedule_task = asyncio.create_task(self.schedule.start()) + + try: + # Other asynchronous tasks can be started here... + + logging.info("End of Start") + + # Wait for the background task to complete + await agent_task + await queue_task + await schedule_task + except asyncio.CancelledError: + logging.info("Agent task canceled. Stopping gracefully.") + finally: + self.queue.stop() + + def stop(self) -> None: + self.queue.stop() \ No newline at end of file diff --git a/runbooksolutions/agent/Plugin.py b/runbooksolutions/agent/Plugin.py new file mode 100644 index 0000000..2d15547 --- /dev/null +++ b/runbooksolutions/agent/Plugin.py @@ -0,0 +1,3 @@ +class Plugin: + def __init__(self) -> None: + pass \ No newline at end of file diff --git a/runbooksolutions/agent/PluginManager.py b/runbooksolutions/agent/PluginManager.py new file mode 100644 index 0000000..f6d3d08 --- /dev/null +++ b/runbooksolutions/agent/PluginManager.py @@ -0,0 +1,164 @@ +from runbooksolutions.agent.Plugin import Plugin +from runbooksolutions.agent.API import API +import json +import logging +import os +import importlib +import hashlib + +class PluginManager: + plugin_directory: str = "plugins" + plugins: dict = dict() + loadedCommands: dict = dict() + api: API = None + + def __init__(self, api: API) -> None: + self.api = api + + def verify_plugin_hash(self, pluginID: str) -> bool: + json_file_path = os.path.join(self.plugin_directory, f"{pluginID}.json") + with open(json_file_path, 'r') as 
json_file: + plugin_definition = json.loads(json_file.read()) + + script = plugin_definition.get('script', '').encode('utf-8') + json_hash = hashlib.sha512(script).hexdigest() + + script_file_path = os.path.join(self.plugin_directory, f"{pluginID}.py") + with open(script_file_path, 'rb') as script_file: + script_content = script_file.read() + script_hash = hashlib.sha512(script_content).hexdigest() + + logging.debug(f"Expected Hash: {plugin_definition.get('hash')}") + logging.debug(f"JSON Hash: {json_hash}") + logging.debug(f"JSON Hash: {script_hash}") + + if not json_hash == plugin_definition.get('hash', ''): + logging.critical("JSON Hash mismatch") + return False + + if not script_hash == plugin_definition.get('hash', ''): + logging.critical("SCRIPT Hash mismatch") + return False + + return True + + def addPlugin(self, pluginID: str) -> None: + logging.debug(f"Adding Plugin {pluginID}") + if self.pluginIsLocal(pluginID): + logging.debug("Plugin is local") + else: + self.downloadPlugin(pluginID) + + if not self.verify_plugin_hash(pluginID): + logging.critical(f"Plugin {pluginID} FAILED Hash Verification") + return + + self.plugins[pluginID] = self.loadPlugin(pluginID) + self.loadPluginCommands(pluginID) + + def removePlugin(self, pluginID: str) -> None: + logging.debug(f"Removing Plugin {pluginID}") + if pluginID in self.plugins: + del self.plugins[pluginID] + else: + logging.warning(f"Plugin {pluginID} not found in loaded plugins.") + + # TODO: Remove the plugin files from the file system + json_file_path = os.path.join(self.plugin_directory, f"{pluginID}.json") + with open(json_file_path, 'r') as json_file: + plugin_definition = json.load(json_file) + + for command in plugin_definition.get('commands', []).keys(): + self.loadedCommands.pop(command) + + def syncPlugins(self, plugins: list) -> None: + logging.debug(f"Syncing Plugins. 
Loaded Plugins: {list(self.plugins.keys())} Requested Plugins: {plugins}") + for pluginID in list(self.plugins.keys()): + if pluginID not in plugins: + logging.debug("Removing Plugin") + self.removePlugin(pluginID) + else: + logging.debug("Plugin Still Required.") + + for pluginID in plugins: + if pluginID not in self.plugins.keys(): + logging.debug("Adding Plugin") + self.addPlugin(pluginID) + else: + logging.debug("Plugin Already Loaded") + + def pluginIsLocal(self, pluginID: str) -> bool: + if not os.path.exists(os.path.join(self.plugin_directory, f"{pluginID}.json")): + logging.debug("Plugin JSON Not Local") + return False + + if not os.path.exists(os.path.join(self.plugin_directory, f"{pluginID}.py")): + logging.debug("Plugin python Not Local") + return False + + return True + + def downloadPlugin(self, pluginID: str) -> None: + logging.debug(f"Downloading Plugin {pluginID}") + pluginData = self.api.sendRequest(f'/agent/plugin/{pluginID}', 'GET') + pluginData = pluginData.get('data') + self.savePlugin(pluginData) + + def savePlugin(self, plugin_definition: dict) -> None: + logging.debug("Saving Plugin") + if not os.path.exists(self.plugin_directory): + os.makedirs(self.plugin_directory) + + # Save JSON data + json_file_path = os.path.join(self.plugin_directory, f"{plugin_definition.get('id')}.json") + with open(json_file_path, 'w') as json_file: + json.dump(plugin_definition, json_file) + + # Save Python script + script_file_path = os.path.join(self.plugin_directory, f"{plugin_definition.get('id')}.py") + with open(script_file_path, 'w') as script_file: + script_file.write(plugin_definition.get('script', '')) + + def commandIsLoaded(self, commandName: str) -> bool: + if commandName in self.loadedCommands.keys(): + return True + return False + + def loadPlugin(self, pluginID: str) -> Plugin: + try: + script_file_path = os.path.join(self.plugin_directory, f"{pluginID}.py") + spec = importlib.util.spec_from_file_location("Plugin", script_file_path) + module = 
importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + # Store the instance of the plugin in the loaded_plugins dictionary + return module.Plugin() + except Exception as e: + print(f"Error importing plugin {pluginID}: {e}") + return None + + + def loadPluginCommands(self, pluginID: str) -> None: + json_file_path = os.path.join(self.plugin_directory, f"{pluginID}.json") + with open(json_file_path, 'r') as json_file: + plugin_definition = json.load(json_file) + + commands = plugin_definition.get('commands', []) + + for command_name, command_data in commands.items(): + modified_command = {'pluginID': pluginID, **command_data} + + self.loadedCommands.update({command_name: modified_command}) + + def executeCommand(self, commandName, *args, **kwargs) -> None: + if not self.commandIsLoaded(commandName): + logging.critical(f"Tried to call {commandName} when it wasn't loaded") + return + + pluginID = self.loadedCommands.get(commandName).get('pluginID') + function_name = self.loadedCommands.get(commandName).get('function') + + function_to_call = getattr(self.plugins.get(pluginID), function_name, None) + if callable(function_to_call): + function_to_call(*args, **kwargs) + else: + print(f"Function {function_name} not found in plugin {pluginID}.") \ No newline at end of file diff --git a/runbooksolutions/agent/PluginManager.py.old b/runbooksolutions/agent/PluginManager.py.old new file mode 100644 index 0000000..db5dfa5 --- /dev/null +++ b/runbooksolutions/agent/PluginManager.py.old @@ -0,0 +1,126 @@ +import importlib +import json +import requests +import hashlib +import os + +class PluginManager: + def __init__(self, plugin_directory="plugins"): + self.plugin_directory = plugin_directory + self.loaded_plugins = {} + self.available_commands = set() + + def load_plugin(self, plugin_name, plugin_url=None): + if plugin_name in self.loaded_plugins: + print(f"Plugin {plugin_name} is already loaded.") + return + + # If plugin is not available locally, perform HTTP 
request to get it + if plugin_url: + response = requests.get(plugin_url) + if response.status_code == 200: + plugin_data = response.json() + # Verify the integrity of the received plugin using hash + if self.verify_plugin_hash(plugin_data): + self.save_plugin_locally(plugin_name, plugin_data) + self.update_available_commands(plugin_data) + self.loaded_plugins[plugin_name] = self.import_plugin(plugin_name) + print(f"Plugin {plugin_name} loaded successfully.") + else: + print("Plugin integrity verification failed. Aborting.") + else: + print(f"Failed to fetch plugin {plugin_name}. HTTP Error {response.status_code}.") + else: + print(f"Local copy of plugin {plugin_name} not found and no URL provided.") + + def sync_plugins(self, plugin_list): + # Unload plugins not in the provided list + plugins_to_unload = set(self.loaded_plugins.keys()) - set(plugin_list) + for plugin_name in plugins_to_unload: + self.unload_plugin(plugin_name) + + # Load plugins in the provided list + for plugin_name in plugin_list: + if self.is_command_available(plugin_name): + self.load_plugin(plugin_name) + + def unload_plugin(self, plugin_name): + if plugin_name in self.loaded_plugins: + del self.loaded_plugins[plugin_name] + print(f"Plugin {plugin_name} unloaded successfully.") + else: + print(f"Plugin {plugin_name} is not loaded.") + + def get_available_commands(self): + return self.available_commands + + def is_command_available(self, command_name): + return command_name in self.available_commands + + def execute_command(self, command_name, *args, **kwargs): + if self.is_command_available(command_name): + for plugin_name, plugin_data in self.loaded_plugins.items(): + plugin_commands = plugin_data.get('commands', {}) + if command_name in plugin_commands: + function_name = plugin_commands[command_name] + function_to_call = getattr(plugin_data['instance'], function_name, None) + if callable(function_to_call): + function_to_call(*args, **kwargs) + else: + print(f"Function {function_name} not 
found in plugin {plugin_name}.") + return + print(f"Command {command_name} not found in any loaded plugins.") + else: + print(f"Command {command_name} is not available.") + + def verify_plugin_hash(self, plugin_data): + script = plugin_data.get('script', '').encode('utf-8') + calculated_hash = hashlib.sha256(script).hexdigest() + return calculated_hash == plugin_data.get('hash', '') + + def save_plugin_locally(self, plugin_name, plugin_data): + if not os.path.exists(self.plugin_directory): + os.makedirs(self.plugin_directory) + + # Save JSON data + json_file_path = os.path.join(self.plugin_directory, f"{plugin_name}.json") + with open(json_file_path, 'w') as json_file: + json.dump(plugin_data, json_file) + + # Save Python script + script_file_path = os.path.join(self.plugin_directory, f"{plugin_name}.py") + with open(script_file_path, 'w') as script_file: + script_file.write(plugin_data.get('script', '')) + + def import_plugin(self, plugin_name): + try: + module_name = os.path.splitext(plugin_name)[0] + spec = importlib.util.spec_from_file_location(module_name, os.path.join(self.plugin_directory, f"{plugin_name}.json")) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + # Store the instance of the plugin in the loaded_plugins dictionary + module.instance = module.Plugin() + return module + except Exception as e: + print(f"Error importing plugin {plugin_name}: {e}") + return None + + def update_available_commands(self, plugin_data): + commands = plugin_data.get('commands', {}).keys() + self.available_commands.update(commands) + +# Example usage: +if __name__ == "__main__": + plugin_manager = PluginManager() + + # Load plugins based on available commands + plugin_list = ["NMAP Plugin", "Another Plugin"] + plugin_manager.sync_plugins(plugin_list) + + # Get available commands + commands = plugin_manager.get_available_commands() + print("Available commands:", commands) + + # Execute a command + command_to_execute = "COMMAND_NAME" + 
plugin_manager.execute_command(command_to_execute) diff --git a/runbooksolutions/agent/Task.py b/runbooksolutions/agent/Task.py new file mode 100644 index 0000000..e753468 --- /dev/null +++ b/runbooksolutions/agent/Task.py @@ -0,0 +1,18 @@ +import json + +class Task: + id: str + command: str + arguments: dict + cron: str + def __init__(self, task: dict) -> None: + self.id = task.get('id') + self.command = task.get('command') + self.arguments = task.get('arguments') + self.cron = task.get('cron') + + def shouldSchedule(self): + return self.cron != None + + def getArguments(self): + return json.loads(self.arguments) \ No newline at end of file diff --git a/runbooksolutions/agent/__init__.py b/runbooksolutions/agent/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/runbooksolutions/auth/AccessToken.py b/runbooksolutions/auth/AccessToken.py new file mode 100644 index 0000000..2013562 --- /dev/null +++ b/runbooksolutions/auth/AccessToken.py @@ -0,0 +1,55 @@ +from runbooksolutions.store.Store import Store +import logging +import time +from typing import Optional + +class AccessToken: + + def __init__(self, token_data: dict) -> None: + try: + self.access_token = token_data.get('access_token') + self.token_type = token_data.get('token_type') + self.expires_in = token_data.get('expires_in') + self.scope = token_data.get('scope') + self.creation_time = time.time() + + self._save_to_store() + except AttributeError as e: + logging.error(f"Failed to initialize AccessToken. 
class Auth:
    """OAuth2 device-code authentication for the agent.

    On construction (when ``enabled``), cached DeviceCode/AccessToken objects
    are loaded from the Store; if no access token exists yet, a device code is
    requested and the token endpoint is polled until the user authorizes the
    agent. Construction therefore blocks until authentication completes.
    """

    url: str
    client_id: str
    enabled: bool = True
    deviceCode: DeviceCode = None
    accessToken: AccessToken = None

    def __init__(self, url: str, client_id: str, enabled: bool = True) -> None:
        self.url = url
        self.client_id = client_id
        self.enabled = enabled

        logging.debug("Starting Authentication")
        if not self.enabled:
            return

        # Reuse previously persisted credentials when available.
        self.deviceCode = DeviceCode.load_from_store()
        self.accessToken = AccessToken.load_from_store()

        if self.accessToken is None:
            logging.debug("Access Token does not yet exist.")
            if self.deviceCode is None:
                logging.debug("Device code does not yet exist.")
                self.getDeviceCode()

            # We have a Device Code; so lets get our access tokens
            self.pollAuthorization()

        logging.debug("Finished Authentication")

    def getHeaders(self) -> dict:
        """Return HTTP headers for API requests (Bearer token when auth is enabled)."""
        if not self.enabled:
            return {
                'Content-Type': 'application/json',
            }

        headers = {
            'Authorization': f'Bearer {self.accessToken.getAccessToken()}',
            'Content-Type': 'application/json',
        }
        return headers

    def getDeviceCode(self) -> None:
        """Request a new device code from the server.

        Stores the resulting DeviceCode on self and logs the verification URI
        plus user code at CRITICAL level so the operator always sees the
        activation prompt. Raises Exception on any network/HTTP failure.
        """
        logging.debug("Getting a device code.")
        myobj = {
            'client_id': self.client_id,
            'grant_type': 'urn:ietf:params:oauth:grant-type:device_code',
            'scope': ''
        }
        url = f"{self.url}/oauth/device/code"

        try:
            response = requests.post(url, json=myobj)
            # Check the HTTP status code
            response.raise_for_status()

            self.deviceCode = DeviceCode(json.loads(response.text))

            logging.critical(f"{self.deviceCode.getVerificationURI()} CODE: {self.deviceCode.getUserCode()}")

        except requests.RequestException as e:
            logging.error(f"Request failed: {e}")
            raise Exception(f"Request failed: {e}")

    def pollAuthorization(self) -> None:
        """Poll the token endpoint until the user approves the device code.

        Blocks (sleeping 5s between attempts while authorization is pending)
        until an access token is obtained; a new device code is requested if
        the current one expires. Raises Exception on network failure, on an
        unparseable success response, or on an unexpected OAuth error.
        """
        logging.debug("Polling for user authorization...")

        while True:
            if self.deviceCode.isExpired():
                self.getDeviceCode()
            try:
                myobj = {
                    'client_id': self.client_id,
                    'grant_type': 'urn:ietf:params:oauth:grant-type:device_code',
                    'device_code': self.deviceCode.getDeviceCode()
                }
                url = f"{self.url}/oauth/token"

                logging.debug(myobj)

                response = requests.post(url, json=myobj)
                logging.debug(f"Response Status Code: {response.status_code}")

                if response.status_code == 200:
                    # Authorization successful, extract and store the access token
                    try:
                        token_data = json.loads(response.text)
                        self.accessToken = AccessToken(token_data)
                        logging.debug(f"Access Token: {self.accessToken.getAccessToken()}")
                        break
                    except json.JSONDecodeError:
                        logging.error("Failed to parse response as JSON.")
                        raise Exception("Failed to parse response as JSON.")
                elif response.status_code == 400:
                    # BUG FIX: previously the body was parsed twice and
                    # `"..." in response.json().get('error')` raised TypeError
                    # when the 'error' key was absent (get() returns None).
                    # Parse once and default to an empty string.
                    error = response.json().get('error') or ''
                    if "authorization_pending" in error:
                        # Authorization is still pending, wait and poll again
                        logging.debug("Authorization Pending")
                        time.sleep(5)
                    elif "expired_token" in error:
                        logging.error("Device code has expired.")
                        self.getDeviceCode()
                    else:
                        logging.error(f"Failed to obtain access token: {response.text}")
                        raise Exception(f"Failed to obtain access token: {response.text}")
                else:
                    logging.error(f"Failed to obtain access token: {response.text}")
                    raise Exception(f"Failed to obtain access token: {response.text}")

            except requests.RequestException as e:
                logging.error(f"Request failed: {e}")
                raise Exception(f"Request failed: {e}")
def setup_logging():
    """Configure the root logger: DEBUG level, one colorized console handler.

    basicConfig installs a default StreamHandler; every StreamHandler is then
    removed so the ColoredFormatter-backed handler added below is the only
    console output.
    """
    logging.basicConfig(level=logging.DEBUG)

    # Snapshot the stream handlers first, then detach them (mutating
    # root.handlers while iterating it directly would skip entries).
    stale_handlers = [h for h in logging.root.handlers if isinstance(h, logging.StreamHandler)]
    for stale in stale_handlers:
        logging.root.removeHandler(stale)

    color_formatter = ColoredFormatter(
        "%(log_color)s%(levelname)-8s %(message)s%(reset)s",
        datefmt=None,
        reset=True,
        log_colors={
            'DEBUG': 'cyan',
            'INFO': 'green',
            'WARNING': 'yellow',
            'ERROR': 'red',
            'CRITICAL': 'white,bg_red',
        },
        secondary_log_colors={},
        style='%',
    )

    console = logging.StreamHandler()
    console.setFormatter(color_formatter)
    logging.root.addHandler(console)
class Queue:
    """Thread-backed task queue.

    Tasks sit on an asyncio.Queue; `num_threads` workers run in the default
    executor, pulling tasks and dispatching each task's command through the
    PluginManager. Duplicate task ids are rejected at enqueue time.
    """

    def __init__(self, num_threads: int = 1, pluginManager: PluginManager = None) -> None:
        self.task_queue = asyncio.Queue()
        self.num_threads = num_threads
        self.threads = []
        self.stop_event = Event()
        self.pluginManager = pluginManager

    def _worker(self):
        """Worker loop: execute queued tasks until stop_event is set."""
        while not self.stop_event.is_set():
            try:
                task = self.task_queue.get_nowait()
                self.execute_task(task)
            except asyncio.QueueEmpty:
                # BUG FIX: this branch used to `pass`, busy-spinning a full
                # CPU core whenever the queue was empty. Wait a short
                # interval instead; Event.wait() returns immediately once
                # stop() sets the event, so shutdown latency is unchanged.
                self.stop_event.wait(0.1)
            except asyncio.CancelledError:
                # Log a message when the coroutine is canceled
                logging.debug("Worker coroutine canceled.")
                break
            except Exception as e:
                # Keep the worker alive when an individual task fails.
                logging.error(e)

    def execute_task(self, task: Task) -> None:
        """Run a single task's command through the plugin manager."""
        command = task.command
        arguments = task.getArguments()

        logging.info(f"Running Task \'{command}\' with arguments {arguments}")
        self.pluginManager.executeCommand(command, **arguments)

    async def start(self) -> None:
        """Launch the worker pool and run until the workers finish or stop."""
        logging.debug("Queue Started")
        try:
            loop = asyncio.get_event_loop()
            tasks = [loop.run_in_executor(None, self._worker) for _ in range(self.num_threads)]
            await asyncio.gather(*tasks)
        except KeyboardInterrupt:
            logging.info("Received KeyboardInterrupt. Stopping gracefully.")
        finally:
            self.stop()

    def stop(self) -> None:
        """Signal all workers to exit their loops."""
        self.stop_event.set()

    async def enqueue_task(self, task: Task) -> None:
        """Add a task unless one with the same id is already queued."""
        task_id = task.id
        if not self._is_task_in_queue(task_id):
            logging.debug("Enqueued Task")
            await self.task_queue.put(task)
        else:
            logging.warning("Task already in Queue.")

    def _is_task_in_queue(self, task_id: str) -> bool:
        # NOTE(review): peeks at asyncio.Queue's private `_queue` deque —
        # there is no public inspection API. Works single-process, but is
        # an implementation detail worth keeping an eye on across versions.
        for task in self.task_queue._queue:
            if task.id == task_id:
                return True
        return False
    def add_task(self, task: Task, cron_expression: str) -> None:
        """Register (task, cron_expression) unless the task id is already scheduled."""
        task_id = task.id
        if not self._is_task_in_schedule(task_id):
            logging.debug("Added Task")
            self.tasks.append((task, cron_expression))
        else:
            logging.warning("Task already Scheduled")

    async def start(self) -> None:
        """Run forever: roughly once per second, enqueue every task whose
        cron expression fires at the current moment."""
        logging.debug("Schedule Started")
        while True:
            for task, cron_expression in self.tasks:
                if self.shouldRun(cron_expression):
                    await self.queue.enqueue_task(task)
            await asyncio.sleep(1)

    def shouldRun(self, cron_expression):
        """Return True when `now` is within ~1 second of a scheduled fire time
        for `cron_expression`.

        The microsecond nudge below keeps get_prev() from skipping a boundary
        hit: when now falls exactly on a cron boundary (microsecond == 0),
        bumping it by 1µs makes the just-passed boundary the "previous" fire
        time rather than the one a whole interval earlier.
        """
        from dateutil.relativedelta import relativedelta
        cron = croniter(cron_expression, datetime.now())
        td, ms1 = cron.get_current(datetime), relativedelta(microseconds=1)
        if not td.microsecond:
            td = td + ms1
        cron.set_current(td, force=True)
        # tdp/tdt are float epoch timestamps: the croniter's current time and
        # the previous matching fire time. If they differ by less than the
        # 1-second precision window, the task is due in this polling tick.
        tdp, tdt = cron.get_current(), cron.get_prev()
        precision_in_seconds = 1
        return (max(tdp, tdt) - min(tdp, tdt)) < precision_in_seconds

    def _is_task_in_schedule(self, task_id: str) -> bool:
        """Linear scan for a scheduled task with this id (schedules are small)."""
        for task, _ in self.tasks:
            if task.id == task_id:
                return True
        return False
class Store:
    """Encrypted, checksummed pickle persistence under ``stores/<name>.pkl``.

    A class-wide secret (set via set_encryption_key) is stretched with
    PBKDF2-HMAC-SHA256 into a per-file AES-256-CBC key. On-disk layout:
    ``salt(16) || iv(16) || sha256-hexdigest(64) || ciphertext``.
    """

    # Class variable for the encryption key (should be kept secure)
    STATIC_ENCRYPTION_KEY = None

    def __init__(self, filename):
        self.filename = "stores/" + filename + ".pkl"

    def _derive_key(self, salt):
        """Derive a 32-byte AES key from the class secret and a per-file salt."""
        kdf = PBKDF2HMAC(
            algorithm=hashes.SHA256(),
            salt=salt,
            iterations=100000,
            length=32,
            backend=default_backend()
        )
        return kdf.derive(self.STATIC_ENCRYPTION_KEY)

    def _generate_checksum(self, data):
        # Calculate SHA-256 checksum over the pickled form of `data`.
        hasher = hashlib.sha256()
        hasher.update(pickle.dumps(data))
        return hasher.hexdigest()

    def save(self, data):
        """Encrypt `data` (pickled, PKCS7-padded, AES-CBC) and write it to disk.

        Raises ValueError if the encryption key has not been set.
        """
        if not self.STATIC_ENCRYPTION_KEY:
            raise ValueError("Encryption key not set. Call set_encryption_key() before using the Store.")

        # Generate checksum of the plaintext for integrity verification on load.
        checksum = self._generate_checksum(data)

        # Fresh random salt and IV per save.
        salt = os.urandom(16)
        key = self._derive_key(salt)
        iv = os.urandom(16)

        # Pad the pickled data to the AES block size using PKCS7.
        padder = PKCS7(algorithms.AES.block_size).padder()
        padded_data = padder.update(pickle.dumps(data)) + padder.finalize()

        # Encrypt the data using AES-CBC.
        cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=default_backend())
        encryptor = cipher.encryptor()
        ciphertext = encryptor.update(padded_data) + encryptor.finalize()

        # SECURITY FIX: the previous version wrote the derived AES key itself
        # into the file (file.write(key + iv + ...)), so anyone holding the
        # file could decrypt it — the encryption was pointless. We now store
        # the KDF salt instead and re-derive the key on load. NOTE: this
        # changes the on-disk layout; stores written by the old code cannot
        # be read back (they will fail checksum/unpickling on load).
        with open(self.filename, 'wb') as file:
            file.write(salt + iv + checksum.encode() + ciphertext)

    def load(self):
        """Decrypt and return the stored object, or None if no file exists.

        Raises ValueError if the key is unset or the checksum does not match.
        """
        if not self.STATIC_ENCRYPTION_KEY:
            raise ValueError("Encryption key not set. Call set_encryption_key() before using the Store.")

        try:
            # Read the data from the file
            with open(self.filename, 'rb') as file:
                blob = file.read()

            # Layout: salt(16) | iv(16) | hexdigest(64) | ciphertext.
            salt = blob[:16]
            iv = blob[16:32]
            checksum = blob[32:96].decode()
            ciphertext = blob[96:]

            # Re-derive the AES key from the secret + stored salt.
            key = self._derive_key(salt)

            # Decrypt the data using AES-CBC.
            cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=default_backend())
            decryptor = cipher.decryptor()
            padded_data = decryptor.update(ciphertext) + decryptor.finalize()

            # BUG FIX: strip the PKCS7 padding explicitly. The old code fed
            # padded bytes straight to pickle.loads, which only worked
            # because pickle ignores trailing bytes after its STOP opcode.
            unpadder = PKCS7(algorithms.AES.block_size).unpadder()
            decrypted_data = unpadder.update(padded_data) + unpadder.finalize()

            obj = pickle.loads(decrypted_data)

            # Verify the plaintext checksum before handing the object back.
            if checksum != self._generate_checksum(obj):
                raise ValueError("Checksum verification failed. Data may be corrupted.")

            return obj

        except FileNotFoundError:
            return None

    @classmethod
    def set_encryption_key(cls, key):
        """Install the class-wide secret used to derive per-file AES keys."""
        cls.STATIC_ENCRYPTION_KEY = key