From bbdd74d987799a585356117538858df098291d96 Mon Sep 17 00:00:00 2001 From: Will G Date: Mon, 6 Nov 2023 20:16:42 -0500 Subject: [PATCH 1/8] WIP Initial Agent and basic NMAP scan working - no real API ability yet --- Dockerfile | 19 +++++++++ agent.ini | 3 ++ agent/__init__.py | 0 agent/agent.py | 89 ++++++++++++++++++++++++++++++++++++++++++ agent/api.py | 42 ++++++++++++++++++++ agent/base_plugin.py | 24 ++++++++++++ agent/output.py | 25 ++++++++++++ agent/runner.py | 36 +++++++++++++++++ app.py | 28 +++++++++++++ docker-compose.yml | 10 +++++ plugins.ini | 2 + plugins/nmap/plugin.py | 34 ++++++++++++++++ poll | 0 requirements.txt | 5 +++ run | 1 + 15 files changed, 318 insertions(+) create mode 100755 Dockerfile create mode 100644 agent.ini create mode 100644 agent/__init__.py create mode 100644 agent/agent.py create mode 100644 agent/api.py create mode 100644 agent/base_plugin.py create mode 100644 agent/output.py create mode 100644 agent/runner.py create mode 100644 app.py create mode 100755 docker-compose.yml create mode 100644 plugins.ini create mode 100644 plugins/nmap/plugin.py create mode 100644 poll create mode 100755 requirements.txt create mode 100755 run diff --git a/Dockerfile b/Dockerfile new file mode 100755 index 0000000..5d92f38 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,19 @@ +# Use an official Python runtime as a parent image +FROM python:latest + +# Install Required System Tools +RUN apt-get update && apt-get install nmap -y + +# Set the working directory to /app +WORKDIR /app + +# Copy the current directory contents into the container at /app +COPY requirements.txt /app + +# Install any needed packages specified in requirements.txt +RUN pip install -r requirements.txt + +COPY . 
/app + +# Define the command to run your application +CMD [ "python", "app.py" ] diff --git a/agent.ini b/agent.ini new file mode 100644 index 0000000..3584312 --- /dev/null +++ b/agent.ini @@ -0,0 +1,3 @@ +[API] +api_url = http://192.168.1.197:8000 +poll_interval = 2 \ No newline at end of file diff --git a/agent/__init__.py b/agent/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/agent/agent.py b/agent/agent.py new file mode 100644 index 0000000..998889b --- /dev/null +++ b/agent/agent.py @@ -0,0 +1,89 @@ +# agent/agent.py +import asyncio +from apscheduler.schedulers.asyncio import AsyncIOScheduler +from agent.output import Output +from agent.api import API +import configparser +import os +import threading +from agent.runner import Runner +from queue import Queue + +class Agent: + def __init__(self): + self.event_bus = Queue() + self.plugins = [] + self.scheduler = AsyncIOScheduler() + self.output = Output() + + self.running_jobs = set() + + config = configparser.ConfigParser() + config.read('agent.ini') + self.api = API(config.get('API', 'api_url'), self) + if config.has_option('API', 'poll_interval'): + self.add_periodic_task(int(config.get('API', 'poll_interval')), self.api.poll) + else: + self.add_periodic_task(30, self.api.poll) + + async def start(self): + # Start the event loop + await self.run() + + def put_event(self, event): + self.output.debug(f"Adding Event: {event} to the bus") + self.event_bus.put(event) + + async def add_plugin(self, plugin): + await plugin.setup(self) + self.plugins.append(plugin) + + async def add_plugin_by_name(self, plugin_info): + plugin_name = plugin_info['name'].lower() + try: + plugin_module = f'plugins.{plugin_name}.plugin' + plugin_class = getattr(__import__(plugin_module, fromlist=['Plugin']), 'Plugin') + plugin_instance = plugin_class(plugin_info['options']) + await self.add_plugin(plugin_instance) + self.output.success(f"Plugin [{plugin_name}] Loaded!") + except (ImportError, AttributeError): + 
self.output.error(f"Could not add plugin: {plugin_name}") + + def add_periodic_task(self, interval, task_function): + self.output.debug(f"Adding Task: {task_function} to run every {interval} seconds") + self.scheduler.add_job(task_function, 'interval', seconds=interval) + + async def run(self): + # Start the scheduler + scheduler_thread = threading.Thread(target=self.start_scheduler) + scheduler_thread.start() + + runner = Runner(self.event_bus, self) + runner.start() + while True: + await asyncio.sleep(1) + + def start_scheduler(self): + self.output.debug(f"Starting the Scheduler.") + + # Set up a new event loop for this thread + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + + # Start the scheduler in the new event loop + self.scheduler._eventloop = loop + self.scheduler.start() + loop.run_forever() + + def handle_event(self, event): + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + loop.run_until_complete(self.handle_event_coroutine(event)) + loop.close() + + async def handle_event_coroutine(self, event): + for plugin in self.plugins: + if await plugin.should_handle(event): + plugin_folder = os.path.basename(os.path.dirname(plugin.__class__.__module__)) + self.output.debug(f"Running Plugin: {plugin_folder.lower()}") + await plugin.handle(event) \ No newline at end of file diff --git a/agent/api.py b/agent/api.py new file mode 100644 index 0000000..6814e0a --- /dev/null +++ b/agent/api.py @@ -0,0 +1,42 @@ +# File agent/api.py +from gql import gql, Client +from gql.transport.requests import RequestsHTTPTransport +import json +import requests + +class API: + def __init__(self, api_url, agent): + self.api_url = str(api_url) + self.api_token = None + self.agent = agent + + def authenticate(self): + # Implement your authentication logic here, e.g., setting headers, tokens, etc. 
+ # You can use self.api_url and self.api_token for authentication + pass + + def get_graphql_client(self): + headers = {'Authorization': f'Bearer {self.api_token}'} + transport = RequestsHTTPTransport(url=self.api_url, headers=headers, use_json=True) + return Client(transport=transport) + + def send_query(self, query): + client = self.get_graphql_client() + response = client.execute(gql(query)) + return response + + async def poll(self): + response = requests.get(f"{self.api_url}/poll") + try: + # Check if the response status code is 200 (OK) before parsing and printing the JSON + if response.status_code == 200: + response_json = response.json() # Parse the response content as JSON + # print(json.dumps(response_json, indent=2)) # Pretty-print the JSON + for item in response_json: + self.agent.put_event(item) + else: + print(f"Error: {response.status_code}") + except json.decoder.JSONDecodeError: + pass + + return [] \ No newline at end of file diff --git a/agent/base_plugin.py b/agent/base_plugin.py new file mode 100644 index 0000000..b0f7dcc --- /dev/null +++ b/agent/base_plugin.py @@ -0,0 +1,24 @@ +#File: /agent/base_plugin.py + +from abc import ABC, abstractmethod + +class BasePlugin(ABC): + + def __init__(self, options): + self.options = options + + async def setup(self, agent): + self.agent = agent + + @abstractmethod + async def handle(self, event): + pass + + @abstractmethod + async def teardown(self): + pass + + @abstractmethod + # This method should be implemented by plugins to specify if they should handle the event. 
+ async def should_handle(self, event): + return True \ No newline at end of file diff --git a/agent/output.py b/agent/output.py new file mode 100644 index 0000000..a73360d --- /dev/null +++ b/agent/output.py @@ -0,0 +1,25 @@ +#File: /agent/output.py +class Output(): + COLORS = { + 'reset': '\033[0m', + 'black': '\033[30m', + 'red': '\033[31m', + 'green': '\033[32m', + 'yellow': '\033[33m', + 'blue': '\033[34m', + 'magenta': '\033[35m', + 'cyan': '\033[36m', + 'white': '\033[37m', + } + + def debug(self, message): + print(f"{self.COLORS.get('blue')}{message}{self.COLORS.get('reset')}") + + def info(self, message): + print(f"{message}") + + def success(self, message): + print(f"{self.COLORS.get('green')}{message}{self.COLORS.get('reset')}") + + def error(self, message): + print(f"{self.COLORS.get('red')}{message}{self.COLORS.get('reset')}") \ No newline at end of file diff --git a/agent/runner.py b/agent/runner.py new file mode 100644 index 0000000..3bd7d93 --- /dev/null +++ b/agent/runner.py @@ -0,0 +1,36 @@ +# agent/runner.py +import threading + +# Function that simulates a task that threads will execute +def worker_thread(thread_id, data_queue, agent): + while True: + # Poll for tasks to run (e.g., from a task queue) + event = data_queue.get() # Get data from the queue + if event is None: # Exit condition + break + + agent.handle_event(event) + +class Runner: + concurrency_limit = 1 + + def __init__(self, data_queue, agent_instance): + self.data_queue = data_queue + self.agent = agent_instance + self.thread_group = [] + + def start(self): + for i in range(self.concurrency_limit): + thread = threading.Thread(target=worker_thread, args=(i, self.data_queue, self.agent)) + thread.daemon = True # Mark threads as daemon to exit when the main program exits + self.thread_group.append(thread) + thread.start() + + def join(self): + for thread in self.thread_group: + thread.join() + + def stop(self): + # Signal the worker threads to exit by adding None to the data queue + 
for _ in range(self.concurrency_limit): + self.data_queue.put(None) \ No newline at end of file diff --git a/app.py b/app.py new file mode 100644 index 0000000..5a11169 --- /dev/null +++ b/app.py @@ -0,0 +1,28 @@ +# app.py +import asyncio +from agent.agent import Agent +import configparser + +async def main(): + agent = Agent() + + config = configparser.ConfigParser() + config.read('plugins.ini') + + for section_name in config.sections(): + plugin = {} + plugin['name'] = section_name + plugin['options'] = dict(config.items(section_name)) + if config.getboolean(section_name, 'enabled'): + await agent.add_plugin_by_name(plugin) + + try: + await agent.start() + except KeyboardInterrupt: + print("Ctrl+C pressed. Stopping the agent...") + await agent.stop() + except asyncio.CancelledError: + pass + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100755 index 0000000..5ab7912 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,10 @@ +version: '3' +services: + network-agent: + build: + context: . 
+ dockerfile: Dockerfile + volumes: + - .:/app + environment: + - DEBUG=true diff --git a/plugins.ini b/plugins.ini new file mode 100644 index 0000000..3b3634b --- /dev/null +++ b/plugins.ini @@ -0,0 +1,2 @@ +[NMAP] +enabled = True \ No newline at end of file diff --git a/plugins/nmap/plugin.py b/plugins/nmap/plugin.py new file mode 100644 index 0000000..404c49c --- /dev/null +++ b/plugins/nmap/plugin.py @@ -0,0 +1,34 @@ +# File: /plugins/nmap/plugin.py +import nmap +from agent.base_plugin import BasePlugin + +class Plugin(BasePlugin): + + async def should_handle(self, event): + if event['plugin'] == "nmap" and 'target' in event: + return True + return False + + def handle(self, event): + target = event['target'] + + self.agent.output.info(f"Running NMAP scan on target: {target}") + + # Create an NmapScanner instance + nm = nmap.PortScanner() + + # Perform a simple ping scan + nm.scan(hosts=target, arguments='-sn') + + # Get scan results + scan_results = nm.all_hosts() + + self.agent.output.success("NMAP Scan Results:") + for host in scan_results: + self.agent.output.success(f"Host: {host}, Status: {nm[host].state()}") + # You can print additional scan information as needed + + return super().handle(event) + + def teardown(self): + return super().teardown() diff --git a/poll b/poll new file mode 100644 index 0000000..e69de29 diff --git a/requirements.txt b/requirements.txt new file mode 100755 index 0000000..ac2eced --- /dev/null +++ b/requirements.txt @@ -0,0 +1,5 @@ +apscheduler +gql +requests +requests_toolbelt +python-nmap \ No newline at end of file diff --git a/run b/run new file mode 100755 index 0000000..1373d0c --- /dev/null +++ b/run @@ -0,0 +1 @@ +docker run -it $(docker build -q .) 
From f7f4f6bd4b3a0fe7ad823b5bc960268c49430f6b Mon Sep 17 00:00:00 2001 From: Will G Date: Fri, 24 Nov 2023 21:18:39 -0500 Subject: [PATCH 2/8] WIP --- .gitignore | 8 ++ Dockerfile | 8 +- README.md | 10 ++ _docker/start.sh | 10 ++ agent.ini | 3 - agent/agent.py | 89 -------------- agent/api.py | 42 ------- agent/base_plugin.py | 24 ---- agent/output.py | 25 ---- agent/runner.py | 36 ------ app.py | 33 +++-- docker-compose.yml | 0 plugins.ini | 2 - plugins/nmap/plugin.py | 34 ------ requirements.txt | 7 +- run | 7 +- {agent => runbooksolutions}/__init__.py | 0 runbooksolutions/agent/API.py | 29 +++++ runbooksolutions/agent/Agent.py | 43 +++++++ runbooksolutions/agent/Plugin.py | 3 + runbooksolutions/agent/PluginManager.py | 98 +++++++++++++++ runbooksolutions/agent/PluginManager.py.old | 126 ++++++++++++++++++++ poll => runbooksolutions/agent/__init__.py | 0 runbooksolutions/auth/AccessToken.py | 55 +++++++++ runbooksolutions/auth/Auth.py | 103 ++++++++++++++++ runbooksolutions/auth/DeviceCode.py | 53 ++++++++ runbooksolutions/auth/__init__.py | 1 + runbooksolutions/logging_setup.py | 27 +++++ runbooksolutions/queue/Queue.py | 51 ++++++++ runbooksolutions/queue/__init__.py | 0 runbooksolutions/schedule/Schedule.py | 57 +++++++++ runbooksolutions/schedule/__init__.py | 0 runbooksolutions/store/Store.py | 92 ++++++++++++++ runbooksolutions/store/__init__.py | 0 34 files changed, 796 insertions(+), 280 deletions(-) create mode 100644 .gitignore mode change 100755 => 100644 Dockerfile create mode 100644 _docker/start.sh delete mode 100644 agent.ini delete mode 100644 agent/agent.py delete mode 100644 agent/api.py delete mode 100644 agent/base_plugin.py delete mode 100644 agent/output.py delete mode 100644 agent/runner.py mode change 100755 => 100644 docker-compose.yml delete mode 100644 plugins.ini delete mode 100644 plugins/nmap/plugin.py mode change 100755 => 100644 requirements.txt mode change 100755 => 100644 run rename {agent => runbooksolutions}/__init__.py (100%) 
create mode 100644 runbooksolutions/agent/API.py create mode 100644 runbooksolutions/agent/Agent.py create mode 100644 runbooksolutions/agent/Plugin.py create mode 100644 runbooksolutions/agent/PluginManager.py create mode 100644 runbooksolutions/agent/PluginManager.py.old rename poll => runbooksolutions/agent/__init__.py (100%) create mode 100644 runbooksolutions/auth/AccessToken.py create mode 100644 runbooksolutions/auth/Auth.py create mode 100644 runbooksolutions/auth/DeviceCode.py create mode 100644 runbooksolutions/auth/__init__.py create mode 100644 runbooksolutions/logging_setup.py create mode 100644 runbooksolutions/queue/Queue.py create mode 100644 runbooksolutions/queue/__init__.py create mode 100644 runbooksolutions/schedule/Schedule.py create mode 100644 runbooksolutions/schedule/__init__.py create mode 100644 runbooksolutions/store/Store.py create mode 100644 runbooksolutions/store/__init__.py diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..38a3c56 --- /dev/null +++ b/.gitignore @@ -0,0 +1,8 @@ +kerberos +run +OLD +stores +hosts +test.py +plugins +__pycache__ \ No newline at end of file diff --git a/Dockerfile b/Dockerfile old mode 100755 new mode 100644 index 5d92f38..08dc39c --- a/Dockerfile +++ b/Dockerfile @@ -2,11 +2,15 @@ FROM python:latest # Install Required System Tools -RUN apt-get update && apt-get install nmap -y +RUN apt-get update && \ + apt-get install nmap gcc libkrb5-dev libssl-dev krb5-user -y # Set the working directory to /app WORKDIR /app +COPY _docker/start.sh /start.sh +RUN chmod +x /start.sh + # Copy the current directory contents into the container at /app COPY requirements.txt /app @@ -16,4 +20,4 @@ RUN pip install -r requirements.txt COPY . 
/app # Define the command to run your application -CMD [ "python", "app.py" ] +CMD [ "/start.sh" ] \ No newline at end of file diff --git a/README.md b/README.md index a0f87e9..bd79aec 100644 --- a/README.md +++ b/README.md @@ -1 +1,11 @@ # RunbookSolution Network Agent + + +### Creating a Keytab File + +``` +docker run -it --rm \ + -v $(pwd):/output \ + -e PRINCIPAL= \ + simplesteph/docker-kerberos-get-keytab +``` \ No newline at end of file diff --git a/_docker/start.sh b/_docker/start.sh new file mode 100644 index 0000000..fee62ae --- /dev/null +++ b/_docker/start.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +keytab_directory="/keytabs" + +for keytab_file in "$keytab_directory"/*.keytab; do + identity=$(basename "$keytab_file" .keytab) + kinit -kt "$keytab_file" "$identity" +done + +python app.py \ No newline at end of file diff --git a/agent.ini b/agent.ini deleted file mode 100644 index 3584312..0000000 --- a/agent.ini +++ /dev/null @@ -1,3 +0,0 @@ -[API] -api_url = http://192.168.1.197:8000 -poll_interval = 2 \ No newline at end of file diff --git a/agent/agent.py b/agent/agent.py deleted file mode 100644 index 998889b..0000000 --- a/agent/agent.py +++ /dev/null @@ -1,89 +0,0 @@ -# agent/agent.py -import asyncio -from apscheduler.schedulers.asyncio import AsyncIOScheduler -from agent.output import Output -from agent.api import API -import configparser -import os -import threading -from agent.runner import Runner -from queue import Queue - -class Agent: - def __init__(self): - self.event_bus = Queue() - self.plugins = [] - self.scheduler = AsyncIOScheduler() - self.output = Output() - - self.running_jobs = set() - - config = configparser.ConfigParser() - config.read('agent.ini') - self.api = API(config.get('API', 'api_url'), self) - if config.has_option('API', 'poll_interval'): - self.add_periodic_task(int(config.get('API', 'poll_interval')), self.api.poll) - else: - self.add_periodic_task(30, self.api.poll) - - async def start(self): - # Start the event loop - await 
self.run() - - def put_event(self, event): - self.output.debug(f"Adding Event: {event} to the bus") - self.event_bus.put(event) - - async def add_plugin(self, plugin): - await plugin.setup(self) - self.plugins.append(plugin) - - async def add_plugin_by_name(self, plugin_info): - plugin_name = plugin_info['name'].lower() - try: - plugin_module = f'plugins.{plugin_name}.plugin' - plugin_class = getattr(__import__(plugin_module, fromlist=['Plugin']), 'Plugin') - plugin_instance = plugin_class(plugin_info['options']) - await self.add_plugin(plugin_instance) - self.output.success(f"Plugin [{plugin_name}] Loaded!") - except (ImportError, AttributeError): - self.output.error(f"Could not add plugin: {plugin_name}") - - def add_periodic_task(self, interval, task_function): - self.output.debug(f"Adding Task: {task_function} to run every {interval} seconds") - self.scheduler.add_job(task_function, 'interval', seconds=interval) - - async def run(self): - # Start the scheduler - scheduler_thread = threading.Thread(target=self.start_scheduler) - scheduler_thread.start() - - runner = Runner(self.event_bus, self) - runner.start() - while True: - await asyncio.sleep(1) - - def start_scheduler(self): - self.output.debug(f"Starting the Scheduler.") - - # Set up a new event loop for this thread - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - - # Start the scheduler in the new event loop - self.scheduler._eventloop = loop - self.scheduler.start() - loop.run_forever() - - def handle_event(self, event): - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - loop.run_until_complete(self.handle_event_coroutine(event)) - loop.close() - - async def handle_event_coroutine(self, event): - for plugin in self.plugins: - if await plugin.should_handle(event): - plugin_folder = os.path.basename(os.path.dirname(plugin.__class__.__module__)) - self.output.debug(f"Running Plugin: {plugin_folder.lower()}") - await plugin.handle(event) \ No newline at end of file diff 
--git a/agent/api.py b/agent/api.py deleted file mode 100644 index 6814e0a..0000000 --- a/agent/api.py +++ /dev/null @@ -1,42 +0,0 @@ -# File agent/api.py -from gql import gql, Client -from gql.transport.requests import RequestsHTTPTransport -import json -import requests - -class API: - def __init__(self, api_url, agent): - self.api_url = str(api_url) - self.api_token = None - self.agent = agent - - def authenticate(self): - # Implement your authentication logic here, e.g., setting headers, tokens, etc. - # You can use self.api_url and self.api_token for authentication - pass - - def get_graphql_client(self): - headers = {'Authorization': f'Bearer {self.api_token}'} - transport = RequestsHTTPTransport(url=self.api_url, headers=headers, use_json=True) - return Client(transport=transport) - - def send_query(self, query): - client = self.get_graphql_client() - response = client.execute(gql(query)) - return response - - async def poll(self): - response = requests.get(f"{self.api_url}/poll") - try: - # Check if the response status code is 200 (OK) before parsing and printing the JSON - if response.status_code == 200: - response_json = response.json() # Parse the response content as JSON - # print(json.dumps(response_json, indent=2)) # Pretty-print the JSON - for item in response_json: - self.agent.put_event(item) - else: - print(f"Error: {response.status_code}") - except json.decoder.JSONDecodeError: - pass - - return [] \ No newline at end of file diff --git a/agent/base_plugin.py b/agent/base_plugin.py deleted file mode 100644 index b0f7dcc..0000000 --- a/agent/base_plugin.py +++ /dev/null @@ -1,24 +0,0 @@ -#File: /agent/base_plugin.py - -from abc import ABC, abstractmethod - -class BasePlugin(ABC): - - def __init__(self, options): - self.options = options - - async def setup(self, agent): - self.agent = agent - - @abstractmethod - async def handle(self, event): - pass - - @abstractmethod - async def teardown(self): - pass - - @abstractmethod - # This method should be 
implemented by plugins to specify if they should handle the event. - async def should_handle(self, event): - return True \ No newline at end of file diff --git a/agent/output.py b/agent/output.py deleted file mode 100644 index a73360d..0000000 --- a/agent/output.py +++ /dev/null @@ -1,25 +0,0 @@ -#File: /agent/output.py -class Output(): - COLORS = { - 'reset': '\033[0m', - 'black': '\033[30m', - 'red': '\033[31m', - 'green': '\033[32m', - 'yellow': '\033[33m', - 'blue': '\033[34m', - 'magenta': '\033[35m', - 'cyan': '\033[36m', - 'white': '\033[37m', - } - - def debug(self, message): - print(f"{self.COLORS.get('blue')}{message}{self.COLORS.get('reset')}") - - def info(self, message): - print(f"{message}") - - def success(self, message): - print(f"{self.COLORS.get('green')}{message}{self.COLORS.get('reset')}") - - def error(self, message): - print(f"{self.COLORS.get('red')}{message}{self.COLORS.get('reset')}") \ No newline at end of file diff --git a/agent/runner.py b/agent/runner.py deleted file mode 100644 index 3bd7d93..0000000 --- a/agent/runner.py +++ /dev/null @@ -1,36 +0,0 @@ -# agent/runner.py -import threading - -# Function that simulates a task that threads will execute -def worker_thread(thread_id, data_queue, agent): - while True: - # Poll for tasks to run (e.g., from a task queue) - event = data_queue.get() # Get data from the queue - if event is None: # Exit condition - break - - agent.handle_event(event) - -class Runner: - concurrency_limit = 1 - - def __init__(self, data_queue, agent_instance): - self.data_queue = data_queue - self.agent = agent_instance - self.thread_group = [] - - def start(self): - for i in range(self.concurrency_limit): - thread = threading.Thread(target=worker_thread, args=(i, self.data_queue, self.agent)) - thread.daemon = True # Mark threads as daemon to exit when the main program exits - self.thread_group.append(thread) - thread.start() - - def join(self): - for thread in self.thread_group: - thread.join() - - def stop(self): 
- # Signal the worker threads to exit by adding None to the data queue - for _ in range(self.concurrency_limit): - self.data_queue.put(None) \ No newline at end of file diff --git a/app.py b/app.py index 5a11169..767d6bf 100644 --- a/app.py +++ b/app.py @@ -1,28 +1,25 @@ -# app.py -import asyncio -from agent.agent import Agent -import configparser -async def main(): - agent = Agent() +import logging +from runbooksolutions.logging_setup import setup_logging +setup_logging() - config = configparser.ConfigParser() - config.read('plugins.ini') - for section_name in config.sections(): - plugin = {} - plugin['name'] = section_name - plugin['options'] = dict(config.items(section_name)) - if config.getboolean(section_name, 'enabled'): - await agent.add_plugin_by_name(plugin) +from runbooksolutions.store.Store import Store +# Set the Data Store(s) Password +Store.set_encryption_key(b"Store_Encryption_Password") +from runbooksolutions.agent.Agent import Agent +async def main(): + agent = Agent(num_threads=3) try: + agent.schedule.add_task({"key": "value"}, "* * * * *") + + #await agent.queue.enqueue_task({"test": "Value"}) await agent.start() except KeyboardInterrupt: - print("Ctrl+C pressed. Stopping the agent...") + logging.info("Received CTRL+C. 
Stopping gracefully.") await agent.stop() - except asyncio.CancelledError: - pass +import asyncio if __name__ == "__main__": - asyncio.run(main()) + asyncio.run(main()) \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml old mode 100755 new mode 100644 diff --git a/plugins.ini b/plugins.ini deleted file mode 100644 index 3b3634b..0000000 --- a/plugins.ini +++ /dev/null @@ -1,2 +0,0 @@ -[NMAP] -enabled = True \ No newline at end of file diff --git a/plugins/nmap/plugin.py b/plugins/nmap/plugin.py deleted file mode 100644 index 404c49c..0000000 --- a/plugins/nmap/plugin.py +++ /dev/null @@ -1,34 +0,0 @@ -# File: /plugins/nmap/plugin.py -import nmap -from agent.base_plugin import BasePlugin - -class Plugin(BasePlugin): - - async def should_handle(self, event): - if event['plugin'] == "nmap" and 'target' in event: - return True - return False - - def handle(self, event): - target = event['target'] - - self.agent.output.info(f"Running NMAP scan on target: {target}") - - # Create an NmapScanner instance - nm = nmap.PortScanner() - - # Perform a simple ping scan - nm.scan(hosts=target, arguments='-sn') - - # Get scan results - scan_results = nm.all_hosts() - - self.agent.output.success("NMAP Scan Results:") - for host in scan_results: - self.agent.output.success(f"Host: {host}, Status: {nm[host].state()}") - # You can print additional scan information as needed - - return super().handle(event) - - def teardown(self): - return super().teardown() diff --git a/requirements.txt b/requirements.txt old mode 100755 new mode 100644 index ac2eced..c2a0c7c --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,4 @@ -apscheduler -gql requests -requests_toolbelt -python-nmap \ No newline at end of file +cryptography +colorlog +croniter \ No newline at end of file diff --git a/run b/run old mode 100755 new mode 100644 index 1373d0c..32eb288 --- a/run +++ b/run @@ -1 +1,6 @@ -docker run -it $(docker build -q .) 
+docker run \ + -v ./:/app \ + -v ./hosts:/etc/hosts \ + -v $(pwd)/kerberos/krb5.conf:/etc/krb5.conf \ + -v $(pwd)/kerberos:/keytabs \ + -it $(docker build -q .) diff --git a/agent/__init__.py b/runbooksolutions/__init__.py similarity index 100% rename from agent/__init__.py rename to runbooksolutions/__init__.py diff --git a/runbooksolutions/agent/API.py b/runbooksolutions/agent/API.py new file mode 100644 index 0000000..62ccf06 --- /dev/null +++ b/runbooksolutions/agent/API.py @@ -0,0 +1,29 @@ +from runbooksolutions.auth.Auth import Auth +import requests + +class API: + auth: Auth = None + + def __init__(self, auth: Auth) -> None: + self.auth = auth + + def sendRequest(self, url, method, data=None): + headers = self.auth.getHeaders() # Assuming Auth has a method to get authentication headers + + # Choose the appropriate requests method based on the provided 'method' + if method.upper() == 'GET': + response = requests.get(url, headers=headers) + elif method.upper() == 'POST': + response = requests.post(url, headers=headers, json=data) + elif method.upper() == 'PUT': + response = requests.put(url, headers=headers, json=data) + elif method.upper() == 'DELETE': + response = requests.delete(url, headers=headers) + else: + raise ValueError("Invalid HTTP method. Supported methods are GET, POST, PUT, and DELETE.") + + # You might want to handle response status codes and raise exceptions if needed + if response.status_code != 200: + raise Exception(f"Request failed with status code {response.status_code}. 
Response content: {response.text}") + + return response.json() # Assuming the response is in JSON format \ No newline at end of file diff --git a/runbooksolutions/agent/Agent.py b/runbooksolutions/agent/Agent.py new file mode 100644 index 0000000..270ad83 --- /dev/null +++ b/runbooksolutions/agent/Agent.py @@ -0,0 +1,43 @@ +from runbooksolutions.auth.Auth import Auth +from runbooksolutions.schedule.Schedule import Schedule +from runbooksolutions.queue.Queue import Queue +from runbooksolutions.agent.API import API +from runbooksolutions.agent.PluginManager import PluginManager +import asyncio +import logging + +class Agent: + auth: Auth = None + schedule: Schedule = None + queue: Queue = None + api: API = None + pluginManager: PluginManager = None + + def __init__(self, num_threads: int = 1) -> None: + self.auth = Auth() + self.api = API(self.auth) + self.pluginManager = PluginManager(self.api) + self.queue = Queue(num_threads, self.pluginManager) + self.schedule = Schedule(self.queue) + + self.pluginManager.addPlugin("123456789") + + async def start(self) -> None: + queue_task = asyncio.create_task(self.queue.start()) + schedule_task = asyncio.create_task(self.schedule.start()) + + try: + # Other asynchronous tasks can be started here... + + logging.info("End of Start") + + # Wait for the background task to complete + await queue_task + await schedule_task + except asyncio.CancelledError: + logging.info("Agent task canceled. 
Stopping gracefully.") + finally: + self.queue.stop() + + def stop(self) -> None: + self.queue.stop() \ No newline at end of file diff --git a/runbooksolutions/agent/Plugin.py b/runbooksolutions/agent/Plugin.py new file mode 100644 index 0000000..2d15547 --- /dev/null +++ b/runbooksolutions/agent/Plugin.py @@ -0,0 +1,3 @@ +class Plugin: + def __init__(self) -> None: + pass \ No newline at end of file diff --git a/runbooksolutions/agent/PluginManager.py b/runbooksolutions/agent/PluginManager.py new file mode 100644 index 0000000..a42686e --- /dev/null +++ b/runbooksolutions/agent/PluginManager.py @@ -0,0 +1,98 @@ +from runbooksolutions.agent.Plugin import Plugin +from runbooksolutions.agent.API import API +import json +import logging +import os +import importlib + +class PluginManager: + plugin_directory: str = "plugins" + plugins: dict = dict() + loadedCommands: dict = dict() + api: API = None + + def __init__(self, api: API) -> None: + self.api = api + + def addPlugin(self, pluginID: str) -> None: + logging.debug(f"Adding Plugin {pluginID}") + if self.pluginIsLocal(pluginID): + logging.debug("Plugin is local") + else: + self.downloadPlugin(pluginID) + + self.plugins[pluginID] = self.loadPlugin(pluginID) + self.loadPluginCommands(pluginID) + + def removePlugin(self, pluginID: str) -> None: + pass + + def syncPlugins(self, plugins: list) -> None: + pass + + def pluginIsLocal(self, pluginID: str) -> bool: + if not os.path.exists(os.path.join(self.plugin_directory, f"{pluginID}.json")): + logging.debug("Plugin JSON Not Local") + return False + + if not os.path.exists(os.path.join(self.plugin_directory, f"{pluginID}.py")): + logging.debug("Plugin python Not Local") + return False + + return True + + def downloadPlugin(self, pluginID: str) -> None: + logging.debug(f"Downloading Plugin {pluginID}") + pluginData = self.api.sendRequest('http://192.168.1.197/api/agent/plugins/download', 'GET') + self.savePlugin(pluginData) + + def savePlugin(self, plugin_definition: dict) 
-> None: + logging.debug("Saving Plugin") + if not os.path.exists(self.plugin_directory): + os.makedirs(self.plugin_directory) + + # Save JSON data + json_file_path = os.path.join(self.plugin_directory, f"{plugin_definition.get('id')}.json") + with open(json_file_path, 'w') as json_file: + json.dump(plugin_definition, json_file) + + # Save Python script + script_file_path = os.path.join(self.plugin_directory, f"{plugin_definition.get('id')}.py") + with open(script_file_path, 'w') as script_file: + script_file.write(plugin_definition.get('script', '')) + + def commandIsLoaded(self, commandName: str) -> bool: + if commandName in self.loadedCommands.keys(): + return True + return False + + def loadPlugin(self, pluginID: str) -> Plugin: + try: + script_file_path = os.path.join(self.plugin_directory, f"{pluginID}.py") + spec = importlib.util.spec_from_file_location("Plugin", script_file_path) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + # Store the instance of the plugin in the loaded_plugins dictionary + return module.Plugin() + except Exception as e: + print(f"Error importing plugin {pluginID}: {e}") + return None + + def loadPluginCommands(self, pluginID: str) -> None: + json_file_path = os.path.join(self.plugin_directory, f"{pluginID}.json") + with open(json_file_path, 'r') as json_file: + plugin_definition = json.load(json_file) + self.loadedCommands.update(plugin_definition.get('commands', [])) + + def executeCommand(self, commandName, *args, **kwargs) -> None: + if not self.commandIsLoaded(commandName): + logging.critical(f"Tried to call {commandName} when it wasn't loaded") + return + + function_name = self.loadedCommands.get(commandName).get('function') + + function_to_call = getattr(self.plugins.get('123456789'), function_name, None) + if callable(function_to_call): + function_to_call(*args, **kwargs) + else: + print(f"Function {function_name} not found in plugin {"123456789"}.") \ No newline at end of file diff --git 
a/runbooksolutions/agent/PluginManager.py.old b/runbooksolutions/agent/PluginManager.py.old new file mode 100644 index 0000000..db5dfa5 --- /dev/null +++ b/runbooksolutions/agent/PluginManager.py.old @@ -0,0 +1,126 @@ +import importlib +import json +import requests +import hashlib +import os + +class PluginManager: + def __init__(self, plugin_directory="plugins"): + self.plugin_directory = plugin_directory + self.loaded_plugins = {} + self.available_commands = set() + + def load_plugin(self, plugin_name, plugin_url=None): + if plugin_name in self.loaded_plugins: + print(f"Plugin {plugin_name} is already loaded.") + return + + # If plugin is not available locally, perform HTTP request to get it + if plugin_url: + response = requests.get(plugin_url) + if response.status_code == 200: + plugin_data = response.json() + # Verify the integrity of the received plugin using hash + if self.verify_plugin_hash(plugin_data): + self.save_plugin_locally(plugin_name, plugin_data) + self.update_available_commands(plugin_data) + self.loaded_plugins[plugin_name] = self.import_plugin(plugin_name) + print(f"Plugin {plugin_name} loaded successfully.") + else: + print("Plugin integrity verification failed. Aborting.") + else: + print(f"Failed to fetch plugin {plugin_name}. 
HTTP Error {response.status_code}.") + else: + print(f"Local copy of plugin {plugin_name} not found and no URL provided.") + + def sync_plugins(self, plugin_list): + # Unload plugins not in the provided list + plugins_to_unload = set(self.loaded_plugins.keys()) - set(plugin_list) + for plugin_name in plugins_to_unload: + self.unload_plugin(plugin_name) + + # Load plugins in the provided list + for plugin_name in plugin_list: + if self.is_command_available(plugin_name): + self.load_plugin(plugin_name) + + def unload_plugin(self, plugin_name): + if plugin_name in self.loaded_plugins: + del self.loaded_plugins[plugin_name] + print(f"Plugin {plugin_name} unloaded successfully.") + else: + print(f"Plugin {plugin_name} is not loaded.") + + def get_available_commands(self): + return self.available_commands + + def is_command_available(self, command_name): + return command_name in self.available_commands + + def execute_command(self, command_name, *args, **kwargs): + if self.is_command_available(command_name): + for plugin_name, plugin_data in self.loaded_plugins.items(): + plugin_commands = plugin_data.get('commands', {}) + if command_name in plugin_commands: + function_name = plugin_commands[command_name] + function_to_call = getattr(plugin_data['instance'], function_name, None) + if callable(function_to_call): + function_to_call(*args, **kwargs) + else: + print(f"Function {function_name} not found in plugin {plugin_name}.") + return + print(f"Command {command_name} not found in any loaded plugins.") + else: + print(f"Command {command_name} is not available.") + + def verify_plugin_hash(self, plugin_data): + script = plugin_data.get('script', '').encode('utf-8') + calculated_hash = hashlib.sha256(script).hexdigest() + return calculated_hash == plugin_data.get('hash', '') + + def save_plugin_locally(self, plugin_name, plugin_data): + if not os.path.exists(self.plugin_directory): + os.makedirs(self.plugin_directory) + + # Save JSON data + json_file_path = 
os.path.join(self.plugin_directory, f"{plugin_name}.json") + with open(json_file_path, 'w') as json_file: + json.dump(plugin_data, json_file) + + # Save Python script + script_file_path = os.path.join(self.plugin_directory, f"{plugin_name}.py") + with open(script_file_path, 'w') as script_file: + script_file.write(plugin_data.get('script', '')) + + def import_plugin(self, plugin_name): + try: + module_name = os.path.splitext(plugin_name)[0] + spec = importlib.util.spec_from_file_location(module_name, os.path.join(self.plugin_directory, f"{plugin_name}.json")) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + # Store the instance of the plugin in the loaded_plugins dictionary + module.instance = module.Plugin() + return module + except Exception as e: + print(f"Error importing plugin {plugin_name}: {e}") + return None + + def update_available_commands(self, plugin_data): + commands = plugin_data.get('commands', {}).keys() + self.available_commands.update(commands) + +# Example usage: +if __name__ == "__main__": + plugin_manager = PluginManager() + + # Load plugins based on available commands + plugin_list = ["NMAP Plugin", "Another Plugin"] + plugin_manager.sync_plugins(plugin_list) + + # Get available commands + commands = plugin_manager.get_available_commands() + print("Available commands:", commands) + + # Execute a command + command_to_execute = "COMMAND_NAME" + plugin_manager.execute_command(command_to_execute) diff --git a/poll b/runbooksolutions/agent/__init__.py similarity index 100% rename from poll rename to runbooksolutions/agent/__init__.py diff --git a/runbooksolutions/auth/AccessToken.py b/runbooksolutions/auth/AccessToken.py new file mode 100644 index 0000000..2013562 --- /dev/null +++ b/runbooksolutions/auth/AccessToken.py @@ -0,0 +1,55 @@ +from runbooksolutions.store.Store import Store +import logging +import time +from typing import Optional + +class AccessToken: + + def __init__(self, token_data: dict) -> None: + 
try: + self.access_token = token_data.get('access_token') + self.token_type = token_data.get('token_type') + self.expires_in = token_data.get('expires_in') + self.scope = token_data.get('scope') + self.creation_time = time.time() + + self._save_to_store() + except AttributeError as e: + logging.error(f"Failed to initialize AccessToken. Received data: {token_data}") + logging.error(f"Error details: {e}") + raise + + def _save_to_store(self) -> None: + data_to_store = { + 'access_token': self.access_token, + 'token_type': self.token_type, + 'expires_in': self.expires_in, + 'scope': self.scope, + 'creation_time': self.creation_time + } + store = Store(self.__class__.__name__) + store.save(data_to_store) + + @classmethod + def load_from_store(cls) -> Optional['AccessToken']: + store = Store(cls.__name__) + data = store.load() + + if data: + return cls(data) + else: + return None + + def getAccessToken(self) -> str: + return self.access_token + + def getTokenType(self) -> str: + return self.token_type + + def getExpiresIn(self) -> int: + return self.expires_in + + def isExpired(self) -> bool: + current_time = time.time() + elapsed_time = current_time - self.creation_time + return elapsed_time > self.expires_in diff --git a/runbooksolutions/auth/Auth.py b/runbooksolutions/auth/Auth.py new file mode 100644 index 0000000..0a6e41c --- /dev/null +++ b/runbooksolutions/auth/Auth.py @@ -0,0 +1,103 @@ +from runbooksolutions.auth.DeviceCode import DeviceCode +from runbooksolutions.auth.AccessToken import AccessToken +import logging +import json +import requests +import time + +class Auth: + deviceCode: DeviceCode = None + accessToken: AccessToken = None + + def __init__(self) -> None: + logging.debug("Starting Authentication") + + self.deviceCode = DeviceCode.load_from_store() + self.accessToken = AccessToken.load_from_store() + + if self.accessToken is None: + logging.debug("Access Token does not yet exist.") + if self.deviceCode is None: + logging.debug("Device code does not 
yet exist.") + self.getDeviceCode() + + # We have a Device Code; so lets get our access tokens + self.pollAuthorization() + + logging.debug("Finished Authentication") + + def getHeaders(self) -> dict: + headers = { + 'Authorization': f'Bearer {self.accessToken.getAccessToken()}', + 'Content-Type': 'application/json', + } + return headers + + def getDeviceCode(self) -> None: + logging.debug("Getting a device code.") + myobj = { + 'client_id': '9aa3c276-93c3-4255-b148-086fa9bc3224', + 'grant_type': 'urn:ietf:params:oauth:grant-type:device_code', + 'scope': '' + } + url = "http://192.168.1.197/oauth/device/code" + + try: + response = requests.post(url, json=myobj) + # Check the HTTP status code + response.raise_for_status() + + self.deviceCode = DeviceCode(json.loads(response.text)) + + logging.critical(f"{self.deviceCode.getVerificationURI()} CODE: {self.deviceCode.getUserCode()}") + + except requests.RequestException as e: + logging.error(f"Request failed: {e}") + raise Exception(f"Request failed: {e}") + + def pollAuthorization(self) -> None: + logging.debug("Polling for user authorization...") + + while True: + if self.deviceCode.isExpired(): + self.getDeviceCode() + try: + myobj = { + 'client_id': '9aa3c276-93c3-4255-b148-086fa9bc3224', + 'grant_type': 'urn:ietf:params:oauth:grant-type:device_code', + 'device_code': self.deviceCode.getDeviceCode() + } + url = "http://192.168.1.197/oauth/token" + + logging.debug(myobj) + + response = requests.post(url, json=myobj) + logging.debug(f"Response Status Code: {response.status_code}") + + if response.status_code == 200: + logging.debug(type(response.text)) + # Authorization successful, extract and store the access token + try: + # Try parsing the response as JSON + token_data = json.loads(response.text) + self.accessToken = AccessToken(token_data) + logging.debug(f"Access Token: {self.accessToken.getAccessToken()}") + break + except json.JSONDecodeError: + logging.error("Failed to parse response as JSON.") + raise 
Exception("Failed to parse response as JSON.") + elif response.status_code == 400 and "authorization_pending" in response.json().get('error'): + # Authorization is still pending, wait and poll again + logging.debug(f"Authorization Pending") + time.sleep(5) + elif response.status_code == 400 and "expired_token" in response.json().get('error'): + logging.error("Device code has expired.") + self.getDeviceCode() + # raise Exception("Device code has expired.") + else: + logging.error(f"Failed to obtain access token: {response.text}") + raise Exception(f"Failed to obtain access token: {response.text}") + + except requests.RequestException as e: + logging.error(f"Request failed: {e}") + raise Exception(f"Request failed: {e}") \ No newline at end of file diff --git a/runbooksolutions/auth/DeviceCode.py b/runbooksolutions/auth/DeviceCode.py new file mode 100644 index 0000000..35b14a3 --- /dev/null +++ b/runbooksolutions/auth/DeviceCode.py @@ -0,0 +1,53 @@ +from runbooksolutions.store.Store import Store +import logging +import time + +class DeviceCode: + + def __init__(self, DeviceCode: dict) -> None: + self.device_code = DeviceCode["device_code"] + self.user_code = DeviceCode["user_code"] + self.verification_uri = DeviceCode["verification_uri"] + self.expires_in = DeviceCode["expires_in"] + self.creation_time = time.time() + + # Save the device code to the store + self._save_to_store() + + def _save_to_store(self) -> None: + data_to_store = { + 'device_code': self.device_code, + 'user_code': self.user_code, + 'verification_uri': self.verification_uri, + 'expires_in': self.expires_in, + 'creation_time': self.creation_time + } + store = Store(self.__class__.__name__) + store.save(data_to_store) + + @classmethod + def load_from_store(cls): + store = Store(cls.__name__) + data = store.load() + + if data: + return cls(data) + else: + return None + + def getDeviceCode(self) -> str: + return self.device_code + + def getUserCode(self) -> str: + return self.user_code + + def 
getVerificationURI(self) -> str: + return self.verification_uri + + def getExpiresIn(self) -> str: + return self.expires_in + + def isExpired(self) -> bool: + current_time = time.time() + elapsed_time = current_time - self.creation_time + return elapsed_time > self.expires_in diff --git a/runbooksolutions/auth/__init__.py b/runbooksolutions/auth/__init__.py new file mode 100644 index 0000000..2bdfb31 --- /dev/null +++ b/runbooksolutions/auth/__init__.py @@ -0,0 +1 @@ +from . import Auth \ No newline at end of file diff --git a/runbooksolutions/logging_setup.py b/runbooksolutions/logging_setup.py new file mode 100644 index 0000000..ef4f3ce --- /dev/null +++ b/runbooksolutions/logging_setup.py @@ -0,0 +1,27 @@ +import logging +from colorlog import ColoredFormatter + +def setup_logging(): + logging.basicConfig(level=logging.DEBUG) + for handler in logging.root.handlers[:]: + if isinstance(handler, logging.StreamHandler): + logging.root.removeHandler(handler) + + formatter = ColoredFormatter( + "%(log_color)s%(levelname)-8s %(message)s%(reset)s", + datefmt=None, + reset=True, + log_colors={ + 'DEBUG': 'cyan', + 'INFO': 'green', + 'WARNING': 'yellow', + 'ERROR': 'red', + 'CRITICAL': 'white,bg_red', + }, + secondary_log_colors={}, + style='%' + ) + + ch = logging.StreamHandler() + ch.setFormatter(formatter) + logging.root.addHandler(ch) diff --git a/runbooksolutions/queue/Queue.py b/runbooksolutions/queue/Queue.py new file mode 100644 index 0000000..3f5f518 --- /dev/null +++ b/runbooksolutions/queue/Queue.py @@ -0,0 +1,51 @@ +from runbooksolutions.agent.PluginManager import PluginManager +from threading import Event +import logging +import asyncio + +class Queue: + def __init__(self, num_threads: int = 1, pluginManager: PluginManager = None) -> None: + self.task_queue = asyncio.Queue() + self.num_threads = num_threads + self.threads = [] + self.stop_event = Event() + self.pluginManager = pluginManager + + def _worker(self): + while not self.stop_event.is_set(): + try: + 
task = self.task_queue.get_nowait() + self.execute_task(task) + except asyncio.QueueEmpty: + pass + except asyncio.CancelledError: + # Log a message when the coroutine is canceled + logging.debug("Worker coroutine canceled.") + break + except Exception as e: + logging.error(e) + pass # Handle exceptions as needed + + def execute_task(self, task: dict) -> None: + # Implement your task execution logic here + logging.error(f"Running Task {task}") + self.pluginManager.executeCommand('lol', 2) + pass + + async def start(self) -> None: + logging.debug("Queue Started") + try: + loop = asyncio.get_event_loop() + tasks = [loop.run_in_executor(None, self._worker) for _ in range(self.num_threads)] + await asyncio.gather(*tasks) + except KeyboardInterrupt: + logging.info("Received KeyboardInterrupt. Stopping gracefully.") + finally: + self.stop() + + def stop(self) -> None: + self.stop_event.set() + + async def enqueue_task(self, task: dict) -> None: + logging.debug("Enqueued Task") + await self.task_queue.put(task) diff --git a/runbooksolutions/queue/__init__.py b/runbooksolutions/queue/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/runbooksolutions/schedule/Schedule.py b/runbooksolutions/schedule/Schedule.py new file mode 100644 index 0000000..84c448f --- /dev/null +++ b/runbooksolutions/schedule/Schedule.py @@ -0,0 +1,57 @@ +from runbooksolutions.queue.Queue import Queue +from datetime import datetime +from croniter import croniter +import asyncio +import logging + +class Schedule: + queue: Queue = None + + def __init__(self, queue: Queue) -> None: + self.queue = queue + self.tasks = [] + + def add_task(self, task: dict, cron_expression: str) -> None: + logging.debug("Added Task") + self.tasks.append((task, cron_expression)) + + # async def start(self) -> None: + # logging.debug("Schedule Started") + # while True: + # current_time = datetime( + # datetime.now().year, + # datetime.now().month, + # datetime.now().day, + # datetime.now().hour, + # 
datetime.now().minute, + # datetime.now().second + # ) + # for task, cron_expression in self.tasks: + # cron = croniter(cron_expression, current_time) + # next_run_time = cron.get_next(datetime) + # last_run_time = cron.get_prev(datetime) + # logging.debug(f"Current Time: {current_time}, Next Run Time: {next_run_time}") + # if next_run_time == current_time or current_time == last_run_time: + # logging.info("Queuing Task") + # await self.queue.enqueue_task(task) + # else: + # logging.debug("Not Time") + # await asyncio.sleep(1) + async def start(self) -> None: + logging.debug("Schedule Started") + while True: + for task, cron_expression in self.tasks: + if self.shouldRun(cron_expression): + await self.queue.enqueue_task(task) + await asyncio.sleep(1) + + def shouldRun(self, cron_expression): + from dateutil.relativedelta import relativedelta + cron = croniter(cron_expression, datetime.now()) + td, ms1 = cron.get_current(datetime), relativedelta(microseconds=1) + if not td.microsecond: + td = td + ms1 + cron.set_current(td, force=True) + tdp, tdt = cron.get_current(), cron.get_prev() + precision_in_seconds = 1 + return (max(tdp, tdt) - min(tdp, tdt)) < precision_in_seconds \ No newline at end of file diff --git a/runbooksolutions/schedule/__init__.py b/runbooksolutions/schedule/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/runbooksolutions/store/Store.py b/runbooksolutions/store/Store.py new file mode 100644 index 0000000..11223b9 --- /dev/null +++ b/runbooksolutions/store/Store.py @@ -0,0 +1,92 @@ +import pickle +import hashlib +import os +from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC +from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.padding import PKCS7 +from cryptography.exceptions import InvalidTag + +class Store: + # Class variable for the encryption 
key (should be kept secure) + STATIC_ENCRYPTION_KEY = None + + def __init__(self, filename): + self.filename = "stores/" + filename + ".pkl" + + def _generate_checksum(self, data): + # Calculate SHA-256 checksum for data + hasher = hashlib.sha256() + hasher.update(pickle.dumps(data)) + return hasher.hexdigest() + + def save(self, data): + if not self.STATIC_ENCRYPTION_KEY: + raise ValueError("Encryption key not set. Call set_encryption_key() before using the Store.") + + # Generate checksum + checksum = self._generate_checksum(data) + + # Derive a key using PBKDF2 + kdf = PBKDF2HMAC( + algorithm=hashes.SHA256(), + salt=os.urandom(16), + iterations=100000, + length=32, + backend=default_backend() + ) + key = kdf.derive(self.STATIC_ENCRYPTION_KEY) + + # Generate a random IV for AES-CBC + iv = os.urandom(16) + + # Pad the data to a multiple of the block size using PKCS7 + padder = PKCS7(algorithms.AES.block_size).padder() + padded_data = padder.update(pickle.dumps(data)) + padder.finalize() + + # Encrypt the data using AES-CBC + cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=default_backend()) + encryptor = cipher.encryptor() + ciphertext = encryptor.update(padded_data) + encryptor.finalize() + + # Save the data, key, IV, and checksum to the file + with open(self.filename, 'wb') as file: + file.write(key + iv + checksum.encode() + ciphertext) + + def load(self): + try: + # Read the data from the file + with open(self.filename, 'rb') as file: + encrypted_data = file.read() + + # Extract the salt from the end of the file + key = encrypted_data[:32] + encrypted_data = encrypted_data[32:] + + # Extract the IV, checksum, and ciphertext + iv = encrypted_data[:16] + checksum = encrypted_data[16:80].decode() + ciphertext = encrypted_data[80:] + + # Decrypt the data using AES-CBC + cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=default_backend()) + decryptor = cipher.decryptor() + + decrypted_data = decryptor.update(ciphertext) + decryptor.finalize() 
+ + # Calculate the checksum of the decrypted data + decrypted_checksum = self._generate_checksum(pickle.loads(decrypted_data)) + + # Verify the checksum + if checksum != decrypted_checksum: + raise ValueError("Checksum verification failed. Data may be corrupted.") + + return pickle.loads(decrypted_data, fix_imports=False) + + except FileNotFoundError: + return None + + @classmethod + def set_encryption_key(cls, key): + cls.STATIC_ENCRYPTION_KEY = key diff --git a/runbooksolutions/store/__init__.py b/runbooksolutions/store/__init__.py new file mode 100644 index 0000000..e69de29 From ed95fd401f3a7041ac3b1bd2a584e24e6406295c Mon Sep 17 00:00:00 2001 From: Will G Date: Sun, 26 Nov 2023 19:50:27 -0500 Subject: [PATCH 3/8] WIP --- app.py | 3 - config.ini | 5 ++ docs/Responses.md | 61 +++++++++++++++++++ runbooksolutions/agent/API.py | 41 ++++++++++++- runbooksolutions/agent/Agent.py | 38 +++++++++++- runbooksolutions/agent/PluginManager.py | 78 +++++++++++++++++++++++-- runbooksolutions/agent/Task.py | 18 ++++++ runbooksolutions/auth/Auth.py | 15 +++-- runbooksolutions/queue/Queue.py | 27 +++++++-- runbooksolutions/schedule/Schedule.py | 41 +++++-------- 10 files changed, 277 insertions(+), 50 deletions(-) create mode 100644 config.ini create mode 100644 docs/Responses.md create mode 100644 runbooksolutions/agent/Task.py diff --git a/app.py b/app.py index 767d6bf..50afdb5 100644 --- a/app.py +++ b/app.py @@ -12,9 +12,6 @@ async def main(): agent = Agent(num_threads=3) try: - agent.schedule.add_task({"key": "value"}, "* * * * *") - - #await agent.queue.enqueue_task({"test": "Value"}) await agent.start() except KeyboardInterrupt: logging.info("Received CTRL+C. Stopping gracefully.") diff --git a/config.ini b/config.ini new file mode 100644 index 0000000..cd68f9d --- /dev/null +++ b/config.ini @@ -0,0 +1,5 @@ +[agent] +# Note: Do NOT include a trailing slash on the server_url +server_url=http://192.168.1.197 +# Note: This must be provided by the server operator. 
+client_id=9ab55261-bfb7-4bb3-ad29-a6dbdbf8a5af \ No newline at end of file diff --git a/docs/Responses.md b/docs/Responses.md new file mode 100644 index 0000000..b154c49 --- /dev/null +++ b/docs/Responses.md @@ -0,0 +1,61 @@ +# Responses From Backend + +GET /api/agent +```json +{ + "data": { + "id": "9ab55db0-9bfa-45a6-8280-bb94c6b0fe8d", + "name": "Test Agent", + "team_id": "9ab55261-b6b0-4fb4-85e5-a3491a72f720", + "plugins": [ + "9ab56426-f429-4c4b-9755-40c92449f0be" + ] + } +} +``` + +GET /api/agent/plugin/{plugin_id} +```json +{ + "data": { + "id": "9ab56426-f429-4c4b-9755-40c92449f0be", + "name": "Test Plugin", + "version": 0, + "description": null, + "script": "class Plugin:\n def __init__(self):\n pass\n def greet(self):\n print(\"Hello from the Test Plugin!\")\n def square(self, number):\n result = number ** 2\n print(f\"The square of {number} is {result}\")", + "hash": "809c167b7b1e7fb9504dc136af3c2dc1c17545355a9aaec28c3792e54bc540943db236b6af547a732161b5d717c9b14a7c508ab49b3f06e128997de06b3abfd3", + "commands": { + "9ab56426-f429-4c4b-9755-40c92449f0be.greet": { + "id": "9ab5659c-271d-40d5-be37-3a3847b92aab", + "name": "9ab56426-f429-4c4b-9755-40c92449f0be.greet", + "function": "greet" + }, + "9ab56426-f429-4c4b-9755-40c92449f0be.square": { + "id": "9ab565fc-1036-4afb-ac7e-ec92e0db6985", + "name": "9ab56426-f429-4c4b-9755-40c92449f0be.square", + "function": "square" + } + } + } +} +``` + +GET /api/agent/tasks +```json +{ + "data": [ + { + "id": "9ab581cf-546d-405a-afaf-474cc631ed5c", + "command": "9ab56426-f429-4c4b-9755-40c92449f0be.greet", + "cron": null, + "arguments": "{}" + }, + { + "id": "9ab582f7-9e64-4fad-b6b9-369633776ae4", + "command": "9ab56426-f429-4c4b-9755-40c92449f0be.square", + "cron": "* * * * *", + "arguments": "{\"number\":2}" + } + ] +} +``` \ No newline at end of file diff --git a/runbooksolutions/agent/API.py b/runbooksolutions/agent/API.py index 62ccf06..b8ba4c4 100644 --- a/runbooksolutions/agent/API.py +++ 
b/runbooksolutions/agent/API.py @@ -1,14 +1,53 @@ from runbooksolutions.auth.Auth import Auth +from runbooksolutions.agent.Task import Task import requests +import logging + +class AgentDetails: + id: str + name: str + team_id: str + plugins = list + def __init__(self, response: dict) -> None: + response = response.get('data') + self.id = response.get('id') + self.name = response.get('name') + self.team_id = response.get('team_id') + if response.get('plugins') == None: + self.plugins = [] + else: + self.plugins = response.get('plugins') + +class AgentTasks: + tasks: [Task] + + def __init__(self, response: dict) -> None: + self.tasks = [] + response = response.get('data') + for task in response: + self.tasks.append(Task(task)) + + def getTasks(self): + return self.tasks class API: + url: str = None auth: Auth = None - def __init__(self, auth: Auth) -> None: + def __init__(self, auth: Auth, url: str) -> None: self.auth = auth + self.url = url + "/api" + + def getAgentDetails(self) -> AgentDetails: + return AgentDetails(self.sendRequest('/agent', 'GET')) + + def getAgentTasks(self) -> AgentTasks: + return AgentTasks(self.sendRequest('/agent/tasks', 'GET')) def sendRequest(self, url, method, data=None): + url = self.url + url headers = self.auth.getHeaders() # Assuming Auth has a method to get authentication headers + method = method.upper() # Choose the appropriate requests method based on the provided 'method' if method.upper() == 'GET': diff --git a/runbooksolutions/agent/Agent.py b/runbooksolutions/agent/Agent.py index 270ad83..883c2af 100644 --- a/runbooksolutions/agent/Agent.py +++ b/runbooksolutions/agent/Agent.py @@ -2,8 +2,10 @@ from runbooksolutions.schedule.Schedule import Schedule from runbooksolutions.queue.Queue import Queue from runbooksolutions.agent.API import API +from runbooksolutions.agent.API import AgentDetails from runbooksolutions.agent.PluginManager import PluginManager import asyncio +import configparser import logging class Agent: @@ -13,16 
+15,45 @@ class Agent: api: API = None pluginManager: PluginManager = None + agentDetails: AgentDetails = None + def __init__(self, num_threads: int = 1) -> None: - self.auth = Auth() - self.api = API(self.auth) + self.agentConfig = self.loadConfig() + self.auth = Auth(url=self.agentConfig.get('server_url'), client_id=self.agentConfig.get('client_id')) + self.api = API(auth=self.auth, url=self.agentConfig.get('server_url')) self.pluginManager = PluginManager(self.api) self.queue = Queue(num_threads, self.pluginManager) self.schedule = Schedule(self.queue) - self.pluginManager.addPlugin("123456789") + def loadConfig(self): + config = configparser.ConfigParser() + try: + config.read('config.ini') + return config['agent'] + except FileNotFoundError: + logging.critical("config.ini file not found!") + exit() + except configparser.Error: + logging.critical("Error reading config file!") + exit() + + async def syncAgent(self): + while True: + self.agentDetails = self.api.getAgentDetails() + self.pluginManager.syncPlugins(self.agentDetails.plugins) + + tasks = self.api.getAgentTasks().getTasks() + for task in tasks: + if task.shouldSchedule(): + self.schedule.add_task(task=task, cron_expression=task.cron) + else: + await self.queue.enqueue_task(task) + + # Sleep for 60 seconds + await asyncio.sleep(60) async def start(self) -> None: + agent_task = asyncio.create_task(self.syncAgent()) queue_task = asyncio.create_task(self.queue.start()) schedule_task = asyncio.create_task(self.schedule.start()) @@ -32,6 +63,7 @@ async def start(self) -> None: logging.info("End of Start") # Wait for the background task to complete + await agent_task await queue_task await schedule_task except asyncio.CancelledError: diff --git a/runbooksolutions/agent/PluginManager.py b/runbooksolutions/agent/PluginManager.py index a42686e..f6d3d08 100644 --- a/runbooksolutions/agent/PluginManager.py +++ b/runbooksolutions/agent/PluginManager.py @@ -4,6 +4,7 @@ import logging import os import importlib 
+import hashlib class PluginManager: plugin_directory: str = "plugins" @@ -14,21 +15,77 @@ class PluginManager: def __init__(self, api: API) -> None: self.api = api + def verify_plugin_hash(self, pluginID: str) -> bool: + json_file_path = os.path.join(self.plugin_directory, f"{pluginID}.json") + with open(json_file_path, 'r') as json_file: + plugin_definition = json.loads(json_file.read()) + + script = plugin_definition.get('script', '').encode('utf-8') + json_hash = hashlib.sha512(script).hexdigest() + + script_file_path = os.path.join(self.plugin_directory, f"{pluginID}.py") + with open(script_file_path, 'rb') as script_file: + script_content = script_file.read() + script_hash = hashlib.sha512(script_content).hexdigest() + + logging.debug(f"Expected Hash: {plugin_definition.get('hash')}") + logging.debug(f"JSON Hash: {json_hash}") + logging.debug(f"Script Hash: {script_hash}") + + if not json_hash == plugin_definition.get('hash', ''): + logging.critical("JSON Hash mismatch") + return False + + if not script_hash == plugin_definition.get('hash', ''): + logging.critical("SCRIPT Hash mismatch") + return False + + return True + def addPlugin(self, pluginID: str) -> None: logging.debug(f"Adding Plugin {pluginID}") if self.pluginIsLocal(pluginID): logging.debug("Plugin is local") else: self.downloadPlugin(pluginID) + + if not self.verify_plugin_hash(pluginID): + logging.critical(f"Plugin {pluginID} FAILED Hash Verification") + return self.plugins[pluginID] = self.loadPlugin(pluginID) self.loadPluginCommands(pluginID) def removePlugin(self, pluginID: str) -> None: - pass + logging.debug(f"Removing Plugin {pluginID}") + if pluginID in self.plugins: + del self.plugins[pluginID] + else: + logging.warning(f"Plugin {pluginID} not found in loaded plugins.") + + # TODO: Remove the plugin files from the file system + json_file_path = os.path.join(self.plugin_directory, f"{pluginID}.json") + with open(json_file_path, 'r') as json_file: + plugin_definition = json.load(json_file) + 
+ for command in plugin_definition.get('commands', []).keys(): + self.loadedCommands.pop(command) def syncPlugins(self, plugins: list) -> None: - pass + logging.debug(f"Syncing Plugins. Loaded Plugins: {list(self.plugins.keys())} Requested Plugins: {plugins}") + for pluginID in self.plugins.keys(): + if pluginID not in plugins: + logging.debug("Removing Plugin") + self.removePlugin(pluginID) + else: + logging.debug("Plugin Still Required.") + + for pluginID in plugins: + if pluginID not in self.plugins.keys(): + logging.debug("Adding Plugin") + self.addPlugin(pluginID) + else: + logging.debug("Plugin Already Loaded") def pluginIsLocal(self, pluginID: str) -> bool: if not os.path.exists(os.path.join(self.plugin_directory, f"{pluginID}.json")): @@ -43,7 +100,8 @@ def pluginIsLocal(self, pluginID: str) -> bool: def downloadPlugin(self, pluginID: str) -> None: logging.debug(f"Downloading Plugin {pluginID}") - pluginData = self.api.sendRequest('http://192.168.1.197/api/agent/plugins/download', 'GET') + pluginData = self.api.sendRequest(f'/agent/plugin/{pluginID}', 'GET') + pluginData = pluginData.get('data') self.savePlugin(pluginData) def savePlugin(self, plugin_definition: dict) -> None: @@ -78,21 +136,29 @@ def loadPlugin(self, pluginID: str) -> Plugin: print(f"Error importing plugin {pluginID}: {e}") return None + def loadPluginCommands(self, pluginID: str) -> None: json_file_path = os.path.join(self.plugin_directory, f"{pluginID}.json") with open(json_file_path, 'r') as json_file: plugin_definition = json.load(json_file) - self.loadedCommands.update(plugin_definition.get('commands', [])) + + commands = plugin_definition.get('commands', []) + + for command_name, command_data in commands.items(): + modified_command = {'pluginID': pluginID, **command_data} + + self.loadedCommands.update({command_name: modified_command}) def executeCommand(self, commandName, *args, **kwargs) -> None: if not self.commandIsLoaded(commandName): logging.critical(f"Tried to call 
{commandName} when it wasn't loaded") return + pluginID = self.loadedCommands.get(commandName).get('pluginID') function_name = self.loadedCommands.get(commandName).get('function') - function_to_call = getattr(self.plugins.get('123456789'), function_name, None) + function_to_call = getattr(self.plugins.get(pluginID), function_name, None) if callable(function_to_call): function_to_call(*args, **kwargs) else: - print(f"Function {function_name} not found in plugin {"123456789"}.") \ No newline at end of file + print(f"Function {function_name} not found in plugin {pluginID}.") \ No newline at end of file diff --git a/runbooksolutions/agent/Task.py b/runbooksolutions/agent/Task.py new file mode 100644 index 0000000..e753468 --- /dev/null +++ b/runbooksolutions/agent/Task.py @@ -0,0 +1,18 @@ +import json + +class Task: + id: str + command: str + arguments: dict + cron: str + def __init__(self, task: dict) -> None: + self.id = task.get('id') + self.command = task.get('command') + self.arguments = task.get('arguments') + self.cron = task.get('cron') + + def shouldSchedule(self): + return self.cron != None + + def getArguments(self): + return json.loads(self.arguments) \ No newline at end of file diff --git a/runbooksolutions/auth/Auth.py b/runbooksolutions/auth/Auth.py index 0a6e41c..1f482c8 100644 --- a/runbooksolutions/auth/Auth.py +++ b/runbooksolutions/auth/Auth.py @@ -6,12 +6,17 @@ import time class Auth: + url: str + client_id: str deviceCode: DeviceCode = None accessToken: AccessToken = None - def __init__(self) -> None: + def __init__(self, url: str, client_id: str) -> None: logging.debug("Starting Authentication") + self.url = url + self.client_id = client_id + self.deviceCode = DeviceCode.load_from_store() self.accessToken = AccessToken.load_from_store() @@ -36,11 +41,11 @@ def getHeaders(self) -> dict: def getDeviceCode(self) -> None: logging.debug("Getting a device code.") myobj = { - 'client_id': '9aa3c276-93c3-4255-b148-086fa9bc3224', + 'client_id': 
self.client_id, 'grant_type': 'urn:ietf:params:oauth:grant-type:device_code', 'scope': '' } - url = "http://192.168.1.197/oauth/device/code" + url = f"{self.url}/oauth/device/code" try: response = requests.post(url, json=myobj) @@ -63,11 +68,11 @@ def pollAuthorization(self) -> None: self.getDeviceCode() try: myobj = { - 'client_id': '9aa3c276-93c3-4255-b148-086fa9bc3224', + 'client_id': self.client_id, 'grant_type': 'urn:ietf:params:oauth:grant-type:device_code', 'device_code': self.deviceCode.getDeviceCode() } - url = "http://192.168.1.197/oauth/token" + url = f"{self.url}/oauth/token" logging.debug(myobj) diff --git a/runbooksolutions/queue/Queue.py b/runbooksolutions/queue/Queue.py index 3f5f518..1ca8791 100644 --- a/runbooksolutions/queue/Queue.py +++ b/runbooksolutions/queue/Queue.py @@ -1,4 +1,5 @@ from runbooksolutions.agent.PluginManager import PluginManager +from runbooksolutions.agent.Task import Task from threading import Event import logging import asyncio @@ -26,10 +27,14 @@ def _worker(self): logging.error(e) pass # Handle exceptions as needed - def execute_task(self, task: dict) -> None: + def execute_task(self, task: Task) -> None: + + command = task.command + arguments = task.getArguments() + # Implement your task execution logic here - logging.error(f"Running Task {task}") - self.pluginManager.executeCommand('lol', 2) + logging.info(f"Running Task \'{command}\' with arguments {arguments}") + self.pluginManager.executeCommand(command, **arguments) pass async def start(self) -> None: @@ -46,6 +51,16 @@ async def start(self) -> None: def stop(self) -> None: self.stop_event.set() - async def enqueue_task(self, task: dict) -> None: - logging.debug("Enqueued Task") - await self.task_queue.put(task) + async def enqueue_task(self, task: Task) -> None: + task_id = task.id + if not self._is_task_in_queue(task_id): + logging.debug("Enqueued Task") + await self.task_queue.put(task) + else: + logging.warning("Task already in Queue.") + + def 
_is_task_in_queue(self, task_id: str) -> bool: + for task in self.task_queue._queue: + if task.id == task_id: + return True + return False diff --git a/runbooksolutions/schedule/Schedule.py b/runbooksolutions/schedule/Schedule.py index 84c448f..55e283c 100644 --- a/runbooksolutions/schedule/Schedule.py +++ b/runbooksolutions/schedule/Schedule.py @@ -1,4 +1,5 @@ from runbooksolutions.queue.Queue import Queue +from runbooksolutions.agent.Task import Task from datetime import datetime from croniter import croniter import asyncio @@ -11,32 +12,14 @@ def __init__(self, queue: Queue) -> None: self.queue = queue self.tasks = [] - def add_task(self, task: dict, cron_expression: str) -> None: - logging.debug("Added Task") - self.tasks.append((task, cron_expression)) + def add_task(self, task: Task, cron_expression: str) -> None: + task_id = task.id + if not self._is_task_in_schedule(task_id): + logging.debug("Added Task") + self.tasks.append((task, cron_expression)) + else: + logging.warning("Task already Scheduled") - # async def start(self) -> None: - # logging.debug("Schedule Started") - # while True: - # current_time = datetime( - # datetime.now().year, - # datetime.now().month, - # datetime.now().day, - # datetime.now().hour, - # datetime.now().minute, - # datetime.now().second - # ) - # for task, cron_expression in self.tasks: - # cron = croniter(cron_expression, current_time) - # next_run_time = cron.get_next(datetime) - # last_run_time = cron.get_prev(datetime) - # logging.debug(f"Current Time: {current_time}, Next Run Time: {next_run_time}") - # if next_run_time == current_time or current_time == last_run_time: - # logging.info("Queuing Task") - # await self.queue.enqueue_task(task) - # else: - # logging.debug("Not Time") - # await asyncio.sleep(1) async def start(self) -> None: logging.debug("Schedule Started") while True: @@ -54,4 +37,10 @@ def shouldRun(self, cron_expression): cron.set_current(td, force=True) tdp, tdt = cron.get_current(), cron.get_prev() 
precision_in_seconds = 1 - return (max(tdp, tdt) - min(tdp, tdt)) < precision_in_seconds \ No newline at end of file + return (max(tdp, tdt) - min(tdp, tdt)) < precision_in_seconds + + def _is_task_in_schedule(self, task_id: str) -> bool: + for task, _ in self.tasks: + if task.id == task_id: + return True + return False \ No newline at end of file From e6df8d480720e4d194d3087d6c107c9ec29d2af9 Mon Sep 17 00:00:00 2001 From: Will G Date: Sun, 26 Nov 2023 20:51:40 -0500 Subject: [PATCH 4/8] WIP --- .gitignore | 2 -- Dockerfile | 5 ++++- README.md | 41 +++++++++++++++++++++++++++++++++++++++++ config.ini | 6 ++---- plugins/.gitignore | 2 ++ run | 5 +++-- stores/.gitignore | 2 ++ 7 files changed, 54 insertions(+), 9 deletions(-) create mode 100644 plugins/.gitignore create mode 100644 stores/.gitignore diff --git a/.gitignore b/.gitignore index 38a3c56..92d77a0 100644 --- a/.gitignore +++ b/.gitignore @@ -1,8 +1,6 @@ kerberos run OLD -stores hosts test.py -plugins __pycache__ \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 08dc39c..5db15ce 100644 --- a/Dockerfile +++ b/Dockerfile @@ -8,6 +8,8 @@ RUN apt-get update && \ # Set the working directory to /app WORKDIR /app +RUN mkdir plugins stores + COPY _docker/start.sh /start.sh RUN chmod +x /start.sh @@ -17,7 +19,8 @@ COPY requirements.txt /app # Install any needed packages specified in requirements.txt RUN pip install -r requirements.txt -COPY . /app +COPY app.py /app +COPY runbooksolutions /app # Define the command to run your application CMD [ "/start.sh" ] \ No newline at end of file diff --git a/README.md b/README.md index bd79aec..ca81a71 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,46 @@ # RunbookSolution Network Agent +This codebase composes the core of the Network Agent for RunbookSolutions.
+ +## Installation + +### Prebuilt Docker Image +NOTE: THE DOCKER IMAGE DOES NOT CURRENTLY EXIST + +```sh +mkdir agent +cd agent +mkdir plugins,stores,kerberos +wget https://raw.githubusercontent.com/RunbookSolutions/agent/staging/config.ini + +docker run \ + --name RunbookSolutions_Agent \ + -v $(pwd)/config.ini:/app/config.ini \ + -v $(pwd)/plugins:/app/plugins \ + -v $(pwd)/stores:/app/stores \ + -v $(pwd)/kerberos/krb5.conf:/etc/krb5.conf \ + -v $(pwd)/kerberos:/keytabs \ + -d \ + --restart unless-stopped \ + runbooksolutions/image_agent:latest + +``` + +### From Source: +```sh +git clone https://github.com/RunbookSolutions/agent.git +cd agent +./run +``` + +## Configuration +Configuration maintained in a simple `config.ini` file consisting of the server_url of the backend; and the client_id for the device authentication. + +```ini +[agent] +server_url=http://192.168.1.197 # Note: Do NOT include a trailing slash on the server_url +client_id=9ab55261-bfb7-4bb3-ad29-a6dbdbf8a5af # Device Code Grant client_id provided by the server +``` ### Creating a Keytab File diff --git a/config.ini b/config.ini index cd68f9d..4e629ec 100644 --- a/config.ini +++ b/config.ini @@ -1,5 +1,3 @@ [agent] -# Note: Do NOT include a trailing slash on the server_url -server_url=http://192.168.1.197 -# Note: This must be provided by the server operator. 
-client_id=9ab55261-bfb7-4bb3-ad29-a6dbdbf8a5af \ No newline at end of file +server_url=http://192.168.1.197 # Note: Do NOT include a trailing slash on the server_url +client_id=9ab55261-bfb7-4bb3-ad29-a6dbdbf8a5af # Device Code Grant client_id provided by the server \ No newline at end of file diff --git a/plugins/.gitignore b/plugins/.gitignore new file mode 100644 index 0000000..c96a04f --- /dev/null +++ b/plugins/.gitignore @@ -0,0 +1,2 @@ +* +!.gitignore \ No newline at end of file diff --git a/run b/run index 32eb288..c8acd9a 100644 --- a/run +++ b/run @@ -1,6 +1,7 @@ docker run \ - -v ./:/app \ - -v ./hosts:/etc/hosts \ + -v ./config.ini:/app/config.ini \ + -v ./plugins:/app/plugins \ + -v ./stores:/app/stores \ -v $(pwd)/kerberos/krb5.conf:/etc/krb5.conf \ -v $(pwd)/kerberos:/keytabs \ -it $(docker build -q .) diff --git a/stores/.gitignore b/stores/.gitignore new file mode 100644 index 0000000..c96a04f --- /dev/null +++ b/stores/.gitignore @@ -0,0 +1,2 @@ +* +!.gitignore \ No newline at end of file From c48f21d4cbe1a0d76bbeb0ae32061e7fe93e540c Mon Sep 17 00:00:00 2001 From: Will G Date: Sun, 26 Nov 2023 21:27:10 -0500 Subject: [PATCH 5/8] WIP --- Dockerfile | 2 +- README.md | 25 ++++++++++++++++++++++--- config.ini | 8 ++++++-- docs/Responses.md | 31 ++++++++++++++++++++++++++++--- runbooksolutions/agent/Agent.py | 6 +++++- runbooksolutions/auth/Auth.py | 15 ++++++++++++--- 6 files changed, 74 insertions(+), 13 deletions(-) diff --git a/Dockerfile b/Dockerfile index 5db15ce..f352406 100644 --- a/Dockerfile +++ b/Dockerfile @@ -20,7 +20,7 @@ COPY requirements.txt /app RUN pip install -r requirements.txt COPY app.py /app -COPY runbooksolutions /app +COPY runbooksolutions /app/runbooksolutions # Define the command to run your application CMD [ "/start.sh" ] \ No newline at end of file diff --git a/README.md b/README.md index ca81a71..a9b3ec8 100644 --- a/README.md +++ b/README.md @@ -40,13 +40,32 @@ Configuration maintained in a simple `config.ini` file 
consisting of the server_ [agent] server_url=http://192.168.1.197 # Note: Do NOT include a trailing slash on the server_url client_id=9ab55261-bfb7-4bb3-ad29-a6dbdbf8a5af # Device Code Grant client_id provided by the server +auth=True # To disable auth when not using with RunbookSolutions. ``` -### Creating a Keytab File +## Expected Server Responses +Due to the Agent's nature; it can easily be used by others outside of RunbookSolutions. -``` +To implement a backend for this agent you will need to provided the following endpoints. + +`GET /api/agent` for the agent to load information about itself. This endpoint also provides the agent with a list of PLUGIN_ID's that it needs to load. + +`GET /api/agent/plugin/{PLUGIN_ID}` for the agent to download plugins. This endpoint also provides details about commands the plugin provides. + +`GET /api/agent/tasks` for the agent to load tasks that it needs to run. Tasks include scheduled and one-off tasks to run; and will always present tasks until they are removed from the backend. This allows for the agent to restart without skipping task execution. + +Additional details can be found on the [Expected Server Responses](/docs/Responses.md) page. + +## Creating a Keytab File + +Some plugins may require authentication against your windows domain. 
+ +The simplest way to accomplish this is by using the [Docker Kerberos Keytab Generator](https://github.com/simplesteph/docker-kerberos-get-keytab): + +```sh +cd agent docker run -it --rm \ - -v $(pwd):/output \ + -v $(pwd)/kerberos:/output \ -e PRINCIPAL= \ simplesteph/docker-kerberos-get-keytab ``` \ No newline at end of file diff --git a/config.ini b/config.ini index 4e629ec..55be0c2 100644 --- a/config.ini +++ b/config.ini @@ -1,3 +1,7 @@ [agent] -server_url=http://192.168.1.197 # Note: Do NOT include a trailing slash on the server_url -client_id=9ab55261-bfb7-4bb3-ad29-a6dbdbf8a5af # Device Code Grant client_id provided by the server \ No newline at end of file +# Note: Do NOT include a trailing slash on the server_url +server_url=http://192.168.1.197 +# Device Code Grant client_id provided by the server +client_id=9ab55261-bfb7-4bb3-ad29-a6dbdbf8a5af +# If we are required to perform Device Code Authentication +auth=True \ No newline at end of file diff --git a/docs/Responses.md b/docs/Responses.md index b154c49..c0690c1 100644 --- a/docs/Responses.md +++ b/docs/Responses.md @@ -1,6 +1,13 @@ # Responses From Backend -GET /api/agent +> Note: The backend server is expected to identify the agent making the request without any additional parameters being sent. By default RunbookSolutions achieves this using the OAuth Device Code process to retrieve an Access Token for authentication. +>> In non-NAT'ed environments; the backend server could use the IP address from the request to identify agents. + +## `GET /api/agent` +This endpoint provides the agent with basic information about itself along with a list of PLUGIN_ID's that the agent should have loaded. + +- The Team ID provided here is used to identify and group agents to specific groups on the backend. It is **required** even if not used.
+ ```json { "data": { @@ -14,7 +21,17 @@ GET /api/agent } ``` -GET /api/agent/plugin/{plugin_id} +## `GET /api/agent/plugin/{plugin_id}` +This endpoint provides the agent with individual plugins for the agent along with the corresponding commands the plugin makes available. + +> Note: It is important to note that two different versions of a plugin may be loaded by an agent. Commands are prefixed with the PLUGIN_ID to avoid collisions. + +- The `script` variable contains the code the agent will execute when required. +- The `hash` variable is the `SHA512` hash of the script; the agent will verify both the script variable as well as the file it creates to store the plugin. +- The `commands` variable contains contains the details of what function in the program to run for which command is provided. + +> Important: Both the `script` variable and the file written to disk must match the provided hash for the plugin to be loaded and run. + ```json { "data": { @@ -40,7 +57,15 @@ GET /api/agent/plugin/{plugin_id} } ``` -GET /api/agent/tasks +## `GET /api/agent/tasks` +The following endpoint provides the agent with details of what commands need to be run, when (if scheduled), and any arguments for said command. + +- The `command` variable must match one of the keys provided by the `plugin.commands` variable when downloading plugins. +- The `cron` variable is the cron formatted schedule for when the task runs, or `null` if it should only be run once. +- The `arguments` variable should be a JSON encoded string containg the arument name and values for the function to run. + +> Note: The agent uses the `task.id` to ensure tasks are not being duplicated into the schedule and queue. 
+ ```json { "data": [ diff --git a/runbooksolutions/agent/Agent.py b/runbooksolutions/agent/Agent.py index 883c2af..33c884c 100644 --- a/runbooksolutions/agent/Agent.py +++ b/runbooksolutions/agent/Agent.py @@ -19,7 +19,11 @@ class Agent: def __init__(self, num_threads: int = 1) -> None: self.agentConfig = self.loadConfig() - self.auth = Auth(url=self.agentConfig.get('server_url'), client_id=self.agentConfig.get('client_id')) + self.auth = Auth( + url=self.agentConfig.get('server_url'), + client_id=self.agentConfig.get('client_id'), + enabled=self.agentConfig.get('auth') + ) self.api = API(auth=self.auth, url=self.agentConfig.get('server_url')) self.pluginManager = PluginManager(self.api) self.queue = Queue(num_threads, self.pluginManager) diff --git a/runbooksolutions/auth/Auth.py b/runbooksolutions/auth/Auth.py index 1f482c8..2b57216 100644 --- a/runbooksolutions/auth/Auth.py +++ b/runbooksolutions/auth/Auth.py @@ -8,14 +8,18 @@ class Auth: url: str client_id: str + enabled: bool = True deviceCode: DeviceCode = None accessToken: AccessToken = None - def __init__(self, url: str, client_id: str) -> None: - logging.debug("Starting Authentication") - + def __init__(self, url: str, client_id: str, enabled: bool = True) -> None: self.url = url self.client_id = client_id + self.enabled = enabled + + logging.debug("Starting Authentication") + if not self.enabled: + return self.deviceCode = DeviceCode.load_from_store() self.accessToken = AccessToken.load_from_store() @@ -32,6 +36,11 @@ def __init__(self, url: str, client_id: str) -> None: logging.debug("Finished Authentication") def getHeaders(self) -> dict: + if not self.enabled: + return { + 'Content-Type': 'application/json', + } + headers = { 'Authorization': f'Bearer {self.accessToken.getAccessToken()}', 'Content-Type': 'application/json', From 9672d40cef0ab89cdf3c8c7aa0b40cf42ec052e4 Mon Sep 17 00:00:00 2001 From: Will G Date: Sun, 26 Nov 2023 21:42:16 -0500 Subject: [PATCH 6/8] Github Actions --- 
.github/workflows/production.yaml | 34 +++++++++++++++++++++++++++++++ .github/workflows/staging.yaml | 29 ++++++++++++++++++++++++++ README.md | 2 +- 3 files changed, 64 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/production.yaml create mode 100644 .github/workflows/staging.yaml diff --git a/.github/workflows/production.yaml b/.github/workflows/production.yaml new file mode 100644 index 0000000..0323ea0 --- /dev/null +++ b/.github/workflows/production.yaml @@ -0,0 +1,34 @@ +name: Production + +on: + push: + branches: + - 'production' + release: + types: + - created + +jobs: + docker: + runs-on: ubuntu-latest + steps: + - + name: Set up QEMU + uses: docker/setup-qemu-action@v3 + - + name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - + name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - + name: Build and push + uses: docker/build-push-action@v5 + with: + push: true + tags: | + runbooksolutions/agent:latest + ${{ github.event_name == 'release' && github.ref ? 
'runbooksolutions/agent:' + github.ref : '' }} \ No newline at end of file diff --git a/.github/workflows/staging.yaml b/.github/workflows/staging.yaml new file mode 100644 index 0000000..c083533 --- /dev/null +++ b/.github/workflows/staging.yaml @@ -0,0 +1,29 @@ +name: Dev + +on: + push: + branches: + - 'staging' + +jobs: + docker: + runs-on: ubuntu-latest + steps: + - + name: Set up QEMU + uses: docker/setup-qemu-action@v3 + - + name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - + name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - + name: Build and push + uses: docker/build-push-action@v5 + with: + push: true + tags: runbooksolutions/agent:dev \ No newline at end of file diff --git a/README.md b/README.md index a9b3ec8..76d79d4 100644 --- a/README.md +++ b/README.md @@ -22,7 +22,7 @@ docker run \ -v $(pwd)/kerberos:/keytabs \ -d \ --restart unless-stopped \ - runbooksolutions/image_agent:latest + runbooksolutions/agent:latest ``` From 42b0ea164e253bcf22583096c2c021973242956f Mon Sep 17 00:00:00 2001 From: Will G Date: Sun, 26 Nov 2023 21:43:32 -0500 Subject: [PATCH 7/8] Github Actions --- .github/workflows/{production.yaml => production.yml} | 0 .github/workflows/{staging.yaml => staging.yml} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename .github/workflows/{production.yaml => production.yml} (100%) rename .github/workflows/{staging.yaml => staging.yml} (100%) diff --git a/.github/workflows/production.yaml b/.github/workflows/production.yml similarity index 100% rename from .github/workflows/production.yaml rename to .github/workflows/production.yml diff --git a/.github/workflows/staging.yaml b/.github/workflows/staging.yml similarity index 100% rename from .github/workflows/staging.yaml rename to .github/workflows/staging.yml From dcb1cc9afd58b999c0627425de463cfa7f085a1b Mon Sep 17 00:00:00 2001 From: Will G Date: Sun, 26 Nov 
2023 22:02:00 -0500 Subject: [PATCH 8/8] Update Docs --- README.md | 32 ++++++++++++++++++++++---------- docs/Responses.md | 6 ++++-- 2 files changed, 26 insertions(+), 12 deletions(-) diff --git a/README.md b/README.md index 76d79d4..5ef8657 100644 --- a/README.md +++ b/README.md @@ -1,16 +1,15 @@ # RunbookSolution Network Agent -This codebase composes the core of the Network Agent for RunbookSolutions. +This codebase comprises the core of the Network Agent for RunbookSolutions. ## Installation ### Prebuilt Docker Image -NOTE: THE DOCKER IMAGE DOES NOT CURRENTLY EXIST ```sh mkdir agent cd agent -mkdir plugins,stores,kerberos +mkdir plugins stores kerberos wget https://raw.githubusercontent.com/RunbookSolutions/agent/staging/config.ini docker run \ @@ -26,7 +25,21 @@ docker run \ ``` -### From Source: +### Extending the Default Image + +The default image includes the following Python libraries by default. To include additional libraries for your custom plugins, simply extend our default image and use it instead. + +```Dockerfile +FROM runbooksolutions/agent:latest +# Using a requirements.txt (Recommended) +COPY requirements.txt /app/custom_requirements.txt +RUN pip install -r custom_requirements.txt +# OR Individually +RUN pip install some_package +``` + + +### From Source ```sh git clone https://github.com/RunbookSolutions/agent.git cd agent @@ -34,7 +47,7 @@ cd agent ``` ## Configuration -Configuration maintained in a simple `config.ini` file consisting of the server_url of the backend; and the client_id for the device authentication. +Configuration is maintained in a simple 'config.ini' file consisting of the 'server_url' of the backend and the 'client_id' for device authentication. ```ini [agent] @@ -44,15 +57,14 @@ auth=True # To disable auth when not using with RunbookSolutions. ``` ## Expected Server Responses -Due to the Agent's nature; it can easily be used by others outside of RunbookSolutions. 
- -To implement a backend for this agent you will need to provided the following endpoints. +Due to the agent's nature, it can easily be used by others outside of RunbookSolutions. +To implement a backend for this agent, you will need to provide the following endpoints. -`GET /api/agent` for the agent to load information about itself. This endpoint also provides the agent with a list of PLUGIN_ID's that it needs to load. +`GET /api/agent` for the agent to load information about itself. This endpoint also provides the agent with a list of PLUGIN_IDs that it needs to load. `GET /api/agent/plugin/{PLUGIN_ID}` for the agent to download plugins. This endpoint also provides details about commands the plugin provides. -`GET /api/agent/tasks` for the agent to load tasks that it needs to run. Tasks include scheduled and one-off tasks to run; and will always present tasks until they are removed from the backend. This allows for the agent to restart without skipping task execution. +`GET /api/agent/tasks` for the agent to load tasks that it needs to run. Tasks include scheduled and one-off tasks to run and will always present tasks until they are removed from the backend. This allows the agent to restart without skipping task execution. Additional details can be found on the [Expected Server Responses](/docs/Responses.md) page. diff --git a/docs/Responses.md b/docs/Responses.md index c0690c1..e4b4af0 100644 --- a/docs/Responses.md +++ b/docs/Responses.md @@ -4,10 +4,12 @@ >> In non-NAT'ed environments; the backend server could use the IP address from the request to identify agents. ## `GET /api/agent` + This endpoint provides the agent with basic information about itself along with a list of PLUGIN_ID's that the agent should have loaded. - The Team ID provided here is used to identify and group agents to specific groups on the backend. It is **required** even if not used. 
+ ```json { "data": { @@ -28,7 +30,7 @@ This endpoint provides the agent with individual plugins for the agent along wit - The `script` variable contains the code the agent will execute when required. - The `hash` variable is the `SHA512` hash of the script; the agent will verify both the script variable as well as the file it creates to store the plugin. -- The `commands` variable contains contains the details of what function in the program to run for which command is provided. +- The `commands` variable contains the details of what function in the program to run for which command is provided. > Important: Both the `script` variable and the file written to disk must match the provided hash for the plugin to be loaded and run. @@ -62,7 +64,7 @@ The following endpoint provides the agent with details of what commands need to - The `command` variable must match one of the keys provided by the `plugin.commands` variable when downloading plugins. - The `cron` variable is the cron formatted schedule for when the task runs, or `null` if it should only be run once. -- The `arguments` variable should be a JSON encoded string containg the arument name and values for the function to run. +- The `arguments` variable should be a JSON encoded string containing the argument name and values for the function to run. > Note: The agent uses the `task.id` to ensure tasks are not being duplicated into the schedule and queue.