Skip to content
This repository has been archived by the owner on Feb 15, 2024. It is now read-only.

Commit

Permalink
Merge pull request #11 from RunbookSolutions/testing
Browse files Browse the repository at this point in the history
General Improvements

Allow plugins to require the API by auto-injecting it into plugins that require it.
Added the config option to force_redownload plugins even if they already exist locally. Good for testing.
Added the ability for tasks to be removed from the schedule.
  • Loading branch information
sniper7kills authored Dec 8, 2023
2 parents 6e68550 + c23c688 commit e1c9951
Show file tree
Hide file tree
Showing 7 changed files with 78 additions and 14 deletions.
4 changes: 4 additions & 0 deletions .dockerignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
__pycache__
*.pyc
*.pyo
*.pyd
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ RUN apt-get update && \
# Set the working directory to /app
WORKDIR /app

RUN mkdir plugins,stores
RUN mkdir /app/plugins /app/stores

COPY _docker/start.sh /start.sh
RUN chmod +x /start.sh
Expand Down
8 changes: 5 additions & 3 deletions config.ini
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
[agent]
# Note: Do NOT include a trailing slash on the server_url
server_url=http://192.168.1.197
server_url=https://graphql.dev.runbook.solutions
# Device Code Grant client_id provided by the server
client_id=9ab55261-bfb7-4bb3-ad29-a6dbdbf8a5af
client_id=9ac5c7f4-0dbe-4e7e-a16a-06c1027771b3
# If we are required to perform Device Code Authentication
auth=True
auth=True
# If we are forcing the re-downloading of plugins
force_redownload=False
5 changes: 4 additions & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
@@ -1,4 +1,7 @@
requests
cryptography
colorlog
croniter
croniter
python-nmap
pyinstaller
pytest
13 changes: 10 additions & 3 deletions runbooksolutions/agent/Agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ def __init__(self, num_threads: int = 1) -> None:
enabled=self.agentConfig.get('auth')
)
self.api = API(auth=self.auth, url=self.agentConfig.get('server_url'))
self.pluginManager = PluginManager(self.api)
self.pluginManager = PluginManager(self.api, self.agentConfig.get('force_redownload', False))
self.queue = Queue(num_threads, self.pluginManager)
self.schedule = Schedule(self.queue)

Expand All @@ -46,8 +46,15 @@ async def syncAgent(self):
self.agentDetails = self.api.getAgentDetails()
self.pluginManager.syncPlugins(self.agentDetails.plugins)

tasks = self.api.getAgentTasks().getTasks()
for task in tasks:
tasks_from_api = self.api.getAgentTasks().getTasks()
task_ids_from_api = [task.id for task in tasks_from_api]

# Remove tasks from the schedule that are not in the API response
for task, _ in self.schedule.tasks.copy():
if task.id not in task_ids_from_api:
self.schedule.remove_task(task.id)

for task in tasks_from_api:
if task.shouldSchedule():
self.schedule.add_task(task=task, cron_expression=task.cron)
else:
Expand Down
53 changes: 47 additions & 6 deletions runbooksolutions/agent/PluginManager.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,15 +6,18 @@
import os
import importlib
import hashlib
import inspect

class PluginManager:
plugin_directory: str = "plugins"
plugins: dict = dict()
loadedCommands: dict = dict()
api: API = None
force_redownload: bool = False

def __init__(self, api: API) -> None:
def __init__(self, api: API, force_redownload: bool = False) -> None:
    """Create a PluginManager.

    api: API client used to talk to the server (also injected into plugins
        whose Plugin.__init__ accepts an 'api' argument).
    force_redownload: when True, locally cached plugin files are removed so
        plugins are fetched fresh on the next sync — useful for testing.
    """
    self.api = api
    self.force_redownload = force_redownload

def verify_plugin_hash(self, pluginID: str) -> bool:
json_file_path = os.path.join(self.plugin_directory, f"{pluginID}.json")
Expand All @@ -31,7 +34,7 @@ def verify_plugin_hash(self, pluginID: str) -> bool:

logging.debug(f"Expected Hash: {plugin_definition.get('hash')}")
logging.debug(f"JSON Hash: {json_hash}")
logging.debug(f"JSON Hash: {script_hash}")
logging.debug(f"File Hash: {script_hash}")

if not json_hash == plugin_definition.get('hash', ''):
logging.critical("JSON Hash mismatch")
Expand Down Expand Up @@ -64,20 +67,47 @@ def removePlugin(self, pluginID: str) -> None:
else:
logging.warning(f"Plugin {pluginID} not found in loaded plugins.")

# TODO: Remove the plugin files from the file system
json_file_path = os.path.join(self.plugin_directory, f"{pluginID}.json")
with open(json_file_path, 'r') as json_file:
plugin_definition = json.load(json_file)

for command in plugin_definition.get('commands', []).keys():
self.loadedCommands.pop(command)

self.removePluginFiles(pluginID)

def removePluginFiles(self, pluginID: str) -> None:
    """Delete a plugin's definition (.json) and script (.py) files from disk.

    Files are only deleted when force_redownload is enabled; otherwise the
    cached files are deliberately left in place.
    """
    if not self.force_redownload:
        return

    # Handle both on-disk artifacts of a plugin the same way.
    for extension, kind in ((".json", "JSON"), (".py", "Python")):
        path = os.path.join(self.plugin_directory, f"{pluginID}{extension}")
        if os.path.exists(path):
            os.remove(path)
            logging.debug(f"Deleted {kind} file: {path}")
        else:
            logging.warning(f"{kind} file not found: {path}")

def syncPlugins(self, plugins: list) -> None:
logging.debug(f"Syncing Plugins. Loaded Plugins: {list(self.plugins.keys())} Requested Plugins: {plugins}")
for pluginID in self.plugins.keys():

# Create a copy of the keys to avoid dictionary size change during iteration
loaded_plugins_keys = list(self.plugins.keys())

for pluginID in loaded_plugins_keys:
if pluginID not in plugins:
logging.debug("Removing Plugin")
self.removePlugin(pluginID)
elif self.force_redownload:
logging.debug("Removing Plugging due to Forced Redownload")
self.removePlugin(pluginID)
else:
logging.debug("Plugin Still Required.")

Expand All @@ -88,7 +118,12 @@ def syncPlugins(self, plugins: list) -> None:
else:
logging.debug("Plugin Already Loaded")


def pluginIsLocal(self, pluginID: str) -> bool:
if self.force_redownload:
self.removePluginFiles(pluginID)
return False

if not os.path.exists(os.path.join(self.plugin_directory, f"{pluginID}.json")):
logging.debug("Plugin JSON Not Local")
return False
Expand Down Expand Up @@ -131,8 +166,14 @@ def loadPlugin(self, pluginID: str) -> Plugin:
spec = importlib.util.spec_from_file_location("Plugin", script_file_path)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
# Store the instance of the plugin in the loaded_plugins dictionary
return module.Plugin()

# Check if the Plugin class requires the 'api' parameter
if 'api' in inspect.getfullargspec(module.Plugin.__init__).args:
# Pass the 'api' parameter if required
return module.Plugin(self.api)
else:
# Instantiate the Plugin without the 'api' parameter
return module.Plugin()
except Exception as e:
print(f"Error importing plugin {pluginID}: {e}")
return None
Expand Down
7 changes: 7 additions & 0 deletions runbooksolutions/schedule/Schedule.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,13 @@ def add_task(self, task: Task, cron_expression: str) -> None:
else:
logging.warning("Task already Scheduled")

def remove_task(self, task_id: str) -> None:
    """Remove the first scheduled entry whose task id matches task_id.

    Silently does nothing when no scheduled task carries that id.
    """
    match_index = next(
        (idx for idx, (scheduled, _) in enumerate(self.tasks) if scheduled.id == task_id),
        None,
    )
    if match_index is not None:
        logging.debug(f"Removing Task with ID {task_id} from schedule")
        del self.tasks[match_index]

async def start(self) -> None:
logging.debug("Schedule Started")
while True:
Expand Down

0 comments on commit e1c9951

Please sign in to comment.