Commit

Improved frontend to view list of reports and job info. Properly update the job db entry
matteogreek authored and lauraschauer committed Jul 4, 2023
1 parent d1b0b49 commit 0f49bc8
Showing 22 changed files with 306 additions and 137 deletions.
40 changes: 21 additions & 19 deletions prospector/data_sources/nvd/filter_entries.py
@@ -3,6 +3,7 @@
 import datetime
 import json
 
+import aiofiles
 import aiohttp
 import psycopg2
 import requests
@@ -21,9 +22,9 @@
 
 config = parse_config_file()
 
-with open("./data/project_metadata.json", "r") as f:
-    global match_list
-    match_list = json.load(f)
+# with open("./data/project_metadata.json", "r") as f:
+#     global match_list
+#     match_list = json.load(f)
 
 
 def connect_to_db():
@@ -43,7 +44,6 @@ def disconnect_from_database(db):
 
 
 async def retrieve_vulns(d_time):
-
     start_date, end_date = get_time_range(d_time)
 
     data = ""
@@ -88,23 +88,25 @@ def save_vuln_to_db(vulns):
     db.disconnect()
 
 
-def get_cve_by_id(id):
+async def get_cve_by_id(id):
     nvd_url = f"https://services.nvd.nist.gov/rest/json/cves/2.0?cveID={id}"
 
-    try:
-        print(nvd_url)
-        response = requests.get(nvd_url)
-    except Exception as e:
-        print(str(e))
-
-    if response.status_code == 200:
-        data = json.loads(response.text)
-        # print(data["vulnerabilities"])
-
-    else:
-        print("Error while trying to retrieve entries")
-
-    return data
+    async with aiohttp.ClientSession() as session:
+        try:
+            async with session.get(nvd_url) as response:
+                if response.status == 200:
+                    data = await response.json()
+                else:
+                    print("Error while trying to retrieve entry")
+        except aiohttp.ClientError as e:
+            print(str(e))
+            logger.error("Error while retrieving vulnerability from NVD", exc_info=True)
+    return data
+
+
+async def add_single_cve(vuln_id: str):
+    raw_json_cve = get_cve_by_id(vuln_id)
+    save_vuln_to_db(raw_json_cve)
 
 
 def write_list_to_file(lst, filename):
@@ -164,8 +166,8 @@ async def process_entries():
 
 async def map_entry(vuln):
     # TODO: improve mapping technique
-    # async with aiofiles.open("./data/project_metadata.json", "r") as f:
-    #     match_list = json.loads(await f.read())
+    async with aiofiles.open("./data/project_metadata.json", "r") as f:
+        match_list = json.loads(await f.read())
 
     project_names = extract_products(vuln["cve"]["descriptions"][0]["value"])
     # print(project_names)
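A note on the rewritten get_cve_by_id: as an async def it returns a coroutine, so callers must await it — as committed, add_single_cve assigns the coroutine object itself to raw_json_cve. A minimal driver sketch; the await and asyncio.run wiring are illustrative only, assuming the module is importable as data_sources.nvd.filter_entries:

    import asyncio

    from data_sources.nvd.filter_entries import get_cve_by_id, save_vuln_to_db

    async def fetch_and_store(vuln_id: str):
        # Await the coroutine so raw_json_cve holds the parsed NVD JSON,
        # not an un-awaited coroutine object.
        raw_json_cve = await get_cve_by_id(vuln_id)
        save_vuln_to_db(raw_json_cve)

    if __name__ == "__main__":
        asyncio.run(fetch_and_store("CVE-2021-44228"))  # example CVE id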
75 changes: 46 additions & 29 deletions prospector/data_sources/nvd/job_creation.py
@@ -1,9 +1,10 @@
 import json
 import sys
 import time
-from datetime import datetime, timedelta
+from datetime import datetime
 
 import redis
+import requests
 from rq import Connection, Queue, get_current_job
 
 from backenddb.postgres import PostgresBackendDB
@@ -12,18 +13,30 @@
 from log.logger import logger
 from util.config_parser import parse_config_file
 
-# get the redis server url
+# get the redis server url and backend from configuration file
 config = parse_config_file()
 redis_url = config.redis_url
+backend = config.backend
 
 
 def run_prospector(vuln_id, repo_url, v_int):
-
     start_time = time.time()
     job = get_current_job()
-    db = connect_to_db()
-    db.update_job(job.get_id(), job.get_status(), job.started_at)
+    job_id = job.get_id()
+    url = f"{backend}/jobs/{job_id}"
+    data = {
+        "status": job.get_status(),
+        "started_at": job.started_at.isoformat(),
+    }
+
+    try:
+        response = requests.put(url, json=data)
+        if response.status_code == 200:
+            response_object = response.json()
+            print(response_object)
+        else:
+            print("Error:", response.status_code)
+    except requests.exceptions.RequestException as e:
+        print("Error:", e)
 
     try:
         results, advisory_record = prospector(
@@ -36,36 +49,40 @@ def run_prospector(vuln_id, repo_url, v_int):
             results,
             advisory_record,
             "html",
-            f"data_sources/reports/{vuln_id}",
+            f"data_sources/reports/{vuln_id}_{job_id}",
         )
+        status = "finished"
+        results = f"data_sources/reports/{vuln_id}_{job_id}"
     except Exception:
-        end_time = time.time()
-        elapsed_time = end_time - start_time
-        ended_at = job.started_at + timedelta(seconds=int(elapsed_time))
+        status = "failed"
+        results = None
         logger.error("job failed during execution")
-        print(job.get_id(), "failed", ended_at)
-        db.update_job(job.get_id(), "failed", ended_at=ended_at)
-        db.disconnect()
-    else:
-        end_time = time.time()
-        elapsed_time = end_time - start_time
-        ended_at = job.started_at + timedelta(seconds=int(elapsed_time))
-        print(job.get_id(), "finished", ended_at, f"data_sources/reports/{vuln_id}")
-        db.update_job(
-            job.get_id(),
-            "finished",
-            ended_at=ended_at,
-            results=f"data_sources/reports/{vuln_id}",
-        )
-        db.disconnect()
+    finally:
+        end_time = datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f")
+        print(job_id, status, end_time, results)
+        data = {"status": status, "finished_at": end_time, "results": results}
+        try:
+            response = requests.put(url, json=data)
+            if response.status_code == 200:
+                response_object = response.json()
+                print(response_object)
+            else:
+                print("Error:", response.status_code)
+        except requests.exceptions.RequestException as e:
+            print("Error:", e)
 
-    return f"data_sources/reports/{vuln_id}"
+    return f"data_sources/reports/{vuln_id}_{job_id}"
 
 
-def create_prospector_job(vuln_id, repo, version):
+def create_prospector_job(vuln_id, repo, version, at_front=False):
     with Connection(redis.from_url(redis_url)):
-        queue = Queue(default_timeout=500)
-        job = queue.enqueue(run_prospector, args=(vuln_id, repo, version))
+        queue = Queue(default_timeout=800)
+        if at_front:
+            job = queue.enqueue(
+                run_prospector, args=(vuln_id, repo, version), at_front=True
+            )
+        else:
+            job = queue.enqueue(run_prospector, args=(vuln_id, repo, version))
 
     return job
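The reworked run_prospector now reports status to the backend over HTTP (PUT {backend}/jobs/{job_id}) instead of writing to the database directly, and create_prospector_job gains an at_front flag that uses RQ's at_front option to jump the queue. A usage sketch, assuming Redis is reachable at the configured redis_url, an RQ worker is consuming the default queue, and with invented CVE IDs and repository URLs:

    from data_sources.nvd.job_creation import create_prospector_job

    # Normal enqueue: the job runs after everything already queued.
    job = create_prospector_job(
        "CVE-2023-12345", "https://github.com/example/project", None
    )
    print(job.get_id(), job.get_status())  # e.g. "<job id> queued"

    # Priority enqueue: at_front=True places the job at the head of the queue.
    urgent = create_prospector_job(
        "CVE-2023-67890", "https://github.com/example/other", None, at_front=True
    )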
7 changes: 4 additions & 3 deletions prospector/data_sources/nvd/versions_extraction.py
@@ -66,9 +66,10 @@ def process_versions(ranges_list):
         start, end = last_range[1:].split(":")
         if "]" in end:
             end_components = end[:-1].split(".")
-            end_components[-1] = str(
-                int(end_components[-1]) + 1
-            )  # Increment the last component
+            if end_components[-1].isdigit():
+                end_components[-1] = str(
+                    int(end_components[-1]) + 1
+                )  # Increment the last component
             end = ".".join(end_components)
         else:
             end = end.strip(")")
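The new isdigit() guard only increments the upper bound when its last component is numeric, so version strings like "2.4.0-M1" no longer raise ValueError at int(). A standalone illustration of the guarded logic (hypothetical helper, invented inputs):

    def increment_last_component(end: str) -> str:
        # Mirrors the guarded increment in process_versions: bump the last
        # dotted component only when it is purely numeric.
        end_components = end.split(".")
        if end_components[-1].isdigit():
            end_components[-1] = str(int(end_components[-1]) + 1)
        return ".".join(end_components)

    print(increment_last_component("2.4.5"))    # -> 2.4.6
    print(increment_last_component("2.4.rc1"))  # -> 2.4.rc1 (unchanged, no crash)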
14 changes: 11 additions & 3 deletions prospector/datamodel/advisory.py
@@ -249,9 +249,17 @@ def extract_hashes(self, reference: str, filter: bool = False) -> str | None:
     def parse_advisory_2(self, details, metadata):
         self.affected_products = [details["affected"][0]["product"]]
         self.versions = dict(details["affected"][0]["versions"][0])
-        self.published_timestamp = int(isoparse(metadata["datePublished"]).timestamp())
-        self.updated_timestamp = int(isoparse(metadata["dateUpdated"]).timestamp())
-        self.reserved_timestamp = int(isoparse(metadata["dateReserved"]).timestamp())
+        timestamp_fields = {
+            "published_timestamp": "datePublished",
+            "updated_timestamp": "dateUpdated",
+            "reserved_timestamp": "dateReserved",
+        }
+
+        for field, key in timestamp_fields.items():
+            timestamp = metadata.get(key)
+            setattr(
+                self, field, int(isoparse(timestamp).timestamp()) if timestamp else None
+            )
         if not self.description:
             self.description = details["descriptions"][0]["value"]
         self.references = defaultdict(
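The table-driven loop makes parse_advisory_2 tolerant of CVE records that omit one of the three dates: metadata.get returns None instead of raising KeyError, and the attribute is set to None rather than the parse failing. A standalone sketch with an invented metadata dict and a stand-in object:

    from dateutil.parser import isoparse

    class Record:  # minimal stand-in for the advisory object
        pass

    # Invented example record: dateUpdated is absent, as in some CVE entries.
    metadata = {
        "datePublished": "2023-07-04T10:00:00Z",
        "dateReserved": "2023-06-01T00:00:00Z",
    }
    timestamp_fields = {
        "published_timestamp": "datePublished",
        "updated_timestamp": "dateUpdated",
        "reserved_timestamp": "dateReserved",
    }

    rec = Record()
    for field, key in timestamp_fields.items():
        timestamp = metadata.get(key)
        setattr(rec, field, int(isoparse(timestamp).timestamp()) if timestamp else None)

    print(rec.published_timestamp, rec.updated_timestamp)  # e.g. 1688464800 None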
2 changes: 1 addition & 1 deletion prospector/ddl/40_processed_vuln.sql
@@ -8,6 +8,6 @@ CREATE TABLE public.processed_vuln (
     _id SERIAL PRIMARY KEY,
     fk_vulnerability INT NOT NULL UNIQUE,
     repository varchar NOT NULL,
-    versions varchar NOT NULL,
+    versions varchar,
     FOREIGN KEY (fk_vulnerability) REFERENCES public.vulnerability (_id)
 );
2 changes: 1 addition & 1 deletion prospector/docker/cli/Dockerfile
@@ -23,5 +23,5 @@ FROM prospector-base:1.0
 #WORKDIR /app
 #ENV PYTHONPATH "${PYTHONPATH}:/app"
 
-VOLUME [ "/results" ]
+#VOLUME [ "/results" ]
 ENTRYPOINT [ "python","cli/main.py" ]
7 changes: 5 additions & 2 deletions prospector/run_prospector.sh
@@ -26,11 +26,14 @@ get_option_value() {
 }
 
 REPORT_FILENAME=$(get_option_value "$@")
+echo $REPORT_FILENAME
 if [[ -z $REPORT_FILENAME ]]; then
-    OUTPUT_DIR="."
+    OUTPUT_DIR=""
 else
     OUTPUT_DIR=$(dirname "$REPORT_FILENAME")
 fi
+echo $OUTPUT_DIR
+echo $(pwd)/$OUTPUT_DIR
 
 # run the docker container
-docker run --network=prospector_default --rm -t -v $(pwd)/$OUTPUT_DIR:/app/results $IMAGE_NAME "$@"
+docker run --network=prospector_default --rm -t -v $(pwd)/$OUTPUT_DIR:/app/$OUTPUT_DIR $IMAGE_NAME "$@"
30 changes: 15 additions & 15 deletions prospector/service/api/routers/feeds.py
@@ -35,6 +35,21 @@ def connect_to_db():
     return db
 
 
+@router.get("/reports")
+async def get_reports(request: Request):
+    report_list = []
+    for filename in os.listdir("/app/data_sources/reports"):
+        if filename.endswith(".html"):
+            file_path = os.path.join("/app/data_sources/reports", filename)
+            mtime = os.path.getmtime(file_path)
+            mtime_dt = datetime.fromtimestamp(mtime)
+            report_list.append((os.path.splitext(filename)[0], mtime_dt))
+    report_list.sort(key=lambda x: x[1], reverse=True)
+    return templates.TemplateResponse(
+        "report_list.html", {"request": request, "report_list": report_list}
+    )
+
+
 @router.get("/{vuln_id}")
 async def get_vuln(vuln_id: str):
 
@@ -70,21 +85,6 @@ async def get_vulnList():
     return vulnlist
 
 
-@router.get("/reports")
-async def get_reports(request: Request):
-    report_list = []
-    for filename in os.listdir("/app/data_sources/reports"):
-        if filename.endswith(".html"):
-            file_path = os.path.join("/app/data_sources/reports", filename)
-            mtime = os.path.getmtime(file_path)
-            mtime_dt = datetime.fromtimestamp(mtime)
-            report_list.append((os.path.splitext(filename)[0], mtime_dt))
-    report_list.sort(key=lambda x: x[1], reverse=True)
-    return templates.TemplateResponse(
-        "report_list.html", {"request": request, "report_list": report_list}
-    )
-
-
 @router.get("/fetch_vulns/{d_time}")
 async def get_vulns(d_time: int):
     # Retrieve and save new vulns using NVD APIs
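Moving the /reports handler above /{vuln_id} matters because FastAPI (via Starlette) matches routes in declaration order: declared after the catch-all path parameter, GET .../reports would be captured by /{vuln_id} with vuln_id="reports". A toy illustration of the ordering rule (invented routes, not the project's API):

    from fastapi import FastAPI

    app = FastAPI()

    @app.get("/things/special")      # literal route first, so it wins
    async def special():
        return {"route": "special"}

    @app.get("/things/{thing_id}")   # declared later, matches everything else
    async def get_thing(thing_id: str):
        return {"thing_id": thing_id}

    # Declared in the opposite order, GET /things/special would return
    # {"thing_id": "special"} instead.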
(diffs for the remaining 14 of the 22 changed files are not included here)