Fix #166 APIvoid error #299

Merged · 4 commits · Apr 17, 2023
2 changes: 1 addition & 1 deletion engines/apivoid/Dockerfile
@@ -1,5 +1,5 @@
FROM alpine:3.16.3
LABEL Name="APIVoid\ \(Patrowl engine\)" Version="1.4.31"
LABEL Name="APIVoid\ \(Patrowl engine\)" Version="1.4.32"

# Create the target repo
RUN mkdir -p /opt/patrowl-engines/apivoid
4 changes: 2 additions & 2 deletions engines/apivoid/README.md
@@ -19,8 +19,8 @@ set your APIKey in APIVOID_APIKEY environment variable

## Start with Docker
```
docker build . -t engine-apivoid
docker run -p5022:5022 -e APIVOID_APIKEY=XXXXX engine-apivoid
docker build . -t patrowl/engine-apivoid
docker run -p5022:5022 -e APIVOID_APIKEY=XXXXX patrowl/engine-apivoid
```

## Testing URLs
2 changes: 1 addition & 1 deletion engines/apivoid/VERSION
@@ -1 +1 @@
1.4.31
1.4.32
4 changes: 2 additions & 2 deletions engines/apivoid/__init__.py
@@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-

__title__ = 'patrowl_engine_apivoid'
__version__ = '1.4.27'
__version__ = '1.4.32'
__author__ = 'Nicolas MATTIOCCO'
__license__ = 'AGPLv3'
__copyright__ = 'Copyright (C) 2020-2022 Nicolas Mattiocco - @MaKyOtOx'
__copyright__ = 'Copyright (C) 2020-2023 Nicolas Mattiocco - @MaKyOtOx'
4 changes: 2 additions & 2 deletions engines/apivoid/apivoid.json.sample
@@ -1,8 +1,8 @@
{
"name": "APIVOID",
"version": "1.4.28",
"version": "1.4.32",
"description": "APIVoid reputation API",
"allowed_asset_types": ["domain", "ip", "url", "ip-subnet"],
"allowed_asset_types": ["domain", "fqdn", "ip", "url", "ip-subnet"],
"apikeys": [
"xx",
"yy"
123 changes: 98 additions & 25 deletions engines/apivoid/engine-apivoid.py
@@ -7,12 +7,13 @@
import json
import time
import requests
import datetime
import re
from urllib.parse import urlparse
from flask import Flask, request, jsonify
from flask import Flask, request, jsonify, send_from_directory
from concurrent.futures import ThreadPoolExecutor
from ratelimit import limits, sleep_and_retry
from netaddr import IPNetwork
from netaddr import IPNetwork, IPAddress
from netaddr.core import AddrFormatError

from PatrowlEnginesUtils.PatrowlEngine import PatrowlEngine
@@ -25,7 +26,7 @@
APP_MAXSCANS = int(os.environ.get('APP_MAXSCANS', 25))
APP_ENGINE_NAME = "apivoid"
APP_BASE_DIR = os.path.dirname(os.path.realpath(__file__))
VERSION = "1.4.28"
VERSION = "1.4.32"

engine = PatrowlEngine(
app=app,
@@ -173,8 +174,14 @@ def stop_scan(scan_id):

@app.route('/engines/apivoid/getreport/<scan_id>')
def getreport(scan_id):
"""Get report on finished scans."""
return engine.getreport(scan_id)
if not scan_id.isdecimal():
return jsonify({"status": "error", "reason": "scan_id must be numeric digits only"})
filepath = f"{APP_BASE_DIR}/results/apivoid_{scan_id}.json"

if not os.path.exists(filepath):
return jsonify({"status": "error", "reason": f"report file for scan_id '{scan_id}' not found"})

return send_from_directory(f"{APP_BASE_DIR}/results/", f"apivoid_{scan_id}.json")
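A quick way to exercise the updated endpoint once the engine is running (a sketch for illustration only: it assumes the engine listens on localhost:5022 as in the README `docker run` command, and that a scan with id 1 has already finished):

```
import requests

# Fetch the stored report for a finished scan (scan id 1 is an assumption).
resp = requests.get("http://localhost:5022/engines/apivoid/getreport/1")
resp.raise_for_status()
data = resp.json()  # either the stored apivoid_1.json report or an error payload
print(data.get("status", "ok"), len(data.get("issues", [])))
```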


def _loadconfig():
@@ -213,7 +220,7 @@ def start_scan():
if len(engine.scans) == APP_MAXSCANS:
res.update({
"status": "error",
"reason": "Scan refused: max concurrent active scans reached ({})".format(APP_MAXSCANS)
"reason": f"Scan refused: max concurrent active scans reached ({APP_MAXSCANS})"
})
return jsonify(res)

@@ -223,7 +230,7 @@ def start_scan():
res.update({
"status": "refused",
"details": {
"reason": "bad scanner status {}".format(engine.scanner['status'])
"reason": f"Bad scanner status {engine.scanner['status']}"
}})
return jsonify(res)

@@ -259,7 +266,7 @@ def start_scan():
if asset["datatype"] not in engine.scanner["allowed_asset_types"]:
res.update({
"status": "error",
"reason": "asset '{}' datatype '{}' not supported".format(asset["value"],asset["datatype"])
"reason": "asset '{}' has unsupported datatype '{}'".format(asset["value"], asset["datatype"])
})
return jsonify(res)

@@ -271,6 +278,12 @@ def start_scan():
if asset["datatype"] == "url":
parsed_uri = urlparse(asset["value"])
asset["value"] = parsed_uri.netloc

# Check the netloc type
if is_valid_ip(asset["value"]):
asset["datatype"] = "ip"
else:
asset["datatype"] = "domain"

assets.append(asset["value"])

@@ -280,7 +293,7 @@ def start_scan():
res.update({
"status": "refused",
"details": {
"reason": "scan '{}' already launched".format(data['scan_id']),
"reason": f"scan '{data['scan_id']}' already launched",
}
})
return jsonify(res)
@@ -310,7 +323,7 @@ def start_scan():

if 'domain_reputation' in scan['options'].keys() and data['options']['domain_reputation']:
for asset in data["assets"]:
if asset["datatype"] == "domain":
if asset["datatype"] in ["domain", "fqdn"]:
th = this.pool.submit(_scan_domain_reputation, scan_id, asset["value"])
engine.scans[scan_id]['futures'].append(th)

@@ -330,7 +343,7 @@ def _scan_ip_reputation(scan_id, asset):
try:
engine.scans[scan_id]["findings"][asset]['ip_reputation'] = get_report_ip_reputation(scan_id, asset, apikey)
except Exception as ex:
app.logger.error("_scan_ip_reputation failed {}".format(re.sub(r'/' + apikey + '/', r'/***/', ex.__str__())))
app.logger.error("_scan_ip_reputation failed: {}".format(re.sub(r'/' + apikey + '/', r'/***/', ex.__str__())))
return False

return True
@@ -343,7 +356,7 @@ def _scan_domain_reputation(scan_id, asset):
try:
engine.scans[scan_id]["findings"][asset]['domain_reputation'] = get_report_domain_reputation(scan_id, asset, apikey)
except Exception as ex:
app.logger.error("_scan_domain_reputation failed {}".format(re.sub(r'/' + apikey + '/', r'/***/', ex.__str__())))
app.logger.error("_scan_domain_reputation failed: {}".format(re.sub(r'/' + apikey + '/', r'/***/', ex.__str__())))
return False

return True
@@ -365,7 +378,7 @@ def get_report_ip_reputation(scan_id, asset, apikey):
response = requests.get(scan_url)
# print(response.content)
except Exception as ex:
app.logger.error("get_report_ip_reputation failed {}".format(re.sub(r'/' + apikey + '/', r'/***/', ex.__str__())))
app.logger.error("get_report_ip_reputation failed: {}".format(re.sub(r'/' + apikey + '/', r'/***/', ex.__str__())))
return []

return response.content
@@ -380,7 +393,7 @@ def get_report_domain_reputation(scan_id, asset, apikey):
response = requests.get(scan_url)
# print(response.content)
except Exception as ex:
app.logger.error("get_report_domain_reputation failed {}".format(re.sub(r'/' + apikey + '/', r'/***/', ex.__str__())))
app.logger.error("get_report_domain_reputation failed: {}".format(re.sub(r'/' + apikey + '/', r'/***/', ex.__str__())))
return []

return response.content
@@ -400,18 +413,34 @@ def _parse_results(scan_id):
ts = int(time.time() * 1000)

for asset in engine.scans[scan_id]["findings"]:

if 'ip_reputation' in engine.scans[scan_id]["findings"][asset].keys():
res = json.loads(engine.scans[scan_id]["findings"][asset]['ip_reputation'])

if 'data' in res:
severity = "info"
report_summary = ""
try:
detections = res["data"]["report"]["blacklists"]["detections"]
risk_score = res["data"]["report"]["risk_score"]["result"]
if risk_score == 100:
severity = "high"
elif risk_score >= 70:
severity = "medium"

report_summary = f" (detect:{detections}, risk:{risk_score})"
except Exception:
pass

nb_vulns['info'] += 1
issues.append({
"issue_id": len(issues) + 1,
"severity": "info", "confidence": "certain",
"severity": severity, "confidence": "certain",
"target": {
"addr": [asset],
"protocol": "domain"
},
"title": "IP Reputation Check",
"title": "IP Reputation Check"+report_summary,
"description": f"IP Reputation Check for '{asset}'\n\nSee raw_data",
"solution": "n/a",
"metadata": {
@@ -421,18 +450,34 @@ def _parse_results(scan_id):
"raw": res['data'],
"timestamp": ts
})

if 'domain_reputation' in engine.scans[scan_id]["findings"][asset].keys():
res = json.loads(engine.scans[scan_id]["findings"][asset]['domain_reputation'])

if 'data' in res:
severity = "info"
report_summary = ""
try:
detections = res["data"]["report"]["blacklists"]["detections"]
risk_score = res["data"]["report"]["risk_score"]["result"]
if risk_score == 100:
severity = "high"
elif risk_score >= 70:
severity = "medium"

report_summary = f" (detect:{detections}, risk:{risk_score})"
except Exception:
pass

nb_vulns['info'] += 1
issues.append({
"issue_id": len(issues) + 1,
"severity": "info", "confidence": "certain",
"severity": severity, "confidence": "certain",
"target": {
"addr": [asset],
"protocol": "domain"
},
"title": "Domain Reputation Check",
"title": "Domain Reputation Check"+report_summary,
"description": f"Domain Reputation Check for '{asset}'\n\nSee raw_data",
"solution": "n/a",
"metadata": {
@@ -469,8 +514,10 @@ def getfindings(scan_id):
# check if the scan is finished
status_scan(scan_id)
if engine.scans[scan_id]['status'] != "FINISHED":
res.update({"status": "error",
"reason": f"scan_id '{scan_id}' not finished (status={engine.scans[scan_id]['status']})"})
res.update({
"status": "error",
"reason": f"scan_id '{scan_id}' not finished (status={engine.scans[scan_id]['status']})"
})
return jsonify(res)

status, issues, summary = _parse_results(scan_id)
@@ -484,15 +531,30 @@ def getfindings(scan_id):
}

scan.update(status)

# remove the scan from the active scan list
clean_scan(scan_id)

res.update({"scan": scan, "summary": summary, "issues": issues})

res_data = {"scan": scan, "summary": summary, "issues": issues}

# Store the findings in a file
with open(f"{APP_BASE_DIR}/results/apivoid_{scan_id}.json", 'w') as report_file:
Check failure · Code scanning / SonarCloud: "I/O function calls should not be vulnerable to path injection attacks. Change this code to not construct the path from user-controlled data." See https://sonarcloud.io/project/issues?id=Patrowl_PatrowlEngines&issues=AYePSKOkcZCxPupmLgzU&open=AYePSKOkcZCxPupmLgzU&pullRequest=299 (a possible hardening is sketched after getfindings() below).
json.dump(res_data, report_file, default=_json_serial)

# # Remove the scan from the active scan list
# clean_scan(scan_id)

# Prepare response
res.update(res_data)
res.update(status)
return jsonify(res)
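On the SonarCloud warning above: getreport() already restricts scan_id to decimal digits, but the report path written in getfindings() is still built directly from request data. One possible hardening, sketched here only (the _report_path helper is hypothetical and not part of this PR; it reuses APP_BASE_DIR from the module), is to validate the id and resolve the final path against the results directory before writing:

```
import os

def _report_path(scan_id):
    """Build a report path that cannot escape the results directory."""
    if not str(scan_id).isdecimal():
        raise ValueError("scan_id must contain digits only")
    results_dir = os.path.realpath(f"{APP_BASE_DIR}/results")
    path = os.path.realpath(os.path.join(results_dir, f"apivoid_{scan_id}.json"))
    if os.path.dirname(path) != results_dir:
        raise ValueError("invalid report path")
    return path

# Usage in getfindings(), replacing the direct f-string path:
# with open(_report_path(scan_id), 'w') as report_file:
#     json.dump(res_data, report_file, default=_json_serial)
```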


def is_valid_ip(ip):
try:
IPAddress(ip)
except (TypeError, ValueError, AddrFormatError):
return False
return True


def is_valid_subnet(subnet):
try:
IPNetwork(subnet)
@@ -509,6 +571,17 @@ def get_ips_from_subnet(subnet):
return [str(ip) for ip in IPNetwork(subnet)]


def _json_serial(obj):
"""
JSON serializer for objects not serializable by the default json encoder.
Used for datetime serialization when the results are written to a file.
"""
if isinstance(obj, (datetime.datetime, datetime.date)):
return obj.isoformat()
raise TypeError("Type not serializable")


@app.before_first_request
def main():
"""First function called."""
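One further observation on _parse_results(): the risk_score-to-severity mapping is duplicated for the IP and domain branches. If it grows, it could be factored into a small helper along these lines (a sketch only; _severity_from_report is a made-up name, and the thresholds are the ones used in this PR):

```
def _severity_from_report(report_data):
    """Map an APIVoid report to (severity, summary) using the PR's thresholds."""
    severity, summary = "info", ""
    try:
        detections = report_data["data"]["report"]["blacklists"]["detections"]
        risk_score = report_data["data"]["report"]["risk_score"]["result"]
        if risk_score == 100:
            severity = "high"
        elif risk_score >= 70:
            severity = "medium"
        summary = f" (detect:{detections}, risk:{risk_score})"
    except (KeyError, TypeError):
        pass
    return severity, summary

# Both branches would then reduce to:
# severity, report_summary = _severity_from_report(res)
```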