Merge pull request #304 from Patrowl/develop
1.5.11 Update
sebastien-powl authored Apr 27, 2023
2 parents c497a6f + 65b27d6 commit 6e1a76f
Showing 15 changed files with 325 additions and 169 deletions.
2 changes: 1 addition & 1 deletion VERSION
@@ -1 +1 @@
1.5.10
1.5.11
2 changes: 1 addition & 1 deletion engines/apivoid/Dockerfile
@@ -1,5 +1,5 @@
FROM alpine:3.16.3
LABEL Name="APIVoid\ \(Patrowl engine\)" Version="1.4.31"
LABEL Name="APIVoid\ \(Patrowl engine\)" Version="1.4.32"

# Create the target repo
RUN mkdir -p /opt/patrowl-engines/apivoid
4 changes: 2 additions & 2 deletions engines/apivoid/README.md
@@ -19,8 +19,8 @@ set your APIKey in APIVOID_APIKEY environment variable

## Start with Docker
```
docker build . -t engine-apivoid
docker run -p5022:5022 -e APIVOID_APIKEY=XXXXX engine-apivoid
docker build . -t patrowl/engine-apivoid
docker run -p5022:5022 -e APIVOID_APIKEY=XXXXX patrowl/engine-apivoid
```

## Testing URLs
2 changes: 1 addition & 1 deletion engines/apivoid/VERSION
@@ -1 +1 @@
1.4.31
1.4.32
4 changes: 2 additions & 2 deletions engines/apivoid/__init__.py
@@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-

__title__ = 'patrowl_engine_apivoid'
__version__ = '1.4.27'
__version__ = '1.4.32'
__author__ = 'Nicolas MATTIOCCO'
__license__ = 'AGPLv3'
__copyright__ = 'Copyright (C) 2020-2022 Nicolas Mattiocco - @MaKyOtOx'
__copyright__ = 'Copyright (C) 2020-2023 Nicolas Mattiocco - @MaKyOtOx'
4 changes: 2 additions & 2 deletions engines/apivoid/apivoid.json.sample
@@ -1,8 +1,8 @@
{
"name": "APIVOID",
"version": "1.4.28",
"version": "1.4.32",
"description": "APIVoid reputation API",
"allowed_asset_types": ["domain", "ip", "url", "ip-subnet"],
"allowed_asset_types": ["domain", "fqdn", "ip", "url", "ip-subnet"],
"apikeys": [
"xx",
"yy"
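The sample config now whitelists `fqdn` alongside `domain`, `ip`, `url` and `ip-subnet`, and the engine code below treats `fqdn` the same as `domain` for reputation lookups. As a rough illustration (the `value`/`datatype` field names come from `start_scan()` further down; the concrete values are invented):

```
# Hypothetical asset list as start_scan() consumes it; "fqdn" is newly accepted.
assets = [
    {"value": "8.8.8.8", "datatype": "ip"},                      # -> _scan_ip_reputation
    {"value": "www.example.com", "datatype": "fqdn"},            # -> treated like "domain"
    {"value": "https://example.com/login", "datatype": "url"},   # -> reduced to its netloc first
]
```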
123 changes: 98 additions & 25 deletions engines/apivoid/engine-apivoid.py
@@ -7,12 +7,13 @@
import json
import time
import requests
import datetime
import re
from urllib.parse import urlparse
from flask import Flask, request, jsonify
from flask import Flask, request, jsonify, send_from_directory
from concurrent.futures import ThreadPoolExecutor
from ratelimit import limits, sleep_and_retry
from netaddr import IPNetwork
from netaddr import IPNetwork, IPAddress
from netaddr.core import AddrFormatError

from PatrowlEnginesUtils.PatrowlEngine import PatrowlEngine
@@ -25,7 +26,7 @@
APP_MAXSCANS = int(os.environ.get('APP_MAXSCANS', 25))
APP_ENGINE_NAME = "apivoid"
APP_BASE_DIR = os.path.dirname(os.path.realpath(__file__))
VERSION = "1.4.28"
VERSION = "1.4.32"

engine = PatrowlEngine(
app=app,
@@ -173,8 +174,14 @@ def stop_scan(scan_id):

@app.route('/engines/apivoid/getreport/<scan_id>')
def getreport(scan_id):
"""Get report on finished scans."""
return engine.getreport(scan_id)
if not scan_id.isdecimal():
return jsonify({"status": "error", "reason": "scan_id must be numeric digits only"})
filepath = f"{APP_BASE_DIR}/results/apivoid_{scan_id}.json"

if not os.path.exists(filepath):
return jsonify({"status": "error", "reason": f"report file for scan_id '{scan_id}' not found"})

return send_from_directory(f"{APP_BASE_DIR}/results/", f"apivoid_{scan_id}.json")


def _loadconfig():
@@ -213,7 +220,7 @@ def start_scan():
if len(engine.scans) == APP_MAXSCANS:
res.update({
"status": "error",
"reason": "Scan refused: max concurrent active scans reached ({})".format(APP_MAXSCANS)
"reason": f"Scan refused: max concurrent active scans reached ({APP_MAXSCANS})"
})
return jsonify(res)

@@ -223,7 +230,7 @@ def start_scan():
res.update({
"status": "refused",
"details": {
"reason": "bad scanner status {}".format(engine.scanner['status'])
"reason": f"Bad scanner status {engine.scanner['status']}"
}})
return jsonify(res)

@@ -259,7 +266,7 @@ def start_scan():
if asset["datatype"] not in engine.scanner["allowed_asset_types"]:
res.update({
"status": "error",
"reason": "asset '{}' datatype '{}' not supported".format(asset["value"],asset["datatype"])
"reason": "asset '{}' has unsupported datatype '{}'".format(asset["value"], asset["datatype"])
})
return jsonify(res)

@@ -271,6 +278,12 @@ def start_scan():
if asset["datatype"] == "url":
parsed_uri = urlparse(asset["value"])
asset["value"] = parsed_uri.netloc

# Check the netloc type
if is_valid_ip(asset["value"]):
asset["datatype"] = "ip"
else:
asset["datatype"] = "domain"

assets.append(asset["value"])

@@ -280,7 +293,7 @@ def start_scan():
res.update({
"status": "refused",
"details": {
"reason": "scan '{}' already launched".format(data['scan_id']),
"reason": f"scan '{data['scan_id']}' already launched",
}
})
return jsonify(res)
@@ -310,7 +323,7 @@ def start_scan():

if 'domain_reputation' in scan['options'].keys() and data['options']['domain_reputation']:
for asset in data["assets"]:
if asset["datatype"] == "domain":
if asset["datatype"] in ["domain", "fqdn"]:
th = this.pool.submit(_scan_domain_reputation, scan_id, asset["value"])
engine.scans[scan_id]['futures'].append(th)

@@ -330,7 +343,7 @@ def _scan_ip_reputation(scan_id, asset):
try:
engine.scans[scan_id]["findings"][asset]['ip_reputation'] = get_report_ip_reputation(scan_id, asset, apikey)
except Exception as ex:
app.logger.error("_scan_ip_reputation failed {}".format(re.sub(r'/' + apikey + '/', r'/***/', ex.__str__())))
app.logger.error("_scan_ip_reputation failed: {}".format(re.sub(r'/' + apikey + '/', r'/***/', ex.__str__())))
return False

return True
@@ -343,7 +356,7 @@ def _scan_domain_reputation(scan_id, asset):
try:
engine.scans[scan_id]["findings"][asset]['domain_reputation'] = get_report_domain_reputation(scan_id, asset, apikey)
except Exception as ex:
app.logger.error("_scan_domain_reputation failed {}".format(re.sub(r'/' + apikey + '/', r'/***/', ex.__str__())))
app.logger.error("_scan_domain_reputation failed: {}".format(re.sub(r'/' + apikey + '/', r'/***/', ex.__str__())))
return False

return True
@@ -365,7 +378,7 @@ def get_report_ip_reputation(scan_id, asset, apikey):
response = requests.get(scan_url)
# print(response.content)
except Exception as ex:
app.logger.error("get_report_ip_reputation failed {}".format(re.sub(r'/' + apikey + '/', r'/***/', ex.__str__())))
app.logger.error("get_report_ip_reputation failed: {}".format(re.sub(r'/' + apikey + '/', r'/***/', ex.__str__())))
return []

return response.content
@@ -380,7 +393,7 @@ def get_report_domain_reputation(scan_id, asset, apikey):
response = requests.get(scan_url)
# print(response.content)
except Exception as ex:
app.logger.error("get_report_domain_reputation failed {}".format(re.sub(r'/' + apikey + '/', r'/***/', ex.__str__())))
app.logger.error("get_report_domain_reputation failed: {}".format(re.sub(r'/' + apikey + '/', r'/***/', ex.__str__())))
return []

return response.content
@@ -400,18 +413,34 @@ def _parse_results(scan_id):
ts = int(time.time() * 1000)

for asset in engine.scans[scan_id]["findings"]:

if 'ip_reputation' in engine.scans[scan_id]["findings"][asset].keys():
res = json.loads(engine.scans[scan_id]["findings"][asset]['ip_reputation'])

if 'data' in res:
severity = "info"
report_summary = ""
try:
detections = res["data"]["report"]["blacklists"]["detections"]
risk_score = res["data"]["report"]["risk_score"]["result"]
if risk_score == 100:
severity = "high"
elif risk_score >= 70:
severity = "medium"

report_summary = f" (detect:{detections}, risk:{risk_score})"
except Exception:
pass

nb_vulns['info'] += 1
issues.append({
"issue_id": len(issues) + 1,
"severity": "info", "confidence": "certain",
"severity": severity, "confidence": "certain",
"target": {
"addr": [asset],
"protocol": "domain"
},
"title": "IP Reputation Check",
"title": "IP Reputation Check"+report_summary,
"description": f"IP Reputation Check for '{asset}'\n\nSee raw_data",
"solution": "n/a",
"metadata": {
@@ -421,18 +450,34 @@ def _parse_results(scan_id):
"raw": res['data'],
"timestamp": ts
})

if 'domain_reputation' in engine.scans[scan_id]["findings"][asset].keys():
res = json.loads(engine.scans[scan_id]["findings"][asset]['domain_reputation'])

if 'data' in res:
severity = "info"
report_summary = ""
try:
detections = res["data"]["report"]["blacklists"]["detections"]
risk_score = res["data"]["report"]["risk_score"]["result"]
if risk_score == 100:
severity = "high"
elif risk_score >= 70:
severity = "medium"

report_summary = f" (detect:{detections}, risk:{risk_score})"
except Exception:
pass

nb_vulns['info'] += 1
issues.append({
"issue_id": len(issues) + 1,
"severity": "info", "confidence": "certain",
"severity": severity, "confidence": "certain",
"target": {
"addr": [asset],
"protocol": "domain"
},
"title": "Domain Reputation Check",
"title": "Domain Reputation Check"+report_summary,
"description": f"Domain Reputation Check for '{asset}'\n\nSee raw_data",
"solution": "n/a",
"metadata": {
@@ -469,8 +514,10 @@ def getfindings(scan_id):
# check if the scan is finished
status_scan(scan_id)
if engine.scans[scan_id]['status'] != "FINISHED":
res.update({"status": "error",
"reason": f"scan_id '{scan_id}' not finished (status={engine.scans[scan_id]['status']})"})
res.update({
"status": "error",
"reason": f"scan_id '{scan_id}' not finished (status={engine.scans[scan_id]['status']})"
})
return jsonify(res)

status, issues, summary = _parse_results(scan_id)
@@ -484,15 +531,30 @@ def getfindings(scan_id):
}

scan.update(status)

# remove the scan from the active scan list
clean_scan(scan_id)

res.update({"scan": scan, "summary": summary, "issues": issues})

res_data = {"scan": scan, "summary": summary, "issues": issues}

# Store the findings in a file
with open(f"{APP_BASE_DIR}/results/apivoid_{scan_id}.json", 'w') as report_file:
json.dump(res_data, report_file, default=_json_serial)

# # Remove the scan from the active scan list
# clean_scan(scan_id)

# Prepare response
res.update(res_data)
res.update(status)
return jsonify(res)


def is_valid_ip(ip):
try:
IPAddress(ip)
except (TypeError, ValueError, AddrFormatError):
return False
return True


def is_valid_subnet(subnet):
try:
IPNetwork(subnet)
@@ -509,6 +571,17 @@ def get_ips_from_subnet(subnet):
return [str(ip) for ip in IPNetwork(subnet)]


def _json_serial(obj):
"""
JSON serializer for objects not serializable by default json code
Used for datetime serialization when the results are written in file
"""
if isinstance(obj, datetime.datetime) or isinstance(obj, datetime.date):
serial = obj.isoformat()
return serial
raise TypeError("Type not serializable")


@app.before_first_request
def main():
"""First function called."""
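Second, the severity logic added to `_parse_results()` is duplicated verbatim for the IP and domain branches; factored out, it amounts to the following sketch (thresholds and the summary format are taken from the hunks above; the helper name is ours):

```
def severity_from_risk_score(risk_score):
    """Map an APIVoid risk_score (0-100) to a finding severity."""
    if risk_score == 100:
        return "high"
    if risk_score >= 70:
        return "medium"
    return "info"

# The issue title is suffixed with a summary such as " (detect:3, risk:70)", built from
# data.report.blacklists.detections and data.report.risk_score.result; parsing errors
# fall back silently to severity "info" with no suffix.
```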
2 changes: 1 addition & 1 deletion engines/nmap/Dockerfile
@@ -1,5 +1,5 @@
FROM alpine:3.16.3
LABEL Name="Nmap\ \(Patrowl engine\)" Version="1.4.41"
LABEL Name="Nmap\ \(Patrowl engine\)" Version="1.4.42"

# Set the working directory
RUN mkdir -p /opt/patrowl-engines/nmap
2 changes: 1 addition & 1 deletion engines/nmap/VERSION
@@ -1 +1 @@
1.4.41
1.4.42
2 changes: 1 addition & 1 deletion engines/nmap/engine-nmap.py
@@ -957,7 +957,7 @@ def getreport(scan_id):
f"{BASE_DIR}/results",
f"nmap_{scan_id}.json",
mimetype='application/json',
attachment_filename=f"nmap_{scan_id}.json",
download_name=f"nmap_{scan_id}.json",
as_attachment=True
)

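The only change here swaps `attachment_filename` for `download_name`: Flask 2.x renamed that argument of `send_file`/`send_from_directory`, and the old keyword was deprecated and later removed. A minimal sketch of the updated call, with the results directory and route path assumed rather than taken from this diff:

```
from flask import Flask, send_from_directory

app = Flask(__name__)
BASE_DIR = "/opt/patrowl-engines/nmap"  # assumption: engine install path

@app.route("/engines/nmap/getreport/<scan_id>")  # route path assumed
def getreport(scan_id):
    return send_from_directory(
        f"{BASE_DIR}/results",
        f"nmap_{scan_id}.json",
        mimetype="application/json",
        download_name=f"nmap_{scan_id}.json",  # was attachment_filename before Flask 2.0
        as_attachment=True,
    )
```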
27 changes: 10 additions & 17 deletions engines/owl_dns/Dockerfile
@@ -1,21 +1,19 @@
FROM ubuntu:20.04
LABEL Name="Patrowl\ DNS\ \(Patrowl engine\)" Version="1.5.0"
FROM alpine:3.16.3
LABEL Name="Patrowl\ DNS\ \(Patrowl engine\)" Version="1.5.1"

# Install dependencies
RUN apt-get update && \
apt-get install -y --no-install-recommends python3-dnspython \
python3-geoip python3-whois python3-requests python3-ssdeep \
python3-pip python3-setuptools python3-dev git wget locales && \
locale-gen en_US.UTF-8
ENV LC_ALL=en_US.UTF-8
ENV LANG=en_US.UTF-8
ENV LANGUAGE=en_US.UTF-8
RUN apk add --update --no-cache \
python3 python3-dev py3-pip \
git \
&& rm -rf /var/cache/apk/*

# Create the target repo
RUN mkdir -p /opt/patrowl-engines/owl_dns
RUN mkdir -p /opt/patrowl-engines/owl_dns/results
RUN mkdir -p /opt/patrowl-engines/owl_dns/external-libs
WORKDIR /opt/patrowl-engines/owl_dns/

# Set the working directory to /opt/<engine_name>
WORKDIR /opt/patrowl-engines/owl_dns

# Copy the current directory contents into the container at /
COPY __init__.py .
@@ -33,17 +31,12 @@ WORKDIR /opt/patrowl-engines/owl_dns/external-libs/Sublist3r
RUN pip3 install --trusted-host pypi.python.org -r requirements.txt
WORKDIR /opt/patrowl-engines/owl_dns/external-libs
RUN git clone https://github.com/elceef/dnstwist
WORKDIR /opt/patrowl-engines/owl_dns/external-libs/dnstwist
RUN pip3 install --trusted-host pypi.python.org -r requirements.txt

# Set the working directory to /opt/<engine_name>
WORKDIR /opt/patrowl-engines/owl_dns

# Install python modules for engine
WORKDIR /opt/patrowl-engines/owl_dns/
RUN pip3 install --upgrade pip
RUN pip3 install --trusted-host pypi.python.org -r requirements.txt


# TCP port exposed by the container (NAT)
EXPOSE 5006
