diff --git a/README.md b/README.md
index 8ad7ee8..4a73b51 100644
--- a/README.md
+++ b/README.md
@@ -98,7 +98,7 @@ There are some requirements for this to be run successfully.
 
 ### Installation
 
-You can download the latest [tar.gz](https://github.com/ekultek/zeus-scanner/tarball/master), the latest [zip](https://github.com/ekultek/zeus-scanner/zipball/master), or you can find the current stable release [here](https://github.com/Ekultek/Zeus-Scanner/releases/tag/v1.2). Alternatively you can install the latest development version by following the instructions that best match your operating system:
+You can download the latest [tar.gz](https://github.com/ekultek/zeus-scanner/tarball/master), the latest [zip](https://github.com/ekultek/zeus-scanner/zipball/master), or you can find the current stable release [here](https://github.com/Ekultek/Zeus-Scanner/releases/tag/v1.3). Alternatively you can install the latest development version by following the instructions that best match your operating system:
 
 **_NOTE: (optional but highly advised)_** add sqlmap and nmap to your environment PATH by moving them to `/usr/bin` or by adding them to the PATH via terminal
 
diff --git a/etc/checksum/md5sum.md5 b/etc/checksum/md5sum.md5
index 181a968..ee57330 100644
--- a/etc/checksum/md5sum.md5
+++ b/etc/checksum/md5sum.md5
@@ -1,4 +1,4 @@
-a48339bbd1bbcd2e27650fd930ebfa4c ./zeus.py
+e4ea2d20dd1e0ec58e68159689e2cb74 ./zeus.py
 4b32db388e8acda35570c734d27c950c ./etc/scripts/launch_sqlmap.sh
 6ad5f22ec4a6f8324bfb1b01ab6d51ec ./etc/scripts/cleanup.sh
 74d7bee13890a9dd279bb857591647ce ./etc/scripts/reinstall.sh
@@ -55,6 +55,7 @@ bf5285dc059c761e1719bc734ae8504f ./lib/firewall/varnish.py
 cb45428e92485b759ff5cb46a0be9c73 ./lib/firewall/yunsuo.py
 bbd8b4c6100070d420d48dc7dfc297eb ./lib/firewall/webknight.py
 95b908a21c0ff456ae59df4c6c189c54 ./lib/firewall/wallarm.py
+8fc8d62377bebbfa7ca4d70a79eab115 ./lib/firewall/bigip.py
 6ea65a0160c21e144e92334acc2e3667 ./lib/firewall/anquanbao.py
 7f4e3ba2f459926fc77bcddc17b933aa ./lib/firewall/generic.py
 cf236a16c7869282f55dd4c5ad6347a5 ./lib/attacks/gist_lookup/__init__.py
@@ -68,15 +69,16 @@ d2846e039fefee741db24dd64f7bd50e ./lib/attacks/admin_panel_finder/__init__.py
 b5cd5e913cc62112776153bdf0f60fa4 ./lib/attacks/xss_scan/__init__.py
 63c45495ec1ed2e98946bef514d8805e ./lib/attacks/nmap_scan/__init__.py
 216999fa0e84866d5c1d96d5676034e4 ./lib/attacks/nmap_scan/nmap_opts.py
-7267f30f11ed3d096e222da949da5bea ./lib/header_check/__init__.py
+b6f5f8e43c1e480329b66e193bd91751 ./lib/header_check/__init__.py
 9a2bb0d52f64e12d5a63ce83874ea74a ./lib/core/common.py
 1faa2b5dfad6eb538bbfe42942d2a9da ./lib/core/errors.py
 d41d8cd98f00b204e9800998ecf8427e ./lib/core/__init__.py
-bf158550d8f51f4841fd1b003cb71c55 ./lib/core/settings.py
+4a87e14ed7a070ae15b1ed7ac7ceaecc ./lib/core/settings.py
+4b507b34677b414b8338475fea2c012a ./lib/core/cache.py
 9a02e5b913d210350545ac26510a63c9 ./var/search/__init__.py
 8402f23a2586b6f684fb1c3c04c4386f ./var/search/selenium_search.py
 63ba132381a0cc2d7629852bd5e4aa17 ./var/search/pgp_search.py
 d41d8cd98f00b204e9800998ecf8427e ./var/__init__.py
 d41d8cd98f00b204e9800998ecf8427e ./var/auto_issue/__init__.py
 0c11c16126baf789388a661bbbefb149 ./var/auto_issue/github.py
-df0c4467256fb6fb7ab6c40209e4ab6d ./var/blackwidow/__init__.py
\ No newline at end of file
+7608a7a8195d6d66ebae7664f25639fd ./var/blackwidow/__init__.py
\ No newline at end of file
diff --git a/lib/core/cache.py b/lib/core/cache.py
new file mode 100644
index 0000000..5c8cce9
--- /dev/null
+++ b/lib/core/cache.py
@@ -0,0 +1,27 @@
+from functools import wraps
+
+import lib.core.settings
+
+
+def cache(func):
+    """
+    if we come across the same URL more than once, it will be cached into memory
+    so that we don't have to test it again
+    """
+    __cache = {}
+
+    @wraps(func)
+    def func_wrapper(*args, **kwargs):
+        if args in __cache:
+            lib.core.settings.logger.warning(lib.core.settings.set_color(
+                "cached detection has shown that the target URL WAF/IPS/IDS is '{}'...".format(
+                    __cache[args]
+                ), level=35
+            ))
+            return __cache[args]
+        else:
+            __to_cache = func(*args, **kwargs)
+            __cache[args] = __to_cache
+            return __to_cache
+
+    return func_wrapper
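The new `lib/core/cache.py` above is a plain memoization decorator: results are keyed on the wrapped function's positional arguments, so a URL that has already been fingerprinted is answered from memory instead of being probed again. A minimal standalone sketch of the same pattern with the logging stripped out (`slow_detect` is a hypothetical stand-in for `detect_protection`):

```python
from functools import wraps


def cache(func):
    _cache = {}

    @wraps(func)
    def func_wrapper(*args, **kwargs):
        # keyed on positional args only, exactly like lib/core/cache.py,
        # so calls differing only in keyword arguments share one entry
        if args in _cache:
            return _cache[args]
        result = func(*args, **kwargs)
        _cache[args] = result
        return result

    return func_wrapper


@cache
def slow_detect(url):
    print("running full WAF/IPS/IDS detection against {}".format(url))
    return "BIG-IP Application Security Manager (F5 Networks)"


slow_detect("http://example.com")  # performs the real check
slow_detect("http://example.com")  # served from the in-memory cache
```

One consequence of keying on `args` alone: a second call for the same URL with different `verbose`, `agent`, or `proxy` keyword arguments is also served from the cache.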
diff --git a/lib/core/settings.py b/lib/core/settings.py
index a3aa1ba..227f8a3 100644
--- a/lib/core/settings.py
+++ b/lib/core/settings.py
@@ -46,7 +46,7 @@ ISSUE_LINK = "https://github.com/ekultek/zeus-scanner/issues"
 
 # current version
-VERSION = "1.2.41.{}".format(PATCH_ID)
+VERSION = "1.3.{}".format(PATCH_ID)
 
 # colors to output depending on the version
 VERSION_TYPE_COLORS = {"dev": 33, "stable": 92, "other": 30}
 
diff --git a/lib/firewall/bigip.py b/lib/firewall/bigip.py
new file mode 100644
index 0000000..5f8f114
--- /dev/null
+++ b/lib/firewall/bigip.py
@@ -0,0 +1,19 @@
+import re
+
+from lib.core.common import HTTP_HEADER
+
+
+__item__ = "BIG-IP Application Security Manager (F5 Networks)"
+
+
+def detect(content, **kwargs):
+    headers = kwargs.get("headers", {})
+    detection_schema = (
+        re.compile(r"\ATS\w{4,}=", re.I), re.compile(r"BIGip|BipServer", re.I),
+        re.compile(r"\AF5\Z", re.I)
+    )
+    for detection in detection_schema:
+        if detection.search(headers.get(HTTP_HEADER.SERVER, "")) is not None:
+            return True
+        if detection.search(headers.get(HTTP_HEADER.SET_COOKIE, "")) is not None:
+            return True
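Like the other `lib/firewall` plugins, the new `bigip.py` works by running a tuple of fingerprint regexes against the `Server` and `Set-Cookie` response headers. A self-contained sketch of how this detection schema behaves, using plain header-name strings in place of the project's `HTTP_HEADER` constants and an explicit `False` fall-through:

```python
import re

detection_schema = (
    re.compile(r"\ATS\w{4,}=", re.I),      # F5 session cookie, e.g. "TSxxxxxxxx="
    re.compile(r"BIGip|BipServer", re.I),  # "BIGipServer..." persistence cookie
    re.compile(r"\AF5\Z", re.I),           # a bare "F5" Server banner
)


def detect(headers):
    for detection in detection_schema:
        if detection.search(headers.get("Server", "")) is not None:
            return True
        if detection.search(headers.get("Set-Cookie", "")) is not None:
            return True
    return False


print(detect({"Set-Cookie": "BIGipServerpool-web=1677787402.36895.0000; path=/"}))  # True
print(detect({"Server": "nginx"}))                                                  # False
```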
diff --git a/lib/header_check/__init__.py b/lib/header_check/__init__.py
index c4a969d..9800330 100644
--- a/lib/header_check/__init__.py
+++ b/lib/header_check/__init__.py
@@ -1,12 +1,14 @@
 import os
 import re
 import importlib
+import unicodedata
 
 import requests
 from xml.dom import minidom
 from requests.exceptions import ConnectionError
 
 from var.auto_issue.github import request_issue_creation
+from lib.core.cache import cache
 from lib.core.common import (
     write_to_log_file,
     shutdown,
@@ -32,6 +34,7 @@
 )
 
 
+@cache
 def detect_protection(url, **kwargs):
     verbose = kwargs.get("verbose", False)
     agent = kwargs.get("agent", None)
@@ -68,12 +71,13 @@ def detect_protection(url, **kwargs):
 
     html, status, headers = protection_check_req.content, protection_check_req.status_code, protection_check_req.headers
 
-    for dbms in DBMS_ERRORS:  # make sure there are no DBMS errors in the HTML
+    # make sure there are no DBMS errors in the HTML
+    for dbms in DBMS_ERRORS:
         for regex in DBMS_ERRORS[dbms]:
             if re.compile(regex).search(html) is not None:
-                logger.info(set_color(
+                logger.warning(set_color(
                     "it appears that the WAF/IDS/IPS check threw a DBMS error and may be vulnerable "
-                    "to SQL injection attacks. it appears the backend DBMS is '{}'...".format(dbms), level=25
+                    "to SQL injection attacks. it appears the backend DBMS is '{}'...".format(dbms), level=30
                 ))
                 return None
 
@@ -94,7 +98,7 @@ def detect_protection(url, **kwargs):
     if len(retval) >= 2:
         try:
             del retval[retval.index("Generic (Unknown)")]
-        except:
+        except (Exception, IndexError):
             logger.warning(set_color(
                 "multiple firewalls identified ({}), displaying most likely...".format(
                     ", ".join(retval)
@@ -105,9 +109,11 @@ def detect_protection(url, **kwargs):
         logger.warning(set_color(
             "discovered firewall is unknown to Zeus, saving fingerprint to file. "
             "if you know the details or the context of the firewall please create "
-            "an issue with the fingerprint, or a pull request with the script...", level=30
+            "an issue ({}) with the fingerprint, or a pull request with the script...".format(
+                ISSUE_LINK
+            ), level=30
         ))
-        fingerprint = "\n{}".format(
+        fingerprint = "\n{}\n{}\n{}".format(
             status, headers, html
         )
         write_to_log_file(fingerprint, UNKNOWN_FIREWALL_FINGERPRINT_PATH, UNKNOWN_FIREWALL_FILENAME)
@@ -143,12 +149,14 @@ def load_xml_data(path, start_node="header", search_node="name"):
 
 def load_headers(url, **kwargs):
     """
-    load the URL headers
+    load the HTTP headers
    """
     agent = kwargs.get("agent", None)
     proxy = kwargs.get("proxy", None)
     xforward = kwargs.get("xforward", False)
 
+    literal_match = re.compile(r"\\(\X(\d+)?\w+)?", re.I)
+
     if proxy is not None:
         proxy = proxy_string_to_dict(proxy)
     if not xforward:
@@ -182,7 +190,23 @@ def load_headers(url, **kwargs):
         [c for c in req.cookies.itervalues()],
         COOKIE_LOG_PATH, COOKIE_FILENAME.format(replace_http(url))
     )
-    return req.headers
+    retval = {}
+    do_not_use = []
+    http_headers = req.headers
+    for header in http_headers:
+        try:
+            # test to see if there are any unicode errors in the string
+            retval[header] = unicodedata.normalize("NFKD", u"{}".format(http_headers[header])).encode("ascii", errors="ignore")
+        # just to be safe, we're going to put all the possible Unicode errors into a tuple
+        except (UnicodeEncodeError, UnicodeDecodeError, UnicodeError, UnicodeTranslateError, UnicodeWarning):
+            # if there are, we're going to append them to a `do_not_use` list
+            do_not_use.append(header)
+            retval.clear()
+    for head in http_headers:
+        # if the header is in the list, we skip it
+        if head not in do_not_use:
+            retval[head] = http_headers[head]
+    return retval
 
 
 def compare_headers(found_headers, comparable_headers):
@@ -222,6 +246,7 @@ def main_header_check(url, **kwargs):
         "checking if target URL is protected by some kind of WAF/IPS/IDS..."
     ))
     identified = detect_protection(url, proxy=proxy, agent=agent, verbose=verbose, xforward=xforward)
+
     if identified is None:
         logger.info(set_color(
             "no WAF/IDS/IPS has been identified on target URL...", level=25
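The header sanitization added to `load_headers` boils down to one idiom: NFKD-normalize each header value, encode it to ASCII, and silently drop whatever does not survive the round trip. In isolation (`ascii_safe` is a hypothetical helper name; on Python 3 the result is `bytes` rather than a `str` as in the project's Python 2):

```python
# -*- coding: utf-8 -*-
import unicodedata


def ascii_safe(value):
    # decompose accented characters (e.g. "é" -> "e" + combining accent),
    # then throw away anything that is not plain ASCII
    return unicodedata.normalize("NFKD", u"{}".format(value)).encode("ascii", errors="ignore")


print(ascii_safe(u"Caché-Server/2.0"))  # Cache-Server/2.0
```

Note that in the patch itself the normalized values only serve as a probe: the second loop rebuilds `retval` from the raw header values and simply skips any header name that landed in `do_not_use`.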
diff --git a/var/blackwidow/__init__.py b/var/blackwidow/__init__.py
index 015c110..1e787da 100644
--- a/var/blackwidow/__init__.py
+++ b/var/blackwidow/__init__.py
@@ -51,7 +51,8 @@ def test_connection(self):
         make sure the connection is good before you continue
         """
         try:
-            attempt = requests.get(self.url, params=self.headers, proxies=self.proxy)
+            # verify=False will take care of SSLErrors
+            attempt = requests.get(self.url, headers=self.headers, proxies=self.proxy, verify=False)
             if attempt.status_code == 200:
                 return ("ok", None)
             return ("fail", attempt.status_code)
diff --git a/zeus.py b/zeus.py
index 03d6f6d..d1c5327 100755
--- a/zeus.py
+++ b/zeus.py
@@ -285,7 +285,7 @@ def __run_attacks_main(**kwargs):
             ), level=25
         ))
         logger.info(set_color(
-            "checking URL headers..."
+            "checking for HTTP headers..."
         ))
         main_header_check(
             url, verbose=opt.runInVerbose, agent=agent_to_use,
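A side effect of the `verify=False` change in `var/blackwidow/__init__.py`: urllib3 will now emit an `InsecureRequestWarning` for every HTTPS request whose certificate goes unverified. If that log noise is unwanted, it can be silenced once at startup; a sketch, not part of this patch:

```python
import requests

# acknowledge that certificate verification is intentionally disabled
requests.packages.urllib3.disable_warnings(
    requests.packages.urllib3.exceptions.InsecureRequestWarning
)
```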