diff --git a/.github/workflows/auto-release.yml b/.github/workflows/auto-release.yml new file mode 100644 index 000000000..7c068aa2c --- /dev/null +++ b/.github/workflows/auto-release.yml @@ -0,0 +1,60 @@ +name: Update Version and Changelog and Readme + +on: + release: + types: [published] + +jobs: + update-version-and-changelog: + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - name: Checkout code + uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: Get latest release info + id: get_release + uses: actions/github-script@v6 + with: + script: | + const release = await github.rest.repos.getLatestRelease({ + owner: context.repo.owner, + repo: context.repo.repo, + }); + core.setOutput('tag_name', release.data.tag_name); + core.setOutput('body', release.data.body); + + - name: Update version file + run: echo ${{ steps.get_release.outputs.tag_name }} > web/.version + + - name: Update CHANGELOG.md + run: | + echo "# Changelog" > CHANGELOG.md.new + echo "" >> CHANGELOG.md.new + echo "## ${{ steps.get_release.outputs.tag_name }}" >> CHANGELOG.md.new + echo "" >> CHANGELOG.md.new + echo "${{ steps.get_release.outputs.body }}" >> CHANGELOG.md.new + echo "" >> CHANGELOG.md.new + if [ -f CHANGELOG.md ]; then + sed '1,2d' CHANGELOG.md >> CHANGELOG.md.new + fi + mv CHANGELOG.md.new CHANGELOG.md + + - name: Update README.md + run: | + sed -i 's|https://img.shields.io/badge/version-.*-informational|https://img.shields.io/badge/version-${{ steps.get_release.outputs.tag_name }}-informational|g' README.md + + - name: Commit and push changes + run: | + git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com" + git config --local user.name "github-actions[bot]" + git add web/.version CHANGELOG.md README.md + if git diff --staged --quiet; then + echo "No changes to commit" + else + git commit -m "reNgine release: ${{ steps.get_release.outputs.tag_name }} :rocket:" + git push origin HEAD:${{ github.event.repository.default_branch }} + fi diff --git a/README.md b/README.md index e09810b25..fb285f178 100644 --- a/README.md +++ b/README.md @@ -30,9 +30,12 @@ Open Source Security Index - Fastest Growing Open Source Security Projects

+

reNgine 2.2.0 is released!

+

+ reNgine 2.2.0 introduces Bounty Hub, where you can sync and import your HackerOne programs, in-app notifications, Chaos as a subdomain enumeration tool, the ability to upload multiple Nuclei and GF patterns, support for regex in the out-of-scope subdomain configuration, an additional PDF report template, and much more. + Check out What's new in reNgine 2.2.0! +

-

reNgine 2.1.0 is released!

-

Unleash the power of LLM toolkit! Now you can use local LLM models to generate attack surface and vulnerability reports!, Checkout the release-notes!

What is reNgine?

reNgine is your ultimate web application reconnaissance suite, designed to supercharge the recon process for security pros, pentesters, and bug bounty hunters. It is go-to web application reconnaissance suite that's designed to simplify and streamline the reconnaissance process for all the needs of security professionals, penetration testers, and bug bounty hunters. With its highly configurable engines, data correlation capabilities, continuous monitoring, database-backed reconnaissance data, and an intuitive user interface, reNgine redefines how you gather critical information about your target web applications. @@ -58,10 +61,11 @@ Detailed documentation available at [https://rengine.wiki](https://rengine.wiki) * [About reNgine](#about-rengine) * [Workflow](#workflow) * [Features](#features) -* [Scan Engine](#scan-engine) * [Quick Installation](#quick-installation) -* [What's new in reNgine 2.0](#changelog) +* [Installation Video](#installation-video-tutorial) +* [Community-Curated Videos](#community-curated-videos) * [Screenshots](#screenshots) +* [What's new in reNgine](https://github.com/yogeshojha/rengine/releases) * [Contributing](#contributing) * [reNgine Support](#rengine-support) * [Support and Sponsoring](#support-and-sponsoring) @@ -158,126 +162,7 @@ reNgine is not an ordinary reconnaissance suite; it's a game-changer! We've turb * Identification of related domains and related TLDs for targets * Find actionable insights such as Most Common Vulnerability, Most Common CVE ID, Most Vulnerable Target/Subdomain, etc. * You can now use local LLMs for Attack surface identification and vulnerability description (NEW: reNgine 2.1.0) - -![-----------------------------------------------------](https://raw.githubusercontent.com/andreasbm/readme/master/assets/lines/aqua.png) - -## Scan Engine - -```yaml -# Global vars for all tools -# -# custom_headers: ['Foo: bar', 'User-Agent: Anything'] # FFUF, Nuclei, Dalfox, CRL Fuzz, HTTP Crawl, Fetch URL, etc -# enable_http_crawl: true # All tools -# threads: 30 # All tools - -subdomain_discovery: { - 'uses_tools': ['subfinder', 'ctfr', 'sublist3r', 'tlsx', 'oneforall', 'netlas'], # amass-passive, amass-active, All - 'enable_http_crawl': true, - 'threads': 30, - 'timeout': 5, - # 'use_subfinder_config': false, - # 'use_amass_config': false, - # 'amass_wordlist': 'deepmagic.com-prefixes-top50000' -} -http_crawl: { - # 'threads': 30, - # 'follow_redirect': true -} -port_scan: { - 'enable_http_crawl': true, - 'timeout': 5, - # 'exclude_ports': [], - # 'exclude_subdomains': [], - 'ports': ['top-100'], - 'rate_limit': 150, - 'threads': 30, - 'passive': false, - # 'use_naabu_config': false, - # 'enable_nmap': true, - # 'nmap_cmd': '', - # 'nmap_script': '', - # 'nmap_script_args': '' -} -osint: { - 'discover': [ - 'emails', - 'metainfo', - 'employees' - ], - 'dorks': [ - 'login_pages', - 'admin_panels', - 'dashboard_pages', - 'stackoverflow', - 'social_media', - 'project_management', - 'code_sharing', - 'config_files', - 'jenkins', - 'wordpress_files', - 'php_error', - 'exposed_documents', - 'db_files', - 'git_exposed' - ], - # 'custom_dorks': [], - 'intensity': 'normal', - 'documents_limit': 50 -} -dir_file_fuzz: { - 'auto_calibration': true, - 'enable_http_crawl': true, - 'rate_limit': 150, - 'extensions': ['html', 'php','git','yaml','conf','cnf','config','gz','env','log','db','mysql','bak','asp','aspx','txt','conf','sql','json','yml','pdf'], - 'follow_redirect': false, - 'max_time': 0, - 'match_http_status': [200, 204], - 'recursive_level': 2, - 
'stop_on_error': false, - 'timeout': 5, - 'threads': 30, - 'wordlist_name': 'dicc' -} -fetch_url: { - 'uses_tools': ['gospider', 'hakrawler', 'waybackurls', 'katana', 'gau'], - 'remove_duplicate_endpoints': true, - 'duplicate_fields': ['content_length', 'page_title'], - 'enable_http_crawl': true, - 'gf_patterns': ['debug_logic', 'idor', 'interestingEXT', 'interestingparams', 'interestingsubs', 'lfi', 'rce', 'redirect', 'sqli', 'ssrf', 'ssti', 'xss'], - 'ignore_file_extensions': ['png', 'jpg', 'jpeg', 'gif', 'mp4', 'mpeg', 'mp3'], - 'threads': 30, - # 'exclude_subdomains': false -} -vulnerability_scan: { - 'run_nuclei': true, - 'run_dalfox': false, - 'run_crlfuzz': false, - 'run_s3scanner': false, - 'enable_http_crawl': true, - 'concurrency': 50, - 'intensity': 'normal', - 'rate_limit': 150, - 'retries': 1, - 'timeout': 5, - 'fetch_gpt_report': true, - 'nuclei': { - 'use_nuclei_config': false, - 'severities': ['unknown', 'info', 'low', 'medium', 'high', 'critical'], - # 'tags': [], # Nuclei tags (https://github.com/projectdiscovery/nuclei-templates) - # 'templates': [], # Nuclei templates (https://github.com/projectdiscovery/nuclei-templates) - # 'custom_templates': [] # Nuclei custom templates uploaded in reNgine - } -} -waf_detection: { - 'enable_http_crawl': true -} -screenshot: { - 'enable_http_crawl': true, - 'intensity': 'normal', - 'timeout': 10, - 'threads': 40 -} -``` +* BountyHub, a central hub to manage your hackerone targets ![-----------------------------------------------------](https://raw.githubusercontent.com/andreasbm/readme/master/assets/lines/aqua.png) @@ -354,6 +239,12 @@ screenshot: { For Mac, Windows, or other systems, refer to our detailed installation guide [https://reNgine.wiki/install/detailed/](https://reNgine.wiki/install/detailed/) +### Installation Video Tutorial + +If you encounter any issues during installation or prefer a visual guide, one of our community members has created an excellent installation video for Kali Linux installation. You can find it here: [https://www.youtube.com/watch?v=7OFfrU6VrWw](https://www.youtube.com/watch?v=7OFfrU6VrWw) + +Please note: This is community-curated content and is not owned by reNgine. The installation process may change, so please refer to the official documentation for the most up-to-date instructions. + ## Updating 1. To update reNgine, run: @@ -368,11 +259,25 @@ For Mac, Windows, or other systems, refer to our detailed installation guide [ht sudo chmod +x update.sh ``` -## Changelog +![-----------------------------------------------------](https://raw.githubusercontent.com/andreasbm/readme/master/assets/lines/aqua.png) + +## Community-Curated Videos + +reNgine has a vibrant community that often creates helpful content about installation, features, and usage. Below is a collection of community-curated videos that you might find useful. Please note that these videos are not official reNgine content, and the information they contain may become outdated as reNgine evolves. + +Always refer to the official documentation for the most up-to-date and accurate information. If you've created a video about reNgine and would like it featured here, please send a pull request updating this table. 
-For the latest updates and changes, please check our [changelog.](https://rengine.wiki/changelog/) +| Video Title | Language | Publisher | Date | Link | +|-------------|----------|----------|------|------| +| reNgine Installation on Kali Linux | English | Secure the Cyber World | 2024-02-29 | [Watch](https://www.youtube.com/watch?v=7OFfrU6VrWw) | +| Resultados do ReNgine - Automação para Recon | Portuguese | Guia Anônima | 2023-04-18 | [Watch](https://www.youtube.com/watch?v=6aNvDy1FzIM) | +| reNgine Introduction | Moroccan Arabic | Th3 Hacker News Bdarija | 2021-07-27 | [Watch](https://www.youtube.com/watch?v=9FuRrcmWgWU) | +| Automated recon? ReNgine - Hacker Tools | English | Intigriti | 2021-07-21 | [Watch](https://www.youtube.com/watch?v=9FuRrcmWgWU) | + +We appreciate the community's contributions in creating these resources. + +![-----------------------------------------------------](https://raw.githubusercontent.com/andreasbm/readme/master/assets/lines/aqua.png) -![-----------------------------------------------------](https://raw.githubusercontent.com/andreasbm/readme/master/assets/lines/aqua.png) ## Screenshots @@ -518,13 +423,6 @@ Thank you for your support! ![-----------------------------------------------------](https://raw.githubusercontent.com/andreasbm/readme/master/assets/lines/aqua.png) -## License - -Distributed under the GNU GPL v3 License. See [LICENSE](LICENSE) for more information. - -![-----------------------------------------------------](https://raw.githubusercontent.com/andreasbm/readme/master/assets/lines/aqua.png) - - ## Reporting Security Vulnerabilities We appreciate your efforts to responsibly disclose your findings and will make every effort to acknowledge your contributions. @@ -552,4 +450,10 @@ Thank you for helping to keep reNgine and its users safe! ![-----------------------------------------------------](https://raw.githubusercontent.com/andreasbm/readme/master/assets/lines/aqua.png) -

Note: Parts of this README were written or refined using AI language models.

+## License + +Distributed under the GNU GPL v3 License. See [LICENSE](LICENSE) for more information. + +![-----------------------------------------------------](https://raw.githubusercontent.com/andreasbm/readme/master/assets/lines/aqua.png) + +

Note: Parts of this README were written or refined using AI language models.

\ No newline at end of file diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index 9481d7f46..3359cfb8f 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -94,9 +94,6 @@ services: - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} - POSTGRES_PORT=${POSTGRES_PORT} - POSTGRES_HOST=${POSTGRES_HOST} - # THIS IS A MUST FOR CHECKING UPDATE, EVERYTIME A COMMIT IS MERGED INTO - # MASTER, UPDATE THIS!!! MAJOR.MINOR.PATCH https://semver.org/ - - RENGINE_CURRENT_VERSION='2.1.3' volumes: - ./web:/usr/src/app - github_repos:/usr/src/github diff --git a/docker-compose.yml b/docker-compose.yml index 2daccd382..6f40c5bd6 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -96,9 +96,6 @@ services: - POSTGRES_PORT=${POSTGRES_PORT} - POSTGRES_HOST=${POSTGRES_HOST} - DJANGO_SUPERUSER_PASSWORD=${DJANGO_SUPERUSER_PASSWORD} - # THIS IS A MUST FOR CHECKING UPDATE, EVERYTIME A COMMIT IS MERGED INTO - # MASTER, UPDATE THIS!!! MAJOR.MINOR.PATCH https://semver.org/ - - RENGINE_CURRENT_VERSION='2.1.3' volumes: - ./web:/usr/src/app - github_repos:/usr/src/github diff --git a/scripts/uninstall.sh b/scripts/uninstall.sh index cc177285d..761738b5c 100755 --- a/scripts/uninstall.sh +++ b/scripts/uninstall.sh @@ -30,8 +30,8 @@ read -p "$(echo -e ${WARNING}"Are you sure you want to proceed? (y/Y/yes/YES to # change answer to lowecase for comparison ANSWER_LC=$(echo "$CONFIRM" | tr '[:upper:]' '[:lower:]') -if [[ "$ANSWER_LC" != "y" && "$ANSWER_LC" != "yes" ]]; then - print_status "${YELLOW}Uninstall aborted by user.${RESET}" +if [ -z "$CONFIRM" ] || { [ "$CONFIRM" != "y" ] && [ "$CONFIRM" != "Y" ] && [ "$CONFIRM" != "yes" ] && [ "$CONFIRM" != "Yes" ] && [ "$CONFIRM" != "YES" ]; }; then + print_status "${WARNING}Uninstall aborted by user.${RESET}" exit 0 fi diff --git a/web/.version b/web/.version new file mode 100644 index 000000000..cf2dc0bc4 --- /dev/null +++ b/web/.version @@ -0,0 +1 @@ +v2.2.0 \ No newline at end of file diff --git a/web/Dockerfile b/web/Dockerfile index 33dd9ee86..66d709e75 100644 --- a/web/Dockerfile +++ b/web/Dockerfile @@ -85,6 +85,7 @@ RUN printf "\ github.com/tomnomnom/waybackurls@latest\n\ github.com/projectdiscovery/httpx/cmd/httpx@latest\n\ github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest\n\ + github.com/projectdiscovery/chaos-client/cmd/chaos@latest\n\ github.com/projectdiscovery/nuclei/v3/cmd/nuclei@latest\n\ github.com/projectdiscovery/naabu/v2/cmd/naabu@latest\n\ github.com/hakluke/hakrawler@latest\n\ @@ -103,6 +104,9 @@ RUN printf "\ # Update Nuclei and Nuclei-Templates RUN nuclei -update-templates +# update chaos +RUN chaos -update + # Copy requirements COPY ./requirements.txt /tmp/requirements.txt RUN pip3 install --upgrade setuptools==72.1.0 diff --git a/web/api/serializers.py b/web/api/serializers.py index 1fd0b7e91..a01c9b909 100644 --- a/web/api/serializers.py +++ b/web/api/serializers.py @@ -1,6 +1,5 @@ from dashboard.models import * -from django.contrib.humanize.templatetags.humanize import (naturalday, - naturaltime) +from django.contrib.humanize.templatetags.humanize import (naturalday, naturaltime) from django.db.models import F, JSONField, Value from recon_note.models import * from reNgine.common_func import * @@ -8,6 +7,60 @@ from scanEngine.models import * from startScan.models import * from targetApp.models import * +from dashboard.models import InAppNotification + + +class HackerOneProgramAttributesSerializer(serializers.Serializer): + """ + Serializer for HackerOne Program + IMP: THIS is not a model serializer, programs will not be 
stored in db + due to ever changing nature of programs, rather cache will be used on these serializers + """ + handle = serializers.CharField(required=False) + name = serializers.CharField(required=False) + currency = serializers.CharField(required=False) + submission_state = serializers.CharField(required=False) + triage_active = serializers.BooleanField(allow_null=True, required=False) + state = serializers.CharField(required=False) + started_accepting_at = serializers.DateTimeField(required=False) + bookmarked = serializers.BooleanField(required=False) + allows_bounty_splitting = serializers.BooleanField(required=False) + offers_bounties = serializers.BooleanField(required=False) + open_scope = serializers.BooleanField(allow_null=True, required=False) + fast_payments = serializers.BooleanField(allow_null=True, required=False) + gold_standard_safe_harbor = serializers.BooleanField(allow_null=True, required=False) + + def to_representation(self, instance): + return {key: value for key, value in instance.items()} + + +class HackerOneProgramSerializer(serializers.Serializer): + id = serializers.CharField() + type = serializers.CharField() + attributes = HackerOneProgramAttributesSerializer() + + + +class InAppNotificationSerializer(serializers.ModelSerializer): + class Meta: + model = InAppNotification + fields = [ + 'id', + 'title', + 'description', + 'icon', + 'is_read', + 'created_at', + 'notification_type', + 'status', + 'redirect_link', + 'open_in_new_tab', + 'project' + ] + read_only_fields = ['id', 'created_at'] + + def get_project_name(self, obj): + return obj.project.name if obj.project else None class SearchHistorySerializer(serializers.ModelSerializer): diff --git a/web/api/shared_api_tasks.py b/web/api/shared_api_tasks.py new file mode 100644 index 000000000..d21ca23fa --- /dev/null +++ b/web/api/shared_api_tasks.py @@ -0,0 +1,209 @@ +# include all the celery tasks to be used in the API, do not put in tasks.py +import requests + +from reNgine.common_func import create_inappnotification, get_hackerone_key_username +from reNgine.definitions import PROJECT_LEVEL_NOTIFICATION, HACKERONE_ALLOWED_ASSET_TYPES +from reNgine.celery import app +from reNgine.database_utils import bulk_import_targets + +@app.task(name='import_hackerone_programs_task', bind=False, queue='api_queue') +def import_hackerone_programs_task(handles, project_slug, is_sync = False): + """ + Runs in the background to import programs from HackerOne + + Args: + handles (list): List of handles to import + project_slug (str): Slug of the project + is_sync (bool): If the import is a sync operation + Returns: + None + rather creates inapp notifications + """ + def fetch_program_details_from_hackerone(program_handle): + url = f'https://api.hackerone.com/v1/hackers/programs/{program_handle}' + headers = {'Accept': 'application/json'} + creds = get_hackerone_key_username() + + if not creds: + raise Exception("HackerOne API credentials not configured") + + username, api_key = creds + + response = requests.get( + url, + headers=headers, + auth=(username, api_key) + ) + + if response.status_code == 401: + raise Exception("HackerOne API credentials are invalid") + + if response.status_code == 200: + return response.json() + else: + return None + + for handle in handles: + program_details = fetch_program_details_from_hackerone(handle) + if program_details: + # Thanks, some parts of this logics were originally written by @null-ref-0000 + # via PR https://github.com/yogeshojha/rengine/pull/1410 + try: + program_name = 
program_details['attributes']['name'] + + assets = [] + scopes = program_details['relationships']['structured_scopes']['data'] + for scope in scopes: + asset_type = scope['attributes']['asset_type'] + asset_identifier = scope['attributes']['asset_identifier'] + eligible_for_submission = scope['attributes']['eligible_for_submission'] + + # for now we should ignore the scope that are not eligible for submission + # in future release we will add this in target out_of_scope + + # we need to filter the scope that are supported by reNgine now + if asset_type in HACKERONE_ALLOWED_ASSET_TYPES and eligible_for_submission: + assets.append(asset_identifier) + + # in some cases asset_type is OTHER and may contain the asset + elif asset_type == 'OTHER' and ('.' in asset_identifier or asset_identifier.startswith('http')): + assets.append(asset_identifier) + + # cleanup assets + assets = list(set(assets)) + + # convert assets to list of dict with name and description + assets = [{'name': asset, 'description': None} for asset in assets] + new_targets_added = bulk_import_targets( + targets=assets, + project_slug=project_slug, + organization_name=program_name, + org_description='Imported from Hackerone', + h1_team_handle=handle + ) + + if new_targets_added: + create_inappnotification( + title=f"HackerOne Program Imported: {handle}", + description=f"The program '{program_name}' from hackerone has been successfully imported.", + notification_type=PROJECT_LEVEL_NOTIFICATION, + project_slug=project_slug, + icon="mdi-check-circle", + status='success' + ) + + except Exception as e: + create_inappnotification( + title=f"HackerOne Program Import Failed: {handle}", + description=f"Failed to import program from hackerone with handle '{handle}'. {str(e)}", + notification_type=PROJECT_LEVEL_NOTIFICATION, + project_slug=project_slug, + icon="mdi-alert-circle", + status='error' + ) + else: + create_inappnotification( + title=f"HackerOne Program Import Failed: {handle}", + description=f"Failed to import program from hackerone with handle '{handle}'. Program details could not be fetched.", + notification_type=PROJECT_LEVEL_NOTIFICATION, + project_slug=project_slug, + icon="mdi-alert-circle", + status='error' + ) + + if is_sync: + title = "HackerOne Program Sync Completed" + description = f"Sync process for {len(handles)} program(s) has completed." + else: + title = "HackerOne Program Import Completed" + description = f"Import process for {len(handles)} program(s) has completed." 
+ + create_inappnotification( + title=title, + description=description, + notification_type=PROJECT_LEVEL_NOTIFICATION, + project_slug=project_slug, + icon="mdi-check-all", + status='success' + ) + + +@app.task(name='sync_bookmarked_programs_task', bind=False, queue='api_queue') +def sync_bookmarked_programs_task(project_slug): + """ + Runs in the background to sync bookmarked programs from HackerOne + + Args: + project_slug (str): Slug of the project + Returns: + None + Creates in-app notifications for progress and results + """ + + def fetch_bookmarked_programs(): + url = f'https://api.hackerone.com/v1/hackers/programs?&page[size]=100' + headers = {'Accept': 'application/json'} + bookmarked_programs = [] + + credentials = get_hackerone_key_username() + if not credentials: + raise Exception("HackerOne API credentials not configured") + + username, api_key = credentials + + while url: + response = requests.get( + url, + headers=headers, + auth=(username, api_key) + ) + + if response.status_code == 401: + raise Exception("HackerOne API credentials are invalid") + elif response.status_code != 200: + raise Exception(f"HackerOne API request failed with status code {response.status_code}") + + data = response.json() + programs = data['data'] + bookmarked = [p for p in programs if p['attributes']['bookmarked']] + bookmarked_programs.extend(bookmarked) + + url = data['links'].get('next') + + return bookmarked_programs + + try: + bookmarked_programs = fetch_bookmarked_programs() + handles = [program['attributes']['handle'] for program in bookmarked_programs] + + if not handles: + create_inappnotification( + title="HackerOne Bookmarked Programs Sync Completed", + description="No bookmarked programs found.", + notification_type=PROJECT_LEVEL_NOTIFICATION, + project_slug=project_slug, + icon="mdi-information", + status='info' + ) + return + + import_hackerone_programs_task.delay(handles, project_slug, is_sync=True) + + create_inappnotification( + title="HackerOne Bookmarked Programs Sync Progress", + description=f"Found {len(handles)} bookmarked program(s). 
Starting import process.", + notification_type=PROJECT_LEVEL_NOTIFICATION, + project_slug=project_slug, + icon="mdi-progress-check", + status='info' + ) + + except Exception as e: + create_inappnotification( + title="HackerOne Bookmarked Programs Sync Failed", + description=f"Failed to sync bookmarked programs: {str(e)}", + notification_type=PROJECT_LEVEL_NOTIFICATION, + project_slug=project_slug, + icon="mdi-alert-circle", + status='error' + ) diff --git a/web/api/urls.py b/web/api/urls.py index cfdd8f265..7c1c12802 100644 --- a/web/api/urls.py +++ b/web/api/urls.py @@ -19,6 +19,8 @@ router.register(r'listIps', IpAddressViewSet) router.register(r'listActivityLogs', ListActivityLogsViewSet) router.register(r'listScanLogs', ListScanLogsViewSet) +router.register(r'notifications', InAppNotificationManagerViewSet, basename='notification') +router.register(r'hackerone-programs', HackerOneProgramViewSet, basename='hackerone_program') urlpatterns = [ url('^', include(router.urls)), @@ -239,6 +241,11 @@ 'action/create/project', CreateProjectApi.as_view(), name='create_project'), + path( + 'toggle-bug-bounty-mode/', + ToggleBugBountyModeView.as_view(), + name='toggle_bug_bounty_mode' + ), ] urlpatterns += router.urls diff --git a/web/api/views.py b/web/api/views.py index f5df4bbbf..fcea8abd9 100644 --- a/web/api/views.py +++ b/web/api/views.py @@ -1,24 +1,30 @@ -import logging import re import socket -from ipaddress import IPv4Network - +import logging import requests import validators -from dashboard.models import * +import requests + +from ipaddress import IPv4Network from django.db.models import CharField, Count, F, Q, Value -from django.shortcuts import get_object_or_404 from django.utils import timezone from packaging import version from django.template.defaultfilters import slugify -from rest_framework import viewsets +from datetime import datetime +from rest_framework import viewsets, status from rest_framework.response import Response from rest_framework.views import APIView -from rest_framework.status import HTTP_400_BAD_REQUEST +from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_204_NO_CONTENT, HTTP_202_ACCEPTED +from rest_framework.decorators import action +from django.core.exceptions import ObjectDoesNotExist +from django.core.cache import cache + +from dashboard.models import * from recon_note.models import * from reNgine.celery import app from reNgine.common_func import * +from reNgine.database_utils import * from reNgine.definitions import ABORTED_TASK from reNgine.tasks import * from reNgine.llm import * @@ -27,12 +33,305 @@ from startScan.models import * from startScan.models import EndPoint from targetApp.models import * - +from api.shared_api_tasks import import_hackerone_programs_task, sync_bookmarked_programs_task from .serializers import * + logger = logging.getLogger(__name__) +class ToggleBugBountyModeView(APIView): + """ + This class manages the user bug bounty mode + """ + def post(self, request, *args, **kwargs): + user_preferences = get_object_or_404(UserPreferences, user=request.user) + user_preferences.bug_bounty_mode = not user_preferences.bug_bounty_mode + user_preferences.save() + return Response({ + 'bug_bounty_mode': user_preferences.bug_bounty_mode + }, status=status.HTTP_200_OK) + + +class HackerOneProgramViewSet(viewsets.ViewSet): + """ + This class manages the HackerOne Program model, + provides basic fetching of programs and caching + """ + CACHE_KEY = 'hackerone_programs' + CACHE_TIMEOUT = 60 * 30 # 30 minutes + PROGRAM_CACHE_KEY = 
'hackerone_program_{}' + + API_BASE = 'https://api.hackerone.com/v1/hackers' + + ALLOWED_ASSET_TYPES = ["WILDCARD", "DOMAIN", "IP_ADDRESS", "CIDR", "URL"] + + def list(self, request): + try: + sort_by = request.query_params.get('sort_by', 'age') + sort_order = request.query_params.get('sort_order', 'desc') + + programs = self.get_cached_programs() + + if sort_by == 'name': + programs = sorted(programs, key=lambda x: x['attributes']['name'].lower(), + reverse=(sort_order.lower() == 'desc')) + elif sort_by == 'reports': + programs = sorted(programs, key=lambda x: x['attributes'].get('number_of_reports_for_user', 0), + reverse=(sort_order.lower() == 'desc')) + elif sort_by == 'age': + programs = sorted(programs, + key=lambda x: datetime.strptime(x['attributes'].get('started_accepting_at', '1970-01-01T00:00:00.000Z'), '%Y-%m-%dT%H:%M:%S.%fZ'), + reverse=(sort_order.lower() == 'desc') + ) + + serializer = HackerOneProgramSerializer(programs, many=True) + return Response(serializer.data) + except Exception as e: + return self.handle_exception(e) + + def get_api_credentials(self): + try: + api_key = HackerOneAPIKey.objects.first() + if not api_key: + raise ObjectDoesNotExist("HackerOne API credentials not found") + return api_key.username, api_key.key + except ObjectDoesNotExist: + raise Exception("HackerOne API credentials not configured") + + @action(detail=False, methods=['get']) + def bookmarked_programs(self, request): + try: + # do not cache bookmarked programs due to the user specific nature + programs = self.fetch_programs_from_hackerone() + bookmarked = [p for p in programs if p['attributes']['bookmarked']] + serializer = HackerOneProgramSerializer(bookmarked, many=True) + return Response(serializer.data) + except Exception as e: + return self.handle_exception(e) + + @action(detail=False, methods=['get']) + def bounty_programs(self, request): + try: + programs = self.get_cached_programs() + bounty_programs = [p for p in programs if p['attributes']['offers_bounties']] + serializer = HackerOneProgramSerializer(bounty_programs, many=True) + return Response(serializer.data) + except Exception as e: + return self.handle_exception(e) + + def get_cached_programs(self): + programs = cache.get(self.CACHE_KEY) + if programs is None: + programs = self.fetch_programs_from_hackerone() + cache.set(self.CACHE_KEY, programs, self.CACHE_TIMEOUT) + return programs + + def fetch_programs_from_hackerone(self): + url = f'{self.API_BASE}/programs?page[size]=100' + headers = {'Accept': 'application/json'} + all_programs = [] + try: + username, api_key = self.get_api_credentials() + except Exception as e: + raise Exception("API credentials error: " + str(e)) + + while url: + response = requests.get( + url, + headers=headers, + auth=(username, api_key) + ) + + if response.status_code == 401: + raise Exception("Invalid API credentials") + elif response.status_code != 200: + raise Exception(f"HackerOne API request failed with status code {response.status_code}") + + data = response.json() + all_programs.extend(data['data']) + + url = data['links'].get('next') + + return all_programs + + @action(detail=False, methods=['post']) + def refresh_cache(self, request): + try: + programs = self.fetch_programs_from_hackerone() + cache.set(self.CACHE_KEY, programs, self.CACHE_TIMEOUT) + return Response({"status": "Cache refreshed successfully"}) + except Exception as e: + return self.handle_exception(e) + + @action(detail=True, methods=['get']) + def program_details(self, request, pk=None): + try: + program_handle = pk + 
cache_key = self.PROGRAM_CACHE_KEY.format(program_handle) + program_details = cache.get(cache_key) + + if program_details is None: + program_details = self.fetch_program_details_from_hackerone(program_handle) + if program_details: + cache.set(cache_key, program_details, self.CACHE_TIMEOUT) + + if program_details: + filtered_scopes = [ + scope for scope in program_details.get('relationships', {}).get('structured_scopes', {}).get('data', []) + if scope.get('attributes', {}).get('asset_type') in self.ALLOWED_ASSET_TYPES + ] + + program_details['relationships']['structured_scopes']['data'] = filtered_scopes + + return Response(program_details) + else: + return Response({"error": "Program not found"}, status=status.HTTP_404_NOT_FOUND) + except Exception as e: + return self.handle_exception(e) + + def fetch_program_details_from_hackerone(self, program_handle): + url = f'{self.API_BASE}/programs/{program_handle}' + headers = {'Accept': 'application/json'} + try: + username, api_key = self.get_api_credentials() + except Exception as e: + raise Exception("API credentials error: " + str(e)) + + response = requests.get( + url, + headers=headers, + auth=(username, api_key) + ) + + if response.status_code == 401: + raise Exception("Invalid API credentials") + elif response.status_code == 200: + return response.json() + else: + return None + + @action(detail=False, methods=['post']) + def import_programs(self, request): + try: + project_slug = request.query_params.get('project_slug') + if not project_slug: + return Response({"error": "Project slug is required"}, status=status.HTTP_400_BAD_REQUEST) + handles = request.data.get('handles', []) + + if not handles: + return Response({"error": "No program handles provided"}, status=status.HTTP_400_BAD_REQUEST) + + import_hackerone_programs_task.delay(handles, project_slug) + + create_inappnotification( + title="HackerOne Program Import Started", + description=f"Import process for {len(handles)} program(s) has begun.", + notification_type=PROJECT_LEVEL_NOTIFICATION, + project_slug=project_slug, + icon="mdi-download", + status='info' + ) + + return Response({"message": f"Import process for {len(handles)} program(s) has begun."}, status=status.HTTP_202_ACCEPTED) + except Exception as e: + return self.handle_exception(e) + + @action(detail=False, methods=['get']) + def sync_bookmarked(self, request): + try: + project_slug = request.query_params.get('project_slug') + if not project_slug: + return Response({"error": "Project slug is required"}, status=status.HTTP_400_BAD_REQUEST) + + sync_bookmarked_programs_task.delay(project_slug) + + create_inappnotification( + title="HackerOne Bookmarked Programs Sync Started", + description="Sync process for bookmarked programs has begun.", + notification_type=PROJECT_LEVEL_NOTIFICATION, + project_slug=project_slug, + icon="mdi-sync", + status='info' + ) + + return Response({"message": "Sync process for bookmarked programs has begun."}, status=status.HTTP_202_ACCEPTED) + except Exception as e: + return self.handle_exception(e) + + def handle_exception(self, exc): + if isinstance(exc, ObjectDoesNotExist): + return Response({"error": "HackerOne API credentials not configured"}, status=status.HTTP_503_SERVICE_UNAVAILABLE) + elif str(exc) == "Invalid API credentials": + return Response({"error": "Invalid HackerOne API credentials"}, status=status.HTTP_401_UNAUTHORIZED) + else: + return Response({"error": str(exc)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR) + +class InAppNotificationManagerViewSet(viewsets.ModelViewSet): + """ 
+    This class manages the notification model and provides CRUD operations on
+    notifications, such as marking one read, marking all read, clearing all,
+    and fetching all notifications.
+    """
+    serializer_class = InAppNotificationSerializer
+    pagination_class = None
+
+    def get_queryset(self):
+        # we will see later if user-based notifications are needed
+        # return InAppNotification.objects.filter(user=self.request.user)
+        project_slug = self.request.query_params.get('project_slug')
+        queryset = InAppNotification.objects.all()
+        if project_slug:
+            queryset = queryset.filter(
+                Q(project__slug=project_slug) | Q(notification_type='system')
+            )
+        return queryset.order_by('-created_at')
+
+    @action(detail=False, methods=['post'])
+    def mark_all_read(self, request):
+        # marks all notifications as read
+        project_slug = self.request.query_params.get('project_slug')
+        queryset = self.get_queryset()
+
+        if project_slug:
+            queryset = queryset.filter(
+                Q(project__slug=project_slug) | Q(notification_type='system')
+            )
+        queryset.update(is_read=True)
+        return Response(status=HTTP_204_NO_CONTENT)
+
+    @action(detail=True, methods=['post'])
+    def mark_read(self, request, pk=None):
+        # marks an individual notification as read when it is clicked
+        notification = self.get_object()
+        notification.is_read = True
+        notification.save()
+        return Response(status=HTTP_204_NO_CONTENT)
+
+    @action(detail=False, methods=['get'])
+    def unread_count(self, request):
+        # fetches the unread notification count, mainly for the badge
+        project_slug = self.request.query_params.get('project_slug')
+        queryset = self.get_queryset()
+        if project_slug:
+            queryset = queryset.filter(
+                Q(project__slug=project_slug) | Q(notification_type='system')
+            )
+        count = queryset.filter(is_read=False).count()
+        return Response({'count': count})
+
+    @action(detail=False, methods=['post'])
+    def clear_all(self, request):
+        # called by the clear button; deletes all matching notifications
+        project_slug = self.request.query_params.get('project_slug')
+        queryset = self.get_queryset()
+        if project_slug:
+            queryset = queryset.filter(
+                Q(project__slug=project_slug) | Q(notification_type='system')
+            )
+        queryset.delete()
+        return Response(status=HTTP_204_NO_CONTENT)
+
+
 class OllamaManager(APIView):
     def get(self, request):
         """
@@ -622,6 +921,11 @@ def post(self, request):
         h1_team_handle = data.get('h1_team_handle')
         description = data.get('description')
         domain_name = data.get('domain_name')
+        # remove wildcard from domain
+        domain_name = domain_name.replace('*', '')
+        # if domain_name begins with .
remove that + if domain_name.startswith('.'): + domain_name = domain_name[1:] organization_name = data.get('organization') slug = data.get('slug') @@ -629,35 +933,26 @@ def post(self, request): if not validators.domain(domain_name): return Response({'status': False, 'message': 'Invalid domain or IP'}) - project = Project.objects.get(slug=slug) - - # Create domain object in DB - domain, _ = Domain.objects.get_or_create(name=domain_name) - domain.project = project - domain.h1_team_handle = h1_team_handle - domain.description = description - if not domain.insert_date: - domain.insert_date = timezone.now() - domain.save() - - # Create org object in DB - if organization_name: - organization_obj = None - organization_query = Organization.objects.filter(name=organization_name) - if organization_query.exists(): - organization_obj = organization_query[0] - else: - organization_obj = Organization.objects.create( - name=organization_name, - project=project, - insert_date=timezone.now()) - organization_obj.domains.add(domain) + status = bulk_import_targets( + targets=[{ + 'name': domain_name, + 'description': description, + }], + organization_name=organization_name, + h1_team_handle=h1_team_handle, + project_slug=slug + ) + if status: + return Response({ + 'status': True, + 'message': 'Domain successfully added as target !', + 'domain_name': domain_name, + # 'domain_id': domain.id + }) return Response({ - 'status': True, - 'message': 'Domain successfully added as target !', - 'domain_name': domain_name, - 'domain_id': domain.id + 'status': False, + 'message': 'Failed to add as target !' }) @@ -763,6 +1058,9 @@ def post(self, request): if data['type'] == 'subscan': for row in data['rows']: SubScan.objects.get(id=row).delete() + elif data['type'] == 'organization': + for row in data['rows']: + Organization.objects.get(id=row).delete() response = True except Exception as e: response = False @@ -774,63 +1072,95 @@ class StopScan(APIView): def post(self, request): req = self.request data = req.data - scan_id = data.get('scan_id') - subscan_id = data.get('subscan_id') - response = {} - task_ids = [] - scan = None - subscan = None - if subscan_id: - try: - subscan = get_object_or_404(SubScan, id=subscan_id) - scan = subscan.scan_history - task_ids = subscan.celery_ids - subscan.status = ABORTED_TASK - subscan.stop_scan_date = timezone.now() - subscan.save() - create_scan_activity( - subscan.scan_history.id, - f'Subscan {subscan_id} aborted', - SUCCESS_TASK) - response['status'] = True - except Exception as e: - logging.error(e) - response = {'status': False, 'message': str(e)} - elif scan_id: + scan_ids = data.get('scan_ids', []) + subscan_ids = data.get('subscan_ids', []) + + scan_ids = [int(id) for id in scan_ids] + subscan_ids = [int(id) for id in subscan_ids] + + response = {'status': False} + + def abort_scan(scan): + response = {} + logger.info(f'Aborting scan History') try: - scan = get_object_or_404(ScanHistory, id=scan_id) + logger.info(f"Setting scan {scan} status to ABORTED_TASK") task_ids = scan.celery_ids scan.scan_status = ABORTED_TASK scan.stop_scan_date = timezone.now() scan.aborted_by = request.user scan.save() + for task_id in task_ids: + app.control.revoke(task_id, terminate=True, signal='SIGKILL') + + tasks = ( + ScanActivity.objects + .filter(scan_of=scan) + .filter(status=RUNNING_TASK) + .order_by('-pk') + ) + for task in tasks: + task.status = ABORTED_TASK + task.time = timezone.now() + task.save() + create_scan_activity( scan.id, "Scan aborted", - SUCCESS_TASK) + ABORTED_TASK + ) 
response['status'] = True except Exception as e: - logging.error(e) + logger.error(e) response = {'status': False, 'message': str(e)} - logger.warning(f'Revoking tasks {task_ids}') - for task_id in task_ids: - app.control.revoke(task_id, terminate=True, signal='SIGKILL') + return response - # Abort running tasks - tasks = ( - ScanActivity.objects - .filter(scan_of=scan) - .filter(status=RUNNING_TASK) - .order_by('-pk') - ) - if tasks.exists(): - for task in tasks: - if subscan_id and task.id not in subscan.celery_ids: + def abort_subscan(subscan): + response = {} + logger.info(f'Aborting subscan') + try: + logger.info(f"Setting scan {subscan} status to ABORTED_TASK") + task_ids = subscan.celery_ids + + for task_id in task_ids: + app.control.revoke(task_id, terminate=True, signal='SIGKILL') + + subscan.status = ABORTED_TASK + subscan.stop_scan_date = timezone.now() + subscan.save() + create_scan_activity( + subscan.scan_history.id, + f'Subscan aborted', + ABORTED_TASK + ) + response['status'] = True + except Exception as e: + logger.error(e) + response = {'status': False, 'message': str(e)} + + return response + + for scan_id in scan_ids: + try: + scan = ScanHistory.objects.get(id=scan_id) + # if scan is already successful or aborted then do nothing + if scan.scan_status == SUCCESS_TASK or scan.scan_status == ABORTED_TASK: continue - task.status = ABORTED_TASK - task.time = timezone.now() - task.save() + response = abort_scan(scan) + except Exception as e: + logger.error(e) + response = {'status': False, 'message': str(e)} + + for subscan_id in subscan_ids: + try: + subscan = SubScan.objects.get(id=subscan_id) + if subscan.scan_status == SUCCESS_TASK or subscan.scan_status == ABORTED_TASK: + continue + response = abort_subscan(subscan) + except Exception as e: + logger.error(e) + response = {'status': False, 'message': str(e)} return Response(response) @@ -890,10 +1220,7 @@ def get(self, request): # get current version_number # remove quotes from current_version - current_version = ((os.environ['RENGINE_CURRENT_VERSION' - ])[1:] if os.environ['RENGINE_CURRENT_VERSION' - ][0] == 'v' - else os.environ['RENGINE_CURRENT_VERSION']).replace("'", "") + current_version = RENGINE_CURRENT_VERSION # for consistency remove v from both if exists latest_version = re.search(r'v(\d+\.)?(\d+\.)?(\*|\d+)', @@ -914,8 +1241,21 @@ def get(self, request): return_response['status'] = True return_response['latest_version'] = latest_version return_response['current_version'] = current_version - return_response['update_available'] = version.parse(current_version) < version.parse(latest_version) - if version.parse(current_version) < version.parse(latest_version): + is_version_update_available = version.parse(current_version) < version.parse(latest_version) + + # if is_version_update_available then we should create inapp notification + create_inappnotification( + title='reNgine Update Available', + description=f'Update to version {latest_version} is available', + notification_type=SYSTEM_LEVEL_NOTIFICATION, + project_slug=None, + icon='mdi-update', + redirect_link='https://github.com/yogeshojha/rengine/releases', + open_in_new_tab=True + ) + + return_response['update_available'] = is_version_update_available + if is_version_update_available: return_response['changelog'] = response[0]['body'] return Response(return_response) @@ -1015,7 +1355,11 @@ def get(self, request): version_number = None _, stdout = run_command(tool.version_lookup_command) - version_number = re.search(re.compile(tool.version_match_regex), 
str(stdout)) + if tool.version_match_regex: + version_number = re.search(re.compile(tool.version_match_regex), str(stdout)) + else: + version_match_regex = r'(?i:v)?(\d+(?:\.\d+){2,})' + version_number = re.search(version_match_regex, str(stdout)) if not version_number: return Response({'status': False, 'message': 'Invalid version lookup command.'}) diff --git a/web/art/reNgine.txt b/web/art/reNgine.txt index cf0082bd3..a94a0ea1d 100644 --- a/web/art/reNgine.txt +++ b/web/art/reNgine.txt @@ -3,6 +3,6 @@ _ __ ___| \| | __ _ _ _ __ ___ | '__/ _ \ . ` |/ _` | | '_ \ / _ \ | | | __/ |\ | (_| | | | | | __/ - |_| \___|_| \_|\__, |_|_| |_|\___| v2.1.1 + |_| \___|_| \_|\__, |_|_| |_|\___| __/ | |___/ diff --git a/web/celery-entrypoint.sh b/web/celery-entrypoint.sh index 6d7968fff..54e014cc3 100755 --- a/web/celery-entrypoint.sh +++ b/web/celery-entrypoint.sh @@ -151,8 +151,6 @@ then chmod +x /usr/src/github/goofuzz/GooFuzz fi -exec "$@" - # httpx seems to have issue, use alias instead!!! echo 'alias httpx="/go/bin/httpx"' >> ~/.bashrc @@ -167,28 +165,70 @@ if [ "$DEBUG" == "1" ]; then loglevel='debug' fi -# watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --autoscale=10,0 -l INFO -Q scan_queue & -echo "Starting Workers..." -echo "Starting Main Scan Worker with Concurrency: $MAX_CONCURRENCY,$MIN_CONCURRENCY" -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --loglevel=$loglevel --autoscale=$MAX_CONCURRENCY,$MIN_CONCURRENCY -Q main_scan_queue & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=$loglevel -Q initiate_scan_queue -n initiate_scan_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=$loglevel -Q subscan_queue -n subscan_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=20 --loglevel=$loglevel -Q report_queue -n report_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q send_notif_queue -n send_notif_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q send_scan_notif_queue -n send_scan_notif_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q send_task_notif_queue -n send_task_notif_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=5 --loglevel=$loglevel -Q send_file_to_discord_queue -n send_file_to_discord_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=5 --loglevel=$loglevel -Q send_hackerone_report_queue -n send_hackerone_report_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 
--loglevel=$loglevel -Q parse_nmap_results_queue -n parse_nmap_results_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=20 --loglevel=$loglevel -Q geo_localize_queue -n geo_localize_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q query_whois_queue -n query_whois_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=$loglevel -Q remove_duplicate_endpoints_queue -n remove_duplicate_endpoints_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=50 --loglevel=$loglevel -Q run_command_queue -n run_command_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q query_reverse_whois_queue -n query_reverse_whois_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q query_ip_history_queue -n query_ip_history_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=$loglevel -Q llm_queue -n llm_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q dorking_queue -n dorking_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q osint_discovery_queue -n osint_discovery_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q h8mail_queue -n h8mail_worker &
-watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q theHarvester_queue -n theHarvester_worker
-exec "$@"
+generate_worker_command() {
+    local queue=$1
+    local concurrency=$2
+    local worker_name=$3
+    local app=${4:-"reNgine.tasks"}
+    local directory=${5:-"/usr/src/app/reNgine/"}
+
+    # --optimization=fair must stay inside the quotes so it becomes part of
+    # the generated command rather than a stray argument to `local`
+    local base_command="celery -A $app worker --pool=gevent --autoscale=$concurrency,1 --loglevel=$loglevel -Q $queue -n $worker_name --optimization=fair"
+
+    if [ "$DEBUG" == "1" ]; then
+        echo "watchmedo auto-restart --recursive --pattern=\"*.py\" --directory=\"$directory\" -- $base_command &"
+    else
+        echo "$base_command &"
+    fi
+}
+
+echo "Starting Celery Workers..."
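+# Example of a generated worker command (illustrative sketch only, assuming
+# DEBUG is not "1" and loglevel=info; the queue, concurrency, and worker name
+# come from the workers list defined below):
+#   celery -A reNgine.tasks worker --pool=gevent --autoscale=30,1 --loglevel=info -Q initiate_scan_queue -n initiate_scan_worker --optimization=fair &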
+ +commands="" + +# Main scan worker +if [ "$DEBUG" == "1" ]; then + commands+="watchmedo auto-restart --recursive --pattern=\"*.py\" --directory=\"/usr/src/app/reNgine/\" -- celery -A reNgine.tasks worker --loglevel=$loglevel --optimization=fair --autoscale=$MAX_CONCURRENCY,$MIN_CONCURRENCY -Q main_scan_queue &"$'\n' +else + commands+="celery -A reNgine.tasks worker --loglevel=$loglevel --optimization=fair --autoscale=$MAX_CONCURRENCY,$MIN_CONCURRENCY -Q main_scan_queue &"$'\n' +fi + +# API shared task worker +if [ "$DEBUG" == "1" ]; then + commands+="watchmedo auto-restart --recursive --pattern=\"*.py\" --directory=\"/usr/src/app/api/\" -- celery -A api.shared_api_tasks worker --pool=gevent --optimization=fair --concurrency=30 --loglevel=$loglevel -Q api_queue -n api_worker &"$'\n' +else + commands+="celery -A api.shared_api_tasks worker --pool=gevent --concurrency=30 --optimization=fair --loglevel=$loglevel -Q api_queue -n api_worker &"$'\n' +fi + +# worker format: "queue_name:concurrency:worker_name" +workers=( + "initiate_scan_queue:30:initiate_scan_worker" + "subscan_queue:30:subscan_worker" + "report_queue:20:report_worker" + "send_notif_queue:10:send_notif_worker" + "send_task_notif_queue:10:send_task_notif_worker" + "send_file_to_discord_queue:5:send_file_to_discord_worker" + "send_hackerone_report_queue:5:send_hackerone_report_worker" + "parse_nmap_results_queue:10:parse_nmap_results_worker" + "geo_localize_queue:20:geo_localize_worker" + "query_whois_queue:10:query_whois_worker" + "remove_duplicate_endpoints_queue:30:remove_duplicate_endpoints_worker" + "run_command_queue:50:run_command_worker" + "query_reverse_whois_queue:10:query_reverse_whois_worker" + "query_ip_history_queue:10:query_ip_history_worker" + "llm_queue:30:llm_worker" + "dorking_queue:10:dorking_worker" + "osint_discovery_queue:10:osint_discovery_worker" + "h8mail_queue:10:h8mail_worker" + "theHarvester_queue:10:theHarvester_worker" + "send_scan_notif_queue:10:send_scan_notif_worker" +) + +for worker in "${workers[@]}"; do + IFS=':' read -r queue concurrency worker_name <<< "$worker" + commands+="$(generate_worker_command "$queue" "$concurrency" "$worker_name")"$'\n' +done +commands="${commands%&}" + +eval "$commands" + +wait \ No newline at end of file diff --git a/web/dashboard/admin.py b/web/dashboard/admin.py index be2a79a67..0c44dd932 100644 --- a/web/dashboard/admin.py +++ b/web/dashboard/admin.py @@ -5,3 +5,7 @@ admin.site.register(Project) admin.site.register(OpenAiAPIKey) admin.site.register(NetlasAPIKey) +admin.site.register(ChaosAPIKey) +admin.site.register(HackerOneAPIKey) +admin.site.register(InAppNotification) +admin.site.register(UserPreferences) \ No newline at end of file diff --git a/web/dashboard/migrations/0001_initial.py b/web/dashboard/migrations/0001_initial.py index 44e9ac9a7..542cb1f17 100644 --- a/web/dashboard/migrations/0001_initial.py +++ b/web/dashboard/migrations/0001_initial.py @@ -1,6 +1,8 @@ -# Generated by Django 3.2.23 on 2024-06-19 02:43 +# Generated by Django 3.2.23 on 2024-09-06 01:47 +from django.conf import settings from django.db import migrations, models +import django.db.models.deletion class Migration(migrations.Migration): @@ -8,9 +10,25 @@ class Migration(migrations.Migration): initial = True dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ + migrations.CreateModel( + name='ChaosAPIKey', + fields=[ + ('id', models.AutoField(primary_key=True, serialize=False)), + ('key', models.CharField(max_length=500)), + ], + ), + 
migrations.CreateModel( + name='HackerOneAPIKey', + fields=[ + ('id', models.AutoField(primary_key=True, serialize=False)), + ('username', models.CharField(max_length=500)), + ('key', models.CharField(max_length=500)), + ], + ), migrations.CreateModel( name='NetlasAPIKey', fields=[ @@ -49,4 +67,31 @@ class Migration(migrations.Migration): ('query', models.CharField(max_length=1000)), ], ), + migrations.CreateModel( + name='UserPreferences', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('bug_bounty_mode', models.BooleanField(default=True)), + ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ], + ), + migrations.CreateModel( + name='InAppNotification', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('notification_type', models.CharField(choices=[('system', 'system'), ('project', 'project')], default='system', max_length=10)), + ('status', models.CharField(choices=[('success', 'Success'), ('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='info', max_length=10)), + ('title', models.CharField(max_length=255)), + ('description', models.TextField()), + ('icon', models.CharField(max_length=50)), + ('is_read', models.BooleanField(default=False)), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('redirect_link', models.URLField(blank=True, max_length=255, null=True)), + ('open_in_new_tab', models.BooleanField(default=False)), + ('project', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='dashboard.project')), + ], + options={ + 'ordering': ['-created_at'], + }, + ), ] diff --git a/web/dashboard/models.py b/web/dashboard/models.py index 8ed77dd43..a1ed47a7a 100644 --- a/web/dashboard/models.py +++ b/web/dashboard/models.py @@ -1,4 +1,6 @@ from django.db import models +from reNgine.definitions import * +from django.contrib.auth.models import User class SearchHistory(models.Model): @@ -41,3 +43,55 @@ class NetlasAPIKey(models.Model): def __str__(self): return self.key + + +class ChaosAPIKey(models.Model): + id = models.AutoField(primary_key=True) + key = models.CharField(max_length=500) + + def __str__(self): + return self.key + + +class HackerOneAPIKey(models.Model): + id = models.AutoField(primary_key=True) + username = models.CharField(max_length=500) + key = models.CharField(max_length=500) + + def __str__(self): + return self.username + + +class InAppNotification(models.Model): + project = models.ForeignKey(Project, on_delete=models.CASCADE, null=True, blank=True) + notification_type = models.CharField(max_length=10, choices=NOTIFICATION_TYPES, default='system') + status = models.CharField(max_length=10, choices=NOTIFICATION_STATUS_TYPES, default='info') + title = models.CharField(max_length=255) + description = models.TextField() + icon = models.CharField(max_length=50) # mdi icon class name + is_read = models.BooleanField(default=False) + created_at = models.DateTimeField(auto_now_add=True) + redirect_link = models.URLField(max_length=255, blank=True, null=True) + open_in_new_tab = models.BooleanField(default=False) + + class Meta: + ordering = ['-created_at'] + + def __str__(self): + if self.notification_type == 'system': + return f"System wide notif: {self.title}" + else: + return f"Project wide notif: {self.project.name}: {self.title}" + + @property + def is_system_wide(self): + # property to determine if the 
notification is system wide or project specific + return self.notification_type == 'system' + + +class UserPreferences(models.Model): + user = models.OneToOneField(User, on_delete=models.CASCADE) + bug_bounty_mode = models.BooleanField(default=True) + + def __str__(self): + return f"{self.user.username}'s preferences" diff --git a/web/dashboard/templates/dashboard/bountyhub_programs.html b/web/dashboard/templates/dashboard/bountyhub_programs.html new file mode 100644 index 000000000..130d1e054 --- /dev/null +++ b/web/dashboard/templates/dashboard/bountyhub_programs.html @@ -0,0 +1,92 @@ +{% extends 'base/base.html' %} +{% load humanize %} +{% load static %} + +{% block title %} +{{platform}} Programs +{% endblock title %} + +{% block custom_js_css_link %} +{% endblock custom_js_css_link %} + +{% block page_title %} +{{platform}} Programs +{% endblock page_title %} + +{% block breadcrumb_title %} + + + +{% endblock breadcrumb_title %} + +{% block main_content %} +
+{% endblock main_content %} + + +{% block page_level_script %} + +{% endblock page_level_script %} diff --git a/web/dashboard/templates/dashboard/index.html b/web/dashboard/templates/dashboard/index.html index cbdeeeb8d..394d1c560 100644 --- a/web/dashboard/templates/dashboard/index.html +++ b/web/dashboard/templates/dashboard/index.html @@ -17,7 +17,7 @@ {% endblock custom_js_css_link %} {% block breadcrumb_title %} -reNgine 2.1.3 +reNgine {{ RENGINE_CURRENT_VERSION }} {% endblock breadcrumb_title %} {% block main_content %} diff --git a/web/dashboard/templates/dashboard/onboarding.html b/web/dashboard/templates/dashboard/onboarding.html index e171f52ef..969ffd230 100644 --- a/web/dashboard/templates/dashboard/onboarding.html +++ b/web/dashboard/templates/dashboard/onboarding.html @@ -7,87 +7,145 @@ +
-
-
- {% csrf_token %} -
-
-

Hey {{user.username}}! Welcome to reNgine

-

You will need to create your first project before you start using reNgine. Projects are now a part of reNgine 2.0! Learn more about projects.

- {% if error %} -
- {{error}} +
+ {% csrf_token %} +
+

Welcome to reNgine

+

Let's set up your environment to get started with reNgine.

+
+ {% if error %} +
+ {{error}} +
+ {% endif %} +
+
+
Project
+

Create your first project to organize and manage your security assessments.

+
+ +
- {% endif %} -
-
-
-

Project

-
- - -
-

Additional User

-

You can add additional users and assign them roles. You may add additional users and also change their roles at any point in future.

-
- - -
-
- - -
-
- - -
-
-
-
-
-

Default API Keys

-

If you have API keys for these services, please enter them here.

-
- -

OpenAI keys will be used to generate vulnerability description, remediation, impact and vulnerability report writing using ChatGPT.

- {% if openai_key %} - - {% else %} - - {% endif %} - This is optional but recommended. -
-
- -

Netlas keys will be used to get whois information and other OSINT data.

- {% if netlas_key %} - - {% else %} - - {% endif %} - This is optional -
-
- -
-
+
+
+
+
+
Additional User
+

Add an additional user and assign them a role. You can manage users and their roles anytime in the future.

+
+ + +
+
+ + +
+
+ + +
+
+
+
+
+
User Preferences
+

Customize your reNgine experience with these preferences.

+
+
+ +
+ Enabling Bug Bounty Mode will:
+ • Activate automatic reporting to HackerOne
+ • Enable the Bounty Hub for importing HackerOne programs
+ • Provide bug bounty specific features and optimizations
+
+
+
+
API Keys
+

Enter your API keys for various services to enhance reNgine's capabilities.

+
+ + + Used for generating vulnerability descriptions, remediation, impact, and report writing using ChatGPT. +
+
+ + + Used to get whois information and other OSINT data. +
+
+ + + Enhances reconnaissance capabilities for Public Bug Bounty Programs. Get your API key +
+
+ + +
+
+ + + Used for importing targets, bookmarked programs, and submitting automated vulnerability reports. Generate your API Token +
+
+
+
+ +
+ - + \ No newline at end of file diff --git a/web/dashboard/urls.py b/web/dashboard/urls.py index cec484a42..0830493f5 100644 --- a/web/dashboard/urls.py +++ b/web/dashboard/urls.py @@ -40,4 +40,8 @@ 'delete/project/', views.delete_project, name='delete_project'), + path( + '/bountyhub/list/programs', + views.list_bountyhub_programs, + name='list_bountyhub_programs'), ] diff --git a/web/dashboard/views.py b/web/dashboard/views.py index 11c688bfc..13e685044 100644 --- a/web/dashboard/views.py +++ b/web/dashboard/views.py @@ -319,6 +319,13 @@ def onboarding(request): context = {} error = '' + # check is any projects exists, then redirect to project list else onboarding + project = Project.objects.first() + + if project: + slug = project.slug + return HttpResponseRedirect(reverse('dashboardIndex', kwargs={'slug': slug})) + if request.method == "POST": project_name = request.POST.get('project_name') slug = slugify(project_name) @@ -327,6 +334,10 @@ def onboarding(request): create_user_role = request.POST.get('create_user_role') key_openai = request.POST.get('key_openai') key_netlas = request.POST.get('key_netlas') + key_chaos = request.POST.get('key_chaos') + key_hackerone = request.POST.get('key_hackerone') + username_hackerone = request.POST.get('username_hackerone') + bug_bounty_mode = request.POST.get('bug_bounty_mode') == 'on' insert_date = timezone.now() @@ -340,18 +351,29 @@ def onboarding(request): error = ' Could not create project, Error: ' + str(e) + # update currently logged in user's preferences for bug bounty mode + user_preferences, _ = UserPreferences.objects.get_or_create(user=request.user) + user_preferences.bug_bounty_mode = bug_bounty_mode + user_preferences.save() + + try: if create_username and create_password and create_user_role: UserModel = get_user_model() - user = UserModel.objects.create_user( + new_user = UserModel.objects.create_user( username=create_username, password=create_password ) - assign_role(user, create_user_role) - except Exception as e: - error = ' Could not create User, Error: ' + str(e) + assign_role(new_user, create_user_role) + # initially bug bounty mode is enabled for new user as selected for current user + new_user_preferences, _ = UserPreferences.objects.get_or_create(user=new_user) + new_user_preferences.bug_bounty_mode = bug_bounty_mode + new_user_preferences.save() + + except Exception as e: + error = ' Could not create User, Error: ' + str(e) if key_openai: openai_api_key = OpenAiAPIKey.objects.first() @@ -369,15 +391,47 @@ def onboarding(request): else: NetlasAPIKey.objects.create(key=key_netlas) + if key_chaos: + chaos_api_key = ChaosAPIKey.objects.first() + if chaos_api_key: + chaos_api_key.key = key_chaos + chaos_api_key.save() + else: + ChaosAPIKey.objects.create(key=key_chaos) + + if key_hackerone and username_hackerone: + hackerone_api_key = HackerOneAPIKey.objects.first() + if hackerone_api_key: + hackerone_api_key.username = username_hackerone + hackerone_api_key.key = key_hackerone + hackerone_api_key.save() + else: + HackerOneAPIKey.objects.create( + username=username_hackerone, + key=key_hackerone + ) + context['error'] = error - # check is any projects exists, then redirect to project list else onboarding - project = Project.objects.first() + context['openai_key'] = OpenAiAPIKey.objects.first() context['netlas_key'] = NetlasAPIKey.objects.first() + context['chaos_key'] = ChaosAPIKey.objects.first() + context['hackerone_key'] = HackerOneAPIKey.objects.first().key + context['hackerone_username'] = 
HackerOneAPIKey.objects.first().username - if project: - slug = project.slug - return HttpResponseRedirect(reverse('dashboardIndex', kwargs={'slug': slug})) + context['user_preferences'], _ = UserPreferences.objects.get_or_create( + user=request.user + ) return render(request, 'dashboard/onboarding.html', context) + + + +def list_bountyhub_programs(request, slug): + context = {} + # get parameter to device which platform is being requested + platform = request.GET.get('platform') or 'hackerone' + context['platform'] = platform.capitalize() + + return render(request, 'dashboard/bountyhub_programs.html', context) \ No newline at end of file diff --git a/web/fixtures/default_scan_engines.yaml b/web/fixtures/default_scan_engines.yaml index 6194b4585..825bcd6a3 100644 --- a/web/fixtures/default_scan_engines.yaml +++ b/web/fixtures/default_scan_engines.yaml @@ -2,11 +2,11 @@ pk: 1 fields: engine_name: Full Scan - yaml_configuration: "subdomain_discovery: {\r\n 'uses_tools': ['subfinder', 'ctfr', - 'sublist3r', 'tlsx', 'oneforall', 'netlas'],\r\n 'enable_http_crawl': true,\r\n - \ 'threads': 30,\r\n 'timeout': 5,\r\n}\r\nhttp_crawl: {}\r\nport_scan: {\r\n - \ 'enable_http_crawl': true,\r\n 'timeout': 5,\r\n # 'exclude_ports': [],\r\n - \ # 'exclude_subdomains': [],\r\n 'ports': ['top-100'],\r\n 'rate_limit': + yaml_configuration: "subdomain_discovery: {\r\n 'uses_tools': ['subfinder', 'chaos', + 'ctfr', 'sublist3r', 'tlsx', 'oneforall', 'netlas'],\r\n 'enable_http_crawl': + true,\r\n 'threads': 30,\r\n 'timeout': 5,\r\n}\r\nhttp_crawl: {}\r\nport_scan: + {\r\n 'enable_http_crawl': true,\r\n 'timeout': 5,\r\n # 'exclude_ports': + [],\r\n # 'exclude_subdomains': [],\r\n 'ports': ['top-100'],\r\n 'rate_limit': 150,\r\n 'threads': 30,\r\n 'passive': false,\r\n # 'use_naabu_config': false,\r\n \ # 'enable_nmap': true,\r\n # 'nmap_cmd': '',\r\n # 'nmap_script': '',\r\n \ # 'nmap_script_args': ''\r\n}\r\nosint: {\r\n 'discover': [\r\n 'emails',\r\n @@ -26,14 +26,15 @@ 'page_title'],\r\n 'enable_http_crawl': true,\r\n 'gf_patterns': ['debug_logic', 'idor', 'interestingEXT', 'interestingparams', 'interestingsubs', 'lfi', 'rce', 'redirect', 'sqli', 'ssrf', 'ssti', 'xss'],\r\n 'ignore_file_extensions': ['png', - 'jpg', 'jpeg', 'gif', 'mp4', 'mpeg', 'mp3'],\r\n 'threads': 30\r\n}\r\nvulnerability_scan: {\r\n - \ 'run_nuclei': true,\r\n 'run_dalfox': true,\r\n 'run_crlfuzz': true,\r\n + 'jpg', 'jpeg', 'gif', 'mp4', 'mpeg', 'mp3'],\r\n 'threads': 30\r\n}\r\nvulnerability_scan: + {\r\n 'run_nuclei': true,\r\n 'run_dalfox': true,\r\n 'run_crlfuzz': true,\r\n \ 'enable_http_crawl': true,\r\n 'concurrency': 50,\r\n 'intensity': 'normal',\r\n \ 'rate_limit': 150,\r\n 'retries': 1,\r\n 'timeout': 5,\r\n 'fetch_gpt_report': - true,\r\n 'nuclei': {\r\n 'use_nuclei_config': false,\r\n 'severities': ['unknown', - 'info', 'low', 'medium', 'high', 'critical']\r\n }\r\n}\r\nwaf_detection: {\r\n\r\n}\r\nscreenshot: - {\r\n 'enable_http_crawl': true,\r\n 'intensity': 'normal',\r\n 'timeout': - 10,\r\n 'threads': 40\r\n}\r\n\r\n# custom_headers: [\"Cookie: Test\"]" + false,\r\n 'nuclei': {\r\n 'use_nuclei_config': false,\r\n 'severities': + ['unknown', 'info', 'low', 'medium', 'high', 'critical']\r\n }\r\n}\r\nwaf_detection: + {\r\n\r\n}\r\nscreenshot: {\r\n 'enable_http_crawl': true,\r\n 'intensity': + 'normal',\r\n 'timeout': 10,\r\n 'threads': 40\r\n}\r\n\r\n# custom_headers: + [\"Cookie: Test\"]" default_engine: true - model: scanEngine.enginetype pk: 2 @@ -41,8 +42,8 @@ engine_name: Subdomain Scan yaml_configuration: 
"subdomain_discovery: {\r\n 'uses_tools': [\r\n 'subfinder', \r\n 'ctfr', \r\n 'sublist3r', \r\n 'tlsx', \r\n 'oneforall', \r\n - \ 'netlas'\r\n ],\r\n 'enable_http_crawl': true,\r\n 'threads': 30,\r\n - \ 'timeout': 5,\r\n}\r\nhttp_crawl: {}" + \ 'netlas', \r\n 'chaos'\r\n ],\r\n 'enable_http_crawl': true,\r\n 'threads': + 30,\r\n 'timeout': 5,\r\n}\r\nhttp_crawl: {}" default_engine: true - model: scanEngine.enginetype pk: 3 @@ -60,11 +61,11 @@ pk: 4 fields: engine_name: Vulnerability Scan - yaml_configuration: "subdomain_discovery: {\r\n 'uses_tools': ['subfinder', 'ctfr', - 'sublist3r', 'tlsx', 'oneforall', 'netlas'],\r\n 'enable_http_crawl': true,\r\n - \ 'threads': 30,\r\n 'timeout': 5,\r\n}\r\nhttp_crawl: {}\r\nosint: {\r\n 'discover': - [\r\n 'emails',\r\n 'metainfo',\r\n 'employees'\r\n ],\r\n - \ 'dorks': [\r\n 'login_pages',\r\n 'admin_panels',\r\n 'dashboard_pages',\r\n + yaml_configuration: "subdomain_discovery: {\r\n 'uses_tools': ['subfinder', 'chaos', + 'ctfr', 'sublist3r', 'tlsx', 'oneforall', 'netlas'],\r\n 'enable_http_crawl': + true,\r\n 'threads': 30,\r\n 'timeout': 5,\r\n}\r\nhttp_crawl: {}\r\nosint: + {\r\n 'discover': [\r\n 'emails',\r\n 'metainfo',\r\n 'employees'\r\n + \ ],\r\n 'dorks': [\r\n 'login_pages',\r\n 'admin_panels',\r\n 'dashboard_pages',\r\n \ 'stackoverflow',\r\n 'social_media',\r\n 'project_management',\r\n \ 'code_sharing',\r\n 'config_files',\r\n 'jenkins',\r\n 'wordpress_files',\r\n \ 'php_error',\r\n 'exposed_documents',\r\n 'db_files',\r\n 'git_exposed'\r\n @@ -72,8 +73,8 @@ {\r\n 'run_nuclei': true,\r\n 'run_dalfox': true,\r\n 'run_crlfuzz': true,\r\n \ 'enable_http_crawl': true,\r\n 'concurrency': 50,\r\n 'intensity': 'normal',\r\n \ 'rate_limit': 150,\r\n 'retries': 1,\r\n 'timeout': 5,\r\n 'fetch_gpt_report': - true,\r\n 'nuclei': {\r\n 'use_nuclei_config': false,\r\n 'severities': ['unknown', - 'info', 'low', 'medium', 'high', 'critical']\r\n }\r\n}" + false,\r\n 'nuclei': {\r\n 'use_nuclei_config': false,\r\n 'severities': + ['unknown', 'info', 'low', 'medium', 'high', 'critical']\r\n }\r\n}" default_engine: true - model: scanEngine.enginetype pk: 5 @@ -90,15 +91,16 @@ pk: 6 fields: engine_name: reNgine Recommended - yaml_configuration: "subdomain_discovery: {\r\n 'uses_tools': ['subfinder', 'ctfr', - 'sublist3r', 'tlsx', 'oneforall', 'netlas'],\r\n 'enable_http_crawl': true,\r\n - \ 'threads': 30,\r\n 'timeout': 5,\r\n}\r\nhttp_crawl: {}\r\nosint: {\r\n 'discover': - [\r\n 'emails',\r\n 'metainfo'\r\n ],\r\n 'dorks': [\r\n 'login_pages',\r\n - \ 'admin_panels',\r\n 'dashboard_pages',\r\n 'config_files',\r\n 'exposed_documents',\r\n - \ ],\r\n 'intensity': 'normal',\r\n 'documents_limit': 50\r\n}\r\nvulnerability_scan: - {\r\n 'run_nuclei': true,\r\n 'run_dalfox': true,\r\n 'run_crlfuzz': true,\r\n - \ 'enable_http_crawl': false,\r\n 'concurrency': 50,\r\n 'intensity': 'normal',\r\n - \ 'rate_limit': 150,\r\n 'retries': 1,\r\n 'timeout': 5,\r\n 'fetch_gpt_report': - true,\r\n 'nuclei': {\r\n 'use_nuclei_config': false,\r\n 'severities': ['low', - 'medium', 'high', 'critical']\r\n }\r\n}" + yaml_configuration: "subdomain_discovery: {\r\n 'uses_tools': ['subfinder', 'chaos', + 'ctfr', 'sublist3r', 'tlsx', 'oneforall', 'netlas'],\r\n 'enable_http_crawl': + true,\r\n 'threads': 30,\r\n 'timeout': 5,\r\n}\r\nhttp_crawl: {}\r\nosint: + {\r\n 'discover': [\r\n 'emails',\r\n 'metainfo'\r\n ],\r\n 'dorks': + [\r\n 'login_pages',\r\n 'admin_panels',\r\n 'dashboard_pages',\r\n + \ 'config_files',\r\n 'exposed_documents',\r\n ],\r\n 'intensity': 
'normal',\r\n + \ 'documents_limit': 50\r\n}\r\nvulnerability_scan: {\r\n 'run_nuclei': true,\r\n + \ 'run_dalfox': true,\r\n 'run_crlfuzz': true,\r\n 'enable_http_crawl': false,\r\n + \ 'concurrency': 50,\r\n 'intensity': 'normal',\r\n 'rate_limit': 150,\r\n + \ 'retries': 1,\r\n 'timeout': 5,\r\n 'fetch_gpt_report': false,\r\n 'nuclei': + {\r\n 'use_nuclei_config': false,\r\n 'severities': ['low', 'medium', + 'high', 'critical']\r\n }\r\n}" default_engine: true diff --git a/web/fixtures/external_tools.yaml b/web/fixtures/external_tools.yaml index 0c2994b64..9c56b4d24 100644 --- a/web/fixtures/external_tools.yaml +++ b/web/fixtures/external_tools.yaml @@ -329,3 +329,20 @@ is_github_cloned: false github_clone_path: null subdomain_gathering_command: null +- model: scanEngine.installedexternaltool + pk: 19 + fields: + logo_url: null + name: chaos + description: Go client to communicate with Project Discovery's Chaos dataset API. + github_url: https://github.com/projectdiscovery/chaos-client + license_url: https://github.com/projectdiscovery/chaos-client/blob/main/LICENSE.md + version_lookup_command: chaos -version + update_command: chaos -up + install_command: go install -v github.com/projectdiscovery/chaos-client/cmd/chaos@latest + version_match_regex: (?i:v)?(\d+(?:\.\d+){2,}) + is_default: true + is_subdomain_gathering: true + is_github_cloned: false + github_clone_path: null + subdomain_gathering_command: null diff --git a/web/reNgine/celery_custom_task.py b/web/reNgine/celery_custom_task.py index 37bbdbbbb..863f77169 100644 --- a/web/reNgine/celery_custom_task.py +++ b/web/reNgine/celery_custom_task.py @@ -67,7 +67,8 @@ def __call__(self, *args, **kwargs): self.subscan_id = ctx.get('subscan_id') self.engine_id = ctx.get('engine_id') self.filename = ctx.get('filename') - self.url_filter = ctx.get('url_filter', '') + self.starting_point_path = ctx.get('starting_point_path', '') + self.excluded_paths = ctx.get('excluded_paths', []) self.results_dir = ctx.get('results_dir', RENGINE_RESULTS) self.yaml_configuration = ctx.get('yaml_configuration', {}) self.out_of_scope_subdomains = ctx.get('out_of_scope_subdomains', []) diff --git a/web/reNgine/charts.py b/web/reNgine/charts.py new file mode 100644 index 000000000..546f09a62 --- /dev/null +++ b/web/reNgine/charts.py @@ -0,0 +1,194 @@ +import base64 +import colorsys + +import plotly.graph_objs as go +from plotly.io import to_image +from django.db.models import Count +from reNgine.definitions import NUCLEI_SEVERITY_MAP + +from startScan.models import * + + + +""" + This file is used to generate the charts for the pdf report. +""" + +def generate_subdomain_chart_by_http_status(subdomains): + """ + Generates a donut chart using plotly for the subdomains based on the http status. + Includes label, count, and percentage inside the chart segments and in the legend. + Args: + subdomains: QuerySet of subdomains. + Returns: + Image as base64 encoded string. + """ + http_statuses = ( + subdomains + .exclude(http_status=0) + .values('http_status') + .annotate(count=Count('http_status')) + .order_by('-count') + ) + http_status_count = [{'http_status': entry['http_status'], 'count': entry['count']} for entry in http_statuses] + + total = sum(entry['count'] for entry in http_status_count) + + labels = [str(entry['http_status']) for entry in http_status_count] + sizes = [entry['count'] for entry in http_status_count] + colors = [get_color_by_http_status(entry['http_status']) for entry in http_status_count] + + text = [f"{label}
<br>{size}<br>
({size/total:.1%})" for label, size in zip(labels, sizes)] + + fig = go.Figure(data=[go.Pie( + labels=labels, + values=sizes, + marker=dict(colors=colors), + hole=0.4, + textinfo="text", + text=text, + textposition="inside", + textfont=dict(size=10), + hoverinfo="label+percent+value" + )]) + + fig.update_layout( + title_text="", + annotations=[dict(text='HTTP Status', x=0.5, y=0.5, font_size=14, showarrow=False)], + showlegend=True, + margin=dict(t=60, b=60, l=60, r=60), + width=700, + height=700, + legend=dict( + font=dict(size=18), + orientation="v", + yanchor="middle", + y=0.5, + xanchor="left", + x=1.05 + ), + ) + + img_bytes = to_image(fig, format="png") + img_base64 = base64.b64encode(img_bytes).decode('utf-8') + return img_base64 + + + +def get_color_by_severity(severity_int): + """ + Returns a color based on the severity level using a modern color scheme. + """ + color_map = { + 4: '#FF4D6A', + 3: '#FF9F43', + 2: '#FFCA3A', + 1: '#4ADE80', + 0: '#4ECDC4', + -1: '#A8A9AD', + } + return color_map.get(severity_int, '#A8A9AD') # Default to gray if severity is unknown + +def generate_vulnerability_chart_by_severity(vulnerabilities): + """ + Generates a donut chart using plotly for the vulnerabilities based on the severity. + Args: + vulnerabilities: QuerySet of Vulnerability objects. + Returns: + Image as base64 encoded string. + """ + severity_counts = ( + vulnerabilities + .values('severity') + .annotate(count=Count('severity')) + .order_by('-severity') + ) + + total = sum(entry['count'] for entry in severity_counts) + + labels = [NUCLEI_REVERSE_SEVERITY_MAP[entry['severity']].capitalize() for entry in severity_counts] + values = [entry['count'] for entry in severity_counts] + colors = [get_color_by_severity(entry['severity']) for entry in severity_counts] + + text = [f"{label}
<br>{value}<br>
({value/total:.1%})" for label, value in zip(labels, values)] + + fig = go.Figure(data=[go.Pie( + labels=labels, + values=values, + marker=dict(colors=colors), + hole=0.4, + textinfo="text", + text=text, + textposition="inside", + textfont=dict(size=12), + hoverinfo="label+percent+value", + )]) + + fig.update_layout( + title_text="", + annotations=[dict(text='Severity', x=0.5, y=0.5, font_size=14, showarrow=False)], + showlegend=True, + margin=dict(t=60, b=60, l=60, r=60), + width=700, + height=700, + legend=dict( + font=dict(size=18), + orientation="v", + yanchor="middle", + y=0.5, + xanchor="left", + x=1.05 + ), + ) + + + img_bytes = to_image(fig, format="png") + img_base64 = base64.b64encode(img_bytes).decode('utf-8') + return img_base64 + + + +def generate_color(base_color, offset): + r, g, b = int(base_color[1:3], 16), int(base_color[3:5], 16), int(base_color[5:7], 16) + factor = 1 + (offset * 0.03) + r, g, b = [min(255, int(c * factor)) for c in (r, g, b)] + return f"#{r:02x}{g:02x}{b:02x}" + + +def get_color_by_http_status(http_status): + """ + Returns the color based on the http status. + Args: + http_status: HTTP status code. + Returns: + Color code. + """ + + status = int(http_status) + + colors = { + 200: "#36a2eb", + 300: "#4bc0c0", + 400: "#ff6384", + 401: "#ff9f40", + 403: "#f27474", + 404: "#ffa1b5", + 429: "#bf7bff", + 500: "#9966ff", + 502: "#8a4fff", + 503: "#c39bd3", + } + + + if status in colors: + return colors[status] + elif 200 <= status < 300: + return generate_color(colors[200], status - 200) + elif 300 <= status < 400: + return generate_color(colors[300], status - 300) + elif 400 <= status < 500: + return generate_color(colors[400], status - 400) + elif 500 <= status < 600: + return generate_color(colors[500], status - 500) + else: + return "#c9cbcf" \ No newline at end of file diff --git a/web/reNgine/common_func.py b/web/reNgine/common_func.py index bab9a9646..ad58a94a8 100644 --- a/web/reNgine/common_func.py +++ b/web/reNgine/common_func.py @@ -6,14 +6,14 @@ import random import shutil import traceback -from time import sleep - +import ipaddress import humanize import redis import requests import tldextract import xmltodict +from time import sleep from bs4 import BeautifulSoup from urllib.parse import urlparse from celery.utils.log import get_task_logger @@ -1034,6 +1034,21 @@ def get_netlas_key(): netlas_key = NetlasAPIKey.objects.all() return netlas_key[0] if netlas_key else None + +def get_chaos_key(): + chaos_key = ChaosAPIKey.objects.all() + return chaos_key[0] if chaos_key else None + + +def get_hackerone_key_username(): + """ + Get the HackerOne API key username from the database. + Returns: a tuple of the username and api key + """ + hackerone_key = HackerOneAPIKey.objects.all() + return (hackerone_key[0].username, hackerone_key[0].key) if hackerone_key else None + + def parse_llm_vulnerability_report(report): report = report.replace('**', '') data = {} @@ -1167,6 +1182,52 @@ def update_or_create_port(port_number, service_name=None, description=None): return port, created +def exclude_urls_by_patterns(exclude_paths, urls): + """ + Filter out URLs based on a list of exclusion patterns provided from user + + Args: + exclude_patterns (list of str): A list of patterns to exclude. + These can be plain path or regex. + urls (list of str): A list of URLs to filter from. + + Returns: + list of str: A new list containing URLs that don't match any exclusion pattern. 
+ """ + logger.info('exclude_urls_by_patterns') + if not exclude_paths: + # if no exclude paths are passed and is empty list return all urls as it is + return urls + + compiled_patterns = [] + for path in exclude_paths: + # treat each path as either regex or plain path + try: + raw_pattern = r"{}".format(path) + compiled_patterns.append(re.compile(raw_pattern)) + except re.error: + compiled_patterns.append(path) + + filtered_urls = [] + for url in urls: + exclude = False + for pattern in compiled_patterns: + if isinstance(pattern, re.Pattern): + if pattern.search(url): + exclude = True + break + else: + if pattern in url: #if the word matches anywhere in url exclude + exclude = True + break + + # if none conditions matches then add the url to filtered urls + if not exclude: + filtered_urls.append(url) + + return filtered_urls + + def get_domain_info_from_db(target): """ Retrieves the Domain object from the database using the target domain name. @@ -1503,3 +1564,85 @@ def save_domain_info_to_db(target, domain_info): domain.save() return domain_info_obj + + +def create_inappnotification( + title, + description, + notification_type=SYSTEM_LEVEL_NOTIFICATION, + project_slug=None, + icon="mdi-bell", + is_read=False, + status='info', + redirect_link=None, + open_in_new_tab=False +): + """ + This function will create an inapp notification + Inapp Notification not to be confused with Notification model + that is used for sending alerts on telegram, slack etc. + Inapp notification is used to show notification on the web app + + Args: + title: str: Title of the notification + description: str: Description of the notification + notification_type: str: Type of the notification, it can be either + SYSTEM_LEVEL_NOTIFICATION or PROJECT_LEVEL_NOTIFICATION + project_slug: str: Slug of the project, if notification is PROJECT_LEVEL_NOTIFICATION + icon: str: Icon of the notification, only use mdi icons + is_read: bool: Whether the notification is read or not, default is False + status: str: Status of the notification (success, info, warning, error), default is info + redirect_link: str: Link to redirect when notification is clicked + open_in_new_tab: bool: Whether to open the redirect link in a new tab, default is False + + Returns: + ValueError: if error + InAppNotification: InAppNotification object if successful + """ + logger.info('Creating InApp Notification with title: %s', title) + if notification_type not in [SYSTEM_LEVEL_NOTIFICATION, PROJECT_LEVEL_NOTIFICATION]: + raise ValueError("Invalid notification type") + + if status not in [choice[0] for choice in NOTIFICATION_STATUS_TYPES]: + raise ValueError("Invalid notification status") + + project = None + if notification_type == PROJECT_LEVEL_NOTIFICATION: + if not project_slug: + raise ValueError("Project slug is required for project level notification") + try: + project = Project.objects.get(slug=project_slug) + except Project.DoesNotExist as e: + raise ValueError(f"No project exists: {e}") + + notification = InAppNotification( + title=title, + description=description, + notification_type=notification_type, + project=project, + icon=icon, + is_read=is_read, + status=status, + redirect_link=redirect_link, + open_in_new_tab=open_in_new_tab + ) + notification.save() + return notification + +def get_ip_info(ip_address): + is_ipv4 = bool(validators.ipv4(ip_address)) + is_ipv6 = bool(validators.ipv6(ip_address)) + ip_data = None + if is_ipv4: + ip_data = ipaddress.IPv4Address(ip_address) + elif is_ipv6: + ip_data = ipaddress.IPv6Address(ip_address) + else: + 
return None + return ip_data + +def get_ips_from_cidr_range(target): + try: + return [str(ip) for ip in ipaddress.IPv4Network(target, False)] + except Exception as e: + logger.error(f'{target} is not a valid CIDR range. Skipping.') diff --git a/web/reNgine/context_processors.py b/web/reNgine/context_processors.py index 8fefeae09..c2255ef9b 100644 --- a/web/reNgine/context_processors.py +++ b/web/reNgine/context_processors.py @@ -1,5 +1,6 @@ from dashboard.models import * -import requests +from django.conf import settings + def projects(request): projects = Project.objects.all() @@ -13,8 +14,12 @@ def projects(request): 'current_project': project } -def misc(request): - externalIp = requests.get('https://checkip.amazonaws.com').text.strip() +def version_context(request): return { - 'external_ip': externalIp - } \ No newline at end of file + 'RENGINE_CURRENT_VERSION': settings.RENGINE_CURRENT_VERSION + } + +def user_preferences(request): + if hasattr(request, 'user_preferences'): + return {'user_preferences': request.user_preferences} + return {} \ No newline at end of file diff --git a/web/reNgine/database_utils.py b/web/reNgine/database_utils.py new file mode 100644 index 000000000..1faaa3580 --- /dev/null +++ b/web/reNgine/database_utils.py @@ -0,0 +1,182 @@ +import re +import validators +import logging + +from urllib.parse import urlparse +from django.db import transaction +from django.utils import timezone + +from dashboard.models import Project +from targetApp.models import Organization, Domain +from startScan.models import EndPoint, IpAddress +from reNgine.settings import LOGGING +from reNgine.common_func import * + +logger = logging.getLogger(__name__) + +@transaction.atomic +def bulk_import_targets( + targets: list[dict], + project_slug: str, + organization_name: str = None, + org_description: str = None, + h1_team_handle: str = None): + """ + Used to import targets in reNgine + + Args: + targets (list[dict]): list of targets to import, [{'target': 'target1.com', 'description': 'desc1'}, ...] + project_slug (str): slug of the project + organization_name (str): name of the organization to tag these targets + org_description (str): description of the organization + h1_team_handle (str): hackerone team handle (if imported from hackerone) + + Returns: + bool: True if new targets are imported, False otherwise + """ + new_targets_imported = False + project = Project.objects.get(slug=project_slug) + + all_targets = [] + + for target in targets: + name = target.get('name', '').strip() + description = target.get('description', '') + + if not name: + logger.warning(f"Skipping target with empty name") + continue + + is_domain = validators.domain(name) + is_ip = validators.ipv4(name) or validators.ipv6(name) + is_url = validators.url(name) + + logger.info(f'{name} | Domain? {is_domain} | IP? {is_ip} | URL? 
{is_url}') + + if is_domain: + target_obj = store_domain(name, project, description, h1_team_handle) + elif is_url: + target_obj = store_url(name, project, description, h1_team_handle) + elif is_ip: + target_obj = store_ip(name, project, description, h1_team_handle) + else: + logger.warning(f'{name} is not supported by reNgine') + continue + + if target_obj: + all_targets.append(target_obj) + new_targets_imported = True + + if organization_name and all_targets: + org_name = organization_name.strip() + org, created = Organization.objects.get_or_create( + name=org_name, + defaults={ + 'project': project, + 'description': org_description or '', + 'insert_date': timezone.now() + } + ) + + if not created: + org.project = project + if org_description: + org.description = org_description + if org.insert_date is None: + org.insert_date = timezone.now() + org.save() + + # Associate all targets with the organization + for target in all_targets: + org.domains.add(target) + + logger.info(f"{'Created' if created else 'Updated'} organization {org_name} with {len(all_targets)} targets") + + return new_targets_imported + + + +def remove_wildcard(input_string): + """ + Remove wildcard (*) from the beginning of the input string. + In future, we may find the meaning of wildcards and try to use in target configs such as out of scope etc + """ + return re.sub(r'^\*\.', '', input_string) + +def store_domain(domain_name, project, description, h1_team_handle): + """ + This function is used to store domain in reNgine + """ + existing_domain = Domain.objects.filter(name=domain_name).first() + + if existing_domain: + logger.info(f'Domain {domain_name} already exists. skipping.') + return + + current_time = timezone.now() + + new_domain = Domain.objects.create( + name=domain_name, + description=description, + h1_team_handle=h1_team_handle, + project=project, + insert_date=current_time + ) + + logger.info(f'Added new domain {new_domain.name}') + + return new_domain + +def store_url(url, project, description, h1_team_handle): + parsed_url = urlparse(url) + http_url = parsed_url.geturl() + domain_name = parsed_url.netloc + + domain = Domain.objects.filter(name=domain_name).first() + + if domain: + logger.info(f'Domain {domain_name} already exists. skipping...') + + else: + domain = Domain.objects.create( + name=domain_name, + description=description, + h1_team_handle=h1_team_handle, + project=project, + insert_date=timezone.now() + ) + logger.info(f'Added new domain {domain.name}') + + EndPoint.objects.get_or_create( + target_domain=domain, + http_url=sanitize_url(http_url) + ) + + return domain + +def store_ip(ip_address, project, description, h1_team_handle): + + domain = Domain.objects.filter(name=ip_address).first() + + if domain: + logger.info(f'Domain {ip_address} already exists. 
skipping...') + else: + domain = Domain.objects.create( + name=ip_address, + description=description, + h1_team_handle=h1_team_handle, + project=project, + insert_date=timezone.now(), + ip_address_cidr=ip_address + ) + logger.info(f'Added new domain {domain.name}') + + ip_data = get_ip_info(ip_address) + ip_data = get_ip_info(ip_address) + ip, created = IpAddress.objects.get_or_create(address=ip_address) + ip.reverse_pointer = ip_data.reverse_pointer + ip.is_private = ip_data.is_private + ip.version = ip_data.version + ip.save() + + return domain \ No newline at end of file diff --git a/web/reNgine/definitions.py b/web/reNgine/definitions.py index 5a9f6f0bf..abe599156 100644 --- a/web/reNgine/definitions.py +++ b/web/reNgine/definitions.py @@ -423,6 +423,22 @@ '.pdf', ] +# Default Excluded Paths during Initate Scan +# Mostly static files and directories +DEFAULT_EXCLUDED_PATHS = [ + # Static assets (using regex patterns) + '/static/.*', + '/assets/.*', + '/css/.*', + '/js/.*', + '/images/.*', + '/img/.*', + '/fonts/.*', + + # File types (using regex patterns) + '.*\.ico', +] + # Roles and Permissions PERM_MODIFY_SYSTEM_CONFIGURATIONS = 'modify_system_configurations' PERM_MODIFY_SCAN_CONFIGURATIONS = 'modify_scan_configurations' @@ -532,3 +548,21 @@ # OSINT GooFuzz Path GOFUZZ_EXEC_PATH = '/usr/src/github/goofuzz/GooFuzz' + + +# In App Notification Definitions +SYSTEM_LEVEL_NOTIFICATION = 'system' +PROJECT_LEVEL_NOTIFICATION = 'project' +NOTIFICATION_TYPES = ( + ('system', SYSTEM_LEVEL_NOTIFICATION), + ('project', PROJECT_LEVEL_NOTIFICATION), +) +NOTIFICATION_STATUS_TYPES = ( + ('success', 'Success'), + ('info', 'Informational'), + ('warning', 'Warning'), + ('error', 'Error'), +) + +# Bountyhub Definitions +HACKERONE_ALLOWED_ASSET_TYPES = ["WILDCARD", "DOMAIN", "IP_ADDRESS", "URL"] \ No newline at end of file diff --git a/web/reNgine/middleware.py b/web/reNgine/middleware.py new file mode 100644 index 000000000..e301bb917 --- /dev/null +++ b/web/reNgine/middleware.py @@ -0,0 +1,10 @@ +from dashboard.models import UserPreferences + +class UserPreferencesMiddleware: + def __init__(self, get_response): + self.get_response = get_response + + def __call__(self, request): + if request.user.is_authenticated: + request.user_preferences, created = UserPreferences.objects.get_or_create(user=request.user) + return self.get_response(request) diff --git a/web/reNgine/settings.py b/web/reNgine/settings.py index 0924a6391..408a6554f 100644 --- a/web/reNgine/settings.py +++ b/web/reNgine/settings.py @@ -43,6 +43,21 @@ ALLOWED_HOSTS = ['*'] SECRET_KEY = first_run(SECRET_FILE, BASE_DIR) +# Rengine version +# reads current version from a file called .version +VERSION_FILE = os.path.join(BASE_DIR, '.version') +if os.path.exists(VERSION_FILE): + with open(VERSION_FILE, 'r') as f: + _version = f.read().strip() +else: + _version = 'unknown' + +# removes v from _version if exists +if _version.startswith('v'): + _version = _version[1:] + +RENGINE_CURRENT_VERSION = _version + # Databases DATABASES = { 'default': { @@ -90,6 +105,7 @@ 'login_required.middleware.LoginRequiredMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', + 'reNgine.middleware.UserPreferencesMiddleware', ] TEMPLATES = [ { @@ -103,7 +119,8 @@ 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', 'reNgine.context_processors.projects', - 'reNgine.context_processors.misc' + 'reNgine.context_processors.version_context', + 
'reNgine.context_processors.user_preferences', ], }, }] @@ -303,6 +320,26 @@ 'handlers': ['task'], 'level': 'DEBUG' if DEBUG else 'INFO', 'propagate': False + }, + 'api.views': { + 'handlers': ['console'], + 'level': 'DEBUG' if DEBUG else 'INFO', + 'propagate': False } }, } + +''' +File upload settings +''' +DATA_UPLOAD_MAX_NUMBER_FIELDS = None + +''' + Caching Settings +''' +CACHES = { + 'default': { + 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', + 'TIMEOUT': 60 * 30, # 30 minutes caching will be used + } +} \ No newline at end of file diff --git a/web/reNgine/tasks.py b/web/reNgine/tasks.py index 66a45498d..3fc3e5b89 100644 --- a/web/reNgine/tasks.py +++ b/web/reNgine/tasks.py @@ -20,6 +20,7 @@ from django.db.models import Count from dotted_dict import DottedDict from django.utils import timezone +from django.shortcuts import get_object_or_404 from pycvesearch import CVESearch from metafinder.extractor import extract_metadata_from_google_search @@ -57,7 +58,9 @@ def initiate_scan( imported_subdomains=[], out_of_scope_subdomains=[], initiated_by_id=None, - url_filter=''): + starting_point_path='', + excluded_paths=[], + ): """Initiate a new scan. Args: @@ -68,8 +71,9 @@ def initiate_scan( results_dir (str): Results directory. imported_subdomains (list): Imported subdomains. out_of_scope_subdomains (list): Out-of-scope subdomains. - url_filter (str): URL path. Default: ''. + starting_point_path (str): URL path. Default: '' Defined where to start the scan. initiated_by (int): User ID initiating the scan. + excluded_paths (list): Excluded paths. Default: [], url paths to exclude from scan. """ logger.info('Initiating scan on celery') scan = None @@ -89,7 +93,7 @@ def initiate_scan( domain.save() # Get path filter - url_filter = url_filter.rstrip('/') + starting_point_path = starting_point_path.rstrip('/') # for live scan scan history id is passed as scan_history_id # and no need to create scan_history object @@ -111,6 +115,12 @@ def initiate_scan( scan.tasks = engine.tasks scan.results_dir = f'{results_dir}/{domain.name}_{scan.id}' add_gf_patterns = gf_patterns and 'fetch_url' in engine.tasks + # add configs to scan object, cfg_ prefix is used to avoid conflicts with other scan object fields + scan.cfg_starting_point_path = starting_point_path + scan.cfg_excluded_paths = excluded_paths + scan.cfg_out_of_scope_subdomains = out_of_scope_subdomains + scan.cfg_imported_subdomains = imported_subdomains + if add_gf_patterns: scan.used_gf_patterns = ','.join(gf_patterns) scan.save() @@ -124,7 +134,8 @@ def initiate_scan( 'engine_id': engine_id, 'domain_id': domain.id, 'results_dir': scan.results_dir, - 'url_filter': url_filter, + 'starting_point_path': starting_point_path, + 'excluded_paths': excluded_paths, 'yaml_configuration': config, 'out_of_scope_subdomains': out_of_scope_subdomains } @@ -148,7 +159,7 @@ def initiate_scan( # If enable_http_crawl is set, create an initial root HTTP endpoint so that # HTTP crawling can start somewhere - http_url = f'{domain.name}{url_filter}' if url_filter else domain.name + http_url = f'{domain.name}{starting_point_path}' if starting_point_path else domain.name endpoint, _ = save_endpoint( http_url, ctx=ctx, @@ -224,7 +235,9 @@ def initiate_subscan( engine_id=None, scan_type=None, results_dir=RENGINE_RESULTS, - url_filter=''): + starting_point_path='', + excluded_paths=[], + ): """Initiate a new subscan. Args: @@ -233,7 +246,8 @@ def initiate_subscan( engine_id (int): Engine ID. scan_type (int): Scan type (periodic, live). 
results_dir (str): Results directory. - url_filter (str): URL path. Default: '' + starting_point_path (str): URL path. Default: '' + excluded_paths (list): Excluded paths. Default: [], url paths to exclude from scan. """ # Get Subdomain, Domain and ScanHistory @@ -291,12 +305,13 @@ def initiate_subscan( 'subdomain_id': subdomain.id, 'yaml_configuration': config, 'results_dir': results_dir, - 'url_filter': url_filter + 'starting_point_path': starting_point_path, + 'excluded_paths': excluded_paths, } # Create initial endpoints in DB: find domain HTTP endpoint so that HTTP # crawling can start somewhere - base_url = f'{subdomain.name}{url_filter}' if url_filter else subdomain.name + base_url = f'{subdomain.name}{starting_point_path}' if starting_point_path else subdomain.name endpoint, _ = save_endpoint( base_url, crawl=enable_http_crawl, @@ -398,8 +413,8 @@ def subdomain_discovery( if not host: host = self.subdomain.name if self.subdomain else self.domain.name - if self.url_filter: - logger.warning(f'Ignoring subdomains scan as an URL path filter was passed ({self.url_filter}).') + if self.starting_point_path: + logger.warning(f'Ignoring subdomains scan as an URL path filter was passed ({self.starting_point_path}).') return # Config @@ -412,6 +427,7 @@ def subdomain_discovery( custom_subdomain_tools = [tool.name.lower() for tool in InstalledExternalTool.objects.filter(is_default=False).filter(is_subdomain_gathering=True)] send_subdomain_changes, send_interesting = False, False notif = Notification.objects.first() + subdomain_scope_checker = SubdomainScopeChecker(self.out_of_scope_subdomains) if notif: send_subdomain_changes = notif.send_subdomain_changes_notif send_interesting = notif.send_interesting_notif @@ -482,6 +498,15 @@ def subdomain_discovery( cmd_extract = f"grep -oE '([a-zA-Z0-9]([-a-zA-Z0-9]*[a-zA-Z0-9])?\.)+{host}'" cmd += f' | {cmd_extract} > {results_file}' + elif tool == 'chaos': + # we need to find api key if not ignore + chaos_key = get_chaos_key() + if not chaos_key: + logger.error('Chaos API key not found. Skipping.') + continue + results_file = self.results_dir + '/subdomains_chaos.txt' + cmd = f'chaos -d {host} -silent -key {chaos_key} -o {results_file}' + elif tool in custom_subdomain_tools: tool_query = InstalledExternalTool.objects.filter(name__icontains=tool.lower()) if not tool_query.exists(): @@ -557,7 +582,7 @@ def subdomain_discovery( if valid_url: subdomain_name = urlparse(subdomain_name).netloc - if subdomain_name in self.out_of_scope_subdomains: + if subdomain_scope_checker.is_out_of_scope(subdomain_name): logger.error(f'Subdomain {subdomain_name} is out of scope. Skipping.') continue @@ -1921,7 +1946,7 @@ def fetch_url(self, urls=[], ctx={}, description=None): if base_url and urlpath: subdomain = urlparse(base_url) - url = f'{subdomain.scheme}://{subdomain.netloc}{self.url_filter}' + url = f'{subdomain.scheme}://{subdomain.netloc}{self.starting_point_path}' if not validators.url(url): logger.warning(f'Invalid URL "{url}". 
Skipping.') @@ -1930,8 +1955,12 @@ def fetch_url(self, urls=[], ctx={}, description=None): all_urls.append(url) # Filter out URLs if a path filter was passed - if self.url_filter: - all_urls = [url for url in all_urls if self.url_filter in url] + if self.starting_point_path: + all_urls = [url for url in all_urls if self.starting_point_path in url] + + # if exclude_paths is found, then remove urls matching those paths + if self.excluded_paths: + all_urls = exclude_urls_by_patterns(self.excluded_paths, all_urls) # Write result to output path with open(self.output_path, 'w') as f: @@ -2198,13 +2227,24 @@ def nuclei_individual_severity_module(self, cmd, severity, enable_http_crawl, sh fields, add_meta_info=False) - # Send report to hackerone - hackerone_query = Hackerone.objects.all() + """ + Send report to hackerone when + 1. send_report is True from Hackerone model in ScanEngine + 2. username and key is set in HackerOneAPIKey in Dashboard + 3. severity is not info or low + """ + hackerone_query = Hackerone.objects.filter(send_report=True) + api_key_check_query = HackerOneAPIKey.objects.filter( + Q(username__isnull=False) & Q(key__isnull=False) + ) + send_report = ( hackerone_query.exists() and + api_key_check_query.exists() and severity not in ('info', 'low') and vuln.target_domain.h1_team_handle ) + if send_report: hackerone = hackerone_query.first() if hackerone.send_critical and severity == 'critical': @@ -2827,8 +2867,9 @@ def http_crawl( input_path = f'{self.results_dir}/httpx_input.txt' history_file = f'{self.results_dir}/commands.txt' if urls: # direct passing URLs to check - if self.url_filter: - urls = [u for u in urls if self.url_filter in u] + if self.starting_point_path: + urls = [u for u in urls if self.starting_point_path in u] + with open(input_path, 'w') as f: f.write('\n'.join(urls)) else: @@ -2839,6 +2880,10 @@ def http_crawl( ) # logger.debug(urls) + # exclude urls by pattern + if self.excluded_paths: + urls = exclude_urls_by_patterns(self.excluded_paths, urls) + # If no URLs found, skip it if not urls: return @@ -3045,12 +3090,6 @@ def send_scan_notif( subscan_id (int, optional): SuScan id. engine_id (int, optional): EngineType id. 
""" - - # Skip send if notification settings are not configured - notif = Notification.objects.first() - if not (notif and notif.send_scan_status_notif): - return - # Get domain, engine, scan_history objects engine = EngineType.objects.filter(pk=engine_id).first() scan = ScanHistory.objects.filter(pk=scan_history_id).first() @@ -3061,6 +3100,7 @@ def send_scan_notif( url = get_scan_url(scan_history_id, subscan_id) title = get_scan_title(scan_history_id, subscan_id) fields = get_scan_fields(engine, scan, subscan, status, tasks) + severity = None msg = f'{title} {status}\n' msg += '\n🡆 '.join(f'**{k}:** {v}' for k, v in fields.items()) @@ -3074,12 +3114,68 @@ def send_scan_notif( } logger.warning(f'Sending notification "{title}" [{severity}]') - # Send notification - send_notif( - msg, - scan_history_id, - subscan_id, - **opts) + # inapp notification has to be sent eitherways + generate_inapp_notification(scan, subscan, status, engine, fields) + + notif = Notification.objects.first() + + if notif and notif.send_scan_status_notif: + # Send notification + send_notif( + msg, + scan_history_id, + subscan_id, + **opts) + +def generate_inapp_notification(scan, subscan, status, engine, fields): + scan_type = "Subscan" if subscan else "Scan" + domain = subscan.domain.name if subscan else scan.domain.name + duration_msg = None + redirect_link = None + + if status == 'RUNNING': + title = f"{scan_type} Started" + description = f"{scan_type} has been initiated for {domain}" + icon = "mdi-play-circle-outline" + notif_status = 'info' + elif status == 'SUCCESS': + title = f"{scan_type} Completed" + description = f"{scan_type} was successful for {domain}" + icon = "mdi-check-circle-outline" + notif_status = 'success' + duration_msg = f'Completed in {fields.get("Duration")}' + elif status == 'ABORTED': + title = f"{scan_type} Aborted" + description = f"{scan_type} was aborted for {domain}" + icon = "mdi-alert-circle-outline" + notif_status = 'warning' + duration_msg = f'Aborted in {fields.get("Duration")}' + elif status == 'FAILED': + title = f"{scan_type} Failed" + description = f"{scan_type} has failed for {domain}" + icon = "mdi-close-circle-outline" + notif_status = 'error' + duration_msg = f'Failed in {fields.get("Duration")}' + + description += f"
Engine: {engine.engine_name if engine else 'N/A'}" + slug = scan.domain.project.slug if scan else subscan.history.domain.project.slug + if duration_msg: + description += f"
{duration_msg}" + + if status != 'RUNNING': + redirect_link = f"/scan/{slug}/detail/{scan.id}" if scan else None + + create_inappnotification( + title=title, + description=description, + notification_type='project', + project_slug=slug, + icon=icon, + is_read=False, + status=notif_status, + redirect_link=redirect_link, + open_in_new_tab=False + ) @app.task(name='send_task_notif', bind=False, queue='send_task_notif_queue') @@ -3206,58 +3302,64 @@ def send_hackerone_report(vulnerability_id): """ vulnerability = Vulnerability.objects.get(id=vulnerability_id) severities = {v: k for k,v in NUCLEI_SEVERITY_MAP.items()} - headers = { - 'Content-Type': 'application/json', - 'Accept': 'application/json' + + # can only send vulnerability report if team_handle exists and send_report is True and api_key exists + hackerone = Hackerone.objects.filter(send_report=True).first() + api_key = HackerOneAPIKey.objects.filter(username__isnull=False, key__isnull=False).first() + + if not (vulnerability.target_domain.h1_team_handle and hackerone and api_key): + logger.error('Missing required data: team handle, Hackerone config, or API key.') + return {"status_code": 400, "message": "Missing required data"} + + severity_value = severities[vulnerability.severity] + tpl = hackerone.report_template or "" + + tpl_vars = { + '{vulnerability_name}': vulnerability.name, + '{vulnerable_url}': vulnerability.http_url, + '{vulnerability_severity}': severity_value, + '{vulnerability_description}': vulnerability.description or '', + '{vulnerability_extracted_results}': vulnerability.extracted_results or '', + '{vulnerability_reference}': vulnerability.reference or '', } - # can only send vulnerability report if team_handle exists - if len(vulnerability.target_domain.h1_team_handle) !=0: - hackerone_query = Hackerone.objects.all() - if hackerone_query.exists(): - hackerone = Hackerone.objects.first() - severity_value = severities[vulnerability.severity] - tpl = hackerone.report_template - - # Replace syntax of report template with actual content - tpl = tpl.replace('{vulnerability_name}', vulnerability.name) - tpl = tpl.replace('{vulnerable_url}', vulnerability.http_url) - tpl = tpl.replace('{vulnerability_severity}', severity_value) - tpl = tpl.replace('{vulnerability_description}', vulnerability.description if vulnerability.description else '') - tpl = tpl.replace('{vulnerability_extracted_results}', vulnerability.extracted_results if vulnerability.extracted_results else '') - tpl = tpl.replace('{vulnerability_reference}', vulnerability.reference if vulnerability.reference else '') - - data = { - "data": { - "type": "report", - "attributes": { - "team_handle": vulnerability.target_domain.h1_team_handle, - "title": f'{vulnerability.name} found in {vulnerability.http_url}', - "vulnerability_information": tpl, - "severity_rating": severity_value, - "impact": "More information about the impact and vulnerability can be found here: \n" + vulnerability.reference if vulnerability.reference else "NA", - } - } + # Replace syntax of report template with actual content + for key, value in tpl_vars.items(): + tpl = tpl.replace(key, value) + + data = { + "data": { + "type": "report", + "attributes": { + "team_handle": vulnerability.target_domain.h1_team_handle, + "title": f'{vulnerability.name} found in {vulnerability.http_url}', + "vulnerability_information": tpl, + "severity_rating": severity_value, + "impact": "More information about the impact and vulnerability can be found here: \n" + vulnerability.reference if vulnerability.reference 
else "NA", } + } + } - r = requests.post( - 'https://api.hackerone.com/v1/hackers/reports', - auth=(hackerone.username, hackerone.api_key), - json=data, - headers=headers - ) - response = r.json() - status_code = r.status_code - if status_code == 201: - vulnerability.hackerone_report_id = response['data']["id"] - vulnerability.open_status = False - vulnerability.save() - return status_code + headers = { + 'Content-Type': 'application/json', + 'Accept': 'application/json' + } - else: - logger.error('No team handle found.') - status_code = 111 - return status_code + r = requests.post( + 'https://api.hackerone.com/v1/hackers/reports', + auth=(api_key.username, api_key.key), + json=data, + headers=headers + ) + response = r.json() + status_code = r.status_code + if status_code == 201: + vulnerability.hackerone_report_id = response['data']["id"] + vulnerability.open_status = False + vulnerability.save() + return {"status_code": r.status_code, "message": "Report sent successfully"} + logger.error(f"Error sending report to HackerOne") + return {"status_code": r.status_code, "message": response} #-------------# @@ -4404,6 +4506,7 @@ def save_subdomain(subdomain_name, ctx={}): scan_id = ctx.get('scan_history_id') subscan_id = ctx.get('subscan_id') out_of_scope_subdomains = ctx.get('out_of_scope_subdomains', []) + subdomain_checker = SubdomainScopeChecker(out_of_scope_subdomains) valid_domain = ( validators.domain(subdomain_name) or validators.ipv4(subdomain_name) or @@ -4413,7 +4516,7 @@ def save_subdomain(subdomain_name, ctx={}): logger.error(f'{subdomain_name} is not an invalid domain. Skipping.') return None, False - if subdomain_name in out_of_scope_subdomains: + if subdomain_checker.is_out_of_scope(subdomain_name): logger.error(f'{subdomain_name} is out-of-scope. Skipping.') return None, False @@ -4539,7 +4642,7 @@ def query_reverse_whois(lookup_keyword): dict: Reverse WHOIS information. """ - return get_associated_domains(lookup_keyword) + return reverse_whois(lookup_keyword) @app.task(name='query_ip_history', bind=False, queue='query_ip_history_queue') diff --git a/web/reNgine/utilities.py b/web/reNgine/utilities.py index c63fef975..9f9eee92f 100644 --- a/web/reNgine/utilities.py +++ b/web/reNgine/utilities.py @@ -1,3 +1,4 @@ +import re import os import validators @@ -113,4 +114,62 @@ def is_valid_url(url, validate_only_http_scheme=True): if validate_only_http_scheme: return url.startswith('http://') or url.startswith('https://') return True - return False \ No newline at end of file + return False + + +class SubdomainScopeChecker: + """ + SubdomainScopeChecker is a utility class to check if a subdomain is in scope or not. + it supports both regex and string matching. + """ + + def __init__(self, patterns): + self.regex_patterns = set() + self.plain_patterns = set() + self.load_patterns(patterns) + + def load_patterns(self, patterns): + """ + Load patterns into the checker. + + Args: + patterns (list): List of patterns to load. + Returns: + None + """ + for pattern in patterns: + # skip empty patterns + if not pattern: + continue + try: + self.regex_patterns.add(re.compile(pattern, re.IGNORECASE)) + except re.error: + self.plain_patterns.add(pattern.lower()) + + def is_out_of_scope(self, subdomain): + """ + Check if a subdomain is out of scope. + + Args: + subdomain (str): The subdomain to check. + Returns: + bool: True if the subdomain is out of scope, False otherwise. 
+ """ + subdomain = subdomain.lower() # though we wont encounter this, but just in case + if subdomain in self.plain_patterns: + return True + return any(pattern.search(subdomain) for pattern in self.regex_patterns) + + + +def sorting_key(subdomain): + # sort subdomains based on their http status code with priority 200 < 300 < 400 < rest + status = subdomain['http_status'] + if 200 <= status <= 299: + return 1 + elif 300 <= status <= 399: + return 2 + elif 400 <= status <= 499: + return 3 + else: + return 4 \ No newline at end of file diff --git a/web/recon_note/migrations/0001_initial.py b/web/recon_note/migrations/0001_initial.py index c9dff4ce9..b4763f571 100644 --- a/web/recon_note/migrations/0001_initial.py +++ b/web/recon_note/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Django 3.2.23 on 2024-06-19 02:43 +# Generated by Django 3.2.23 on 2024-09-06 01:47 from django.db import migrations, models import django.db.models.deletion @@ -9,8 +9,8 @@ class Migration(migrations.Migration): initial = True dependencies = [ - ('startScan', '0001_initial'), ('dashboard', '0001_initial'), + ('startScan', '0001_initial'), ] operations = [ diff --git a/web/requirements.txt b/web/requirements.txt index 9b53e163f..6fff50162 100644 --- a/web/requirements.txt +++ b/web/requirements.txt @@ -40,3 +40,5 @@ weasyprint==53.3 wafw00f==2.2.0 xmltodict==0.13.0 django-environ==0.11.2 +plotly==5.23.0 +kaleido \ No newline at end of file diff --git a/web/scanEngine/admin.py b/web/scanEngine/admin.py index b2f2e0c10..edca16621 100644 --- a/web/scanEngine/admin.py +++ b/web/scanEngine/admin.py @@ -9,3 +9,4 @@ admin.site.register(Notification) admin.site.register(VulnerabilityReportSetting) admin.site.register(InstalledExternalTool) +admin.site.register(Hackerone) \ No newline at end of file diff --git a/web/scanEngine/forms.py b/web/scanEngine/forms.py index 4eddf0d92..2317351eb 100644 --- a/web/scanEngine/forms.py +++ b/web/scanEngine/forms.py @@ -176,12 +176,14 @@ class Meta: slack_hook_url = forms.CharField( required=False, - widget=forms.TextInput( + widget=forms.PasswordInput( attrs={ - "class": "form-control", + "class": "form-control h-100", "id": "slack_hook_url", "placeholder": "https://hooks.slack.com/services/T00000000/B00000000/XXXXXXXXXXXXXXXXXXXXXXXX", - })) + }, + render_value=True + )) send_to_lark = forms.BooleanField( required=False, @@ -193,12 +195,14 @@ class Meta: lark_hook_url = forms.CharField( required=False, - widget=forms.TextInput( + widget=forms.PasswordInput( attrs={ - "class": "form-control", + "class": "form-control h-100", "id": "lark_hook_url", "placeholder": "https://open.larksuite.com/open-apis/bot/v2/hook/XXXXXXXXXXXXXXXXXXXXXXXX", - })) + }, + render_value=True + )) send_to_discord = forms.BooleanField( required=False, @@ -210,12 +214,14 @@ class Meta: discord_hook_url = forms.CharField( required=False, - widget=forms.TextInput( + widget=forms.PasswordInput( attrs={ - "class": "form-control", + "class": "form-control h-100", "id": "discord_hook_url", "placeholder": "https://discord.com/api/webhooks/000000000000000000/XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX", - })) + }, + render_value=True + )) send_to_telegram = forms.BooleanField( required=False, @@ -227,21 +233,25 @@ class Meta: telegram_bot_token = forms.CharField( required=False, - widget=forms.TextInput( + widget=forms.PasswordInput( attrs={ - "class": "form-control", + "class": "form-control h-100", "id": "telegram_bot_token", "placeholder": "Bot Token", - })) + }, + 
diff --git a/web/recon_note/migrations/0001_initial.py b/web/recon_note/migrations/0001_initial.py
index c9dff4ce9..b4763f571 100644
--- a/web/recon_note/migrations/0001_initial.py
+++ b/web/recon_note/migrations/0001_initial.py
@@ -1,4 +1,4 @@
-# Generated by Django 3.2.23 on 2024-06-19 02:43
+# Generated by Django 3.2.23 on 2024-09-06 01:47

 from django.db import migrations, models
 import django.db.models.deletion
@@ -9,8 +9,8 @@ class Migration(migrations.Migration):
     initial = True

     dependencies = [
-        ('startScan', '0001_initial'),
         ('dashboard', '0001_initial'),
+        ('startScan', '0001_initial'),
     ]

     operations = [
diff --git a/web/requirements.txt b/web/requirements.txt
index 9b53e163f..6fff50162 100644
--- a/web/requirements.txt
+++ b/web/requirements.txt
@@ -40,3 +40,5 @@ weasyprint==53.3
 wafw00f==2.2.0
 xmltodict==0.13.0
 django-environ==0.11.2
+plotly==5.23.0
+kaleido
\ No newline at end of file
diff --git a/web/scanEngine/admin.py b/web/scanEngine/admin.py
index b2f2e0c10..edca16621 100644
--- a/web/scanEngine/admin.py
+++ b/web/scanEngine/admin.py
@@ -9,3 +9,4 @@
 admin.site.register(Notification)
 admin.site.register(VulnerabilityReportSetting)
 admin.site.register(InstalledExternalTool)
+admin.site.register(Hackerone)
\ No newline at end of file
diff --git a/web/scanEngine/forms.py b/web/scanEngine/forms.py
index 4eddf0d92..2317351eb 100644
--- a/web/scanEngine/forms.py
+++ b/web/scanEngine/forms.py
@@ -176,12 +176,14 @@ class Meta:

     slack_hook_url = forms.CharField(
         required=False,
-        widget=forms.TextInput(
+        widget=forms.PasswordInput(
             attrs={
-                "class": "form-control",
+                "class": "form-control h-100",
                 "id": "slack_hook_url",
                 "placeholder": "https://hooks.slack.com/services/T00000000/B00000000/XXXXXXXXXXXXXXXXXXXXXXXX",
-            }))
+            },
+            render_value=True
+        ))

     send_to_lark = forms.BooleanField(
         required=False,
@@ -193,12 +195,14 @@
     lark_hook_url = forms.CharField(
         required=False,
-        widget=forms.TextInput(
+        widget=forms.PasswordInput(
             attrs={
-                "class": "form-control",
+                "class": "form-control h-100",
                 "id": "lark_hook_url",
                 "placeholder": "https://open.larksuite.com/open-apis/bot/v2/hook/XXXXXXXXXXXXXXXXXXXXXXXX",
-            }))
+            },
+            render_value=True
+        ))

     send_to_discord = forms.BooleanField(
         required=False,
@@ -210,12 +214,14 @@
     discord_hook_url = forms.CharField(
         required=False,
-        widget=forms.TextInput(
+        widget=forms.PasswordInput(
             attrs={
-                "class": "form-control",
+                "class": "form-control h-100",
                 "id": "discord_hook_url",
                 "placeholder": "https://discord.com/api/webhooks/000000000000000000/XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
-            }))
+            },
+            render_value=True
+        ))

     send_to_telegram = forms.BooleanField(
         required=False,
@@ -227,21 +233,25 @@
     telegram_bot_token = forms.CharField(
         required=False,
-        widget=forms.TextInput(
+        widget=forms.PasswordInput(
             attrs={
-                "class": "form-control",
+                "class": "form-control h-100",
                 "id": "telegram_bot_token",
                 "placeholder": "Bot Token",
-            }))
+            },
+            render_value=True
+        ))

     telegram_bot_chat_id = forms.CharField(
         required=False,
-        widget=forms.TextInput(
+        widget=forms.PasswordInput(
             attrs={
-                "class": "form-control",
+                "class": "form-control h-100",
                 "id": "telegram_bot_chat_id",
                 "placeholder": "Bot Chat ID",
-            }))
+            },
+            render_value=True
+        ))

     send_scan_status_notif = forms.BooleanField(
         required=False,
@@ -388,22 +398,12 @@ class Meta:
         model = Hackerone
         fields = '__all__'

-    username = forms.CharField(
-        required=True,
-        widget=forms.TextInput(
-            attrs={
-                "class": "form-control form-control-lg",
-                "id": "username",
-                "placeholder": "Your Hackerone Username",
-            }))
-
-    api_key = forms.CharField(
-        required=True,
-        widget=forms.TextInput(
+    send_report = forms.BooleanField(
+        required=False,
+        widget=forms.CheckboxInput(
             attrs={
-                "class": "form-control form-control-lg",
-                "id": "api_key",
-                "placeholder": "Hackerone API Token",
+                "class": "form-check-input",
+                "id": "send_report",
             }))

     send_critical = forms.BooleanField(
@@ -441,6 +441,7 @@ def set_value(self, key):
         self.initial['username'] = key.username
         self.initial['api_key'] = key.api_key
+        self.initial['send_report'] = key.send_report
         self.initial['send_critical'] = key.send_critical
         self.initial['send_high'] = key.send_high
         self.initial['send_medium'] = key.send_medium
@@ -448,6 +449,7 @@
         self.initial['report_template'] = key.report_template

     def set_initial(self):
+        self.initial['send_report'] = False
         self.initial['send_critical'] = True
         self.initial['send_high'] = True
         self.initial['send_medium'] = False
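The forms.py hunks above swap `TextInput` for `PasswordInput` on the webhook URLs and tokens. A minimal standalone sketch of that widget pattern, assuming only stock Django; `render_value=True` is the key detail, since `PasswordInput` drops the stored value on re-render by default:

```python
from django import forms

class WebhookSettingsForm(forms.Form):
    # Masked in the browser like a password field, but the saved secret is
    # still echoed back into the input when the settings page re-renders.
    hook_url = forms.CharField(
        required=False,
        widget=forms.PasswordInput(
            attrs={'class': 'form-control h-100', 'placeholder': 'https://hooks.example.com/...'},
            render_value=True,
        ),
    )
```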
diff --git a/web/scanEngine/migrations/0001_initial.py b/web/scanEngine/migrations/0001_initial.py
index aa0dcfc18..2d58745f2 100644
--- a/web/scanEngine/migrations/0001_initial.py
+++ b/web/scanEngine/migrations/0001_initial.py
@@ -1,4 +1,4 @@
-# Generated by Django 3.2.23 on 2024-06-19 02:43
+# Generated by Django 3.2.23 on 2024-09-06 01:47

 from django.db import migrations, models
@@ -35,6 +35,7 @@ class Migration(migrations.Migration):
                 ('id', models.AutoField(primary_key=True, serialize=False)),
                 ('username', models.CharField(blank=True, max_length=100, null=True)),
                 ('api_key', models.CharField(blank=True, max_length=200, null=True)),
+                ('send_report', models.BooleanField(blank=True, default=False, null=True)),
                 ('send_critical', models.BooleanField(default=True)),
                 ('send_high', models.BooleanField(default=True)),
                 ('send_medium', models.BooleanField(default=False)),
diff --git a/web/scanEngine/models.py b/web/scanEngine/models.py
index 3e5c6d07f..f0beef502 100644
--- a/web/scanEngine/models.py
+++ b/web/scanEngine/models.py
@@ -97,8 +97,10 @@ class Proxy(models.Model):

 class Hackerone(models.Model):
     id = models.AutoField(primary_key=True)
-    username = models.CharField(max_length=100, null=True, blank=True)
-    api_key = models.CharField(max_length=200, null=True, blank=True)
+    # TODO: username and api_key will be deprecated in another major release; the HackerOneAPIKey model from dashboard/models.py will be used instead
+    username = models.CharField(max_length=100, null=True, blank=True)  # unused
+    api_key = models.CharField(max_length=200, null=True, blank=True)  # unused
+    send_report = models.BooleanField(default=False, null=True, blank=True)
     send_critical = models.BooleanField(default=True)
     send_high = models.BooleanField(default=True)
     send_medium = models.BooleanField(default=False)
diff --git a/web/scanEngine/templates/scanEngine/settings/api.html b/web/scanEngine/templates/scanEngine/settings/api.html
index 150e912a0..c1d6520b6 100644
--- a/web/scanEngine/templates/scanEngine/settings/api.html
+++ b/web/scanEngine/templates/scanEngine/settings/api.html
@@ -29,27 +29,79 @@

 OpenAI keys will be used to generate vulnerability description, remediation, impact and vulnerability report writing using GPT.

+ [OpenAI key input group ({% if openai_key %} / {% else %} branches): markup not recoverable from extraction]
- This is optional but recommended.
+ This is optional but recommended. Get your API key from https://platform.openai.com/api-keys

 Netlas keys will be used to get whois information and other OSINT related data.

+ [Netlas key input group ({% if netlas_key %} / {% else %} branches): markup not recoverable from extraction]
- This is optional
+ This is optional. Get your API key from https://netlas.io

 Chaos keys will be used for subdomain enumeration and recon data for Public Bug Bounty Programs.

+ [new Chaos key input group ({% if chaos_key %} / {% else %} branches): markup not recoverable from extraction]
+ This is optional but recommended. Get your API key from https://cloud.projectdiscovery.io
+ {% if user_preferences.bug_bounty_mode %}

 Hackerone Keys will be used to import targets, bookmarked programs, and submit automated vulnerability report to Hackerone. This is a bug bounty specific feature.

+ [HackerOne username and API key input groups, shown only in bug bounty mode: markup not recoverable from extraction]
 This is optional but recommended for bug hunters. Get your API key from Hackerone Documentation

+ {% endif %}
- [removed markup: not recoverable from extraction]
@@ -60,4 +112,4 @@
 {% block page_level_script %}

-{% endblock page_level_script %}
+{% endblock page_level_script %}
\ No newline at end of file
diff --git a/web/scanEngine/templates/scanEngine/settings/hackerone.html b/web/scanEngine/templates/scanEngine/settings/hackerone.html
index 1cbd18142..5a9de97ad 100644
--- a/web/scanEngine/templates/scanEngine/settings/hackerone.html
+++ b/web/scanEngine/templates/scanEngine/settings/hackerone.html
@@ -12,11 +12,11 @@
 {% block breadcrumb_title %}
 [breadcrumb markup changes: not recoverable from extraction]
 {% endblock breadcrumb_title %}

 {% block page_title %}
-HackerOne Settings
+Hackerone Automatic Vulnerability Report Settings
 {% endblock page_title %}

 {% block main_content %}
@@ -24,8 +24,6 @@
-[heading markup: not recoverable from extraction]

 Hackerone Automatic Vulnerability Report Settings

-[closing markup: not recoverable from extraction]