diff --git a/.github/workflows/auto-release.yml b/.github/workflows/auto-release.yml new file mode 100644 index 000000000..7c068aa2c --- /dev/null +++ b/.github/workflows/auto-release.yml @@ -0,0 +1,62 @@ +name: Update Version, Changelog and README + +on: + release: + types: [published] + +jobs: + update-version-and-changelog: + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - name: Checkout code + uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: Get latest release info + id: get_release + uses: actions/github-script@v6 + with: + script: | + const release = await github.rest.repos.getLatestRelease({ + owner: context.repo.owner, + repo: context.repo.repo, + }); + core.setOutput('tag_name', release.data.tag_name); + core.setOutput('body', release.data.body); + + - name: Update version file + run: echo "${{ steps.get_release.outputs.tag_name }}" > web/.version + + - name: Update CHANGELOG.md + env: + RELEASE_BODY: ${{ steps.get_release.outputs.body }} + run: | + echo "# Changelog" > CHANGELOG.md.new + echo "" >> CHANGELOG.md.new + echo "## ${{ steps.get_release.outputs.tag_name }}" >> CHANGELOG.md.new + echo "" >> CHANGELOG.md.new + printf '%s\n' "$RELEASE_BODY" >> CHANGELOG.md.new + echo "" >> CHANGELOG.md.new + if [ -f CHANGELOG.md ]; then + sed '1,2d' CHANGELOG.md >> CHANGELOG.md.new + fi + mv CHANGELOG.md.new CHANGELOG.md + + - name: Update README.md + run: | + sed -i 's|https://img.shields.io/badge/version-.*-informational|https://img.shields.io/badge/version-${{ steps.get_release.outputs.tag_name }}-informational|g' README.md + + - name: Commit and push changes + run: | + git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com" + git config --local user.name "github-actions[bot]" + git add web/.version CHANGELOG.md README.md + if git diff --staged --quiet; then + echo "No changes to commit" + else + git commit -m "reNgine release: ${{ steps.get_release.outputs.tag_name }} :rocket:" + git push origin HEAD:${{ github.event.repository.default_branch }} + fi
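+      # Note (sketch): the push above targets the default branch directly; this assumes any branch protection allows pushes from github-actions[bot] via the 'contents: write' permission granted in this job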
diff --git a/README.md b/README.md index e09810b25..fb285f178 100644 --- a/README.md +++ b/README.md @@ -30,9 +30,12 @@ + reNgine 2.2.0 comes with Bounty Hub, where you can sync and import your HackerOne programs, in-app notifications, Chaos as a subdomain enumeration tool, the ability to upload multiple Nuclei and GF patterns, support for regex in the out-of-scope subdomain config, an additional PDF report template, and many more. + Check out What's new in reNgine 2.2.0! +
-Unleash the power of LLM toolkit! Now you can use local LLM models to generate attack surface and vulnerability reports!, Checkout the release-notes!
Note: Parts of this README were written or refined using AI language models.
+## License + +Distributed under the GNU GPL v3 License. See [LICENSE](LICENSE) for more information. + +![-----------------------------------------------------](https://raw.githubusercontent.com/andreasbm/readme/master/assets/lines/aqua.png) + +Note: Parts of this README were written or refined using AI language models.
\ No newline at end of file diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index 9481d7f46..3359cfb8f 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -94,9 +94,6 @@ services: - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} - POSTGRES_PORT=${POSTGRES_PORT} - POSTGRES_HOST=${POSTGRES_HOST} - # THIS IS A MUST FOR CHECKING UPDATE, EVERYTIME A COMMIT IS MERGED INTO - # MASTER, UPDATE THIS!!! MAJOR.MINOR.PATCH https://semver.org/ - - RENGINE_CURRENT_VERSION='2.1.3' volumes: - ./web:/usr/src/app - github_repos:/usr/src/github diff --git a/docker-compose.yml b/docker-compose.yml index 2daccd382..6f40c5bd6 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -96,9 +96,6 @@ services: - POSTGRES_PORT=${POSTGRES_PORT} - POSTGRES_HOST=${POSTGRES_HOST} - DJANGO_SUPERUSER_PASSWORD=${DJANGO_SUPERUSER_PASSWORD} - # THIS IS A MUST FOR CHECKING UPDATE, EVERYTIME A COMMIT IS MERGED INTO - # MASTER, UPDATE THIS!!! MAJOR.MINOR.PATCH https://semver.org/ - - RENGINE_CURRENT_VERSION='2.1.3' volumes: - ./web:/usr/src/app - github_repos:/usr/src/github diff --git a/scripts/uninstall.sh b/scripts/uninstall.sh index cc177285d..761738b5c 100755 --- a/scripts/uninstall.sh +++ b/scripts/uninstall.sh @@ -30,8 +30,8 @@ read -p "$(echo -e ${WARNING}"Are you sure you want to proceed? (y/Y/yes/YES to # change answer to lowercase for comparison ANSWER_LC=$(echo "$CONFIRM" | tr '[:upper:]' '[:lower:]') -if [[ "$ANSWER_LC" != "y" && "$ANSWER_LC" != "yes" ]]; then - print_status "${YELLOW}Uninstall aborted by user.${RESET}" +if [ -z "$CONFIRM" ] || { [ "$CONFIRM" != "y" ] && [ "$CONFIRM" != "Y" ] && [ "$CONFIRM" != "yes" ] && [ "$CONFIRM" != "Yes" ] && [ "$CONFIRM" != "YES" ]; }; then + print_status "${WARNING}Uninstall aborted by user.${RESET}" exit 0 fi diff --git a/web/.version b/web/.version new file mode 100644 index 000000000..cf2dc0bc4 --- /dev/null +++ b/web/.version @@ -0,0 +1 @@ +v2.2.0 \ No newline at end of file diff --git a/web/Dockerfile b/web/Dockerfile index 33dd9ee86..66d709e75 100644 --- a/web/Dockerfile +++ b/web/Dockerfile @@ -85,6 +85,7 @@ RUN printf "\ github.com/tomnomnom/waybackurls@latest\n\ github.com/projectdiscovery/httpx/cmd/httpx@latest\n\ github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest\n\ + github.com/projectdiscovery/chaos-client/cmd/chaos@latest\n\ github.com/projectdiscovery/nuclei/v3/cmd/nuclei@latest\n\ github.com/projectdiscovery/naabu/v2/cmd/naabu@latest\n\ github.com/hakluke/hakrawler@latest\n\ @@ -103,6 +104,9 @@ RUN printf "\ # Update Nuclei and Nuclei-Templates RUN nuclei -update-templates +# Update Chaos +RUN chaos -update + # Copy requirements COPY ./requirements.txt /tmp/requirements.txt RUN pip3 install --upgrade setuptools==72.1.0 diff --git a/web/api/serializers.py b/web/api/serializers.py index 1fd0b7e91..a01c9b909 100644 --- a/web/api/serializers.py +++ b/web/api/serializers.py @@ -1,6 +1,5 @@ from dashboard.models import * -from django.contrib.humanize.templatetags.humanize import (naturalday, - naturaltime) +from django.contrib.humanize.templatetags.humanize import (naturalday, naturaltime) from django.db.models import F, JSONField, Value from recon_note.models import * from reNgine.common_func import * @@ -8,6 +7,60 @@ from scanEngine.models import * from startScan.models import * from targetApp.models import * +from dashboard.models import InAppNotification + +
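+# Usage sketch (hypothetical payload mirroring the HackerOne API shape, not part of this PR): +#   program = {'id': '1', 'type': 'program', 'attributes': {'handle': 'acme', 'name': 'Acme'}} +#   HackerOneProgramSerializer(program).data  # serializes a cached dict; no DB access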
+class HackerOneProgramAttributesSerializer(serializers.Serializer): + """ + Serializer for HackerOne program attributes. + IMP: this is not a model serializer; programs are not stored in the db + because programs change frequently, so a cache is used on top of these serializers + """ + handle = serializers.CharField(required=False) + name = serializers.CharField(required=False) + currency = serializers.CharField(required=False) + submission_state = serializers.CharField(required=False) + triage_active = serializers.BooleanField(allow_null=True, required=False) + state = serializers.CharField(required=False) + started_accepting_at = serializers.DateTimeField(required=False) + bookmarked = serializers.BooleanField(required=False) + allows_bounty_splitting = serializers.BooleanField(required=False) + offers_bounties = serializers.BooleanField(required=False) + open_scope = serializers.BooleanField(allow_null=True, required=False) + fast_payments = serializers.BooleanField(allow_null=True, required=False) + gold_standard_safe_harbor = serializers.BooleanField(allow_null=True, required=False) + + def to_representation(self, instance): + return {key: value for key, value in instance.items()} + + +class HackerOneProgramSerializer(serializers.Serializer): + id = serializers.CharField() + type = serializers.CharField() + attributes = HackerOneProgramAttributesSerializer() + + +class InAppNotificationSerializer(serializers.ModelSerializer): + project_name = serializers.SerializerMethodField() + + class Meta: + model = InAppNotification + fields = [ + 'id', + 'title', + 'description', + 'icon', + 'is_read', + 'created_at', + 'notification_type', + 'status', + 'redirect_link', + 'open_in_new_tab', + 'project', + 'project_name' + ] + read_only_fields = ['id', 'created_at'] + + def get_project_name(self, obj): + # backs the project_name SerializerMethodField declared above + return obj.project.name if obj.project else None class SearchHistorySerializer(serializers.ModelSerializer): diff --git a/web/api/shared_api_tasks.py b/web/api/shared_api_tasks.py new file mode 100644 index 000000000..d21ca23fa --- /dev/null +++ b/web/api/shared_api_tasks.py @@ -0,0 +1,209 @@ +# include all the celery tasks to be used in the API, do not put them in tasks.py +import requests + +from reNgine.common_func import create_inappnotification, get_hackerone_key_username +from reNgine.definitions import PROJECT_LEVEL_NOTIFICATION, HACKERONE_ALLOWED_ASSET_TYPES +from reNgine.celery import app +from reNgine.database_utils import bulk_import_targets + +@app.task(name='import_hackerone_programs_task', bind=False, queue='api_queue') +def import_hackerone_programs_task(handles, project_slug, is_sync=False): + """ + Runs in the background to import programs from HackerOne + + Args: + handles (list): List of handles to import + project_slug (str): Slug of the project + is_sync (bool): If the import is a sync operation + Returns: + None; creates in-app notifications instead + """ + def fetch_program_details_from_hackerone(program_handle): + url = f'https://api.hackerone.com/v1/hackers/programs/{program_handle}' + headers = {'Accept': 'application/json'} + creds = get_hackerone_key_username() + + if not creds: + raise Exception("HackerOne API credentials not configured") + + username, api_key = creds + + response = requests.get( + url, + headers=headers, + auth=(username, api_key) + ) + + if response.status_code == 401: + raise Exception("HackerOne API credentials are invalid") + + if response.status_code == 200: + return response.json() + else: + return None + + for handle in handles: + program_details = fetch_program_details_from_hackerone(handle) + if program_details: + # Thanks, some parts of this logic were originally written by @null-ref-0000 + # via PR https://github.com/yogeshojha/rengine/pull/1410 + try: + program_name = 
program_details['attributes']['name'] + + assets = [] + scopes = program_details['relationships']['structured_scopes']['data'] + for scope in scopes: + asset_type = scope['attributes']['asset_type'] + asset_identifier = scope['attributes']['asset_identifier'] + eligible_for_submission = scope['attributes']['eligible_for_submission'] + + # for now we ignore scopes that are not eligible for submission; + # a future release will add these to the target's out_of_scope config + + # keep only the scopes that reNgine currently supports + if asset_type in HACKERONE_ALLOWED_ASSET_TYPES and eligible_for_submission: + assets.append(asset_identifier) + + # in some cases asset_type is OTHER and may still contain a usable asset + elif asset_type == 'OTHER' and ('.' in asset_identifier or asset_identifier.startswith('http')): + assets.append(asset_identifier) + + # deduplicate assets + assets = list(set(assets)) + + # convert assets to a list of dicts with name and description + assets = [{'name': asset, 'description': None} for asset in assets] + new_targets_added = bulk_import_targets( + targets=assets, + project_slug=project_slug, + organization_name=program_name, + org_description='Imported from HackerOne', + h1_team_handle=handle + ) + + if new_targets_added: + create_inappnotification( + title=f"HackerOne Program Imported: {handle}", + description=f"The program '{program_name}' from HackerOne has been successfully imported.", + notification_type=PROJECT_LEVEL_NOTIFICATION, + project_slug=project_slug, + icon="mdi-check-circle", + status='success' + ) + + except Exception as e: + create_inappnotification( + title=f"HackerOne Program Import Failed: {handle}", + description=f"Failed to import program from HackerOne with handle '{handle}'. {str(e)}", + notification_type=PROJECT_LEVEL_NOTIFICATION, + project_slug=project_slug, + icon="mdi-alert-circle", + status='error' + ) + else: + create_inappnotification( + title=f"HackerOne Program Import Failed: {handle}", + description=f"Failed to import program from HackerOne with handle '{handle}'. Program details could not be fetched.", + notification_type=PROJECT_LEVEL_NOTIFICATION, + project_slug=project_slug, + icon="mdi-alert-circle", + status='error' + ) + + if is_sync: + title = "HackerOne Program Sync Completed" + description = f"Sync process for {len(handles)} program(s) has completed." + else: + title = "HackerOne Program Import Completed" + description = f"Import process for {len(handles)} program(s) has completed." 
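+    # Note: this summary is emitted with status='success' even when individual handles failed above; per-handle failures are reported in their own error notifications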
+ + create_inappnotification( + title=title, + description=description, + notification_type=PROJECT_LEVEL_NOTIFICATION, + project_slug=project_slug, + icon="mdi-check-all", + status='success' + ) + + +@app.task(name='sync_bookmarked_programs_task', bind=False, queue='api_queue') +def sync_bookmarked_programs_task(project_slug): + """ + Runs in the background to sync bookmarked programs from HackerOne + + Args: + project_slug (str): Slug of the project + Returns: + None + Creates in-app notifications for progress and results + """ + + def fetch_bookmarked_programs(): + url = 'https://api.hackerone.com/v1/hackers/programs?page[size]=100' + headers = {'Accept': 'application/json'} + bookmarked_programs = [] + + credentials = get_hackerone_key_username() + if not credentials: + raise Exception("HackerOne API credentials not configured") + + username, api_key = credentials + + while url: + response = requests.get( + url, + headers=headers, + auth=(username, api_key) + ) + + if response.status_code == 401: + raise Exception("HackerOne API credentials are invalid") + elif response.status_code != 200: + raise Exception(f"HackerOne API request failed with status code {response.status_code}") + + data = response.json() + programs = data['data'] + bookmarked = [p for p in programs if p['attributes']['bookmarked']] + bookmarked_programs.extend(bookmarked) + + url = data['links'].get('next') + + return bookmarked_programs + + try: + bookmarked_programs = fetch_bookmarked_programs() + handles = [program['attributes']['handle'] for program in bookmarked_programs] + + if not handles: + create_inappnotification( + title="HackerOne Bookmarked Programs Sync Completed", + description="No bookmarked programs found.", + notification_type=PROJECT_LEVEL_NOTIFICATION, + project_slug=project_slug, + icon="mdi-information", + status='info' + ) + return + + import_hackerone_programs_task.delay(handles, project_slug, is_sync=True) +
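+        # Note: .delay() only enqueues import_hackerone_programs_task on the same api_queue, so the notification below reports that the import has started; completion is reported by the import task itself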
Starting import process.", + notification_type=PROJECT_LEVEL_NOTIFICATION, + project_slug=project_slug, + icon="mdi-progress-check", + status='info' + ) + + except Exception as e: + create_inappnotification( + title="HackerOne Bookmarked Programs Sync Failed", + description=f"Failed to sync bookmarked programs: {str(e)}", + notification_type=PROJECT_LEVEL_NOTIFICATION, + project_slug=project_slug, + icon="mdi-alert-circle", + status='error' + ) diff --git a/web/api/urls.py b/web/api/urls.py index cfdd8f265..7c1c12802 100644 --- a/web/api/urls.py +++ b/web/api/urls.py @@ -19,6 +19,8 @@ router.register(r'listIps', IpAddressViewSet) router.register(r'listActivityLogs', ListActivityLogsViewSet) router.register(r'listScanLogs', ListScanLogsViewSet) +router.register(r'notifications', InAppNotificationManagerViewSet, basename='notification') +router.register(r'hackerone-programs', HackerOneProgramViewSet, basename='hackerone_program') urlpatterns = [ url('^', include(router.urls)), @@ -239,6 +241,11 @@ 'action/create/project', CreateProjectApi.as_view(), name='create_project'), + path( + 'toggle-bug-bounty-mode/', + ToggleBugBountyModeView.as_view(), + name='toggle_bug_bounty_mode' + ), ] urlpatterns += router.urls diff --git a/web/api/views.py b/web/api/views.py index f5df4bbbf..fcea8abd9 100644 --- a/web/api/views.py +++ b/web/api/views.py @@ -1,24 +1,30 @@ -import logging import re import socket -from ipaddress import IPv4Network - +import logging import requests import validators -from dashboard.models import * +import requests + +from ipaddress import IPv4Network from django.db.models import CharField, Count, F, Q, Value -from django.shortcuts import get_object_or_404 from django.utils import timezone from packaging import version from django.template.defaultfilters import slugify -from rest_framework import viewsets +from datetime import datetime +from rest_framework import viewsets, status from rest_framework.response import Response from rest_framework.views import APIView -from rest_framework.status import HTTP_400_BAD_REQUEST +from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_204_NO_CONTENT, HTTP_202_ACCEPTED +from rest_framework.decorators import action +from django.core.exceptions import ObjectDoesNotExist +from django.core.cache import cache + +from dashboard.models import * from recon_note.models import * from reNgine.celery import app from reNgine.common_func import * +from reNgine.database_utils import * from reNgine.definitions import ABORTED_TASK from reNgine.tasks import * from reNgine.llm import * @@ -27,12 +33,305 @@ from startScan.models import * from startScan.models import EndPoint from targetApp.models import * - +from api.shared_api_tasks import import_hackerone_programs_task, sync_bookmarked_programs_task from .serializers import * + logger = logging.getLogger(__name__) +class ToggleBugBountyModeView(APIView): + """ + This class manages the user bug bounty mode + """ + def post(self, request, *args, **kwargs): + user_preferences = get_object_or_404(UserPreferences, user=request.user) + user_preferences.bug_bounty_mode = not user_preferences.bug_bounty_mode + user_preferences.save() + return Response({ + 'bug_bounty_mode': user_preferences.bug_bounty_mode + }, status=status.HTTP_200_OK) + + +class HackerOneProgramViewSet(viewsets.ViewSet): + """ + This class manages the HackerOne Program model, + provides basic fetching of programs and caching + """ + CACHE_KEY = 'hackerone_programs' + CACHE_TIMEOUT = 60 * 30 # 30 minutes + PROGRAM_CACHE_KEY = 
'hackerone_program_{}' + + API_BASE = 'https://api.hackerone.com/v1/hackers' + + ALLOWED_ASSET_TYPES = ["WILDCARD", "DOMAIN", "IP_ADDRESS", "CIDR", "URL"] + + def list(self, request): + try: + sort_by = request.query_params.get('sort_by', 'age') + sort_order = request.query_params.get('sort_order', 'desc') + + programs = self.get_cached_programs() + + if sort_by == 'name': + programs = sorted(programs, key=lambda x: x['attributes']['name'].lower(), + reverse=(sort_order.lower() == 'desc')) + elif sort_by == 'reports': + programs = sorted(programs, key=lambda x: x['attributes'].get('number_of_reports_for_user', 0), + reverse=(sort_order.lower() == 'desc')) + elif sort_by == 'age': + programs = sorted(programs, + key=lambda x: datetime.strptime(x['attributes'].get('started_accepting_at', '1970-01-01T00:00:00.000Z'), '%Y-%m-%dT%H:%M:%S.%fZ'), + reverse=(sort_order.lower() == 'desc') + ) + + serializer = HackerOneProgramSerializer(programs, many=True) + return Response(serializer.data) + except Exception as e: + return self.handle_exception(e) + + def get_api_credentials(self): + try: + api_key = HackerOneAPIKey.objects.first() + if not api_key: + raise ObjectDoesNotExist("HackerOne API credentials not found") + return api_key.username, api_key.key + except ObjectDoesNotExist: + raise Exception("HackerOne API credentials not configured") + + @action(detail=False, methods=['get']) + def bookmarked_programs(self, request): + try: + # do not cache bookmarked programs due to the user specific nature + programs = self.fetch_programs_from_hackerone() + bookmarked = [p for p in programs if p['attributes']['bookmarked']] + serializer = HackerOneProgramSerializer(bookmarked, many=True) + return Response(serializer.data) + except Exception as e: + return self.handle_exception(e) + + @action(detail=False, methods=['get']) + def bounty_programs(self, request): + try: + programs = self.get_cached_programs() + bounty_programs = [p for p in programs if p['attributes']['offers_bounties']] + serializer = HackerOneProgramSerializer(bounty_programs, many=True) + return Response(serializer.data) + except Exception as e: + return self.handle_exception(e) + + def get_cached_programs(self): + programs = cache.get(self.CACHE_KEY) + if programs is None: + programs = self.fetch_programs_from_hackerone() + cache.set(self.CACHE_KEY, programs, self.CACHE_TIMEOUT) + return programs + + def fetch_programs_from_hackerone(self): + url = f'{self.API_BASE}/programs?page[size]=100' + headers = {'Accept': 'application/json'} + all_programs = [] + try: + username, api_key = self.get_api_credentials() + except Exception as e: + raise Exception("API credentials error: " + str(e)) + + while url: + response = requests.get( + url, + headers=headers, + auth=(username, api_key) + ) + + if response.status_code == 401: + raise Exception("Invalid API credentials") + elif response.status_code != 200: + raise Exception(f"HackerOne API request failed with status code {response.status_code}") + + data = response.json() + all_programs.extend(data['data']) + + url = data['links'].get('next') + + return all_programs + + @action(detail=False, methods=['post']) + def refresh_cache(self, request): + try: + programs = self.fetch_programs_from_hackerone() + cache.set(self.CACHE_KEY, programs, self.CACHE_TIMEOUT) + return Response({"status": "Cache refreshed successfully"}) + except Exception as e: + return self.handle_exception(e) + + @action(detail=True, methods=['get']) + def program_details(self, request, pk=None): + try: + program_handle = pk + 
cache_key = self.PROGRAM_CACHE_KEY.format(program_handle) + program_details = cache.get(cache_key) + + if program_details is None: + program_details = self.fetch_program_details_from_hackerone(program_handle) + if program_details: + cache.set(cache_key, program_details, self.CACHE_TIMEOUT) + + if program_details: + filtered_scopes = [ + scope for scope in program_details.get('relationships', {}).get('structured_scopes', {}).get('data', []) + if scope.get('attributes', {}).get('asset_type') in self.ALLOWED_ASSET_TYPES + ] + + program_details['relationships']['structured_scopes']['data'] = filtered_scopes + + return Response(program_details) + else: + return Response({"error": "Program not found"}, status=status.HTTP_404_NOT_FOUND) + except Exception as e: + return self.handle_exception(e) + + def fetch_program_details_from_hackerone(self, program_handle): + url = f'{self.API_BASE}/programs/{program_handle}' + headers = {'Accept': 'application/json'} + try: + username, api_key = self.get_api_credentials() + except Exception as e: + raise Exception("API credentials error: " + str(e)) + + response = requests.get( + url, + headers=headers, + auth=(username, api_key) + ) + + if response.status_code == 401: + raise Exception("Invalid API credentials") + elif response.status_code == 200: + return response.json() + else: + return None + + @action(detail=False, methods=['post']) + def import_programs(self, request): + try: + project_slug = request.query_params.get('project_slug') + if not project_slug: + return Response({"error": "Project slug is required"}, status=status.HTTP_400_BAD_REQUEST) + handles = request.data.get('handles', []) + + if not handles: + return Response({"error": "No program handles provided"}, status=status.HTTP_400_BAD_REQUEST) + + import_hackerone_programs_task.delay(handles, project_slug) + + create_inappnotification( + title="HackerOne Program Import Started", + description=f"Import process for {len(handles)} program(s) has begun.", + notification_type=PROJECT_LEVEL_NOTIFICATION, + project_slug=project_slug, + icon="mdi-download", + status='info' + ) + + return Response({"message": f"Import process for {len(handles)} program(s) has begun."}, status=status.HTTP_202_ACCEPTED) + except Exception as e: + return self.handle_exception(e) + + @action(detail=False, methods=['get']) + def sync_bookmarked(self, request): + try: + project_slug = request.query_params.get('project_slug') + if not project_slug: + return Response({"error": "Project slug is required"}, status=status.HTTP_400_BAD_REQUEST) + + sync_bookmarked_programs_task.delay(project_slug) + + create_inappnotification( + title="HackerOne Bookmarked Programs Sync Started", + description="Sync process for bookmarked programs has begun.", + notification_type=PROJECT_LEVEL_NOTIFICATION, + project_slug=project_slug, + icon="mdi-sync", + status='info' + ) + + return Response({"message": "Sync process for bookmarked programs has begun."}, status=status.HTTP_202_ACCEPTED) + except Exception as e: + return self.handle_exception(e) + + def handle_exception(self, exc): + if isinstance(exc, ObjectDoesNotExist): + return Response({"error": "HackerOne API credentials not configured"}, status=status.HTTP_503_SERVICE_UNAVAILABLE) + elif str(exc) == "Invalid API credentials": + return Response({"error": "Invalid HackerOne API credentials"}, status=status.HTTP_401_UNAUTHORIZED) + else: + return Response({"error": str(exc)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR) + +class InAppNotificationManagerViewSet(viewsets.ModelViewSet): + """ 
+ Manages in-app notifications: list, mark read, mark all read, unread count, and clear all + (see the usage sketch after clear_all below) + """ + serializer_class = InAppNotificationSerializer + pagination_class = None + + def get_queryset(self): + # user-scoped notifications may be added later: + # return InAppNotification.objects.filter(user=self.request.user) + project_slug = self.request.query_params.get('project_slug') + queryset = InAppNotification.objects.all() + if project_slug: + queryset = queryset.filter( + Q(project__slug=project_slug) | Q(notification_type='system') + ) + return queryset.order_by('-created_at') + + @action(detail=False, methods=['post']) + def mark_all_read(self, request): + # mark all notifications as read + project_slug = self.request.query_params.get('project_slug') + queryset = self.get_queryset() + + if project_slug: + queryset = queryset.filter( + Q(project__slug=project_slug) | Q(notification_type='system') + ) + queryset.update(is_read=True) + return Response(status=HTTP_204_NO_CONTENT) + + @action(detail=True, methods=['post']) + def mark_read(self, request, pk=None): + # mark an individual notification as read when clicked + notification = self.get_object() + notification.is_read = True + notification.save() + return Response(status=HTTP_204_NO_CONTENT) + + @action(detail=False, methods=['get']) + def unread_count(self, request): + # fetch the unread notification count, mainly for the badge + project_slug = self.request.query_params.get('project_slug') + queryset = self.get_queryset() + if project_slug: + queryset = queryset.filter( + Q(project__slug=project_slug) | Q(notification_type='system') + ) + count = queryset.filter(is_read=False).count() + return Response({'count': count}) + + @action(detail=False, methods=['post']) + def clear_all(self, request): + # called by the clear button to delete all notifications + project_slug = self.request.query_params.get('project_slug') + queryset = self.get_queryset() + if project_slug: + queryset = queryset.filter( + Q(project__slug=project_slug) | Q(notification_type='system') + ) + queryset.delete() + return Response(status=HTTP_204_NO_CONTENT)
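+    # Usage sketch, assuming the api app is mounted at /api/ (endpoints follow from the 'notifications' router registration in api/urls.py): +    #   GET  /api/notifications/?project_slug=<slug> +    #   POST /api/notifications/mark_all_read/?project_slug=<slug> +    #   POST /api/notifications/<id>/mark_read/ +    #   GET  /api/notifications/unread_count/?project_slug=<slug> +    #   POST /api/notifications/clear_all/?project_slug=<slug>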
+ + class OllamaManager(APIView): def get(self, request): """ @@ -622,6 +921,11 @@ def post(self, request): h1_team_handle = data.get('h1_team_handle') description = data.get('description') domain_name = data.get('domain_name') + # remove wildcard from domain + domain_name = domain_name.replace('*', '') + # if domain_name begins with a dot, remove it + if domain_name.startswith('.'): + domain_name = domain_name[1:] organization_name = data.get('organization') slug = data.get('slug') @@ -629,35 +933,26 @@ def post(self, request): if not validators.domain(domain_name): return Response({'status': False, 'message': 'Invalid domain or IP'}) - project = Project.objects.get(slug=slug) - - # Create domain object in DB - domain, _ = Domain.objects.get_or_create(name=domain_name) - domain.project = project - domain.h1_team_handle = h1_team_handle - domain.description = description - if not domain.insert_date: - domain.insert_date = timezone.now() - domain.save() - - # Create org object in DB - if organization_name: - organization_obj = None - organization_query = Organization.objects.filter(name=organization_name) - if organization_query.exists(): - organization_obj = organization_query[0] - else: - organization_obj = Organization.objects.create( - name=organization_name, - project=project, - insert_date=timezone.now()) - organization_obj.domains.add(domain) + added = bulk_import_targets( + targets=[{ + 'name': domain_name, + 'description': description, + }], + organization_name=organization_name, + h1_team_handle=h1_team_handle, + project_slug=slug + ) + if added: + return Response({ + 'status': True, + 'message': 'Domain successfully added as target!', + 'domain_name': domain_name, + # 'domain_id': domain.id + }) return Response({ - 'status': True, - 'message': 'Domain successfully added as target !', - 'domain_name': domain_name, - 'domain_id': domain.id + 'status': False, + 'message': 'Failed to add as target!' }) @@ -763,6 +1058,9 @@ def post(self, request): if data['type'] == 'subscan': for row in data['rows']: SubScan.objects.get(id=row).delete() + elif data['type'] == 'organization': + for row in data['rows']: + Organization.objects.get(id=row).delete() response = True except Exception as e: response = False @@ -774,63 +1072,95 @@ class StopScan(APIView): def post(self, request): req = self.request data = req.data - scan_id = data.get('scan_id') - subscan_id = data.get('subscan_id') - response = {} - task_ids = [] - scan = None - subscan = None - if subscan_id: - try: - subscan = get_object_or_404(SubScan, id=subscan_id) - scan = subscan.scan_history - task_ids = subscan.celery_ids - subscan.status = ABORTED_TASK - subscan.stop_scan_date = timezone.now() - subscan.save() - create_scan_activity( - subscan.scan_history.id, - f'Subscan {subscan_id} aborted', - SUCCESS_TASK) - response['status'] = True - except Exception as e: - logging.error(e) - response = {'status': False, 'message': str(e)} - elif scan_id: + scan_ids = data.get('scan_ids', []) + subscan_ids = data.get('subscan_ids', []) + + scan_ids = [int(id) for id in scan_ids] + subscan_ids = [int(id) for id in subscan_ids] + + response = {'status': False} +
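+        # Expected request body (sketch): {"scan_ids": [1, 2], "subscan_ids": [7]}; both keys are optional lists of ids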
+ def abort_scan(scan): + response = {} + logger.info('Aborting scan history') try: - scan = get_object_or_404(ScanHistory, id=scan_id) + logger.info(f"Setting scan {scan} status to ABORTED_TASK") task_ids = scan.celery_ids scan.scan_status = ABORTED_TASK scan.stop_scan_date = timezone.now() scan.aborted_by = request.user scan.save() + for task_id in task_ids: + app.control.revoke(task_id, terminate=True, signal='SIGKILL') + + tasks = ( + ScanActivity.objects + .filter(scan_of=scan) + .filter(status=RUNNING_TASK) + .order_by('-pk') + ) + for task in tasks: + task.status = ABORTED_TASK + task.time = timezone.now() + task.save() + + create_scan_activity( scan.id, "Scan aborted", - SUCCESS_TASK) + ABORTED_TASK + ) response['status'] = True except Exception as e: - logging.error(e) + logger.error(e) response = {'status': False, 'message': str(e)} - logger.warning(f'Revoking tasks {task_ids}') - for task_id in task_ids: - app.control.revoke(task_id, terminate=True, signal='SIGKILL') + return response - # Abort running tasks - tasks = ( - ScanActivity.objects - .filter(scan_of=scan) - .filter(status=RUNNING_TASK) - .order_by('-pk') - ) - if tasks.exists(): - for task in tasks: - if subscan_id and task.id not in subscan.celery_ids: + def abort_subscan(subscan): + response = {} + logger.info('Aborting subscan') + try: + logger.info(f"Setting subscan {subscan} status to ABORTED_TASK") + task_ids = subscan.celery_ids + + for task_id in task_ids: + app.control.revoke(task_id, terminate=True, signal='SIGKILL') + + subscan.status = ABORTED_TASK + subscan.stop_scan_date = timezone.now() + subscan.save() + create_scan_activity( + subscan.scan_history.id, + 'Subscan aborted', + ABORTED_TASK + ) + response['status'] = True + except Exception as e: + logger.error(e) + response = {'status': False, 'message': str(e)} + + return response + + for scan_id in scan_ids: + try: + scan = ScanHistory.objects.get(id=scan_id) + # if the scan has already finished or been aborted then do nothing + if scan.scan_status == SUCCESS_TASK or scan.scan_status == ABORTED_TASK: continue - task.status = ABORTED_TASK - task.time = timezone.now() - task.save() + response = abort_scan(scan) + except Exception as e: + logger.error(e) + response = {'status': False, 'message': str(e)} + + for subscan_id in subscan_ids: + try: + subscan = SubScan.objects.get(id=subscan_id) + if subscan.status == SUCCESS_TASK or subscan.status == ABORTED_TASK: + continue + response = abort_subscan(subscan) + except Exception as e: + logger.error(e) + response = {'status': False, 'message': str(e)} return Response(response) @@ -890,10 +1220,7 @@ def get(self, request): # get current version_number # remove quotes from current_version - current_version = ((os.environ['RENGINE_CURRENT_VERSION' - ])[1:] if os.environ['RENGINE_CURRENT_VERSION' - ][0] == 'v' - else os.environ['RENGINE_CURRENT_VERSION']).replace("'", "") + current_version = RENGINE_CURRENT_VERSION # for consistency remove v from both if exists latest_version = re.search(r'v(\d+\.)?(\d+\.)?(\*|\d+)', @@ -914,8 +1241,21 @@ def get(self, request): return_response['status'] = True return_response['latest_version'] = latest_version return_response['current_version'] = current_version - return_response['update_available'] = version.parse(current_version) < version.parse(latest_version) - if version.parse(current_version) < version.parse(latest_version): + is_version_update_available = version.parse(current_version) < version.parse(latest_version) + + # create an in-app notification only when an update is actually available + if is_version_update_available: + create_inappnotification( + title='reNgine Update Available', + description=f'Update to version {latest_version} is available', + notification_type=SYSTEM_LEVEL_NOTIFICATION, + project_slug=None, + icon='mdi-update', + redirect_link='https://github.com/yogeshojha/rengine/releases', + open_in_new_tab=True + ) + + return_response['update_available'] = is_version_update_available + if is_version_update_available: return_response['changelog'] = response[0]['body'] return Response(return_response) @@ -1015,7 +1355,11 @@ def get(self, request): version_number = None _, stdout = run_command(tool.version_lookup_command) - version_number = re.search(re.compile(tool.version_match_regex), 
str(stdout)) + if tool.version_match_regex: + version_number = re.search(re.compile(tool.version_match_regex), str(stdout)) + else: + version_match_regex = r'(?i:v)?(\d+(?:\.\d+){2,})' + version_number = re.search(version_match_regex, str(stdout)) if not version_number: return Response({'status': False, 'message': 'Invalid version lookup command.'}) diff --git a/web/art/reNgine.txt b/web/art/reNgine.txt index cf0082bd3..a94a0ea1d 100644 --- a/web/art/reNgine.txt +++ b/web/art/reNgine.txt @@ -3,6 +3,6 @@ _ __ ___| \| | __ _ _ _ __ ___ | '__/ _ \ . ` |/ _` | | '_ \ / _ \ | | | __/ |\ | (_| | | | | | __/ - |_| \___|_| \_|\__, |_|_| |_|\___| v2.1.1 + |_| \___|_| \_|\__, |_|_| |_|\___| __/ | |___/ diff --git a/web/celery-entrypoint.sh b/web/celery-entrypoint.sh index 6d7968fff..54e014cc3 100755 --- a/web/celery-entrypoint.sh +++ b/web/celery-entrypoint.sh @@ -151,8 +151,6 @@ then chmod +x /usr/src/github/goofuzz/GooFuzz fi -exec "$@" - # httpx seems to have issue, use alias instead!!! echo 'alias httpx="/go/bin/httpx"' >> ~/.bashrc @@ -167,28 +165,70 @@ if [ "$DEBUG" == "1" ]; then loglevel='debug' fi -# watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --autoscale=10,0 -l INFO -Q scan_queue & -echo "Starting Workers..." -echo "Starting Main Scan Worker with Concurrency: $MAX_CONCURRENCY,$MIN_CONCURRENCY" -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --loglevel=$loglevel --autoscale=$MAX_CONCURRENCY,$MIN_CONCURRENCY -Q main_scan_queue & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=$loglevel -Q initiate_scan_queue -n initiate_scan_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=$loglevel -Q subscan_queue -n subscan_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=20 --loglevel=$loglevel -Q report_queue -n report_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q send_notif_queue -n send_notif_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q send_scan_notif_queue -n send_scan_notif_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q send_task_notif_queue -n send_task_notif_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=5 --loglevel=$loglevel -Q send_file_to_discord_queue -n send_file_to_discord_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=5 --loglevel=$loglevel -Q send_hackerone_report_queue -n send_hackerone_report_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 
--loglevel=$loglevel -Q parse_nmap_results_queue -n parse_nmap_results_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=20 --loglevel=$loglevel -Q geo_localize_queue -n geo_localize_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q query_whois_queue -n query_whois_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=$loglevel -Q remove_duplicate_endpoints_queue -n remove_duplicate_endpoints_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=50 --loglevel=$loglevel -Q run_command_queue -n run_command_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q query_reverse_whois_queue -n query_reverse_whois_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q query_ip_history_queue -n query_ip_history_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=30 --loglevel=$loglevel -Q llm_queue -n llm_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q dorking_queue -n dorking_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q osint_discovery_queue -n osint_discovery_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q h8mail_queue -n h8mail_worker & -watchmedo auto-restart --recursive --pattern="*.py" --directory="/usr/src/app/reNgine/" -- celery -A reNgine.tasks worker --pool=gevent --concurrency=10 --loglevel=$loglevel -Q theHarvester_queue -n theHarvester_worker -exec "$@" +generate_worker_command() { + local queue=$1 + local concurrency=$2 + local worker_name=$3 + local app=${4:-"reNgine.tasks"} + local directory=${5:-"/usr/src/app/reNgine/"} + + local base_command="celery -A $app worker --pool=gevent --autoscale=$concurrency,1 --optimization=fair --loglevel=$loglevel -Q $queue -n $worker_name" + + if [ "$DEBUG" == "1" ]; then + echo "watchmedo auto-restart --recursive --pattern=\"*.py\" --directory=\"$directory\" -- $base_command &" + else + echo "$base_command &" + fi +} + +echo "Starting Celery Workers..." 
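+# Example of a command generated above (sketch, assuming DEBUG=0 and loglevel=info): +#   celery -A reNgine.tasks worker --pool=gevent --autoscale=30,1 --optimization=fair --loglevel=info -Q initiate_scan_queue -n initiate_scan_worker &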
+ +commands="" + +# Main scan worker +if [ "$DEBUG" == "1" ]; then + commands+="watchmedo auto-restart --recursive --pattern=\"*.py\" --directory=\"/usr/src/app/reNgine/\" -- celery -A reNgine.tasks worker --loglevel=$loglevel --optimization=fair --autoscale=$MAX_CONCURRENCY,$MIN_CONCURRENCY -Q main_scan_queue &"$'\n' +else + commands+="celery -A reNgine.tasks worker --loglevel=$loglevel --optimization=fair --autoscale=$MAX_CONCURRENCY,$MIN_CONCURRENCY -Q main_scan_queue &"$'\n' +fi + +# API shared task worker +if [ "$DEBUG" == "1" ]; then + commands+="watchmedo auto-restart --recursive --pattern=\"*.py\" --directory=\"/usr/src/app/api/\" -- celery -A api.shared_api_tasks worker --pool=gevent --optimization=fair --concurrency=30 --loglevel=$loglevel -Q api_queue -n api_worker &"$'\n' +else + commands+="celery -A api.shared_api_tasks worker --pool=gevent --concurrency=30 --optimization=fair --loglevel=$loglevel -Q api_queue -n api_worker &"$'\n' +fi + +# worker format: "queue_name:concurrency:worker_name" +workers=( + "initiate_scan_queue:30:initiate_scan_worker" + "subscan_queue:30:subscan_worker" + "report_queue:20:report_worker" + "send_notif_queue:10:send_notif_worker" + "send_task_notif_queue:10:send_task_notif_worker" + "send_file_to_discord_queue:5:send_file_to_discord_worker" + "send_hackerone_report_queue:5:send_hackerone_report_worker" + "parse_nmap_results_queue:10:parse_nmap_results_worker" + "geo_localize_queue:20:geo_localize_worker" + "query_whois_queue:10:query_whois_worker" + "remove_duplicate_endpoints_queue:30:remove_duplicate_endpoints_worker" + "run_command_queue:50:run_command_worker" + "query_reverse_whois_queue:10:query_reverse_whois_worker" + "query_ip_history_queue:10:query_ip_history_worker" + "llm_queue:30:llm_worker" + "dorking_queue:10:dorking_worker" + "osint_discovery_queue:10:osint_discovery_worker" + "h8mail_queue:10:h8mail_worker" + "theHarvester_queue:10:theHarvester_worker" + "send_scan_notif_queue:10:send_scan_notif_worker" +) + +for worker in "${workers[@]}"; do + IFS=':' read -r queue concurrency worker_name <<< "$worker" + commands+="$(generate_worker_command "$queue" "$concurrency" "$worker_name")"$'\n' +done +commands="${commands%&}" + +eval "$commands" + +wait \ No newline at end of file diff --git a/web/dashboard/admin.py b/web/dashboard/admin.py index be2a79a67..0c44dd932 100644 --- a/web/dashboard/admin.py +++ b/web/dashboard/admin.py @@ -5,3 +5,7 @@ admin.site.register(Project) admin.site.register(OpenAiAPIKey) admin.site.register(NetlasAPIKey) +admin.site.register(ChaosAPIKey) +admin.site.register(HackerOneAPIKey) +admin.site.register(InAppNotification) +admin.site.register(UserPreferences) \ No newline at end of file diff --git a/web/dashboard/migrations/0001_initial.py b/web/dashboard/migrations/0001_initial.py index 44e9ac9a7..542cb1f17 100644 --- a/web/dashboard/migrations/0001_initial.py +++ b/web/dashboard/migrations/0001_initial.py @@ -1,6 +1,8 @@ -# Generated by Django 3.2.23 on 2024-06-19 02:43 +# Generated by Django 3.2.23 on 2024-09-06 01:47 +from django.conf import settings from django.db import migrations, models +import django.db.models.deletion class Migration(migrations.Migration): @@ -8,9 +10,25 @@ class Migration(migrations.Migration): initial = True dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ + migrations.CreateModel( + name='ChaosAPIKey', + fields=[ + ('id', models.AutoField(primary_key=True, serialize=False)), + ('key', models.CharField(max_length=500)), + ], + ), + 
migrations.CreateModel( + name='HackerOneAPIKey', + fields=[ + ('id', models.AutoField(primary_key=True, serialize=False)), + ('username', models.CharField(max_length=500)), + ('key', models.CharField(max_length=500)), + ], + ), migrations.CreateModel( name='NetlasAPIKey', fields=[ @@ -49,4 +67,31 @@ class Migration(migrations.Migration): ('query', models.CharField(max_length=1000)), ], ), + migrations.CreateModel( + name='UserPreferences', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('bug_bounty_mode', models.BooleanField(default=True)), + ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ], + ), + migrations.CreateModel( + name='InAppNotification', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('notification_type', models.CharField(choices=[('system', 'system'), ('project', 'project')], default='system', max_length=10)), + ('status', models.CharField(choices=[('success', 'Success'), ('info', 'Informational'), ('warning', 'Warning'), ('error', 'Error')], default='info', max_length=10)), + ('title', models.CharField(max_length=255)), + ('description', models.TextField()), + ('icon', models.CharField(max_length=50)), + ('is_read', models.BooleanField(default=False)), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('redirect_link', models.URLField(blank=True, max_length=255, null=True)), + ('open_in_new_tab', models.BooleanField(default=False)), + ('project', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='dashboard.project')), + ], + options={ + 'ordering': ['-created_at'], + }, + ), ] diff --git a/web/dashboard/models.py b/web/dashboard/models.py index 8ed77dd43..a1ed47a7a 100644 --- a/web/dashboard/models.py +++ b/web/dashboard/models.py @@ -1,4 +1,6 @@ from django.db import models +from reNgine.definitions import * +from django.contrib.auth.models import User class SearchHistory(models.Model): @@ -41,3 +43,55 @@ class NetlasAPIKey(models.Model): def __str__(self): return self.key + + +class ChaosAPIKey(models.Model): + id = models.AutoField(primary_key=True) + key = models.CharField(max_length=500) + + def __str__(self): + return self.key + + +class HackerOneAPIKey(models.Model): + id = models.AutoField(primary_key=True) + username = models.CharField(max_length=500) + key = models.CharField(max_length=500) + + def __str__(self): + return self.username + + +class InAppNotification(models.Model): + project = models.ForeignKey(Project, on_delete=models.CASCADE, null=True, blank=True) + notification_type = models.CharField(max_length=10, choices=NOTIFICATION_TYPES, default='system') + status = models.CharField(max_length=10, choices=NOTIFICATION_STATUS_TYPES, default='info') + title = models.CharField(max_length=255) + description = models.TextField() + icon = models.CharField(max_length=50) # mdi icon class name + is_read = models.BooleanField(default=False) + created_at = models.DateTimeField(auto_now_add=True) + redirect_link = models.URLField(max_length=255, blank=True, null=True) + open_in_new_tab = models.BooleanField(default=False) + + class Meta: + ordering = ['-created_at'] + + def __str__(self): + if self.notification_type == 'system': + return f"System wide notif: {self.title}" + else: + return f"Project wide notif: {self.project.name}: {self.title}" + + @property + def is_system_wide(self): + # property to determine if the 
notification is system wide or project specific + return self.notification_type == 'system' + + +class UserPreferences(models.Model): + user = models.OneToOneField(User, on_delete=models.CASCADE) + bug_bounty_mode = models.BooleanField(default=True) + + def __str__(self): + return f"{self.user.username}'s preferences" diff --git a/web/dashboard/templates/dashboard/bountyhub_programs.html b/web/dashboard/templates/dashboard/bountyhub_programs.html new file mode 100644 index 000000000..130d1e054 --- /dev/null +++ b/web/dashboard/templates/dashboard/bountyhub_programs.html @@ -0,0 +1,92 @@ +{% extends 'base/base.html' %} +{% load humanize %} +{% load static %} + +{% block title %} +{{platform}} Programs +{% endblock title %} + +{% block custom_js_css_link %} +{% endblock custom_js_css_link %} + +{% block page_title %} +{{platform}} Programs +{% endblock page_title %} + +{% block breadcrumb_title %} +