add days_back_fetch support for cloudflare
8naama committed Jul 7, 2024
1 parent 7b29fd0 commit 7ba428a
Showing 2 changed files with 43 additions and 1 deletion.
40 changes: 40 additions & 0 deletions src/apis/cloudflare/Cloudflare.py
@@ -1,19 +1,30 @@
from datetime import datetime, timedelta, UTC
import logging
from pydantic import Field
import re

from src.apis.general.Api import ApiFetcher
from src.apis.general.PaginationSettings import PaginationSettings
from src.apis.general.StopPaginationSettings import StopPaginationSettings


DATE_FILTER_PARAMETER = "since="
FIND_DATE_PATTERN = re.compile(r'since=(\S+?)(?:&|$)')

logger = logging.getLogger(__name__)


class Cloudflare(ApiFetcher):
"""
    :param cloudflare_account_id: The Cloudflare Account ID
    :param cloudflare_bearer_token: The Cloudflare Bearer token
    :param pagination_off: True if pagination should be off, False otherwise
    :param days_back_fetch: Number of days to fetch back in the first request, Optional (adds a filter on 'since')
"""
cloudflare_account_id: str = Field(frozen=True)
cloudflare_bearer_token: str = Field(frozen=True)
pagination_off: bool = Field(default=False)
days_back_fetch: int = Field(default=-1, frozen=True)

def __init__(self, **data):
res_data_path = "result"
@@ -38,3 +49,32 @@ def __init__(self, **data):
# Update the cloudflare account id in both the url and next url
self.url = self.url.replace("{account_id}", self.cloudflare_account_id)
self.next_url = self.next_url.replace("{account_id}", self.cloudflare_account_id)

if self.days_back_fetch > 0:
self._initialize_url_date()

def _initialize_url_date(self):
if "?" in self.url:
self.url += f"&since={self._generate_start_fetch_date()}"
else:
self.url += f"?since={self._generate_start_fetch_date()}"

def _generate_start_fetch_date(self):
return (datetime.now(UTC) - timedelta(days=self.days_back_fetch)).strftime("%Y-%m-%dT%H:%M:%S.%fZ")

def send_request(self):
data = super().send_request()

# Add 1 second to a known date filter to avoid duplicates in the logs
if DATE_FILTER_PARAMETER in self.url:
try:
org_date = re.search(FIND_DATE_PATTERN, self.url).group(1)
org_date_date = datetime.strptime(org_date, "%Y-%m-%dT%H:%M:%S.%fZ")
org_date_plus_second = (org_date_date + timedelta(seconds=1)).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
self.url = self.url.replace(org_date, org_date_plus_second)
            except (IndexError, AttributeError):
logger.error(f"Failed to add 1s to the {self.name} api 'since' filter value, on url {self.url}")
except ValueError:
logger.error(f"Failed to parse API {self.name} date in URL: {self.url}")

return data
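For illustration only, below is a minimal standalone sketch (not part of the commit) of the date handling the new code performs: on the first run a `since` filter `days_back_fetch` days in the past is appended to the URL, and after each request that filter is advanced by one second so the last alert is not fetched twice. The function names here are hypothetical; the commit implements this logic inside the `Cloudflare` class as `_initialize_url_date`, `_generate_start_fetch_date`, and the `send_request` override.

```python
# Standalone sketch of the commit's 'since' handling (requires Python 3.11+ for datetime.UTC).
# Function names are illustrative only.
from datetime import datetime, timedelta, UTC
import re

FIND_DATE_PATTERN = re.compile(r'since=(\S+?)(?:&|$)')


def add_since_filter(url: str, days_back: int) -> str:
    # Mirrors _initialize_url_date: append 'since=<now - days_back>' using '?' or '&' as needed.
    start = (datetime.now(UTC) - timedelta(days=days_back)).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
    separator = "&" if "?" in url else "?"
    return f"{url}{separator}since={start}"


def bump_since_by_one_second(url: str) -> str:
    # Mirrors the send_request override: advance an existing 'since' value by one second.
    match = FIND_DATE_PATTERN.search(url)
    if not match:
        return url
    org_date = match.group(1)
    bumped = (datetime.strptime(org_date, "%Y-%m-%dT%H:%M:%S.%fZ")
              + timedelta(seconds=1)).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
    return url.replace(org_date, bumped)


# Example with a hypothetical account id:
url = add_since_filter("https://api.cloudflare.com/client/v4/accounts/abc123/alerting/v3/history", 7)
print(url)                             # ...history?since=<timestamp 7 days ago>
print(bump_since_by_one_second(url))   # same URL with 'since' advanced by one second
```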
4 changes: 3 additions & 1 deletion src/apis/cloudflare/README.md
@@ -10,6 +10,7 @@ By default `cloudflare` API type has built in pagination settings and sets the `
| url | The request URL | Required | - |
| next_url | If needed to update the URL in next requests based on the last response. Supports using variables (see [General API](../general/README.md)) | Optional | - |
| additional_fields | Additional custom fields to add to the logs before sending to logzio | Optional | - |
| days_back_fetch | Number of days to fetch back in the first request. Applies a filter on the `since` parameter. | Optional | - |
| scrape_interval | Time interval to wait between runs (unit: `minutes`) | Optional | 1 (minute) |
| pagination_off | True if built-in pagination should be off, False otherwise | Optional | `False` |

@@ -20,8 +21,9 @@ apis:
type: cloudflare
cloudflare_account_id: <<CLOUDFLARE_ACCOUNT_ID>>
cloudflare_bearer_token: <<CLOUDFLARE_BEARER_TOKEN>>
url: https://api.cloudflare.com/client/v4/accounts/{account_id}/alerting/v3/history?since=2024-05-21T04:06:20.522451Z
url: https://api.cloudflare.com/client/v4/accounts/{account_id}/alerting/v3/history
next_url: https://api.cloudflare.com/client/v4/accounts/{account_id}/alerting/v3/history?since={res.result.[0].sent}
days_back_fetch: 7
scrape_interval: 5
additional_fields:
type: cloudflare
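With `days_back_fetch: 7` as above, the first request would hit the history endpoint with a `since` value seven days in the past, e.g. `...alerting/v3/history?since=2024-06-30T05:00:00.000000Z` (illustrative timestamp). On later runs the collector advances the `since` value by one second to avoid re-ingesting the last alert, and `next_url` substitutes the `sent` timestamp of the first alert in the previous response.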
