Added: retry policy.
im-n1 committed Jan 30, 2023
1 parent 7309690 commit 61051b0
Showing 2 changed files with 45 additions and 21 deletions.
4 changes: 4 additions & 0 deletions README.rst
@@ -240,6 +240,10 @@ Retrieves live market data.
 Changelog
 ---------
 
+0.4.9
+~~~~~
+- new retry policy
+
 0.4.8
 ~~~~~
 - new ``fetch_crypto_live_data()``
62 changes: 41 additions & 21 deletions karpet/core.py
@@ -5,18 +5,21 @@
 
 import asyncio
 import time
-from datetime import date, datetime, timedelta
+from datetime import datetime, timedelta
 
 import aiohttp
 import numpy as np
 import pandas as pd
 import requests
 from bs4 import BeautifulSoup
+from requests.adapters import HTTPAdapter, Retry
 
 
 class Karpet:
 
     quick_search_data = None
+    req_retries = 4
+    req_backoff_factor = 3
 
     def __init__(self, start=None, end=None):
         """
@@ -28,6 +31,28 @@ def __init__(self, start=None, end=None):
 
         self.start = start
         self.end = end
+        self.req_ses = self.get_session()
+
+    def get_session(self):
+
+        # Waits 0s, 6s, 12s and 24s before successive retries (backoff_factor * 2 ** (retry - 1)).
+        status_forcelist = (500, 502, 503, 504, 429)
+
+        retry = Retry(
+            total=self.req_retries,
+            read=self.req_retries,
+            connect=self.req_retries,
+            backoff_factor=self.req_backoff_factor,
+            status_forcelist=status_forcelist,
+            method_whitelist=False,
+        )
+        adapter = HTTPAdapter(max_retries=retry)
+
+        session = requests.Session()
+        # session.mount("http://", adapter)
+        session.mount("https://", adapter)
+
+        return session
 
     def get_quick_search_data(self):
         """
@@ -53,24 +78,12 @@ def get_quick_search_data(self):
         :rtype: list
         """
 
-        if self.quick_search_data:
-            return self.quick_search_data
-
-        url = "https://s2.coinmarketcap.com/generated/search/quick_search.json"
-
-        # Download.
-        try:
-            response = requests.get(url)
-        except:
-            raise Exception("Couldn't download necessary data from the internet.")
-
-        # Parse.
-        try:
-            self.quick_search_data = response.json()
+        if not self.quick_search_data:
+            self.quick_search_data = self._get_json(
+                "https://s2.coinmarketcap.com/generated/search/quick_search.json"
+            )
 
-            return self.quick_search_data
-        except:
-            raise Exception("Couldn't parse downloaded data from the internet.")
+        return self.quick_search_data
 
     def fetch_crypto_historical_data(self, symbol=None, id=None):
         """
@@ -246,7 +259,12 @@ def fetch_google_trends(
 
         stich_overlap = trdays - overlap
         n_days = (self.end - self.start).days
-        pytrends = TrendReq(hl=hl, tz=tz)
+        pytrends = TrendReq(
+            hl=hl,
+            tz=tz,
+            retries=self.req_retries,
+            backoff_factor=self.req_backoff_factor,
+        )
 
         # Get the dates for each search.
         if n_days <= trdays:
@@ -402,7 +420,7 @@ def get_top_news():
         headers = {
             "User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:69.0) Gecko/20100101 Firefox/69.0"
         }
-        response = requests.get("https://cointelegraph.com/", headers=headers)
+        response = self.req_ses.get("https://cointelegraph.com/", headers=headers)
         dom = BeautifulSoup(response.text, "lxml")
 
         def parse_news(news_items):
@@ -673,10 +691,12 @@ def _get_json(self, url):
 
         # Download.
         try:
-            response = requests.get(url)
+            response = self.req_ses.get(url)
         except:
             raise Exception("Couldn't download necessary data from the internet.")
 
+        response.raise_for_status()
+
         # Parse.
         try:
             return response.json()
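For reference, a minimal standalone sketch of the retry pattern this commit wires into Karpet (the helper name make_retrying_session is illustrative, not part of the library). Note that method_whitelist, used above, was renamed to allowed_methods in urllib3 1.26 and removed in 2.0, so the sketch assumes the newer keyword:

import requests
from requests.adapters import HTTPAdapter, Retry


def make_retrying_session(retries=4, backoff_factor=3):
    # Same policy as Karpet.get_session(), spelled for urllib3 >= 1.26.
    retry = Retry(
        total=retries,
        read=retries,
        connect=retries,
        backoff_factor=backoff_factor,  # waits 0s, 6s, 12s, 24s on urllib3 1.x
        status_forcelist=(500, 502, 503, 504, 429),
        allowed_methods=None,  # retry every HTTP method
    )
    adapter = HTTPAdapter(max_retries=retry)
    session = requests.Session()
    session.mount("https://", adapter)
    session.mount("http://", adapter)  # the commit mounts https:// only
    return session


session = make_retrying_session()
response = session.get("https://s2.coinmarketcap.com/generated/search/quick_search.json")
response.raise_for_status()  # non-forcelisted failures (e.g. 404) still surface here

A request that keeps returning a forcelisted status past the retry budget raises requests.exceptions.RetryError instead of returning a response; in _get_json() the bare except turns that into the generic download error, while the new raise_for_status() call catches statuses the adapter does not retry.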
