Merge pull request #9 from game-node-app/dev
New crontab implementation
Lamarcke authored Oct 21, 2024
2 parents 273f443 + f0d567a commit 30d99c6
Showing 7 changed files with 50 additions and 20 deletions.
8 changes: 8 additions & 0 deletions Dockerfile
@@ -4,9 +4,17 @@ WORKDIR /app

ENV PYTHONUNBUFFERED=1

RUN apt-get update && apt-get install -y cron vim

COPY requirements.txt requirements.txt

RUN pip install -r requirements.txt

COPY . .

RUN chmod 0644 crontab

RUN crontab crontab

#CMD ["cron","-f"]

3 changes: 3 additions & 0 deletions crontab
@@ -0,0 +1,3 @@
#
0 0 * * 0,4 /usr/local/bin/python3 /app/main.py > /proc/1/fd/1 2>&1
#
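The schedule `0 0 * * 0,4` fires at 00:00 on Sundays and Thursdays (cron weekdays 0 and 4), and the redirect to `/proc/1/fd/1` sends the job's output to PID 1's stdout so it shows up in `docker logs`. The Dockerfile above installs cron and registers this file with `crontab crontab`. As a quick illustration (not part of the commit), a Python check of whether a timestamp falls on this schedule could look like:

from datetime import datetime

def matches_schedule(ts: datetime) -> bool:
    # Crontab "0 0 * * 0,4": minute 0, hour 0, any day/month, weekday 0 or 4.
    # Cron counts Sunday as 0 and Thursday as 4; Python's weekday() counts
    # Monday as 0, so Sunday is 6 and Thursday is 3.
    return ts.minute == 0 and ts.hour == 0 and ts.weekday() in (6, 3)

print(matches_schedule(datetime(2024, 10, 20, 0, 0)))  # Sunday midnight -> True
print(matches_schedule(datetime(2024, 10, 24, 0, 0)))  # Thursday midnight -> True
print(matches_schedule(datetime(2024, 10, 21, 0, 0)))  # Monday midnight -> False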
2 changes: 1 addition & 1 deletion docker-compose.prod.yml
@@ -38,7 +38,7 @@ services:

restart: always

command: ["python3", "-u", "main.py"]
command: bash -c "printenv > /etc/environment && cron -f"

networks:
- game_node_app
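Cron launches jobs with an almost empty environment, so variables defined on the service (for example REDIS_URL) would not reach /app/main.py by default. Dumping them with `printenv > /etc/environment` before `cron -f` is the usual workaround: on Debian-based images, cron's PAM configuration typically loads /etc/environment for each job. Purely as an illustration (not something this commit adds), a job could also read that file itself:

import os

def load_etc_environment(path: str = "/etc/environment") -> None:
    # Merge KEY=VALUE pairs into os.environ without overriding existing values.
    try:
        with open(path) as handle:
            for line in handle:
                line = line.strip()
                if not line or line.startswith("#") or "=" not in line:
                    continue
                key, _, value = line.partition("=")
                os.environ.setdefault(key.strip(), value.strip().strip('"'))
    except FileNotFoundError:
        pass  # Running outside the container; nothing to load.

load_etc_environment()
print(os.environ.get("REDIS_URL"))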
2 changes: 0 additions & 2 deletions docker-compose.yml
@@ -21,5 +21,3 @@ services:

volumes:
redis:
db:

3 changes: 2 additions & 1 deletion igdb/config/redis.py
@@ -1,3 +1,4 @@
import logging
import os
from contextlib import contextmanager
from typing import Union
@@ -10,7 +11,7 @@
@contextmanager
def get_redis_connection() -> Redis:
# This should point to the Redis container hostname in production.
redis_url = os.environ.get("REDIS_URL", "redis://localhost:9011")
redis_url = os.environ.get("REDIS_URL", "redis://localhost:9012")
global redis_client
if redis_client is None:
redis_client = Redis.from_url(redis_url)
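For reference, `get_redis_connection` wraps a lazily created module-level client built from REDIS_URL (the local-development default now points at port 9012). A minimal usage sketch, assuming the helper is exported from `igdb.config` as in the service diff below; the key name here is made up for the example:

from igdb.config import get_redis_connection

with get_redis_connection() as redis:
    # The client is created on first use and reused afterwards.
    redis.set("example-key", "ok", ex=60)   # hypothetical key with a 60 s TTL
    print(redis.get("example-key"))         # b"ok" (redis-py returns bytes)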
36 changes: 27 additions & 9 deletions igdb/sync/service.py
@@ -6,7 +6,7 @@
from retry import retry

from igdb.auth import IGDBAuthService
from igdb.config import SingletonMeta
from igdb.config import SingletonMeta, get_redis_connection
import requests
import logging

@@ -59,21 +59,38 @@
"game_localizations.*",
"language_supports.*",
"first_release_date",
"age_ratings.*"
]

ITEMS_PER_PAGE = 500

IGDB_GAMES_ENDPOINT = "https://api.igdb.com/v4/games"

LAST_USED_OFFSET = None


class IGDBSyncService(metaclass=SingletonMeta):
__igdb_auth_service = IGDBAuthService()
__offset_persist_key = "idgb-sync-offset"

def __init__(self):
pass

def __get_offset(self) -> int:
with get_redis_connection() as redis:
offset_item = redis.get(self.__offset_persist_key)
if isinstance(offset_item, bytes):
return int(offset_item.decode("utf-8"))
if isinstance(offset_item, str):
return int(offset_item)

return offset_item

def __save_offset(self, offset: int):
with get_redis_connection() as redis:
# 1800s = 30min
print(f"Saving new offset on store: {offset}")
redis.set(self.__offset_persist_key, offset, ex=1800)


def __build_request_params(self, offset: int) -> Dict:
auth_token = self.__igdb_auth_service.get_token()
fields_values = ", ".join(IGDB_FIELDS)
@@ -96,19 +113,20 @@ def fetch_games_interval(self, offset: int):
def fetch_games(self):
has_next_page = True
current_offset = 0
# Fallback to return to previous attempt when errors occur
global LAST_USED_OFFSET
if LAST_USED_OFFSET is not None:
current_offset = LAST_USED_OFFSET

last_used_offset = self.__get_offset()
if last_used_offset is not None:
print(f"Using offset from store: {last_used_offset}")
current_offset = last_used_offset

while has_next_page:
print(f"Current offset: {current_offset}")
self.__save_offset(current_offset)
response = self.fetch_games_interval(current_offset)
print(f"Fetched {len(response)} entries")
current_offset += ITEMS_PER_PAGE
LAST_USED_OFFSET = current_offset
has_next_page = len(response) > 0 and len(response) >= ITEMS_PER_PAGE
if not has_next_page:
print(f"Detected last page of results at offset: {current_offset}")
LAST_USED_OFFSET = None
self.__save_offset(0)
yield response
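With this change the sync offset lives in Redis under `idgb-sync-offset` (key name as committed) with a 30-minute TTL: each loop iteration saves the current offset before fetching, a crashed run can resume from it on the next start, and the final page resets it to 0. A small sketch of that round trip, assuming the same Redis helper (illustrative, not part of the commit):

from igdb.config import get_redis_connection

OFFSET_KEY = "idgb-sync-offset"

with get_redis_connection() as redis:
    redis.set(OFFSET_KEY, 1500, ex=1800)  # what __save_offset does mid-run (30 min TTL)
    raw = redis.get(OFFSET_KEY)           # b"1500", or None once the key expires
    offset = int(raw) if raw is not None else 0
    print(offset)                         # 1500 -> the next fetch_games() run resumes here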
16 changes: 9 additions & 7 deletions main.py
@@ -1,19 +1,21 @@
import logging

from igdb.config import get_pika
from igdb.sync import IGDBSyncService
from time import sleep

import json
import logging

# 7 days
LOOP_WAIT_TIME = 86400 * 7
# 10 minutes
LOOP_ERROR_WAIT_TIME = 720
# 5 minutes
LOOP_ERROR_WAIT_TIME = 300
# 16 seconds
RUN_WAIT_TIME = 16

sync_service = IGDBSyncService()

def run():
sync_service = IGDBSyncService()
print(f"Starting IGDB sync job...")

with get_pika() as pika:
for games in sync_service.fetch_games():
pika.basic_publish(exchange="sync", routing_key="sync-igdb", body=json.dumps(games))
@@ -27,9 +29,9 @@ def run():
while True:
try:
run()
sleep(LOOP_WAIT_TIME)
except KeyboardInterrupt:
exit(0)
except Exception as e:
print(e)
logging.error(e, exc_info=e, stack_info=True)
sleep(LOOP_ERROR_WAIT_TIME)
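main.py still publishes each fetched page to the `sync` exchange with routing key `sync-igdb` as a JSON array; cron now starts the script on the crontab schedule, and a failed run is retried after the 5-minute error wait. For context only, a downstream consumer of those messages might look roughly like the sketch below (plain pika blocking API; the broker URL, queue name, exchange type, and durability flags are assumptions, not taken from this repository):

import json
import pika

connection = pika.BlockingConnection(pika.URLParameters("amqp://localhost:5672"))
channel = connection.channel()
channel.exchange_declare(exchange="sync", exchange_type="direct", durable=True)
channel.queue_declare(queue="sync-igdb-consumer", durable=True)
channel.queue_bind(queue="sync-igdb-consumer", exchange="sync", routing_key="sync-igdb")

def handle_batch(ch, method, properties, body):
    games = json.loads(body)  # one page of up to ITEMS_PER_PAGE (500) games
    print(f"received {len(games)} games")
    ch.basic_ack(delivery_tag=method.delivery_tag)

channel.basic_consume(queue="sync-igdb-consumer", on_message_callback=handle_batch)
channel.start_consuming()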
