Add scheduled task to update loved maps #53

Merged · merged 1 commit on May 29, 2024
4 changes: 4 additions & 0 deletions osuchan/settings.py
@@ -205,6 +205,10 @@ class EnvSettings(BaseSettings):
        "task": "leaderboards.tasks.dispatch_update_global_leaderboard_top_5_score_cache",
        "schedule": crontab(minute="*/20"),
    },
    "update-loved-beatmaps-every-month": {
        "task": "profiles.tasks.update_loved_beatmaps",
        "schedule": crontab(minute="0", hour="0", day_of_month="1"),
    },
}


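Note on the new schedule entry: Celery beat dispatches tasks by their dotted path, so a task registered as profiles.tasks.update_loved_beatmaps has to exist for the entry above to do anything. That task's body is not part of this diff; the snippet below is only a minimal sketch of the shape it could take. The refresh_beatmap helper and the BeatmapStatus import path are assumptions for illustration, not code from this PR.

# profiles/tasks.py -- hypothetical sketch; the real task body is not shown in this diff
from celery import shared_task

from common.osu.enums import BeatmapStatus  # assumed import path for the status enum
from profiles.models import Beatmap


@shared_task
def update_loved_beatmaps():
    # Loved maps can be un-loved (and have their leaderboards wiped) at any time,
    # so periodically refresh the stored state of every beatmap marked as loved.
    for beatmap in Beatmap.objects.filter(status=BeatmapStatus.LOVED):
        refresh_beatmap(beatmap)  # hypothetical helper, not defined in this PR


def refresh_beatmap(beatmap: Beatmap) -> None:
    # Placeholder: re-fetch the beatmap from the osu! API and save any changes.
    ...

Because the crontab entry fires at 00:00 on the first day of each month, the task only needs to be idempotent and safe to re-run; it does not need to be fast.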
171 changes: 0 additions & 171 deletions profiles/models.py
@@ -6,7 +6,6 @@

from common.error_reporter import ErrorReporter
from common.osu import utils
from common.osu.apiv1 import OsuApiV1
from common.osu.difficultycalculator import (
    AbstractDifficultyCalculator,
    DifficultyCalculator,
@@ -91,176 +90,6 @@ class UserStats(models.Model):

    objects = UserStatsQuerySet.as_manager()

    def add_scores_from_data(self, score_data_list: list[dict]):
        """
        Adds a list of scores and their beatmaps from the passed score_data_list.
        (requires all dicts to have beatmap_id set along with usual score data)
        """
        # Remove unranked scores
        # Only process "high scores" (highest scorev1 per mod per map per user)
        # (need to make this distinction to prevent lazer scores from being treated as ranked)
        ranked_score_data_list = [
            score_data
            for score_data in score_data_list
            if score_data.get("score_id", None) is not None
        ]

        # Parse dates
        for score_data in ranked_score_data_list:
            score_data["date"] = datetime.strptime(
                score_data["date"], "%Y-%m-%d %H:%M:%S"
            ).replace(tzinfo=timezone.utc)

        # Remove potential duplicates from a top 100 play also being in the recent 50
        # Unique on date since we don't track score_id (not ideal but not much we can do)
        unique_score_data_list = [
            score
            for score in ranked_score_data_list
            if score
            == next(s for s in ranked_score_data_list if s["date"] == score["date"])
        ]

        # Remove scores which already exist in db
        score_dates = [s["date"] for s in unique_score_data_list]
        existing_score_dates = Score.objects.filter(date__in=score_dates).values_list(
            "date", flat=True
        )
        new_score_data_list = []
        for score_data in unique_score_data_list:
            if score_data["date"] not in existing_score_dates:
                new_score_data_list.append(score_data)

        # Fetch beatmaps from database in bulk
        beatmap_ids = [int(s["beatmap_id"]) for s in new_score_data_list]
        beatmaps = list(Beatmap.objects.filter(id__in=beatmap_ids))

        beatmaps_to_create = []
        scores_to_create = []
        for score_data in new_score_data_list:
            score = Score()

            # Update Score fields
            score.score = int(score_data["score"])
            score.count_300 = int(score_data["count300"])
            score.count_100 = int(score_data["count100"])
            score.count_50 = int(score_data["count50"])
            score.count_miss = int(score_data["countmiss"])
            score.count_geki = int(score_data["countgeki"])
            score.count_katu = int(score_data["countkatu"])
            score.best_combo = int(score_data["maxcombo"])
            score.perfect = bool(int(score_data["perfect"]))
            score.mods = int(score_data["enabled_mods"])
            score.rank = score_data["rank"]
            score.date = score_data["date"]

            # Update foreign keys
            # Search for beatmap in fetched, else create it
            beatmap_id = int(score_data["beatmap_id"])
            beatmap = next(
                (beatmap for beatmap in beatmaps if beatmap.id == beatmap_id), None
            )
            if beatmap is None:
                osu_api_v1 = OsuApiV1()
                beatmap_data = osu_api_v1.get_beatmap(beatmap_id)
                if beatmap_data is None:
                    continue

                beatmap = Beatmap.from_data(beatmap_data)
                if (
                    beatmap.status
                    not in [
                        BeatmapStatus.APPROVED,
                        BeatmapStatus.RANKED,
                        BeatmapStatus.LOVED,
                    ]
                    or score.mods & Mods.UNRANKED != 0
                ):
                    # Skip unranked/unloved scores
                    continue
                beatmaps.append(
                    beatmap
                )  # add to beatmaps in case another score is on this map
                beatmaps_to_create.append(beatmap)
            score.beatmap = beatmap
            score.user_stats = self

            # Update pp
            if "pp" in score_data and score_data["pp"] is not None:
                score.performance_total = float(score_data["pp"])
                score.difficulty_calculator_engine = "legacy"
                score.difficulty_calculator_version = "legacy"
            else:
                # Check for gamemode
                if self.gamemode != Gamemode.STANDARD:
                    # We can't calculate pp for this mode yet so we need to disregard this score
                    continue

                try:
                    with DifficultyCalculator() as calc:
                        calculation = calc.calculate_score(
                            DifficultyCalculatorScore(
                                mods=score.mods,
                                beatmap_id=beatmap_id,
                                count_100=score.count_100,
                                count_50=score.count_50,
                                count_miss=score.count_miss,
                                combo=score.best_combo,
                            )
                        )
                        score.performance_total = calculation.performance_values[
                            "total"
                        ]
                        score.difficulty_calculator_engine = (
                            DifficultyCalculator.engine()
                        )
                        score.difficulty_calculator_version = (
                            DifficultyCalculator.version()
                        )
                except DifficultyCalculatorException as e:
                    error_reporter = ErrorReporter()
                    error_reporter.report_error(e)
                    continue

            # Update convenience fields
            score.gamemode = self.gamemode
            score.accuracy = utils.get_accuracy(
                score.count_300,
                score.count_100,
                score.count_50,
                score.count_miss,
                score.count_katu,
                score.count_geki,
                gamemode=self.gamemode,
            )
            score.bpm = utils.get_bpm(beatmap.bpm, score.mods)
            score.length = utils.get_length(beatmap.drain_time, score.mods)
            score.circle_size = utils.get_cs(
                beatmap.circle_size, score.mods, score.gamemode
            )
            score.approach_rate = utils.get_ar(beatmap.approach_rate, score.mods)
            score.overall_difficulty = utils.get_od(
                beatmap.overall_difficulty, score.mods
            )

            # Process score
            score.process()

            scores_to_create.append(score)

        # Bulk add and update beatmaps and scores
        created_beatmaps = Beatmap.objects.bulk_create(
            beatmaps_to_create,
            ignore_conflicts=True,  # potential race condition from two concurrent updates creating the same beatmap
        )
        created_scores = Score.objects.bulk_create(scores_to_create)

        # Recalculate with new scores added
        self.recalculate()
        self.save()

        # Return new scores
        return created_scores

    def recalculate(self):
        """
        Calculates pp totals (extra pp, nochoke pp) and scores style using unique maps