feat: improve encounter random generator filters #85

Open
wants to merge 20 commits into base: master
Changes from 15 commits
4 changes: 2 additions & 2 deletions Cargo.toml
@@ -55,8 +55,8 @@ sqlx = { version = "0.8.2", features = ["runtime-async-std", "sqlite"] }
cached = { version = "0.54.0", features = ["async"] }

anyhow = "1.0.93"
serde = { version = "1.0.214", features = ["derive"] }
serde_json = "1.0.132"
serde = { version = "1.0.215", features = ["derive"] }
serde_json = "1.0.133"
strum = {version="0.26.3", features = ["derive"]}
fastrand = "2.2.0"
counter = "0.6.0"
105 changes: 68 additions & 37 deletions src/db/bestiary_proxy.rs
@@ -1,9 +1,11 @@
use crate::models::creature::creature_struct::Creature;
use std::collections::{HashMap, HashSet};
use std::collections::HashMap;

use crate::db::data_providers::creature_fetcher::fetch_traits_associated_with_creatures;
use crate::db::data_providers::{creature_fetcher, generic_fetcher};
use crate::models::bestiary_structs::{BestiaryPaginatedRequest, CreatureSortEnum};
use crate::models::bestiary_structs::{
BestiaryFilterQuery, BestiaryPaginatedRequest, CreatureSortEnum,
};
use crate::models::creature::creature_component::creature_core::CreatureCoreData;
use crate::models::creature::creature_filter_enum::{CreatureFilter, FieldsUniqueValuesStruct};
use crate::models::creature::creature_metadata::alignment_enum::AlignmentEnum;
@@ -16,6 +18,7 @@ use crate::models::routers_validator_structs::{CreatureFieldFilters, OrderEnum};
use crate::AppState;
use anyhow::Result;
use cached::proc_macro::once;
use itertools::Itertools;
use strum::IntoEnumIterator;

pub async fn get_creature_by_id(
@@ -94,6 +97,26 @@ pub async fn get_paginated_creatures(
.essential
.family
.cmp(&b.core_data.essential.family),
CreatureSortEnum::Alignment => a
.core_data
.essential
.alignment
.cmp(&b.core_data.essential.alignment),
CreatureSortEnum::Melee => a
.core_data
.derived
.is_melee
.cmp(&b.core_data.derived.is_melee),
CreatureSortEnum::Ranged => a
.core_data
.derived
.is_ranged
.cmp(&b.core_data.derived.is_ranged),
CreatureSortEnum::SpellCaster => a
.core_data
.derived
.is_spell_caster
.cmp(&b.core_data.derived.is_spell_caster),
};
match pagination
.bestiary_sort_data
@@ -105,11 +128,14 @@
OrderEnum::Descending => cmp.reverse(),
}
});

let curr_slice: Vec<Creature> = filtered_list
.iter()
.skip(pagination.paginated_request.cursor as usize)
.take(pagination.paginated_request.page_size.unsigned_abs() as usize)
.take(if pagination.paginated_request.page_size >= 0 {
pagination.paginated_request.page_size.unsigned_abs() as usize
} else {
usize::MAX
})
.cloned()
.collect();

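Standalone sketch of the new page_size handling (the page helper and the plain slice below are illustrative, not part of this PR): a negative page_size now means "no upper bound after the cursor" instead of being silently coerced to its absolute value.

fn page<T: Clone>(items: &[T], cursor: usize, page_size: i64) -> Vec<T> {
    items
        .iter()
        .skip(cursor)
        .take(if page_size >= 0 {
            // non-negative: honour the requested page size
            page_size.unsigned_abs() as usize
        } else {
            // negative: no limit, return everything after the cursor
            usize::MAX
        })
        .cloned()
        .collect()
}

// page(&(0..10).collect::<Vec<i32>>(), 2, 3)  -> [2, 3, 4]
// page(&(0..10).collect::<Vec<i32>>(), 2, -1) -> [2, 3, 4, 5, 6, 7, 8, 9]
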
@@ -118,16 +144,24 @@

pub async fn get_creatures_passing_all_filters(
app_state: &AppState,
key_value_filters: HashMap<CreatureFilter, HashSet<String>>,
filters: &BestiaryFilterQuery,
fetch_weak: bool,
fetch_elite: bool,
) -> Result<Vec<Creature>> {
let mut creature_vec = Vec::new();
let level_vec = key_value_filters
.get(&CreatureFilter::Level)
.map_or_else(HashSet::new, std::clone::Clone::clone);
let modified_filters =
prepare_filters_for_db_communication(key_value_filters, fetch_weak, fetch_elite);
let level_vec = filters.creature_table_fields_filter.level_filter.clone();
let mut modified_filters = filters.clone();
modified_filters.creature_table_fields_filter.level_filter =
prepare_level_filter_for_db_communication(
filters
.creature_table_fields_filter
.level_filter
.clone()
.into_iter(),
fetch_weak,
fetch_elite,
);

for core in
creature_fetcher::fetch_creatures_core_data_with_filters(&app_state.conn, &modified_filters)
.await?
@@ -137,13 +171,13 @@ pub async fn get_creatures_passing_all_filters(
// mean that if we have [0,1,2,3] in the filter and allow_elite => [-1,0,1,2,3], then
// a creature of level 1 would always be considered the elite variant of level 0. We
// duplicate the data instead: level 0 yields both the base creature (at level 0) and
// its elite variant (listed at level 1)
if fetch_weak && level_vec.contains(&(core.essential.base_level - 1).to_string()) {
if fetch_weak && level_vec.contains(&(core.essential.base_level - 1)) {
creature_vec.push(Creature::from_core_with_variant(
core.clone(),
CreatureVariant::Weak,
));
}
if fetch_elite && level_vec.contains(&(core.essential.base_level + 1).to_string()) {
if fetch_elite && level_vec.contains(&(core.essential.base_level + 1)) {
creature_vec.push(Creature::from_core_with_variant(
core.clone(),
CreatureVariant::Elite,
@@ -227,12 +261,12 @@ async fn get_all_keys(app_state: &AppState) -> FieldsUniqueValuesStruct {

/// Gets all the creature core data from the DB. It will not fetch data outside of variant and core.
/// It will cache the result.
#[once(sync_writes = true, result = true)]
async fn get_all_creatures_from_db(app_state: &AppState) -> Result<Vec<CreatureCoreData>> {
creature_fetcher::fetch_creatures_core_data(&app_state.conn, 0, -1).await
}

/// Infallible method, it will expose a vector representing the values fetched from db or empty vec
#[once(sync_writes = true)]
async fn get_list(app_state: &AppState, variant: CreatureVariant) -> Vec<Creature> {
if let Ok(creatures) = get_all_creatures_from_db(app_state).await {
return match variant {
@@ -261,30 +295,27 @@ pub fn order_list_by_level(creature_list: &[Creature]) -> HashMap<i64, Vec<Creat
/// Used to prepare the filters for db communication.
/// The level must be adjusted if elite/weak must be fetched.
/// Example: if weak variants are allowed, creatures one level above the requested level can also be fetched, since their weak variant lands on the requested level.
fn prepare_filters_for_db_communication(
key_value_filters: HashMap<CreatureFilter, HashSet<String>>,
fn prepare_level_filter_for_db_communication<I>(
level_filter: I,
fetch_weak: bool,
fetch_elite: bool,
) -> HashMap<CreatureFilter, HashSet<String>> {
key_value_filters
.into_iter()
.map(|(key, values)| match key {
CreatureFilter::Level => {
let mut new_values = HashSet::new();
for str_level in values {
if let Ok(level) = str_level.parse::<i64>() {
if fetch_weak {
new_values.insert((level + 1).to_string());
}
if fetch_elite {
new_values.insert((level - 1).to_string());
}
new_values.insert(level.to_string());
}
}
(key, new_values)
}
_ => (key, values),
})
.collect()
) -> Vec<i64>
where
I: Iterator<Item = i64>,
{
// do not remove sorted(): merge and dedup below rely on their inputs being sorted
let levels = level_filter.sorted().collect::<Vec<_>>();
let levels_for_elite: Vec<i64> = if fetch_elite {
levels.iter().map(|x| x - 1).collect()
} else {
vec![]
};
let levels_for_weak: Vec<i64> = if fetch_weak {
levels.iter().map(|x| x + 1).collect()
} else {
vec![]
};

let x = itertools::merge(levels_for_elite, levels_for_weak).collect::<Vec<_>>();
itertools::merge(x, levels).dedup().collect::<Vec<_>>()
}
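
Standalone sketch of the level expansion performed by the new prepare_level_filter_for_db_communication, assuming the filter carries plain i64 levels (the expand_levels name is illustrative, not part of the PR): for a requested level R, elite variants require also querying base level R - 1 and weak variants base level R + 1, since those variants are presented at R.

use itertools::Itertools;

fn expand_levels(levels: &[i64], fetch_weak: bool, fetch_elite: bool) -> Vec<i64> {
    // sorted input is required: merge assumes sorted iterators and dedup only
    // removes consecutive duplicates
    let base: Vec<i64> = levels.iter().copied().sorted().collect();
    let for_elite: Vec<i64> = if fetch_elite {
        base.iter().map(|x| x - 1).collect()
    } else {
        vec![]
    };
    let for_weak: Vec<i64> = if fetch_weak {
        base.iter().map(|x| x + 1).collect()
    } else {
        vec![]
    };
    let variants: Vec<i64> = itertools::merge(for_elite, for_weak).collect();
    itertools::merge(variants, base).dedup().collect()
}

// expand_levels(&[0, 1, 2, 3], true, true) -> [-1, 0, 1, 2, 3, 4]
// expand_levels(&[1, 2, 3], false, true)   -> [0, 1, 2, 3]
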
7 changes: 3 additions & 4 deletions src/db/data_providers/creature_fetcher.rs
@@ -3,6 +3,7 @@ use crate::db::data_providers::generic_fetcher::{
fetch_weapon_damage_data, fetch_weapon_runes, fetch_weapon_traits, MyString,
};
use crate::db::data_providers::raw_query_builder::prepare_filtered_get_creatures_core;
use crate::models::bestiary_structs::BestiaryFilterQuery;
use crate::models::creature::creature_component::creature_combat::{
CreatureCombatData, SavingThrows,
};
@@ -12,7 +13,6 @@ use crate::models::creature::creature_component::creature_extra::{
};
use crate::models::creature::creature_component::creature_spell_caster::CreatureSpellCasterData;
use crate::models::creature::creature_component::creature_variant::CreatureVariantData;
use crate::models::creature::creature_filter_enum::CreatureFilter;
use crate::models::creature::creature_metadata::alignment_enum::ALIGNMENT_TRAITS;
use crate::models::creature::creature_metadata::variant_enum::CreatureVariant;
use crate::models::creature::creature_struct::Creature;
@@ -46,7 +46,6 @@ use crate::models::scales_struct::strike_bonus_scales::StrikeBonusScales;
use crate::models::scales_struct::strike_dmg_scales::StrikeDmgScales;
use anyhow::Result;
use sqlx::{Pool, Sqlite};
use std::collections::{HashMap, HashSet};

async fn fetch_creature_immunities(
conn: &Pool<Sqlite>,
@@ -524,9 +523,9 @@

pub async fn fetch_creatures_core_data_with_filters(
conn: &Pool<Sqlite>,
key_value_filters: &HashMap<CreatureFilter, HashSet<String>>,
bestiary_filter_query: &BestiaryFilterQuery,
) -> Result<Vec<CreatureCoreData>> {
let query = prepare_filtered_get_creatures_core(key_value_filters);
let query = prepare_filtered_get_creatures_core(bestiary_filter_query);
let core_data: Vec<CreatureCoreData> = sqlx::query_as(query.as_str()).fetch_all(conn).await?;
Ok(update_creatures_core_with_traits(conn, core_data).await)
}