Commit eae38cd

- Removing StorageFixed and just going with StorageDynamic (now just called Storage)

- Changing the with_capacity function to now take a struct of all desired archetype capacities
- Removing the previously-added zip functions in favor of...
- Adding iter() and iter_mut() to archetypes for when you have a mutable reference
recatek committed Jul 21, 2024
1 parent 14f55be commit eae38cd
Showing 24 changed files with 439 additions and 1,008 deletions.
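To illustrate the API change described in the commit message above, here is a minimal usage sketch. It assumes the macro's default `EcsWorld`/`EcsWorldCapacity` naming, direct field access to the archetype (`world.arch_foo`), and made-up `ArchFoo`, `CompA`, and `CompB` types; none of this code is part of the commit itself.

```rust
use gecs::prelude::*;

pub struct CompA(pub u32);
pub struct CompB(pub u32);

ecs_world! {
    // Archetypes no longer declare a capacity -- all storage is now dynamic.
    ecs_archetype!(ArchFoo, CompA, CompB);
}

fn example() {
    // with_capacity now takes a struct with one field per archetype,
    // replacing the old list of per-archetype capacity arguments.
    let mut world = EcsWorld::with_capacity(EcsWorldCapacity { arch_foo: 128 });

    let _entity = world.create::<ArchFoo>((CompA(1), CompB(2)));

    // New: iterate an archetype's entities and components directly
    // when you hold a mutable reference to it.
    for (_entity, a, b) in world.arch_foo.iter_mut() {
        a.0 += b.0;
    }
}
```

Per the doc comment on the generated `with_capacity`, a capacity of 0 means that archetype does not allocate until an entity is created in it.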
16 changes: 0 additions & 16 deletions Cargo.lock

Some generated files are not rendered by default.

1 change: 0 additions & 1 deletion Cargo.toml
@@ -22,4 +22,3 @@ wrapping_entity_raw_version = []
gecs_macros = { version = "0.3.0", path = "macros", default_features = false }

seq-macro = { version = "0.3.5" } # For building "variadic" storage
itertools = { version = "0.13.0" } # For zipping slices in archetypes
25 changes: 4 additions & 21 deletions macros/src/data.rs
@@ -2,9 +2,9 @@ use std::collections::HashMap;

use base64::Engine as _;
use speedy::{Readable, Writable};
use syn::{Expr, Ident};
use syn::Ident;

use crate::parse::{HasAttributeId, ParseAttributeCfg, ParseCapacity, ParseEcsFinalize};
use crate::parse::{HasAttributeId, ParseAttributeCfg, ParseEcsFinalize};

#[derive(Debug, Readable, Writable)]
pub struct DataWorld {
@@ -30,15 +30,7 @@ pub struct DataComponent
}

#[derive(Debug)]
pub struct DataArchetypeBuildOnly {
pub capacity: DataCapacity,
}

#[derive(Debug)]
pub enum DataCapacity {
Fixed(Expr),
Dynamic,
}
pub struct DataArchetypeBuildOnly;

impl DataWorld {
pub fn new(mut parse: ParseEcsFinalize) -> syn::Result<Self> {
@@ -82,9 +74,7 @@ impl DataWorld {
});
}

let build_data = DataArchetypeBuildOnly {
capacity: convert_capacity(archetype.capacity),
};
let build_data = DataArchetypeBuildOnly;

archetypes.push(DataArchetype {
id: last_archetype_id.unwrap(),
@@ -126,13 +116,6 @@ impl DataArchetype {
}
}

fn convert_capacity(capacity: ParseCapacity) -> DataCapacity {
match capacity {
ParseCapacity::Fixed(expr) => DataCapacity::Fixed(expr),
ParseCapacity::Dynamic => DataCapacity::Dynamic,
}
}

fn evaluate_cfgs(cfg_data: &HashMap<String, bool>, cfgs: &[ParseAttributeCfg]) -> bool {
for cfg in cfgs {
let predicate = cfg.predicate.to_string();
110 changes: 48 additions & 62 deletions macros/src/generate/world.rs
@@ -2,7 +2,7 @@ use proc_macro2::TokenStream;
use quote::{format_ident, quote};
use xxhash_rust::xxh3::xxh3_128;

use crate::data::{DataArchetype, DataCapacity, DataWorld};
use crate::data::{DataArchetype, DataWorld};
use crate::generate::util::to_snake;

#[allow(non_snake_case)] // Allow for type-like names to make quote!() clearer
@@ -19,6 +19,7 @@ pub fn generate_world(world_data: &DataWorld, raw_input: &str) -> TokenStream {

// Types and traits
let World = format_ident!("{}", world_data.name);
let WorldCapacity = format_ident!("{}Capacity", world_data.name);

let ArchetypeSelectId = format_ident!("ArchetypeSelectId");
let ArchetypeSelectEntity = format_ident!("ArchetypeSelectEntity");
@@ -72,7 +73,13 @@ pub fn generate_world(world_data: &DataWorld, raw_input: &str) -> TokenStream {

quote!(
#( pub use #ecs_world_sealed::#Archetype; )*
pub use #ecs_world_sealed::{#World, #ArchetypeSelectId, #ArchetypeSelectEntity, #ArchetypeSelectEntityRaw};
pub use #ecs_world_sealed::{
#World,
#WorldCapacity,
#ArchetypeSelectId,
#ArchetypeSelectEntity,
#ArchetypeSelectEntityRaw
};

#[doc(hidden)]
pub use #ecs_world_sealed::{#ArchetypeSelectInternalWorld};
@@ -93,6 +100,12 @@ pub fn generate_world(world_data: &DataWorld, raw_input: &str) -> TokenStream {
)*
}

pub struct #WorldCapacity {
#(
pub #with_capacity_param,
)*
}

impl #World {
/// Creates a new empty world.
///
@@ -111,17 +124,15 @@ pub fn generate_world(world_data: &DataWorld, raw_input: &str) -> TokenStream {

/// Creates a new world with per-archetype capacities.
///
/// Expects a capacity value for every dynamic archetype.
///
/// This will allocate all archetypes to either their fixed size, or the given
/// dynamic capacity. If a given dynamic capacity is 0, that archetype will not
/// allocate until an entity is created in it.
/// This will allocate all archetypes to the given dynamic capacity. If a
/// given dynamic capacity is 0, that archetype will not allocate until an
/// entity is created in it.
///
/// # Panics
///
/// This will panic if given a size that exceeds the maximum possible capacity
/// value for an archetype (currently `16,777,216`).
pub fn with_capacity(#(#with_capacity_param)*) -> Self {
pub fn with_capacity(capacity: #WorldCapacity) -> Self {
Self {
#( #archetype: #Archetype::#with_capacity_new, )*
}
@@ -485,33 +496,12 @@ fn section_archetype(archetype_data: &DataArchetype) -> TokenStream {
let SlicesN = format_ident!("Slices{}", count_str);
let ContentArgs = quote!(#Archetype, #(#Component),*);

let (StorageN, BorrowN, StorageArgs) =
match &archetype_data.build_data.as_ref().unwrap().capacity {
DataCapacity::Fixed(expr) => (
format_ident!("StorageFixed{}", count_str),
format_ident!("BorrowFixed{}", count_str),
quote!(#Archetype, #(#Component,)* { #expr }),
),
DataCapacity::Dynamic => (
format_ident!("StorageDynamic{}", count_str),
format_ident!("BorrowDynamic{}", count_str),
quote!(#Archetype, #(#Component,)*),
),
};
let StorageN = format_ident!("Storage{}", count_str);
let BorrowN = format_ident!("Borrow{}", count_str);
let StorageArgs = quote!(#Archetype, #(#Component,)*);

// Generated subsections
let with_capacity = match &archetype_data.build_data.as_ref().unwrap().capacity {
DataCapacity::Fixed(_) => quote!(),
DataCapacity::Dynamic => quote!(
/// Constructs a new archetype pre-allocated to the given storage capacity.
///
/// If the given capacity would result in zero size, this will not allocate.
#[inline(always)]
pub fn with_capacity(capacity: usize) -> Self {
Self { data: #StorageN::with_capacity(capacity) }
}
),
};
let IterArgs = quote!(&Entity<#Archetype>, #(&#Component,)*);
let IterMutArgs = quote!(&Entity<#Archetype>, #(&mut #Component,)*);

// Function names
let get_slice = (0..count)
@@ -563,7 +553,13 @@ fn section_archetype(archetype_data: &DataArchetype) -> TokenStream {
Self { data: #StorageN::new() }
}

#with_capacity // Only generated for dynamic storage
/// Constructs a new archetype pre-allocated to the given storage capacity.
///
/// If the given capacity would result in zero size, this will not allocate.
#[inline(always)]
pub fn with_capacity(capacity: usize) -> Self {
Self { data: #StorageN::with_capacity(capacity) }
}

/// Returns the number of entities in the archetype, also referred to as its length.
#[inline(always)]
@@ -589,7 +585,7 @@ fn section_archetype(archetype_data: &DataArchetype) -> TokenStream {

/// Returns the generational version of the archetype. Intended for internal use.
#[inline(always)]
pub const fn version(&self) -> VersionArchetype {
pub const fn version(&self) -> ArchetypeVersion {
self.data.version()
}

@@ -641,6 +637,18 @@ fn section_archetype(archetype_data: &DataArchetype) -> TokenStream {
self.data.remove(entity)
}

/// Returns an iterator over all of the entities and their data.
#[inline(always)]
pub fn iter(&mut self) -> impl Iterator<Item = (#IterArgs)> {
self.data.iter()
}

/// Returns a mutable iterator over all of the entities and their data.
#[inline(always)]
pub fn iter_mut(&mut self) -> impl Iterator<Item = (#IterMutArgs)> {
self.data.iter_mut()
}

/// Begins a borrow context for the given entity on this archetype. This will allow
/// direct access to that entity's components with runtime borrow checking. This can
/// be faster than accessing the components as slices, as it will skip bounds checks.
@@ -838,22 +846,6 @@ fn section_archetype(archetype_data: &DataArchetype) -> TokenStream {
}
}

impl<'a> #ArchetypeSlices<'a> {
#[inline(always)]
pub fn zipped(
&'a self,
) -> impl Iterator<Item = (&'a Entity<#Archetype>, #(&'a #Component),*)> {
::gecs::__internal::izip!(self.entity.iter(), #(self.#component.iter()),*)
}

#[inline(always)]
pub fn zipped_mut(
&'a mut self,
) -> impl Iterator<Item = (&'a Entity<#Archetype>, #(&'a mut #Component),*)> {
::gecs::__internal::izip!(self.entity.iter(), #(self.#component.iter_mut()),*)
}
}

impl<'a> ArchetypeCanResolve<'a, #ArchetypeView<'a>, Entity<#Archetype>> for #Archetype {
#[inline(always)]
fn resolve_for(&self, key: Entity<#Archetype>) -> Option<usize> {
@@ -882,18 +874,12 @@ fn section_archetype(archetype_data: &DataArchetype) -> TokenStream {

#[allow(non_snake_case)]
fn with_capacity_param(archetype_data: &DataArchetype) -> TokenStream {
let archetype_capacity = format_ident!("capacity_{}", to_snake(&archetype_data.name));
match archetype_data.build_data.as_ref().unwrap().capacity {
DataCapacity::Fixed(_) => quote!(),
DataCapacity::Dynamic => quote!(#archetype_capacity: usize,),
}
let archetype = format_ident!("{}", to_snake(&archetype_data.name));
quote!(#archetype: usize)
}

#[allow(non_snake_case)]
fn with_capacity_new(archetype_data: &DataArchetype) -> TokenStream {
let archetype_capacity = format_ident!("capacity_{}", to_snake(&archetype_data.name));
match archetype_data.build_data.as_ref().unwrap().capacity {
DataCapacity::Fixed(_) => quote!(new()),
DataCapacity::Dynamic => quote!(with_capacity(#archetype_capacity)),
}
let archetype = format_ident!("{}", to_snake(&archetype_data.name));
quote!(with_capacity(capacity.#archetype))
}
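For concreteness, here is a rough sketch of what the updated `with_capacity_param` / `with_capacity_new` helpers cause `generate_world` to emit for a hypothetical two-archetype world. The `EcsWorld`, `EcsWorldCapacity`, `ArchFoo`, and `ArchBar` names are placeholders, and the archetype stubs stand in for the real generated storage types; this is an approximation of the expansion, not literal macro output.

```rust
// Stub archetypes standing in for the macro-generated ones (illustration only).
pub struct ArchFoo;
pub struct ArchBar;

impl ArchFoo {
    pub fn with_capacity(_capacity: usize) -> Self {
        Self
    }
}

impl ArchBar {
    pub fn with_capacity(_capacity: usize) -> Self {
        Self
    }
}

// One public usize field per archetype, named after the archetype in snake case.
pub struct EcsWorldCapacity {
    pub arch_foo: usize,
    pub arch_bar: usize,
}

pub struct EcsWorld {
    pub arch_foo: ArchFoo,
    pub arch_bar: ArchBar,
}

impl EcsWorld {
    // Each archetype is constructed from its own field of the capacity struct,
    // mirroring `quote!(with_capacity(capacity.#archetype))` above.
    pub fn with_capacity(capacity: EcsWorldCapacity) -> Self {
        Self {
            arch_foo: ArchFoo::with_capacity(capacity.arch_foo),
            arch_bar: ArchBar::with_capacity(capacity.arch_bar),
        }
    }
}
```

A call then looks like `EcsWorld::with_capacity(EcsWorldCapacity { arch_foo: 128, arch_bar: 0 })`, where a zero capacity means that archetype defers allocation until its first entity is created.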
26 changes: 2 additions & 24 deletions macros/src/parse/world.rs
@@ -4,8 +4,8 @@ use proc_macro2::{Span, TokenStream};
use quote::format_ident;
use syn::parse::{Parse, ParseStream};
use syn::punctuated::Punctuated;
use syn::token::{Comma, Dyn, Semi};
use syn::{braced, bracketed, parenthesized, Expr, Ident, LitBool, LitInt, Token};
use syn::token::{Comma, Semi};
use syn::{braced, bracketed, parenthesized, Ident, LitBool, LitInt, Token};

use super::*;

@@ -52,7 +52,6 @@ pub struct ParseArchetype {
pub cfgs: Vec<ParseAttributeCfg>,
pub id: Option<u8>,
pub name: Ident,
pub capacity: ParseCapacity,
pub components: Vec<ParseComponent>,
}

@@ -86,12 +85,6 @@ pub enum ParseAttributeData {
ComponentId(ParseAttributeId),
}

#[derive(Debug)]
pub enum ParseCapacity {
Fixed(Expr),
Dynamic,
}

impl ParseEcsWorld {
pub fn collect_all_cfg_predicates(&self) -> Vec<TokenStream> {
let mut filter = HashSet::new();
@@ -227,8 +220,6 @@ impl Parse for ParseArchetype {

let name: Ident = content.parse()?;

content.parse::<Comma>()?;
let capacity: ParseCapacity = content.parse()?;
content.parse::<Comma>()?;

let components: Vec<ParseComponent> =
@@ -242,7 +233,6 @@
cfgs,
id: None,
name,
capacity,
components,
})
}
@@ -345,18 +335,6 @@ impl Parse for ParseAttribute {
}
}

impl Parse for ParseCapacity {
fn parse(input: ParseStream) -> syn::Result<Self> {
let lookahead = input.lookahead1();
if lookahead.peek(Dyn) {
input.parse::<Dyn>()?;
Ok(ParseCapacity::Dynamic)
} else {
input.parse().map(ParseCapacity::Fixed)
}
}
}

impl HasAttributeId for ParseArchetype {
fn name(&self) -> &Ident {
&self.name
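Since `ParseCapacity` (and its handling of the `dyn` keyword) is removed, the archetype declaration syntax loses its capacity argument. A hedged before/after sketch, using hypothetical `ArchFoo`/`ArchBar` archetypes and `CompA`/`CompB`/`CompC` components, and assuming the crate's `ecs_world!`/`ecs_archetype!` surface syntax:

```rust
use gecs::prelude::*;

pub struct CompA(pub u32);
pub struct CompB(pub u32);
pub struct CompC(pub u32);

// Before this commit, each archetype declared a capacity between its name and
// its component list -- either a fixed expression or `dyn` for dynamic storage:
//
//     ecs_archetype!(ArchFoo, 100, CompA, CompB);
//     ecs_archetype!(ArchBar, dyn, CompA, CompC);
//
// After this commit the capacity argument is gone and every archetype uses
// dynamically sized storage:
ecs_world! {
    ecs_archetype!(ArchFoo, CompA, CompB);
    ecs_archetype!(ArchBar, CompA, CompC);
}
```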
(Remaining file diffs not rendered.)
