diff --git a/clovers-cli/Cargo.toml b/clovers-cli/Cargo.toml
index 0719b853..5145c748 100644
--- a/clovers-cli/Cargo.toml
+++ b/clovers-cli/Cargo.toml
@@ -24,25 +24,25 @@ clovers = { path = "../clovers", features = [
 
 # External
 blue-noise-sampler = "0.1.0"
-clap = { version = "4.5.16", features = ["std", "derive"] }
+clap = { version = "4.5.21", features = ["std", "derive"] }
 human_format = "1.1.0"
 humantime = "2.1.0"
-image = { version = "0.25.2", features = ["png"], default-features = false }
-img-parts = "0.3.0"
-indicatif = { version = "0.17.8", features = [
+image = { version = "0.25.5", features = ["png"], default-features = false }
+img-parts = "0.3.1"
+indicatif = { version = "0.17.9", features = [
     "rayon",
 ], default-features = false }
-nalgebra = { version = "0.33.0" }
+nalgebra = { version = "0.33.2" }
 palette = { version = "0.7.6", features = ["serializing"] }
 paste = { version = "1.0.15" }
 rand = { version = "0.8.5", features = ["small_rng"], default-features = false }
 rayon = "1.10.0"
-serde = { version = "1.0.209", features = ["derive"], default-features = false }
+serde = { version = "1.0.215", features = ["derive"], default-features = false }
 serde_json = { version = "1.0", features = ["alloc"], default-features = false }
 time = { version = "0.3.36", default-features = false }
 tracing = "0.1.40"
 tracing-subscriber = { version = "0.3.18", features = ["time"] }
 
 [dev-dependencies]
-divan = "0.1.14"
+divan = "0.1.16"
 proptest = "1"
diff --git a/clovers/Cargo.toml b/clovers/Cargo.toml
index 9b06ccf0..8daabecc 100644
--- a/clovers/Cargo.toml
+++ b/clovers/Cargo.toml
@@ -21,19 +21,19 @@ traces = ["tracing"]
 [dependencies]
 enum_dispatch = "0.3.13"
 gltf = { version = "1.4.1", optional = true }
-nalgebra = { version = "0.33.0" }
+nalgebra = { version = "0.33.2" }
 palette = { version = "0.7.6", features = ["serializing"] }
 ply-rs = { version = "0.1.3", optional = true }
 rand = { version = "0.8.5", features = ["small_rng"], default-features = false }
 rand_distr = { version = "0.4.3", features = ["std_math"] }
-serde = { version = "1.0.209", features = [
+serde = { version = "1.0.215", features = [
     "derive",
 ], default-features = false, optional = true }
-stl_io = { version = "0.7.0", optional = true }
+stl_io = { version = "0.8.3", optional = true }
 tracing = { version = "0.1.40", optional = true }
 
 [dev-dependencies]
-divan = "0.1.14"
+divan = "0.1.16"
 proptest = "1"
 
 [[bench]]
diff --git a/clovers/src/aabb.rs b/clovers/src/aabb.rs
index de249efa..a91871f0 100644
--- a/clovers/src/aabb.rs
+++ b/clovers/src/aabb.rs
@@ -122,12 +122,7 @@ impl AABB {
     // TODO: this api is kind of annoying
     #[must_use]
     pub fn axis(&self, n: usize) -> &Interval {
-        match n {
-            0 => &self.x,
-            1 => &self.y,
-            2 => &self.z,
-            _ => panic!("AABB::axis called with invalid parameter: {n:?}"),
-        }
+        [&self.x, &self.y, &self.z][n]
     }
 
     /// Distance of a `Ray` to the bounding box.
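
The rewritten `AABB::axis` above swaps the explicit `match` for slice indexing, which keeps the bounds check but trades the custom panic message for the standard index-out-of-bounds one. A minimal, self-contained sketch of the same pattern follows; the simplified `Interval` and `Aabb` types are stand-ins for illustration, not the crate's actual definitions.

/// Simplified stand-in for the crate's `Interval`, for illustration only.
#[derive(Debug)]
struct Interval {
    min: f32,
    max: f32,
}

#[derive(Debug)]
struct Aabb {
    x: Interval,
    y: Interval,
    z: Interval,
}

impl Aabb {
    /// Same shape as the patched `AABB::axis`: index into a temporary array of references.
    /// An out-of-range `n` still panics, now with the standard `index out of bounds` message.
    fn axis(&self, n: usize) -> &Interval {
        [&self.x, &self.y, &self.z][n]
    }
}

fn main() {
    let aabb = Aabb {
        x: Interval { min: 0.0, max: 1.0 },
        y: Interval { min: 0.0, max: 2.0 },
        z: Interval { min: 0.0, max: 3.0 },
    };
    // Axes 0..=2 return a reference to the matching interval.
    assert_eq!(aabb.axis(0).min, 0.0);
    assert_eq!(aabb.axis(2).max, 3.0);
}
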
diff --git a/clovers/src/bvh/build/longest_axis_midpoint.rs b/clovers/src/bvh/build/longest_axis_midpoint.rs
index b3def0c8..f912a559 100644
--- a/clovers/src/bvh/build/longest_axis_midpoint.rs
+++ b/clovers/src/bvh/build/longest_axis_midpoint.rs
@@ -17,11 +17,7 @@ pub fn build(mut hitables: Vec<Hitable>) -> BVHNode {
     // What is the axis with the largest span?
     // TODO: horribly inefficient, improve!
     let bounding: AABB = vec_bounding_box(&hitables).expect("No bounding box for objects");
-    let spans = [
-        bounding.axis(0).size(),
-        bounding.axis(1).size(),
-        bounding.axis(2).size(),
-    ];
+    let spans = [bounding.x.size(), bounding.y.size(), bounding.z.size()];
     let largest = Float::max(Float::max(spans[0], spans[1]), spans[2]);
     #[allow(clippy::float_cmp)] // TODO: better code for picking the largest axis...
     let axis: usize = spans.iter().position(|&x| x == largest).unwrap();
diff --git a/clovers/src/hitable.rs b/clovers/src/hitable.rs
index 3e4b36d0..e876d578 100644
--- a/clovers/src/hitable.rs
+++ b/clovers/src/hitable.rs
@@ -138,9 +138,12 @@ pub struct HitableList<'scene> {
 
 impl<'scene> HitableList<'scene> {
     /// Creates a new [`HitableList`].
+    ///
+    /// # Panics
+    /// This method may panic if no finite bounding box can be created for the given `hitables`.
     #[must_use]
     pub fn new(hitables: Vec<Hitable<'scene>>) -> Self {
-        let aabb = vec_bounding_box(&hitables).unwrap();
+        let aabb = vec_bounding_box(&hitables).expect("No bounding box for hitables");
         Self { hitables, aabb }
     }
 
diff --git a/clovers/src/lib.rs b/clovers/src/lib.rs
index 6c6936e6..c0a6e222 100644
--- a/clovers/src/lib.rs
+++ b/clovers/src/lib.rs
@@ -60,8 +60,6 @@
 #![deny(missing_debug_implementations)]
 #![deny(missing_docs)]
 // TODO: temporarily allowing some in order to get a majority of clippy::pedantic enabled
-#![allow(clippy::many_single_char_names)] // Lots of places with coordinates etc
-#![allow(clippy::missing_panics_doc)] // TODO: remove panics where feasible later
 #![allow(clippy::module_name_repetitions)]
 // no_std required for gpu accelerated rendering
 #![cfg_attr(not(feature = "std"), no_std)]
diff --git a/clovers/src/materials/cone_light.rs b/clovers/src/materials/cone_light.rs
index e92a4c8f..0e7e8a7d 100644
--- a/clovers/src/materials/cone_light.rs
+++ b/clovers/src/materials/cone_light.rs
@@ -65,8 +65,8 @@ impl MaterialTrait for ConeLight {
             emit
         } else {
             // Make sure that the front face of the lamp is tinted, even outside the main lighting angle
-            let (r, g, b) = emit.into_components();
-            let scaling_factor = r.max(g).max(b);
+            let (red, green, blue) = emit.into_components();
+            let scaling_factor = red.max(green).max(blue);
             if scaling_factor > 1.0 {
                 emit / scaling_factor
             } else {
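
The `ConeLight` hunk above only renames the component bindings, but the surrounding logic is worth spelling out: when any emitted channel exceeds 1.0, the whole color is divided by its largest component so the lamp's front face keeps its tint instead of clipping to white. A small sketch of that scaling rule on a plain `f32` triple (the real code operates on `palette` color types and the material's `emit` value):

/// Scale an emitted RGB triple down by its largest component when it exceeds 1.0.
/// Mirrors the clamping idea in `ConeLight`, but on plain floats for illustration.
fn tint_front_face(red: f32, green: f32, blue: f32) -> (f32, f32, f32) {
    let scaling_factor = red.max(green).max(blue);
    if scaling_factor > 1.0 {
        (red / scaling_factor, green / scaling_factor, blue / scaling_factor)
    } else {
        (red, green, blue)
    }
}

fn main() {
    // A bright warm light is normalized so its hue survives outside the cone angle.
    assert_eq!(tint_front_face(4.0, 2.0, 1.0), (1.0, 0.5, 0.25));
    // Colors already in range pass through unchanged.
    assert_eq!(tint_front_face(0.8, 0.4, 0.2), (0.8, 0.4, 0.2));
}
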
diff --git a/clovers/src/objects/gltf.rs b/clovers/src/objects/gltf.rs
index 69f65c35..59a099e0 100644
--- a/clovers/src/objects/gltf.rs
+++ b/clovers/src/objects/gltf.rs
@@ -67,9 +67,12 @@ pub struct GLTF<'scene> {
 impl<'scene> GLTF<'scene> {
     #[must_use]
     /// Create a new STL object with the given initialization parameters.
+    ///
+    /// # Panics
+    /// This method may panic if no finite bounding box can be created for the given `hitables`.
     pub fn new(gltf_init: GLTFInit) -> Self {
         let hitables: Vec<Hitable> = gltf_init.into();
-        let aabb = vec_bounding_box(&hitables).unwrap();
+        let aabb = vec_bounding_box(&hitables).expect("No bounding box for hitables");
 
         GLTF { hitables, aabb }
     }
@@ -200,6 +203,7 @@ pub struct GLTFTriangle<'scene> {
 
 impl<'scene> GLTFTriangle<'scene> {
     #[must_use]
+    #[allow(clippy::many_single_char_names)]
     /// Initialize a new GLTF object
     pub fn new(triangle: [Vec3; 3], material: &'scene GLTFMaterial<'scene>) -> Self {
         // TODO: mostly adapted from Triangle, verify correctness!
diff --git a/clovers/src/objects/ply.rs b/clovers/src/objects/ply.rs
index 124a7c14..fd28a0b3 100644
--- a/clovers/src/objects/ply.rs
+++ b/clovers/src/objects/ply.rs
@@ -49,6 +49,9 @@ pub struct PLYInit {
 
 #[must_use]
 /// Initializes a PLY
+///
+/// # Panics
+/// This function may panic if a shared material is referenced in the object description but cannot be found in the materials list.
 pub fn initialize_ply<'scene>(
     ply_init: PLYInit,
     materials: &'scene [SharedMaterial],
@@ -105,7 +108,7 @@ pub fn initialize_ply<'scene>(
         hitables.push(Hitable::Triangle(triangle));
     }
     // TODO: remove unwrap
-    let aabb = vec_bounding_box(&hitables).unwrap();
+    let aabb = vec_bounding_box(&hitables).expect("No bounding box for hitables");
 
     PLY {
         hitables,
diff --git a/clovers/src/objects/quad.rs b/clovers/src/objects/quad.rs
index 8e86183d..8c22d8b7 100644
--- a/clovers/src/objects/quad.rs
+++ b/clovers/src/objects/quad.rs
@@ -57,6 +57,7 @@ pub struct Quad<'scene> {
 impl<'scene> Quad<'scene> {
     /// Creates a new quad
     #[must_use]
+    #[allow(clippy::many_single_char_names)]
     pub fn new(q: Position, u: Vec3, v: Vec3, material: &'scene Material) -> Quad<'scene> {
         let n: Vec3 = u.cross(&v);
         let normal = Unit::new_normalize(n);
diff --git a/clovers/src/objects/stl.rs b/clovers/src/objects/stl.rs
index 6e02c8fe..dbaba6e3 100644
--- a/clovers/src/objects/stl.rs
+++ b/clovers/src/objects/stl.rs
@@ -46,6 +46,9 @@ pub struct STLInit {
 
 #[must_use]
 /// Initializes an STL
+///
+/// # Panics
+/// This function may panic if the referenced .stl file cannot be opened or parsed.
 pub fn initialize_stl<'scene>(
     stl_init: STLInit,
     materials: &'scene [SharedMaterial],
diff --git a/clovers/src/objects/translate.rs b/clovers/src/objects/translate.rs
index 97da4fff..7092cd2c 100644
--- a/clovers/src/objects/translate.rs
+++ b/clovers/src/objects/translate.rs
@@ -34,6 +34,9 @@ pub struct Translate<'scene> {
 
 impl<'scene> Translate<'scene> {
     /// Creates a new `Translate` object. It wraps the given [Object] and has adjusted `hit()` and `bounding_box()` methods based on the `offset` given.
+    ///
+    /// # Panics
+    /// This method may panic if the given object does not have a valid `AABB`.
     #[must_use]
     pub fn new(object: Box<Object<'scene>>, offset: Vec3) -> Self {
         // TODO: time
diff --git a/clovers/src/objects/triangle.rs b/clovers/src/objects/triangle.rs
index 34ec14be..bbb5028a 100644
--- a/clovers/src/objects/triangle.rs
+++ b/clovers/src/objects/triangle.rs
@@ -57,6 +57,7 @@ pub struct Triangle<'scene> {
 impl<'scene> Triangle<'scene> {
     /// Creates a new triangle from a coordinate point and two side vectors relative to the point
     #[must_use]
+    #[allow(clippy::many_single_char_names)]
     pub fn new(q: Position, u: Vec3, v: Vec3, material: &'scene Material) -> Triangle<'scene> {
         let n: Vec3 = u.cross(&v);
         let normal: Direction = Unit::new_normalize(n);
@@ -101,6 +102,7 @@ impl<'scene> Triangle<'scene> {
 
     /// Creates a new triangle from three Cartesian space coordinates
     #[must_use]
+    #[allow(clippy::many_single_char_names)]
     pub fn from_coordinates(
         a: Vec3,
         b: Vec3,
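
The `#[allow(clippy::many_single_char_names)]` attributes above exist because the quad and triangle constructors deliberately keep the short geometric names: a corner point `q`, two edge vectors `u` and `v`, and the normal `n` from their cross product. A standalone nalgebra sketch of that parameterization (plain `Vector3<f32>` instead of the crate's `Position`, `Vec3`, and `Direction` aliases):

use nalgebra::{Unit, Vector3};

fn main() {
    // Corner point `q` and the two edge vectors `u` and `v` spanning the primitive.
    let _q = Vector3::new(0.0_f32, 0.0, 0.0);
    let u = Vector3::new(1.0_f32, 0.0, 0.0);
    let v = Vector3::new(0.0_f32, 1.0, 0.0);

    // The unnormalized normal is the cross product of the two edges,
    // matching the `u.cross(&v)` and `Unit::new_normalize(n)` lines in the context above.
    let n = u.cross(&v);
    let normal = Unit::new_normalize(n);

    assert_eq!(normal.into_inner(), Vector3::new(0.0, 0.0, 1.0));
}
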
diff --git a/clovers/src/wavelength.rs b/clovers/src/wavelength.rs
index 75cb1f56..dcbdcee1 100644
--- a/clovers/src/wavelength.rs
+++ b/clovers/src/wavelength.rs
@@ -31,6 +31,9 @@ pub fn random_wavelength(rng: &mut SmallRng) -> Wavelength {
 
 // TODO: clippy fixes possible?
 /// Given a sample seed from a sampler, return the approximate wavelenght.
+///
+/// # Panics
+/// This function may panic if the runtime asserts are triggered. This would indicate a bug in the implementation.
 #[must_use]
 #[allow(clippy::cast_possible_truncation)]
 #[allow(clippy::cast_sign_loss)]
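
The new `# Panics` note and the two cast allows on the wavelength helper hint at the usual shape of such a mapping: a unit sample is stretched over the visible range and truncated to an integer wavelength, with a runtime assert guarding the result. The constants and function name below are hypothetical stand-ins; this patch does not show the real implementation.

/// Hypothetical visible-spectrum bounds in nanometres; the crate's actual constants
/// live in `clovers/src/wavelength.rs` and may differ.
const MIN_WAVELENGTH: usize = 380;
const MAX_WAVELENGTH: usize = 780;

/// Map a unit sample to a wavelength. The float-to-integer cast is the reason for the
/// `clippy::cast_possible_truncation` and `clippy::cast_sign_loss` allows.
#[allow(clippy::cast_possible_truncation)]
#[allow(clippy::cast_sign_loss)]
fn sample_to_wavelength(sample: f32) -> usize {
    let span = (MAX_WAVELENGTH - MIN_WAVELENGTH) as f32;
    let wavelength = MIN_WAVELENGTH + (sample * span) as usize;
    // An assert of this kind is what the added `# Panics` section refers to:
    // it only fires if the mapping itself is buggy.
    debug_assert!((MIN_WAVELENGTH..MAX_WAVELENGTH).contains(&wavelength));
    wavelength
}

fn main() {
    assert_eq!(sample_to_wavelength(0.0), 380);
    assert_eq!(sample_to_wavelength(0.5), 580);
}
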
diff --git a/deny.toml b/deny.toml
index 1b83df70..62aaff03 100644
--- a/deny.toml
+++ b/deny.toml
@@ -9,6 +9,11 @@
 # The values provided in this template are the default values that will be used
 # when any section or field is not specified in your own configuration
 
+# Root options
+
+# The graph table configures how the dependency graph is constructed and thus
+# which crates the checks are performed against
+[graph]
 # If 1 or more target triples (and optionally, target_features) are specified,
 # only the specified targets will be checked when running `cargo deny check`.
 # This means, if a particular package is only ever used as a target specific
@@ -20,46 +25,56 @@
 targets = [
     # The triple can be any string, but only the target triples built in to
     # rustc (as of 1.40) can be checked against actual config expressions
-    #{ triple = "x86_64-unknown-linux-musl" },
+    #"x86_64-unknown-linux-musl",
     # You can also specify which target_features you promise are enabled for a
     # particular target. target_features are currently not validated against
     # the actual valid features supported by the target architecture.
     #{ triple = "wasm32-unknown-unknown", features = ["atomics"] },
 ]
+# When creating the dependency graph used as the source of truth when checks are
+# executed, this field can be used to prune crates from the graph, removing them
+# from the view of cargo-deny. This is an extremely heavy hammer, as if a crate
+# is pruned from the graph, all of its dependencies will also be pruned unless
+# they are connected to another crate in the graph that hasn't been pruned,
+# so it should be used with care. The identifiers are [Package ID Specifications]
+# (https://doc.rust-lang.org/cargo/reference/pkgid-spec.html)
+#exclude = []
+# If true, metadata will be collected with `--all-features`. Note that this can't
+# be toggled off if true, if you want to conditionally enable `--all-features` it
+# is recommended to pass `--all-features` on the cmd line instead
+all-features = false
+# If true, metadata will be collected with `--no-default-features`. The same
+# caveat with `all-features` applies
+no-default-features = false
+# If set, these feature will be enabled when collecting metadata. If `--features`
+# is specified on the cmd line they will take precedence over this option.
+#features = []
+
+# The output table provides options for how/if diagnostics are outputted
+[output]
+# When outputting inclusion graphs in diagnostics that include features, this
+# option can be used to specify the depth at which feature edges will be added.
+# This option is included since the graphs can be quite large and the addition
+# of features from the crate(s) to all of the graph roots can be far too verbose.
+# This option can be overridden via `--feature-depth` on the cmd line
+feature-depth = 1
 
 # This section is considered when running `cargo deny check advisories`
 # More documentation for the advisories section can be found here:
 # https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html
 [advisories]
-# The path where the advisory database is cloned/fetched into
-db-path = "~/.cargo/advisory-db"
+# The path where the advisory databases are cloned/fetched into
+#db-path = "$CARGO_HOME/advisory-dbs"
 # The url(s) of the advisory databases to use
-db-urls = ["https://github.com/rustsec/advisory-db"]
-# The lint level for security vulnerabilities
-vulnerability = "deny"
-# The lint level for unmaintained crates
-unmaintained = "warn"
-# The lint level for crates that have been yanked from their source registry
-yanked = "warn"
-# The lint level for crates with security notices. Note that as of
-# 2019-12-17 there are no security notice advisories in
-# https://github.com/rustsec/advisory-db
-notice = "warn"
+#db-urls = ["https://github.com/rustsec/advisory-db"]
 # A list of advisory IDs to ignore. Note that ignored advisories will still
 # output a note when they are encountered.
 ignore = [
     #"RUSTSEC-0000-0000",
+    #{ id = "RUSTSEC-0000-0000", reason = "you can specify a reason the advisory is ignored" },
+    #"a-crate-that-is-yanked@0.1.1", # you can also ignore yanked crate versions if you wish
+    #{ crate = "a-crate-that-is-yanked@0.1.1", reason = "you can specify why you are ignoring the yanked crate" },
 ]
-# Threshold for security vulnerabilities, any vulnerability with a CVSS score
-# lower than the range specified will be ignored. Note that ignored advisories
-# will still output a note when they are encountered.
-# * None - CVSS Score 0.0
-# * Low - CVSS Score 0.1 - 3.9
-# * Medium - CVSS Score 4.0 - 6.9
-# * High - CVSS Score 7.0 - 8.9
-# * Critical - CVSS Score 9.0 - 10.0
-#severity-threshold =
-
 # If this is true, then cargo deny will use the git executable to fetch advisory database.
 # If this is false, then it uses a built-in git library.
 # Setting this to true can be helpful if you have special authentication requirements that cargo-deny does not support.
@@ -70,41 +85,14 @@ ignore = [
 # More documentation for the licenses section can be found here:
 # https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html
 [licenses]
-# The lint level for crates which do not have a detectable license
-unlicensed = "deny"
 # List of explicitly allowed licenses
 # See https://spdx.org/licenses/ for list of possible licenses
 # [possible values: any SPDX 3.11 short identifier (+ optional exception)].
 allow = [
-    "Apache-2.0",
-    "BSD-2-Clause",
-    "BSD-3-Clause",
-    "CC0-1.0",
-    "ISC",
     "MIT",
-    "Unicode-DFS-2016",
-    "Zlib",
-]
-# List of explicitly disallowed licenses
-# See https://spdx.org/licenses/ for list of possible licenses
-# [possible values: any SPDX 3.11 short identifier (+ optional exception)].
-deny = [
-    #"Nokia",
+    "Apache-2.0",
+    #"Apache-2.0 WITH LLVM-exception",
 ]
-# Lint level for licenses considered copyleft
-copyleft = "deny"
-# Blanket approval or denial for OSI-approved or FSF Free/Libre licenses
-# * both - The license will be approved if it is both OSI-approved *AND* FSF
-# * either - The license will be approved if it is either OSI-approved *OR* FSF
-# * osi-only - The license will be approved if is OSI-approved *AND NOT* FSF
-# * fsf-only - The license will be approved if is FSF *AND NOT* OSI-approved
-# * neither - This predicate is ignored and the default lint level is used
-allow-osi-fsf-free = "neither"
-# Lint level used when no other predicates are matched
-# 1. License isn't in the allow or deny lists
-# 2. License isn't copyleft
-# 3. License isn't OSI/FSF, or allow-osi-fsf-free = "neither"
-default = "deny"
 # The confidence threshold for detecting a license from license text.
 # The higher the value, the more closely the license text must be to the
 # canonical license text of a valid SPDX license file.
@@ -115,17 +103,16 @@ confidence-threshold = 0.8
 exceptions = [
     # Each entry is the crate and version constraint, and its specific allow
     # list
-    #{ allow = ["Zlib"], name = "adler32", version = "*" },
+    #{ allow = ["Zlib"], crate = "adler32" },
+    { allow = ["Unicode-3.0"], crate = "unicode-ident" },
 ]
 
 # Some crates don't have (easily) machine readable licensing information,
 # adding a clarification entry for it allows you to manually specify the
 # licensing information
 #[[licenses.clarify]]
-# The name of the crate the clarification applies to
-#name = "ring"
-# The optional version constraint for the crate
-#version = "*"
+# The package spec the clarification applies to
+#crate = "ring"
 # The SPDX expression for the license requirements of the crate
 #expression = "MIT AND ISC AND OpenSSL"
 # One or more files in the crate's source used as the "source of truth" for
@@ -134,8 +121,8 @@ exceptions = [
 # and the crate will be checked normally, which may produce warnings or errors
 # depending on the rest of your configuration
 #license-files = [
-  # Each entry is a crate relative path, and the (opaque) hash of its contents
-  #{ path = "LICENSE", hash = 0xbd0eed23 }
+# Each entry is a crate relative path, and the (opaque) hash of its contents
+#{ path = "LICENSE", hash = 0xbd0eed23 }
 #]
 
 [licenses.private]
@@ -165,30 +152,63 @@ wildcards = "allow"
 # * simplest-path - The path to the version with the fewest edges is highlighted
 # * all - Both lowest-version and simplest-path are used
 highlight = "all"
+# The default lint level for `default` features for crates that are members of
+# the workspace that is being checked. This can be overridden by allowing/denying
+# `default` on a crate-by-crate basis if desired.
+workspace-default-features = "allow"
+# The default lint level for `default` features for external crates that are not
+# members of the workspace. This can be overridden by allowing/denying `default`
+# on a crate-by-crate basis if desired.
+external-default-features = "allow"
 # List of crates that are allowed. Use with care!
 allow = [
-    #{ name = "ansi_term", version = "=0.11.0" },
+    #"ansi_term@0.11.0",
+    #{ crate = "ansi_term@0.11.0", reason = "you can specify a reason it is allowed" },
 ]
 # List of crates to deny
 deny = [
-    # Each entry the name of a crate and a version range. If version is
-    # not specified, all versions will be matched.
- #{ name = "ansi_term", version = "=0.11.0" }, - # + #"ansi_term@0.11.0", + #{ crate = "ansi_term@0.11.0", reason = "you can specify a reason it is banned" }, # Wrapper crates can optionally be specified to allow the crate when it # is a direct dependency of the otherwise banned crate - #{ name = "ansi_term", version = "=0.11.0", wrappers = [] }, + #{ crate = "ansi_term@0.11.0", wrappers = ["this-crate-directly-depends-on-ansi_term"] }, ] + +# List of features to allow/deny +# Each entry the name of a crate and a version range. If version is +# not specified, all versions will be matched. +#[[bans.features]] +#crate = "reqwest" +# Features to not allow +#deny = ["json"] +# Features to allow +#allow = [ +# "rustls", +# "__rustls", +# "__tls", +# "hyper-rustls", +# "rustls", +# "rustls-pemfile", +# "rustls-tls-webpki-roots", +# "tokio-rustls", +# "webpki-roots", +#] +# If true, the allowed features must exactly match the enabled feature set. If +# this is set there is no point setting `deny` +#exact = true + # Certain crates/versions that will be skipped when doing duplicate detection. skip = [ - #{ name = "ansi_term", version = "=0.11.0" }, + #"ansi_term@0.11.0", + #{ crate = "ansi_term@0.11.0", reason = "you can specify a reason why it can't be updated/removed" }, ] # Similarly to `skip` allows you to skip certain crates during duplicate # detection. Unlike skip, it also includes the entire tree of transitive # dependencies starting at the specified crate, up to a certain depth, which is -# by default infinite +# by default infinite. skip-tree = [ - #{ name = "ansi_term", version = "=0.11.0", depth = 20 }, + #"ansi_term@0.11.0", # will be skipped along with _all_ of its direct and transitive dependencies + #{ crate = "ansi_term@0.11.0", depth = 20 }, ] # This section is considered when running `cargo deny check sources`. @@ -208,9 +228,9 @@ allow-registry = ["https://github.com/rust-lang/crates.io-index"] allow-git = [] [sources.allow-org] -# 1 or more github.com organizations to allow git sources for -github = ["EmbarkStudios", "hmeyer"] -# 1 or more gitlab.com organizations to allow git sources for -# gitlab = [""] -# 1 or more bitbucket.org organizations to allow git sources for -# bitbucket = [""] +# github.com organizations to allow git sources for +github = [] +# gitlab.com organizations to allow git sources for +gitlab = [] +# bitbucket.org organizations to allow git sources for +bitbucket = []