Fix beta lints (#12980)
# Objective

- Fixes #12976

## Solution

This one is a doozy.

- Run `cargo +beta clippy --workspace --all-targets --all-features` and fix all issues
- This includes (a combined sketch of a few of these patterns follows this list):
  - Moving inner attributes to be outer attributes when the item in question has both inner and outer attributes
  - Use `ptr::from_ref` in more scenarios
  - Extend the valid idents list used by `clippy::doc_markdown` with more names
  - Use `Clone::clone_from` when possible
  - Remove redundant `ron` import
  - Add backticks to **so many** identifiers and items
    - I'm sorry whoever has to review this
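
Below is a minimal sketch (hypothetical functions, not taken from this diff) of three of the patterns above: hoisting an inner `#![allow(...)]` to an outer attribute, replacing an `as *const` cast with `std::ptr::from_ref`, and using `Clone::clone_from` instead of assigning a fresh clone:

```rust
// Hypothetical examples; the real changes span many Bevy crates.
use std::{mem, ptr};

#[allow(clippy::nonminimal_bool)] // outer attribute instead of an inner `#![allow(...)]`
fn comparison_example() {
    // Intentionally non-minimal boolean, mirroring the `lt`/`ge` tests touched here.
    assert!(!(1 < 1));
}

fn assert_aligned<T>(value: &T) {
    // `ptr::from_ref` replaces the old `value as *const T` cast.
    assert_eq!(ptr::from_ref::<T>(value) as usize % mem::align_of::<T>(), 0);
}

fn reuse_buffer(dst: &mut String, src: &String) {
    // `clone_from` can reuse `dst`'s existing allocation instead of building a new clone.
    dst.clone_from(src);
}
```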

---

## Changelog

- Added links to more identifiers in documentation.
BD103 authored Apr 16, 2024
1 parent 9dde99f commit 7b8d502
Showing 34 changed files with 116 additions and 106 deletions.
13 changes: 12 additions & 1 deletion clippy.toml
@@ -1 +1,12 @@
doc-valid-idents = ["GilRs", "glTF", "sRGB", "VSync", "WebGL2", "WebGPU", ".."]
doc-valid-idents = [
"GilRs",
"glTF",
"MacOS",
"NVidia",
"OpenXR",
"sRGB",
"VSync",
"WebGL2",
"WebGPU",
"..",
]
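
For context, `doc-valid-idents` is the allow-list that `clippy::doc_markdown` consults for words that may appear in doc comments without backticks. A hypothetical item (not part of this PR) illustrating why names like `WebGPU` and `WebGL2` are listed:

```rust
/// Falls back from WebGPU to WebGL2 on older browsers.
///
/// Without the `doc-valid-idents` entries above, `clippy::doc_markdown`
/// would ask for backticks around these names.
pub struct BackendFallback; // hypothetical type, used only to illustrate the lint
```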
2 changes: 1 addition & 1 deletion crates/bevy_asset/src/assets.rs
@@ -51,7 +51,7 @@ pub(crate) struct AssetIndexAllocator {
/// A monotonically increasing index.
next_index: AtomicU32,
recycled_queue_sender: Sender<AssetIndex>,
/// This receives every recycled AssetIndex. It serves as a buffer/queue to store indices ready for reuse.
/// This receives every recycled [`AssetIndex`]. It serves as a buffer/queue to store indices ready for reuse.
recycled_queue_receiver: Receiver<AssetIndex>,
recycled_sender: Sender<AssetIndex>,
recycled_receiver: Receiver<AssetIndex>,
2 changes: 1 addition & 1 deletion crates/bevy_asset/src/meta.rs
@@ -71,7 +71,7 @@ pub enum AssetAction<LoaderSettings, ProcessSettings> {
pub struct ProcessedInfo {
/// A hash of the asset bytes and the asset .meta data
pub hash: AssetHash,
/// A hash of the asset bytes, the asset .meta data, and the `full_hash` of every process_dependency
/// A hash of the asset bytes, the asset .meta data, and the `full_hash` of every `process_dependency`
pub full_hash: AssetHash,
/// Information about the "process dependencies" used to process this asset.
pub process_dependencies: Vec<ProcessDependencyInfo>,
4 changes: 2 additions & 2 deletions crates/bevy_asset/src/processor/mod.rs
@@ -1058,7 +1058,7 @@ pub(crate) struct ProcessorAssetInfo {
/// _This lock must be locked whenever a read or write to processed assets occurs_
/// There are scenarios where processed assets (and their metadata) are being read and written in multiple places at once:
/// * when the processor is running in parallel with an app
/// * when processing assets in parallel, the processor might read an asset's process_dependencies when processing new versions of those dependencies
/// * when processing assets in parallel, the processor might read an asset's `process_dependencies` when processing new versions of those dependencies
/// * this second scenario almost certainly isn't possible with the current implementation, but its worth protecting against
/// This lock defends against those scenarios by ensuring readers don't read while processed files are being written. And it ensures
/// Because this lock is shared across meta and asset bytes, readers can ensure they don't read "old" versions of metadata with "new" asset data.
@@ -1101,7 +1101,7 @@ pub struct ProcessorAssetInfos {
/// The "current" in memory view of the asset space. During processing, if path does not exist in this, it should
/// be considered non-existent.
/// NOTE: YOU MUST USE `Self::get_or_insert` or `Self::insert` TO ADD ITEMS TO THIS COLLECTION TO ENSURE
/// non_existent_dependants DATA IS CONSUMED
/// `non_existent_dependants` DATA IS CONSUMED
infos: HashMap<AssetPath<'static>, ProcessorAssetInfo>,
/// Dependants for assets that don't exist. This exists to track "dangling" asset references due to deleted / missing files.
/// If the dependant asset is added, it can "resolve" these dependencies and re-compute those assets.
2 changes: 1 addition & 1 deletion crates/bevy_asset/src/server/info.rs
@@ -35,7 +35,7 @@ pub(crate) struct AssetInfo {
/// [`LoadedAsset`]: crate::loader::LoadedAsset
loader_dependencies: HashMap<AssetPath<'static>, AssetHash>,
/// The number of handle drops to skip for this asset.
/// See usage (and comments) in get_or_create_path_handle for context.
/// See usage (and comments) in `get_or_create_path_handle` for context.
handle_drops_to_skip: usize,
}

10 changes: 5 additions & 5 deletions crates/bevy_core/src/task_pool_options.rs
@@ -9,7 +9,7 @@ pub struct TaskPoolThreadAssignmentPolicy {
pub min_threads: usize,
/// Under no circumstance use more than this many threads for this pool
pub max_threads: usize,
/// Target using this percentage of total cores, clamped by min_threads and max_threads. It is
/// Target using this percentage of total cores, clamped by `min_threads` and `max_threads`. It is
/// permitted to use 1.0 to try to use all remaining threads
pub percent: f32,
}
@@ -34,11 +34,11 @@ impl TaskPoolThreadAssignmentPolicy {
/// set up [`TaskPoolPlugin`](super::TaskPoolPlugin)
#[derive(Clone, Debug)]
pub struct TaskPoolOptions {
/// If the number of physical cores is less than min_total_threads, force using
/// min_total_threads
/// If the number of physical cores is less than `min_total_threads`, force using
/// `min_total_threads`
pub min_total_threads: usize,
/// If the number of physical cores is greater than max_total_threads, force using
/// max_total_threads
/// If the number of physical cores is greater than `max_total_threads`, force using
/// `max_total_threads`
pub max_total_threads: usize,

/// Used to determine number of IO threads to allocate
2 changes: 1 addition & 1 deletion crates/bevy_core_pipeline/src/tonemapping/mod.rs
@@ -149,7 +149,7 @@ pub enum Tonemapping {
AgX,
/// By Tomasz Stachowiak
/// Has little hue shifting in the darks and mids, but lots in the brights. Brights desaturate across the spectrum.
/// Is sort of between Reinhard and ReinhardLuminance. Conceptually similar to reinhard-jodie.
/// Is sort of between Reinhard and `ReinhardLuminance`. Conceptually similar to reinhard-jodie.
/// Designed as a compromise if you want e.g. decent skin tones in low light, but can't afford to re-do your
/// VFX to look good without hue shifting.
SomewhatBoringDisplayTransform,
4 changes: 1 addition & 3 deletions crates/bevy_ecs/src/entity/mod.rs
@@ -1060,10 +1060,8 @@ mod tests {
}

#[test]
#[allow(clippy::nonminimal_bool)] // This is intentionally testing `lt` and `ge` as separate functions.
fn entity_comparison() {
// This is intentionally testing `lt` and `ge` as separate functions.
#![allow(clippy::nonminimal_bool)]

assert_eq!(
Entity::from_raw_and_generation(123, NonZeroU32::new(456).unwrap()),
Entity::from_raw_and_generation(123, NonZeroU32::new(456).unwrap())
2 changes: 1 addition & 1 deletion crates/bevy_ecs/src/event.rs
@@ -172,7 +172,7 @@ struct EventInstance<E: Event> {
#[derive(Debug, Resource)]
pub struct Events<E: Event> {
/// Holds the oldest still active events.
/// Note that a.start_event_count + a.len() should always === events_b.start_event_count.
/// Note that `a.start_event_count + a.len()` should always be equal to `events_b.start_event_count`.
events_a: EventSequence<E>,
/// Holds the newer events.
events_b: EventSequence<E>,
4 changes: 1 addition & 3 deletions crates/bevy_ecs/src/identifier/mod.rs
@@ -209,10 +209,8 @@ mod tests {

#[rustfmt::skip]
#[test]
#[allow(clippy::nonminimal_bool)] // This is intentionally testing `lt` and `ge` as separate functions.
fn id_comparison() {
// This is intentionally testing `lt` and `ge` as separate functions.
#![allow(clippy::nonminimal_bool)]

assert!(Identifier::new(123, 456, IdKind::Entity).unwrap() == Identifier::new(123, 456, IdKind::Entity).unwrap());
assert!(Identifier::new(123, 456, IdKind::Placeholder).unwrap() == Identifier::new(123, 456, IdKind::Placeholder).unwrap());
assert!(Identifier::new(123, 789, IdKind::Entity).unwrap() != Identifier::new(123, 456, IdKind::Entity).unwrap());
2 changes: 1 addition & 1 deletion crates/bevy_ecs/src/schedule/schedule.rs
@@ -1448,7 +1448,7 @@ impl ScheduleGraph {

/// Values returned by [`ScheduleGraph::process_configs`]
struct ProcessConfigsResult {
/// All nodes contained inside this process_configs call's [`NodeConfigs`] hierarchy,
/// All nodes contained inside this `process_configs` call's [`NodeConfigs`] hierarchy,
/// if `ancestor_chained` is true
nodes: Vec<NodeId>,
/// True if and only if all nodes are "densely chained", meaning that all nested nodes
10 changes: 5 additions & 5 deletions crates/bevy_ecs/src/schedule/stepping.rs
@@ -58,7 +58,7 @@ enum SystemBehavior {
// schedule_order index, and schedule start point
#[derive(Debug, Default, Clone, Copy)]
struct Cursor {
/// index within Stepping.schedule_order
/// index within `Stepping::schedule_order`
pub schedule: usize,
/// index within the schedule's system list
pub system: usize,
@@ -606,11 +606,11 @@ struct ScheduleState {
/// per-system [`SystemBehavior`]
behaviors: HashMap<NodeId, SystemBehavior>,

/// order of NodeIds in the schedule
/// order of [`NodeId`]s in the schedule
///
/// This is a cached copy of SystemExecutable.system_ids. We need it
/// available here to be accessed by Stepping::cursor() so we can return
/// NodeIds to the caller.
/// This is a cached copy of `SystemExecutable::system_ids`. We need it
/// available here to be accessed by [`Stepping::cursor()`] so we can return
/// [`NodeId`]s to the caller.
node_ids: Vec<NodeId>,

/// changes to system behavior that should be applied the next time
5 changes: 4 additions & 1 deletion crates/bevy_ecs/src/storage/blob_vec.rs
@@ -713,7 +713,10 @@ mod tests {
let mut q = world.query::<&Zst>();
for zst in q.iter(&world) {
// Ensure that the references returned are properly aligned.
assert_eq!(zst as *const Zst as usize % mem::align_of::<Zst>(), 0);
assert_eq!(
std::ptr::from_ref::<Zst>(zst) as usize % mem::align_of::<Zst>(),
0
);
count += 1;
}

4 changes: 3 additions & 1 deletion crates/bevy_input/src/keyboard.rs
@@ -183,6 +183,7 @@ pub enum NativeKeyCode {
derive(serde::Serialize, serde::Deserialize),
reflect(Serialize, Deserialize)
)]
#[allow(clippy::doc_markdown)] // Clippy doesn't like our use of <kbd>.
#[repr(u32)]
pub enum KeyCode {
/// This variant is used when the key cannot be translated to any other variant.
@@ -191,7 +192,7 @@ pub enum KeyCode {
/// key-press and key-release events by hashing the [`KeyCode`]. It is also possible to use
/// this for keybinds for non-standard keys, but such keybinds are tied to a given platform.
Unidentified(NativeKeyCode),
/// <kbd>`</kbd> on a US keyboard. This is also called a backtick or grave.
/// <kbd>\`</kbd> on a US keyboard. This is also called a backtick or grave.
/// This is the <kbd>半角</kbd>/<kbd>全角</kbd>/<kbd>漢字</kbd>
/// (hankaku/zenkaku/kanji) key on Japanese keyboards
Backquote,
@@ -700,6 +701,7 @@ pub enum NativeKey {
derive(serde::Serialize, serde::Deserialize),
reflect(Serialize, Deserialize)
)]
#[allow(clippy::doc_markdown)] // Clippy doesn't like our use of <kbd>.
pub enum Key {
/// A key string that corresponds to the character typed by the user, taking into account the
/// user’s current locale setting, and any system-level keyboard mapping overrides that are in