
Commit

clippy fix allow dirty
jurplel committed Nov 12, 2024
1 parent 904eeea commit 8e41131
Showing 4 changed files with 8 additions and 16 deletions.
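
The commit title suggests these edits came from an automated clippy run rather than from hand editing; a plausible invocation (an assumption — the commit itself does not say) is:

cargo clippy --fix --allow-dirty

The --allow-dirty flag lets cargo apply the suggested fixes even when the working tree has uncommitted changes.
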
7 changes: 3 additions & 4 deletions optd-core/src/cascades/optimizer.rs
@@ -3,7 +3,7 @@
// Use of this source code is governed by an MIT-style license that can be found in the LICENSE file or at
// https://opensource.org/licenses/MIT.

use std::collections::{BTreeSet, HashMap, HashSet, VecDeque};
use std::collections::{BTreeSet, HashMap, HashSet};
use std::fmt::Display;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
@@ -12,7 +12,6 @@ use anyhow::Result;
use tracing::trace;

use super::memo::{ArcMemoPlanNode, GroupInfo, Memo};
use super::tasks::OptimizeGroupTask;
use super::{NaiveMemo, Task};
use crate::cascades::memo::Winner;
use crate::cascades::tasks::get_initial_task;
@@ -128,12 +127,12 @@ impl<T: NodeType> CascadesOptimizer<T, NaiveMemo<T>> {
let tasks = Vec::new();
// Assign rule IDs
let transformation_rules: Arc<[(RuleId, Arc<dyn Rule<T, Self>>)]> = transformation_rules
.into_iter()
.iter()
.enumerate()
.map(|(i, r)| (i, r.clone()))
.collect();
let implementation_rules: Arc<[(RuleId, Arc<dyn Rule<T, Self>>)]> = implementation_rules
.into_iter()
.iter()
.enumerate()
.map(|(i, r)| (i + transformation_rules.len(), r.clone()))
.collect();
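
In optimizer.rs the fix drops imports that are no longer referenced (VecDeque and super::tasks::OptimizeGroupTask) and swaps .into_iter() for .iter() when numbering the rule lists. The latter looks like clippy's into_iter_on_ref lint: on a reference, into_iter() simply delegates to iter(), so the explicit form is clearer. A minimal sketch of the same pattern, with a hypothetical Arc<str> standing in for the optd rule trait objects:

use std::sync::Arc;

// Number a shared slice of rules without consuming it, mirroring the fixed code.
fn number_rules(rules: &[Arc<str>]) -> Vec<(usize, Arc<str>)> {
    rules
        .iter() // .into_iter() here would mean the same thing and trips the lint
        .enumerate()
        .map(|(i, r)| (i, r.clone()))
        .collect()
}
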
6 changes: 0 additions & 6 deletions optd-core/src/cascades/tasks.rs
@@ -3,7 +3,6 @@
// Use of this source code is governed by an MIT-style license that can be found in the LICENSE file or at
// https://opensource.org/licenses/MIT.

use anyhow::Result;

use super::{CascadesOptimizer, GroupId, Memo};
use crate::nodes::NodeType;
@@ -15,12 +14,7 @@ mod optimize_expr;
mod optimize_group;
mod optimize_inputs;

pub use apply_rule::ApplyRuleTask;
pub use explore_expr::ExploreExprTask;
pub use explore_group::ExploreGroupTask;
pub use optimize_expr::OptimizeExprTask;
pub use optimize_group::OptimizeGroupTask;
pub use optimize_inputs::OptimizeInputsTask;

pub trait Task<T: NodeType, M: Memo<T>>: 'static + Send + Sync {
fn execute(&self, optimizer: &mut CascadesOptimizer<T, M>);
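
The tasks.rs deletions remove use anyhow::Result; and the per-task re-exports, presumably because nothing referenced them any longer (the matching use super::tasks::OptimizeGroupTask; import disappears from optimizer.rs above). Unused-import removal is driven by rustc's unused_imports warning, which cargo clippy --fix also applies. A trivial, self-contained illustration of that warning (hypothetical code, not from optd):

// `HashMap` is imported but never used, so rustc's `unused_imports` warning
// fires and an automated fix can delete the line.
use std::collections::HashMap;

fn main() {
    println!("no map needed here");
}
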
7 changes: 3 additions & 4 deletions optd-core/src/cascades/tasks/apply_rule.rs
@@ -1,4 +1,4 @@
use std::{collections::HashMap, hash::Hash, sync::Arc};
use std::sync::Arc;

use itertools::Itertools;
use tracing::trace;
@@ -10,7 +10,7 @@ use crate::{
tasks::{explore_expr::ExploreExprTask, optimize_inputs::OptimizeInputsTask},
CascadesOptimizer, GroupId, Memo,
},
nodes::{ArcPlanNode, ArcPredNode, NodeType, PlanNode, PlanNodeOrGroup},
nodes::{ArcPlanNode, NodeType, PlanNode, PlanNodeOrGroup},
rules::{Rule, RuleMatcher},
};

@@ -178,8 +178,7 @@ fn transform<T: NodeType, M: Memo<T>>(
} else {
picked_datas
.into_iter()
.map(|picked_data| rule.apply(optimizer, picked_data))
.flatten()
.flat_map(|picked_data| rule.apply(optimizer, picked_data))
.collect()
}
}
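
The apply_rule.rs change collapses .map(..).flatten() into .flat_map(..), which is clippy's map_flatten lint: when the closure returns something iterable (here, whatever rule.apply yields), flat_map fuses the two adapter calls. A small sketch with a hypothetical expand function standing in for rule.apply:

// Each input expands into several outputs, like a rule producing several plans.
fn expand(x: i32) -> Vec<i32> {
    vec![x, x * 10]
}

fn apply_all(inputs: Vec<i32>) -> Vec<i32> {
    inputs
        .into_iter()
        // .map(expand).flatten() would trip `map_flatten`; flat_map is the fused form.
        .flat_map(expand)
        .collect()
}
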
4 changes: 2 additions & 2 deletions optd-core/src/cascades/tasks/optimize_inputs.rs
@@ -80,7 +80,7 @@ fn compute_cost<T: NodeType, M: Memo<T>>(
let cost = optimizer.cost();
let children_group_ids = expr.children.clone();
let context = RelNodeContext {
expr_id: expr_id,
expr_id,
group_id,
children_group_ids: children_group_ids.clone(),
};
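
The expr_id: expr_id to expr_id change is clippy's redundant_field_names lint: when the local variable and the struct field share a name, Rust's field-init shorthand applies (as group_id already does on the next line). A standalone sketch with a hypothetical struct in place of RelNodeContext:

struct Ctx {
    expr_id: usize,
    group_id: usize,
}

fn make_ctx(expr_id: usize, group_id: usize) -> Ctx {
    // Writing `expr_id: expr_id` here would trip `redundant_field_names`.
    Ctx { expr_id, group_id }
}
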
@@ -204,7 +204,7 @@ impl<T: NodeType, M: Memo<T>> Task<T, M> for OptimizeInputsTask {
// TODO: add typ to more traces and iteration to traces below
trace!(task_id = self.task_id, parent_task_id = self.parent_task_id, event = "task_begin", task = "optimize_inputs", iteration = %self.iteration, group_id = %group_id, expr_id = %self.expr_id, expr = %expr);
let next_child_expr = expr.children.get(self.iteration);
if let None = next_child_expr {
if next_child_expr.is_none() {
// TODO: If we want to support interrupting the optimizer, it might
// behoove us to update the winner more often than this.
update_winner(self.expr_id, optimizer);
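
The last hunk replaces if let None = next_child_expr with next_child_expr.is_none(), clippy's redundant_pattern_matching lint: matching against None binds nothing, so the boolean method states the intent directly. A minimal sketch:

fn report(next_child_expr: Option<usize>) {
    // `if let None = next_child_expr { .. }` trips `redundant_pattern_matching`.
    if next_child_expr.is_none() {
        println!("no more children to optimize");
    }
}
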
