feat!: delete commented out redundant code #42
egouldo committed Sep 2, 2024
1 parent e8f9dc1 commit 90d3645
Showing 1 changed file with 0 additions and 117 deletions.
117 changes: 0 additions & 117 deletions index.qmd
@@ -2933,123 +2933,6 @@ MA_yi_summary_stats <-
.keep = "none")},
by = join_by(dataset, estimate_type)) %>%
select(-tidy_mod_summary)
# --- old code ----
#
# back_transformed_predictions <-
# ManyEcoEvo_yi %>%
# prepare_response_variables_yi(estimate_type = "yi",
# param_table = ManyEcoEvo:::analysis_data_param_tables) %>%
# generate_yi_subsets()
#
#
# raw_mod_data_logged <-
# back_transformed_predictions %>%
# filter(dataset == "eucalyptus") %>%
# group_by(estimate_type) %>%
# select(estimate_type, data) %>%
# unnest(data) %>%
# rename(study_id = id_col) %>%
# hoist(params, param_mean = list("value", 1), param_sd = list("value", 2)) %>%
# rowwise() %>%
# mutate(exclusion_threshold = param_mean + 3*param_sd) %>%
# filter(fit < exclusion_threshold) %>%
# mutate(log_vals = map2(fit, se.fit, log_transform, 1000)) %>%
# unnest(log_vals) %>%
# select(study_id,
# TeamIdentifier,
# estimate_type,
# starts_with("response_"),
# -response_id_S2,
# ends_with("_log")) %>%
# group_by(estimate_type) %>%
# nest()
#
#
# mod_data_logged <- raw_mod_data_logged %>%
# mutate(MA_mod =
# map(data,
# ~ ManyEcoEvo::fit_MA_mv(effects_analysis = .x,
# Z_colname = mean_log,
# VZ_colname = se_log,
# estimate_type = "yi")))
#
#
# plot_data_logged <- mod_data_logged %>%
# mutate(tidy_mod = map(.x = MA_mod,
# ~broom::tidy(.x,
# conf.int = TRUE,
# include_studies = TRUE) %>%
# rename(study_id = term))) %>%
# select(tidy_mod) %>%
# unnest(cols = c(tidy_mod))
#
# MA_yi_summary_stats <- # ALL ON logged RESPONSE SCALE for EUC, standardized response values for BT
# plot_data_logged %>%
# mutate(response_scale = map2(estimate, std.error, log_back, 100)) %>%
# select(estimate_type, study_id, type, response_scale) %>%
# unnest(response_scale) %>%
# rename(estimate = mean_origin, conf.low = lower, conf.high = upper) %>%
# nest(tidy_mod = -estimate_type) %>%
# mutate(dataset = "eucalyptus") %>%
# bind_rows(., {
# ManyEcoEvo_yi_results %>%
# ungroup() %>%
# filter(exclusion_set == "complete", dataset == "blue tit") %>%
# select(dataset, estimate_type, MA_mod, effects_analysis, -exclusion_set) %>%
# group_by(estimate_type, dataset) %>%
# transmute(tidy_mod = map(.x = MA_mod,
# ~broom::tidy(.x,
# conf.int = TRUE,
# include_studies = TRUE) %>%
# rename(study_id = term)))
# }) %>%
# mutate(MA_mean = map(tidy_mod, filter, type == "summary")) %>%
# hoist(MA_mean,
# mean = "estimate",
# MA_conf.low = "conf.low",
# MA_conf.high = "conf.high") %>%
# mutate(max_min_est = map(tidy_mod,
# ~ filter(.x, type == "study") %>%
# summarise(max_est = max(estimate),
# min_est = min(estimate)))) %>%
# mutate(max_min_CI = map(tidy_mod,
# ~ filter(.x, type == "study") %>%
# summarise(max_upper_CI = max(conf.high),
# min_lower_CI = min(conf.low)))) %>%
# unnest_wider(col = c(max_min_est, max_min_CI)) %>%
# ungroup %>%
# rows_update({plot_data_logged %>% #hells yes to this gem of a function!
# mutate(dataset = "eucalyptus") %>%
# filter(type != "summary") %>%
# nest(tidy_mod = c(-estimate_type, -dataset))},
# by = c("dataset", "estimate_type")) %>%
# mutate(no_effect =
# map_int(tidy_mod,
# ~ filter(.x,
# estimate >0 & conf.low <= 0 | estimate <0 & conf.high >= 0,
# type == "study") %>%
# nrow() ),
# pos_sign =
# map_int(tidy_mod,
# ~ filter(.x, estimate >0, conf.low > 0,
# type == "study") %>%
# nrow()),
# neg_sign =
# map_int(tidy_mod,
# ~ filter(.x, estimate < 0, conf.high < 0,
# type == "study") %>%
# nrow()),
# total_effects =
# map_int(tidy_mod,
# ~ filter(.x,
# type == "study") %>%
# nrow()
# )) %>%
# select(-tidy_mod, -MA_mean) %>%
# rename(MA_mean = mean)
```
As with the effect size $Z_r$, we observed substantial variability in the size of out-of-sample predictions derived from the analysts' models.
