Skip to content

Commit

Permalink
chore (breaking): rename target_weights to case_weights
Browse files Browse the repository at this point in the history
  • Loading branch information
kapsner committed Apr 15, 2024
1 parent 1674811 commit 58d1f66
Show file tree
Hide file tree
Showing 15 changed files with 52 additions and 50 deletions.
4 changes: 2 additions & 2 deletions DESCRIPTION
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
Package: mllrnrs
Title: R6-Based ML Learners for 'mlexperiments'
Version: 0.0.3.9006
Version: 0.0.3.9007
Authors@R:
person("Lorenz A.", "Kapsner", , "[email protected]", role = c("cre", "aut", "cph"),
comment = c(ORCID = "0000-0003-1866-860X"))
Expand Down Expand Up @@ -40,7 +40,7 @@ VignetteBuilder:
knitr
Config/testthat/edition: 3
Config/testthat/parallel: false
Date/Publication: 2024-04-12 10:43:41.959072 UTC
Date/Publication: 2024-04-15 16:09:40.514639 UTC
Encoding: UTF-8
Roxygen: list(markdown = TRUE)
RoxygenNote: 7.3.1
4 changes: 3 additions & 1 deletion NEWS.md
Original file line number Diff line number Diff line change
Expand Up @@ -18,13 +18,15 @@

#### Other changes

- add weight support also for glmnet
([1674811](https://github.com/kapsner/mllrnrs/tree/1674811286f31ef4bfae5f351686396e5f1845ed))
- updated dev-version
([4b1625c](https://github.com/kapsner/mllrnrs/tree/4b1625cc389701ce114b44ee8565f0b147362483))
- updated news.md
([323f9cb](https://github.com/kapsner/mllrnrs/tree/323f9cb0dc0e38213154669dccff7a700f5071c2))

Full set of changes:
[`v0.0.3...935e0dc`](https://github.com/kapsner/mllrnrs/compare/v0.0.3...935e0dc)
[`v0.0.3...1674811`](https://github.com/kapsner/mllrnrs/compare/v0.0.3...1674811)

## v0.0.3 (2024-03-07)

Expand Down
20 changes: 10 additions & 10 deletions R/learner_glmnet.R
Original file line number Diff line number Diff line change
Expand Up @@ -246,14 +246,14 @@ glmnet_optimization <- function(
)
)

# rename mlexperiments "target_weights" to implementation specific (cv.glmnet)
# rename mlexperiments "case_weights" to implementation specific (cv.glmnet)
# "weights"
if ("target_weights" %in% names(cv_args)) {
if ("case_weights" %in% names(cv_args)) {
stopifnot(
"late fail: `target_weights` must be of same length as `y`" =
length(cv_args$target_weights) == length(y)
"late fail: `case_weights` must be of same length as `y`" =
length(cv_args$case_weights) == length(y)
)
names(cv_args)[which(names(cv_args) == "target_weights")] <-
names(cv_args)[which(names(cv_args) == "case_weights")] <-
"weights"
}

Expand Down Expand Up @@ -292,14 +292,14 @@ glmnet_fit <- function(x, y, ncores, seed, ...) {
kwargs
)

# rename mlexperiments "target_weights" to implementation specific (cv.glmnet)
# rename mlexperiments "case_weights" to implementation specific (cv.glmnet)
# "weights"
if ("target_weights" %in% names(fit_args)) {
if ("case_weights" %in% names(fit_args)) {
stopifnot(
"late fail: `target_weights` must be of same length as `y`" =
length(fit_args$target_weights) == length(y)
"late fail: `case_weights` must be of same length as `y`" =
length(fit_args$case_weights) == length(y)
)
names(fit_args)[which(names(fit_args) == "target_weights")] <-
names(fit_args)[which(names(fit_args) == "case_weights")] <-
"weights"
}

Expand Down
16 changes: 8 additions & 8 deletions R/learner_lightgbm.R
Original file line number Diff line number Diff line change
Expand Up @@ -253,17 +253,17 @@ lgb_dataset_wrapper <- function(x, y, params) {
y = y,
objective = params$objective
)
if ("target_weights" %in% names(params)) {
if ("case_weights" %in% names(params)) {
stopifnot(
"late fail: `target_weights` must be of same length as `y`" =
length(params$target_weights) == length(y)
"late fail: `case_weights` must be of same length as `y`" =
length(params$case_weights) == length(y)
)
dataset_args <- c(
dataset_args,
list(target_weights = params$target_weights)
list(case_weights = params$case_weights)
)
# remove target_weights-param from learner-args
params$target_weights <- NULL
# remove case_weights-param from learner-args
params$case_weights <- NULL
}
if ("cat_vars" %in% names(params)) {
cat_vars <- params$cat_vars
Expand Down Expand Up @@ -294,8 +294,8 @@ setup_lgb_dataset <- function(x, y, objective, ...) {
data = x,
label = y
)
if ("target_weights" %in% names(kwargs)) {
dataset_args <- c(dataset_args, list(weight = kwargs$target_weights))
if ("case_weights" %in% names(kwargs)) {
dataset_args <- c(dataset_args, list(weight = kwargs$case_weights))
}

if (!is.null(kwargs$cat_vars)) {
Expand Down
16 changes: 8 additions & 8 deletions R/learner_ranger.R
Original file line number Diff line number Diff line change
Expand Up @@ -179,9 +179,9 @@ ranger_cv <- function(
params
)

if ("target_weights" %in% names(args)) {
args$target_weights <- kdry::mlh_subset(
args$target_weights, ranger_train_idx
if ("case_weights" %in% names(args)) {
args$case_weights <- kdry::mlh_subset(
args$case_weights, ranger_train_idx
)
}

Expand Down Expand Up @@ -304,14 +304,14 @@ ranger_fit <- function(x, y, ncores, seed, ...) {

x <- kdry::dtr_matrix2df(matrix = x, cat_vars = cat_vars)

# rename mlexperiments "target_weights" to implementation specific (ranger)
# rename mlexperiments "case_weights" to implementation specific (ranger)
# "case.weights"
if ("target_weights" %in% names(ranger_params)) {
if ("case_weights" %in% names(ranger_params)) {
stopifnot(
"late fail: `target_weights` must be of same length as `y`" =
length(ranger_params$target_weights) == length(y)
"late fail: `case_weights` must be of same length as `y`" =
length(ranger_params$case_weights) == length(y)
)
names(ranger_params)[which(names(ranger_params) == "target_weights")] <-
names(ranger_params)[which(names(ranger_params) == "case_weights")] <-
"case.weights"
}

Expand Down
16 changes: 8 additions & 8 deletions R/learner_xgboost.R
Original file line number Diff line number Diff line change
Expand Up @@ -228,17 +228,17 @@ xgboost_dataset_wrapper <- function(x, y, params) {
y = y,
objective = params$objective
)
if ("target_weights" %in% names(params)) {
if ("case_weights" %in% names(params)) {
stopifnot(
"late fail: `target_weights` must be of same length as `y`" =
length(params$target_weights) == length(y)
"late fail: `case_weights` must be of same length as `y`" =
length(params$case_weights) == length(y)
)
dataset_args <- c(
dataset_args,
list(target_weights = params$target_weights)
list(case_weights = params$case_weights)
)
# remove target_weights-param from learner-args
params$target_weights <- NULL
# remove case_weights-param from learner-args
params$case_weights <- NULL
}
dtrain <- do.call(setup_xgb_dataset, dataset_args)

Expand Down Expand Up @@ -292,8 +292,8 @@ setup_xgb_dataset <- function(x, y, objective, ...) {
dtrain <- xgboost::xgb.DMatrix(x)
label <- y
xgboost::setinfo(dtrain, "label", label)
if ("target_weights" %in% names(kwargs)) {
xgboost::setinfo(dtrain, "weight", kwargs$target_weights)
if ("case_weights" %in% names(kwargs)) {
xgboost::setinfo(dtrain, "weight", kwargs$case_weights)
}
return(dtrain)
}
Expand Down
2 changes: 1 addition & 1 deletion data-raw/devstuffs.R
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ my_desc$set_authors(c(
# Remove some author fields
my_desc$del("Maintainer")
# Set the version
my_desc$set_version("0.0.3.9006")
my_desc$set_version("0.0.3.9007")
# The title of your package
my_desc$set(Title = "R6-Based ML Learners for 'mlexperiments'")
# The description of your package
Expand Down
2 changes: 1 addition & 1 deletion tests/testthat/test-glmnet_multiclass.R
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ test_that(
family = "multinomial",
type.measure = "class",
standardize = TRUE,
target_weights = y_weights
case_weights = y_weights
)
glmnet_optimizer$predict_args <- list(type = "response", reshape = TRUE)
glmnet_optimizer$performance_metric <- mlexperiments::metric("bacc")
Expand Down
2 changes: 1 addition & 1 deletion tests/testthat/test-lightgbm_multiclass.R
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ test_that(
objective = "multiclass",
metric = "multi_logloss",
num_class = 3,
target_weights = y_weights
case_weights = y_weights
)
lightgbm_optimizer$predict_args <- list(reshape = TRUE)
lightgbm_optimizer$performance_metric <- mlexperiments::metric("bacc")
Expand Down
2 changes: 1 addition & 1 deletion tests/testthat/test-ranger_multiclass.R
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ test_that(
y_weights <- ifelse(train_y == 1, 0.8, ifelse(train_y == 2, 1.2, 1))
ranger_optimizer$learner_args <- list(
classification = TRUE,
target_weights = y_weights
case_weights = y_weights
)

ranger_optimizer$performance_metric <- mlexperiments::metric("bacc")
Expand Down
2 changes: 1 addition & 1 deletion tests/testthat/test-xgboost_multiclass.R
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@ test_that(
objective = "multi:softprob",
eval_metric = "mlogloss",
num_class = 3,
target_weights = y_weights
case_weights = y_weights
)
xgboost_optimizer$predict_args <- list(reshape = TRUE)
xgboost_optimizer$performance_metric <- mlexperiments::metric("bacc")
Expand Down
4 changes: 2 additions & 2 deletions vignettes/mllrnrs_glmnet_multiclass.Rmd
Original file line number Diff line number Diff line change
Expand Up @@ -453,7 +453,7 @@ tuner_w_weights <- mlexperiments::MLTuneParameters$new(
tuner_w_weights$parameter_grid <- parameter_grid
tuner_w_weights$learner_args <- c(
learner_args,
list(target_weights = y_weights)
list(case_weights = y_weights)
)
tuner_w_weights$split_type <- "stratified"
Expand Down Expand Up @@ -499,7 +499,7 @@ validator <- mlexperiments::MLCrossValidation$new(
# append the optimized setting from above with the newly created weights
validator$learner_args <- c(
tuner$results$best.setting[-1],
list("target_weights" = y_weights)
list("case_weights" = y_weights)
)
validator$predict_args <- predict_args
Expand Down
4 changes: 2 additions & 2 deletions vignettes/mllrnrs_lightgbm_multiclass.Rmd
Original file line number Diff line number Diff line change
Expand Up @@ -876,7 +876,7 @@ tuner_w_weights <- mlexperiments::MLTuneParameters$new(
tuner_w_weights$parameter_grid <- parameter_grid
tuner_w_weights$learner_args <- c(
learner_args,
list(target_weights = y_weights)
list(case_weights = y_weights)
)
tuner_w_weights$split_type <- "stratified"
Expand Down Expand Up @@ -931,7 +931,7 @@ validator <- mlexperiments::MLCrossValidation$new(
# append the optimized setting from above with the newly created weights
validator$learner_args <- c(
tuner$results$best.setting[-1],
list("target_weights" = y_weights)
list("case_weights" = y_weights)
)
validator$predict_args <- predict_args
Expand Down
4 changes: 2 additions & 2 deletions vignettes/mllrnrs_ranger_multiclass.Rmd
Original file line number Diff line number Diff line change
Expand Up @@ -522,7 +522,7 @@ tuner_w_weights <- mlexperiments::MLTuneParameters$new(
tuner_w_weights$parameter_grid <- parameter_grid
tuner_w_weights$learner_args <- c(
learner_args,
list(target_weights = y_weights)
list(case_weights = y_weights)
)
tuner_w_weights$split_type <- "stratified"
Expand Down Expand Up @@ -567,7 +567,7 @@ validator <- mlexperiments::MLCrossValidation$new(
# append the optimized setting from above with the newly created weights
validator$learner_args <- c(
tuner$results$best.setting[-1],
list("target_weights" = y_weights)
list("case_weights" = y_weights)
)
validator$predict_args <- predict_args
Expand Down
4 changes: 2 additions & 2 deletions vignettes/mllrnrs_xgboost_multiclass.Rmd
Original file line number Diff line number Diff line change
Expand Up @@ -481,7 +481,7 @@ tuner_w_weights <- mlexperiments::MLTuneParameters$new(
tuner_w_weights$parameter_grid <- parameter_grid
tuner_w_weights$learner_args <- c(
learner_args,
list(target_weights = y_weights)
list(case_weights = y_weights)
)
tuner_w_weights$split_type <- "stratified"
Expand Down Expand Up @@ -535,7 +535,7 @@ validator <- mlexperiments::MLCrossValidation$new(
# append the optimized setting from above with the newly created weights
validator$learner_args <- c(
tuner$results$best.setting[-1],
list("target_weights" = y_weights)
list("case_weights" = y_weights)
)
validator$predict_args <- predict_args
Expand Down

0 comments on commit 58d1f66

Please sign in to comment.