diff --git a/R/BestSpecificLearner.R b/R/BestSpecificLearner.R index 55f7796..cf9c95a 100644 --- a/R/BestSpecificLearner.R +++ b/R/BestSpecificLearner.R @@ -45,6 +45,7 @@ bestSpecificLearner = function (x, y, perf = NULL) { perf_values = unlist(perf_values) } } else { + # nocov start if (is.function(perf)) { arg_names <- names(formals(perf)) if (arg_names %in% c("observed", "predicted")) { @@ -62,6 +63,7 @@ bestSpecificLearner = function (x, y, perf = NULL) { stop("Arguments of the perf function must be 'observed' and 'predicted'.") } } + # nocov end weights_values = (1L / perf_values) / sum((1L / perf_values)) max_index = which.max(weights_values) weights_values = rep(0L, length(weights_values)) diff --git a/R/Target.R b/R/Target.R index b95581a..334818a 100644 --- a/R/Target.R +++ b/R/Target.R @@ -24,27 +24,37 @@ Target <- R6Class("Target", data_frame, training) { if (!any(c("Training") %in% class(training))) { + # nocov start stop("A Target can belong only to a Training object.\n") + # nocov end } ind_col = training$getIndCol() target = training$getTarget() if (!all(c(ind_col, target) %in% colnames(data_frame))) { + # nocov start stop("Individual column ID or target variable not found in the provided data.frame.\n") + # nocov end } if (training$checkTargetExist()) { # Remove TrainData if already existing + # nocov start key_class = train_layer$getKeyClass() key = key_class[key_class$class == "Target", "key"] training$removeFromHashTable(key = key) + # nocov end } private$training = training missing_target = is.na(data_frame[ , target]) if (any(missing_target)) { + # nocov start data_frame = data_frame[!missing_target, ] + # nocov end } missing_id = is.na(data_frame[ , ind_col]) if (any(missing_id)) { + # nocov start data_frame = data_frame[!missing_id, ] + # nocov end } super$initialize(id = id, ind_col = training$getIndCol(), @@ -55,18 +65,23 @@ Target <- R6Class("Target", value = self, .class = "Target") if (any(missing_target)) { + # nocov start 
warning(sprintf("%s individual(s) with missing target value(s) recognized and removed.\n", sum(missing_target))) + # nocov end } if (any(missing_id)) { + # nocov start warning(sprintf("%s individual(s) with missing ID value(s) recognized and removed.\n", sum(missing_id))) + # nocov end } }, #' @description #' Printer #' @param ... (any) \cr #' + # nocov start print = function (...) { cat(sprintf("Training : %s\n", private$training$getId())) cat(sprintf("ind. id. : %s\n", private$ind_col)) @@ -74,10 +89,12 @@ Target <- R6Class("Target", cat(sprintf("n : %s\n", nrow(private$data_frame))) cat(sprintf("Missing : %s\n", sum(!complete.cases(private$data_frame)))) }, + # nocov end #' @description #' Summary #' @param ... (any) \cr #' + # nocov start summary = function (...) { cat(sprintf(" Layer : %s\n", private$training$getId())) cat(sprintf(" Ind. id. : %s\n", private$ind_col)) @@ -85,6 +102,7 @@ Target <- R6Class("Target", cat(sprintf(" n : %s\n", nrow(private$data_frame))) cat(sprintf(" Missing : %s\n", sum(!complete.cases(private$data_frame)))) }, + # nocov end #' @description #' Getter of the current \code{data.frame} wihtout individual #' ID nor target variables. @@ -93,9 +111,11 @@ Target <- R6Class("Target", #' The \code{data.frame} without individual ID nor target variables is returned. #' @export #' + # nocov start getData = function () { return(private$data_frame) }, + # nocov end #' @description #' Getter of target values stored on the current training layer. #' @@ -103,16 +123,20 @@ Target <- R6Class("Target", #' The observed target values stored on the current training layer are returned. #' @export #' + # nocov start getTargetValues = function () { return(private$data_frame[[private$target]]) }, + # nocov end #' @description #' Getter of the target variable name. #' #' @export #' getTargetName = function () { + # nocov start return(private$target) + # nocov end }, #' @description #' Getter of the current training object. 
@@ -123,7 +147,9 @@ Target <- R6Class("Target", #' @export #' getTraining = function () { + # nocov start return(private$training) + # nocov end } ), private = list( diff --git a/R/predict.bestSpecificLearner.R b/R/predict.bestSpecificLearner.R index dd23cf9..88919a6 100644 --- a/R/predict.bestSpecificLearner.R +++ b/R/predict.bestSpecificLearner.R @@ -34,6 +34,6 @@ predict.bestSpecificLearner = function (object, data, na.rm = TRUE) { }) return(list(predictions = unlist(pred))) } else { - stop("Names of weights do not match with name columns in data") + stop("Names of weights do not match with name columns in data.") } } diff --git a/R/weightedMeanLearner.R b/R/weightedMeanLearner.R index 8d6b95c..74c7afd 100644 --- a/R/weightedMeanLearner.R +++ b/R/weightedMeanLearner.R @@ -7,6 +7,8 @@ #' \code{data.frame} of predictors. #' @param y `vector(1)` \cr #' Target observations. Either binary or two level factor variable. +#' @param weighted `boolean(1)` \cr +#' If TRUE, a weighted mean is computed; otherwise, equal weights are used. #' #' @return #' A model object of class \code{weightedMeanLeaner}. @@ -19,7 +21,7 @@ #' y = sample(x = 0L:1L, size = 50L, replace = TRUE) #' my_model = weightedMeanLearner(x = x, y = y) #' -weightedMeanLearner = function (x, y) { +weightedMeanLearner = function (x, y, weighted = TRUE) { # y must be binomial. 
If dichotomy, first category (case) = 1 and # second (control) = 0 if ((length(unique(y)) > 2) | is.character(y)) { @@ -39,7 +41,11 @@ weightedMeanLearner = function (x, y) { }) brier_values = unlist(brier_values) # weights_values = (1 - brier_values) / sum((1 - brier_values)) - weights_values = (1 / brier_values) / sum((1 / brier_values)) + if (weighted) { + weights_values = (1 / brier_values) / sum((1 / brier_values)) + } else { + weights_values = rep(1 / length(brier_values), length(brier_values)) + } names(weights_values) = names(x) class(weights_values) = "weightedMeanLearner" return(weights_values) diff --git a/man/weightedMeanLearner.Rd b/man/weightedMeanLearner.Rd index dbf1d1c..0a9ac21 100644 --- a/man/weightedMeanLearner.Rd +++ b/man/weightedMeanLearner.Rd @@ -4,7 +4,7 @@ \alias{weightedMeanLearner} \title{The weighted mean meta learner} \usage{ -weightedMeanLearner(x, y) +weightedMeanLearner(x, y, weighted = TRUE) } \arguments{ \item{x}{\code{data.frame(1)} \cr @@ -12,6 +12,9 @@ weightedMeanLearner(x, y) \item{y}{\code{vector(1)} \cr Target observations. Either binary or two level factor variable.} + +\item{weighted}{\cr +If TRUE, a weighted mean is computed; otherwise, equal weights are used.} } \value{ A model object of class \code{weightedMeanLeaner}.