From e1d6c200dd85b3152a241d1817d75d15b8f85362 Mon Sep 17 00:00:00 2001 From: Cesaire Joris Kuete Fouodo Date: Tue, 5 Nov 2024 15:24:05 +0100 Subject: [PATCH] Remove Param class --- .Rbuildignore | 2 + DESCRIPTION | 3 - NAMESPACE | 3 - R/Model.R | 6 +- R/PredictData.R | 3 +- R/PredictMetaLayer.R | 1 - R/Predicting.R | 15 ++- R/Target.R | 2 +- R/TestLayer.R | 2 +- R/TestMetaLayer.R | 2 +- R/Testing.R | 2 +- R/TrainData.R | 2 +- R/TrainLayer.R | 2 +- R/TrainMetaLayer.R | 2 +- R/Training.R | 2 +- R/VarSel.R | 10 +- README.Rmd | 47 ++++---- README.md | 153 ++++++++++++++------------- man/Lrner.Rd | 35 +++++- man/Model.Rd | 4 +- man/Param.Rd | 97 ----------------- man/ParamLrner.Rd | 110 ------------------- man/ParamVarSel.Rd | 94 ---------------- man/PredictMetaLayer.Rd | 3 +- man/Predicting.Rd | 11 +- man/Target.Rd | 2 +- man/TestLayer.Rd | 2 +- man/TestMetaLayer.Rd | 2 +- man/Testing.Rd | 2 +- man/TrainData.Rd | 2 +- man/TrainLayer.Rd | 2 +- man/TrainMetaLayer.Rd | 2 +- man/VarSel.Rd | 4 +- tests/testthat/test-Lrner.R | 23 ++-- tests/testthat/test-Model.R | 8 +- tests/testthat/test-Param.R | 20 ---- tests/testthat/test-ParamLrner.R | 18 ---- tests/testthat/test-ParamVarSel.R | 10 -- tests/testthat/test-TrainLayer.R | 14 +-- tests/testthat/test-TrainMetaLayer.R | 12 +-- tests/testthat/test-Training.R | 31 ++---- tests/testthat/test-VarSel.R | 16 ++- 42 files changed, 205 insertions(+), 578 deletions(-) delete mode 100644 man/Param.Rd delete mode 100644 man/ParamLrner.Rd delete mode 100644 man/ParamVarSel.Rd delete mode 100644 tests/testthat/test-Param.R delete mode 100644 tests/testthat/test-ParamLrner.R delete mode 100644 tests/testthat/test-ParamVarSel.R diff --git a/.Rbuildignore b/.Rbuildignore index de0f6d3..9e20373 100644 --- a/.Rbuildignore +++ b/.Rbuildignore @@ -5,3 +5,5 @@ ^README.Rmd ^README.files ^\.covrignore +^doc$ +^Meta$ diff --git a/DESCRIPTION b/DESCRIPTION index 06ba41f..4feb438 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -30,9 +30,6 @@ Collate: 'HashTable.R' 'Lrner.R' 'Model.R' - 'Param.R' - 'ParamLrner.R' - 'ParamVarSel.R' 'PredictData.R' 'PredictLayer.R' 'PredictMetaLayer.R' diff --git a/NAMESPACE b/NAMESPACE index 44f04a9..eb1bf43 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -6,9 +6,6 @@ export(Data) export(HashTable) export(Lrner) export(Model) -export(Param) -export(ParamLrner) -export(ParamVarSel) export(PredictData) export(PredictLayer) export(PredictMetaLayer) diff --git a/R/Model.R b/R/Model.R index 447231f..7ec954c 100644 --- a/R/Model.R +++ b/R/Model.R @@ -129,7 +129,7 @@ Model <- R6Class("Model", #' @param ind_subset `vector(1)` \cr #' Subset of individual IDs to be predicted. #' @param ... - #' Further parameters. + #' Further parameters to be passed to the basic predict function. #' #' @return #' The predicted object are returned. The predicted object must be either a vector or a list @@ -137,7 +137,7 @@ Model <- R6Class("Model", #' #' @export #' - predict = function (testing_data, ind_subset = NULL, ...) { + predict = function (testing_data, ind_subset = NULL) { tmp_lrner = self$getLrner() if(tmp_lrner$getTrainLayer()$getId() != testing_data$getTestLayer()$getId()) { stop("Learner and data must belong to the same layer.") @@ -165,7 +165,7 @@ Model <- R6Class("Model", var_name = testing_data$getIndCol(), value = ind_subset) } - pred_param <- list(...) 
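The replacement line just below reads prediction-time arguments from the learner instead of collecting them from `...`. As a hedged, caller-side sketch of what that means (argument values are illustrative, `tl_ge` stands for an already created `TrainLayer`, and `num.threads` is simply a valid argument of `ranger`'s `predict()`):

``` r
# Sketch only: prediction arguments now travel with the Lrner rather than with Model$predict(...).
lrner_ge <- Lrner$new(id = "ranger",
                      package = "ranger",
                      lrn_fct = "ranger",
                      param_train_list = list(probability = TRUE, mtry = 1L),
                      # assumed to be forwarded to ranger's predict() at prediction time
                      param_pred_list = list(num.threads = 1L),
                      train_layer = tl_ge)
lrner_ge$getParamPred()  # the list that Model$predict() starts from
```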
+ pred_param <- private$lrner$getParamPred() pred_param$object = self$getBaseModel() # Predict using the subset of variables utilized for training training_var = colnames(private$train_data$getData()) diff --git a/R/PredictData.R b/R/PredictData.R index aaca187..9ce0264 100644 --- a/R/PredictData.R +++ b/R/PredictData.R @@ -46,8 +46,7 @@ PredictData <- R6Class("PredictData", #' @export #' getPredictData = function () { - tmp_data <- private$data_frame - return(tmp_data) + return(private$data_frame) }, #' @description #' Getter of the current layer. diff --git a/R/PredictMetaLayer.R b/R/PredictMetaLayer.R index 478751e..877470b 100644 --- a/R/PredictMetaLayer.R +++ b/R/PredictMetaLayer.R @@ -14,7 +14,6 @@ PredictMetaLayer <- R6Class("PredictMetaLayer", #' constructor #' #' @param id (`character(1)`)\cr - #' See class Param #' @param predicting (`Predicting(1)`)\cr #' initialize = function (id, predicting) { diff --git a/R/Predicting.R b/R/Predicting.R index a26b0fd..0c1fb6c 100644 --- a/R/Predicting.R +++ b/R/Predicting.R @@ -1,13 +1,12 @@ #' @title Predicting Class #' #' @description -#' This class is the basic class of the present package. An object from this class -#' is designed to contain multiple layers, but only one meta layer. +#' This class is designed for predictions. #' #' The Predicting is structured as followed: -#' * [PredictLayer]: Can be clinical, gene expression, etc. -#' - [PredictData]: Specific to each layer, it must be set up by the user. -#' * [PredictMetaLayer]: Basically a [PredictLayer], but with some specific properties. +#' * [PredictLayer]: Exists for each modality. +#' - [PredictData]: Related class for modality-specific predictions. +#' * [PredictMetaLayer]: Related class for meta predictions. #' - [PredictData]: Specific to the meta layer, it is set up internally after cross-validation. #' #' Use the function \code{train} for training and \code{predict} for predicting. @@ -25,7 +24,7 @@ Predicting <- R6Class("Predicting", #' constructor #' #' @param id (`character(1)`)\cr - #' See class Param + #' Predicting id. #' @param ind_col (`character(1L)`) #' Name of column of individuals IDS initialize = function (id, ind_col) { @@ -55,15 +54,13 @@ Predicting <- R6Class("Predicting", key_class_predicting = self$getKeyClass() predicted_values = NULL for (k in key_class_predicting[ , "key"]) { - # FIXME: Maybe define a class Prediction instead of - # using Hashtable? pred_layer = self$getFromHashTable(key = k) pred_data = pred_layer$getPredictData() pred_values = pred_data$getPredictData() predicted_values = data.frame(rbind(predicted_values, pred_values)) } - # Will transform meta data.frame into wide format + # Will transform meta data.frame into wide format. In case of data.frame, only the first column is considered. predicted_values_wide = reshape(predicted_values, idvar = colnames(predicted_values)[2L], timevar = colnames(predicted_values)[1L], diff --git a/R/Target.R b/R/Target.R index b6b6d5d..b95581a 100644 --- a/R/Target.R +++ b/R/Target.R @@ -7,7 +7,7 @@ #' @export #' #' @importFrom R6 R6Class -#' @seealso [TrainLayer], [Lrner], [Model], [ParamLrner], [TestData] +#' @seealso [TrainLayer], [Lrner], [Model], [TestData] Target <- R6Class("Target", inherit = Data, public = list( diff --git a/R/TestLayer.R b/R/TestLayer.R index 6088461..05a3c75 100644 --- a/R/TestLayer.R +++ b/R/TestLayer.R @@ -16,7 +16,7 @@ TestLayer <- R6Class("TestLayer", #' constructor #' #' @param id (`character(1)`)\cr - #' See class Param + #' Testing layer id. 
#' @param testing (`Testing(1)`)\cr #' initialize = function (id, testing) { diff --git a/R/TestMetaLayer.R b/R/TestMetaLayer.R index f18aafa..22bf1dd 100644 --- a/R/TestMetaLayer.R +++ b/R/TestMetaLayer.R @@ -14,7 +14,7 @@ TestMetaLayer <- R6Class("TestMetaLayer", #' constructor #' #' @param id (`character(1)`)\cr - #' See class Param + #' Testing meta-layer id. #' @param testing (`Testing(1)`)\cr #' initialize = function (id, testing) { diff --git a/R/Testing.R b/R/Testing.R index 2b08808..6fc8f2e 100644 --- a/R/Testing.R +++ b/R/Testing.R @@ -20,7 +20,7 @@ Testing <- R6Class("Testing", #' constructor #' #' @param id (`character(1)`)\cr - #' See class Param + #' Testing id. #' @param ind_col (`character(1)`) #' Name of column of individuals IDS initialize = function (id, ind_col) { diff --git a/R/TrainData.R b/R/TrainData.R index 105a6f1..4d1a8d5 100644 --- a/R/TrainData.R +++ b/R/TrainData.R @@ -7,7 +7,7 @@ #' @export #' #' @importFrom R6 R6Class -#' @seealso [TrainLayer], [Lrner], [Model], [ParamLrner], [TestData] +#' @seealso [TrainLayer], [Lrner], [Model], [TestData] TrainData <- R6Class("TrainData", inherit = Data, public = list( diff --git a/R/TrainLayer.R b/R/TrainLayer.R index 7d09674..eee972b 100644 --- a/R/TrainLayer.R +++ b/R/TrainLayer.R @@ -24,7 +24,7 @@ TrainLayer <- R6Class("TrainLayer", #' constructor #' #' @param id (`character(1)`)\cr - #' See class Param + #' Training layer id. #' @param training (`Training(1)`)\cr #' initialize = function (id, training) { diff --git a/R/TrainMetaLayer.R b/R/TrainMetaLayer.R index e1edb51..be090fb 100644 --- a/R/TrainMetaLayer.R +++ b/R/TrainMetaLayer.R @@ -22,7 +22,7 @@ TrainMetaLayer <- R6Class("TrainMetaLayer", #' constructor #' #' @param id (`character(1)`)\cr - #' See class Param + #' Id of training meta-layer. #' @param training (`Training(1)`)\cr #' initialize = function (id, training) { diff --git a/R/Training.R b/R/Training.R index be84ff6..27b8e27 100644 --- a/R/Training.R +++ b/R/Training.R @@ -310,7 +310,7 @@ Training <- R6Class("Training", #' The predicted object. All layers and the meta layer are predicted. This is the final predicted object. #' @export #' - # TODO: Mention that our predictions based on cross-validation are different from that coming from the original learning method; e.g. that coming from ranger. + # Our predictions based on cross-validation are different from that coming from the original learning method; e.g. that coming from ranger. predict = function (testing, ind_subset = NULL) { # 0) Check consistency between training and testing layers diff --git a/R/VarSel.R b/R/VarSel.R index fb9eecf..625665c 100644 --- a/R/VarSel.R +++ b/R/VarSel.R @@ -22,19 +22,19 @@ VarSel <- R6Class("VarSel", #' Variable selection function name. Note: Variable selection functions, except \code{Boruta}, must return a vector of selected variables. #' @param varsel_fct (`character(1)`) \cr #' Variable selection parameters. - #' @param param (`ParamVarSel(1)`) \cr + #' @param varsel_param (`list(1)`) \cr #' Layer on which the learner is stored. #' @param train_layer (`TrainLayer(1)`) \cr #' The training layer where to store the learner. 
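As an aside, a minimal usage sketch of this constructor after the change, mirroring the README example further below (`tl_ge` denotes an already created `TrainLayer`): the former `ParamVarSel` object is replaced by a plain named list passed as `varsel_param`.

``` r
# Sketch only: Boruta/ranger arguments taken from the README example.
varsel_ge <- VarSel$new(id = "varsel_geneexpr",
                        package = "Boruta",
                        varsel_fct = "Boruta",
                        varsel_param = list(num.trees = 1000L,
                                            mtry = 3L,
                                            probability = TRUE),
                        train_layer = tl_ge)
```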
initialize = function (id, package = NULL, varsel_fct, - param, + varsel_param, train_layer) { private$id = id private$package = package private$varsel_fct = varsel_fct - private$param = param + private$param = varsel_param if (!any(c("TrainLayer") %in% class(train_layer))) { stop("A variable selection tool can only belong to object of class TrainLayer.") } @@ -92,7 +92,7 @@ VarSel <- R6Class("VarSel", varsel = sprintf('%s::%s', private$package, private$varsel_fct) } - varsel_param = private$param$getParamVarSel() + varsel_param = private$param # Prepare training dataset if (!is.null(ind_subset)) { train_data = train_data$getIndSubset( @@ -188,7 +188,7 @@ VarSel <- R6Class("VarSel", package = NULL, # Learn function name (like \code{ranger}). varsel_fct = NULL, - # Parameters (from class [Param]) of the learn function. + # Parameters of the variable selection function. param = NULL, # Training layer (from class [TainLayer] or [TrainMetaLayer]) of the current learner. train_layer = NULL, diff --git a/README.Rmd b/README.Rmd index cdcb5f5..8e29caf 100644 --- a/README.Rmd +++ b/README.Rmd @@ -124,34 +124,30 @@ training$upset(order.by = "freq") We need to set up variable selection methods to our training resources. Note that this can be the same method or different layer-specific methods. For simplicity, we will set up the same method on all layers. -- Preparation parameters of the variable selection method. - -```{r varsel_param, include=TRUE, eval=TRUE} -same_param_varsel <- ParamVarSel$new(id = "ParamVarSel", - param_list = list(num.trees = 1000L, - mtry = 3L, - probability = TRUE)) -print(same_param_varsel) -``` - - Instantiate the variable selection method and assign training layers. ```{r varsel_object, include=TRUE, eval=TRUE} varsel_ge <- VarSel$new(id = "varsel_geneexpr", package = "Boruta", varsel_fct = "Boruta", - param = same_param_varsel, + varsel_param = list(num.trees = 1000L, + mtry = 3L, + probability = TRUE), train_layer = tl_ge) varsel_pr <- VarSel$new(id = "varsel_proteinexpr", package = "Boruta", varsel_fct = "Boruta", - param = same_param_varsel, + varsel_param = list(num.trees = 1000L, + mtry = 3L, + probability = TRUE), train_layer = tl_pr) varsel_me <- VarSel$new(id = "varsel_methylation", package = "Boruta", varsel_fct = "Boruta", - param = same_param_varsel, + varsel_param = list(num.trees = 1000L, + mtry = 3L, + probability = TRUE), train_layer = tl_me) ``` @@ -169,38 +165,30 @@ For each layer, the variable selection results show the chosen variables. In thi We can now train our models using the subset of selected variables. Users can choose to set up layer-specific learners, but for illustration, we will use the same learner for all layers. -- Set up the same leaner parameters. - -```{r lrner_param, include=TRUE, eval=TRUE} -same_param <- ParamLrner$new(id = "ParamRanger", - param_list = list(probability = TRUE, - mtry = 1L), - hyperparam_list = list(num.trees = 1000L)) -``` - - Set up learners for each layer. We will use a weighted sum, implemented internally by `fuseMLR`, for the meta-analysis. 
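The weighted sum used on the meta-layer is conceptually a weighted mean of the layer-specific predicted probabilities, with the weights estimated internally by `fuseMLR`. A small hand calculation, using the weights and the patient23 layer predictions shown in the rendered README further below, illustrates the idea (a conceptual sketch, not the package's implementation):

``` r
# Conceptual illustration only; fuseMLR estimates these weights itself during training.
weights     <- c(geneexpr = 0.2326579, proteinexpr = 0.4241035, methylation = 0.3432386)
layer_preds <- c(geneexpr = 0.3955167, proteinexpr = 0.6035587, methylation = 0.1754175)
sum(weights * layer_preds) / sum(weights)  # ~0.408, the meta-layer value for patient23
```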
```{r lrner, include=TRUE, eval=TRUE} lrner_ge <- Lrner$new(id = "ranger", package = "ranger", lrn_fct = "ranger", - param = same_param, + param_train_list = list(probability = TRUE, + mtry = 1L), train_layer = tl_ge) lrner_pr <- Lrner$new(id = "ranger", package = "ranger", lrn_fct = "ranger", - param = same_param, + param_train_list = list(probability = TRUE, + mtry = 1L), train_layer = tl_pr) lrner_me <- Lrner$new(id = "ranger", package = "ranger", lrn_fct = "ranger", - param = same_param, + param_train_list = list(probability = TRUE, + mtry = 1L), train_layer = tl_me) lrner_meta <- Lrner$new(id = "weighted", lrn_fct = "weightedMeanLearner", - param = ParamLrner$new(id = "ParamWeighted", - param_list = list(), - hyperparam_list = list()), + param_train_list = list(), na_rm = FALSE, train_layer = tl_meta) ``` @@ -215,7 +203,8 @@ disease <- training$getTargetValues()$disease trained <- training$train(resampling_method = "caret::createFolds", resampling_arg = list(y = disease, k = 10L), - use_var_sel = TRUE) + use_var_sel = TRUE, + verbose = FALSE) # Let us now check the status of our training resources. print(trained) # Let us check the status of a layer as well. diff --git a/README.md b/README.md index 643e4b4..4bb05bd 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,12 @@ +--- +title: "fuseMLR" +author: Cesaire J. K. Fouodo +output: + md_document: + variant: gfm + preserve_yaml: true +--- + [![R-CMD-check](https://github.com/imbs-hl/fuseMLR/actions/workflows/R-CMD-check.yaml/badge.svg)](https://github.com/imbs-hl/fuseMLR/actions/workflows/R-CMD-check.yaml) @@ -135,7 +144,7 @@ print(training) ## Training : training ## Status : Not trained - ## Number of layers: 1 + ## Number of layers: 0 ## Layers trained : 0 ## n : 70 @@ -194,7 +203,7 @@ print(training) ## Training : training ## Status : Not trained - ## Number of layers: 5 + ## Number of layers: 4 ## Layers trained : 0 ## n : 70 @@ -213,46 +222,30 @@ We need to set up variable selection methods to our training resources. Note that this can be the same method or different layer-specific methods. For simplicity, we will set up the same method on all layers. -- Preparation parameters of the variable selection method. - -``` r -same_param_varsel <- ParamVarSel$new(id = "ParamVarSel", - param_list = list(num.trees = 1000L, - mtry = 3L, - probability = TRUE)) -print(same_param_varsel) -``` - - ## Class: ParamVarSel - ## id : ParamVarSel - ## Parameter combination - ## $num.trees - ## [1] 1000 - ## - ## $mtry - ## [1] 3 - ## - ## $probability - ## [1] TRUE - - Instantiate the variable selection method and assign training layers. ``` r varsel_ge <- VarSel$new(id = "varsel_geneexpr", package = "Boruta", varsel_fct = "Boruta", - param = same_param_varsel, + varsel_param = list(num.trees = 1000L, + mtry = 3L, + probability = TRUE), train_layer = tl_ge) varsel_pr <- VarSel$new(id = "varsel_proteinexpr", package = "Boruta", varsel_fct = "Boruta", - param = same_param_varsel, + varsel_param = list(num.trees = 1000L, + mtry = 3L, + probability = TRUE), train_layer = tl_pr) varsel_me <- VarSel$new(id = "varsel_methylation", package = "Boruta", varsel_fct = "Boruta", - param = same_param_varsel, + varsel_param = list(num.trees = 1000L, + mtry = 3L, + probability = TRUE), train_layer = tl_me) ``` @@ -261,6 +254,21 @@ varsel_me <- VarSel$new(id = "varsel_methylation", ``` r set.seed(5467) var_sel_res <- training$varSelection() +``` + + ## Variable selection on layer geneexpr started. + + ## Variable selection on layer geneexpr done. 
+ + ## Variable selection on layer proteinexpr started. + + ## Variable selection on layer proteinexpr done. + + ## Variable selection on layer methylation started. + + ## Variable selection on layer methylation done. + +``` r print(var_sel_res) ``` @@ -319,15 +327,6 @@ We can now train our models using the subset of selected variables. Users can choose to set up layer-specific learners, but for illustration, we will use the same learner for all layers. -- Set up the same leaner parameters. - -``` r -same_param <- ParamLrner$new(id = "ParamRanger", - param_list = list(probability = TRUE, - mtry = 1L), - hyperparam_list = list(num.trees = 1000L)) -``` - - Set up learners for each layer. We will use a weighted sum, implemented internally by `fuseMLR`, for the meta-analysis. @@ -335,23 +334,24 @@ same_param <- ParamLrner$new(id = "ParamRanger", lrner_ge <- Lrner$new(id = "ranger", package = "ranger", lrn_fct = "ranger", - param = same_param, + param_train_list = list(probability = TRUE, + mtry = 1L), train_layer = tl_ge) lrner_pr <- Lrner$new(id = "ranger", package = "ranger", lrn_fct = "ranger", - param = same_param, + param_train_list = list(probability = TRUE, + mtry = 1L), train_layer = tl_pr) lrner_me <- Lrner$new(id = "ranger", package = "ranger", lrn_fct = "ranger", - param = same_param, + param_train_list = list(probability = TRUE, + mtry = 1L), train_layer = tl_me) lrner_meta <- Lrner$new(id = "weighted", lrn_fct = "weightedMeanLearner", - param = ParamLrner$new(id = "ParamWeighted", - param_list = list(), - hyperparam_list = list()), + param_train_list = list(), na_rm = FALSE, train_layer = tl_meta) ``` @@ -366,14 +366,15 @@ disease <- training$getTargetValues()$disease trained <- training$train(resampling_method = "caret::createFolds", resampling_arg = list(y = disease, k = 10L), - use_var_sel = TRUE) + use_var_sel = TRUE, + verbose = FALSE) # Let us now check the status of our training resources. 
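# (verbose = FALSE is set above, presumably to keep per-layer training
# messages, like the variable-selection messages earlier, out of this rendered output.)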
print(trained) ``` ## Training : training ## Status : Trained - ## Number of layers: 5 + ## Number of layers: 4 ## Layers trained : 4 ## n : 70 @@ -399,7 +400,7 @@ print(tmp_model$getBaseModel()) ``` ## geneexpr proteinexpr methylation - ## 0.2336235 0.4249688 0.3414078 + ## 0.2326579 0.4241035 0.3432386 ## attr(,"class") ## [1] "weightedMeanLearner" @@ -485,35 +486,35 @@ print(predictions) ## ## $predicted_values ## IDS geneexpr proteinexpr methylation meta_layer - ## 1 patient23 0.3899925 0.59490714 0.17449127 0.40350102 - ## 2 patient77 0.4222480 0.49866468 0.14058889 0.35856210 - ## 3 patient62 0.7736817 0.96993571 NA 0.90031823 - ## 4 patient43 0.3301254 NA NA 0.33012540 - ## 5 patient8 0.7346532 0.85212024 0.83084960 0.81741522 - ## 6 patient74 0.5555329 0.69613294 0.54980476 0.61332790 - ## 7 patient29 0.3362905 0.46632857 0.27474444 0.37054031 - ## 8 patient17 0.3997508 0.34566627 NA 0.36485176 - ## 9 patient25 0.2778349 0.45710040 0.09602103 0.29194448 - ## 10 patient54 0.7863302 NA 0.84842857 0.82319925 - ## 11 patient60 0.7415452 0.84184524 0.79166865 0.80128213 - ## 12 patient44 0.3717952 NA NA 0.37179524 - ## 13 patient1 0.8211250 0.93648373 NA 0.89556234 - ## 14 patient76 0.6840357 NA 0.60861508 0.63925695 - ## 15 patient16 0.6923929 NA 0.69758968 0.69547832 - ## 16 patient27 0.3699286 NA 0.20396786 0.27139431 - ## 17 patient58 0.5648032 0.75506071 0.76331349 0.71342965 - ## 18 patient52 0.4850317 0.13170119 NA 0.25703870 - ## 19 patient10 0.2517774 NA NA 0.25177738 - ## 20 patient72 0.7087675 0.94573135 0.62339762 0.78032379 - ## 21 patient39 NA 0.08271032 NA 0.08271032 - ## 25 patient46 NA 0.22185635 0.51994405 0.35464937 - ## 26 patient97 NA 0.70887738 0.86840476 0.77994413 - ## 27 patient31 NA 0.28228095 NA 0.28228095 - ## 31 patient87 NA 0.29549603 0.26039048 0.27985710 - ## 33 patient59 NA 0.14183968 0.38772183 0.25137601 - ## 34 patient2 NA 0.55434325 0.79045437 0.65952675 - ## 53 patient85 NA NA 0.15880913 0.15880913 - ## 60 patient3 NA NA 0.57764881 0.57764881 + ## 1 patient23 0.3955167 0.6035587 0.17541746 0.4082015 + ## 2 patient77 0.4194508 0.4910333 0.12029762 0.3471283 + ## 3 patient62 0.7698508 0.9696040 NA 0.8988413 + ## 4 patient43 0.3206127 NA NA 0.3206127 + ## 5 patient8 0.7545587 0.8471222 0.83950000 0.8229704 + ## 6 patient74 0.5489651 0.6835690 0.53626667 0.6016925 + ## 7 patient29 0.3272770 0.4641143 0.28055873 0.3692746 + ## 8 patient17 0.4088706 0.3569238 NA 0.3753260 + ## 9 patient25 0.2823317 0.4480825 0.09887937 0.2896593 + ## 10 patient54 0.8051635 NA 0.84849444 0.8309891 + ## 11 patient60 0.7333056 0.8435198 0.80658413 0.8051999 + ## 12 patient44 0.3889190 NA NA 0.3889190 + ## 13 patient1 0.8166127 0.9415270 NA 0.8972761 + ## 14 patient76 0.6997905 NA 0.62434603 0.6548250 + ## 15 patient16 0.7076770 NA 0.69410000 0.6995850 + ## 16 patient27 0.3591865 NA 0.22121032 0.2769517 + ## 17 patient58 0.5753381 0.7549960 0.76922857 0.7180823 + ## 18 patient52 0.4747833 0.1256032 NA 0.2493004 + ## 19 patient10 0.2418214 NA NA 0.2418214 + ## 20 patient72 0.7309365 0.9540413 0.61715873 0.7865031 + ## 21 patient39 NA 0.0834881 NA 0.0834881 + ## 25 patient46 NA 0.2154611 0.53634524 0.3589953 + ## 26 patient97 NA 0.7014659 0.86992143 0.7768175 + ## 27 patient31 NA 0.2656349 NA 0.2656349 + ## 31 patient87 NA 0.2897254 0.25276032 0.2731906 + ## 33 patient59 NA 0.1475627 0.39242143 0.2570901 + ## 34 patient2 NA 0.5379889 0.81212460 0.6606121 + ## 53 patient85 NA NA 0.15531746 0.1553175 + ## 60 patient3 NA NA 0.58667143 0.5866714 - Prediction performances for 
layer-specific available patients, and all patients on the meta layer. @@ -536,7 +537,7 @@ print(perf_estimated) ``` ## geneexpr proteinexpr methylation meta_layer - ## 0.1147740 0.1645159 0.0815040 0.1231755 + ## 0.11205017 0.16567829 0.07991965 0.12184525 - Prediction performances for overlapping individuals. @@ -552,7 +553,7 @@ print(perf_overlapping) ``` ## geneexpr proteinexpr methylation meta_layer - ## 0.12551583 0.13679344 0.06749516 0.09629256 + ## 0.12296962 0.13685581 0.06797495 0.09559852 Note that our example is based on simulated data for usage illustration; only one run is not enough to appreciate the performances of our models. diff --git a/man/Lrner.Rd b/man/Lrner.Rd index 9d47efb..a5555c2 100644 --- a/man/Lrner.Rd +++ b/man/Lrner.Rd @@ -20,6 +20,7 @@ This class implements a learner. A \link{Lrner} object can only exist as a compo \item \href{#method-Lrner-getPackage}{\code{Lrner$getPackage()}} \item \href{#method-Lrner-getIndSubset}{\code{Lrner$getIndSubset()}} \item \href{#method-Lrner-getVarSubset}{\code{Lrner$getVarSubset()}} +\item \href{#method-Lrner-getParamPred}{\code{Lrner$getParamPred()}} } } \if{html}{\out{
}} @@ -28,7 +29,15 @@ This class implements a learner. A \link{Lrner} object can only exist as a compo \subsection{Method \code{new()}}{ Initialize a default parameters list. \subsection{Usage}{ -\if{html}{\out{
}}\preformatted{Lrner$new(id, package = NULL, lrn_fct, param, train_layer, na_rm = TRUE)}\if{html}{\out{
}} +\if{html}{\out{
}}\preformatted{Lrner$new( + id, + package = NULL, + lrn_fct, + param_train_list, + param_pred_list = list(), + train_layer, + na_rm = TRUE +)}\if{html}{\out{
}} } \subsection{Arguments}{ @@ -38,13 +47,16 @@ Initialize a default parameters list. Learner ID.} \item{\code{package}}{(\code{character(1)}) \cr Package that implements the learn function. If NULL, the learn function is called from the current environment.} \item{\code{lrn_fct}}{(\code{character(1)}) \cr Learn function name.} + +\item{\code{param_train_list}}{(\code{list(1)}) \cr +List of parameters to be used for training.} -\item{\code{param}}{(\code{ParamLrner(1)}) \cr -Learn parameters.} +\item{\code{param_pred_list}}{(\code{list(1)}) \cr +List of parameters to be used for prediction. Default is an empty list.} \item{\code{train_layer}}{(\code{TrainLayer(1)}) \cr @@ -192,4 +204,17 @@ Getter of the variable subset used for training. The list of variables used for training is returned. } } +\if{html}{\out{
}} +\if{html}{\out{}} +\if{latex}{\out{\hypertarget{method-Lrner-getParamPred}{}}} +\subsection{Method \code{getParamPred()}}{ +Getter of the predicting parameter list. +\subsection{Usage}{ +\if{html}{\out{
}}\preformatted{Lrner$getParamPred()}\if{html}{\out{
}} +} + +\subsection{Returns}{ +The list of predicting parameters. +} +} } diff --git a/man/Model.Rd index 2c90163..916a74d 100644 --- a/man/Model.Rd +++ b/man/Model.Rd @@ -161,7 +161,7 @@ ID value} Predict target values for the new data (from class \link{TestData}) taken as input. \subsection{Usage}{ -\if{html}{\out{
}}\preformatted{Model$predict(testing_data, ind_subset = NULL, ...)}\if{html}{\out{
}} +\if{html}{\out{
}}\preformatted{Model$predict(testing_data, ind_subset = NULL)}\if{html}{\out{
}} } \subsection{Arguments}{ @@ -173,7 +173,7 @@ An object from class \link{TestData}.} \item{\code{ind_subset}}{\code{vector(1)} \cr Subset of individual IDs to be predicted.} -\item{\code{...}}{Further parameters.} +\item{\code{...}}{Further parameters to be passed to the basic predict function.} } \if{html}{\out{}} } diff --git a/man/Param.Rd b/man/Param.Rd deleted file mode 100644 index 2513eb9..0000000 --- a/man/Param.Rd +++ /dev/null @@ -1,97 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/Param.R -\name{Param} -\alias{Param} -\title{Class Param} -\description{ -Implements a parameter set. Objects from this class contain non-tunable parameters. -} -\seealso{ -\link{ParamLrner} -} -\section{Methods}{ -\subsection{Public methods}{ -\itemize{ -\item \href{#method-Param-new}{\code{Param$new()}} -\item \href{#method-Param-print}{\code{Param$print()}} -\item \href{#method-Param-getId}{\code{Param$getId()}} -\item \href{#method-Param-getParamList}{\code{Param$getParamList()}} -\item \href{#method-Param-clone}{\code{Param$clone()}} -} -} -\if{html}{\out{
}} -\if{html}{\out{}} -\if{latex}{\out{\hypertarget{method-Param-new}{}}} -\subsection{Method \code{new()}}{ -Initialize a default parameters list. -\subsection{Usage}{ -\if{html}{\out{
}}\preformatted{Param$new(id, param_list)}\if{html}{\out{
}} -} - -\subsection{Arguments}{ -\if{html}{\out{
}} -\describe{ -\item{\code{id}}{(\code{character(1)})\cr -The ID of current parameter object.} - -\item{\code{param_list}}{(\code{list(1)})\cr -List of parameters.} -} -\if{html}{\out{
}} -} -} -\if{html}{\out{
}} -\if{html}{\out{}} -\if{latex}{\out{\hypertarget{method-Param-print}{}}} -\subsection{Method \code{print()}}{ -Printer -\subsection{Usage}{ -\if{html}{\out{
}}\preformatted{Param$print(...)}\if{html}{\out{
}} -} - -\subsection{Arguments}{ -\if{html}{\out{
}} -\describe{ -\item{\code{...}}{(any) \cr} -} -\if{html}{\out{
}} -} -} -\if{html}{\out{
}} -\if{html}{\out{}} -\if{latex}{\out{\hypertarget{method-Param-getId}{}}} -\subsection{Method \code{getId()}}{ -Getter of parameter ID. -\subsection{Usage}{ -\if{html}{\out{
}}\preformatted{Param$getId()}\if{html}{\out{
}} -} - -} -\if{html}{\out{
}} -\if{html}{\out{}} -\if{latex}{\out{\hypertarget{method-Param-getParamList}{}}} -\subsection{Method \code{getParamList()}}{ -Getter of parameter list. -\subsection{Usage}{ -\if{html}{\out{
}}\preformatted{Param$getParamList()}\if{html}{\out{
}} -} - -} -\if{html}{\out{
}} -\if{html}{\out{}} -\if{latex}{\out{\hypertarget{method-Param-clone}{}}} -\subsection{Method \code{clone()}}{ -The objects of this class are cloneable with this method. -\subsection{Usage}{ -\if{html}{\out{
}}\preformatted{Param$clone(deep = FALSE)}\if{html}{\out{
}} -} - -\subsection{Arguments}{ -\if{html}{\out{
}} -\describe{ -\item{\code{deep}}{Whether to make a deep clone.} -} -\if{html}{\out{
}} -} -} -} diff --git a/man/ParamLrner.Rd b/man/ParamLrner.Rd deleted file mode 100644 index 811f6dd..0000000 --- a/man/ParamLrner.Rd +++ /dev/null @@ -1,110 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/ParamLrner.R -\name{ParamLrner} -\alias{ParamLrner} -\title{Class ParamLrner.} -\description{ -Implement the list of parameters to be passed to the \link{Lrner} object. -Non-tunable parameters and tunable paramters are stored in the object -from this class. -} -\section{Super class}{ -\code{\link[fuseMLR:Param]{fuseMLR::Param}} -> \code{ParamLrner} -} -\section{Methods}{ -\subsection{Public methods}{ -\itemize{ -\item \href{#method-ParamLrner-new}{\code{ParamLrner$new()}} -\item \href{#method-ParamLrner-print}{\code{ParamLrner$print()}} -\item \href{#method-ParamLrner-getParamLrner}{\code{ParamLrner$getParamLrner()}} -\item \href{#method-ParamLrner-getHyperparam}{\code{ParamLrner$getHyperparam()}} -\item \href{#method-ParamLrner-clone}{\code{ParamLrner$clone()}} -} -} -\if{html}{\out{ -
Inherited methods - -
-}} -\if{html}{\out{
}} -\if{html}{\out{}} -\if{latex}{\out{\hypertarget{method-ParamLrner-new}{}}} -\subsection{Method \code{new()}}{ -constructor -\subsection{Usage}{ -\if{html}{\out{
}}\preformatted{ParamLrner$new(id, param_list, hyperparam_list = list())}\if{html}{\out{
}} -} - -\subsection{Arguments}{ -\if{html}{\out{
}} -\describe{ -\item{\code{id}}{(\code{character(1)})\cr -See class Param} - -\item{\code{param_list}}{(\code{list(1)})\cr -See class Param} - -\item{\code{hyperparam_list}}{(\code{list(1)})\cr -List of hyperparameters. Default is an empty list.} -} -\if{html}{\out{
}} -} -} -\if{html}{\out{
}} -\if{html}{\out{}} -\if{latex}{\out{\hypertarget{method-ParamLrner-print}{}}} -\subsection{Method \code{print()}}{ -Printer -\subsection{Usage}{ -\if{html}{\out{
}}\preformatted{ParamLrner$print(...)}\if{html}{\out{
}} -} - -\subsection{Arguments}{ -\if{html}{\out{
}} -\describe{ -\item{\code{...}}{(any) \cr} -} -\if{html}{\out{
}} -} -} -\if{html}{\out{
}} -\if{html}{\out{}} -\if{latex}{\out{\hypertarget{method-ParamLrner-getParamLrner}{}}} -\subsection{Method \code{getParamLrner()}}{ -Getter of learner parameters. -\subsection{Usage}{ -\if{html}{\out{
}}\preformatted{ParamLrner$getParamLrner()}\if{html}{\out{
}} -} - -} -\if{html}{\out{
}} -\if{html}{\out{}} -\if{latex}{\out{\hypertarget{method-ParamLrner-getHyperparam}{}}} -\subsection{Method \code{getHyperparam()}}{ -Getter of hyperparameters. -\subsection{Usage}{ -\if{html}{\out{
}}\preformatted{ParamLrner$getHyperparam()}\if{html}{\out{
}} -} - -} -\if{html}{\out{
}} -\if{html}{\out{}} -\if{latex}{\out{\hypertarget{method-ParamLrner-clone}{}}} -\subsection{Method \code{clone()}}{ -The objects of this class are cloneable with this method. -\subsection{Usage}{ -\if{html}{\out{
}}\preformatted{ParamLrner$clone(deep = FALSE)}\if{html}{\out{
}} -} - -\subsection{Arguments}{ -\if{html}{\out{
}} -\describe{ -\item{\code{deep}}{Whether to make a deep clone.} -} -\if{html}{\out{
}} -} -} -} diff --git a/man/ParamVarSel.Rd b/man/ParamVarSel.Rd deleted file mode 100644 index 53398c2..0000000 --- a/man/ParamVarSel.Rd +++ /dev/null @@ -1,94 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/ParamVarSel.R -\name{ParamVarSel} -\alias{ParamVarSel} -\title{Class ParamVarSel.} -\description{ -Implement the list of parameters to be passed to the \link{VarSel} object. -} -\section{Super class}{ -\code{\link[fuseMLR:Param]{fuseMLR::Param}} -> \code{ParamVarSel} -} -\section{Methods}{ -\subsection{Public methods}{ -\itemize{ -\item \href{#method-ParamVarSel-new}{\code{ParamVarSel$new()}} -\item \href{#method-ParamVarSel-print}{\code{ParamVarSel$print()}} -\item \href{#method-ParamVarSel-getParamVarSel}{\code{ParamVarSel$getParamVarSel()}} -\item \href{#method-ParamVarSel-clone}{\code{ParamVarSel$clone()}} -} -} -\if{html}{\out{ -
Inherited methods - -
-}} -\if{html}{\out{
}} -\if{html}{\out{}} -\if{latex}{\out{\hypertarget{method-ParamVarSel-new}{}}} -\subsection{Method \code{new()}}{ -constructor -\subsection{Usage}{ -\if{html}{\out{
}}\preformatted{ParamVarSel$new(id, param_list)}\if{html}{\out{
}} -} - -\subsection{Arguments}{ -\if{html}{\out{
}} -\describe{ -\item{\code{id}}{(\code{character(1)})\cr -See class Param} - -\item{\code{param_list}}{(\code{list(1)})\cr -See class Param} -} -\if{html}{\out{
}} -} -} -\if{html}{\out{
}} -\if{html}{\out{}} -\if{latex}{\out{\hypertarget{method-ParamVarSel-print}{}}} -\subsection{Method \code{print()}}{ -Printer -\subsection{Usage}{ -\if{html}{\out{
}}\preformatted{ParamVarSel$print(...)}\if{html}{\out{
}} -} - -\subsection{Arguments}{ -\if{html}{\out{
}} -\describe{ -\item{\code{...}}{(any) \cr} -} -\if{html}{\out{
}} -} -} -\if{html}{\out{
}} -\if{html}{\out{}} -\if{latex}{\out{\hypertarget{method-ParamVarSel-getParamVarSel}{}}} -\subsection{Method \code{getParamVarSel()}}{ -Getter of learner parameters. -\subsection{Usage}{ -\if{html}{\out{
}}\preformatted{ParamVarSel$getParamVarSel()}\if{html}{\out{
}} -} - -} -\if{html}{\out{
}} -\if{html}{\out{}} -\if{latex}{\out{\hypertarget{method-ParamVarSel-clone}{}}} -\subsection{Method \code{clone()}}{ -The objects of this class are cloneable with this method. -\subsection{Usage}{ -\if{html}{\out{
}}\preformatted{ParamVarSel$clone(deep = FALSE)}\if{html}{\out{
}} -} - -\subsection{Arguments}{ -\if{html}{\out{
}} -\describe{ -\item{\code{deep}}{Whether to make a deep clone.} -} -\if{html}{\out{
}} -} -} -} diff --git a/man/PredictMetaLayer.Rd b/man/PredictMetaLayer.Rd index a98eb67..72db014 100644 --- a/man/PredictMetaLayer.Rd +++ b/man/PredictMetaLayer.Rd @@ -49,8 +49,7 @@ constructor \subsection{Arguments}{ \if{html}{\out{
}} \describe{ -\item{\code{id}}{(\code{character(1)})\cr -See class Param} +\item{\code{id}}{(\code{character(1)})\cr +Id of the predict meta-layer.} \item{\code{predicting}}{(\code{Predicting(1)})\cr} } diff --git a/man/Predicting.Rd index dc92677..a123c94 100644 --- a/man/Predicting.Rd +++ b/man/Predicting.Rd @@ -4,16 +4,15 @@ \alias{Predicting} \title{Predicting Class} \description{ -This class is the basic class of the present package. An object from this class -is designed to contain multiple layers, but only one meta layer. +This class is designed for predictions. The Predicting is structured as follows: \itemize{ -\item \link{PredictLayer}: Can be clinical, gene expression, etc. +\item \link{PredictLayer}: Exists for each modality. \itemize{ -\item \link{PredictData}: Specific to each layer, it must be set up by the user. +\item \link{PredictData}: Related class for modality-specific predictions. } -\item \link{PredictMetaLayer}: Basically a \link{PredictLayer}, but with some specific properties. +\item \link{PredictMetaLayer}: Related class for meta predictions. \itemize{ \item \link{PredictData}: Specific to the meta layer, it is set up internally after cross-validation. } @@ -66,7 +65,7 @@ constructor \if{html}{\out{
}} \describe{ \item{\code{id}}{(\code{character(1)})\cr -See class Param} +Predicting id.} \item{\code{ind_col}}{(\code{character(1L)}) Name of column of individuals IDS} diff --git a/man/Target.Rd b/man/Target.Rd index a6b52ed..b3f96cd 100644 --- a/man/Target.Rd +++ b/man/Target.Rd @@ -8,7 +8,7 @@ This class implements the target object. A \link{Target} object can only exist as a component of a \link{Training} object. } \seealso{ -\link{TrainLayer}, \link{Lrner}, \link{Model}, \link{ParamLrner}, \link{TestData} +\link{TrainLayer}, \link{Lrner}, \link{Model}, \link{TestData} } \section{Super class}{ \code{\link[fuseMLR:Data]{fuseMLR::Data}} -> \code{Target} diff --git a/man/TestLayer.Rd b/man/TestLayer.Rd index bc41ea1..246e4b5 100644 --- a/man/TestLayer.Rd +++ b/man/TestLayer.Rd @@ -52,7 +52,7 @@ constructor \if{html}{\out{
}} \describe{ \item{\code{id}}{(\code{character(1)})\cr -See class Param} +Testing layer id.} \item{\code{testing}}{(\code{Testing(1)})\cr} } diff --git a/man/TestMetaLayer.Rd b/man/TestMetaLayer.Rd index cf60bc1..4517dfa 100644 --- a/man/TestMetaLayer.Rd +++ b/man/TestMetaLayer.Rd @@ -51,7 +51,7 @@ constructor \if{html}{\out{
}} \describe{ \item{\code{id}}{(\code{character(1)})\cr -See class Param} +Testing meta-layer id.} \item{\code{testing}}{(\code{Testing(1)})\cr} } diff --git a/man/Testing.Rd b/man/Testing.Rd index 2f56b01..0c61a04 100644 --- a/man/Testing.Rd +++ b/man/Testing.Rd @@ -57,7 +57,7 @@ constructor \if{html}{\out{
}} \describe{ \item{\code{id}}{(\code{character(1)})\cr -See class Param} +Testing id.} \item{\code{ind_col}}{(\code{character(1)}) Name of column of individuals IDS} diff --git a/man/TrainData.Rd b/man/TrainData.Rd index 8f9c27f..0783c3a 100644 --- a/man/TrainData.Rd +++ b/man/TrainData.Rd @@ -8,7 +8,7 @@ This class implements the training data. A \link{TrainData} object can only exist as a component of a \link{TrainLayer} or a \link{TrainMetaLayer} object. } \seealso{ -\link{TrainLayer}, \link{Lrner}, \link{Model}, \link{ParamLrner}, \link{TestData} +\link{TrainLayer}, \link{Lrner}, \link{Model}, \link{TestData} } \section{Super class}{ \code{\link[fuseMLR:Data]{fuseMLR::Data}} -> \code{TrainData} diff --git a/man/TrainLayer.Rd b/man/TrainLayer.Rd index 14e5270..29af144 100644 --- a/man/TrainLayer.Rd +++ b/man/TrainLayer.Rd @@ -74,7 +74,7 @@ constructor \if{html}{\out{
}} \describe{ \item{\code{id}}{(\code{character(1)})\cr -See class Param} +Training layer id.} \item{\code{training}}{(\code{Training(1)})\cr} } diff --git a/man/TrainMetaLayer.Rd b/man/TrainMetaLayer.Rd index 01cfbf1..8b357a7 100644 --- a/man/TrainMetaLayer.Rd +++ b/man/TrainMetaLayer.Rd @@ -69,7 +69,7 @@ constructor \if{html}{\out{
}} \describe{ \item{\code{id}}{(\code{character(1)})\cr -See class Param} +Id of training meta-layer.} \item{\code{training}}{(\code{Training(1)})\cr} } diff --git a/man/VarSel.Rd b/man/VarSel.Rd index d5ae326..9ec50ed 100644 --- a/man/VarSel.Rd +++ b/man/VarSel.Rd @@ -28,7 +28,7 @@ Variable selection parameter list. Learner ID. \subsection{Usage}{ -\if{html}{\out{
}}\preformatted{VarSel$new(id, package = NULL, varsel_fct, param, train_layer)}\if{html}{\out{
}} +\if{html}{\out{
}}\preformatted{VarSel$new(id, package = NULL, varsel_fct, varsel_param, train_layer)}\if{html}{\out{
}} } \subsection{Arguments}{ @@ -45,7 +45,7 @@ Variable selection function name. Note: Variable selection functions, except \co \item{\code{varsel_fct}}{(\code{character(1)}) \cr Variable selection parameters.} -\item{\code{param}}{(\code{ParamVarSel(1)}) \cr +\item{\code{varsel_param}}{(\code{list(1)}) \cr Layer on which the learner is stored.} \item{\code{train_layer}}{(\code{TrainLayer(1)}) \cr diff --git a/tests/testthat/test-Lrner.R b/tests/testthat/test-Lrner.R index f5e6ecd..f4308fd 100644 --- a/tests/testthat/test-Lrner.R +++ b/tests/testthat/test-Lrner.R @@ -7,20 +7,15 @@ training <- Training$new(id = "training", tl_geneexpr <- TrainLayer$new(id = "geneexpr", training = training) -# Parameters -ranger_param_lrner <- ParamLrner$new(id = "ParamRanger", - param_list = list(seed = 421L), - hyperparam_list = list(probability = TRUE, - mtry = 1L, - num.trees = 1000L)) - # Learner expect_error({ lrner_geneexpr <- Lrner$new(id = "ranger", package = "ranger", lrn_fct = "ranger", - param = ranger_param_lrner, + param_train_list = list(probability = TRUE, + mtry = 1L, + num.trees = 1000L), train_layer = "not_a_train_layer") }) @@ -28,7 +23,9 @@ expect_error({ lrner_geneexpr <- Lrner$new(id = "ranger", package = "ranger", lrn_fct = "ranger", - param = ranger_param_lrner, + param_train_list = list(probability = TRUE, + mtry = 1L, + num.trees = 1000L), na_rm = "not_logical", train_layer = tl_geneexpr) }) @@ -36,7 +33,9 @@ expect_error({ lrner_geneexpr <- Lrner$new(id = "ranger", package = "ranger", lrn_fct = "ranger", - param = ranger_param_lrner, + param_train_list = list(probability = TRUE, + mtry = 1L, + num.trees = 1000L), train_layer = tl_geneexpr) expect_no_error({ @@ -45,7 +44,9 @@ expect_no_error({ lrner_geneexpr <- Lrner$new(id = "ranger", package = "ranger", lrn_fct = "ranger", - param = ranger_param_lrner, + param_train_list = list(probability = TRUE, + mtry = 1L, + num.trees = 1000L), train_layer = tl_geneexpr) }) diff --git a/tests/testthat/test-Model.R b/tests/testthat/test-Model.R index 141fd80..5d3ee00 100644 --- a/tests/testthat/test-Model.R +++ b/tests/testthat/test-Model.R @@ -6,14 +6,12 @@ test_that("Model: all tests", { target = "disease", target_df = entities$training$target) tl_ge <- TrainLayer$new(id = "geneexpr", training = training) - same_param <- ParamLrner$new(id = "ParamRanger", - param_list = list(probability = TRUE, - mtry = 2L), - hyperparam_list = list(num.trees = 10L)) lrner_ge <- Lrner$new(id = "ranger", package = "ranger", lrn_fct = "ranger", - param = same_param, + param_train_list = list(probability = TRUE, + mtry = 2L, + num.trees = 10L), train_layer = tl_ge) train_data_ge <- TrainData$new(id = "geneexpr", train_layer = tl_ge, diff --git a/tests/testthat/test-Param.R b/tests/testthat/test-Param.R deleted file mode 100644 index 327efe3..0000000 --- a/tests/testthat/test-Param.R +++ /dev/null @@ -1,20 +0,0 @@ -ranger_param <- Param$new(id = "test_param", - param_list = list(probability = TRUE, - mtry = 1)) -test_that("Param: instantialize and print", { - expect_true(R6::is.R6(ranger_param)) - expect_equal(class(ranger_param)[1], "Param") - print(ranger_param) -}) - -test_that("Param: getId", { - expect_no_error({ - ranger_param$getId() - }) -}) - -test_that("Param: getParamList", { - expect_no_error({ - ranger_param$getParamList() - }) -}) diff --git a/tests/testthat/test-ParamLrner.R b/tests/testthat/test-ParamLrner.R deleted file mode 100644 index a62eae6..0000000 --- a/tests/testthat/test-ParamLrner.R +++ /dev/null @@ -1,18 +0,0 @@ -ranger_param_lrner <- 
ParamLrner$new(id = "ParamRanger", - param_list = list(seed = 421L), - hyperparam_list = list(probability = TRUE, - mtry = 1L, - num.trees = 1000L)) -test_that("ParamLrner: initialize and print", { - expect_true(R6::is.R6(ranger_param_lrner)) - expect_equal(class(ranger_param_lrner)[1], "ParamLrner") - print(ranger_param_lrner) -}) - -test_that("ParamLrner: getParamLrner", { - expect_no_error(ranger_param_lrner$getParamLrner()) -}) - -test_that("ParamLrner: getHyperparam", { - expect_no_error(ranger_param_lrner$getHyperparam()) -}) diff --git a/tests/testthat/test-ParamVarSel.R b/tests/testthat/test-ParamVarSel.R deleted file mode 100644 index b390993..0000000 --- a/tests/testthat/test-ParamVarSel.R +++ /dev/null @@ -1,10 +0,0 @@ -test_that("ParamVarSel: initialize and print", { - param_varsel <- ParamVarSel$new(id = "ParamVarSel", - param_list = list(num.trees = 1000L, - mtry = 3L)) - expect_true(R6::is.R6(param_varsel)) - expect_equal(class(param_varsel)[1], "ParamVarSel") - print(param_varsel) - expect_no_error(param_varsel$getParamVarSel()) -}) - diff --git a/tests/testthat/test-TrainLayer.R b/tests/testthat/test-TrainLayer.R index 8da5381..3a0046a 100644 --- a/tests/testthat/test-TrainLayer.R +++ b/tests/testthat/test-TrainLayer.R @@ -23,14 +23,12 @@ test_that("TrainLayer: all tests", { expect_error({ tl_ge$getLrner() }) - same_param <- ParamLrner$new(id = "ParamRanger", - param_list = list(probability = TRUE, - mtry = 2L), - hyperparam_list = list(num.trees = 10L)) lrner_ge <- Lrner$new(id = "ranger", package = "ranger", lrn_fct = "ranger", - param = same_param, + param_train_list = list(probability = TRUE, + mtry = 2L, + num.trees = 10L), train_layer = tl_ge) expect_no_error({ tl_ge$getLrner() @@ -42,13 +40,11 @@ test_that("TrainLayer: all tests", { expect_warning({tl_ge$varSelection()}) expect_error({tl_ge$getVarSel()}) expect_error({tl_ge$getPredictions()}) - same_param_varsel <- ParamVarSel$new(id = "ParamVarSel", - param_list = list(num.trees = 50L, - mtry = 3L)) varsel_ge <- VarSel$new(id = "varsel_geneexpr", package = "Boruta", varsel_fct = "Boruta", - param = same_param_varsel, + varsel_param = list(num.trees = 50L, + mtry = 3L), train_layer = tl_ge) # Must fail because of not existing training data expect_error({tl_ge$varSelection()}) diff --git a/tests/testthat/test-TrainMetaLayer.R b/tests/testthat/test-TrainMetaLayer.R index 62bb11b..36e01f2 100644 --- a/tests/testthat/test-TrainMetaLayer.R +++ b/tests/testthat/test-TrainMetaLayer.R @@ -22,9 +22,7 @@ test_that("TrainLayer: all tests", { }) lrner_meta <- Lrner$new(id = "weighted", lrn_fct = "weightedMeanLearner", - param = ParamLrner$new(id = "ParamWeighted", - param_list = list(), - hyperparam_list = list()), + param_train_list = list(), train_layer = tl_meta) # ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ # Tests for training empty meta layer + @@ -33,15 +31,13 @@ test_that("TrainLayer: all tests", { # ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ # Tests for training empty not meta layer + # ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - same_param <- ParamLrner$new(id = "ParamRanger", - param_list = list(probability = TRUE, - mtry = 2L), - hyperparam_list = list(num.trees = 10L)) tl_ge <- TrainLayer$new(id = "geneexpr", training = training) lrner_ge <- Lrner$new(id = "ranger", package = "ranger", lrn_fct = "ranger", - param = same_param, + param_train_list = list(probability = TRUE, + mtry = 2L, + num.trees = 10L), train_layer = tl_ge) train_data_ge <- TrainData$new(id = 
"geneexpr", train_layer = tl_ge, diff --git a/tests/testthat/test-Training.R b/tests/testthat/test-Training.R index 2709073..8e41a09 100644 --- a/tests/testthat/test-Training.R +++ b/tests/testthat/test-Training.R @@ -139,13 +139,11 @@ test_that("Training: all tests", { # ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ # Variable selection works expect_warning({ - same_param_varsel <- ParamVarSel$new(id = "ParamVarSel", - param_list = list(num.trees = 50L, - mtry = 3L)) varsel_ge <- VarSel$new(id = "varsel_geneexpr", package = "Boruta", varsel_fct = "Boruta", - param = same_param_varsel, + varsel_param = list(num.trees = 50L, + mtry = 3L), train_layer = tl_ge) # varsel_pr <- VarSel$new(id = "varsel_proteinexpr", @@ -161,29 +159,23 @@ test_that("Training: all tests", { # Tests for training with loaded learners. + # ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ # Lrner parameters - expect_no_error({ - same_param <- ParamLrner$new(id = "ParamRanger", - param_list = list(probability = TRUE, - mtry = 2L), - hyperparam_list = list(num.trees = 25L)) - }) # Lrner expect_no_error({ lrner_ge <- Lrner$new(id = "ranger", package = "ranger", lrn_fct = "ranger", - param = same_param, + param_train_list = list(probability = TRUE, + mtry = 2L, num.trees = 25L), train_layer = tl_ge) lrner_pr <- Lrner$new(id = "ranger", package = "ranger", lrn_fct = "ranger", - param = same_param, + param_train_list = list(probability = TRUE, + mtry = 2L, num.trees = 25L), train_layer = tl_pr) lrner_meta <- Lrner$new(id = "weighted", lrn_fct = "weightedMeanLearner", - param = ParamLrner$new(id = "ParamWeighted", - param_list = list(), - hyperparam_list = list()), + param_train_list = list(), na_rm = FALSE, train_layer = tl_meta) }) @@ -192,10 +184,6 @@ test_that("Training: all tests", { # Tests for training with for training. 
+ # ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ expect_no_error({ - same_param <- ParamLrner$new(id = "ParamRanger", - param_list = list(probability = TRUE, - mtry = 1L), - hyperparam_list = list(num.trees = 10L)) disease <- training$getTargetValues()$disease }) @@ -212,11 +200,6 @@ test_that("Training: all tests", { expect_equal(length(three_warning), 3L) expect_error({ - same_param <- ParamLrner$new(id = "ParamRanger", - param_list = list(probability = TRUE, - mtry = 2L), - hyperparam_list = list(num.trees = 10L)) - disease <- training$getTargetValues()$disease trained <- training$train(resampling_method = "stats::rnorm", resampling_arg = list(n = 10L), diff --git a/tests/testthat/test-VarSel.R b/tests/testthat/test-VarSel.R index fe13ec8..1ec1ca7 100644 --- a/tests/testthat/test-VarSel.R +++ b/tests/testthat/test-VarSel.R @@ -7,21 +7,20 @@ test_that("VarSel: with package for variable selection", { train_data_ge <- TrainData$new(id = "geneexpr", train_layer = tl_ge, data_frame = entities$training$geneexpr) - same_param_varsel <- ParamVarSel$new(id = "ParamVarSel", - param_list = list(num.trees = 50L, - mtry = 3L)) expect_error({ var_sel <- VarSel$new(id = "varsel_geneexpr", package = "Boruta", varsel_fct = "Boruta", - param = same_param_varsel, + varsel_param = list(num.trees = 50L, + mtry = 3L), train_layer = "not_a_Train_layer") }) expect_no_error({ varsel_ge <- VarSel$new(id = "varsel_geneexpr", package = "Boruta", varsel_fct = "Boruta", - param = same_param_varsel, + varsel_param = list(num.trees = 50L, + mtry = 3L), train_layer = tl_ge) print(varsel_ge) }) @@ -36,7 +35,8 @@ test_that("VarSel: with package for variable selection", { varsel_ge <- VarSel$new(id = "varsel_geneexpr", package = "Boruta", varsel_fct = "Boruta", - param = same_param_varsel, + varsel_param = list(num.trees = 50L, + mtry = 3L), train_layer = tl_ge) }) expect_no_error({ @@ -51,14 +51,12 @@ test_that("VarSel: with function for variable selection", { target = "disease", target_df = entities$training$target) tl_ge <- TrainLayer$new(id = "geneexpr", training = training) - same_param_varsel <- ParamVarSel$new(id = "ParamVarSel", - param_list = list()) train_data_ge <- TrainData$new(id = "geneexpr", train_layer = tl_ge, data_frame = entities$training$geneexpr) var_sel <- VarSel$new(id = "varsel_geneexpr", varsel_fct = "test_var_sel", - param = same_param_varsel, + varsel_param = list(), train_layer = tl_ge) expect_error({ #TODO: check why it only works in interactiv mode