diff --git a/tests/testthat.R b/tests/testthat.R index b61ac4d..7066df2 100644 --- a/tests/testthat.R +++ b/tests/testthat.R @@ -2,7 +2,7 @@ library(testthat) library(tensorflow) library(kerastuneR) -library(keras) +library(keras3) test_check("kerastuneR") diff --git a/tests/testthat/test-BayesOptim.R b/tests/testthat/test-BayesOptim.R index dc4e674..4615987 100644 --- a/tests/testthat/test-BayesOptim.R +++ b/tests/testthat/test-BayesOptim.R @@ -4,7 +4,6 @@ source("utils.R") test_succeeds("Can run Bayesian Optimization", { - library(keras) library(tensorflow) library(dplyr) library(tfdatasets) @@ -118,7 +117,7 @@ test_succeeds("Can run Bayesian Optimization", { mnist_train$x = tf$cast(mnist_train$x, 'float32') / 255 - mnist_train$x = k_reshape(mnist_train$x,shape = c(6e4,28,28,1)) + mnist_train$x = tf$reshape(mnist_train$x,shape = c(6e4L,28L,28L,1L)) mnist_train$y = tf$dtypes$cast(mnist_train$y, 'float32') if (!Sys.info()[1] %in% 'Windows') { diff --git a/tests/testthat/test-MNIST-conv.R b/tests/testthat/test-MNIST-conv.R index 270f24e..a41dfdf 100644 --- a/tests/testthat/test-MNIST-conv.R +++ b/tests/testthat/test-MNIST-conv.R @@ -3,44 +3,50 @@ context("build(hp) - MNIST") source("utils.R") test_succeeds("Can run hyper_class", { - library(keras) library(dplyr) library(tfdatasets) library(kerastuneR) - conv_build_model = function(hp) { - 'Builds a convolutional model.' 
- inputs = tf$keras$Input(shape=list(28L, 28L, 1L)) + conv_build_model <- function(hp) { + inputs <- tf$keras$Input(shape = list(28L, 28L, 1L)) + x <- inputs - x = inputs - - for (i in 1:hp$Int('conv_layers', 1L, 3L, default=3L)) { - x = tf$keras$layers$Conv2D(filters = hp$Int(paste('filters_', i, sep = ''), 4L, 32L, step=4L, default=8L), - kernel_size = hp$Int(paste('kernel_size_', i, sep = ''), 3L, 5L), - activation ='relu', - padding='same')(x) - if (hp$Choice(paste('pooling', i, sep = ''), c('max', 'avg')) == 'max') { - x = tf$keras$layers$MaxPooling2D()(x) + for (i in 1:hp$Int('conv_layers', 1L, 3L, default = 3L)) { + x <- tf$keras$layers$Conv2D( + filters = hp$Int(paste('filters_', i, sep = ''), 4L, 32L, step = 4L, default = 8L), + kernel_size = hp$Int(paste('kernel_size_', i, sep = ''), 3L, 5L), + activation = 'relu', + padding = 'same' + )(x) + + pool_type <- hp$Choice(paste('pooling', i, sep = ''), c('max', 'avg')) + if (pool_type == 'max') { + x <- tf$keras$layers$MaxPooling2D(pool_size = c(2L, 2L))(x) # explicit pool_size (matches the Keras default) } else { - x = tf$keras$layers$AveragePooling2D()(x) + x <- tf$keras$layers$AveragePooling2D(pool_size = c(2L, 2L))(x) # explicit pool_size (matches the Keras default) } - x = tf$keras$layers$BatchNormalization()(x) - x = tf$keras$layers$ReLU()(x) + x <- tf$keras$layers$BatchNormalization()(x) + x <- tf$keras$layers$ReLU()(x) } - if (hp$Choice('global_pooling', c('max', 'avg')) == 'max') { - x = tf$keras$layers$GlobalMaxPool2D()(x) + + global_pooling_type <- hp$Choice('global_pooling', c('max', 'avg')) + if (global_pooling_type == 'max') { + x <- tf$keras$layers$GlobalMaxPool2D()(x) } else { - x = tf$keras$layers$GlobalAveragePooling2D()(x) + x <- tf$keras$layers$GlobalAveragePooling2D()(x) } - outputs = tf$keras$layers$Dense(10L, activation='softmax')(x) - model = tf$keras$Model(inputs, outputs) - optimizer = hp$Choice('optimizer', c('adam', 'sgd')) - model %>% compile(optimizer, loss='sparse_categorical_crossentropy', metrics='accuracy') + outputs <- 
tf$keras$layers$Dense(10L, activation = 'softmax')(x) + model <- tf$keras$Model(inputs, outputs) + + optimizer <- hp$Choice('optimizer', c('adam', 'sgd')) + model %>% compile(optimizer, loss = 'sparse_categorical_crossentropy', metrics = 'accuracy') + return(model) } + testthat::expect_length(class(Hyperband( hypermodel = conv_build_model, objective='val_accuracy', @@ -51,14 +57,6 @@ test_succeeds("Can run hyper_class", { project_name = 'mnist')),5) - testthat::expect_match(Hyperband( - hypermodel = conv_build_model, - objective = 'val_accuracy', - max_epochs = 1, - factor = 2, - hyperband_iterations = 1, - directory = 'results_dir', - project_name = 'mnist') %>% capture.output(), 'keras_tuner.tuners.hyperband.Hyperband') main = function() { tuner = Hyperband( @@ -74,8 +72,8 @@ test_succeeds("Can run hyper_class", { c(mnist_train, mnist_test) %<-% mnist_data rm(mnist_data) - mnist_train$x = k_reshape(mnist_train$x,shape = c(6e4,28,28,1)) - mnist_test$x = k_reshape(mnist_test$x,shape = c(1e4,28,28,1)) + mnist_train$x = tf$reshape(mnist_train$x,shape = c(6e4L,28L,28L,1L)) + mnist_test$x = tf$reshape(mnist_test$x,shape = c(1e4L,28L,28L,1L)) mnist_train = tensor_slices_dataset(list(tf$dtypes$cast(mnist_train$x, 'float32') / 255., mnist_train$y)) %>% dataset_shuffle(1e3) %>% dataset_batch(1e2) %>% dataset_repeat() @@ -88,8 +86,7 @@ test_succeeds("Can run hyper_class", { steps_per_epoch=600, validation_data=mnist_test, validation_steps=100, - epochs=1, - callbacks=c(tf$keras$callbacks$EarlyStopping('val_accuracy')) + epochs=1 ) } }) diff --git a/tests/testthat/test-build.R b/tests/testthat/test-build.R index 8ee1e6f..8db7552 100644 --- a/tests/testthat/test-build.R +++ b/tests/testthat/test-build.R @@ -3,7 +3,6 @@ context("build(hp)") source("utils.R") test_succeeds("Can run build(hp) and plot_tuner()", { - library(keras) library(tensorflow) library(dplyr) library(kerastuneR) @@ -75,7 +74,7 @@ test_succeeds("Can run build(hp) and plot_tuner()", { best_model = 
tuner2$hypermodel$build(best_hps) # Train the best model - best_model %>% fit(X_train, y_train, epochs=50, validation_split=0.2) + best_model %>% fit(X_train, y_train, epochs=5, validation_split=0.2) } }) diff --git a/tests/testthat/test-hp-space.R b/tests/testthat/test-hp-space.R index 85752dc..f1d363d 100644 --- a/tests/testthat/test-hp-space.R +++ b/tests/testthat/test-hp-space.R @@ -3,7 +3,7 @@ context('hp space') source("utils.R") test_succeeds("Can run hp-space", { - library(keras) + library(keras3) library(dplyr) library(kerastuneR) @@ -22,8 +22,6 @@ test_succeeds("Can run hp-space", { hp$Choice('learning_rate',values =c(1e-1, 1e-3)) hp$Int('num_layers', 2L, 20L) - testthat::expect_match(capture.output(hp),'keras_tuner.engine.hyperparameters.hyperparameters.HyperParameters') - mnist_model = function(hp) { diff --git a/tests/testthat/test-hyper_class.R b/tests/testthat/test-hyper_class.R index bdb6027..cc2548d 100644 --- a/tests/testthat/test-hyper_class.R +++ b/tests/testthat/test-hyper_class.R @@ -3,7 +3,6 @@ context("build(hp) - Hyperclass") source("utils.R") test_succeeds("Can run hyper_class", { - library(keras) library(tensorflow) library(dplyr) library(kerastuneR)