BREAKING CHANGE: remove internal search space argument (#488)
sebffischer authored Jan 22, 2025
1 parent 535c418 commit b4d2504
Showing 23 changed files with 30 additions and 307 deletions.
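Migration note (illustrative, not part of the commit itself): after this change, internally tuned hyperparameters are declared inside the regular search space instead of being passed via the removed `internal_search_space` argument. The sketch below shows the new style under stated assumptions: paradox's `to_tune(internal = TRUE)` tagging, mlr3's `set_validate()`, and an XGBoost learner from mlr3learners.

library(mlr3)
library(mlr3learners)
library(mlr3tuning)

# The internally tuned parameter (early stopping rounds) is tagged directly in
# to_tune(); no separate internal_search_space argument is passed anywhere.
learner = lrn("classif.xgboost",
  eta = to_tune(1e-3, 0.3, logscale = TRUE),
  nrounds = to_tune(upper = 500, internal = TRUE),
  early_stopping_rounds = 10
)
set_validate(learner, "test")  # early stopping needs validation data

at = auto_tuner(
  tuner = tnr("random_search"),
  learner = learner,
  resampling = rsmp("cv", folds = 3),
  measure = msr("classif.ce"),
  term_evals = 20
)
at$train(tsk("sonar"))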
6 changes: 2 additions & 4 deletions R/AutoTuner.R
@@ -39,7 +39,6 @@
 #' @template param_measure
 #' @template param_terminator
 #' @template param_search_space
-#' @template param_internal_search_space
 #' @template param_store_tuning_instance
 #' @template param_store_benchmark_result
 #' @template param_store_models
@@ -132,7 +131,6 @@ AutoTuner = R6Class("AutoTuner",
   measure = NULL,
   terminator,
   search_space = NULL,
-  internal_search_space = NULL,
   store_tuning_instance = TRUE,
   store_benchmark_result = TRUE,
   store_models = FALSE,
@@ -147,13 +145,13 @@ AutoTuner = R6Class("AutoTuner",
   stop("If the values of the ParamSet of the Learner contain TuneTokens you cannot supply a search_space.")
 }
 
+
 ia = list()
 self$tuner = assert_tuner(tuner)
 ia$learner = learner
 ia$resampling = assert_resampling(resampling)$clone()
 if (!is.null(measure)) ia$measure = assert_measure(as_measure(measure), learner = learner)
-if (!is.null(search_space)) ia$search_space = assert_param_set(as_search_space(search_space))$clone()
-if (!is.null(internal_search_space)) ia$internal_search_space = assert_param_set(as_search_space(internal_search_space))$clone()
+if (!is.null(search_space)) ia$search_space = search_space
 ia$terminator = assert_terminator(terminator)$clone()
 
 ia$store_models = assert_flag(store_models)
39 changes: 3 additions & 36 deletions R/TuningInstanceAsyncMulticrit.R
@@ -16,7 +16,6 @@
 #' @template param_measures
 #' @template param_terminator
 #' @template param_search_space
-#' @template param_internal_search_space
 #' @template param_store_benchmark_result
 #' @template param_store_models
 #' @template param_check_values
@@ -34,7 +33,6 @@
 TuningInstanceAsyncMultiCrit = R6Class("TuningInstanceAsyncMultiCrit",
   inherit = OptimInstanceAsyncMultiCrit,
   public = list(
-
     internal_search_space = NULL,
 
     #' @description
@@ -46,7 +44,6 @@ TuningInstanceAsyncMultiCrit = R6Class("TuningInstanceAsyncMultiCrit",
   measures,
   terminator,
   search_space = NULL,
-  internal_search_space = NULL,
   store_benchmark_result = TRUE,
   store_models = FALSE,
   check_values = FALSE,
@@ -71,39 +68,9 @@ TuningInstanceAsyncMultiCrit = R6Class("TuningInstanceAsyncMultiCrit",
   as_search_space(search_space)
 }
 
-# get ids of primary and internal hyperparameters
-sids = search_space$ids()
-internal_tune_ids = search_space$ids(any_tags = "internal_tuning")
-
-# get internal search space
-self$internal_search_space = if (is.null(internal_search_space)) {
-  # We DO NOT subset the search space because there we might keep an extra_trafo which is not allowed
-  # for the internal tuning search space
-  if (length(internal_tune_ids)) {
-    if (search_space_from_tokens) {
-      learner$param_set$subset(internal_tune_ids)$search_space()
-    } else {
-      search_space$subset(internal_tune_ids)
-    }
-  }
-} else {
-  if (length(internal_tune_ids)) {
-    stopf("Either tag parameters in the `search_space` with 'internal_tuning' OR provide an `internal_search_space`.")
-  }
-  as_search_space(internal_search_space)
-}
-
-# subset search space to primary hyperparameters
-if (length(internal_tune_ids)) {
-  search_space = search_space$subset(setdiff(sids, internal_tune_ids))
-}
-
-
-if (!is.null(self$internal_search_space) && self$internal_search_space$has_trafo) {
-  stopf("Internal tuning and parameter transformations are currently not supported.
-    If you manually provided a search space that has a trafo and parameters tagged with 'internal_tuning',
-    please pass the latter separately via the argument `internal_search_space`.")
-}
+tmp = split_internal_search_space(search_space)
+search_space = tmp$search_space
+self$internal_search_space = tmp$internal_search_space
 
 # set internal search space
 if (!is.null(self$internal_search_space)) {
30 changes: 3 additions & 27 deletions R/TuningInstanceAsyncSingleCrit.R
@@ -25,7 +25,6 @@
 #' @template param_measure
 #' @template param_terminator
 #' @template param_search_space
-#' @template param_internal_search_space
 #' @template param_store_benchmark_result
 #' @template param_store_models
 #' @template param_check_values
@@ -56,7 +55,6 @@ TuningInstanceAsyncSingleCrit = R6Class("TuningInstanceAsyncSingleCrit",
   measure = NULL,
   terminator,
   search_space = NULL,
-  internal_search_space = NULL,
   store_benchmark_result = TRUE,
   store_models = FALSE,
   check_values = FALSE,
@@ -82,31 +80,9 @@ TuningInstanceAsyncSingleCrit = R6Class("TuningInstanceAsyncSingleCrit",
 }
 
 # get ids of primary and internal hyperparameters
-sids = search_space$ids()
-internal_tune_ids = search_space$ids(any_tags = "internal_tuning")
-
-# get internal search space
-self$internal_search_space = if (is.null(internal_search_space)) {
-  # We DO NOT subset the search space because there we might keep an extra_trafo which is not allowed
-  # for the internal tuning search space
-  if (length(internal_tune_ids)) {
-    if (search_space_from_tokens) {
-      learner$param_set$subset(internal_tune_ids)$search_space()
-    } else {
-      search_space$subset(internal_tune_ids)
-    }
-  }
-} else {
-  if (length(internal_tune_ids)) {
-    stopf("Either tag parameters in the `search_space` with 'internal_tuning' OR provide an `internal_search_space`.")
-  }
-  as_search_space(internal_search_space)
-}
-
-# subset search space to primary hyperparameters
-if (length(internal_tune_ids)) {
-  search_space = search_space$subset(setdiff(sids, internal_tune_ids))
-}
+tmp = split_internal_search_space(search_space)
+search_space = tmp$search_space
+self$internal_search_space = tmp$internal_search_space
 
 if (!is.null(self$internal_search_space) && self$internal_search_space$has_trafo) {
   stopf("Internal tuning and parameter transformations are currently not supported.
36 changes: 3 additions & 33 deletions R/TuningInstanceBatchMulticrit.R
@@ -17,7 +17,6 @@
 #' @template param_measures
 #' @template param_terminator
 #' @template param_search_space
-#' @template param_internal_search_space
 #' @template param_store_benchmark_result
 #' @template param_store_models
 #' @template param_check_values
@@ -77,7 +76,6 @@ TuningInstanceBatchMultiCrit = R6Class("TuningInstanceBatchMultiCrit",
   measures,
   terminator,
   search_space = NULL,
-  internal_search_space = NULL,
   store_benchmark_result = TRUE,
   store_models = FALSE,
   check_values = FALSE,
@@ -101,37 +99,9 @@ TuningInstanceBatchMultiCrit = R6Class("TuningInstanceBatchMultiCrit",
 }
 
 # get ids of primary and internal hyperparameters
-sids = search_space$ids()
-internal_tune_ids = search_space$ids(any_tags = "internal_tuning")
-
-# get internal search space
-self$internal_search_space = if (is.null(internal_search_space)) {
-  # We DO NOT subset the search space because there we might keep an extra_trafo which is not allowed
-  # for the internal tuning search space
-  if (length(internal_tune_ids)) {
-    if (search_space_from_tokens) {
-      learner$param_set$subset(internal_tune_ids)$search_space()
-    } else {
-      search_space$subset(internal_tune_ids)
-    }
-  }
-} else {
-  if (length(internal_tune_ids)) {
-    stopf("Either tag parameters in the `search_space` with 'internal_tuning' OR provide an `internal_search_space`.")
-  }
-  as_search_space(internal_search_space)
-}
-
-# subset search space to primary hyperparameters
-if (length(internal_tune_ids)) {
-  search_space = search_space$subset(setdiff(sids, internal_tune_ids))
-}
-
-if (!is.null(self$internal_search_space) && self$internal_search_space$has_trafo) {
-  stopf("Internal tuning and parameter transformations are currently not supported.
-    If you manually provided a search space that has a trafo and parameters tagged with 'internal_tuning',
-    please pass the latter separately via the argument `internal_search_space`.")
-}
+tmp = split_internal_search_space(search_space)
+search_space = tmp$search_space
+self$internal_search_space = tmp$internal_search_space
 
 # set internal search space
 if (!is.null(self$internal_search_space)) {
38 changes: 3 additions & 35 deletions R/TuningInstanceBatchSingleCrit.R
@@ -65,7 +65,6 @@
 #' @template param_measure
 #' @template param_terminator
 #' @template param_search_space
-#' @template param_internal_search_space
 #' @template param_store_benchmark_result
 #' @template param_store_models
 #' @template param_check_values
@@ -127,7 +126,6 @@ TuningInstanceBatchSingleCrit = R6Class("TuningInstanceBatchSingleCrit",
   measure = NULL,
   terminator,
   search_space = NULL,
-  internal_search_space = NULL,
   store_benchmark_result = TRUE,
   store_models = FALSE,
   check_values = FALSE,
@@ -150,40 +148,10 @@ TuningInstanceBatchSingleCrit = R6Class("TuningInstanceBatchSingleCrit",
   as_search_space(search_space)
 }
 
-# get ids of primary and internal hyperparameters
-sids = search_space$ids()
-internal_tune_ids = search_space$ids(any_tags = "internal_tuning")
+tmp = split_internal_search_space(search_space)
+search_space = tmp$search_space
+self$internal_search_space = tmp$internal_search_space
-
-# get internal search space
-self$internal_search_space = if (is.null(internal_search_space)) {
-  # We DO NOT subset the search space because there we might keep an extra_trafo which is not allowed
-  # for the internal tuning search space
-  if (length(internal_tune_ids)) {
-    if (search_space_from_tokens) {
-      learner$param_set$subset(internal_tune_ids)$search_space()
-    } else {
-      search_space$subset(internal_tune_ids)
-    }
-  }
-} else {
-  if (length(internal_tune_ids)) {
-    stopf("Either tag parameters in the `search_space` with 'internal_tuning' OR provide an `internal_search_space`.")
-  }
-  as_search_space(internal_search_space)
-}
-
-# subset search space to primary hyperparameters
-if (length(internal_tune_ids)) {
-  search_space = search_space$subset(setdiff(sids, internal_tune_ids))
-}
-
-if (!is.null(self$internal_search_space) && self$internal_search_space$has_trafo) {
-  stopf("Internal tuning and parameter transformations are currently not supported.
-    If you manually provided a search space that has a trafo and parameters tagged with 'internal_tuning',
-    please pass the latter separately via the argument `internal_search_space`.")
-}
-
 # set internal search space
 if (!is.null(self$internal_search_space)) {
   # the learner dictates how to interpret the to_tune(..., inner)
   learner$param_set$set_values(.values = learner$param_set$convert_internal_search_space(self$internal_search_space))
3 changes: 0 additions & 3 deletions R/auto_tuner.R
@@ -16,7 +16,6 @@
 #' @template param_term_time
 #' @template param_terminator
 #' @template param_search_space
-#' @template param_internal_search_space
 #' @template param_store_tuning_instance
 #' @template param_store_benchmark_result
 #' @template param_store_models
@@ -44,7 +43,6 @@ auto_tuner = function(
   term_time = NULL,
   terminator = NULL,
   search_space = NULL,
-  internal_search_space = NULL,
   store_tuning_instance = TRUE,
   store_benchmark_result = TRUE,
   store_models = FALSE,
@@ -62,7 +60,6 @@ auto_tuner = function(
   measure = measure,
   terminator = terminator,
   search_space = search_space,
-  internal_search_space = internal_search_space,
   store_tuning_instance = store_tuning_instance,
   store_benchmark_result = store_benchmark_result,
   store_models = store_models,
11 changes: 11 additions & 0 deletions R/helper.R
@@ -27,3 +27,14 @@ extract_inner_tuned_values = function(resample_result, internal_search_space) {
   internal_tuned_values = transpose_list(map(get_private(resample_result)$.data$learner_states(get_private(resample_result)$.view), "internal_tuned_values"))
   internal_search_space$aggr_internal_tuned_values(internal_tuned_values)
 }
+
+
+split_internal_search_space = function(search_space) {
+  internal_tune_ids = search_space$ids(any_tags = "internal_tuning")
+  if (length(internal_tune_ids)) {
+    internal_search_space = search_space$subset(internal_tune_ids)
+    search_space = search_space$subset(setdiff(search_space$ids(), internal_tune_ids))
+    return(list(search_space = search_space, internal_search_space = internal_search_space))
+  }
+  list(search_space = search_space, internal_search_space = NULL)
+}
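For illustration, a hypothetical use of the new helper (assuming paradox's `ps()` sugar; the parameter names are made up): it partitions a search space on the 'internal_tuning' tag and returns NULL for the internal part when nothing is tagged.

library(paradox)

# One regular and one internally tuned hyperparameter; `aggr` describes how
# internally tuned values are aggregated across resampling iterations.
search_space = ps(
  cost = p_dbl(1e-4, 1e4, logscale = TRUE),
  nrounds = p_int(16, 500, tags = "internal_tuning",
    aggr = function(x) as.integer(mean(unlist(x))))
)

parts = split_internal_search_space(search_space)
parts$search_space$ids()           # "cost"
parts$internal_search_space$ids()  # "nrounds"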
6 changes: 0 additions & 6 deletions R/sugar.R
@@ -44,7 +44,6 @@ tnrs = function(.keys, ...) {
 #' @template param_resampling
 #' @template param_terminator
 #' @template param_search_space
-#' @template param_internal_search_space
 #' @template param_store_benchmark_result
 #' @template param_store_models
 #' @template param_check_values
@@ -62,7 +61,6 @@ ti = function(
   measures = NULL,
   terminator,
   search_space = NULL,
-  internal_search_space = NULL,
   store_benchmark_result = TRUE,
   store_models = FALSE,
   check_values = FALSE,
@@ -77,7 +75,6 @@ ti = function(
   measures,
   terminator = terminator,
   search_space = search_space,
-  internal_search_space = internal_search_space,
   store_benchmark_result = store_benchmark_result,
   store_models = store_models,
   check_values = check_values,
@@ -98,7 +95,6 @@ ti = function(
#' @template param_resampling
#' @template param_terminator
#' @template param_search_space
#' @template param_internal_search_space
#' @template param_store_benchmark_result
#' @template param_store_models
#' @template param_check_values
@@ -117,7 +113,6 @@ ti_async = function(
   measures = NULL,
   terminator,
   search_space = NULL,
-  internal_search_space = NULL,
   store_benchmark_result = TRUE,
   store_models = FALSE,
   check_values = FALSE,
@@ -133,7 +128,6 @@ ti_async = function(
   measures,
   terminator = terminator,
   search_space = search_space,
-  internal_search_space = internal_search_space,
   store_benchmark_result = store_benchmark_result,
   store_models = store_models,
   check_values = check_values,
4 changes: 0 additions & 4 deletions R/tune.R
@@ -42,7 +42,6 @@
 #' @template param_term_evals
 #' @template param_term_time
 #' @template param_search_space
-#' @template param_internal_search_space
 #' @template param_store_benchmark_result
 #' @template param_store_models
 #' @template param_check_values
@@ -88,7 +87,6 @@ tune = function(
   terminator = NULL,
   search_space = NULL,
   store_benchmark_result = TRUE,
-  internal_search_space = NULL,
   store_models = FALSE,
   check_values = FALSE,
   callbacks = NULL,
@@ -106,7 +104,6 @@ tune = function(
   measures,
   terminator = terminator,
   search_space = search_space,
-  internal_search_space = internal_search_space,
   store_benchmark_result = store_benchmark_result,
   store_models = store_models,
   check_values = check_values,
@@ -122,7 +119,6 @@ tune = function(
   measures,
   terminator = terminator,
   search_space = search_space,
-  internal_search_space = internal_search_space,
   store_benchmark_result = store_benchmark_result,
   store_models = store_models,
   check_values = check_values,
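With the argument gone from the sugar functions as well, a hypothetical `ti()` call (reusing the tagged learner sketched in the migration note above) needs no extra plumbing; the internal search space is derived from the tags:

instance = ti(
  task = tsk("sonar"),
  learner = learner,  # the learner tagged with to_tune(internal = TRUE) above
  resampling = rsmp("holdout"),
  measures = msr("classif.ce"),
  terminator = trm("evals", n_evals = 10)
)
tnr("random_search")$optimize(instance)
# Internally tuned values are aggregated and reported with the result
# (assumed accessor; see the instance's archive/result columns):
instance$result$internal_tuned_values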