File: tuneParamsMultiCrit.Rd

% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/tuneParamsMultiCrit.R
\name{tuneParamsMultiCrit}
\alias{tuneParamsMultiCrit}
\title{Hyperparameter tuning for multiple measures at once.}
\usage{
tuneParamsMultiCrit(
  learner,
  task,
  resampling,
  measures,
  par.set,
  control,
  show.info = getMlrOption("show.info"),
  resample.fun = resample
)
}
\arguments{
\item{learner}{(\link{Learner} | \code{character(1)})\cr
The learner.
If you pass a string the learner will be created via \link{makeLearner}.}

\item{task}{(\link{Task})\cr
The task.}

\item{resampling}{(\link{ResampleInstance} | \link{ResampleDesc})\cr
Resampling strategy to evaluate points in hyperparameter space. If you pass a description,
it is instantiated once at the beginning by default, so all points are
evaluated on the same training/test sets.
If you want to change that behavior, look at \link{TuneMultiCritControl}.}

\item{measures}{(list of \link{Measure})\cr
Performance measures to optimize simultaneously.}

\item{par.set}{(\link[ParamHelpers:makeParamSet]{ParamHelpers::ParamSet})\cr
Collection of parameters and their constraints for optimization.
Dependent parameters with a \code{requires} field must define it with \code{quote}, not
\code{expression} (see the sketch in the examples below).}

\item{control}{(\link{TuneMultiCritControl})\cr
Control object for search method. Also selects the optimization algorithm for tuning.}

\item{show.info}{(\code{logical(1)})\cr
Print verbose output on console?
Default is set via \link{configureMlr}.}

\item{resample.fun}{(\link{closure})\cr
The function to use for resampling. Defaults to \link{resample} and should take the
same arguments as, and return the same result type as, \link{resample}.}
}
\value{
(\link{TuneMultiCritResult}).
}
\description{
Optimizes the hyperparameters of a learner in a multi-criteria fashion.
Allows for different optimization methods, such as grid search, evolutionary strategies, etc.
You can select such an algorithm (and its settings)
by passing a corresponding control object. For a complete list of implemented algorithms,
see \link{TuneMultiCritControl}.
}
\examples{
\dontshow{ if (requireNamespace("kernlab")) \{ }
\donttest{
\dontshow{ if (requireNamespace("mco")) \{ }
\dontshow{ if (requireNamespace("kernlab")) \{ }
# multi-criteria optimization of (tpr, fpr) with NSGA-II
lrn = makeLearner("classif.ksvm")
rdesc = makeResampleDesc("Holdout")
ps = makeParamSet(
  makeNumericParam("C", lower = -12, upper = 12, trafo = function(x) 2^x),
  makeNumericParam("sigma", lower = -12, upper = 12, trafo = function(x) 2^x)
)
ctrl = makeTuneMultiCritControlNSGA2(popsize = 4L, generations = 1L)
res = tuneParamsMultiCrit(lrn, sonar.task, rdesc, par.set = ps,
  measures = list(tpr, fpr), control = ctrl)
plotTuneMultiCritResult(res, path = TRUE)
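# Inspect the Pareto front: assuming the usual TuneMultiCritResult layout,
# res$x holds the list of non-dominated hyperparameter settings and
# res$y the corresponding matrix of measure values.
print(res$x)
print(res$y)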
\dontshow{ \} }
\dontshow{ \} }
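# Sketch of a parameter set with a dependent parameter (illustrative names,
# not tied to a particular learner): the requires field must be supplied as a
# quote()d expression, not via expression().
ps.dep = makeParamSet(
  makeDiscreteParam("kernel", values = c("rbfdot", "polydot")),
  makeNumericParam("sigma", lower = -12, upper = 12, trafo = function(x) 2^x,
    requires = quote(kernel == "rbfdot"))
)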
}
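# The optimization algorithm is selected purely through the control object;
# e.g., a random search control could be built like this (a sketch, the
# budget value is arbitrary):
ctrl.random = makeTuneMultiCritControlRandom(maxit = 4L)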
\dontshow{ \} }
}
\seealso{
Other tune_multicrit: 
\code{\link{TuneMultiCritControl}},
\code{\link{plotTuneMultiCritResult}()}
}
\concept{tune_multicrit}