File: performance.Rd

package info (click to toggle)
r-cran-mlr 2.19.2%2Bdfsg-1
  • links: PTS, VCS
  • area: main
  • in suites: forky, sid, trixie
  • size: 8,264 kB
  • sloc: ansic: 65; sh: 13; makefile: 5
file content (71 lines) | stat: -rw-r--r-- 2,277 bytes parent folder | download | duplicates (3)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/performance.R
\name{performance}
\alias{performance}
\title{Measure performance of prediction.}
\usage{
performance(
  pred,
  measures,
  task = NULL,
  model = NULL,
  feats = NULL,
  simpleaggr = FALSE
)
}
\arguments{
\item{pred}{(\link{Prediction})\cr
Prediction object.}

\item{measures}{(\link{Measure} | list of \link{Measure})\cr
Performance measure(s) to evaluate.
Default is the default measure for the task; see \link{getDefaultMeasure}.}

\item{task}{(\link{Task})\cr
Learning task, might be requested by the performance measure, usually not needed except for clustering or survival.}

\item{model}{(\link{WrappedModel})\cr
Model built on training data, might be requested by the performance measure, usually not needed except for survival.}

\item{feats}{(\link{data.frame})\cr
Features of predicted data, usually not needed except for clustering.
If the prediction was generated from a \code{task}, you can also pass this instead and the features
are extracted from it.}

\item{simpleaggr}{(\link{logical})\cr
If \code{TRUE}, aggregation of \code{ResamplePrediction} objects is skipped. This is used internally for threshold tuning. Default is \code{FALSE}.}
}
\value{
(named \link{numeric}). Performance value(s), named by measure(s).
}
\description{
Measures the quality of a prediction w.r.t. some performance measure.
}
\examples{
training.set = seq(1, nrow(iris), by = 2)
test.set = seq(2, nrow(iris), by = 2)

task = makeClassifTask(data = iris, target = "Species")
lrn = makeLearner("classif.lda")
mod = train(lrn, task, subset = training.set)
pred = predict(mod, newdata = iris[test.set, ])
performance(pred, measures = mmce)

# Compute multiple performance measures at once
ms = list("mmce" = mmce, "acc" = acc, "timetrain" = timetrain)
performance(pred, measures = ms, task, mod)
}
\seealso{
Other performance: 
\code{\link{ConfusionMatrix}},
\code{\link{calculateConfusionMatrix}()},
\code{\link{calculateROCMeasures}()},
\code{\link{estimateRelativeOverfitting}()},
\code{\link{makeCostMeasure}()},
\code{\link{makeCustomResampledMeasure}()},
\code{\link{makeMeasure}()},
\code{\link{measures}},
\code{\link{setAggregation}()},
\code{\link{setMeasurePars}()}
}
\concept{performance}