File: calculateROCMeasures.Rd

% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/calculateROCMeasures.R
\name{calculateROCMeasures}
\alias{calculateROCMeasures}
\alias{print.ROCMeasures}
\title{Calculate receiver operating characteristic (ROC) measures.}
\usage{
calculateROCMeasures(pred)

\method{print}{ROCMeasures}(x, abbreviations = TRUE, digits = 2, ...)
}
\arguments{
\item{pred}{(\link{Prediction})\cr
Prediction object.}

\item{x}{(\code{ROCMeasures})\cr
Created by \link{calculateROCMeasures}.}

\item{abbreviations}{(\code{logical(1)})\cr
If \code{TRUE}, a short paragraph explaining the used measures is printed as well.}

\item{digits}{(\code{integer(1)})\cr
Number of digits the measures are rounded to.}

\item{...}{(\code{any})\cr
Currently not used.}
}
\value{
(\code{ROCMeasures}).
A list containing two elements: \code{confusion.matrix}, the \eqn{2 \times 2}{2 x 2} confusion
matrix of absolute frequencies, and \code{measures}, a list of the measures mentioned above.
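
A minimal access sketch, assuming the \code{measures} list is keyed by the measure
codes listed in the Description (not verbatim package output):
\preformatted{
res = calculateROCMeasures(pred)
res$confusion.matrix  # 2 x 2 matrix of absolute frequencies
res$measures$tpr      # a single measure, keyed by its code
res$measures$fomr     # false omission rate (named fomr, not for)
}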
}
\description{
Calculate the absolute number of correct/incorrect classifications and the following evaluation measures:
\itemize{
\item \code{tpr} True positive rate (Sensitivity, Recall)
\item \code{fpr} False positive rate (Fall-out)
\item \code{fnr} False negative rate (Miss rate)
\item \code{tnr} True negative rate (Specificity)
\item \code{ppv} Positive predictive value (Precision)
\item \code{for} False omission rate
\item \code{lrp} Positive likelihood ratio (LR+)
\item \code{fdr} False discovery rate
\item \code{npv} Negative predictive value
\item \code{acc} Accuracy
\item \code{lrm} Negative likelihood ratio (LR-)
\item \code{dor} Diagnostic odds ratio
}

For details on the used measures, see \link{measures} and also
\url{https://en.wikipedia.org/wiki/Receiver_operating_characteristic}.

The false omission rate is stored in the resulting object under the name \code{fomr}
rather than \code{for}, since \code{for} is a reserved word in R and should never be
used as a variable name.
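
As an illustrative sketch (not the package's implementation), the measures derive from
the confusion matrix counts \code{tp}, \code{fp}, \code{tn}, \code{fn} as follows:
\preformatted{
tpr  = tp / (tp + fn)                   # sensitivity / recall
fpr  = fp / (fp + tn)                   # fall-out
fnr  = 1 - tpr                          # miss rate
tnr  = 1 - fpr                          # specificity
ppv  = tp / (tp + fp)                   # precision
npv  = tn / (tn + fn)
fdr  = 1 - ppv                          # false discovery rate
fomr = fn / (fn + tn)                   # false omission rate
acc  = (tp + tn) / (tp + fp + tn + fn)
lrp  = tpr / fpr                        # positive likelihood ratio (LR+)
lrm  = fnr / tnr                        # negative likelihood ratio (LR-)
dor  = lrp / lrm                        # diagnostic odds ratio
}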
}
\section{Functions}{
\itemize{
\item \code{print(ROCMeasures)}: Prints the confusion matrix and the calculated measures.
}}
\examples{
\dontshow{ if (requireNamespace("rpart")) \{ }
lrn = makeLearner("classif.rpart", predict.type = "prob")
fit = train(lrn, sonar.task)
pred = predict(fit, task = sonar.task)
calculateROCMeasures(pred)
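
# Control the printed output via the documented arguments:
# suppress the abbreviation key and round to 3 digits.
r = calculateROCMeasures(pred)
print(r, abbreviations = FALSE, digits = 3)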
\dontshow{ \} }
}
\seealso{
Other roc: 
\code{\link{asROCRPrediction}()}

Other performance: 
\code{\link{ConfusionMatrix}},
\code{\link{calculateConfusionMatrix}()},
\code{\link{estimateRelativeOverfitting}()},
\code{\link{makeCostMeasure}()},
\code{\link{makeCustomResampledMeasure}()},
\code{\link{makeMeasure}()},
\code{\link{measures}},
\code{\link{performance}()},
\code{\link{setAggregation}()},
\code{\link{setMeasurePars}()}
}
\concept{performance}
\concept{roc}