File: ddalpha.getErrorRateCV.Rd

\name{ddalpha.getErrorRateCV}
\alias{ddalpha.getErrorRateCV}
\title{
Test DD-Classifier
}
\description{
Performs a cross-validation procedure over the given data. 
On each step, every \code{numchunks}-th observation is removed from the data, the DD-classifier is trained on the remaining observations and then tested on the removed ones.
}
\usage{
ddalpha.getErrorRateCV (data, numchunks = 10,  ...)
}
\arguments{
  \item{data}{
Matrix containing the training sample, where each of the \eqn{n} rows is one object: the first \eqn{d} entries are the inputs and the last entry is the output (class label).
}
  \item{numchunks}{
number of subsets the data is split into for testing; this equals the number of times the classifier is trained.
}
  \item{\dots}{
additional parameters passed to \code{\link{ddalpha.train}}
}
}

\value{

  \item{errors}{
  the fraction of incorrectly classified observations
  }
  \item{time}{
  the mean training time
  }
  \item{time_sd}{
  the standard deviation of training time
  }

}


\seealso{
\code{\link{ddalpha.train}} to train the DD\eqn{\alpha}-classifier, 
\code{\link{ddalpha.classify}} for classification using the DD\eqn{\alpha}-classifier, 
\code{\link{ddalpha.test}} to test the DD-classifier on particular learning and testing data,
\code{\link{ddalpha.getErrorRatePart}} to perform a benchmark study of the DD-classifier on particular data.
}
\examples{
# Generate a bivariate normal location-shift classification task
# containing 300 objects; the cross-validation splits them into
# learning and testing parts internally
class1 <- mvrnorm(150, c(0,0), 
                  matrix(c(1,1,1,4), nrow = 2, ncol = 2, byrow = TRUE))
class2 <- mvrnorm(150, c(2,2), 
                  matrix(c(1,1,1,4), nrow = 2, ncol = 2, byrow = TRUE))
propertyVars <- c(1:2)
classVar <- 3
data <- rbind(cbind(class1, rep(1, 150)), cbind(class2, rep(2, 150)))

# Train 1st DDalpha-classifier (default settings) 
# and get the classification error rate
stat <- ddalpha.getErrorRateCV(data, numchunks = 5)
cat("1. Classification error rate (defaults): ", 
    stat$error, ".\n", sep = "")
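
# The returned list also contains timing information (see the Value
# section): the mean training time and its standard deviation
cat("   Mean training time: ", stat$time, " (sd ", stat$time_sd, ").\n", 
    sep = "")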

# Train 2nd DDalpha-classifier (zonoid depth, maximum Mahalanobis 
# depth classifier with defaults as outsider treatment) 
# and get the classification error rate
stat2 <- ddalpha.getErrorRateCV(data, depth = "zonoid", 
                                outsider.methods = "depth.Mahalanobis")
cat("2. Classification error rate (depth.Mahalanobis): ", 
    stat2$errors, ".\n", sep = "")
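
# For illustration only: a minimal, hand-written sketch of the 
# cross-validation loop described above, built from ddalpha.train() 
# and ddalpha.classify(); the internal implementation of 
# ddalpha.getErrorRateCV() may differ in detail.
numchunks <- 5
n <- nrow(data)
misclassified <- 0
for (i in 1:numchunks) {
  testIdx <- seq(i, n, by = numchunks)   # every numchunks-th observation
  learn   <- data[-testIdx, , drop = FALSE]
  test    <- data[ testIdx, , drop = FALSE]
  clf     <- ddalpha.train(learn)        # default settings
  labels  <- unlist(ddalpha.classify(clf, test[, propertyVars]))
  misclassified <- misclassified + sum(labels != test[, classVar])
}
cat("3. Classification error rate (manual CV sketch): ", 
    misclassified / n, ".\n", sep = "")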


}
% Add one or more standard keywords, see file 'KEYWORDS' in the
% R documentation directory.
\keyword{ benchmark }