File: lmrob.S.Rd

\name{lmrob.S}
\alias{lmrob.S}
\title{ S-regression estimators }
\description{
  Computes an S-estimator for linear regression,
  using the \dQuote{fast S} algorithm of Salibian-Barrera and Yohai.
}
\usage{
lmrob.S(x, y, control, trace.lev = 0)
}
\arguments{
  \item{x}{numeric design matrix (including a column of ones if an
    intercept is desired).}
  \item{y}{numeric response vector, of length equal to the number of
    rows of \code{x}.}
  \item{control}{list of control parameters, as returned by
    \code{\link{lmrob.control}}.}
  \item{trace.lev}{integer indicating how much the algorithm's progress
    should be traced; larger values produce more output, and the
    default, \code{trace.lev = 0}, does no tracing.}
}
\details{
  This function is used by \code{\link{lmrob.fit}} and not
  intended to be used on its own (because an S-estimator has too low
  efficiency \sQuote{on its own}).
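
  For typical data analysis, call \code{\link{lmrob}} instead; it uses
  the S-estimate only as the initial estimate of an MM-estimator.  A
  minimal sketch (the data frame \code{dat} and the variable names are
  purely illustrative):
  \preformatted{
    ## fits an MM-estimator; an S-estimate as computed by lmrob.S()
    ## is used internally as its starting point
    fit <- lmrob(y ~ x1 + x2, data = dat)
    summary(fit)
  }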
}
\value{
  A list with components
  \item{coefficients}{numeric vector (length \eqn{p}) of S-regression coefficient estimates.}
  \item{scale}{the S-estimate of the residual scale \eqn{\sigma}.}
  % \item{cov}{covariance matrix (\eqn{p \times p}{p x p}) of the
  %   coefficient estimates.}
  \item{fitted.values}{numeric vector (length \eqn{n}) of the fitted
    values.}
  \item{residuals}{numeric vector (length \eqn{n}) of the residuals.}
  \item{weights}{numeric vector (length \eqn{n}) of the robustness weights.}
  \item{k.iter}{(maximal) number of refinement iterations used.}
  \item{converged}{logical indicating if \bold{all} refinement
    iterations had converged.}
  \item{control}{the same list as the \code{control} argument.}
}
\seealso{\code{\link{lmrob}}, also for references.
}
\examples{
set.seed(33)
x1 <- sort(rnorm(30)); x2 <- sort(rnorm(30)); x3 <- sort(rnorm(30))
X. <- cbind(x1, x2, x3)
y <-  10 + X. \%*\% (10*(2:4)) + rnorm(30)/10
y[1] <- 500   # a moderate outlier
X.[2,1] <- 20 # an X outlier
X1  <- cbind(1, X.)

(m.lm <- lm(y ~ X.))
set.seed(12)
m.lmS <- lmrob.S(x = X1, y = y,
                 control = lmrob.control(nRes = 20), trace.lev = 1)
m.lmS[c("coefficients","scale")]
all.equal(m.lmS$coef, 10 * (1:4), tol = 0.005, check.attributes = FALSE)
stopifnot(all.equal(m.lmS$coef, 10 * (1:4),
                    tol = 0.005, check.attributes = FALSE),
          all.equal(m.lmS$scale, 1/10, tol = 0.09))
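
## For comparison, a minimal sketch of the usual high-level call:
## lmrob() computes an initial S-estimate (as above) internally and then
## refines it by an M-step; nResample is lowered here only to keep the
## example fast.
set.seed(12)
m.MM <- lmrob(drop(y) ~ X., control = lmrob.control(nResample = 20))
coef(m.MM)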
}
\author{ Matias Salibian-Barrera and Manuel Koller (and Martin Maechler for minor details) }
\keyword{robust}
\keyword{regression}