File: nlrob-algos.Rd

\name{nlrob-algorithms}
\alias{nlrob.algorithms}
\alias{nlrob.MM}
\alias{nlrob.tau}
\alias{nlrob.CM}
\alias{nlrob.mtl}
\title{MM-, Tau-, CM-, and MTL-Estimators for Nonlinear Robust Regression}
\description{
  \describe{
    \item{"MM":}{Compute an MM-estimator for nonlinear robust (constrained)
      regression.}
    \item{"tau":}{Compute a Tau-estimator for nonlinear robust (constrained)
      regression.}
    \item{"CM":}{Compute a \dQuote{Constrained M} (=: CM) estimator for
      nonlinear robust (constrained) regression.}
    \item{"MTL":}{Compute a \dQuote{Maximum Trimmed Likelihood} (=: MTL)
      estimator for nonlinear robust (constrained) regression.}
  }
}
\usage{
## You can *not* call  nlrob(*, method = <M>)  with the argument lists below
## ------- =====       ==> rather see  help(nlrob)

nlrob.MM(formula, data, lower, upper,
	 tol = 1e-06,
	 psi = c("bisquare", "lqq", "optimal", "hampel"),
         init = c("S", "lts"),
	 ctrl = nlrob.control("MM", psi = psi, init = init, fnscale = NULL,
		       tuning.chi.scale = .psi.conv.cc(psi, .Mchi.tuning.defaults[[psi]]),
		       tuning.psi.M     = .psi.conv.cc(psi, .Mpsi.tuning.defaults[[psi]]),
		       optim.control = list(), optArgs = list(...)),
	 ...)

nlrob.tau(formula, data, lower, upper,
	  tol = 1e-06, psi = c("bisquare", "optimal"),
	  ctrl = nlrob.control("tau", psi = psi, fnscale = NULL,
			tuning.chi.scale = NULL, tuning.chi.tau = NULL,
			optArgs = list(...)),
	  ...)

nlrob.CM(formula, data, lower, upper,
	 tol = 1e-06,
	 psi = c("bisquare", "lqq", "welsh", "optimal", "hampel", "ggw"),
	 ctrl = nlrob.control("CM", psi = psi, fnscale = NULL,
                        tuning.chi = NULL, optArgs = list(...)),
	 ...)

nlrob.mtl(formula, data, lower, upper,
	  tol = 1e-06,
	  ctrl = nlrob.control("mtl", cutoff = 2.5, optArgs = list(...)),
	  ...)
}
\arguments{
  \item{formula}{nonlinear regression \code{\link{formula}}, using both
    variable names from \code{data} and parameter names from either
	  \code{lower} or \code{upper}.}
  \item{data}{the data to be used, a \code{\link{data.frame}}.}
  \item{lower, upper}{bounds, aka \dQuote{box constraints}, for all the
    parameters; in the cases \code{"CM"} and \code{"mtl"}, these must also
    include the error standard deviation as \code{"sigma"}; see
    \code{\link{nlrob}()} about its \code{\link{names}}, etc.

    Note that one of these two must be properly \dQuote{named}, e.g.,
    \code{names(lower)} being a \code{\link{character}} vector of the
    parameter names used in \code{formula} above.
  }
  \item{tol}{numerical convergence tolerance.}
  \item{psi, init}{see \code{\link{nlrob.control}}.}
  \item{ctrl}{a \code{\link{list}}, typically the result of a call to
    \code{\link{nlrob.control}}.}

  \item{tuning.psi.M, optim.control}{see \code{\link{nlrob.control}}.}
  \item{optArgs}{a \code{\link{list}} of optional arguments for the
    optimization, e.g., \code{trace = TRUE}, passed to the optimizer,
    which currently must be \code{\link[DEoptimR]{JDEoptim}(.)}.}
  \item{...}{an alternative way to pass the \code{optArgs} above.}
}
\value{
  an \R object of \code{\link{class}} \code{"nlrob.<meth>"}, basically a
  list with components
  %% FIXME
}
\details{
  Copyright 2013, Eduardo L. T. Conceicao.  Available under the GPL (>= 2).

  Currently, all four methods use \code{\link[DEoptimR]{JDEoptim}()}
  from \CRANpkg{DEoptimR}, which subsamples using \code{\link{sample}()}.
  From \R version 3.6.0, \code{\link{sample}} depends on
  \code{\link{RNGkind}(*, sample.kind)}, such that exact reproducibility of
  results from \R versions 3.5.3 and earlier requires setting
  \code{\link{RNGversion}("3.5.0")}.
  In any case, do use \code{\link{set.seed}()} additionally for reproducibility!
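
  A minimal reproducibility sketch (a hedged illustration only, assuming the
  \code{DNase1} setup of the \sQuote{Examples} below; object names are arbitrary):
  \preformatted{
  ## only needed to exactly reproduce results obtained under R <= 3.5.3 :
  if(getRversion() >= "3.6.0") suppressWarnings(RNGversion("3.5.0"))
  set.seed(47)  # fixes the seed for JDEoptim()'s sample()-based subsampling
  fit <- nlrob(density ~ Asym/(1 + exp((xmid - log(conc))/scal)),
               data = DNase[DNase$Run == 1,], method = "MM",
               lower = c(Asym = 0, xmid = 0, scal = 0), upper = 3)
  }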
}
\author{
  Eduardo L. T. Conceicao; compatibility tweaks (for \code{\link{nlrob}}),
  generalizations, and inference by Martin Maechler.
}
\source{
  For \code{"MTL"}:
  Maronna, Ricardo A., Martin, R. Douglas, and Yohai, Victor J. (2006).
  \emph{Robust Statistics: Theory and Methods}. Wiley, Chichester, p. 133.
}
\references{
  \describe{
    \item{"MM":}{
      Yohai, V.J. (1987)
      High breakdown-point and high efficiency robust estimates for
      regression.
      \emph{The Annals of Statistics} \bold{15}, 642--656.
    }
    \item{"tau":}{
      Yohai, V.J., and Zamar, R.H. (1988).
      High breakdown-point estimates of regression by means of the
      minimization of an efficient scale.
      \emph{Journal of the American Statistical Association} \bold{83},
      406--413.
    }
    \item{"CM":}{
      Mendes, B.V.M., and Tyler, D.E. (1996)
      Constrained M-estimation for regression.
      In: \emph{Robust Statistics, Data Analysis and Computer Intensive
      Methods}, Lecture Notes in Statistics 109, Springer, New York, 299--320.
      %% not yet -- e.g. tuning constants for Welsh:
      %% Edlund, O. and Ekblom, H. (2005)
      %% Computing the constrained M-estimates for regression.
      %% Computational Statistics Data Analysis \bold{49}(1): 19--32.
    }
    \item{"MTL":}{
      Hadi, Ali S., and Luceno, Alberto (1997).
      Maximum trimmed likelihood estimators: a unified approach,
      examples, and algorithms.
      \emph{Computational Statistics & Data Analysis} \bold{25}, 251--272.

      Gervini, Daniel, and Yohai, Victor J. (2002).
      A class of robust and fully efficient regression estimators.
      \emph{The Annals of Statistics} \bold{30}, 583--616.
    }

  }%describe
}
\examples{%% for more, --> ../tests/nlregrob-tst.R
DNase1 <- DNase[DNase$Run == 1,]
form <- density ~ Asym/(1 + exp(( xmid -log(conc) )/scal ))
pnms <- c("Asym", "xmid", "scal")
set.seed(47) # as these by default use randomized optimization:

fMM <- robustbase:::nlrob.MM(form, data = DNase1,
           lower = setNames(c(0,0,0), pnms), upper = 3,
           ## call to nlrob.control to pass 'optim.control':
           ctrl = nlrob.control("MM", optim.control = list(trace = 1),
                                optArgs = list(trace = TRUE)))

## The same via nlrob() {recommended; the same random seed is needed to give the same result}:
set.seed(47)
gMM  <- nlrob(form, data = DNase1, method = "MM",
              lower = setNames(c(0,0,0), pnms), upper = 3, trace = TRUE)
gMM
summary(gMM)
## and they are the same {apart from 'call' and 'ctrl' and new stuff in gMM}:
ni <- names(fMM); ni <- ni[is.na(match(ni, c("call","ctrl")))]
stopifnot(all.equal(fMM[ni], gMM[ni]))
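
## A hedged sketch (not run): calling nlrob.tau() directly, analogously to fMM
## above; in practice rather use  nlrob(*, method = "tau"),  see  help(nlrob):
\dontrun{
set.seed(47)
fTau <- robustbase:::nlrob.tau(form, data = DNase1,
            lower = setNames(c(0,0,0), pnms), upper = 3)
str(fTau, max.level = 1)
}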
\dontshow{
if(doExtras <- robustbase:::doExtras()) {
 gtau <- nlrob(form, data = DNase1, method = "tau",
	       lower = setNames(c(0,0,0), pnms), upper = 3, trace = TRUE)

 ## these two have "sigma" also as parameter :
 psNms <- c(pnms, "sigma")
 gCM  <- nlrob(form, data = DNase1, method = "CM",
	       lower = setNames(c(0,0,0,0), psNms), upper = 3, trace = TRUE)
 gmtl <- nlrob(form, data = DNase1, method = "mtl",
	       lower = setNames(c(0,0,0,0), psNms), upper = 3, trace = TRUE)
 stopifnot(identical(sapply(list(gMM, gCM, gmtl), estimethod),
                     c("MM", "CM", "mtl")))
}% doExtras
}% dontshow
}
\keyword{robust}
\keyword{regression}
\keyword{nonlinear}