File: stats4-Ex.Rout.save

package info (click to toggle)
r-base 4.5.2-1
  • links: PTS, VCS
  • area: main
  • in suites: forky, sid
  • size: 112,924 kB
  • sloc: ansic: 291,338; fortran: 111,889; javascript: 14,798; yacc: 6,154; sh: 5,689; makefile: 5,239; tcl: 4,562; perl: 963; objc: 791; f90: 758; asm: 258; java: 31; sed: 1
file content (340 lines) | stat: -rw-r--r-- 8,484 bytes parent folder | download | duplicates (3)

R Under development (unstable) (2022-03-19 r81942) -- "Unsuffered Consequences"
Copyright (C) 2022 The R Foundation for Statistical Computing
Platform: x86_64-pc-linux-gnu (64-bit)

R is free software and comes with ABSOLUTELY NO WARRANTY.
You are welcome to redistribute it under certain conditions.
Type 'license()' or 'licence()' for distribution details.

  Natural language support but running in an English locale

R is a collaborative project with many contributors.
Type 'contributors()' for more information and
'citation()' on how to cite R or R packages in publications.

Type 'demo()' for some demos, 'help()' for on-line help, or
'help.start()' for an HTML browser interface to help.
Type 'q()' to quit R.

> pkgname <- "stats4"
> source(file.path(R.home("share"), "R", "examples-header.R"))
> options(warn = 1)
> library('stats4')
> 
> base::assign(".oldSearch", base::search(), pos = 'CheckExEnv')
> base::assign(".old_wd", base::getwd(), pos = 'CheckExEnv')
> cleanEx()
> nameEx("mle")
> ### * mle
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: mle
> ### Title: Maximum Likelihood Estimation
> ### Aliases: mle
> ### Keywords: models
> 
> ### ** Examples
> 
> ## Avoid printing to unwarranted accuracy
> od <- options(digits = 5)
> 
> ## Simulated EC50 experiment with count data
> x <- 0:10
> y <- c(26, 17, 13, 12, 20, 5, 9, 8, 5, 4, 8)
> 
> ## Easy one-dimensional MLE:
> nLL <- function(lambda) -sum(stats::dpois(y, lambda, log = TRUE))
> fit0 <- mle(nLL, start = list(lambda = 5), nobs = NROW(y))
> 
> ## sanity check --- notice that "nobs" must be input
> ## (not guaranteed to be meaningful for any likelihood)
> stopifnot(nobs(fit0) == length(y))
> 
> 
> # For 1D, this is preferable:
> fit1 <- mle(nLL, start = list(lambda = 5), nobs = NROW(y),
+             method = "Brent", lower = 1, upper = 20)
> 
> ## This needs a constrained parameter space: most methods will accept NA
> ll <- function(ymax = 15, xhalf = 6) {
+     if(ymax > 0 && xhalf > 0)
+       -sum(stats::dpois(y, lambda = ymax/(1+x/xhalf), log = TRUE))
+     else NA
+ }
> (fit <- mle(ll, nobs = length(y)))

Call:
mle(minuslogl = ll, nobs = length(y))

Coefficients:
   ymax   xhalf 
24.9931  3.0571 
> mle(ll, fixed = list(xhalf = 6))

Call:
mle(minuslogl = ll, fixed = list(xhalf = 6))

Coefficients:
  ymax  xhalf 
19.288  6.000 
> 
> ## Alternative using bounds on optimization
> ll2 <- function(ymax = 15, xhalf = 6)
+     -sum(stats::dpois(y, lambda = ymax/(1+x/xhalf), log = TRUE))
> mle(ll2, lower = rep(0, 2))

Call:
mle(minuslogl = ll2, lower = rep(0, 2))

Coefficients:
   ymax   xhalf 
24.9994  3.0558 
> 
> AIC(fit)
[1] 61.208
> BIC(fit)
[1] 62.004
> 
> summary(fit)
Maximum likelihood estimation

Call:
mle(minuslogl = ll, nobs = length(y))

Coefficients:
      Estimate Std. Error
ymax   24.9931     4.2244
xhalf   3.0571     1.0348

-2 log L: 57.208 
> logLik(fit)
'log Lik.' -28.604 (df=2)
> vcov(fit)
         ymax   xhalf
ymax  17.8459 -3.7206
xhalf -3.7206  1.0708
> plot(profile(fit), absVal = FALSE)
> confint(fit)
Profiling...
        2.5 %  97.5 %
ymax  17.8845 34.6194
xhalf  1.6616  6.4792
> 
> ## Use bounded optimization
> ## The lower bounds are really > 0,
> ## but we use >=0 to stress-test profiling
> (fit2 <- mle(ll2, lower = c(0, 0)))

Call:
mle(minuslogl = ll2, lower = c(0, 0))

Coefficients:
   ymax   xhalf 
24.9994  3.0558 
> plot(profile(fit2), absVal = FALSE)
> 
> ## A better parametrization:
> ll3 <- function(lymax = log(15), lxhalf = log(6))
+     -sum(stats::dpois(y, lambda = exp(lymax)/(1+x/exp(lxhalf)), log = TRUE))
> (fit3 <- mle(ll3))

Call:
mle(minuslogl = ll3)

Coefficients:
 lymax lxhalf 
3.2189 1.1170 
> plot(profile(fit3), absVal = FALSE)
> exp(confint(fit3))
Profiling...
         2.5 %  97.5 %
lymax  17.8815 34.6186
lxhalf  1.6615  6.4794
> 
> # Regression tests for bounded cases (this was broken in R 3.x)
> fit4 <- mle(ll, lower = c(0, 4)) # has max on boundary
> confint(fit4)
Profiling...
       2.5 %  97.5 %
ymax  17.446 26.5081
xhalf     NA  6.9109
> 
> ## direct check that fixed= and constraints work together
> mle(ll, lower = c(0, 4), fixed=list(ymax=23)) # has max on boundary

Call:
mle(minuslogl = ll, fixed = list(ymax = 23), lower = c(0, 4))

Coefficients:
 ymax xhalf 
   23     4 
> 
> ## Linear regression using MLE
> x <- 1:10 
> y <- c(0.48, 2.24, 2.22, 5.15, 4.64, 5.53, 7, 8.8, 7.67, 9.23)
> 
> LM_mll <- function(formula, data = environment(formula))
+ {
+      y <- model.response(model.frame(formula, data))
+      X <- model.matrix(formula, data)
+      b0 <- numeric(NCOL(X))
+      names(b0) <- colnames(X)
+      function(b=b0, sigma=1)
+          -sum(dnorm(y, X %*% b, sigma, log=TRUE))
+ }
> 
> mll <- LM_mll(y ~ x)
> 
> summary(lm(y~x)) # for comparison -- notice variance bias in MLE

Call:
lm(formula = y ~ x)

Residuals:
   Min     1Q Median     3Q    Max 
-0.937 -0.500 -0.211  0.278  1.273 

Coefficients:
            Estimate Std. Error t value Pr(>|t|)    
(Intercept)   0.0927     0.5376    0.17     0.87    
x             0.9461     0.0866   10.92  4.4e-06 ***
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Residual standard error: 0.787 on 8 degrees of freedom
Multiple R-squared:  0.937,	Adjusted R-squared:  0.929 
F-statistic:  119 on 1 and 8 DF,  p-value: 4.39e-06

> summary(mle(mll, lower=c(-Inf,-Inf, 0.01)))
Maximum likelihood estimation

Call:
mle(minuslogl = mll, lower = c(-Inf, -Inf, 0.01))

Coefficients:
              Estimate Std. Error
b.(Intercept) 0.092667   0.480869
b.x           0.946061   0.077499
sigma         0.703919   0.157400

-2 log L: 21.357 
> summary(mle(mll, lower=list(sigma = 0.01))) # alternative specification
Maximum likelihood estimation

Call:
mle(minuslogl = mll, lower = list(sigma = 0.01))

Coefficients:
              Estimate Std. Error
b.(Intercept) 0.092667   0.480869
b.x           0.946061   0.077499
sigma         0.703919   0.157400

-2 log L: 21.357 
> 
> confint(mle(mll, lower=list(sigma = 0.01)))
Profiling...
                 2.5 % 97.5 %
b.(Intercept) -0.94831 1.1336
b.x            0.77829 1.1138
sigma          0.48017 1.1755
> plot(profile(mle(mll, lower=list(sigma = 0.01))))
> 
> Binom_mll <- function(x, n)
+ {
+     force(x); force(n) ## beware lazy evaluation
+     function(p=.5) -dbinom(x, n, p, log=TRUE)
+ }
> 
> ## Likelihood functions for different x.
> ## This code goes wrong, if force(x) is not used in Binom_mll:
> 
> curve(Binom_mll(0, 10)(p), xname="p", ylim=c(0, 10))
> mll_list <- list(10)
> for (x in 1:10)
+     mll_list[[x]] <- Binom_mll(x, 10)
> for (mll in mll_list)
+     curve(mll(p), xname="p", add=TRUE)
> 
> mll <- Binom_mll(4,10)
> mle(mll, lower = 1e-16, upper = 1-1e-16) # limits must be inside (0,1)

Call:
mle(minuslogl = mll, lower = 1e-16, upper = 1 - 1e-16)

Coefficients:
  p 
0.4 
> 
> ## Boundary case: This works, but fails if limits are set closer to 0 and 1  
> mll <- Binom_mll(0, 10)
> mle(mll, lower=.005, upper=.995)

Call:
mle(minuslogl = mll, lower = 0.005, upper = 0.995)

Coefficients:
    p 
0.005 
> 
> ## Not run: 
> ##D ## We can use limits closer to the boundaries if we use the
> ##D ## drop-in replacement optimr() from the optimx package.
> ##D 
> ##D mle(mll, lower = 1e-16, upper = 1-1e-16, optim=optimx::optimr)
> ## End(Not run)
> 
> 
> options(od)
> 
> 
> 
> cleanEx()
> nameEx("update-methods")
> ### * update-methods
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: update-methods
> ### Title: Methods for Function 'update' in Package 'stats4'
> ### Aliases: update-methods update,ANY-method update,mle-method
> ### Keywords: methods
> 
> ### ** Examples
> 
> x <- 0:10
> y <- c(26, 17, 13, 12, 20, 5, 9, 8, 5, 4, 8)
> ll <- function(ymax = 15, xhalf = 6)
+     -sum(stats::dpois(y, lambda = ymax/(1+x/xhalf), log = TRUE))
> fit <- mle(ll)
Warning in stats::dpois(y, lambda = ymax/(1 + x/xhalf), log = TRUE) :
  NaNs produced
> ## note the recorded call contains ..1, a problem with S4 dispatch
> update(fit, fixed = list(xhalf = 3))

Call:
mle(minuslogl = ll, fixed = ..1)

Coefficients:
    ymax    xhalf 
25.19609  3.00000 
> 
> 
> 
> ### * <FOOTER>
> ###
> cleanEx()
> options(digits = 7L)
> base::cat("Time elapsed: ", proc.time() - base::get("ptime", pos = 'CheckExEnv'),"\n")
Time elapsed:  2.16 0.024 2.2 0 0 
> grDevices::dev.off()
null device 
          1 
> ###
> ### Local variables: ***
> ### mode: outline-minor ***
> ### outline-regexp: "\\(> \\)?### [*]+" ***
> ### End: ***
> quit('no')