diff --git a/ch08.r b/ch08.r
index 659bf9d..3ff9cb5 100644
--- a/ch08.r
+++ b/ch08.r
@@ -55,7 +55,9 @@ lambda = as.data.frame(load.image("cameraman.tif"))
 lambda = xtabs(value ~ x+y, data=lambda)
 T = 100
 x = c()
-for (i in 1:T) x = append(x, rpois(length(lambda), lambda))
+for (i in 1:T) {
+  x = append(x, rpois(length(lambda), lambda))
+}
 x = array(x, c(256, 256, 100))
 y = (x>=1)
 mu = apply(y, c(1,2), mean)
@@ -70,4 +72,29 @@ image(flip_matrix(fig1), col=gray.colors(255))
 image(flip_matrix(lambdahat), col=gray.colors(255))
 
 #############
+# Chapter 8.2 Properties of ML estimation
+
+## Visualizing the invariance principle
+
+# R code
+N = 50
+S = 20
+theta = seq(0.1, 0.9, (0.9-0.1)/1000)
+L = S * log(theta) + (N-S) * log(1-theta)      # Bernoulli log-likelihood, S successes in N trials
+plot(theta, L, type="n", xlab=expression(theta), ylab=expression(paste("Log L(", theta, "|S = 20)")))
+title("Bernoulli")
+lines(theta, L, lwd=6, col="#8080BF")
+grid()
+
+h_theta = -log(1-theta)                        # reparameterization eta = h(theta)
+plot(theta, h_theta, type="n", xlab=expression(theta), ylab=expression(paste(eta, " = h(", theta, ")")))
+lines(theta, h_theta, lwd=6)
+grid()
+
+theta = seq(0.1, 2.5, (2.5-0.1)/1000)          # grid over eta (name theta reused)
+L = S * log(1-exp(-theta)) - theta * (N-S)     # truncated-Poisson log-likelihood in eta
+plot(theta, L, type="n")
+title("Truncated Poisson")
+lines(theta, L, lwd=6, col="#0000BF")
+grid()
 
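
A quick numerical check of what the two likelihood plots are meant to show (not part of the patch; a minimal sketch assuming the same N = 50, S = 20 as in the added code): by the invariance principle, the maximizer of the truncated-Poisson log-likelihood in eta should coincide with h(theta_hat) = -log(1 - S/N), the transform of the Bernoulli MLE.

# Sketch: verify the invariance principle numerically (N, S as above)
N = 50
S = 20
theta_hat = S / N                                  # Bernoulli MLE
eta_from_invariance = -log(1 - theta_hat)          # h(theta_hat), about 0.5108

negL = function(eta) -(S * log(1 - exp(-eta)) - eta * (N - S))
eta_hat = optimize(negL, interval = c(0.01, 2.5))$minimum   # direct maximizer in eta

print(c(eta_from_invariance, eta_hat))             # the two agree up to tolerance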