実践ベイズモデリング ー解析技法と認知モデルー (Practical Bayesian Modeling: Analysis Techniques and Cognitive Models), 朝倉出版, Prof. Hideki Toyoda et al.

Posted at 2024-03-23

#Maximum likelihood estimation of the Gumbel distribution
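#Note (not from the book; assuming the standard Gumbel-for-maxima parameterization): the density is
#f(x) = (1/sigma)*exp(-(x-mu)/sigma - exp(-(x-mu)/sigma)), so the log-likelihood of a sample
#x_1,...,x_n is l(mu,sigma) = -n*log(sigma) - sum((x-mu)/sigma) - sum(exp(-(x-mu)/sigma)).
#The script below solves the resulting score equations with a Broyden quasi-Newton iteration.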

#data from Table 1.2
x=c(8.95,8.68,8.7,8.74,8.71,8.58,8.63,8.48,8.6,8.65,8.41,8.52,8.53,8.6,8.6,8.56,8.66,8.73,8.74,8.47,8.54,8.35,8.56,8.51,8.52)
n=length(x)

#Gumbel log-likelihood (optional, handy for monitoring convergence; the scale enters
#as sqrt(s[2]^2)=|s[2]| so that it stays positive)
loglik=function(s){
  val=-log(sqrt(s[2]^2))-(x-s[1])/sqrt(s[2]^2)-exp(-(x-s[1])/sqrt(s[2]^2))
  return(sum(val))
}

#pre-Broyden algorithm, p.211
#maximum-likelihood (score) equations
f=function(s){
  f1=n-sum(((x-s[1])/sqrt(s[2]^2))*(1-exp(-(x-s[1])/sqrt(s[2]^2))))  #d loglik/d sigma = 0
  f2=n-sum(exp(-(x-s[1])/sqrt(s[2]^2)))                              #d loglik/d mu = 0
  return(c(f2,f1))
}
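#Note (added): f1 and f2 are the score equations obtained by setting the partial derivatives
#of the log-likelihood to zero (standard result for this parameterization):
#  d l/d mu    = 0  <=>  n - sum(exp(-(x-mu)/sigma)) = 0                      (f2)
#  d l/d sigma = 0  <=>  n - sum(((x-mu)/sigma)*(1-exp(-(x-mu)/sigma))) = 0   (f1)
#The return value c(f2,f1) is ordered so that component 1 pairs with mu=s[1] and
#component 2 with sigma=s[2], matching the parameter vector X below.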


#starting values: sample mean and standard deviation
X=c(mean(x),sd(x))
ite=10^2  #maximum number of iterations
h=0.01    #step size for the finite-difference Jacobian (must be set before computing B)
#initial Jacobian approximation: forward differences, kept as a diagonal matrix
B=diag((f(X+h)-f(X))/h)

for(l in 1:ite){
  if(sum(abs(f(X)))>10^(-9)){  #iterate until the score equations are numerically zero
    X_pre=X
    X=X-solve(B)%*%f(X)        #quasi-Newton step
    s=X-X_pre
    y=f(X)-f(X_pre)
    B=B+((y-B%*%s)/as.numeric(t(s)%*%s))%*%t(s)  #Broyden rank-one update of the Jacobian
    print(sum(abs(f(X))))      #convergence monitor: L1 norm of the score equations
    print(loglik(X))           #log-likelihood trace
  }
}
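#Cross-check (added, not from the book): maximize the same log-likelihood directly with
#base R's optim() (default Nelder-Mead); the result should agree with X to a few decimals.
fit=optim(c(mean(x),sd(x)),function(s) -loglik(s))
c(fit$par[1],sqrt(fit$par[2]^2))  #(mu, sigma); |.| because loglik uses the scale as |s[2]|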

#maximum-likelihood estimates: param[1]=mu (location), param[2]=sigma (scale, taken as |X[2]|
#because the scale enters the equations only through sqrt(s[2]^2))
param=c(X[1],sqrt(X[2]^2))

#mean: mu + gamma*sigma, with Euler's constant gamma = 0.5772...
param[1]+0.5772*param[2]

#median: mu - sigma*log(log(2))
param[1]-param[2]*log(log(2))

#variance: (pi*sigma)^2/6
(pi*param[2])^2/6
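#Note (added): these closed forms are the standard Gumbel moments,
#  E[X] = mu + gamma*sigma,  Median = mu - sigma*log(log(2)),  Var[X] = (pi*sigma)^2/6.
#Quick sanity check against the sample moments:
c(mean(x),var(x))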

#non-exceedance probability (Gumbel CDF) evaluated at each observed value
p=exp(-exp((param[1]-x)/(param[2])))

#return period r = 1/(1-p)
1/(1-p)
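#Example (added; the threshold 9.0 is an arbitrary illustrative value, not from the book):
#non-exceedance probability and return period for a given level under the fitted model.
p9=exp(-exp((param[1]-9.0)/param[2]))
1/(1-p9)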


