#Group regression (渡辺洋先生, ベイズ統計学入門)
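
The R script below, following the group-regression (集団回帰) treatment in 渡辺洋先生's ベイズ統計学入門, simulates data for several groups, fits a separate simple regression to each group, and then shrinks the per-group intercepts, slopes, and error variances toward values shared across the groups. As a rough sketch of the model being assumed (my reading of the code, not a formula quoted from the book):

```math
y_{ij} = \alpha_j + \beta_j x_{ij} + \varepsilon_{ij}, \qquad \varepsilon_{ij} \sim N(0,\ \sigma_j^2)
```

where i indexes observations within group j, and the group-level parameters α_j, β_j are pulled toward their across-group means.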

```r
library(dplyr)

# simulate grouped data: "class" groups, each with its own Poisson-distributed size n
d = 1; n_vec = c()
class = 10

X = array(0, dim = c(1, d + 1))
Y = array(0, dim = c(1, d + 1))

for (j in 1:class) {
  #val=rpois(d,sample(5:10,1))
  n = rpois(1, sample(100:200, 1))
  n_vec = c(n_vec, n)
  X_mat = array(0, dim = c(n, d))
  Y_mat = array(0, dim = c(n, d))
  for (i in 1:ncol(X_mat)) {
    X_mat[, i] = rpois(n, sample(10:30, 1))
    Y_mat[, i] = rpois(n, sample(20:60, 1))
  }
  # first column holds the group label j
  X = rbind(X, cbind(rep(j, n), X_mat))
  Y = rbind(Y, cbind(rep(j, n), Y_mat))
}
# drop the dummy first row used for initialization
X = tail(X, nrow(X) - 1); Y = tail(Y, nrow(Y) - 1)

X_data = data.frame(X); Y_data = data.frame(Y)
```
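
A quick sanity check of the simulated data (not part of the original script): column X1 holds the group label and X2 the simulated values.

```r
# hypothetical check of the simulated data layout
table(X_data$X1)            # observations per group; should match n_vec
head(X_data); head(Y_data)  # X1 = group label, X2 = simulated value
```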


```r
#calculating initial parameter values
# per-group ordinary least-squares fits, used as starting values
alpha = c(); error = c()
beta = c(); sigma = c()

for (j in 1:class) {
  X_sub = X_data %>% filter(X1 == j) %>% select(-X1)
  Y_sub = Y_data %>% filter(X1 == j) %>% select(-X1)
  X_sub = as.matrix(X_sub); Y_sub = as.matrix(Y_sub)

  # simple-regression slope and intercept for group j
  b = sum((X_sub - mean(X_sub)) * (Y_sub - mean(Y_sub))) / sum((X_sub - mean(X_sub))^2)
  beta = c(beta, b)
  a = mean(Y_sub) - b * mean(X_sub)
  alpha = c(alpha, a)

  # residual variance for group j, divided by this group's size minus 2
  sigma = c(sigma, sum((Y_sub - (a + b * X_sub))^2) / (nrow(X_sub) - 2))
  error = c(error, sum((X_sub - mean(X_sub))^2))
}

sigma2 = sigma; beta2 = beta; alpha2 = alpha
```
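
As a cross-check (hypothetical, not in the original), the same starting values can be obtained from per-group `lm()` fits:

```r
# hypothetical cross-check: the starting values should match per-group lm() fits
for (j in 1:class) {
  dat = data.frame(x = X_data$X2[X_data$X1 == j],
                   y = Y_data$X2[Y_data$X1 == j])
  print(coef(lm(y ~ x, data = dat)))  # (Intercept) ~ alpha2[j], x ~ beta2[j]
}
```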


```r
#iterations1
# iterative updates: shrink each group's alpha, beta, sigma toward the across-group values
ite = 10000
k = 1

beta_hat = rep(5, class); alpha_hat = rep(1, class)
sigma_hat = rep(1, class)

alpha_error = c()
beta_error = c()
sigma_error = c()
#t_a_vec=c();t_b_vec=c()

for (i in 1:ite) {
  alpha_hat_pre = alpha; beta_hat_pre = beta; sigma_hat_pre = sigma

  for (j in 1:class) {
    X_sub = X_data %>% filter(X1 == j) %>% select(-X1)
    Y_sub = Y_data %>% filter(X1 == j) %>% select(-X1)
    X_sub = as.matrix(X_sub); Y_sub = as.matrix(Y_sub)

    # prior precisions for the intercepts and slopes
    lambda_a = 2; lambda_b = 2

    # slope update: weighted average of the group's slope (given the current intercept)
    # and the mean slope across groups
    b = ((sum(X_sub * Y_sub) - alpha[j] * sum(X_sub)) * (lambda_b + sum((beta - mean(beta))^2)) / sum(X_sub^2) + mean(beta) * class * sigma[j] / sum(X_sub^2))
    b = b / (lambda_b + sum((beta - mean(beta))^2) + class * sigma[j] / sum(X_sub^2))

    # intercept update: weighted average of the group's intercept (given the current slope)
    # and the mean intercept across groups
    a = ((mean(Y_sub) - beta[j] * mean(X_sub)) * (lambda_a + sum((alpha - mean(alpha))^2)) + mean(alpha) * class * sigma[j] / length(X_sub))
    a = a / ((lambda_a + sum((alpha - mean(alpha))^2)) + class * sigma[j] / length(X_sub))

    # variance update
    sig = ((sum((Y_sub - a - b * mean(X_sub))^2) / (length(X_sub) + 2)) * (mean(log(sigma)) - log(1 / (mean(1 / sigma) + k))) + (class + 1) / ((class * (length(Y_sub) + 2)) * (mean(1 / sigma) + k)))
    sig = sig / ((mean(log(sigma)) - log(1 / (mean(1 / sigma) + k))) + (class + 1) / (class * (length(Y_sub) + 2)))

    beta_hat[j] = b; alpha_hat[j] = a
    sigma_hat[j] = sig
  }

  alpha = alpha_hat; beta = beta_hat; sigma = sigma_hat

  #t_b=sum((beta2-mean(beta2))^2)/(class-1)-sum(sigma/error)/class
  #t_a=sum((alpha-mean(alpha))^2)/(class-1)
  #t_a_vec=c(t_a_vec,t_a);t_b_vec=c(t_b_vec,t_b)
  #print(paste0("sigma_hat:",sigma_hat))

  # total absolute change in the parameters at this iteration
  print(sum(abs(alpha - alpha_hat_pre) + abs(beta - beta_hat_pre) + abs(sigma - sigma_hat_pre)))
  alpha_error = c(alpha_error, sum(abs(alpha - alpha_hat_pre)))
  beta_error = c(beta_error, sum(abs(beta - beta_hat_pre)))
  sigma_error = c(sigma_error, sum(abs(sigma - sigma_hat_pre)))
}
```
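
To see whether the updates have converged, one could plot the per-iteration changes recorded above (a hypothetical check, not in the original):

```r
# hypothetical convergence check: per-iteration change in each parameter set
plot(alpha_error, type = "l", xlab = "iteration", ylab = "absolute change")
lines(beta_error, col = "red")
lines(sigma_error, col = "blue")
legend("topright", legend = c("alpha", "beta", "sigma"),
       col = c("black", "red", "blue"), lty = 1)
```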



```r
#iterations approximations
# closed-form approximation: shrink each group's OLS slope toward the mean slope
beta_hat = rep(1, class); alpha_hat = rep(1, class)
sigma_hat = rep(1, class)

for (j in 1:class) {
  X_sub = X_data %>% filter(X1 == j) %>% select(-X1)
  Y_sub = Y_data %>% filter(X1 == j) %>% select(-X1)
  X_sub = as.matrix(X_sub); Y_sub = as.matrix(Y_sub)

  # between-group variance of the initial OLS slopes
  t_b = sum((beta2 - mean(beta2))^2) / (class - 1) - sum(sigma2 / error) / class
  t_a = sum((alpha_hat - mean(alpha_hat))^2) / (class - 1)

  # precision-weighted average of the group slope and the across-group mean slope
  b = (beta2[j] * (1 - 1 / class) * t_b + mean(beta2) * sigma2[j] / error[j]) / ((1 - 1 / class) * t_b + sigma2[j] / error[j])

  beta_hat[j] = b; alpha_hat[j] = mean(Y_sub) - b * mean(X_sub)
  sigma_hat[j] = sum((Y_sub - alpha_hat[j] - b * mean(X_sub))^2) / (length(X_sub) + 2)
}
```
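
Finally, one might compare the per-group slopes produced by the three procedures (a hypothetical comparison, not in the original script):

```r
# hypothetical comparison of the per-group slope estimates
data.frame(group    = 1:class,
           ols      = beta2,      # initial least-squares estimates
           iterated = beta,       # result of the iterative updates
           approx   = beta_hat)   # closed-form shrinkage approximation
```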

