Radial Basis Function Regression
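Every section below fits the same radial-basis-function regression to data generated as y = cos(x²); only the kernel that defines the basis functions changes. The code places basis-function centers μ on a grid, builds a design matrix Φ from kernel values (plus an intercept column), solves for the weights by ordinary least squares, and then draws sample paths from a zero-mean multivariate normal whose covariance is the Gram matrix ΦΦᵀ. The final section switches to Gaussian process regression on a small multivariate data set.

```math
\Phi_{ji} = \phi_i(x_j),\qquad
\hat{w} = (\Phi^{\top}\Phi)^{-1}\Phi^{\top}y,\qquad
\hat{y} = \Phi\hat{w},\qquad
Y \sim \mathcal{N}\!\left(0,\ \Phi\Phi^{\top}\right)
```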


# Gaussian kernel
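The basis function used in this section is the Gaussian kernel with bandwidth 1, as defined in the code below:

```math
k(x, u) = \exp\!\left(-\frac{(x-u)^2}{2\cdot 1^2}\right)
```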

library(dplyr)
library(mvtnorm)

# Gaussian kernel with bandwidth 1
kernel = function(x, u){
  return(exp(-(x - u)^2 / (2 * 1^2)))
}

# data: y = cos(x^2) on an equally spaced grid
h_n = 0.1
val1 = -3; val2 = 8; x = seq(val1, val2, h_n)
kernel_data = data.frame(x = seq(val1, val2, h_n), K_h_n = 0, F_h_n = 0, m_h = 0,
                         y = cos(seq(val1, val2, h_n)^2))
ave_y = mean(kernel_data$y)
#kernel_data$y = kernel_data$y - mean(kernel_data$y)
n = nrow(kernel_data)

# centers of the basis functions
mu = seq(val1, val2, 0.7)

# design matrix Phi: the first column is the intercept basis, the rest are kernel values
pthi = array(0, dim = c(n, length(mu)))
for(j in 1:nrow(pthi)){
  for(i in 1:ncol(pthi)){
    if(i == 1){
      pthi[j, i] = 1
    }else{
      pthi[j, i] = kernel(kernel_data$x[j], mu[i])
    }
  }
}

# least-squares weights via the normal equations, and the fitted values
W = solve(t(pthi) %*% pthi) %*% t(pthi) %*% kernel_data$y
predict = pthi %*% W

# Gram matrix, used below as the covariance of a zero-mean Gaussian
K = pthi %*% t(pthi)

# fitted values (red) and observations (green)
plot(c(1:length(predict)), predict, col = 2, type = "o",
     ylim = c(min(predict, kernel_data$y), max(predict, kernel_data$y)),
     xlab = "index", ylab = "values")
par(new = T)
plot(c(1:nrow(kernel_data)), kernel_data$y, col = 3, type = "o",
     ylim = c(min(predict, kernel_data$y), max(predict, kernel_data$y)),
     xlab = "index", ylab = "values")

# draw 10 sample paths from N(0, K) and overlay them
times = 10
Y_mat = array(0, dim = c(times, nrow(K)))
for(j in 1:times){
  Y = c(rmvnorm(1, mean = rep(0, nrow(K)), sigma = K))
  Y_mat[j, ] = Y
}
for(j in 1:times){
  plot(c(1:ncol(Y_mat)), Y_mat[j, ], type = "p", col = j,
       ylim = c(min(Y_mat), max(Y_mat)), xlab = "index", ylab = "values")
  if(j != times){
    par(new = T)
  }
}

# the observations on their own
plot(c(1:ncol(Y_mat)), kernel_data$y, col = 1, type = "p")
y = kernel_data$y
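The four sections that follow repeat exactly this procedure with a different kernel substituted. As a minimal sketch (not part of the original code; the name `fit_rbf` and its arguments are introduced here purely for illustration), the fit could also be wrapped in a helper so that only the kernel changes:

```r
# minimal sketch: refit the model above for an arbitrary kernel function
fit_rbf = function(kernel, x, y, mu){
  n = length(x)
  Phi = matrix(0, n, length(mu))
  Phi[, 1] = 1                                   # intercept basis
  for(i in 2:length(mu)) Phi[, i] = kernel(x, mu[i])
  W = solve(t(Phi) %*% Phi) %*% t(Phi) %*% y     # normal equations
  list(W = W, fitted = Phi %*% W, K = Phi %*% t(Phi))
}

# example: reproduce the Gaussian-kernel fit above
fit = fit_rbf(function(x, u) exp(-(x - u)^2 / 2), kernel_data$x, kernel_data$y, mu)
```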


# Exponential kernel
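This section uses the exponential (Laplacian) kernel defined in the code below:

```math
k(x, u) = \exp\!\left(-\frac{|x-u|}{2}\right)
```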

library(dplyr)
library(mvtnorm)

# exponential (Laplacian) kernel
kernel = function(x, u){
  return(exp(-abs(x - u) / 2))
}

# data: y = cos(x^2) on an equally spaced grid
h_n = 0.1
val1 = -3; val2 = 8; x = seq(val1, val2, h_n)
kernel_data = data.frame(x = seq(val1, val2, h_n), K_h_n = 0, F_h_n = 0, m_h = 0,
                         y = cos(seq(val1, val2, h_n)^2))
ave_y = mean(kernel_data$y)
#kernel_data$y = kernel_data$y - mean(kernel_data$y)
n = nrow(kernel_data)

# centers of the basis functions
mu = seq(val1, val2, 0.7)

# design matrix Phi: the first column is the intercept basis, the rest are kernel values
pthi = array(0, dim = c(n, length(mu)))
for(j in 1:nrow(pthi)){
  for(i in 1:ncol(pthi)){
    if(i == 1){
      pthi[j, i] = 1
    }else{
      pthi[j, i] = kernel(kernel_data$x[j], mu[i])
    }
  }
}

# least-squares weights via the normal equations, and the fitted values
W = solve(t(pthi) %*% pthi) %*% t(pthi) %*% kernel_data$y
predict = pthi %*% W

# Gram matrix, used below as the covariance of a zero-mean Gaussian
K = pthi %*% t(pthi)

# fitted values (red) and observations (green)
plot(c(1:length(predict)), predict, col = 2, type = "o",
     ylim = c(min(predict, kernel_data$y), max(predict, kernel_data$y)),
     xlab = "index", ylab = "values")
par(new = T)
plot(c(1:nrow(kernel_data)), kernel_data$y, col = 3, type = "o",
     ylim = c(min(predict, kernel_data$y), max(predict, kernel_data$y)),
     xlab = "index", ylab = "values")

# draw 10 sample paths from N(0, K) and overlay them
times = 10
Y_mat = array(0, dim = c(times, nrow(K)))
for(j in 1:times){
  Y = c(rmvnorm(1, mean = rep(0, nrow(K)), sigma = K))
  Y_mat[j, ] = Y
}
for(j in 1:times){
  plot(c(1:ncol(Y_mat)), Y_mat[j, ], type = "p", col = j,
       ylim = c(min(Y_mat), max(Y_mat)), xlab = "index", ylab = "values")
  if(j != times){
    par(new = T)
  }
}

# the observations on their own
plot(c(1:ncol(Y_mat)), kernel_data$y, col = 1, type = "p")
y = kernel_data$y


# Periodic kernel
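This section uses a periodic kernel, as defined in the code below:

```math
k(x, u) = \exp\!\left(2\cos\frac{|x-u|}{2}\right)
```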

library(dplyr)
library(mvtnorm)

# periodic kernel
kernel = function(x, u){
  return(exp(2 * cos(abs(x - u) / 2)))
}

# data: y = cos(x^2) on an equally spaced grid
h_n = 0.1
val1 = -3; val2 = 8; x = seq(val1, val2, h_n)
kernel_data = data.frame(x = seq(val1, val2, h_n), K_h_n = 0, F_h_n = 0, m_h = 0,
                         y = cos(seq(val1, val2, h_n)^2))
ave_y = mean(kernel_data$y)
#kernel_data$y = kernel_data$y - mean(kernel_data$y)
n = nrow(kernel_data)

# centers of the basis functions
mu = seq(val1, val2, 0.7)

# design matrix Phi: the first column is the intercept basis, the rest are kernel values
pthi = array(0, dim = c(n, length(mu)))
for(j in 1:nrow(pthi)){
  for(i in 1:ncol(pthi)){
    if(i == 1){
      pthi[j, i] = 1
    }else{
      pthi[j, i] = kernel(kernel_data$x[j], mu[i])
    }
  }
}

# least-squares weights via the normal equations, and the fitted values
W = solve(t(pthi) %*% pthi) %*% t(pthi) %*% kernel_data$y
predict = pthi %*% W

# Gram matrix, used below as the covariance of a zero-mean Gaussian
K = pthi %*% t(pthi)

# fitted values (red) and observations (green)
plot(c(1:length(predict)), predict, col = 2, type = "o",
     ylim = c(min(predict, kernel_data$y), max(predict, kernel_data$y)),
     xlab = "index", ylab = "values")
par(new = T)
plot(c(1:nrow(kernel_data)), kernel_data$y, col = 3, type = "o",
     ylim = c(min(predict, kernel_data$y), max(predict, kernel_data$y)),
     xlab = "index", ylab = "values")

# draw 10 sample paths from N(0, K) and overlay them
times = 10
Y_mat = array(0, dim = c(times, nrow(K)))
for(j in 1:times){
  Y = c(rmvnorm(1, mean = rep(0, nrow(K)), sigma = K))
  Y_mat[j, ] = Y
}
for(j in 1:times){
  plot(c(1:ncol(Y_mat)), Y_mat[j, ], type = "p", col = j,
       ylim = c(min(Y_mat), max(Y_mat)), xlab = "index", ylab = "values")
  if(j != times){
    par(new = T)
  }
}

# the observations on their own
plot(c(1:ncol(Y_mat)), kernel_data$y, col = 1, type = "p")
y = kernel_data$y


# RBF kernel
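This section uses a scaled RBF (Gaussian) kernel, as defined in the code below:

```math
k(x, u) = 2\exp\!\left(-\frac{|x-u|^2}{3}\right)
```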

library(dplyr)
library(mvtnorm)

# RBF (scaled Gaussian) kernel
kernel = function(x, u){
  return(2 * exp(-abs(x - u)^2 / 3))
}

# data: y = cos(x^2) on an equally spaced grid
h_n = 0.1
val1 = -3; val2 = 8; x = seq(val1, val2, h_n)
kernel_data = data.frame(x = seq(val1, val2, h_n), K_h_n = 0, F_h_n = 0, m_h = 0,
                         y = cos(seq(val1, val2, h_n)^2))
ave_y = mean(kernel_data$y)
#kernel_data$y = kernel_data$y - mean(kernel_data$y)
n = nrow(kernel_data)

# centers of the basis functions
mu = seq(val1, val2, 0.7)

# design matrix Phi: the first column is the intercept basis, the rest are kernel values
pthi = array(0, dim = c(n, length(mu)))
for(j in 1:nrow(pthi)){
  for(i in 1:ncol(pthi)){
    if(i == 1){
      pthi[j, i] = 1
    }else{
      pthi[j, i] = kernel(kernel_data$x[j], mu[i])
    }
  }
}

# least-squares weights via the normal equations, and the fitted values
W = solve(t(pthi) %*% pthi) %*% t(pthi) %*% kernel_data$y
predict = pthi %*% W

# Gram matrix, used below as the covariance of a zero-mean Gaussian
K = pthi %*% t(pthi)

# fitted values (red) and observations (green)
plot(c(1:length(predict)), predict, col = 2, type = "o",
     ylim = c(min(predict, kernel_data$y), max(predict, kernel_data$y)),
     xlab = "index", ylab = "values")
par(new = T)
plot(c(1:nrow(kernel_data)), kernel_data$y, col = 3, type = "o",
     ylim = c(min(predict, kernel_data$y), max(predict, kernel_data$y)),
     xlab = "index", ylab = "values")

# draw 10 sample paths from N(0, K) and overlay them
times = 10
Y_mat = array(0, dim = c(times, nrow(K)))
for(j in 1:times){
  Y = c(rmvnorm(1, mean = rep(0, nrow(K)), sigma = K))
  Y_mat[j, ] = Y
}
for(j in 1:times){
  plot(c(1:ncol(Y_mat)), Y_mat[j, ], type = "p", col = j,
       ylim = c(min(Y_mat), max(Y_mat)), xlab = "index", ylab = "values")
  if(j != times){
    par(new = T)
  }
}

# the observations on their own
plot(c(1:ncol(Y_mat)), kernel_data$y, col = 1, type = "p")
y = kernel_data$y


# Matérn kernel
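This section uses the Matérn kernel with length-scale θ = 1 and smoothness ν = 1/2, for which the general form below reduces to the exponential kernel exp(-r/θ):

```math
k(x, u) = \frac{2^{1-\nu}}{\Gamma(\nu)}\left(\frac{\sqrt{2\nu}\,r}{\theta}\right)^{\nu}
K_{\nu}\!\left(\frac{\sqrt{2\nu}\,r}{\theta}\right),\qquad r = |x - u|
```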

library(dplyr)
library(mvtnorm)

# Matern kernel; sita is the length-scale theta, nu the smoothness
kernel = function(x, u){
  r = abs(x - u); sita = 1; nu = 1/2
  return((2^(1 - nu)) * ((sqrt(2 * nu) * r / sita)^nu) *
           besselK(sqrt(2 * nu) * r / sita, nu, expon.scaled = F) / gamma(nu))
}

# data: y = cos(x^2) on a coarser grid than the previous sections
h_n = 0.5
val1 = -3; val2 = 8; x = seq(val1, val2, h_n)
kernel_data = data.frame(x = seq(val1, val2, h_n), K_h_n = 0, F_h_n = 0, m_h = 0,
                         y = cos(seq(val1, val2, h_n)^2))
ave_y = mean(kernel_data$y)
#kernel_data$y = kernel_data$y - mean(kernel_data$y)
n = nrow(kernel_data)

# centers of the basis functions
mu = seq(val1, val2, 0.7)

# design matrix Phi: the first column is the intercept basis, the rest are kernel values
pthi = array(0, dim = c(n, length(mu)))
for(j in 1:nrow(pthi)){
  for(i in 1:ncol(pthi)){
    if(i == 1){
      pthi[j, i] = 1
    }else{
      pthi[j, i] = kernel(kernel_data$x[j], mu[i])
    }
  }
}

# besselK overflows at r = 0 and produces NaN; the kernel value there is 1
pthi[is.nan(pthi)] = 1

# least-squares weights via the normal equations, and the fitted values
W = solve(t(pthi) %*% pthi) %*% t(pthi) %*% kernel_data$y
predict = pthi %*% W

# Gram matrix, used below as the covariance of a zero-mean Gaussian
K = pthi %*% t(pthi)

# fitted values (red) and observations (green)
plot(c(1:length(predict)), predict, col = 2, type = "o",
     ylim = c(min(predict, kernel_data$y), max(predict, kernel_data$y)),
     xlab = "index", ylab = "values")
par(new = T)
plot(c(1:nrow(kernel_data)), kernel_data$y, col = 3, type = "o",
     ylim = c(min(predict, kernel_data$y), max(predict, kernel_data$y)),
     xlab = "index", ylab = "values")

# draw 10 sample paths from N(0, K) and overlay them
times = 10
Y_mat = array(0, dim = c(times, nrow(K)))
for(j in 1:times){
  Y = c(rmvnorm(1, mean = rep(0, nrow(K)), sigma = K))
  Y_mat[j, ] = Y
}
for(j in 1:times){
  plot(c(1:ncol(Y_mat)), Y_mat[j, ], type = "p", col = j,
       ylim = c(min(Y_mat), max(Y_mat)), xlab = "index", ylab = "values")
  if(j != times){
    par(new = T)
  }
}

# the observations on their own
plot(c(1:ncol(Y_mat)), kernel_data$y, col = 1, type = "p")
y = kernel_data$y

# correlation between the observations and the fitted values
cor(y, predict)

# Gaussian process regression
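This section implements Gaussian process regression directly: the kernel matrix K is built on the training inputs, and the predictive mean and variance at a test input x* are

```math
\mu_* = k_*^{\top} K^{-1} y,\qquad
\sigma_*^2 = k(x_*, x_*) - k_*^{\top} K^{-1} k_*
```

where k* is the vector of kernel values between x* and the training inputs. The first 15 of the 20 observations are used for training and the last 5 for testing.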

library(dplyr)

# squared-exponential kernel on (possibly multivariate) inputs
kernel = function(x, u){
  return(exp(-sum(abs(x - u)^2) / 10))
}

kernel_data = data.frame(num = 1:20,
                         y = c(0.20, 0.10, 0.49, 0.26, 0.92, 0.95, 0.18, 0.19, 0.44, 0.79,
                               0.61, 0.41, 0.49, 0.34, 0.62, 0.99, 0.38, 0.22, 0.71, 0.40),
                         x1 = c(84, 87, 86, 85, 82, 87, 92, 94, 88, 84.9,
                                78, 90, 88, 87, 82, 84, 86, 83, 85.2, 82),
                         x2 = c(61, 55.5, 57, 57, 50, 50, 66.5, 65, 60.5, 49.5,
                                49.5, 61, 59.5, 58.4, 53.5, 54, 60, 58.8, 54, 56),
                         x3 = c(24.5, 24, 23, 22.5, 22, 24, 25, 26, 25, 24,
                                23, 21, 26, 25.5, 24, 23, 24, 24, 24, 22))

# use x1 and x2 as inputs (x3 is left unused)
x = as.matrix(kernel_data[, colnames(kernel_data) %in% c("x1", "x2")]); y = kernel_data$y

N = length(y)
train_N = round(N * 3/4)   # first 15 points for training
test_N = N - train_N       # last 5 points for testing

# kernel (Gram) matrix on the training inputs
K_mat = array(0, dim = c(train_N, train_N))
for(j in 1:train_N){
  for(i in 1:train_N){
    K_mat[i, j] = kernel(x[i, ], x[j, ])
  }
}

# K^{-1} y, reused for every test point
yy = solve(K_mat) %*% y[1:train_N]

mu = c()
var = c()
for(i in 1:test_N){
  # kernel values between the test point and the training points
  k_vec = c()
  for(j in 1:train_N){
    k_vec = c(k_vec, kernel(x[j, ], x[(train_N + i), ]))
  }
  s = kernel(x[(train_N + i), ], x[(train_N + i), ])
  # posterior mean and variance of the GP at the test point
  mu = c(mu, sum(k_vec * yy))
  var = c(var, s - k_vec %*% solve(K_mat) %*% k_vec)
}

# confidence interval: mean +/- one standard deviation (red) vs observed test values (green)
plot(c(1:length(mu)), mu + sqrt(var), col = 2, type = "o",
     ylim = c(min(mu - 2 * sqrt(var)), max(mu + 2 * sqrt(var))), ylab = "values")
par(new = T)
plot(c(1:length(mu)), mu - sqrt(var), col = 2, type = "o",
     ylim = c(min(mu - 2 * sqrt(var)), max(mu + 2 * sqrt(var))), ylab = "values")
par(new = T)
plot(c(1:length(y[(train_N + 1):length(y)])), y[(train_N + 1):length(y)], col = 3, type = "o",
     ylim = c(min(mu - 2 * sqrt(var)), max(mu + 2 * sqrt(var))), ylab = "values",
     main = "confidence interval")

# predicted mean (red) vs observed test values (green)
plot(c(1:length(mu)), mu, col = 2, type = "o",
     ylim = c(min(mu - 2 * sqrt(var)), max(mu + 2 * sqrt(var))), ylab = "values")
par(new = T)
plot(c(1:length(y[(train_N + 1):length(y)])), y[(train_N + 1):length(y)], col = 3, type = "o",
     ylim = c(min(mu - 2 * sqrt(var)), max(mu + 2 * sqrt(var))), ylab = "values",
     main = "predict")
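The kernel matrix above is inverted without any noise term, which works here but can become numerically fragile. A common variant is to add a small observation-noise (jitter) term to the diagonal before inverting; the following is a minimal sketch under that assumption, with `sigma_noise` chosen purely for illustration:

```r
# add observation noise to the kernel matrix before inverting (illustrative value)
sigma_noise = 0.1
K_noisy = K_mat + diag(sigma_noise^2, train_N)
yy_noisy = solve(K_noisy) %*% y[1:train_N]

# predictive means at the test points using the noisy kernel matrix
mu_noisy = sapply(1:test_N, function(i){
  k_vec = sapply(1:train_N, function(j) kernel(x[j, ], x[(train_N + i), ]))
  sum(k_vec * yy_noisy)
})
```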


