import numpy as np
import pandas as pd
import tensorflow as tf
# Exponential smoothing ETS(A,A,A)
def ets(history, params, period=12):
    """Run an additive Holt-Winters ETS(A,A,A) filter over `history`.

    Parameters
    ----------
    history : sequence of float
        Observations, one per time step.
    params : sequence of float
        [alpha, beta, gamma, level0, trend0, s_0 .. s_{period-2}].
        Only period-1 seasonal states are supplied; the last one is
        derived as -sum(...) so the seasonal states sum to zero.
    period : int, optional
        Seasonal cycle length (default 12, matching the original
        hard-coded monthly seasonality).

    Returns
    -------
    list of float
        One-step-ahead forecast errors, one per observation.
    """
    alpha, beta, gamma, level, trend = params[:5]
    # Copy the free seasonal states, then pin the final one so the
    # full seasonal vector sums to zero (standard identifiability constraint).
    seasonal = list(params[5:4 + period])
    seasonal.append(-sum(seasonal))
    errors = []
    for i, y in enumerate(history):
        s = i % period
        base = level + trend                 # deseasonalized forecast
        e = y - (base + seasonal[s])         # one-step-ahead error
        errors.append(e)
        # State update: each component absorbs its share of the error.
        level = base + alpha * e
        trend = trend + beta * e
        seasonal[s] += gamma * e
    return errors
# Exponential smoothing ETS(A,A,A) — TensorFlow graph version of ets()
def tfEts(history, n, params):
    """Build the ETS(A,A,A) one-step-error graph for `n` observations.

    history : rank-1 tensor/placeholder of observations.
    n       : Python int — number of filter steps to unroll into the graph.
    params  : rank-1 tensor [alpha, beta, gamma, level, trend, s0..s10];
              the twelfth seasonal state is pinned to -sum(s0..s10).

    Returns the stacked tensor of one-step-ahead forecast errors.
    """
    alpha, beta, gamma = params[0], params[1], params[2]
    level, trend = params[3], params[4]
    # Eleven free seasonal states; append the derived twelfth so the
    # seasonal components sum to zero.
    free_states = params[5:16]
    seasonal = tf.split(free_states, 11)
    seasonal.append(tf.negative(tf.reduce_sum(free_states)))
    errors = []
    for step in range(n):
        idx = step % 12
        base = level + trend
        e = history[step] - (base + seasonal[idx])
        errors.append(e)
        # Each state absorbs its weighted share of the error.
        level = base + alpha * e
        trend = trend + beta * e
        seasonal[idx] = seasonal[idx] + gamma * e
    return tf.stack(errors)
if __name__ == '__main__':
    # Three years of monthly passenger counts (labelled AirPassengers —
    # presumably a slice of the classic series; verify against the data source).
    AirPassengers = [
        340,318,362,348,363,435,491,505,404,359,310,337,
        360,342,406,396,420,472,548,559,463,407,362,405,
        417,391,419,461,472,535,622,606,508,461,390,432]
    history = np.array(AirPassengers)
    m1 = history.mean()
    m2 = history.std(ddof=1)
    # Standardize the series before fitting.
    y1 = (history - m1) / m2
    N = y1.size
    # Parameters obtained by ets (presumably an R forecast::ets fit — confirm);
    # order: alpha, beta, gamma, level, trend, 11 seasonal states.
    params = [0.6489863, 0, 0.0216017]
    params.extend([-0.4014818, 0])
    params.extend([
        -0.64847373, -0.89031436, -0.31948061, -0.21345777,
        -0.00713472, 0.71451547, 1.62174834, 1.60011895,
        0.30963273, -0.33910409, -1.11356078])
    error = ets(y1, params)
    sse = sum([e * e for e in error])
    print("sse=%f" % (sse / N))
    # 15 = number of free parameters used in the AIC penalty term.
    print("AIC=%f" % (N * np.log(sse) + 2 * 15))
    # Fit a least-squares regression line to initialize the parameters:
    # slope b -> initial trend, detrended values l -> level/seasonal starts.
    x = np.arange(y1.size)
    x = x - x.mean()
    b = sum(x * y1) / sum(x * x)
    l = y1 - b * x
    # Smoothing weights start at zero; level starts one step before x[0].
    params = [0.0, 0.0, 0.0]
    params.extend([(x[0] - 1) * b, b])
    params.extend([l[i] for i in range(11)])
    # TF1 graph: feed the standardized series, optimize the 16 parameters
    # by minimizing the mean squared one-step-ahead error.
    yt = tf.placeholder(tf.float32, shape=[N])
    pt = tf.Variable(params[0:16], dtype=tf.float32)
    err = tfEts(yt, N, pt)
    loss = tf.reduce_mean(tf.square(err))
    grad = tf.gradients(loss, pt)
    train_step = tf.train.AdamOptimizer(1e-3).minimize(loss)
    # Clip after each step: smoothing weights stay in [0,1],
    # level/trend/seasonal states are loosely bounded to [-100,100].
    pt_ = tf.clip_by_value(pt, [0]*3+[-100]*13, [1]*3+[100]*13)
    clip_step = tf.assign(pt, pt_)
    result = []
    CYCLE = 1000   # total optimization steps
    MD = 100       # logging/recording interval
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        for i in range(CYCLE):
            train_step.run(feed_dict={yt: y1})
            sess.run(clip_step)
            if (i+1) % MD == 0:
                # Snapshot loss, parameters, and gradients for the report.
                a,b,c = sess.run([loss,pt,grad], feed_dict={yt: y1})
                result.append(np.concatenate([[a],b,c[0]]))
                print("i=%d,loss=%f" % (i, a))
    # Column layout matches result rows: loss, 16 parameters, 16 gradients.
    names = ['loss']
    names.extend(["alpha","beta","gamma","level","trend"])
    names.extend(['s'+str(i) for i in range(11)])
    names.extend(["alpha","beta","gamma","level","trend"])
    names.extend(['d'+str(i) for i in range(11)])
    df = pd.DataFrame(result, columns=names)
    df.to_csv("result.txt", sep="\t", index=False, encoding="shift_jis")
    # a/b/c hold the values from the final recorded snapshot.
    print("%s, %f" % (names[0], a))
    for i in range(16):
        print("%s, %f, %f" % (names[i+1], b[i], c[0][i]))
    # Re-evaluate the fitted parameters with the NumPy ets() implementation.
    params = list(b)
    error = ets(y1, params)
    sse = sum([e * e for e in error])
    print("sse=%f" % (sse / N))