概要
KerasのbackendでXOR問題をやってみた。
実行結果
0 [ 0.99838728]
100 [ 0.99522972]
200 [ 0.86496431]
300 [ 0.81149882]
400 [ 0.75091565]
500 [ 0.67710733]
600 [ 0.60705864]
700 [ 0.54741299]
800 [ 0.49332687]
900 [ 0.43740678]
1000 [ 0.37347558]
1100 [ 0.29807252]
1200 [ 0.21222651]
1300 [ 0.12819625]
1400 [ 0.06629251]
1500 [ 0.03200632]
1600 [ 0.01577413]
1700 [ 0.00830716]
1800 [ 0.00472095]
1900 [ 0.00288095]
[0.0, 0.0] [[ 0.0213472]]
[0.0, 1.0] [[ 0.8747952]]
[1.0, 0.0] [[ 0.8718797]]
[1.0, 1.0] [[ 0.03415748]]
サンプルコード
# Learn XOR with the raw Keras backend API (TF 1.x contrib path):
# a 2-8-1 tanh MLP trained sample-by-sample with plain SGD.
# NOTE(review): `tensorflow.contrib` was removed in TF >= 2.0 (and this exact
# module path changed during the 1.x series) — confirm the intended TF version.
from tensorflow.contrib.keras.python.keras import backend as K
from tensorflow.contrib.keras.python.keras.optimizers import SGD
# The four XOR training pairs: dx holds the inputs, dy the target outputs.
dx, dy = [], []
dx.append([0.0, 0.0])
dx.append([0.0, 1.0])
dx.append([1.0, 0.0])
dx.append([1.0, 1.0])
dy.append(0.0)
dy.append(1.0)
dy.append(1.0)
dy.append(0.0)
# Network dimensions: 2 inputs -> 8 hidden units -> 1 output.
input_dim = 2
output_dim = 1
hidden_dim = 8
# Symbolic placeholders for a batch of inputs and the matching true labels.
x = K.placeholder(shape = (None, input_dim), name = "x")
ytrue = K.placeholder(shape = (None, output_dim), name = "y")
# Weights and biases, all initialized uniformly in [0, 1).
W1 = K.random_uniform_variable((input_dim, hidden_dim), 0, 1, name = "W1")
W2 = K.random_uniform_variable((hidden_dim, output_dim), 0, 1, name = "W2")
b1 = K.random_uniform_variable((hidden_dim, ), 0, 1, name = "b1")
b2 = K.random_uniform_variable((output_dim, ), 0, 1, name = "b2")
params = [W1, b1, W2, b2]
# Forward pass: tanh hidden layer followed by a tanh output layer.
hidden = K.tanh(K.dot(x, W1) + b1)
ypred = K.tanh(K.dot(hidden, W2) + b2)
# Squared error, averaged over the last axis (per-sample MSE).
loss = K.mean(K.square(ypred - ytrue), axis = -1)
opt = SGD()
# NOTE(review): this argument order (params, constraints, loss) matches the
# old contrib-Keras signature; newer Keras uses get_updates(loss, params).
updates = opt.get_updates(params, [], loss)
# Compiled backend functions: one SGD training step, and inference only.
train = K.function(inputs = [x, ytrue], outputs = [loss], updates = updates)
test = K.function(inputs = [x], outputs = [ypred])
# Train on each of the four samples in turn for 2000 epochs; every 100
# epochs print the loss of the last sample seen (matches the output above).
for ep in range(2000):
    for i in range(4):
        st = train([[dx[i]], [[dy[i]]]])
    if ep % 100 == 0:
        print (ep, st[0])
# Evaluate the trained network on all four XOR inputs.
for i in range(4):
    st = test([[dx[i]]])
    print (dx[i], st[0])
以上。