概要
wsl(wsl2じゃない)で、elixirやってみた。
練習問題やってみた。
練習問題
AxonでXORを求めよ。
setup
# Install dependencies for this Livebook session and make EXLA the
# default Nx backend. NOTE: the original snippet was missing the
# closing `)` of Mix.install/2, which is a syntax error as pasted.
Mix.install(
  [
    {:axon, "~> 0.2.0"},
    {:exla, "~> 0.3.0"},
    {:nx, "~> 0.3.0"},
    {:kino, "~> 0.10.0"}
  ],
  config: [
    nx: [default_backend: EXLA.Backend]
  ]
)
サンプルコード
# Two scalar inputs, one per XOR operand (batch dimension left open).
input_a = Axon.input("x1", shape: {nil, 1})
input_b = Axon.input("x2", shape: {nil, 1})

# Concatenate both operands and feed them through a small MLP:
# 8 tanh units, then a single sigmoid output in [0, 1].
model =
  Axon.concatenate(input_a, input_b)
  |> Axon.dense(8, activation: :tanh)
  |> Axon.dense(1, activation: :sigmoid)

batch_size = 32

# Infinite stream of random training batches. Integer bounds (0, 2)
# make Nx.random_uniform produce 0/1 values; the label is their XOR.
data =
  Stream.repeatedly(fn ->
    a = Nx.random_uniform({batch_size, 1}, 0, 2)
    b = Nx.random_uniform({batch_size, 1}, 0, 2)
    labels = Nx.logical_xor(a, b)
    {%{"x1" => a, "x2" => b}, labels}
  end)

epochs = 10

# Train with binary cross-entropy and plain SGD; 1000 batches per epoch.
trainer = Axon.Loop.trainer(model, :binary_cross_entropy, :sgd)
params = Axon.Loop.run(trainer, data, %{}, epochs: epochs, iterations: 1000)
実行結果
Epoch: 0, Batch: 1000, loss: 0.6645805
Epoch: 1, Batch: 1000, loss: 0.6334236
Epoch: 2, Batch: 1000, loss: 0.5894425
Epoch: 3, Batch: 1000, loss: 0.5315313
Epoch: 4, Batch: 1000, loss: 0.4695869
Epoch: 5, Batch: 1000, loss: 0.4142583
Epoch: 6, Batch: 1000, loss: 0.3682923
Epoch: 7, Batch: 1000, loss: 0.3305883
Epoch: 8, Batch: 1000, loss: 0.2995510
Epoch: 9, Batch: 1000, loss: 0.2737042
# XOR(0, 1) — the trained model should output a value close to 1.
inputs = %{"x1" => Nx.tensor([[0]]), "x2" => Nx.tensor([[1]])}
Axon.predict(model, params, inputs)
Evaluated
#Nx.Tensor<
f32[1][1]
EXLA.Backend<host:0, 0.204904100.2404515847.97079>
[
[0.9617963433265686]
]
>
# XOR(1, 0) — the trained model should output a value close to 1.
inputs = %{"x1" => Nx.tensor([[1]]), "x2" => Nx.tensor([[0]])}
Axon.predict(model, params, inputs)
Evaluated
#Nx.Tensor<
f32[1][1]
EXLA.Backend<host:0, 0.204904100.2404515847.97088>
[
[0.9612305164337158]
]
>
# XOR(1, 1) — the trained model should output a value close to 0.
inputs = %{"x1" => Nx.tensor([[1]]), "x2" => Nx.tensor([[1]])}
Axon.predict(model, params, inputs)
Evaluated
#Nx.Tensor<
f32[1][1]
EXLA.Backend<host:0, 0.204904100.2404515847.97097>
[
[0.05178850144147873]
]
>
# XOR(0, 0) — the trained model should output a value close to 0.
inputs = %{"x1" => Nx.tensor([[0]]), "x2" => Nx.tensor([[0]])}
Axon.predict(model, params, inputs)
Evaluated
#Nx.Tensor<
f32[1][1]
EXLA.Backend<host:0, 0.204904100.2404515847.97106>
[
[0.01657288148999214]
]
>
以上。