0
0

Delete article

Deleted articles cannot be recovered.

The draft of this article will also be deleted.

Are you sure you want to delete this article?

Pythonで学ぶ線形代数学

Posted at

【コード:最小二乗法を NN で学習 + matplotlib 可視化】

import numpy as np
import matplotlib.pyplot as plt

# -----------------------------
# 1. Data
# -----------------------------
x = np.array([1, 2, 3, 4, 5], dtype=float)
y = np.array([2, 4, 5, 4, 5], dtype=float)
n = len(x)

# -----------------------------
# 2. NN parameters (random initial slope / intercept)
# -----------------------------
a = np.random.randn()
b = np.random.randn()

lr = 0.01               # learning rate
epochs = 2000           # number of training iterations

a_list = []             # parameter history (for visualization)
b_list = []
loss_list = []

# -----------------------------
# 3. Training loop (gradient descent)
# -----------------------------
for step in range(epochs):
    residual = (a * x + b) - y          # per-sample prediction error
    mse = np.mean(residual ** 2)        # MSE loss of the PRE-update parameters

    # Analytic gradients of the MSE with respect to a and b.
    grad_a = 2 * np.mean(residual * x)
    grad_b = 2 * np.mean(residual)

    # Plain gradient-descent step.
    a = a - lr * grad_a
    b = b - lr * grad_b

    # Record post-update parameters together with the pre-update loss.
    a_list.append(a)
    b_list.append(b)
    loss_list.append(mse)

# -----------------------------
# 4. Scatter plot and fitted regression line
# -----------------------------
fig, (ax_fit, ax_loss, ax_params) = plt.subplots(1, 3, figsize=(14, 4))

ax_fit.scatter(x, y, label="Data")
ax_fit.plot(x, a * x + b, color="red", label="Fitted Line")
ax_fit.set_xlabel("x")
ax_fit.set_ylabel("y")
ax_fit.set_title("Linear Regression by Gradient Descent")
ax_fit.legend()
ax_fit.grid(True)

# -----------------------------
# 5. Loss curve (descending into the valley of the paraboloid)
# -----------------------------
ax_loss.plot(loss_list)
ax_loss.set_xlabel("Epoch")
ax_loss.set_ylabel("MSE Loss")
ax_loss.set_title("Loss Convergence")
ax_loss.grid(True)

# -----------------------------
# 6. Convergence of the parameters (a, b)
# -----------------------------
ax_params.plot(a_list, label="a")
ax_params.plot(b_list, label="b")
ax_params.set_xlabel("Epoch")
ax_params.set_ylabel("Parameter Value")
ax_params.set_title("Parameter Convergence")
ax_params.legend()
ax_params.grid(True)

fig.tight_layout()
plt.show()

print("最終パラメータ:")
print("a =", a)
print("b =", b)
import numpy as np

print("===== 1層目:基本の線形変換(y = W @ x + b) =====")

# -----------------------------
# 1. Input vector (3-dimensional)
# -----------------------------
x = np.array([1.0, 2.0, 3.0])

# -----------------------------
# 2. Weight matrix (2 x 3)
# -----------------------------
W = np.array([[0.2, -0.1, 0.5],
              [1.0,  0.3, 0.2]])

# -----------------------------
# 3. Bias vector (2-dimensional)
# -----------------------------
b = np.array([0.5, -0.2])

# -----------------------------
# 4. Affine transformation: y1 = W x + b
# -----------------------------
y1 = W @ x + b

print("入力 x =", x)
print("重み W =\n", W)
print("バイアス b =", b)
print("出力 y1 =", y1)


print("\n===== 2層ニューラルネット(ReLUを使用) =====")

# -----------------------------
# 5. First layer (W1, b1)
# -----------------------------
W1 = np.array([[0.2, -0.1, 0.5],
               [1.0,  0.3, 0.2]])
b1 = np.array([0.1, -0.3])


def relu(v):
    """Element-wise rectified linear unit: max(0, v)."""
    return np.maximum(0, v)


# First-layer forward pass: affine transform followed by ReLU.
z1 = W1 @ x + b1
h1 = relu(z1)

print("1層目の線形変換 z1 =", z1)
print("1層目の活性化 h1 =", h1)

# -----------------------------
# 6. Second layer (W2, b2)
# -----------------------------
W2 = np.array([[0.5, -0.4],
               [0.7,  1.0]])
b2 = np.array([0.2, 0.1])

# Second-layer forward pass -> network output.
y2 = W2 @ h1 + b2

print("2層目の出力 y2 =", y2)

print("\n=== 完了:行列ベースNN(2層) ===")
0
0
0

Register as a new user and use Qiita more conveniently

  1. You get articles that match your needs
  2. You can efficiently read back useful information
  3. You can use dark theme
What you can do by signing up
0
0

Delete article

Deleted articles cannot be recovered.

The draft of this article will also be deleted.

Are you sure you want to delete this article?