The time I made a CPU do machine learning

Posted at 2019-11-22

#Introduction
*Note: this article may contain incorrect information.*
This time I built a neural network in Rust and trained it without a GPU, just to see what would happen.

Why the CPU? Simply because I didn't know how to do GPU programming, and I was purely curious what would happen using only the CPU. There is no deeper meaning to it.

Also, this is my first post on Qiita.

#The neural network
This time I built a neural network with 4 neurons in the input layer, 3 in the hidden layer, and 4 in the output layer.
The sigmoid function is used as the activation function.
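
For reference, the sigmoid is σ(x) = 1 / (1 + e^(-x)). As a standalone Rust helper (not part of the article's code) it can be written with `f64::exp`:

```rust
// Reference implementation of the sigmoid activation used throughout the code below.
fn sigmoid(x: f64) -> f64 {
    1.0 / (1.0 + (-x).exp())
}
```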

```rust:main.rs
// Struct holding the network's state: the input number, weights, biases, and the teacher signal
struct NeuronAndWeight {
	number: i32,
	I2HWeight: [[f64;4];3],
	H2OWeight: [[f64;3];4],
	InputBias: [f64;4],
    HiddenBias: [f64;3],
    OutputBias: [f64;4],
    TeacherSignal: [f64;4]
}
// Constructor that fills in all of the values above
impl NeuronAndWeight {
   fn new(number: i32, I2HWeight: [[f64;4];3], H2OWeight: [[f64;3];4], InputBias: [f64;4], HiddenBias: [f64;3], OutputBias: [f64;4], TeacherSignal: [f64;4]) -> NeuronAndWeight {
        NeuronAndWeight{
        	number: number,
        	I2HWeight: I2HWeight,
        	H2OWeight: H2OWeight,
        	InputBias: InputBias,
            HiddenBias: HiddenBias,
            OutputBias: OutputBias,
            TeacherSignal: TeacherSignal
        }
	}
    // Input layer: converts `number` into its binary digits (least significant bit first),
    // adds the input bias, and applies the sigmoid to each element
	fn inputlayer(&mut self) -> [f64;4] {
		let mut InsideOfInput: [f64;4] =  [0.0;4];
        let neipier: f64 = std::f64::consts::E; // Euler's number e
        let mut x = 0;
        let mut Output: [f64;4] = [0.0;4];
        loop{
            if self.number == 1{
                InsideOfInput[x] = 1.0;
                x = 0;
                break
            }else if self.number % 2 == 0{
                InsideOfInput[x] = 0.0;
                self.number /= 2;
                x += 1;
            }else if self.number % 2 == 1{
                InsideOfInput[x] = 1.0;
                self.number /= 2;
                x += 1;
            }
        }
        for i in 0..4{
            InsideOfInput[i] = InsideOfInput[i] + self.InputBias[i];
        }
        for i in 0..4{
            Output[i] = 1.0 / (1.0 + neipier.powf(InsideOfInput[i] * -1.0));
        }
        Output
	}
    // Hidden layer: weighted sum of the input-layer outputs, plus bias, through the sigmoid
	fn Hiddenlayer(&mut self, Input: [f64;4]) -> [f64;3] {
        let mut GInsideOfHidden: [[f64;4];3] = [[0.0;4];3];
        let mut InsideOfHidden: [f64;3] = [0.0;3];
        let neipier: f64 = std::f64::consts::E; // Euler's number e
        let mut Output: [f64;3] = [0.0;3];
        for i in 0..3{
            for j in 0..4{
                GInsideOfHidden[i][j] = self.I2HWeight[i][j] * Input[j];
            }
        }
        for i in 0..3{
            for j in 0..4{
                InsideOfHidden[i] =  InsideOfHidden[i] + GInsideOfHidden[i][j];
            }
            InsideOfHidden[i] = InsideOfHidden[i] + self.HiddenBias[i];
        }
        for i in 0..3{
            Output[i] = 1.0 / (1.0 + neipier.powf(InsideOfHidden[i] * -1.0));
        }
        Output
	}
    // Output layer: weighted sum of the hidden-layer outputs, plus bias, through the sigmoid
    fn Outputlayer(&mut self, Hidden: [f64;3]) -> [f64;4] {
        let mut GInsideOfOutput: [[f64;3];4] = [[0.0;3];4];
        let mut InsideOfOutput: [f64;4] = [0.0;4];
        let neipier: f64 = std::f64::consts::E; // Euler's number e
        let mut Output: [f64;4] = [0.0;4];
        for i in 0..4{
            for j in 0..3{
                GInsideOfOutput[i][j] = self.H2OWeight[i][j] * Hidden[j];
            }
        }
        for i in 0..4{
            for j in 0..3{
                InsideOfOutput[i] = InsideOfOutput[i] + GInsideOfOutput[i][j];
            }
            InsideOfOutput[i] = InsideOfOutput[i] + self.OutputBias[i];
        }
        for i in 0..4{
            Output[i] = 1.0 / (1.0 + neipier.powf(InsideOfOutput[i] * -1.0));
        }
        Output
    }
}
```
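
As a usage sketch (not in the original article; the 0.1 weights and zero biases are placeholder values I chose): with `number = 5`, the input layer first decodes the bits 0101 into [1, 0, 1, 0], so with a zero bias the input-layer output is roughly [0.73, 0.5, 0.73, 0.5].

```rust
// Hypothetical usage of the struct above; all concrete values are placeholders.
fn main() {
    let mut net = NeuronAndWeight::new(
        5,                     // number: encoded in binary (LSB first) as [1, 0, 1, 0]
        [[0.1; 4]; 3],         // I2HWeight
        [[0.1; 3]; 4],         // H2OWeight
        [0.0; 4],              // InputBias
        [0.0; 3],              // HiddenBias
        [0.0; 4],              // OutputBias
        [1.0, 0.0, 1.0, 0.0],  // TeacherSignal
    );
    let input = net.inputlayer();        // ~[0.73, 0.5, 0.73, 0.5]
    let hidden = net.Hiddenlayer(input);
    let output = net.Outputlayer(hidden);
    println!("{:?}", output);            // four values in (0, 1)
}
```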

#The learning code
These are the functions that perform backpropagation.

```rust:main.rs
// Computes the updates to the coupling coefficients between the hidden layer and the output layer
fn CalculateDeltaWeight1(number: i32, I2HWeight: [[f64;4];3], H2OWeight: [[f64;3];4], InputBias: [f64;4], HiddenBias: [f64;3], OutputBias: [f64;4], TeacherSignal: [f64;4]) -> [[f64;4];3] {
    let mut NeuralNetwork = NeuronAndWeight::new(number, I2HWeight, H2OWeight, InputBias, HiddenBias, OutputBias, TeacherSignal);
    let mut inputlayer = NeuralNetwork.inputlayer();
    let mut Hiddenlayer = NeuralNetwork.Hiddenlayer(inputlayer);
    let mut Outputlayer = NeuralNetwork.Outputlayer(Hiddenlayer);
    let mut DeltaWeight: [[f64;4];3] = [[0.0;4];3];
    for i in 0..3{
        for j in 0..4{
            DeltaWeight[i][j] = -0.19 * (Outputlayer[j] - TeacherSignal[j]) * Outputlayer[j] * (1.0 - Outputlayer[j]) * Hiddenlayer[i];
        }
    }
    DeltaWeight
}
    


// Computes the updates to the coupling coefficients between the input layer and the hidden layer
fn CalculateDeltaWeight2(number: i32, I2HWeight: [[f64;4];3], H2OWeight: [[f64;3];4], InputBias: [f64;4], HiddenBias: [f64;3], OutputBias: [f64;4], TeacherSignal: [f64;4]) -> [[f64;3];4] {
   let mut NeuralNetwork = NeuronAndWeight::new(number, I2HWeight, H2OWeight, InputBias, HiddenBias, OutputBias, TeacherSignal);
   let mut inputlayer = NeuralNetwork.inputlayer();
   let mut Hiddenlayer = NeuralNetwork.Hiddenlayer(inputlayer);
   let mut Outputlayer = NeuralNetwork.Outputlayer(Hiddenlayer);
   let mut DeltaWeight: [[f64;3];4] = [[0.0;3];4];
    for i in 0..4{
        for j in 0..3{
            // Error propagated back to hidden unit j: sum over the output neurons of
            // delta_output * weight(hidden j -> output h)
            let mut sum: f64 = 0.0;
            for h in 0..4{
                sum += (Outputlayer[h] - TeacherSignal[h]) * Outputlayer[h] * (1.0 - Outputlayer[h]) * H2OWeight[h][j];
            }
            DeltaWeight[i][j] = -0.23 * sum * Hiddenlayer[j] * (1.0 - Hiddenlayer[j]) * inputlayer[i];
        }
    }
    DeltaWeight
}
```

The "coupling coefficients" in the code are the synaptic weights.
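
The article stops short of a main function that ties everything together, so here is a minimal training-loop sketch under my own assumptions (the initial weights, the input number 5, and the iteration count are made up, and the names `train`, `i2h`, `h2o` are hypothetical). The delta arrays come back transposed relative to the weight arrays, and the learning rates (-0.19 and -0.23) are already baked into them:

```rust
// Hypothetical training loop; not part of the original article.
fn train() {
    let mut i2h: [[f64; 4]; 3] = [[0.1; 4]; 3];    // input -> hidden weights (I2HWeight)
    let mut h2o: [[f64; 3]; 4] = [[0.1; 3]; 4];    // hidden -> output weights (H2OWeight)
    let input_bias: [f64; 4] = [0.0; 4];
    let hidden_bias: [f64; 3] = [0.0; 3];
    let output_bias: [f64; 4] = [0.0; 4];
    let number = 5;                                // encoded as the bit pattern [1, 0, 1, 0]
    let teacher: [f64; 4] = [1.0, 0.0, 1.0, 0.0];  // teach the network to echo that pattern

    for _ in 0..10_000 {
        let dw1 = CalculateDeltaWeight1(number, i2h, h2o, input_bias, hidden_bias, output_bias, teacher);
        let dw2 = CalculateDeltaWeight2(number, i2h, h2o, input_bias, hidden_bias, output_bias, teacher);
        // dw1 is indexed [hidden][output]; H2OWeight is [output][hidden]
        for h in 0..3 {
            for o in 0..4 {
                h2o[o][h] += dw1[h][o];
            }
        }
        // dw2 is indexed [input][hidden]; I2HWeight is [hidden][input]
        for i in 0..4 {
            for h in 0..3 {
                i2h[h][i] += dw2[i][h];
            }
        }
    }

    // One final forward pass to see what the network learned
    let mut net = NeuronAndWeight::new(number, i2h, h2o, input_bias, hidden_bias, output_bias, teacher);
    let input = net.inputlayer();
    let hidden = net.Hiddenlayer(input);
    let output = net.Outputlayer(hidden);
    println!("after training: {:?}", output);      // should drift toward [1, 0, 1, 0]
}
```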

#Actually running it
To cut to the chase: it takes an absurd amount of time.
The reason GPUs are used for machine learning in the first place is that a GPU can run a huge number of calculations in parallel at once.
So in a neural network, where a large number of synaptic weights have to be computed, using a CPU with far less parallelism takes an enormous amount of time.
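
If you want to put a number on "absurdly long", one option (hypothetical, not from the article) is to wrap the training sketch above with `std::time::Instant`:

```rust
use std::time::Instant;

// Hypothetical timing wrapper; `train` refers to the training-loop sketch above.
fn main() {
    let start = Instant::now();
    train();
    println!("training took {:?}", start.elapsed());
}
```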

#Afterword
This being my first Qiita post, written with only a superficial level of knowledge, the article ended up fairly rough.

I plan to study GPU computing and deepen my knowledge of neural networks, and then take another shot at machine learning in Rust.
