Overview

I tried writing a neural network in C.
I used it to tackle the FizzBuzz problem.
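
The network treats FizzBuzz as regression: each number from 0 to 99 goes in as 7 binary input bits, and the single output is trained toward 0.99 for FizzBuzz, 0.66 for Fizz, 0.33 for Buzz, and 0.00 for a plain number. A condensed sketch of building one training pair, restating what main() in the sample code below does:

#include <stdio.h>

/* Build the training pair for one number; mirrors main() in the sample code. */
void make_pair(int n, double in[7], double *target)
{
    int b;
    for (b = 0; b < 7; b++)
        in[b] = (n >> b) & 0x01;                   /* 7-bit binary encoding, LSB first */

    if (n % 3 == 0 && n % 5 == 0) *target = 0.99;  /* FizzBuzz */
    else if (n % 3 == 0)          *target = 0.66;  /* Fizz */
    else if (n % 5 == 0)          *target = 0.33;  /* Buzz */
    else                          *target = 0.00;  /* plain number */
}

int main(void)
{
    double in[7], target;
    make_pair(15, in, &target);
    printf("target for 15 = %.2f\n", target);      /* prints 0.99 (FizzBuzz) */
    return 0;
}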

Library

genann, a minimal feedforward neural network library written in C. Its header and implementation are inlined in the sample code below.
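
For reference, a minimal usage sketch of the genann calls used below (genann_init / genann_train / genann_run / genann_free); the XOR data and hyperparameters here are purely illustrative:

#include <stdio.h>
/* assumes the genann declarations/implementation are available, as inlined in the sample code below */

int main(void)
{
    const double in[4][2] = {{0,0}, {0,1}, {1,0}, {1,1}};
    const double out[4]   = {0, 1, 1, 0};
    int i, epoch;

    /* 2 inputs, 1 hidden layer with 4 neurons, 1 output */
    genann * ann = genann_init(2, 1, 4, 1);

    /* illustrative training schedule: 1000 passes with learning rate 3 */
    for (epoch = 0; epoch < 1000; epoch++)
        for (i = 0; i < 4; i++)
            genann_train(ann, in[i], out + i, 3);

    for (i = 0; i < 4; i++)
        printf("%g XOR %g -> %.2f\n", in[i][0], in[i][1], *genann_run(ann, in[i]));

    genann_free(ann);
    return 0;
}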

Results

Output for 1 is 0.01.(0.00)
1
Output for 2 is 0.00.(0.00)
2
Output for 3 is 0.72.(0.66)
Fizz
Output for 4 is 0.00.(0.00)
4
Output for 5 is 0.44.(0.33)
Buzz
Output for 6 is 0.60.(0.66)
Fizz
Output for 7 is 0.01.(0.00)
7
Output for 8 is 0.00.(0.00)
8
Output for 9 is 0.68.(0.66)
Fizz
Output for 10 is 0.55.(0.33)
Buzz
Output for 11 is 0.01.(0.00)
11
Output for 12 is 0.74.(0.66)
Fizz
Output for 13 is 0.02.(0.00)
13
Output for 14 is 0.01.(0.00)
14
Output for 15 is 0.94.(0.99)
FizzBuzz
Output for 16 is 0.01.(0.00)
16
Output for 17 is 0.01.(0.00)
17
Output for 18 is 0.72.(0.66)
Fizz
Output for 19 is 0.01.(0.00)
19
Output for 20 is 0.47.(0.33)
Buzz
Output for 21 is 0.69.(0.66)
Fizz
Output for 22 is 0.01.(0.00)
22
Output for 23 is 0.01.(0.00)
23
Output for 24 is 0.68.(0.66)
Fizz
Output for 25 is 0.39.(0.33)
Buzz
Output for 26 is 0.01.(0.00)
26
Output for 27 is 0.69.(0.66)
Fizz
Output for 28 is 0.02.(0.00)
28
Output for 29 is 0.01.(0.00)
29
Output for 30 is 0.94.(0.99)
FizzBuzz
Output for 31 is 0.01.(0.00)
31
Output for 32 is 0.00.(0.00)
32
Output for 33 is 0.71.(0.66)
Fizz
Output for 34 is 0.00.(0.00)
34
Output for 35 is 0.35.(0.33)
Buzz
Output for 36 is 0.61.(0.66)
Fizz
Output for 37 is 0.01.(0.00)
37
Output for 38 is 0.00.(0.00)
38
Output for 39 is 0.64.(0.66)
Fizz
Output for 40 is 0.46.(0.33)
Buzz
Output for 41 is 0.01.(0.00)
41
Output for 42 is 0.63.(0.66)
Fizz
Output for 43 is 0.05.(0.00)
43
Output for 44 is 0.01.(0.00)
44
Output for 45 is 0.94.(0.99)
FizzBuzz
Output for 46 is 0.03.(0.00)
46
Output for 47 is 0.01.(0.00)
47
Output for 48 is 0.71.(0.66)
Fizz
Output for 49 is 0.02.(0.00)
49
Output for 50 is 0.37.(0.33)
Buzz
Output for 51 is 0.69.(0.66)
Fizz
Output for 52 is 0.01.(0.00)
52
Output for 53 is 0.02.(0.00)
53
Output for 54 is 0.64.(0.66)
Fizz
Output for 55 is 0.38.(0.33)
Buzz
Output for 56 is 0.01.(0.00)
56
Output for 57 is 0.69.(0.66)
Fizz
Output for 58 is 0.05.(0.00)
58
Output for 59 is 0.01.(0.00)
59
Output for 60 is 0.94.(0.99)
FizzBuzz
Output for 61 is 0.01.(0.00)
61
Output for 62 is 0.01.(0.00)
62
Output for 63 is 0.82.(0.66)
Fizz
Output for 64 is 0.00.(0.00)
64
Output for 65 is 0.34.(0.33)
Buzz
Output for 66 is 0.55.(0.66)
Buzz
Output for 67 is 0.00.(0.00)
67
Output for 68 is 0.01.(0.00)
68
Output for 69 is 0.66.(0.66)
Fizz
Output for 70 is 0.35.(0.33)
Buzz
Output for 71 is 0.01.(0.00)
71
Output for 72 is 0.79.(0.66)
Fizz
Output for 73 is 0.00.(0.00)
73
Output for 74 is 0.02.(0.00)
74
Output for 75 is 0.97.(0.99)
FizzBuzz
Output for 76 is 0.00.(0.00)
76
Output for 77 is 0.01.(0.00)
77
Output for 78 is 0.53.(0.66)
Buzz
Output for 79 is 0.02.(0.00)
79
Output for 80 is 0.33.(0.33)
Buzz
Output for 81 is 0.67.(0.66)
Fizz
Output for 82 is 0.00.(0.00)
82
Output for 83 is 0.01.(0.00)
83
Output for 84 is 0.66.(0.66)
Fizz
Output for 85 is 0.31.(0.33)
Buzz
Output for 86 is 0.01.(0.00)
86
Output for 87 is 0.74.(0.66)
Fizz
Output for 88 is 0.00.(0.00)
88
Output for 89 is 0.00.(0.00)
89
Output for 90 is 0.97.(0.99)
FizzBuzz
Output for 91 is 0.02.(0.00)
91
Output for 92 is 0.01.(0.00)
92
Output for 93 is 0.67.(0.66)
Fizz
Output for 94 is 0.02.(0.00)
94
Output for 95 is 0.24.(0.33)
95
Output for 96 is 0.57.(0.66)
Buzz
Output for 97 is 0.00.(0.00)
97
Output for 98 is 0.00.(0.00)
98
Output for 99 is 0.64.(0.66)
Fizz
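
Each label line above comes from the decode rule in the sample code: index = (int)(output * 3 + 0.2), indexing into {number, Buzz, Fizz, FizzBuzz}. That is why a few borderline outputs pick the wrong label, e.g. 66 prints Buzz and 95 prints a plain number. Two rows worked through, with the output values copied from the listing above:

#include <stdio.h>

int main(void)
{
    const char * str[] = { "%d\n", "Buzz\n", "Fizz\n", "FizzBuzz\n" };

    /* network outputs for 66 and 95, copied from the result listing */
    const double out66 = 0.55, out95 = 0.24;

    int j66 = out66 * 3 + 0.2;   /* 1.85 -> 1 -> "Buzz"  (a target-level 0.66 would give Fizz) */
    int j95 = out95 * 3 + 0.2;   /* 0.92 -> 0 -> "%d\n"  (a target-level 0.33 would give Buzz) */

    printf(str[j66], 66);        /* Buzz */
    printf(str[j95], 95);        /* 95 */
    return 0;
}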

Sample code

/* --- genann: declarations (inlined) --- */
#include <stdio.h>

#ifdef __cplusplus
extern "C" {
#endif
#ifndef GENANN_RANDOM
#define GENANN_RANDOM() (((double) rand()) / RAND_MAX)
#endif
struct genann;
typedef double (*genann_actfun)(const struct genann *ann, double a);
typedef struct genann {
    int inputs,
        hidden_layers,
        hidden,
        outputs;
    genann_actfun activation_hidden;
    genann_actfun activation_output;
    int total_weights;
    int total_neurons;
    double * weight;
    double * output;
    double * delta;
} genann;
genann *genann_init(int inputs, int hidden_layers, int hidden, int outputs);
genann *genann_read(FILE *in);
void genann_randomize(genann *ann);
genann *genann_copy(genann const *ann);
void genann_free(genann *ann);
double const *genann_run(genann const *ann, double const *inputs);
void genann_train(genann const *ann, double const *inputs, double const *desired_outputs, double learning_rate);
void genann_write(genann const *ann, FILE *out);
void genann_init_sigmoid_lookup(const genann *ann);
double genann_act_sigmoid(const genann *ann, double a);
double genann_act_sigmoid_cached(const genann *ann, double a);
double genann_act_threshold(const genann *ann, double a);
double genann_act_linear(const genann *ann, double a);
#ifdef __cplusplus
}
#endif
/* --- genann: implementation (inlined) --- */
#include <assert.h>
#include <errno.h>
#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#ifndef genann_act
#define genann_act_hidden genann_act_hidden_indirect
#define genann_act_output genann_act_output_indirect
#else
#define genann_act_hidden genann_act
#define genann_act_output genann_act
#endif
#define LOOKUP_SIZE 4096
double genann_act_hidden_indirect(const struct genann * ann, double a)
{
    return ann->activation_hidden(ann, a);
}
double genann_act_output_indirect(const struct genann * ann, double a)
{
    return ann->activation_output(ann, a);
}
const double sigmoid_dom_min = -15.0;
const double sigmoid_dom_max = 15.0;
double interval;
double lookup[LOOKUP_SIZE];
#ifdef __GNUC__
#define likely(x)       __builtin_expect(!!(x), 1)
#define unlikely(x)     __builtin_expect(!!(x), 0)
#define unused          __attribute__((unused))
#else
#define likely(x)       x
#define unlikely(x)     x
#define unused
#pragma warning(disable : 4996)
#endif
double inline genann_act_sigmoid(const genann * ann unused, double a)
{
    if (a < -45.0) return 0;
    if (a > 45.0) return 1;
    return 1.0 / (1 + exp(-a));
}
void genann_init_sigmoid_lookup(const genann * ann)
{
    const double f = (sigmoid_dom_max - sigmoid_dom_min) / LOOKUP_SIZE;
    int i;
    interval = LOOKUP_SIZE / (sigmoid_dom_max - sigmoid_dom_min);
    for (i = 0; i < LOOKUP_SIZE; ++i)
    {
        lookup[i] = genann_act_sigmoid(ann, sigmoid_dom_min + f * i);
    }
}
double inline genann_act_sigmoid_cached(const genann * ann unused, double a)
{
    assert(!isnan(a));
    if (a < sigmoid_dom_min) return lookup[0];
    if (a >= sigmoid_dom_max) return lookup[LOOKUP_SIZE - 1];
    size_t j = (size_t) ((a - sigmoid_dom_min) * interval + 0.5);
    if (unlikely(j >= LOOKUP_SIZE)) return lookup[LOOKUP_SIZE - 1];
    return lookup[j];
}
double inline genann_act_linear(const struct genann * ann unused, double a)
{
    return a;
}
double inline genann_act_threshold(const struct genann * ann unused, double a)
{
    return a > 0;
}
genann * genann_init(int inputs, int hidden_layers, int hidden, int outputs)
{
    if (hidden_layers < 0) return 0;
    if (inputs < 1) return 0;
    if (outputs < 1) return 0;
    if (hidden_layers > 0 && hidden < 1) return 0;
    const int hidden_weights = hidden_layers ? (inputs + 1) * hidden + (hidden_layers - 1) * (hidden + 1) * hidden : 0;
    const int output_weights = (hidden_layers ? (hidden + 1) : (inputs + 1)) * outputs;
    const int total_weights = (hidden_weights + output_weights);
    const int total_neurons = (inputs + hidden * hidden_layers + outputs);
    const int size = sizeof(genann) + sizeof(double) * (total_weights + total_neurons + (total_neurons - inputs));
    genann * ret = malloc(size);
    if (!ret) return 0;
    ret->inputs = inputs;
    ret->hidden_layers = hidden_layers;
    ret->hidden = hidden;
    ret->outputs = outputs;
    ret->total_weights = total_weights;
    ret->total_neurons = total_neurons;
    ret->weight = (double *) ((char *) ret + sizeof(genann));
    ret->output = ret->weight + ret->total_weights;
    ret->delta = ret->output + ret->total_neurons;
    genann_randomize(ret);
    ret->activation_hidden = genann_act_sigmoid_cached;
    ret->activation_output = genann_act_sigmoid_cached;
    genann_init_sigmoid_lookup(ret);
    return ret;
}
genann * genann_read(FILE * in)
{
    int inputs,
        hidden_layers,
        hidden,
        outputs;
    int rc;
    errno = 0;
    rc = fscanf(in, "%d %d %d %d", &inputs, &hidden_layers, &hidden, &outputs);
    if (rc < 4 || errno != 0)
    {
        perror("fscanf");
        return NULL;
    }
    genann * ann = genann_init(inputs, hidden_layers, hidden, outputs);
    int i;
    for (i = 0; i < ann->total_weights; ++i)
    {
        errno = 0;
        rc = fscanf(in, " %le", ann->weight + i);
        if (rc < 1 || errno != 0)
        {
            perror("fscanf");
            genann_free(ann);
            return NULL;
        }
    }
    return ann;
}
genann * genann_copy(genann const * ann)
{
    const int size = sizeof(genann) + sizeof(double) * (ann->total_weights + ann->total_neurons + (ann->total_neurons - ann->inputs));
    genann * ret = malloc(size);
    if (!ret) return 0;
    memcpy(ret, ann, size);
    ret->weight = (double *) ((char *) ret + sizeof(genann));
    ret->output = ret->weight + ret->total_weights;
    ret->delta = ret->output + ret->total_neurons;
    return ret;
}
void genann_randomize(genann * ann)
{
    int i;
    for (i = 0; i < ann->total_weights; ++i)
    {
        double r = GENANN_RANDOM();
        ann->weight[i] = r - 0.5;
    }
}
void genann_free(genann * ann)
{
    free(ann);
}
double const * genann_run(genann const * ann, double const * inputs)
{
    double const * w = ann->weight;
    double * o = ann->output + ann->inputs;
    double const * i = ann->output;
    memcpy(ann->output, inputs, sizeof(double) * ann->inputs);
    int h,
        j,
        k;
    if (!ann->hidden_layers)
    {
        double * ret = o;
        for (j = 0; j < ann->outputs; ++j)
        {
            double sum = * w++ * -1.0;
            for (k = 0; k < ann->inputs; ++k)
            {
                sum += * w++ * i[k];
            }
            * o++ = genann_act_output(ann, sum);
        }
        return ret;
    }
    for (j = 0; j < ann->hidden; ++j)
    {
        double sum = * w++ * -1.0;
        for (k = 0; k < ann->inputs; ++k)
        {
            sum += * w++ * i[k];
        }
        * o++ = genann_act_hidden(ann, sum);
    }
    i += ann->inputs;
    for (h = 1; h < ann->hidden_layers; ++h)
    {
        for (j = 0; j < ann->hidden; ++j)
        {
            double sum = * w++ * -1.0;
            for (k = 0; k < ann->hidden; ++k)
            {
                sum += * w++ * i[k];
            }
            * o++ = genann_act_hidden(ann, sum);
        }
        i += ann->hidden;
    }
    double const * ret = o;
    for (j = 0; j < ann->outputs; ++j)
    {
        double sum = * w++ * -1.0;
        for (k = 0; k < ann->hidden; ++k)
        {
            sum += * w++ * i[k];
        }
        * o++ = genann_act_output(ann, sum);
    }
    assert(w - ann->weight == ann->total_weights);
    assert(o - ann->output == ann->total_neurons);
    return ret;
}
void genann_train(genann const * ann, double const * inputs, double const * desired_outputs, double learning_rate)
{
    genann_run(ann, inputs);
    int h,
        j,
        k;
    {
        double const * o = ann->output + ann->inputs + ann->hidden * ann->hidden_layers;
        double * d = ann->delta + ann->hidden * ann->hidden_layers;
        double const * t = desired_outputs;
        if (genann_act_output == genann_act_linear || ann->activation_output == genann_act_linear)
        {
            for (j = 0; j < ann->outputs; ++j)
            {
                * d++ = * t++ - * o++;
            }
        }
        else
        {
            for (j = 0; j < ann->outputs; ++j)
            {
                * d++ = (* t - * o) * * o * (1.0 - * o);
                ++o;
                ++t;
            }
        }
    }
    for (h = ann->hidden_layers - 1; h >= 0; --h)
    {
        double const * o = ann->output + ann->inputs + (h * ann->hidden);
        double * d = ann->delta + (h * ann->hidden);
        double const * const dd = ann->delta + ((h + 1) * ann->hidden);
        double const * const ww = ann->weight + ((ann->inputs + 1) * ann->hidden) + ((ann->hidden + 1) * ann->hidden * (h));
        for (j = 0; j < ann->hidden; ++j)
        {
            double delta = 0;
            for (k = 0; k < (h == ann->hidden_layers - 1 ? ann->outputs : ann->hidden); ++k)
            {
                const double forward_delta = dd[k];
                const int windex = k * (ann->hidden + 1) + (j + 1);
                const double forward_weight = ww[windex];
                delta += forward_delta * forward_weight;
            }
            * d = * o * (1.0 - * o) * delta;
            ++d;
            ++o;
        }
    }
    {
        double const * d = ann->delta + ann->hidden * ann->hidden_layers;
        double * w = ann->weight + (ann->hidden_layers ? ((ann->inputs + 1) * ann->hidden + (ann->hidden + 1) * ann->hidden * (ann->hidden_layers - 1)) : (0));
        double const * const i = ann->output + (ann->hidden_layers ? (ann->inputs + (ann->hidden) * (ann->hidden_layers - 1)) : 0);
        for (j = 0; j < ann->outputs; ++j)
        {
            * w++ += * d * learning_rate * - 1.0;
            for (k = 1; k < (ann->hidden_layers ? ann->hidden : ann->inputs) + 1; ++k)
            {
                * w++ += * d * learning_rate * i[k - 1];
            }
            ++d;
        }
        assert(w - ann->weight == ann->total_weights);
    }
    for (h = ann->hidden_layers - 1; h >= 0; --h)
    {
        double const * d = ann->delta + (h * ann->hidden);
        double const * i = ann->output + (h ? (ann->inputs + ann->hidden * (h - 1)) : 0);
        double * w = ann->weight + (h ? ((ann->inputs + 1) * ann->hidden + (ann->hidden + 1) * (ann->hidden) * (h - 1)) : 0);
        for (j = 0; j < ann->hidden; ++j)
        {
            * w++ += * d * learning_rate * -1.0;
            for (k = 1; k < (h == 0 ? ann->inputs : ann->hidden) + 1; ++k)
            {
                * w++ += * d * learning_rate * i[k - 1];
            }
            ++d;
        }
    }
}
void genann_write(genann const * ann, FILE * out)
{
    fprintf(out, "%d %d %d %d", ann->inputs, ann->hidden_layers, ann->hidden, ann->outputs);
    int i;
    for (i = 0; i < ann->total_weights; ++i)
    {
        fprintf(out, " %.20e", ann->weight[i]);
    }
}
/* --- FizzBuzz sample program --- */
#include <stdio.h>
#include <stdlib.h>
#include <math.h>

int main(int argc, char * argv[])
{
    /* label format strings, indexed by the decoded output: 0 = plain number, 1 = Buzz, 2 = Fizz, 3 = FizzBuzz */
    char * str[] = {
        "%d\n", 
        "Buzz\n", 
        "Fizz\n", 
        "FizzBuzz\n"
    };
    double input[100][7];
    double output[100];
    int i;
    int j;
    /* encode each number 0..99 as 7 binary input bits, least significant bit first */
    for (i = 0; i < 100; i++)
    {
        for (j = 0; j < 7; j++)
        {
            input[i][j] = (i >> j) & 0x01;
        }
    }
    /* target encoding: 0.99 = FizzBuzz, 0.66 = Fizz, 0.33 = Buzz, 0 = plain number */
    for (i = 0; i < 100; i++)
    {
        if (i % 3 == 0 && i % 5 == 0) 
        {
            //printf("fizzbuzz ");
            output[i] = 0.99;
        }
        else if (i % 3 == 0) 
        {
            //printf("fizz ");
            output[i] = 0.66;
        }
        else if (i % 5 == 0) 
        {
            //printf("buzz ");
            output[i] = 0.33;
        }
        else
        {
            //printf("%d ", i);
            output[i] = 0;
        }
    }
    /* 7 binary inputs, 1 hidden layer of 20 neurons, 1 output */
    genann * ann = genann_init(7, 1, 20, 1);
    /* train for 4000 passes over 0..99 with learning rate 3 */
    for (j = 0; j < 4000; j++)
    {
        for (i = 0; i < 100; i++)
        {
            genann_train(ann, input[i], output + i, 3);
        }
    }
    for (i = 1; i < 100; i++)
    {
        /* decode the scalar output into a label index: truncate(output * 3 + 0.2) */
        j =  * genann_run(ann, input[i]) * 3 + 0.2;
        printf ("Output for %d is %1.2f.(%1.2f)\n", i, * genann_run(ann, input[i]), output[i]);
        printf (str[j], i);
        //printf ("%d\n", j);
    }
    genann_free(ann);
    return 0;
}
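
The inlined genann source also defines genann_write and genann_read, which the program above never calls. A minimal sketch of saving and reloading a trained network with them; the file name "fizzbuzz.ann" and the helper function itself are just illustrative:

#include <stdio.h>
/* assumes the genann declarations/implementation above are in the same translation unit */

void save_and_reload(genann * ann)
{
    FILE * f = fopen("fizzbuzz.ann", "w");
    if (!f) return;
    genann_write(ann, f);                /* writes the network shape followed by every weight as text */
    fclose(f);

    f = fopen("fizzbuzz.ann", "r");
    if (!f) return;
    genann * loaded = genann_read(f);    /* rebuilds an equivalent network from the file */
    fclose(f);
    if (loaded) genann_free(loaded);
}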

Deliverables

That's all.
