tensorflow lite on raspberry pi 1, part 13

Overview

I tried tensorflow lite on a raspberry pi 1.
I built a tflite file from a keras model and ran it on the Pi.
The dataset is fizzbuzz.
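
The Keras side is not shown in this part. For reference, here is a minimal sketch in Python of training such a model and exporting fizzbuzz.tflite; the layer sizes, training settings, and data range are assumptions for illustration, not the exact model used in this series.

# Sketch: train a tiny Keras fizzbuzz model and export it as fizzbuzz.tflite.
# Layer sizes and training settings are assumptions, not the original model.
import numpy as np
import tensorflow as tf

def encode(i):
    # 7-bit binary encoding, least significant bit first (matches the C++ input).
    return [float((i >> b) & 1) for b in range(7)]

def label(i):
    # One-hot label: [number, fizz, buzz, fizzbuzz] (matches the C++ output).
    if i % 15 == 0: return [0.0, 0.0, 0.0, 1.0]
    if i % 5 == 0:  return [0.0, 0.0, 1.0, 0.0]
    if i % 3 == 0:  return [0.0, 1.0, 0.0, 0.0]
    return [1.0, 0.0, 0.0, 0.0]

x = np.array([encode(i) for i in range(1, 128)], dtype=np.float32)
y = np.array([label(i) for i in range(1, 128)], dtype=np.float32)

model = tf.keras.Sequential([
    tf.keras.layers.Dense(64, activation="relu", input_shape=(7,)),
    tf.keras.layers.Dense(4, activation="softmax"),
])
model.compile(optimizer="adam", loss="categorical_crossentropy")
model.fit(x, y, epochs=1000, verbose=0)

# Convert the trained Keras model to a TFLite flatbuffer.
converter = tf.lite.TFLiteConverter.from_keras_model(model)
with open("fizzbuzz.tflite", "wb") as f:
    f.write(converter.convert())

The 7-bit input and the (number, fizz, buzz, fizzbuzz) output order match what main.cpp below writes and reads.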

Write a Makefile.

# Headers come from the TensorFlow source tree and its bundled flatbuffers;
# libtensorflow-lite.a was built for the Pi 1 (armv6l) with tensorflow/lite/tools/make.
CXXFLAGS ?= -I../tensorflow -I../tensorflow/tensorflow/lite/tools/make/downloads/flatbuffers/include
LDFLAGS ?= -L../tensorflow/tensorflow/lite/tools/make/gen/rpi_armv6l/lib

.PHONY: all clean

all: lite2

lite2: main.cpp
    g++ --std=c++11 main.cpp -O2 $(CXXFLAGS) $(LDFLAGS) -ltensorflow-lite -lstdc++ -lpthread -ldl -lm -o lite2

clean:
    rm -f lite2

main.cpp is as follows. Build it with make and run.

#include <cstdio>
#include <iostream>
#include <memory>
#include <string>
#include "tensorflow/lite/model.h"
#include "tensorflow/lite/interpreter.h"
#include "tensorflow/lite/kernels/register.h"
using namespace std;

bool is_error(TfLiteStatus const & status) 
{
    return status != kTfLiteOk;
}
int main(int argc, char const * argv[]) 
{
    std::string a = "fizzbuzz.tflite";
    TfLiteStatus status;
    std::unique_ptr<tflite::FlatBufferModel> model;
    std::unique_ptr<tflite::Interpreter> interpreter;
    std::cout << "0: Loading model: " << a << std::endl;
    model = tflite::FlatBufferModel::BuildFromFile(a.c_str());
    if (!model) 
    {
      std::cerr << "0: Failed to load the model." << std::endl;
      return -1;
    }
    std::cout << "1: The model was loaded successful." << std::endl;
    tflite::ops::builtin::BuiltinOpResolver resolver;
    tflite::InterpreterBuilder(* model, resolver)(& interpreter);
    std::cout << "2: interpreter was build successful." << std::endl;
    status = interpreter->AllocateTensors();
    if (is_error(status)) 
    {
      std::cerr << "2: Failed to allocate the memory for tensors." << std::endl;
      return -1;
    }
    std::cout << "3: The model was allocated successful." << std::endl;
    // Pointers into the model's input (7 floats) and output (4 floats) tensors.
    float * in = interpreter->typed_input_tensor<float>(0);
    float * out = interpreter->typed_output_tensor<float>(0);
    int i;
    for (i = 1; i < 100; i++)
    {
        // Encode i as a 7-bit binary vector, least significant bit first.
        in[0] = i & 0x1 ? 1.0f : 0.0f;
        in[1] = (i >> 1) & 0x1 ? 1.0f : 0.0f;
        in[2] = (i >> 2) & 0x1 ? 1.0f : 0.0f;
        in[3] = (i >> 3) & 0x1 ? 1.0f : 0.0f;
        in[4] = (i >> 4) & 0x1 ? 1.0f : 0.0f;
        in[5] = (i >> 5) & 0x1 ? 1.0f : 0.0f;
        in[6] = (i >> 6) & 0x1 ? 1.0f : 0.0f;
        status = interpreter->Invoke();
        if (is_error(status)) 
        {
            std::cerr << "3: Failed to invoke the interpreter." << std::endl;
            return -1;
        }
        // One-hot output: index 0 = the number itself, 1 = fizz, 2 = buzz, 3 = fizzbuzz.
        if (out[0] > 0.5f) std::printf ("%2d  ", i);
        if (out[1] > 0.5f) std::printf ("fizz  ");
        if (out[2] > 0.5f) std::printf ("buzz  ");
        if (out[3] > 0.5f) std::printf ("fizzbuzz  ");

    }
    cout << "ok" << endl;
    return 0;
}



Result

0: Loading model: fizzbuzz.tflite
1: The model was loaded successfully.
2: The interpreter was built successfully.
3: The tensors were allocated successfully.
 1   2  fizz   4  buzz  fizz   7   8  fizz  buzz  11  fizz  13  14  fizzbuzz  16  17  fizz  19  buzz  fizz  22  23  fizz  buzz  26  fizz  28  29  fizzbuzz  31  32  fizz  34  buzz  fizz  37  38  fizz  buzz  41  fizz  43  44  fizzbuzz  46  47  fizz  49  buzz  fizz  52  53  fizz  buzz  56  fizz  58  59  fizzbuzz  61  62  fizz  64  buzz  fizz  67  68  fizz  buzz  71  fizz  73  74  fizzbuzz  76  77  fizz  79  buzz  fizz  82  83  fizz  buzz  86  fizz  88  89  fizzbuzz  91  92  fizz  94  buzz  fizz  97  98  fizz  ok

That is all.
