Hello,
I'm new to TensorFlow Lite and Arduino development. I recently created a model that recognizes animals and insects (cats, dogs, butterflies, etc.) and converted it to a model.h file. To classify a picture, I decided to send a single image to the Arduino over the serial port, and I wrote a Python script for that. One picture is about 40,000 bytes. When I run the code, for some reason nothing ever comes back over the serial port. I would be very grateful if someone could explain how to send the image and read the results.
P.S.
Another thing I can't figure out: every time I upload a sketch to the Arduino, the port keeps changing; sometimes it is COM3 and sometimes COM4 (sometimes it switches right after the sketch finishes uploading). Could this be related to my code?
Sketch:
#include "Arduino.h"
#include <TensorFlowLite.h>
// #include "tensorflow/lite/micro/all_ops_resolver.h"
#include "tensorflow/lite/micro/tflite_bridge/micro_error_reporter.h"
#include "tensorflow/lite/micro/micro_interpreter.h"
#include <tensorflow/lite/micro/micro_mutable_op_resolver.h>
#include <tensorflow/lite/schema/schema_generated.h>
#include "model.h"
tflite::ErrorReporter* micro_error_reporter = nullptr;
const tflite::Model* tflite_model = nullptr;
tflite::MicroInterpreter* interpreter = nullptr;
TfLiteTensor* input_tensor = nullptr;
TfLiteTensor* output_tensor = nullptr;
int8_t* input_data = nullptr;
int8_t* output_data = nullptr;
constexpr int kTensorArenaSize = 136 * 1024;
static uint8_t tensor_arena[kTensorArenaSize];
void setup() {
  // Start Serial communication
  Serial.begin(9600);
  Serial.setTimeout(1);
  load_model_from_serial();
}

void loop() {
  // Get image data from serial port
  TfLiteStatus image_status = getImage(input_data);
  if (image_status != kTfLiteOk) {
    return;
  }
  // Run inference on the input image
  if (kTfLiteOk != interpreter->Invoke()) {
    TF_LITE_REPORT_ERROR(micro_error_reporter, "Invoke failed.");
  }
  // Print the output
  for (int i = 0; i < output_tensor->dims->data[0]; i++) {
    Serial.print(output_data[i]);
    Serial.print(" ");
  }
  Serial.println();
}
// Declare function to load the TensorFlow Lite model from serial port
// Assumes that the model has been pre-converted to a .cc file using xxd
void load_model_from_serial() {
  Serial.println("Animal classification");
  Serial.println("--------------------------------------------");
  Serial.println("Arduino Nano 33 BLE Sense running TensorFlow Lite Micro");
  Serial.println("");

  // Read the size of the model from serial port
  int model_size;
  while (!Serial.available());
  model_size = Serial.parseInt();

  // Allocate a buffer to hold the model data
  uint8_t* model_data = (uint8_t*)malloc(model_size);
  if (model_data == nullptr) {
    Serial.println("Failed to allocate memory for model data");
    while (true);
  }

  // Read the model data from serial port
  int num_bytes_read = 0;
  while (num_bytes_read < model_size) {
    if (Serial.available()) {
      //int num_bytes = Serial.readBytes(model_data + num_bytes_read, model_size - num_bytes_read);
      int num_bytes = Serial.readString().toInt();
      if (num_bytes < 0) {
        Serial.println("Error reading model data from serial port");
        while (true);
      }
      num_bytes_read += num_bytes;
    }
  }

  // Build a TensorFlow Lite model from the model data
  tflite_model = tflite::GetModel(model_data);
  if (tflite_model == nullptr) {
    Serial.println("Failed to create TensorFlow Lite model");
    while (true);
  }

  // Allocate memory for the interpreter
  static tflite::MicroMutableOpResolver<6> resolver;
  resolver.AddAveragePool2D();
  resolver.AddConv2D();
  resolver.AddDepthwiseConv2D();
  resolver.AddReshape();
  resolver.AddFullyConnected();
  resolver.AddSoftmax();
  static tflite::MicroInterpreter static_interpreter(tflite_model, resolver, tensor_arena, kTensorArenaSize);
  interpreter = &static_interpreter;
  TfLiteStatus allocate_status = interpreter->AllocateTensors();
  if (allocate_status != kTfLiteOk) {
    Serial.println("Failed to allocate memory for TensorFlow Lite interpreter");
    while (true);
  }

  // Get pointers to the input and output tensors
  input_tensor = interpreter->input(0);
  output_tensor = interpreter->output(0);
  input_data = input_tensor->data.int8;
  output_data = output_tensor->data.int8;

  // Free the buffer used to hold the model data
  free(model_data);
}
// Declare function to get image data from serial port
// Assumes that the image data is being sent as an array of int8_t
// Returns kTfLiteOk if successful, or an error code if there was an error
TfLiteStatus getImage(int8_t* image_data) {
  // Read the size of the image data from serial port
  int image_size;
  while (!Serial.available());
  image_size = Serial.parseInt();

  // Read the image data from serial port
  int num_bytes_read = 0;
  while (num_bytes_read < image_size) {
    if (Serial.available()) {
      //int num_bytes = Serial.readBytes(reinterpret_cast<char*>(image_data + num_bytes_read), image_size - num_bytes_read);
      int num_bytes = Serial.readString().toInt();
      if (num_bytes < 0) {
        return kTfLiteError;
      }
      num_bytes_read += num_bytes;
    }
  }
  return kTfLiteOk;
}
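
For reference, this is the readBytes()-based version of getImage() I originally had in mind (it is the line I commented out above) before I switched to readString().toInt(); I'm honestly not sure which of the two is the right way to pull raw bytes off the port. It assumes the same includes and globals as the sketch, and that the sender transmits exactly as many bytes as the input tensor expects:

// Alternative getImage() using Serial.readBytes(); meant as a drop-in
// replacement for the version in the sketch above (same globals assumed).
TfLiteStatus getImage(int8_t* image_data) {
  // Wait for the image size, sent as ASCII text (e.g. "40000\n")
  while (!Serial.available());
  int image_size = Serial.parseInt();

  // Read exactly image_size raw bytes straight into the tensor buffer.
  // readBytes() returns 0 on timeout, so the loop simply keeps waiting.
  // (I assume image_size matches input_tensor->bytes; it is not checked here.)
  int num_bytes_read = 0;
  while (num_bytes_read < image_size) {
    int num_bytes = Serial.readBytes(
        reinterpret_cast<char*>(image_data) + num_bytes_read,
        image_size - num_bytes_read);
    num_bytes_read += num_bytes;
  }
  return kTfLiteOk;
}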
Python script used to send the picture:
import serial
import time
arduino = serial.Serial(port='COM3', baudrate=9600, timeout=.1)
def write_read(x):
    arduino.write(x)
    time.sleep(0.05)
    data = arduino.readline()
    return data
image = None
with open('test.jpg', 'rb') as f:
    image = f.read()
print('Sending to arduino...')
value = write_read(image)
print(value) # printing the value
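
I also wasn't sure whether the Python side has to send the size first (so that Serial.parseInt() on the Arduino has something to read) and only then the raw bytes, instead of just writing the whole file at once. This is roughly what I had in mind; the 2-second wait, the 64-byte chunk size and the sleeps are just guesses on my part, and it only covers the image, not the model upload in load_model_from_serial():

import time
import serial

arduino = serial.Serial(port='COM3', baudrate=9600, timeout=1)
time.sleep(2)  # guess: give the board time to reset after the port is opened

with open('test.jpg', 'rb') as f:
    image = f.read()

# Send the size first as ASCII text so Serial.parseInt() can parse it,
# then the raw bytes in small chunks.
arduino.write(f"{len(image)}\n".encode('ascii'))
for i in range(0, len(image), 64):
    arduino.write(image[i:i + 64])
    time.sleep(0.01)  # guess: avoid overflowing the Arduino's serial buffer

# Read back whatever the sketch prints
print(arduino.readline().decode(errors='replace'))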