Add C++ binding and proper installation

tonibofarull 2022-06-13 11:55:53 +02:00
parent eb4cd59c2a
commit 1f34eb8388
9 changed files with 83 additions and 34 deletions

View File

@@ -3,16 +3,3 @@ cd /root/src/tflite
git clone https://github.com/tensorflow/tensorflow.git tensorflow_src \
--branch v2.8.2
mkdir -p build
cd build
cmake ../tensorflow_src/tensorflow/lite
cmake --build . -j $(grep -c ^processor /proc/cpuinfo)
mkdir /usr/local/lib/tflite; find . | grep -E "\.a$" | xargs cp -t /usr/local/lib/tflite
cp -r flatbuffers/include/flatbuffers /usr/local/include
cp -r ../tensorflow_src/tensorflow /usr/local/include
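
With the static archives collected under /usr/local/lib/tflite and the headers copied into /usr/local/include, a quick way to confirm the installation is a minimal program that pulls in a TFLite header. This is a sketch; the file name is illustrative and TFLITE_VERSION_STRING comes from tensorflow/lite/version.h:

// smoke_test.cpp -- hypothetical check that the installed headers resolve
#include <tensorflow/lite/version.h>
#include <cstdio>

int main() {
    // Prints the version baked into the installed headers, e.g. "2.8.2"
    std::printf("TFLite version: %s\n", TFLITE_VERSION_STRING);
    return 0;
}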

View File

@@ -80,6 +80,11 @@ if (WAMR_BUILD_WASI_NN EQUAL 1)
execute_process(COMMAND ${WAMR_ROOT_DIR}/build-scripts/install_tensorflow.sh
RESULT_VARIABLE TENSORFLOW_RESULT
)
set(TENSORFLOW_SOURCE_DIR "/root/src/tflite/tensorflow_src")
add_subdirectory(
"${TENSORFLOW_SOURCE_DIR}/tensorflow/lite"
"${CMAKE_CURRENT_BINARY_DIR}/tensorflow-lite" EXCLUDE_FROM_ALL)
include (${IWASM_DIR}/libraries/wasi-nn/wasi_nn.cmake)
endif ()
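
One thing this hunk leaves unused is TENSORFLOW_RESULT, captured from execute_process. A natural follow-up (a sketch, not part of this commit) would be to abort configuration when the install script fails:

# hypothetical guard: stop configuring if install_tensorflow.sh failed
if (NOT TENSORFLOW_RESULT EQUAL 0)
    message (FATAL_ERROR "install_tensorflow.sh failed with ${TENSORFLOW_RESULT}")
endif ()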

View File

@@ -0,0 +1,23 @@
#include "lib_run_inference.hpp"
#include <tensorflow/lite/interpreter.h>
#include <tensorflow/lite/kernels/register.h>
#include <tensorflow/lite/model.h>
#include <tensorflow/lite/optional_debug_tools.h>
std::unique_ptr<tflite::Interpreter> interpreter = nullptr;
std::unique_ptr<tflite::FlatBufferModel> model = nullptr;
uint32_t _load(graph_builder_array builder, graph_encoding encoding) {
uint32_t *size = (uint32_t *)builder[1];
printf("inside _load: %u\n", *size);
return success;
// tflite::ErrorReporter *error_reporter;
// model = tflite::FlatBufferModel::BuildFromBuffer(
// (const char *)builder[0],
// 1000, // TODO: find how to pass buffer size
// error_reporter
// );
// tflite::ops::builtin::BuiltinOpResolver resolver;
// tflite::InterpreterBuilder(*model, resolver)(&interpreter);
}
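
Once the buffer size is threaded through (it arrives as builder[1], per the host-side doc comment), the commented-out path could be completed roughly as follows. This is a sketch, assuming builder[0] holds the model bytes and that success/invalid_argument are the wasi-nn status codes in scope here:

// Sketch of the completed loader: build the model from the guest buffer,
// using the size carried in builder[1] instead of the hard-coded 1000.
uint32_t _load(graph_builder_array builder, graph_encoding encoding) {
    // encoding is assumed to be validated by the caller (wasi_nn_load)
    uint32_t size = *(uint32_t *)builder[1];
    model = tflite::FlatBufferModel::BuildFromBuffer((const char *)builder[0], size);
    if (model == nullptr)
        return invalid_argument;
    tflite::ops::builtin::BuiltinOpResolver resolver;
    tflite::InterpreterBuilder(*model, resolver)(&interpreter);
    return interpreter != nullptr ? success : invalid_argument;
}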

View File

@@ -0,0 +1,18 @@
#ifndef LIB_RUN_INFERENCE_HPP
#define LIB_RUN_INFERENCE_HPP
#include <stdio.h>
#include "wasi_nn.h"
#ifdef __cplusplus
extern "C" {
#endif
uint32_t _load(graph_builder_array builder, graph_encoding encoding);
#ifdef __cplusplus
}
#endif
#endif

View File

@@ -1,5 +1,5 @@
#include "wasi_nn.h"
#include <stdio.h>
#include <assert.h>
#include <errno.h>
#include <string.h>
@@ -7,25 +7,35 @@
#include "wasm_export.h"
// #include <tensorflow/lite/interpreter.h>
// #include <tensorflow/lite/kernels/register.h>
// #include <tensorflow/lite/model.h>
// #include <tensorflow/lite/optional_debug_tools.h>
#include "lib_run_inference.hpp"
// std::unique_ptr<tflite::Interpreter> interpreter = NULL;
// std::unique_ptr<tflite::FlatBufferModel> model = NULL;
/**
 * @brief Loader for TensorFlow
 *
 * @param builder array of 2 pointers: the first is the model buffer, the second is its size
 */
void load_tensorflow(wasm_module_inst_t instance, graph_builder_array builder) {
printf("Loading tensorflow...\n");
for (int i = 0; i < 2; ++i)
builder[i] = (graph_builder) wasm_runtime_addr_app_to_native(instance, builder[i]);
}
void wasi_nn_load(wasm_exec_env_t exec_env, graph_builder_array builder, graph_encoding encoding)
uint32_t wasi_nn_load(wasm_exec_env_t exec_env, uint32_t builder, uint32_t encoding)
{
// tflite::ErrorReporter *error_reporter;
// model = tflite::FlatBufferModel::BuildFromBuffer(
// (const char *)builder[0],
// 1000, // TODO: find how to pass buffer size
// error_reporter
// );
// tflite::ops::builtin::BuiltinOpResolver resolver;
// tflite::InterpreterBuilder(*model, resolver)(&interpreter);
printf("Inside wasi_nn_load!\n\n");
wasm_module_inst_t instance = wasm_runtime_get_module_inst(exec_env);
graph_builder_array buf = (graph_builder_array) wasm_runtime_addr_app_to_native(instance, builder);
switch ((graph_encoding) encoding) {
case openvino:
return invalid_argument;
case tensorflow:
load_tensorflow(instance, buf);
break;
case onnx:
return invalid_argument;
}
return _load(buf, (graph_encoding) encoding);
}
void wasi_nn_init_execution_context()
@@ -54,7 +64,7 @@ void wasi_nn_get_output()
/* clang-format on */
static NativeSymbol native_symbols_wasi_nn[] = {
REG_NATIVE_FUNC(load, "(ii)"),
REG_NATIVE_FUNC(load, "(ii)i"),
};
uint32_t
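
For reference, the signature string "(ii)i" tells the runtime that the native function takes two i32 parameters and returns an i32, matching the new wasi_nn_load. The symbol table is then typically exposed through the usual WAMR export hook; a sketch (the function name below is illustrative):

/* Sketch of the usual WAMR export hook; the name is illustrative. */
uint32_t
get_wasi_nn_export_apis(NativeSymbol **p_native_symbols)
{
    *p_native_symbols = native_symbols_wasi_nn;
    return sizeof(native_symbols_wasi_nn) / sizeof(NativeSymbol);
}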

View File

@@ -2,6 +2,6 @@ set (WASI_NN_DIR ${CMAKE_CURRENT_LIST_DIR})
add_definitions (-DWASM_ENABLE_WASI_NN=1)
file (GLOB_RECURSE source_all ${WASI_NN_DIR}/*.c)
file (GLOB_RECURSE source_all ${WASI_NN_DIR}/*.c ${WASI_NN_DIR}/*.cpp)
set (LIBC_WASI_NN_SOURCE ${source_all})

View File

@@ -50,7 +50,7 @@ typedef enum {
tpu
} execution_target;
void load(graph_builder_array builder, graph_encoding encoding);
uint32_t load(graph_builder_array builder, graph_encoding encoding);
void init_execution_context();

View File

@@ -82,5 +82,6 @@ add_executable (basic src/main.c src/native_impl.c ${UNCOMMON_SHARED_SOURCE})
if (APPLE)
target_link_libraries (basic vmlib -lm -ldl -lpthread)
else ()
target_link_libraries (basic vmlib -lm -ldl -lpthread -lrt)
# TODO: add this to vmlib; otherwise it only works in samples/basic
target_link_libraries (basic vmlib -lm -ldl -lpthread -lrt tensorflow-lite)
endif ()
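
Resolving the TODO above would mean attaching the dependency to vmlib itself, so every embedder inherits it instead of each sample repeating the link line. A sketch of what that could look like in the runtime's own CMake:

# hypothetical: make vmlib carry the TFLite dependency for all consumers
target_link_libraries (vmlib tensorflow-lite)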

View File

@@ -22,7 +22,12 @@ calculate_native(int32_t n, int32_t func1, int32_t func2);
float
generate_float(int iteration, double seed1, float seed2)
{
load(NULL, 0);
char *buf = strdup("test_message");
uint32_t *size = malloc(sizeof(uint32_t));
*size = 4096;
graph_builder arr[] = {(graph_builder)buf, (graph_builder)size};
load(arr, 1);
float ret;
printf("calling into WASM function: %s\n", __FUNCTION__);