wasi-nn: Add support of wasi-nn as shared lib (#2310)

## Context

Currently, WAMR supports compiling iwasm with flag `WAMR_BUILD_WASI_NN`.
However, there are scenarios where the user might prefer having it as a shared library.

## Proposed Changes

Decouple wasi-nn context management by internally managing the context given
a module instance reference.
This commit is contained in:
tonibofarull 2023-06-27 12:18:26 +02:00 committed by GitHub
parent 0a0739ef23
commit ab96e01f5e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
9 changed files with 212 additions and 112 deletions

View File

@ -1201,17 +1201,6 @@ aot_instantiate(AOTModule *module, bool is_sub_inst, WASMExecEnv *exec_env_main,
}
#endif
#if WASM_ENABLE_WASI_NN != 0
if (!is_sub_inst) {
if (!(((AOTModuleInstanceExtra *)module_inst->e)->wasi_nn_ctx =
wasi_nn_initialize())) {
set_error_buf(error_buf, error_buf_size,
"wasi nn initialization failed");
goto fail;
}
}
#endif
/* Initialize the thread related data */
if (stack_size == 0)
stack_size = DEFAULT_WASM_STACK_SIZE;
@ -1310,12 +1299,8 @@ aot_deinstantiate(AOTModuleInstance *module_inst, bool is_sub_inst)
((AOTModuleInstanceExtra *)module_inst->e)->c_api_func_imports);
#if WASM_ENABLE_WASI_NN != 0
if (!is_sub_inst) {
WASINNContext *wasi_nn_ctx =
((AOTModuleInstanceExtra *)module_inst->e)->wasi_nn_ctx;
if (wasi_nn_ctx)
wasi_nn_destroy(wasi_nn_ctx);
}
if (!is_sub_inst)
wasi_nn_destroy(module_inst);
#endif
wasm_runtime_free(module_inst);

View File

@ -89,9 +89,6 @@ typedef struct AOTFunctionInstance {
typedef struct AOTModuleInstanceExtra {
CApiFuncImport *c_api_func_imports;
#if WASM_ENABLE_WASI_NN != 0
WASINNContext *wasi_nn_ctx;
#endif
} AOTModuleInstanceExtra;
#if defined(OS_ENABLE_HW_BOUND_CHECK) && defined(BH_PLATFORM_WINDOWS)

View File

@ -2097,16 +2097,6 @@ wasm_instantiate(WASMModule *module, bool is_sub_inst,
}
#endif
#if WASM_ENABLE_WASI_NN != 0
if (!is_sub_inst) {
if (!(module_inst->e->wasi_nn_ctx = wasi_nn_initialize())) {
set_error_buf(error_buf, error_buf_size,
"wasi nn initialization failed");
goto fail;
}
}
#endif
#if WASM_ENABLE_DEBUG_INTERP != 0
if (!is_sub_inst) {
/* Add module instance into module's instance list */
@ -2265,11 +2255,8 @@ wasm_deinstantiate(WASMModuleInstance *module_inst, bool is_sub_inst)
wasm_runtime_free(module_inst->e->c_api_func_imports);
#if WASM_ENABLE_WASI_NN != 0
if (!is_sub_inst) {
WASINNContext *wasi_nn_ctx = module_inst->e->wasi_nn_ctx;
if (wasi_nn_ctx)
wasi_nn_destroy(wasi_nn_ctx);
}
if (!is_sub_inst)
wasi_nn_destroy(module_inst);
#endif
wasm_runtime_free(module_inst);

View File

@ -241,10 +241,6 @@ typedef struct WASMModuleInstanceExtra {
&& WASM_ENABLE_LAZY_JIT != 0)
WASMModuleInstance *next;
#endif
#if WASM_ENABLE_WASI_NN != 0
WASINNContext *wasi_nn_ctx;
#endif
} WASMModuleInstanceExtra;
struct AOTFuncPerfProfInfo;

View File

@ -0,0 +1,58 @@
# Copyright (C) 2019 Intel Corporation. All rights reserved.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
cmake_minimum_required(VERSION 3.16)
project(wasi-nn C CXX)
# Everything here ends up inside shared libraries, so all objects must be PIC.
set(CMAKE_POSITION_INDEPENDENT_CODE ON)
# Paths resolved relative to this list file; WAMR_ROOT_DIR is the repo root.
set(WAMR_ROOT_DIR ${CMAKE_CURRENT_LIST_DIR}/../../../../..)
set(WASI_NN_ROOT_DIR ${CMAKE_CURRENT_LIST_DIR}/..)
# Default to a Debug build when the caller did not choose one.
if(NOT CMAKE_BUILD_TYPE)
set(CMAKE_BUILD_TYPE Debug)
endif()
#### libvmlib ####
# NOTE: we build vmlib as a shared library here so that it can be
# shared between iwasm and native libraries.
include(${WASI_NN_ROOT_DIR}/cmake/iwasm_helper.cmake)
include(${WAMR_ROOT_DIR}/build-scripts/runtime_lib.cmake)
add_library(vmlib SHARED ${WAMR_RUNTIME_LIB_SOURCE})
# iwasm
# The iwasm launcher links the shared vmlib plus the usual system libs.
include(${SHARED_DIR}/utils/uncommon/shared_uncommon.cmake)
set(RUNTIME_SOURCE_ALL
${WAMR_ROOT_DIR}/product-mini/platforms/${WAMR_BUILD_PLATFORM}/main.c
${UNCOMMON_SHARED_SOURCE}
)
add_executable(iwasm ${RUNTIME_SOURCE_ALL})
target_link_libraries(iwasm vmlib -lpthread -lm -ldl)
#### TensorFlow ####
# Provides WASI_NN_SOURCES / WASI_NN_LIBS used below.
include(${WASI_NN_ROOT_DIR}/cmake/wasi_nn.cmake)
#### WASI-NN ####
include_directories(
${WAMR_ROOT_DIR}/core/iwasm/include
${WAMR_ROOT_DIR}/core/shared/utils
${WAMR_ROOT_DIR}/core/shared/platform/linux
)
# libwasi-nn.so is loaded by iwasm at runtime via --native-lib.
add_library(wasi-nn SHARED
${WASI_NN_SOURCES}
)
# Add `get_native_lib` symbol
# WASI_NN_SHARED gates the get_native_lib() entry point in wasi_nn.c.
target_compile_definitions(wasi-nn PUBLIC
WASI_NN_SHARED
)
target_link_libraries(wasi-nn
${WASI_NN_LIBS}
vmlib
)

View File

@ -0,0 +1,13 @@
# wasi-nn as shared library
Example of how to build libwasi-nn as an external shared library (loaded with `--native-lib`) instead of embedding wasi-nn inside iwasm.
From folder `core/iwasm/libraries/wasi-nn/test`, build the test and run
```sh
../external/build/iwasm \
--dir=. \
--env="TARGET=cpu" \
--native-lib=../external/build/libwasi-nn.so \
test_tensorflow.wasm
```

View File

@ -9,16 +9,18 @@
#include <assert.h>
#include <errno.h>
#include <string.h>
#include <stdint.h>
#include "wasi_nn.h"
#include "wasi_nn_private.h"
#include "wasi_nn_app_native.h"
#include "logger.h"
#include "wasi_nn_tensorflowlite.hpp"
#include "logger.h"
#include "bh_platform.h"
#include "wasm_export.h"
#include "wasm_runtime.h"
#include "aot_runtime.h"
#define HASHMAP_INITIAL_SIZE 20
/* Definition of 'wasi_nn.h' structs in WASM app format (using offset) */
@ -51,6 +53,119 @@ static api_function lookup[] = {
tensorflowlite_get_output }
};
static HashMap *hashmap;
static void
wasi_nn_ctx_destroy(WASINNContext *wasi_nn_ctx);
/* Get wasi-nn context from module instance */
/* FNV-1a hash over the bytes of the pointer VALUE used as key.
 * Keys are module instance pointers compared by identity (see
 * key_equal_func), so we must hash the pointer's own bits (&key) —
 * not the memory it points to. The original cast without '&'
 * dereferenced the key, hashing the first sizeof(uintptr_t) bytes of
 * the module instance (out-of-bounds for smaller objects, and a crash
 * for a NULL key). */
static uint32
hash_func(const void *key)
{
    // fnv1a_hash
    const uint32 FNV_PRIME = 16777619;
    const uint32 FNV_OFFSET_BASIS = 2166136261U;
    uint32 hash = FNV_OFFSET_BASIS;
    const unsigned char *bytes = (const unsigned char *)&key;
    for (size_t i = 0; i < sizeof(uintptr_t); ++i) {
        hash ^= bytes[i];
        hash *= FNV_PRIME;
    }
    return hash;
}
/* Hash map key comparator: keys are module instance pointers, so two
 * keys are equal iff they are the same pointer (identity compare). */
static bool
key_equal_func(void *key1, void *key2)
{
    bool same_instance = (key1 == key2);
    return same_instance;
}
/* Key destructor registered with the hash map. Keys are borrowed
 * module instance pointers owned by the runtime, so there is nothing
 * to free here. */
static void
key_destroy_func(void *key1)
{
    /* Intentionally a no-op: the key's lifetime is managed elsewhere. */
    (void)key1;
}
static void
value_destroy_func(void *value)
{
wasi_nn_ctx_destroy((WASINNContext *)value);
}
/* Allocate and initialize one per-module-instance WASI-NN context.
 * Returns NULL on allocation failure. The caller owns the context and
 * must release it with wasi_nn_ctx_destroy(). */
static WASINNContext *
wasi_nn_initialize_context()
{
    NN_DBG_PRINTF("Initializing wasi-nn context");
    WASINNContext *wasi_nn_ctx =
        (WASINNContext *)wasm_runtime_malloc(sizeof(WASINNContext));
    if (wasi_nn_ctx == NULL) {
        NN_ERR_PRINTF("Error when allocating memory for WASI-NN context");
        return NULL;
    }
    /* Zero the whole struct: the original code left current_encoding
     * indeterminate, yet wasi_nn_ctx_destroy() logs it even when no
     * model was ever loaded (read of uninitialized memory). */
    memset(wasi_nn_ctx, 0, sizeof(WASINNContext));
    wasi_nn_ctx->is_model_loaded = false;
    tensorflowlite_initialize(&wasi_nn_ctx->tflite_ctx);
    return wasi_nn_ctx;
}
/* Create the process-wide instance->context hash map.
 * Idempotent: get_wasi_nn_export_apis() can be reached more than once
 * (e.g. repeated registration); re-creating the map would leak the
 * previous one together with every live context it owns.
 * Returns false only if the map cannot be created. */
static bool
wasi_nn_initialize()
{
    NN_DBG_PRINTF("Initializing wasi-nn");
    if (hashmap != NULL)
        return true;
    hashmap = bh_hash_map_create(HASHMAP_INITIAL_SIZE, true, hash_func,
                                 key_equal_func, key_destroy_func,
                                 value_destroy_func);
    if (hashmap == NULL) {
        NN_ERR_PRINTF("Error while initializing hashmap");
        return false;
    }
    return true;
}
/* Look up the WASI-NN context for a module instance, creating and
 * registering one on first use. Returns NULL if allocation or map
 * insertion fails; on insertion failure the fresh context is freed so
 * nothing leaks. */
static WASINNContext *
wasm_runtime_get_wasi_nn_ctx(wasm_module_inst_t instance)
{
    WASINNContext *ctx =
        (WASINNContext *)bh_hash_map_find(hashmap, (void *)instance);
    if (ctx != NULL) {
        NN_DBG_PRINTF("Returning ctx");
        return ctx;
    }
    /* First use by this instance: lazily create its context. */
    ctx = wasi_nn_initialize_context();
    if (ctx == NULL)
        return NULL;
    if (!bh_hash_map_insert(hashmap, (void *)instance, (void *)ctx)) {
        NN_ERR_PRINTF("Error while storing context");
        wasi_nn_ctx_destroy(ctx);
        return NULL;
    }
    NN_DBG_PRINTF("Returning ctx");
    return ctx;
}
/* Free one WASI-NN context: release the backend (TFLite) state first,
 * then the struct itself. Tolerates NULL so callers need not
 * pre-check (a diagnostic is still logged). */
static void
wasi_nn_ctx_destroy(WASINNContext *ctx)
{
    if (!ctx) {
        NN_ERR_PRINTF(
            "Error when deallocating memory. WASI-NN context is NULL");
        return;
    }
    NN_DBG_PRINTF("Freeing wasi-nn");
    NN_DBG_PRINTF("-> is_model_loaded: %d", ctx->is_model_loaded);
    NN_DBG_PRINTF("-> current_encoding: %d", ctx->current_encoding);
    tensorflowlite_destroy(ctx->tflite_ctx);
    wasm_runtime_free(ctx);
}
/* Tear down the WASI-NN context of a module instance (called from
 * wasm/aot deinstantiate).
 * Detach the context from the map BEFORE freeing it; the original code
 * freed the context while the map still held the pointer, leaving a
 * dangling value that a later instance allocated at the same address
 * would receive (use-after-free). It also used the lazy-creating
 * getter, so an instance that never touched wasi-nn allocated a
 * context just to destroy it. bh_hash_map_remove() does not invoke the
 * destroy callbacks, so we free exactly once here. */
void
wasi_nn_destroy(wasm_module_inst_t instance)
{
    void *key = NULL;
    void *value = NULL;
    if (!bh_hash_map_remove(hashmap, (void *)instance, &key, &value))
        return; /* no context was ever created for this instance */
    wasi_nn_ctx_destroy((WASINNContext *)value);
}
/* Utils */
static bool
@ -64,36 +179,13 @@ is_encoding_implemented(graph_encoding encoding)
static error
is_model_initialized(WASINNContext *wasi_nn_ctx)
{
if (!wasi_nn_ctx->is_initialized) {
if (!wasi_nn_ctx->is_model_loaded) {
NN_ERR_PRINTF("Model not initialized.");
return runtime_error;
}
return success;
}
WASINNContext *
wasm_runtime_get_wasi_nn_ctx(wasm_module_inst_t instance)
{
WASINNContext *wasi_nn_ctx = NULL;
#if WASM_ENABLE_INTERP != 0
if (instance->module_type == Wasm_Module_Bytecode) {
NN_DBG_PRINTF("Getting ctx from WASM");
WASMModuleInstance *module_inst = (WASMModuleInstance *)instance;
wasi_nn_ctx = ((WASMModuleInstanceExtra *)module_inst->e)->wasi_nn_ctx;
}
#endif
#if WASM_ENABLE_AOT != 0
if (instance->module_type == Wasm_Module_AoT) {
NN_DBG_PRINTF("Getting ctx from AOT");
AOTModuleInstance *module_inst = (AOTModuleInstance *)instance;
wasi_nn_ctx = ((AOTModuleInstanceExtra *)module_inst->e)->wasi_nn_ctx;
}
#endif
bh_assert(wasi_nn_ctx != NULL);
NN_DBG_PRINTF("Returning ctx");
return wasi_nn_ctx;
}
/* WASI-NN implementation */
error
@ -131,7 +223,7 @@ wasi_nn_load(wasm_exec_env_t exec_env, graph_builder_array_wasm *builder,
NN_DBG_PRINTF("wasi_nn_load finished with status %d [graph=%d]", res, *g);
wasi_nn_ctx->current_encoding = encoding;
wasi_nn_ctx->is_initialized = true;
wasi_nn_ctx->is_model_loaded = true;
fail:
// XXX: Free intermediate structure pointers
@ -250,39 +342,6 @@ wasi_nn_get_output(wasm_exec_env_t exec_env, graph_execution_context ctx,
return res;
}
/* Non-exposed public functions */
WASINNContext *
wasi_nn_initialize()
{
NN_DBG_PRINTF("Initializing wasi-nn");
WASINNContext *wasi_nn_ctx =
(WASINNContext *)wasm_runtime_malloc(sizeof(WASINNContext));
if (wasi_nn_ctx == NULL) {
NN_ERR_PRINTF("Error when allocating memory for WASI-NN context");
return NULL;
}
wasi_nn_ctx->is_initialized = true;
wasi_nn_ctx->current_encoding = 3;
tensorflowlite_initialize(&wasi_nn_ctx->tflite_ctx);
return wasi_nn_ctx;
}
void
wasi_nn_destroy(WASINNContext *wasi_nn_ctx)
{
if (wasi_nn_ctx == NULL) {
NN_ERR_PRINTF(
"Error when deallocating memory. WASI-NN context is NULL");
return;
}
NN_DBG_PRINTF("Freeing wasi-nn");
NN_DBG_PRINTF("-> is_initialized: %d", wasi_nn_ctx->is_initialized);
NN_DBG_PRINTF("-> current_encoding: %d", wasi_nn_ctx->current_encoding);
tensorflowlite_destroy(wasi_nn_ctx->tflite_ctx);
wasm_runtime_free(wasi_nn_ctx);
}
/* Register WASI-NN in WAMR */
/* clang-format off */
@ -299,8 +358,19 @@ static NativeSymbol native_symbols_wasi_nn[] = {
};
uint32_t
get_wasi_nn_export_apis(NativeSymbol **p_libc_wasi_apis)
get_wasi_nn_export_apis(NativeSymbol **p_native_symbols)
{
*p_libc_wasi_apis = native_symbols_wasi_nn;
if (!wasi_nn_initialize())
return 0;
*p_native_symbols = native_symbols_wasi_nn;
return sizeof(native_symbols_wasi_nn) / sizeof(NativeSymbol);
}
#if defined(WASI_NN_SHARED)
/* Shared-library entry point: iwasm's --native-lib loader resolves
 * this symbol to discover the module name to register under and the
 * native symbol table to bind. Returns the number of symbols (0 on
 * initialization failure). */
uint32_t
get_native_lib(char **p_module_name, NativeSymbol **p_native_symbols)
{
    *p_module_name = "wasi_nn";
    uint32_t n_symbols = get_wasi_nn_export_apis(p_native_symbols);
    return n_symbols;
}
#endif

View File

@ -7,25 +7,20 @@
#define WASI_NN_PRIVATE_H
#include "wasi_nn_types.h"
#include "wasm_export.h"
typedef struct {
bool is_initialized;
bool is_model_loaded;
graph_encoding current_encoding;
void *tflite_ctx;
} WASINNContext;
/**
* @brief Initialize wasi-nn
*
*/
WASINNContext *
wasi_nn_initialize();
/**
* @brief Destroy wasi-nn on app exists
*
*/
void
wasi_nn_destroy(WASINNContext *wasi_nn_ctx);
wasi_nn_destroy(wasm_module_inst_t instance);
#endif

View File

@ -7,9 +7,8 @@
#include "wasi_nn_tensorflowlite.hpp"
#include "logger.h"
#include "bh_common.h"
#include "bh_platform.h"
#include "platform_common.h"
#include "wasm_export.h"
#include <tensorflow/lite/interpreter.h>
#include <tensorflow/lite/kernels/register.h>