mirror of https://github.com/bytecodealliance/wasm-micro-runtime.git
synced 2025-02-11 09:25:20 +00:00

wasi-nn: Apply new architecture (#3692)

ps. https://github.com/bytecodealliance/wasm-micro-runtime/issues/3677

parent 4e31bd63b7
commit 140ff25d46
@@ -438,12 +438,13 @@ if (WAMR_BUILD_WASI_NN EQUAL 1)
   if (NOT WAMR_BUILD_WASI_NN_TFLITE EQUAL 1 AND NOT WAMR_BUILD_WASI_NN_OPENVINO EQUAL 1)
     message (FATAL_ERROR " Need to select a backend for WASI-NN")
   endif ()
+
   if (WAMR_BUILD_WASI_NN_TFLITE EQUAL 1)
-    message (" WASI-NN backend tflite enabled")
+    message (" WASI-NN: backend tflite enabled")
     add_definitions (-DWASM_ENABLE_WASI_NN_TFLITE)
   endif ()
   if (WAMR_BUILD_WASI_NN_OPENVINO EQUAL 1)
-    message (" WASI-NN backend openvino enabled")
+    message (" WASI-NN: backend openvino enabled")
     add_definitions (-DWASM_ENABLE_WASI_NN_OPENVINO)
   endif ()
   # Variant devices

@@ -459,7 +460,7 @@ if (WAMR_BUILD_WASI_NN EQUAL 1)
     add_definitions (-DWASM_WASI_NN_EXTERNAL_DELEGATE_PATH="${WAMR_BUILD_WASI_NN_EXTERNAL_DELEGATE_PATH}")
   endif ()
   if (WAMR_BUILD_WASI_EPHEMERAL_NN EQUAL 1)
-    message (" WASI-NN: WASI-Ephemeral-NN enabled")
+    message (" WASI-NN: use 'wasi_ephemeral_nn' instead of 'wasi-nn'")
     add_definitions (-DWASM_ENABLE_WASI_EPHEMERAL_NN=1)
   endif()
 endif ()
@@ -15,6 +15,9 @@
 #if WASM_ENABLE_THREAD_MGR != 0
 #include "../libraries/thread-mgr/thread_manager.h"
 #endif
+#if WASM_ENABLE_WASI_NN != 0 || WASM_ENABLE_WASI_EPHEMERAL_NN != 0
+#include "wasi_nn_host.h"
+#endif
 
 static NativeSymbolsList g_native_symbols_list = NULL;
 

@@ -472,11 +475,12 @@ quick_aot_entry_init();
 bool
 wasm_native_init()
 {
 #if WASM_ENABLE_SPEC_TEST != 0 || WASM_ENABLE_LIBC_BUILTIN != 0 \
     || WASM_ENABLE_BASE_LIB != 0 || WASM_ENABLE_LIBC_EMCC != 0 \
     || WASM_ENABLE_LIB_RATS != 0 || WASM_ENABLE_WASI_NN != 0 \
     || WASM_ENABLE_APP_FRAMEWORK != 0 || WASM_ENABLE_LIBC_WASI != 0 \
-    || WASM_ENABLE_LIB_PTHREAD != 0 || WASM_ENABLE_LIB_WASI_THREADS != 0
+    || WASM_ENABLE_LIB_PTHREAD != 0 || WASM_ENABLE_LIB_WASI_THREADS != 0 \
+    || WASM_ENABLE_WASI_NN != 0 || WASM_ENABLE_WASI_EPHEMERAL_NN != 0
     NativeSymbol *native_symbols;
     uint32 n_native_symbols;
 #endif

@@ -562,13 +566,30 @@ wasm_native_init()
         goto fail;
 #endif /* WASM_ENABLE_LIB_RATS */
 
+#if WASM_ENABLE_WASI_NN != 0 || WASM_ENABLE_WASI_EPHEMERAL_NN != 0
+    if (!wasi_nn_initialize())
+        goto fail;
+
+    n_native_symbols = get_wasi_nn_export_apis(&native_symbols);
+    if (n_native_symbols > 0
+        && !wasm_native_register_natives(
+#if WASM_ENABLE_WASI_EPHEMERAL_NN != 0
+            "wasi_ephemeral_nn",
+#else
+            "wasi_nn",
+#endif /* WASM_ENABLE_WASI_EPHEMERAL_NN != 0 */
+            native_symbols, n_native_symbols))
+        goto fail;
+#endif /* WASM_ENABLE_WASI_NN != 0 || WASM_ENABLE_WASI_EPHEMERAL_NN != 0 */
+
 #if WASM_ENABLE_QUICK_AOT_ENTRY != 0
     if (!quick_aot_entry_init()) {
 #if WASM_ENABLE_SPEC_TEST != 0 || WASM_ENABLE_LIBC_BUILTIN != 0 \
     || WASM_ENABLE_BASE_LIB != 0 || WASM_ENABLE_LIBC_EMCC != 0 \
     || WASM_ENABLE_LIB_RATS != 0 || WASM_ENABLE_WASI_NN != 0 \
     || WASM_ENABLE_APP_FRAMEWORK != 0 || WASM_ENABLE_LIBC_WASI != 0 \
-    || WASM_ENABLE_LIB_PTHREAD != 0 || WASM_ENABLE_LIB_WASI_THREADS != 0
+    || WASM_ENABLE_LIB_PTHREAD != 0 || WASM_ENABLE_LIB_WASI_THREADS != 0 \
+    || WASM_ENABLE_WASI_NN != 0 || WASM_ENABLE_WASI_EPHEMERAL_NN != 0
         goto fail;
 #else
         return false;

@@ -577,11 +598,12 @@ wasm_native_init()
 #endif
 
     return true;
 #if WASM_ENABLE_SPEC_TEST != 0 || WASM_ENABLE_LIBC_BUILTIN != 0 \
     || WASM_ENABLE_BASE_LIB != 0 || WASM_ENABLE_LIBC_EMCC != 0 \
     || WASM_ENABLE_LIB_RATS != 0 || WASM_ENABLE_WASI_NN != 0 \
     || WASM_ENABLE_APP_FRAMEWORK != 0 || WASM_ENABLE_LIBC_WASI != 0 \
-    || WASM_ENABLE_LIB_PTHREAD != 0 || WASM_ENABLE_LIB_WASI_THREADS != 0
+    || WASM_ENABLE_LIB_PTHREAD != 0 || WASM_ENABLE_LIB_WASI_THREADS != 0 \
+    || WASM_ENABLE_WASI_NN != 0 || WASM_ENABLE_WASI_EPHEMERAL_NN != 0
 fail:
     wasm_native_destroy();
     return false;

@@ -599,6 +621,7 @@ wasm_native_destroy()
         g_wasi_context_key = NULL;
     }
 #endif
+
 #if WASM_ENABLE_LIB_PTHREAD != 0
     lib_pthread_destroy();
 #endif

@@ -607,6 +630,10 @@ wasm_native_destroy()
     lib_wasi_threads_destroy();
 #endif
 
+#if WASM_ENABLE_WASI_NN != 0 || WASM_ENABLE_WASI_EPHEMERAL_NN != 0
+    wasi_nn_destroy();
+#endif
+
     node = g_native_symbols_list;
     while (node) {
         node_next = node->next;
@@ -1,12 +1,13 @@
 # Copyright (C) 2019 Intel Corporation. All rights reserved.
 # SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
 
 find_library(TENSORFLOW_LITE
   NAMES tensorflow-lite
+  HINTS ${CMAKE_CURRENT_BINARY_DIR}/tensorflow-lite
+  NO_DEFAULT_PATHS
 )
 
-if(NOT EXISTS ${TENSORFLOW_LITE})
+if(NOT TENSORFLOW_LITE)
   if(NOT EXISTS "${WAMR_ROOT_DIR}/core/deps/tensorflow-src")
     execute_process(
       COMMAND "${WAMR_ROOT_DIR}/core/deps/install_tensorflow.sh"

@@ -32,11 +33,15 @@ if(NOT EXISTS ${TENSORFLOW_LITE})
     "${TENSORFLOW_SOURCE_DIR}/tensorflow/lite"
     "${CMAKE_CURRENT_BINARY_DIR}/tensorflow-lite"
     EXCLUDE_FROM_ALL
   )
-  set(TENSORFLOW_LITE_INCLUDE_DIR "${TENSORFLOW_SOURCE_DIR}")
-  set(FLATBUFFER_INCLUDE_DIR "${CMAKE_CURRENT_BINARY_DIR}/flatbuffers/include")
-
-  include_directories(${TENSORFLOW_LITE_INCLUDE_DIR})
-  include_directories(${FLATBUFFER_INCLUDE_DIR})
+else ()
+  message(STATUS "TensorFlow Lite library found: ${TENSORFLOW_LITE}")
+  set(TENSORFLOW_SOURCE_DIR "${WAMR_ROOT_DIR}/core/deps/tensorflow-src")
 endif()
 
+set(TENSORFLOW_LITE_INCLUDE_DIR "${TENSORFLOW_SOURCE_DIR}/tensorflow/lite")
+set(FLATBUFFER_INCLUDE_DIR "${CMAKE_CURRENT_BINARY_DIR}/flatbuffers/include")
+
+include_directories(${TENSORFLOW_SOURCE_DIR})
+include_directories(${FLATBUFFER_INCLUDE_DIR})
+link_directories(${CMAKE_CURRENT_BINARY_DIR}/tensorflow-lite)

@@ -27,61 +27,48 @@ endif()
 #
 # wasi-nn general
 set(WASI_NN_ROOT ${CMAKE_CURRENT_LIST_DIR}/..)
-add_library(
-  wasi-nn-general
-  SHARED
+set(WASI_NN_SOURCES
   ${WASI_NN_ROOT}/src/wasi_nn.c
   ${WASI_NN_ROOT}/src/utils/wasi_nn_app_native.c
 )
-target_include_directories(
-  wasi-nn-general
-  PUBLIC
-  ${WASI_NN_ROOT}/include
-  ${WASI_NN_ROOT}/src
-  ${WASI_NN_ROOT}/src/utils
-)
-target_link_libraries(
-  wasi-nn-general
-  PUBLIC
-  libiwasm
-)
-target_compile_definitions(
-  wasi-nn-general
-  PUBLIC
+include_directories(${WASI_NN_ROOT}/include)
+add_compile_definitions(
   $<$<CONFIG:Debug>:NN_LOG_LEVEL=0>
   $<$<CONFIG:Release>:NN_LOG_LEVEL=2>
 )
 
 #
 # wasi-nn backends
+#
 # - tflite
 if(WAMR_BUILD_WASI_NN_TFLITE EQUAL 1)
   add_library(
-    wasi-nn-tflite
+    wasi_nn_tflite
     SHARED
     ${WASI_NN_ROOT}/src/wasi_nn_tensorflowlite.cpp
   )
 
   target_link_libraries(
-    wasi-nn-tflite
+    wasi_nn_tflite
     PUBLIC
+    libiwasm
     tensorflow-lite
-    wasi-nn-general
   )
 endif()
 
 # - openvino
 if(WAMR_BUILD_WASI_NN_OPENVINO EQUAL 1)
   add_library(
-    wasi-nn-openvino
+    wasi_nn_openvino
     SHARED
     ${WASI_NN_ROOT}/src/wasi_nn_openvino.c
   )
 
   target_link_libraries(
-    wasi-nn-openvino
+    wasi_nn_openvino
     PUBLIC
+    libiwasm
     openvino::runtime
     openvino::runtime::c
-    wasi-nn-general
   )
 endif()
core/iwasm/libraries/wasi-nn/include/wasi_nn_host.h (new file, 20 lines)

@@ -0,0 +1,20 @@
+/*
+ * Copyright (C) 2019 Intel Corporation. All rights reserved.
+ * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+ */
+
+#ifndef WASI_NN_HOST_H
+#define WASI_NN_HOST_H
+
+#include "lib_export.h"
+
+uint32_t
+get_wasi_nn_export_apis(NativeSymbol **p_native_symbols);
+
+bool
+wasi_nn_initialize();
+
+void
+wasi_nn_destroy();
+
+#endif /* WASI_NN_HOST_H */
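The new header is the only surface the core runtime sees from wasi-nn. A hedged, minimal sketch of the call sequence it implies, mirroring the wasm_native.c hunk earlier in this diff; the helper names bring_up_wasi_nn/tear_down_wasi_nn are illustrative only and not part of the commit:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include "wasi_nn_host.h" /* assumption: compiled with the wasi-nn include path */

static bool
bring_up_wasi_nn(void)
{
    NativeSymbol *native_symbols = NULL;
    uint32_t n_native_symbols;

    if (!wasi_nn_initialize()) /* creates the per-instance context hashmap */
        return false;

    n_native_symbols = get_wasi_nn_export_apis(&native_symbols);
    printf("wasi-nn exports %u native symbols\n", (unsigned)n_native_symbols);
    /* wasm_native_init() then registers them under "wasi_nn" or
       "wasi_ephemeral_nn", depending on WASM_ENABLE_WASI_EPHEMERAL_NN */
    return n_native_symbols > 0;
}

static void
tear_down_wasi_nn(void)
{
    /* frees per-instance contexts and dlclose()s any loaded backend */
    wasi_nn_destroy();
}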
@@ -126,6 +126,7 @@ typedef enum {
     tensorflowlite,
     ggml,
     autodetect,
+    unknown_backend,
 } graph_encoding;
 
 // Define where the graph should be executed.

@@ -161,9 +162,6 @@ typedef struct {
     BACKEND_DEINITIALIZE deinit;
 } api_function;
 
-bool
-wasi_nn_register_backend(api_function apis);
-
 void
 wasi_nn_dump_tensor_dimension(tensor_dimensions *dim, int32_t output_len,
                               char *output);
@@ -10,40 +10,37 @@
 #include <errno.h>
 #include <string.h>
 #include <stdint.h>
+#include <dlfcn.h>
 
 #include "wasi_nn_private.h"
-#include "wasi_nn_app_native.h"
-#include "logger.h"
+#include "utils/wasi_nn_app_native.h"
+#include "utils/logger.h"
 
 #include "bh_platform.h"
 #include "wasi_nn_types.h"
 #include "wasm_export.h"
 
 #define HASHMAP_INITIAL_SIZE 20
+#define TFLITE_BACKEND_LIB "libwasi_nn_tflite.so"
+#define OPENVINO_BACKEND_LIB "libwasi_nn_openvino.so"
+#define LLAMACPP_BACKEND_LIB "libwasi_nn_llamacpp.so"
 
 /* Global variables */
-// if using `load_by_name`, there is no known `encoding` at the time of loading
-// so, just keep one `api_function` is enough
-static api_function lookup = { 0 };
+struct backends_api_functions {
+    void *backend_handle;
+    api_function functions;
+} lookup[autodetect] = { 0 };
 
-#define call_wasi_nn_func(wasi_error, func, ...) \
+#define call_wasi_nn_func(backend_encoding, func, wasi_error, ...) \
     do { \
-        if (lookup.func) { \
-            wasi_error = lookup.func(__VA_ARGS__); \
-            if (wasi_error != success) \
-                NN_ERR_PRINTF("Error %s: %d", #func, wasi_error); \
-        } \
-        else { \
-            NN_ERR_PRINTF("Error %s is not registered", #func); \
-            wasi_error = unsupported_operation; \
-        } \
+        wasi_error = lookup[backend_encoding].functions.func(__VA_ARGS__); \
+        if (wasi_error != success) \
+            NN_ERR_PRINTF("Error %s() -> %d", #func, wasi_error); \
     } while (0)
 
+/* HashMap utils */
 static HashMap *hashmap;
 
-static void
-wasi_nn_ctx_destroy(WASINNContext *wasi_nn_ctx);
-
 static uint32
 hash_func(const void *key)
 {
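For readers skimming the macro change: the single global api_function is gone, and dispatch is now a table lookup keyed by the backend's graph_encoding. A standalone toy (not WAMR code, names simplified) that shows the same table-driven dispatch pattern, so the rewritten macro above is easier to follow:

#include <stdio.h>

typedef enum { openvino, tensorflowlite, ggml, autodetect } graph_encoding;
typedef enum { success, runtime_error } err_t;

typedef struct {
    err_t (*compute)(void *ctx);
} api_function;

static struct {
    void *backend_handle; /* dlopen() handle in the real code */
    api_function functions;
} lookup[autodetect];

#define call_func(backend, func, err, ...) \
    do { \
        err = lookup[backend].functions.func(__VA_ARGS__); \
        if (err != success) \
            fprintf(stderr, "Error %s() -> %d\n", #func, (int)err); \
    } while (0)

static err_t
dummy_compute(void *ctx)
{
    (void)ctx;
    puts("compute() dispatched through the tflite slot");
    return success;
}

int
main(void)
{
    /* in WAMR the slot is filled by dlsym() in register_backend() */
    lookup[tensorflowlite].functions.compute = dummy_compute;

    err_t err;
    call_func(tensorflowlite, compute, err, NULL);
    return err == success ? 0 : 1;
}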
@@ -74,12 +71,51 @@ key_destroy_func(void *key1)
     /* key type is wasm_module_inst_t*. do nothing */
 }
 
+static void
+wasi_nn_ctx_destroy(WASINNContext *wasi_nn_ctx)
+{
+    NN_DBG_PRINTF("[WASI NN] DEINIT...");
+
+    if (wasi_nn_ctx == NULL) {
+        NN_ERR_PRINTF(
+            "Error when deallocating memory. WASI-NN context is NULL");
+        return;
+    }
+    NN_DBG_PRINTF("Freeing wasi-nn");
+    NN_DBG_PRINTF("-> is_model_loaded: %d", wasi_nn_ctx->is_model_loaded);
+    NN_DBG_PRINTF("-> current_encoding: %d", wasi_nn_ctx->backend);
+
+    /* deinit() the backend */
+    wasi_nn_error res;
+    call_wasi_nn_func(wasi_nn_ctx->backend, deinit, res,
+                      wasi_nn_ctx->backend_ctx);
+
+    wasm_runtime_free(wasi_nn_ctx);
+}
+
 static void
 value_destroy_func(void *value)
 {
     wasi_nn_ctx_destroy((WASINNContext *)value);
 }
 
+bool
+wasi_nn_initialize()
+{
+    NN_DBG_PRINTF("[WASI NN General] Initializing wasi-nn");
+
+    // hashmap { instance: wasi_nn_ctx }
+    hashmap = bh_hash_map_create(HASHMAP_INITIAL_SIZE, true, hash_func,
+                                 key_equal_func, key_destroy_func,
+                                 value_destroy_func);
+    if (hashmap == NULL) {
+        NN_ERR_PRINTF("Error while initializing hashmap");
+        return false;
+    }
+
+    return true;
+}
+
 static WASINNContext *
 wasi_nn_initialize_context()
 {

@@ -91,34 +127,11 @@ wasi_nn_initialize_context()
         NN_ERR_PRINTF("Error when allocating memory for WASI-NN context");
         return NULL;
     }
-    wasi_nn_ctx->is_model_loaded = false;
 
-    /* only one backend can be registered */
-    wasi_nn_error res;
-    call_wasi_nn_func(res, init, &wasi_nn_ctx->backend_ctx);
-    if (res != success) {
-        wasm_runtime_free(wasi_nn_ctx);
-        return NULL;
-    }
-
+    memset(wasi_nn_ctx, 0, sizeof(WASINNContext));
     return wasi_nn_ctx;
 }
 
-static bool
-wasi_nn_initialize()
-{
-    NN_DBG_PRINTF("[WASI NN General] Initializing wasi-nn");
-    // hashmap { instance: wasi_nn_ctx }
-    hashmap = bh_hash_map_create(HASHMAP_INITIAL_SIZE, true, hash_func,
-                                 key_equal_func, key_destroy_func,
-                                 value_destroy_func);
-    if (hashmap == NULL) {
-        NN_ERR_PRINTF("Error while initializing hashmap");
-        return false;
-    }
-    return true;
-}
-
 /* Get wasi-nn context from module instance */
 static WASINNContext *
 wasm_runtime_get_wasi_nn_ctx(wasm_module_inst_t instance)

@@ -129,6 +142,7 @@ wasm_runtime_get_wasi_nn_ctx(wasm_module_inst_t instance)
     wasi_nn_ctx = wasi_nn_initialize_context();
     if (wasi_nn_ctx == NULL)
         return NULL;
+
     bool ok =
         bh_hash_map_insert(hashmap, (void *)instance, (void *)wasi_nn_ctx);
     if (!ok) {

@@ -141,36 +155,31 @@ wasm_runtime_get_wasi_nn_ctx(wasm_module_inst_t instance)
     return wasi_nn_ctx;
 }
 
-static void
-wasi_nn_ctx_destroy(WASINNContext *wasi_nn_ctx)
-{
-    NN_DBG_PRINTF("[WASI NN] DEINIT...");
-
-    if (wasi_nn_ctx == NULL) {
-        NN_ERR_PRINTF(
-            "Error when deallocating memory. WASI-NN context is NULL");
-        return;
-    }
-    NN_DBG_PRINTF("Freeing wasi-nn");
-    NN_DBG_PRINTF("-> is_model_loaded: %d", wasi_nn_ctx->is_model_loaded);
-    NN_DBG_PRINTF("-> current_encoding: %d", wasi_nn_ctx->current_encoding);
-
-    /* only one backend can be registered */
-    wasi_nn_error res;
-    call_wasi_nn_func(res, deinit, wasi_nn_ctx->backend_ctx);
-
-    wasm_runtime_free(wasi_nn_ctx);
-}
-
 void
 wasi_nn_destroy()
 {
     // destroy hashmap will destroy keys and values
     bh_hash_map_destroy(hashmap);
 
+    // close backends' libraries and registered functions
+    for (unsigned i = 0; i < sizeof(lookup) / sizeof(lookup[0]); i++) {
+        if (lookup[i].backend_handle) {
+            dlclose(lookup[i].backend_handle);
+            lookup[i].backend_handle = NULL;
+        }
+
+        lookup[i].functions.init = NULL;
+        lookup[i].functions.deinit = NULL;
+        lookup[i].functions.load = NULL;
+        lookup[i].functions.load_by_name = NULL;
+        lookup[i].functions.init_execution_context = NULL;
+        lookup[i].functions.set_input = NULL;
+        lookup[i].functions.compute = NULL;
+        lookup[i].functions.get_output = NULL;
+    }
 }
 
 /* Utils */
 
 static wasi_nn_error
 is_model_initialized(WASINNContext *wasi_nn_ctx)
 {
@@ -181,8 +190,169 @@ is_model_initialized(WASINNContext *wasi_nn_ctx)
     return success;
 }
 
-/* WASI-NN implementation */
+/*
+ *TODO: choose a proper backend based on
+ * - hardware
+ * - model file format
+ * - on device ML framework
+ */
+static graph_encoding
+choose_a_backend()
+{
+    void *handle;
+
+    handle = dlopen(LLAMACPP_BACKEND_LIB, RTLD_LAZY);
+    if (handle) {
+        NN_INFO_PRINTF("Using llama.cpp backend");
+        dlclose(handle);
+        return ggml;
+    }
+
+    handle = dlopen(OPENVINO_BACKEND_LIB, RTLD_LAZY);
+    if (handle) {
+        NN_INFO_PRINTF("Using openvino backend");
+        dlclose(handle);
+        return openvino;
+    }
+
+    handle = dlopen(TFLITE_BACKEND_LIB, RTLD_LAZY);
+    if (handle) {
+        NN_INFO_PRINTF("Using tflite backend");
+        dlclose(handle);
+        return tensorflowlite;
+    }
+
+    return unknown_backend;
+}
+
+static bool
+register_backend(void *handle, api_function *functions)
+{
+    BACKEND_INITIALIZE init = (BACKEND_INITIALIZE)dlsym(handle, "init_backend");
+    if (!init) {
+        NN_WARN_PRINTF("init_backend() not found");
+        return false;
+    }
+    functions->init = init;
+
+    BACKEND_DEINITIALIZE deinit =
+        (BACKEND_DEINITIALIZE)dlsym(handle, "deinit_backend");
+    if (!deinit) {
+        NN_WARN_PRINTF("deinit_backend() not found");
+        return false;
+    }
+    functions->deinit = deinit;
+
+    LOAD load = (LOAD)dlsym(handle, "load");
+    if (!load) {
+        NN_WARN_PRINTF("load() not found");
+        return false;
+    }
+    functions->load = load;
+
+    LOAD_BY_NAME load_by_name = (LOAD_BY_NAME)dlsym(handle, "load_by_name");
+    if (!load_by_name) {
+        NN_WARN_PRINTF("load_by_name() not found");
+        return false;
+    }
+    functions->load_by_name = load_by_name;
+
+    INIT_EXECUTION_CONTEXT init_execution_context =
+        (INIT_EXECUTION_CONTEXT)dlsym(handle, "init_execution_context");
+    if (!init_execution_context) {
+        NN_WARN_PRINTF("init_execution_context() not found");
+        return false;
+    }
+    functions->init_execution_context = init_execution_context;
+
+    SET_INPUT set_input = (SET_INPUT)dlsym(handle, "set_input");
+    if (!set_input) {
+        NN_WARN_PRINTF("set_input() not found");
+        return false;
+    }
+    functions->set_input = set_input;
+
+    COMPUTE compute = (COMPUTE)dlsym(handle, "compute");
+    if (!compute) {
+        NN_WARN_PRINTF("compute() not found");
+        return false;
+    }
+    functions->compute = compute;
+
+    GET_OUTPUT get_output = (GET_OUTPUT)dlsym(handle, "get_output");
+    if (!get_output) {
+        NN_WARN_PRINTF("get_output() not found");
+        return false;
+    }
+    functions->get_output = get_output;
+
+    return true;
+}
+
+static bool
+prepare_backend(const char *lib_name, struct backends_api_functions *backend)
+{
+    NN_DBG_PRINTF("[Native Register] prepare_backend %s", lib_name);
+
+    void *handle;
+    handle = dlopen(lib_name, RTLD_LAZY);
+    if (!handle) {
+        NN_ERR_PRINTF("Error loading %s. %s", lib_name, dlerror());
+        return false;
+    }
+
+    if (!register_backend(handle, &(backend->functions))) {
+        NN_ERR_PRINTF("Error when registering functions of %s", lib_name);
+        dlclose(handle);
+        return false;
+    }
+
+    backend->backend_handle = handle;
+    return true;
+}
+
+static const char *
+graph_encoding_to_backend_lib_name(graph_encoding encoding)
+{
+    switch (encoding) {
+        case openvino:
+            return OPENVINO_BACKEND_LIB;
+        case tensorflowlite:
+            return TFLITE_BACKEND_LIB;
+        case ggml:
+            return LLAMACPP_BACKEND_LIB;
+        default:
+            return NULL;
+    }
+}
+
+static bool
+detect_and_load_backend(graph_encoding backend_hint,
+                        struct backends_api_functions *backends,
+                        graph_encoding *loaded_backed)
+{
+    if (backend_hint >= autodetect)
+        return false;
+
+    if (backend_hint == autodetect)
+        backend_hint = choose_a_backend();
+
+    /* if already loaded */
+    if (lookup[backend_hint].backend_handle) {
+        *loaded_backed = backend_hint;
+        return true;
+    }
+
+    *loaded_backed = backend_hint;
+    const char *backend_lib_name =
+        graph_encoding_to_backend_lib_name(backend_hint);
+    if (!backend_lib_name)
+        return false;
+
+    return prepare_backend(backend_lib_name, backends + backend_hint);
+}
+
+/* WASI-NN implementation */
 #if WASM_ENABLE_WASI_EPHEMERAL_NN != 0
 wasi_nn_error
 wasi_nn_load(wasm_exec_env_t exec_env, graph_builder_wasm *builder,
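register_backend() above effectively defines the ABI a wasi-nn backend shared library must export. A hedged skeleton of such a backend follows; the signatures are copied from the OpenVINO/TFLite headers later in this diff, while the file name and stub bodies are illustrative only. As of this change the loader only dlopen()s the names in the *_BACKEND_LIB defines, so a real backend would be built as one of those shared objects.

/* my_backend.c -- hedged sketch of the contract expected by dlsym() above */
#include "wasi_nn_types.h" /* assumption: compiled with the wasi-nn include path */

__attribute__((visibility("default"))) wasi_nn_error
init_backend(void **ctx)
{
    *ctx = NULL; /* allocate per-backend state here */
    return success;
}

__attribute__((visibility("default"))) wasi_nn_error
deinit_backend(void *ctx)
{
    (void)ctx; /* release whatever init_backend() allocated */
    return success;
}

__attribute__((visibility("default"))) wasi_nn_error
load(void *ctx, graph_builder_array *builder, graph_encoding encoding,
     execution_target target, graph *g)
{
    (void)ctx; (void)builder; (void)encoding; (void)target; (void)g;
    return unsupported_operation; /* stub */
}

__attribute__((visibility("default"))) wasi_nn_error
load_by_name(void *ctx, const char *filename, uint32_t filename_len, graph *g)
{
    (void)ctx; (void)filename; (void)filename_len; (void)g;
    return unsupported_operation; /* stub */
}

__attribute__((visibility("default"))) wasi_nn_error
init_execution_context(void *ctx, graph g, graph_execution_context *exec_ctx)
{
    (void)ctx; (void)g; (void)exec_ctx;
    return unsupported_operation; /* stub */
}

__attribute__((visibility("default"))) wasi_nn_error
set_input(void *ctx, graph_execution_context exec_ctx, uint32_t index,
          tensor *input_tensor)
{
    (void)ctx; (void)exec_ctx; (void)index; (void)input_tensor;
    return unsupported_operation; /* stub */
}

__attribute__((visibility("default"))) wasi_nn_error
compute(void *ctx, graph_execution_context exec_ctx)
{
    (void)ctx; (void)exec_ctx;
    return unsupported_operation; /* stub */
}

__attribute__((visibility("default"))) wasi_nn_error
get_output(void *ctx, graph_execution_context exec_ctx, uint32_t index,
           tensor_data output_tensor, uint32_t *output_tensor_size)
{
    (void)ctx; (void)exec_ctx; (void)index; (void)output_tensor;
    (void)output_tensor_size;
    return unsupported_operation; /* stub */
}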
@@ -222,13 +392,28 @@ wasi_nn_load(wasm_exec_env_t exec_env, graph_builder_array_wasm *builder,
         goto fail;
     }
 
+    graph_encoding loaded_backed = autodetect;
+    if (!detect_and_load_backend(encoding, lookup, &loaded_backed)) {
+        res = invalid_encoding;
+        NN_ERR_PRINTF("load backend failed");
+        goto fail;
+    }
+
     WASINNContext *wasi_nn_ctx = wasm_runtime_get_wasi_nn_ctx(instance);
-    call_wasi_nn_func(res, load, wasi_nn_ctx->backend_ctx, &builder_native,
-                      encoding, target, g);
+    wasi_nn_ctx->backend = loaded_backed;
+
+    /* init() the backend */
+    call_wasi_nn_func(wasi_nn_ctx->backend, init, res,
+                      &wasi_nn_ctx->backend_ctx);
     if (res != success)
         goto fail;
 
-    wasi_nn_ctx->current_encoding = encoding;
+    call_wasi_nn_func(wasi_nn_ctx->backend, load, res, wasi_nn_ctx->backend_ctx,
+                      &builder_native, encoding, target, g);
+    if (res != success)
+        goto fail;
+
+    wasi_nn_ctx->backend = loaded_backed;
     wasi_nn_ctx->is_model_loaded = true;
 
 fail:

@@ -251,22 +436,37 @@ wasi_nn_load_by_name(wasm_exec_env_t exec_env, char *name, uint32_t name_len,
     }
 
     if (!wasm_runtime_validate_native_addr(instance, name, name_len)) {
+        NN_ERR_PRINTF("name is invalid");
         return invalid_argument;
     }
 
     if (!wasm_runtime_validate_native_addr(instance, g,
                                            (uint64)sizeof(graph))) {
+        NN_ERR_PRINTF("graph is invalid");
         return invalid_argument;
     }
 
+    graph_encoding loaded_backed = autodetect;
+    if (detect_and_load_backend(autodetect, lookup, &loaded_backed)) {
+        NN_ERR_PRINTF("load backend failed");
+        return invalid_encoding;
+    }
+
     WASINNContext *wasi_nn_ctx = wasm_runtime_get_wasi_nn_ctx(instance);
     wasi_nn_error res;
-    call_wasi_nn_func(res, load_by_name, wasi_nn_ctx->backend_ctx, name,
-                      name_len, g);
+
+    /* init() the backend */
+    call_wasi_nn_func(wasi_nn_ctx->backend, init, res,
+                      &wasi_nn_ctx->backend_ctx);
     if (res != success)
         return res;
 
-    wasi_nn_ctx->current_encoding = autodetect;
+    call_wasi_nn_func(wasi_nn_ctx->backend, load_by_name, res,
+                      wasi_nn_ctx->backend_ctx, name, name_len, g);
+    if (res != success)
+        return res;
+
+    wasi_nn_ctx->backend = loaded_backed;
     wasi_nn_ctx->is_model_loaded = true;
     return success;
 }

@@ -294,8 +494,8 @@ wasi_nn_init_execution_context(wasm_exec_env_t exec_env, graph g,
         return invalid_argument;
     }
 
-    call_wasi_nn_func(res, init_execution_context, wasi_nn_ctx->backend_ctx, g,
-                      ctx);
+    call_wasi_nn_func(wasi_nn_ctx->backend, init_execution_context, res,
+                      wasi_nn_ctx->backend_ctx, g, ctx);
     return res;
 }
 

@@ -322,7 +522,8 @@ wasi_nn_set_input(wasm_exec_env_t exec_env, graph_execution_context ctx,
                       &input_tensor_native)))
         return res;
 
-    call_wasi_nn_func(res, set_input, wasi_nn_ctx->backend_ctx, ctx, index,
+    call_wasi_nn_func(wasi_nn_ctx->backend, set_input, res,
+                      wasi_nn_ctx->backend_ctx, ctx, index,
                       &input_tensor_native);
     // XXX: Free intermediate structure pointers
     if (input_tensor_native.dimensions)

@@ -347,7 +548,8 @@ wasi_nn_compute(wasm_exec_env_t exec_env, graph_execution_context ctx)
     if (success != (res = is_model_initialized(wasi_nn_ctx)))
         return res;
 
-    call_wasi_nn_func(res, compute, wasi_nn_ctx->backend_ctx, ctx);
+    call_wasi_nn_func(wasi_nn_ctx->backend, compute, res,
+                      wasi_nn_ctx->backend_ctx, ctx);
     return res;
 }
 

@@ -383,12 +585,14 @@ wasi_nn_get_output(wasm_exec_env_t exec_env, graph_execution_context ctx,
     }
 
 #if WASM_ENABLE_WASI_EPHEMERAL_NN != 0
-    call_wasi_nn_func(res, get_output, wasi_nn_ctx->backend_ctx, ctx, index,
-                      output_tensor, &output_tensor_len);
+    call_wasi_nn_func(wasi_nn_ctx->backend, get_output, res,
+                      wasi_nn_ctx->backend_ctx, ctx, index, output_tensor,
+                      &output_tensor_len);
     *output_tensor_size = output_tensor_len;
 #else /* WASM_ENABLE_WASI_EPHEMERAL_NN == 0 */
-    call_wasi_nn_func(res, get_output, wasi_nn_ctx->backend_ctx, ctx, index,
-                      output_tensor, output_tensor_size);
+    call_wasi_nn_func(wasi_nn_ctx->backend, get_output, res,
+                      wasi_nn_ctx->backend_ctx, ctx, index, output_tensor,
+                      output_tensor_size);
 #endif /* WASM_ENABLE_WASI_EPHEMERAL_NN != 0 */
     return res;
 }

@@ -423,44 +627,3 @@ get_wasi_nn_export_apis(NativeSymbol **p_native_symbols)
     *p_native_symbols = native_symbols_wasi_nn;
     return sizeof(native_symbols_wasi_nn) / sizeof(NativeSymbol);
 }
-
-__attribute__((used)) uint32_t
-get_native_lib(char **p_module_name, NativeSymbol **p_native_symbols)
-{
-    NN_DBG_PRINTF("[Native Register] get_native_lib");
-
-#if WASM_ENABLE_WASI_EPHEMERAL_NN != 0
-    *p_module_name = "wasi_ephemeral_nn";
-#else /* WASM_ENABLE_WASI_EPHEMERAL_NN == 0 */
-    *p_module_name = "wasi_nn";
-#endif /* WASM_ENABLE_WASI_EPHEMERAL_NN != 0 */
-
-    return get_wasi_nn_export_apis(p_native_symbols);
-}
-
-__attribute__((used)) int
-init_native_lib()
-{
-    NN_DBG_PRINTF("[Native Register] init_native_lib");
-
-    if (!wasi_nn_initialize())
-        return 1;
-
-    return 0;
-}
-
-__attribute__((used)) void
-deinit_native_lib()
-{
-    NN_DBG_PRINTF("[Native Register] deinit_native_lib");
-
-    wasi_nn_destroy();
-}
-
-__attribute__((used)) bool
-wasi_nn_register_backend(api_function apis)
-{
-    NN_DBG_PRINTF("[Native Register] wasi_nn_register_backend");
-    lookup = apis;
-    return true;
-}
@@ -5,7 +5,7 @@
 
 #include "wasi_nn_types.h"
 #include "wasi_nn_openvino.h"
-#include "logger.h"
+#include "utils/logger.h"
 #include "bh_platform.h"
 
 #include "openvino/c/openvino.h"

@@ -82,7 +82,7 @@ dump_ov_shape_t(const ov_shape_t *shape, int32_t output_len, char *output)
 static void
 print_model_input_output_info(ov_model_t *model)
 {
-    wasi_nn_error ov_error;
+    wasi_nn_error ov_error = success;
     char *friendly_name = NULL;
     size_t input_size = 0;
     ov_output_const_port_t *input_port = NULL;

@@ -136,6 +136,7 @@ print_model_input_output_info(ov_model_t *model)
         output_port = NULL;
     }
 
+    ov_error = ov_error;
 fail:
     if (friendly_name)
         ov_free(friendly_name);

@@ -157,16 +158,23 @@ wasi_nn_tensor_type_to_openvino_element_type(tensor_type wasi_nn_type)
             return F16;
         case fp32:
             return F32;
+#if WASM_ENABLE_WASI_EPHEMERAL_NN != 0
         case fp64:
             return F64;
         case bf16:
             return BF16;
+        case i64:
+            return I64;
         case u8:
             return U8;
         case i32:
             return I32;
-        case i64:
-            return I64;
+#else
+        case up8:
+            return U8;
+        case ip32:
+            return I32;
+#endif
         default:
             break;
     }

@@ -178,7 +186,7 @@ wasi_nn_tensor_type_to_openvino_element_type(tensor_type wasi_nn_type)
 static wasi_nn_error
 uint32_array_to_int64_array(uint32_t array_size, uint32_t *src, int64_t **dst)
 {
-    *dst = malloc(array_size * sizeof(int64_t));
+    *dst = os_malloc(array_size * sizeof(int64_t));
     if (!(*dst))
         return runtime_error;
 

@@ -189,9 +197,9 @@ uint32_array_to_int64_array(uint32_t array_size, uint32_t *src, int64_t **dst)
     return success;
 }
 
-wasi_nn_error
-openvino_load(void *ctx, graph_builder_array *builder, graph_encoding encoding,
+__attribute__((visibility("default"))) wasi_nn_error
+load(void *ctx, graph_builder_array *builder, graph_encoding encoding,
      execution_target target, graph *g)
 {
     OpenVINOContext *ov_ctx = (OpenVINOContext *)ctx;
     wasi_nn_error ret = unsupported_operation;

@@ -227,7 +235,7 @@ openvino_load(void *ctx, graph_builder_array *builder, graph_encoding encoding,
 
     /* transfer weight to an ov tensor */
     {
-        ov_ctx->weight_data = malloc(weight.size);
+        ov_ctx->weight_data = os_malloc(weight.size);
         if (!ov_ctx->weight_data)
             goto fail;
         memcpy(ov_ctx->weight_data, weight.buf, weight.size);

@@ -255,9 +263,8 @@ fail:
     return ret;
 }
 
-wasi_nn_error
-openvino_load_by_name(void *ctx, const char *filename, uint32_t filename_len,
-                      graph *g)
+__attribute__((visibility("default"))) wasi_nn_error
+load_by_name(void *ctx, const char *filename, uint32_t filename_len, graph *g)
 {
     OpenVINOContext *ov_ctx = (OpenVINOContext *)ctx;
     wasi_nn_error ret = unsupported_operation;

@@ -270,16 +277,15 @@ fail:
     return ret;
 }
 
-wasi_nn_error
-openvino_init_execution_context(void *ctx, graph g,
-                                graph_execution_context *exec_ctx)
+__attribute__((visibility("default"))) wasi_nn_error
+init_execution_context(void *ctx, graph g, graph_execution_context *exec_ctx)
 {
     return success;
 }
 
-wasi_nn_error
-openvino_set_input(void *ctx, graph_execution_context exec_ctx, uint32_t index,
+__attribute__((visibility("default"))) wasi_nn_error
+set_input(void *ctx, graph_execution_context exec_ctx, uint32_t index,
           tensor *wasi_nn_tensor)
 {
     OpenVINOContext *ov_ctx = (OpenVINOContext *)ctx;
     wasi_nn_error ret = unsupported_operation;

@@ -405,7 +411,7 @@ openvino_set_input(void *ctx, graph_execution_context exec_ctx, uint32_t index,
 
 fail:
     if (ov_dims)
-        free(ov_dims);
+        os_free(ov_dims);
     ov_shape_free(&input_shape);
     if (ppp)
         ov_preprocess_prepostprocessor_free(ppp);

@@ -429,8 +435,8 @@ fail:
     return ret;
 }
 
-wasi_nn_error
-openvino_compute(void *ctx, graph_execution_context exec_ctx)
+__attribute__((visibility("default"))) wasi_nn_error
+compute(void *ctx, graph_execution_context exec_ctx)
 {
     OpenVINOContext *ov_ctx = (OpenVINOContext *)ctx;
     wasi_nn_error ret = unsupported_operation;

@@ -441,9 +447,9 @@ fail:
     return ret;
 }
 
-wasi_nn_error
-openvino_get_output(void *ctx, graph_execution_context exec_ctx, uint32_t index,
+__attribute__((visibility("default"))) wasi_nn_error
+get_output(void *ctx, graph_execution_context exec_ctx, uint32_t index,
            tensor_data output_tensor, uint32_t *output_tensor_size)
 {
     OpenVINOContext *ov_ctx = (OpenVINOContext *)ctx;
     wasi_nn_error ret = unsupported_operation;

@@ -471,8 +477,8 @@ fail:
     return ret;
 }
 
-wasi_nn_error
-openvino_initialize(void **ctx)
+__attribute__((visibility("default"))) wasi_nn_error
+init_backend(void **ctx)
 {
     ov_version_t version;
     OpenVINOContext *ov_ctx = NULL;

@@ -509,8 +515,8 @@ fail:
     return ret;
 }
 
-wasi_nn_error
-openvino_destroy(void *ctx)
+__attribute__((visibility("default"))) wasi_nn_error
+deinit_backend(void *ctx)
 {
     OpenVINOContext *ov_ctx = (OpenVINOContext *)ctx;
 

@@ -518,7 +524,7 @@ openvino_destroy(void *ctx)
         return invalid_argument;
 
     if (ov_ctx->weight_data)
-        free(ov_ctx->weight_data);
+        os_free(ov_ctx->weight_data);
 
     if (ov_ctx->weights_tensor)
         ov_tensor_free(ov_ctx->weights_tensor);

@@ -541,19 +547,3 @@ openvino_destroy(void *ctx)
     os_free(ov_ctx);
     return success;
 }
-
-__attribute__((constructor(200))) void
-openvino_register_backend()
-{
-    api_function apis = {
-        .load = openvino_load,
-        .load_by_name = openvino_load_by_name,
-        .init_execution_context = openvino_init_execution_context,
-        .set_input = openvino_set_input,
-        .compute = openvino_compute,
-        .get_output = openvino_get_output,
-        .init = openvino_initialize,
-        .deinit = openvino_destroy,
-    };
-    wasi_nn_register_backend(apis);
-}
@@ -8,29 +8,28 @@
 
 #include "wasi_nn_types.h"
 
-wasi_nn_error
-openvino_load(void *ctx, graph_builder_array *builder, graph_encoding encoding,
+__attribute__((visibility("default"))) wasi_nn_error
+load(void *ctx, graph_builder_array *builder, graph_encoding encoding,
      execution_target target, graph *g);
 
-wasi_nn_error
-openvino_init_execution_context(void *ctx, graph g,
-                                graph_execution_context *exec_ctx);
+__attribute__((visibility("default"))) wasi_nn_error
+init_execution_context(void *ctx, graph g, graph_execution_context *exec_ctx);
 
-wasi_nn_error
-openvino_set_input(void *ctx, graph_execution_context exec_ctx, uint32_t index,
+__attribute__((visibility("default"))) wasi_nn_error
+set_input(void *ctx, graph_execution_context exec_ctx, uint32_t index,
           tensor *input_tensor);
 
-wasi_nn_error
-openvino_compute(void *ctx, graph_execution_context exec_ctx);
+__attribute__((visibility("default"))) wasi_nn_error
+compute(void *ctx, graph_execution_context exec_ctx);
 
-wasi_nn_error
-openvino_get_output(void *ctx, graph_execution_context exec_ctx, uint32_t index,
+__attribute__((visibility("default"))) wasi_nn_error
+get_output(void *ctx, graph_execution_context exec_ctx, uint32_t index,
            tensor_data output_tensor, uint32_t *output_tensor_size);
 
-wasi_nn_error
-openvino_initialize(void **ctx);
+__attribute__((visibility("default"))) wasi_nn_error
+init_backend(void **ctx);
 
-wasi_nn_error
-openvino_destroy(void *ctx);
+__attribute__((visibility("default"))) wasi_nn_error
+deinit_backend(void *ctx);
 
 #endif /* WASI_NN_OPENVINO_HPP */
@@ -11,8 +11,7 @@
 
 typedef struct {
     bool is_model_loaded;
-    // Optional
-    graph_encoding current_encoding;
+    graph_encoding backend;
     void *backend_ctx;
 } WASINNContext;
 
@@ -4,7 +4,7 @@
  */
 
 #include "wasi_nn_tensorflowlite.hpp"
-#include "logger.h"
+#include "utils/logger.h"
 
 #include "bh_platform.h"
 #include "wasi_nn_types.h"

@@ -113,10 +113,9 @@ is_valid_graph_execution_context(TFLiteContext *tfl_ctx,
 }
 
 /* WASI-NN (tensorflow) implementation */
-wasi_nn_error
-tensorflowlite_load(void *tflite_ctx, graph_builder_array *builder,
-                    graph_encoding encoding, execution_target target, graph *g)
+__attribute__((visibility("default"))) wasi_nn_error
+load(void *tflite_ctx, graph_builder_array *builder, graph_encoding encoding,
+     execution_target target, graph *g)
 {
     TFLiteContext *tfl_ctx = (TFLiteContext *)tflite_ctx;
 

@@ -168,9 +167,9 @@ tensorflowlite_load(void *tflite_ctx, graph_builder_array *builder,
     return success;
 }
 
-wasi_nn_error
-tensorflowlite_load_by_name(void *tflite_ctx, const char *filename,
-                            uint32_t filename_len, graph *g)
+__attribute__((visibility("default"))) wasi_nn_error
+load_by_name(void *tflite_ctx, const char *filename, uint32_t filename_len,
+             graph *g)
 {
     TFLiteContext *tfl_ctx = (TFLiteContext *)tflite_ctx;
 

@@ -192,9 +191,8 @@ tensorflowlite_load_by_name(void *tflite_ctx, const char *filename,
     return success;
 }
 
-wasi_nn_error
-tensorflowlite_init_execution_context(void *tflite_ctx, graph g,
-                                      graph_execution_context *ctx)
+__attribute__((visibility("default"))) wasi_nn_error
+init_execution_context(void *tflite_ctx, graph g, graph_execution_context *ctx)
 {
     TFLiteContext *tfl_ctx = (TFLiteContext *)tflite_ctx;
 

@@ -281,9 +279,9 @@ tensorflowlite_init_execution_context(void *tflite_ctx, graph g,
     return success;
 }
 
-wasi_nn_error
-tensorflowlite_set_input(void *tflite_ctx, graph_execution_context ctx,
-                         uint32_t index, tensor *input_tensor)
+__attribute__((visibility("default"))) wasi_nn_error
+set_input(void *tflite_ctx, graph_execution_context ctx, uint32_t index,
+          tensor *input_tensor)
 {
     TFLiteContext *tfl_ctx = (TFLiteContext *)tflite_ctx;
 

@@ -352,8 +350,8 @@ tensorflowlite_set_input(void *tflite_ctx, graph_execution_context ctx,
     return success;
 }
 
-wasi_nn_error
-tensorflowlite_compute(void *tflite_ctx, graph_execution_context ctx)
+__attribute__((visibility("default"))) wasi_nn_error
+compute(void *tflite_ctx, graph_execution_context ctx)
 {
     TFLiteContext *tfl_ctx = (TFLiteContext *)tflite_ctx;
 

@@ -365,10 +363,9 @@ tensorflowlite_compute(void *tflite_ctx, graph_execution_context ctx)
     return success;
 }
 
-wasi_nn_error
-tensorflowlite_get_output(void *tflite_ctx, graph_execution_context ctx,
-                          uint32_t index, tensor_data output_tensor,
-                          uint32_t *output_tensor_size)
+__attribute__((visibility("default"))) wasi_nn_error
+get_output(void *tflite_ctx, graph_execution_context ctx, uint32_t index,
+           tensor_data output_tensor, uint32_t *output_tensor_size)
 {
     TFLiteContext *tfl_ctx = (TFLiteContext *)tflite_ctx;
 

@@ -434,8 +431,8 @@ tensorflowlite_get_output(void *tflite_ctx, graph_execution_context ctx,
     return success;
 }
 
-wasi_nn_error
-tensorflowlite_initialize(void **tflite_ctx)
+__attribute__((visibility("default"))) wasi_nn_error
+init_backend(void **tflite_ctx)
 {
     TFLiteContext *tfl_ctx = new TFLiteContext();
     if (tfl_ctx == NULL) {

@@ -461,8 +458,8 @@ tensorflowlite_initialize(void **tflite_ctx)
     return success;
 }
 
-wasi_nn_error
-tensorflowlite_destroy(void *tflite_ctx)
+__attribute__((visibility("default"))) wasi_nn_error
+deinit_backend(void *tflite_ctx)
 {
     /*
        TensorFlow Lite memory is internally managed by tensorflow

@@ -513,19 +510,3 @@ tensorflowlite_destroy(void *tflite_ctx)
     NN_DBG_PRINTF("Memory free'd.");
     return success;
 }
-
-__attribute__((constructor(200))) void
-tflite_register_backend()
-{
-    api_function apis = {
-        .load = tensorflowlite_load,
-        .load_by_name = tensorflowlite_load_by_name,
-        .init_execution_context = tensorflowlite_init_execution_context,
-        .set_input = tensorflowlite_set_input,
-        .compute = tensorflowlite_compute,
-        .get_output = tensorflowlite_get_output,
-        .init = tensorflowlite_initialize,
-        .deinit = tensorflowlite_destroy,
-    };
-    wasi_nn_register_backend(apis);
-}
@@ -12,31 +12,33 @@
 extern "C" {
 #endif
 
-wasi_nn_error
-tensorflowlite_load(void *tflite_ctx, graph_builder_array *builder,
-                    graph_encoding encoding, execution_target target, graph *g);
+__attribute__((visibility("default"))) wasi_nn_error
+load(void *tflite_ctx, graph_builder_array *builder, graph_encoding encoding,
+     execution_target target, graph *g);
 
-wasi_nn_error
-tensorflowlite_init_execution_context(void *tflite_ctx, graph g,
-                                      graph_execution_context *ctx);
+__attribute__((visibility("default"))) wasi_nn_error
+load_by_name(void *tflite_ctx, const char *filename, uint32_t filename_len,
+             graph *g);
 
-wasi_nn_error
-tensorflowlite_set_input(void *tflite_ctx, graph_execution_context ctx,
-                         uint32_t index, tensor *input_tensor);
+__attribute__((visibility("default"))) wasi_nn_error
+init_execution_context(void *tflite_ctx, graph g, graph_execution_context *ctx);
 
-wasi_nn_error
-tensorflowlite_compute(void *tflite_ctx, graph_execution_context ctx);
+__attribute__((visibility("default"))) wasi_nn_error
+set_input(void *tflite_ctx, graph_execution_context ctx, uint32_t index,
+          tensor *input_tensor);
 
-wasi_nn_error
-tensorflowlite_get_output(void *tflite_ctx, graph_execution_context ctx,
-                          uint32_t index, tensor_data output_tensor,
-                          uint32_t *output_tensor_size);
+__attribute__((visibility("default"))) wasi_nn_error
+compute(void *tflite_ctx, graph_execution_context ctx);
 
-wasi_nn_error
-tensorflowlite_initialize(void **tflite_ctx);
+__attribute__((visibility("default"))) wasi_nn_error
+get_output(void *tflite_ctx, graph_execution_context ctx, uint32_t index,
+           tensor_data output_tensor, uint32_t *output_tensor_size);
 
-wasi_nn_error
-tensorflowlite_destroy(void *tflite_ctx);
+__attribute__((visibility("default"))) wasi_nn_error
+init_backend(void **tflite_ctx);
+
+__attribute__((visibility("default"))) wasi_nn_error
+deinit_backend(void *tflite_ctx);
 
 #ifdef __cplusplus
 }
@@ -78,23 +78,22 @@ RUN cd openvino-mobilenet-raw \
   && ./download_mobilenet.sh . \
   && ls -l mobilenet.xml mobilenet.bin tensor-1x224x224x3-f32.bgr
 
-# RUN apt update \
-#   && apt install -y valgrind
-
 #
 # iwasm. build from source
 WORKDIR /workspaces/wamr
 COPY . .
 
 WORKDIR /workspaces/wamr/product-mini/platforms/linux
 
 RUN OpenVINO_DIR=/usr/lib/openvino-2023.2.0 \
   cmake -S . -B build \
     -DWAMR_BUILD_WASI_NN=1 -DWAMR_BUILD_WASI_EPHEMERAL_NN=1 \
     -DWAMR_BUILD_WASI_NN_OPENVINO=1 -DWAMR_BUILD_WASI_NN_TFLITE=1 \
   && cmake --build build
-RUN ln -sf "$(realpath ./build/iwasm)" /usr/local/bin/iwasm
 
-#
+ENV PATH=/workspaces/wamr/product-mini/platforms/linux/build:${PATH}
+ENV LD_LIBRARY_PATH=/workspaces/wamr/product-mini/platforms/linux/build
 
 # add smoke test script
 COPY core/iwasm/libraries/wasi-nn/test/run_smoke_test.py /
 
@@ -163,7 +163,6 @@ def execute_tflite_birds_v1_image(iwasm_bin: str, wasmedge_bin: str, cwd: Path):
     iwasm_output = execute_tflite_birds_v1_image_once(
         iwasm_bin,
         [
-            "--native-lib=/workspaces/wamr/product-mini/platforms/linux/build/libwasi-nn-tflite.so",
             "--map-dir=.:.",
         ],
         cwd,

@@ -182,7+181,6 @@ def execute_openvino_mobilenet_image(iwasm_bin: str, wasmedge_bin: str, cwd: Pat
     iwasm_output = execute_openvino_mobilenet_image_once(
         iwasm_bin,
         [
-            "--native-lib=/workspaces/wamr/product-mini/platforms/linux/build/libwasi-nn-openvino.so",
            "--map-dir=.:.",
         ],
         cwd,

@@ -201,7 +199,6 @@ def execute_openvino_mobilenet_raw(iwasm_bin: str, wasmedge_bin: str, cwd: Path)
     iwasm_output = execute_openvino_mobilenet_raw_once(
         iwasm_bin,
         [
-            "--native-lib=/workspaces/wamr/product-mini/platforms/linux/build/libwasi-nn-openvino.so",
             "--map-dir=.:.",
         ],
         cwd,

@@ -239,7 +236,6 @@ def execute_openvino_road_segmentation_adas(
     iwasm_output = execute_openvino_road_segmentation_adas_once(
         iwasm_bin,
         [
-            "--native-lib=/workspaces/wamr/product-mini/platforms/linux/build/libwasi-nn-openvino.so",
             "--map-dir=.:.",
         ],
         cwd,