Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 13 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
FROM ubuntu:20.04

ENV DEBIAN_FRONTEND=noninteractive

# Build prerequisites for WAMR + TensorFlow Lite.
# BUG FIX: package is "build-essential" ("build.essential" makes apt fail).
RUN apt-get update && apt-get install -y \
    build-essential \
    cmake \
    git \
    wget \
    && rm -rf /var/lib/apt/lists/*

# Install the WASI SDK (clang toolchain targeting wasm32-wasi) at /opt/wasi-sdk.
RUN wget -q https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-14/wasi-sdk-14.0-linux.tar.gz && \
    tar xf wasi-sdk-14.0-linux.tar.gz -C /opt && rm -f wasi-sdk-14.0-linux.tar.gz && \
    mv /opt/wasi-sdk-14.0 /opt/wasi-sdk
5 changes: 5 additions & 0 deletions build-scripts/install_tensorflow.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
#!/bin/sh
# Fetch the TensorFlow sources (tag v2.8.2) used to build TensorFlow Lite
# for the wasi-nn backend.
set -e

mkdir -p /root/src/tflite
cd /root/src/tflite

# BUG FIX: git clone fails if the target directory already exists, and this
# script is executed on every CMake configure -- skip the clone when done.
# --depth 1: only the tagged snapshot is needed, not the full history.
if [ ! -d tensorflow_src ]; then
    git clone https://github.com/tensorflow/tensorflow.git tensorflow_src \
        --branch v2.8.2 --depth 1
fi
14 changes: 14 additions & 0 deletions build-scripts/runtime_lib.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,19 @@ elseif (WAMR_BUILD_LIBC_WASI EQUAL 1)
include (${IWASM_DIR}/libraries/libc-wasi/libc_wasi.cmake)
endif ()

if (WAMR_BUILD_WASI_NN EQUAL 1)
  # Fetch the TensorFlow sources at configure time.
  # BUG FIX: the exit code was captured but never checked, so a failed
  # download silently produced a broken build tree.
  execute_process (
    COMMAND ${WAMR_ROOT_DIR}/build-scripts/install_tensorflow.sh
    RESULT_VARIABLE TENSORFLOW_RESULT
  )
  if (NOT TENSORFLOW_RESULT EQUAL 0)
    message (FATAL_ERROR
             "install_tensorflow.sh failed with code ${TENSORFLOW_RESULT}")
  endif ()
  # NOTE(review): hard-coded path assumes the Docker build environment.
  set (TENSORFLOW_SOURCE_DIR "/root/src/tflite/tensorflow_src")
  include_directories (${CMAKE_CURRENT_BINARY_DIR}/flatbuffers/include)
  include_directories (${TENSORFLOW_SOURCE_DIR})
  add_subdirectory ("${TENSORFLOW_SOURCE_DIR}/tensorflow/lite"
                    "${CMAKE_CURRENT_BINARY_DIR}/tensorflow-lite"
                    EXCLUDE_FROM_ALL)
  include (${IWASM_DIR}/libraries/wasi-nn/wasi_nn.cmake)
endif ()

if (WAMR_BUILD_LIB_PTHREAD EQUAL 1)
include (${IWASM_DIR}/libraries/lib-pthread/lib_pthread.cmake)
# Enable the dependent feature if lib pthread is enabled
Expand Down Expand Up @@ -133,6 +146,7 @@ set (source_all
${UTILS_SHARED_SOURCE}
${LIBC_BUILTIN_SOURCE}
${LIBC_WASI_SOURCE}
${LIBC_WASI_NN_SOURCE}
${IWASM_COMMON_SOURCE}
${IWASM_INTERP_SOURCE}
${IWASM_AOT_SOURCE}
Expand Down
10 changes: 10 additions & 0 deletions core/iwasm/common/wasm_native.c
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,9 @@ get_spectest_export_apis(NativeSymbol **p_libc_builtin_apis);
uint32
get_libc_wasi_export_apis(NativeSymbol **p_libc_wasi_apis);

/* wasi-nn native symbol table, implemented in libraries/wasi-nn/wasi_nn.c.
   FIX: parameter name was copy-pasted from the libc-wasi declaration
   ("p_libc_wasi_apis"), which was misleading. */
uint32_t
get_wasi_nn_export_apis(NativeSymbol **p_wasi_nn_apis);

uint32
get_base_lib_export_apis(NativeSymbol **p_base_lib_apis);

Expand Down Expand Up @@ -414,6 +417,13 @@ wasm_native_init()
return false;
#endif /* WASM_ENABLE_LIBC_EMCC */

#if WASM_ENABLE_WASI_NN != 0
    /* Register the wasi-nn host functions.
       NOTE(review): registered under the "env" module so that wasm apps whose
       undefined imports default to "env" resolve; the wasi-nn spec names the
       module "wasi_nn" -- confirm which tag the guest toolchain emits. */
    n_native_symbols = get_wasi_nn_export_apis(&native_symbols);
    if (!wasm_native_register_natives("env", native_symbols,
                                      n_native_symbols))
        return false;
#endif

return true;
}

Expand Down
41 changes: 41 additions & 0 deletions core/iwasm/libraries/wasi-nn/lib_run_inference.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
#include "lib_run_inference.hpp"

#include <tensorflow/lite/interpreter.h>
#include <tensorflow/lite/kernels/register.h>
#include <tensorflow/lite/model.h>
#include <tensorflow/lite/optional_debug_tools.h>
#include <tensorflow/lite/error_reporter.h>

// Positions of the entries inside a graph_builder_array:
// GRAPH = model buffer, GRAPH_SIZE = pointer to the buffer length.
enum Idx { GRAPH = 0, GRAPH_SIZE = 1 };

// Populated by _load() and kept alive for subsequent inference calls.
std::unique_ptr<tflite::FlatBufferModel> model = nullptr;
std::unique_ptr<tflite::Interpreter> interpreter = nullptr;

/**
 * Build a TFLite model and interpreter from a builder array whose entries
 * already hold native addresses (see load_tensorflow in wasi_nn.c).
 *
 * @param graph_builder [GRAPH] = flatbuffer model bytes,
 *                      [GRAPH_SIZE] = pointer to the byte count
 * @param encoding      must be `tensorflow`; anything else is rejected
 * @return success, or invalid_argument on any failure
 */
uint32_t _load(graph_builder_array graph_builder, graph_encoding encoding) {

    if (encoding != tensorflow) { return invalid_argument; }

    uint32_t *size = (uint32_t *)graph_builder[Idx::GRAPH_SIZE];

    // BUG FIX: error_reporter was an uninitialized pointer handed to
    // BuildFromBuffer (undefined behavior); use TFLite's default reporter.
    tflite::ErrorReporter *error_reporter = tflite::DefaultErrorReporter();

    model = tflite::FlatBufferModel::BuildFromBuffer(
        (const char *)graph_builder[Idx::GRAPH], *size, error_reporter);

    if (model == nullptr) {
        printf("failure: null model \n");
        return invalid_argument;
    }

    // Build the interpreter with the InterpreterBuilder.
    tflite::ops::builtin::BuiltinOpResolver resolver;
    tflite::InterpreterBuilder builder(*model, resolver);
    builder(&interpreter);

    if (interpreter == nullptr) {
        printf("failure: null interpreter \n");
        return invalid_argument;
    }

    return success;
}
18 changes: 18 additions & 0 deletions core/iwasm/libraries/wasi-nn/lib_run_inference.hpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
#ifndef LIB_RUN_INFERENCE_HPP
#define LIB_RUN_INFERENCE_HPP

#include <stdio.h>

#include "wasi_nn.h"

#ifdef __cplusplus
extern "C" {
#endif

/* Build a TFLite model + interpreter from a builder array whose entries have
 * already been translated to native addresses (see wasi_nn.c).
 * Returns success or invalid_argument (nn_erno values). */
uint32_t _load(graph_builder_array graph_builder, graph_encoding encoding);

#ifdef __cplusplus
}
#endif

#endif
74 changes: 74 additions & 0 deletions core/iwasm/libraries/wasi-nn/wasi_nn.c
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
#include "wasi_nn.h"
#include <stdio.h>
#include <assert.h>
#include <errno.h>
#include <string.h>
#include <stdlib.h>

#include "wasm_export.h"

#include "lib_run_inference.hpp"

/**
* @brief loader of tensorflow
*
* @param builder array of 2 pointers: first its the buffer, second its the size
*/
/**
 * @brief Rewrite, in place, the wasm-app offsets stored in a builder array
 *        into native addresses.
 *
 * @param instance module instance used for address translation
 * @param builder array of 2 pointers: first is the buffer, second is the size
 */
void load_tensorflow(wasm_module_inst_t instance, graph_builder_array builder) {
    printf("Loading tensorflow...\n");
    for (int i = 0; i < 2; ++i) {
        /* FIX: the entries arrive as app offsets smuggled through a pointer
           slot; cast explicitly instead of relying on an implicit
           pointer-to-integer conversion (a compiler warning at best). */
        builder[i] = (graph_builder)wasm_runtime_addr_app_to_native(
            instance, (uintptr_t)builder[i]);
        /* NOTE(review): the offsets are not validated with
           wasm_runtime_validate_app_addr -- a hostile module could pass an
           out-of-bounds offset; confirm and add bounds checks. */
    }
}

/**
 * @brief Host implementation of the wasi-nn "load" import.
 *
 * @param builder  app-space address of a 2-entry graph_builder_array
 * @param encoding graph_encoding value supplied by the guest
 * @return success or invalid_argument (nn_erno values)
 */
uint32_t wasi_nn_load(wasm_exec_env_t exec_env, uint32_t builder, uint32_t encoding)
{
    printf("Inside wasi_nn_load!\n\n");
    wasm_module_inst_t instance = wasm_runtime_get_module_inst(exec_env);
    graph_builder_array buf =
        (graph_builder_array)wasm_runtime_addr_app_to_native(instance, builder);
    switch ((graph_encoding)encoding) {
        case tensorflow:
            load_tensorflow(instance, buf);
            break;
        case openvino:
        case onnx:
        default:
            /* BUG FIX: values outside the enum previously fell through the
               switch and reached _load() without any validation */
            return invalid_argument;
    }
    return _load(buf, (graph_encoding)encoding);
}

/* TODO: stub -- wasi-nn init_execution_context is not implemented yet */
void wasi_nn_init_execution_context()
{

}

/* TODO: stub -- wasi-nn set_input is not implemented yet */
void wasi_nn_set_input()
{
    // interpreter->AllocateTensors();
}

/* TODO: stub -- wasi-nn compute is not implemented yet */
void wasi_nn_compute()
{

}

/* TODO: stub -- wasi-nn get_output is not implemented yet */
void wasi_nn_get_output()
{

}

/* clang-format off */
/* Expands to a NativeSymbol entry: exported name, wasi_nn_-prefixed host
   function, wasm signature string, no attachment. */
#define REG_NATIVE_FUNC(func_name, signature) \
    { #func_name, wasi_nn_##func_name, signature, NULL }
/* clang-format on */

/* "(ii)i": two i32 parameters (builder app address, encoding), i32 result */
static NativeSymbol native_symbols_wasi_nn[] = {
    REG_NATIVE_FUNC(load, "(ii)i"),
};

/* Hand the wasi-nn native symbol table to the runtime (wasm_native.c).
   Returns the number of entries in the table. */
uint32_t
get_wasi_nn_export_apis(NativeSymbol **p_wasi_nn_apis)
{
    *p_wasi_nn_apis = native_symbols_wasi_nn;
    return sizeof(native_symbols_wasi_nn) / sizeof(native_symbols_wasi_nn[0]);
}
7 changes: 7 additions & 0 deletions core/iwasm/libraries/wasi-nn/wasi_nn.cmake
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
set (WASI_NN_DIR ${CMAKE_CURRENT_LIST_DIR})

add_definitions (-DWASM_ENABLE_WASI_NN=1)

# List the sources explicitly: file(GLOB_RECURSE) misses newly added files
# until a manual re-configure and can silently pick up stray files.
set (LIBC_WASI_NN_SOURCE
     ${WASI_NN_DIR}/wasi_nn.c
     ${WASI_NN_DIR}/lib_run_inference.cpp
)
63 changes: 63 additions & 0 deletions core/iwasm/libraries/wasi-nn/wasi_nn.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
#ifndef WASI_NN_H
#define WASI_NN_H

#include <stdint.h>

/**
 * Following definition from:
 * https://github.com/WebAssembly/wasi-nn/blob/c557b2e9f84b6630f13b3185b43607f0388343b2/phases/ephemeral/witx/wasi_ephemeral_nn.witx
 */

/* Size of a buffer, in bytes. */
typedef uint32_t buffer_size;

/* Error codes returned by wasi-nn calls.
   NOTE(review): the spec spells this "nn_errno"; renaming the typedef would
   change the public interface, so the typo is kept and flagged for now. */
typedef enum {
    success = 0,
    invalid_argument,
    missing_memory,
    busy
} nn_erno;

/* Tensor dimensions, one entry per axis. */
typedef uint32_t * tensor_dimensions;

/* Element type of a tensor. */
typedef enum {
    f16 = 0,
    f32,
    u8,
    i32
} tensor_type;

/* Raw tensor payload. */
typedef uint8_t* tensor_data;

typedef struct {
    tensor_dimensions dimensions;
    tensor_type type;
    tensor_data data;
} tensor;

/* One segment of a serialized graph. */
typedef uint8_t * graph_builder;

/* Array of graph segments. */
typedef graph_builder * graph_builder_array;

/* Supported model formats. */
typedef enum {
    openvino = 0,
    tensorflow,
    onnx
} graph_encoding;

/* Device the inference should run on. */
typedef enum {
    cpu = 0,
    gpu,
    tpu
} execution_target;

/* Load a model from the builder array; returns a nn_erno value. */
uint32_t load(graph_builder_array builder, graph_encoding encoding);

/* BUG FIX: the declarations below use (void) -- an empty () parameter list
   declares an unprototyped function in C, disabling argument checking. */
void init_execution_context(void);

void set_input(void);

void compute(void);

void get_output(void);

#endif
1 change: 1 addition & 0 deletions run.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
#!/bin/sh
# Start the wamr dev container with the current directory mounted at /home.
# BUG FIX: quote $PWD so paths containing spaces do not split the -v argument.
docker run -it -v "$PWD":/home wamr
4 changes: 3 additions & 1 deletion samples/basic/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@ set (WAMR_BUILD_INTERP 1)
set (WAMR_BUILD_AOT 1)
set (WAMR_BUILD_JIT 0)
set (WAMR_BUILD_LIBC_BUILTIN 1)
set (WAMR_BUILD_WASI_NN 1)

if (NOT MSVC)
set (WAMR_BUILD_LIBC_WASI 1)
Expand Down Expand Up @@ -81,5 +82,6 @@ add_executable (basic src/main.c src/native_impl.c ${UNCOMMON_SHARED_SOURCE})
if (APPLE)
    target_link_libraries (basic vmlib -lm -ldl -lpthread)
else ()
    # TODO: add this in the vmlib. Otherwise, it will only work in the samples/basic
    # NOTE(review): the APPLE branch does not link tensorflow-lite -- with
    # WAMR_BUILD_WASI_NN enabled the macOS link would fail; confirm whether
    # wasi-nn is meant to be supported there.
    target_link_libraries (basic vmlib -lm -ldl -lpthread -lrt tensorflow-lite)
endif ()
1 change: 1 addition & 0 deletions samples/basic/build.sh
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ OUT_FILE=${i%.*}.wasm
-Wl,--export=float_to_string \
-Wl,--export=calculate\
-Wl,--allow-undefined \
-I/home/core/iwasm/libraries/wasi-nn \
-o ${OUT_DIR}/wasm-apps/${OUT_FILE} ${APP_SRC}


Expand Down
7 changes: 7 additions & 0 deletions samples/basic/wasm-apps/testapp.c
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
#include <stdlib.h>
#include <string.h>
#include <stdint.h>
#include "wasi_nn.h"

int
intToStr(int x, char *str, int str_len, int digit);
Expand All @@ -21,6 +22,12 @@ calculate_native(int32_t n, int32_t func1, int32_t func2);
float
generate_float(int iteration, double seed1, float seed2)
{
char *buf = strdup("test_message");
uint32_t *size = malloc(sizeof(uint32_t));
*size = 4096;
graph_builder_array arr[] = {(graph_builder)buf, (graph_builder)size};

load(arr, 1);
float ret;

printf("calling into WASM function: %s\n", __FUNCTION__);
Expand Down