Merged

38 commits
0c5442d  First initial design of wasi nn (tonibofarull, Jun 8, 2022)
f00ef2f  feat: partial witx api of build and load (Ahmedounet, Jun 10, 2022)
f786168  feat: added tensorflow submodule (Ahmedounet, Jun 11, 2022)
cb699fd  fix: witx functions fix (Ahmedounet, Jun 12, 2022)
b1451d8  Update wasi nn definition (tonibofarull, Jun 12, 2022)
3883ba6  Installation script of tensorflow (tonibofarull, Jun 12, 2022)
4ae7c97  Added export api in runtime (tonibofarull, Jun 12, 2022)
620e064  Adding source in runtime creation (tonibofarull, Jun 12, 2022)
a847d38  Update sample application to use load from wasi (tonibofarull, Jun 12, 2022)
0ef7092  Update build and add testing dockerfile and run (tonibofarull, Jun 12, 2022)
51d75c8  Remove submodule (tonibofarull, Jun 12, 2022)
97c9103  Fix dockerfile (tonibofarull, Jun 12, 2022)
00ef633  Adding cpp binding and proper installation (tonibofarull, Jun 13, 2022)
bab3387  fix: "compilation bug fixed" (Ahmedounet, Jun 13, 2022)
98215a8  SP-193 load_function (#4) (Ahmedounet, Jun 13, 2022)
3fc13d0  fix: clang format convention (Ahmedounet, Jun 13, 2022)
de1010a  SP-222 set input (#5) (Ahmedounet, Jun 14, 2022)
52c3310  SP-222 set input (#6) (Ahmedounet, Jun 22, 2022)
316b372  feat: security checking wasi-nn (#7) (Ahmedounet, Jun 27, 2022)
8f3ab83  WASI-NN tests (#8) (Ahmedounet, Sep 20, 2022)
df7b2d8  Update with latest specification (#9) (tonibofarull, Sep 26, 2022)
12352ad  Formatting (tonibofarull, Sep 26, 2022)
c79d7ae  Fix (tonibofarull, Sep 27, 2022)
dd70322  Remove unused file (tonibofarull, Sep 27, 2022)
6691f32  Improve docs (tonibofarull, Sep 27, 2022)
107272b  Same version (tonibofarull, Sep 27, 2022)
a081e6b  Dockerfile (tonibofarull, Sep 27, 2022)
8cbb9b0  Add logging and use deps folder (tonibofarull, Oct 4, 2022)
13e88c3  Move install into deps (tonibofarull, Oct 4, 2022)
49cfeaf  Fix path (tonibofarull, Oct 4, 2022)
d3e45d6  Improve install tensorflow (tonibofarull, Oct 4, 2022)
040d17e  Fix (tonibofarull, Oct 4, 2022)
80cf3b7  Parameterize (tonibofarull, Oct 4, 2022)
49a9553  Update cmakelist (tonibofarull, Oct 4, 2022)
9937612  Fix (tonibofarull, Oct 4, 2022)
cc42c57  Flatbuffer alpine incompatibility (tonibofarull, Oct 4, 2022)
d9cdd4b  Fix comments (tonibofarull, Oct 10, 2022)
06e4362  Fix (tonibofarull, Oct 11, 2022)
1 change: 1 addition & 0 deletions .gitignore
@@ -1,6 +1,7 @@
.cache
.vs
.vscode
.venv
/.idea
**/cmake-build-*/
**/*build/
3 changes: 3 additions & 0 deletions build-scripts/config_common.cmake
@@ -291,3 +291,6 @@ if (WAMR_BUILD_SGX_IPFS EQUAL 1)
add_definitions (-DWASM_ENABLE_SGX_IPFS=1)
message (" SGX IPFS enabled")
endif ()
if (WAMR_BUILD_WASI_NN EQUAL 1)
message (" WASI-NN enabled")
endif ()
14 changes: 14 additions & 0 deletions build-scripts/runtime_lib.cmake
@@ -91,6 +91,19 @@ if (WAMR_BUILD_LIB_PTHREAD_SEMAPHORE EQUAL 1)
set (WAMR_BUILD_LIB_PTHREAD 1)
endif ()

if (WAMR_BUILD_WASI_NN EQUAL 1)
execute_process(COMMAND ${WAMR_ROOT_DIR}/core/deps/install_tensorflow.sh
RESULT_VARIABLE TENSORFLOW_RESULT
)
set(TENSORFLOW_SOURCE_DIR "${WAMR_ROOT_DIR}/core/deps/tensorflow-src")
include_directories (${CMAKE_CURRENT_BINARY_DIR}/flatbuffers/include)
include_directories (${TENSORFLOW_SOURCE_DIR})
add_subdirectory(
"${TENSORFLOW_SOURCE_DIR}/tensorflow/lite"
"${CMAKE_CURRENT_BINARY_DIR}/tensorflow-lite" EXCLUDE_FROM_ALL)
include (${IWASM_DIR}/libraries/wasi-nn/wasi_nn.cmake)
endif ()

if (WAMR_BUILD_LIB_PTHREAD EQUAL 1)
include (${IWASM_DIR}/libraries/lib-pthread/lib_pthread.cmake)
# Enable the dependent feature if lib pthread is enabled
@@ -152,6 +165,7 @@ set (source_all
${UTILS_SHARED_SOURCE}
${LIBC_BUILTIN_SOURCE}
${LIBC_WASI_SOURCE}
${LIBC_WASI_NN_SOURCE}
${IWASM_COMMON_SOURCE}
${IWASM_INTERP_SOURCE}
${IWASM_AOT_SOURCE}
11 changes: 11 additions & 0 deletions core/deps/install_tensorflow.sh
@@ -0,0 +1,11 @@
#!/bin/sh

DEPS_ROOT=$(cd "$(dirname "$0")/" && pwd)
cd ${DEPS_ROOT}

echo "Downloading tensorflow in ${PWD}..."

git clone https://github.com/tensorflow/tensorflow.git tensorflow-src \
--branch v2.9.2

exit 0
10 changes: 10 additions & 0 deletions core/iwasm/common/wasm_native.c
@@ -33,6 +33,9 @@ get_spectest_export_apis(NativeSymbol **p_libc_builtin_apis);
uint32
get_libc_wasi_export_apis(NativeSymbol **p_libc_wasi_apis);

uint32_t
get_wasi_nn_export_apis(NativeSymbol **p_libc_wasi_apis);

uint32
get_base_lib_export_apis(NativeSymbol **p_base_lib_apis);

@@ -425,6 +428,13 @@ wasm_native_init()
goto fail;
#endif /* WASM_ENABLE_LIB_RATS */

#if WASM_ENABLE_WASI_NN != 0
n_native_symbols = get_wasi_nn_export_apis(&native_symbols);
if (!wasm_native_register_natives("wasi_nn", native_symbols,
Collaborator:
@tonibofarull where did this "wasi_nn" module name come from? Can you give me a reference to an exact version of the spec this was based on, e.g. a commit id of the wasi-nn repo? As far as I researched, wasi-nn has always been using "wasi_ephemeral_nn".

Contributor (author):
It was an arbitrary name, as I couldn't get an API list based on witx/wit.

Collaborator:
OK, so this is not compatible with any version of wasi-nn, right?

Contributor (author):
Compatibility is not guaranteed.

Collaborator:
So there will be no harm if we replace "wasi_nn" with "wasi_ephemeral_nn" in the repo?

Collaborator:
> Compatibility is not guaranteed.

Well, actually incompatibility is guaranteed because of the different module name, isn't it?

n_native_symbols))
return false;
#endif

return true;
fail:
wasm_native_destroy();
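To make the naming concern from the review thread above concrete: the module string a guest imports from is fixed at compile time by the header it builds against, and it must match the first argument passed to `wasm_native_register_natives`. A minimal, hypothetical guest-side sketch (not part of this PR; the attribute syntax is clang/wasi-sdk's `import_module`/`import_name`, and the signatures are placeholders):

```c
/* Hypothetical declarations for illustration only.
 * A module built against "wasi_nn" links with this runtime, while one built
 * against "wasi_ephemeral_nn" (the name used by the upstream spec) would fail
 * at instantiation because the import module name differs. */
__attribute__((import_module("wasi_nn"), import_name("load")))
int wasi_nn_load(void *builder, int encoding, int target, void *graph);

__attribute__((import_module("wasi_ephemeral_nn"), import_name("load")))
int wasi_ephemeral_nn_load(void *builder, int encoding, int target, void *graph);
```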
1 change: 1 addition & 0 deletions core/iwasm/libraries/wasi-nn/.dockerignore
@@ -0,0 +1 @@
**/Dockerfile
43 changes: 43 additions & 0 deletions core/iwasm/libraries/wasi-nn/README.md
@@ -0,0 +1,43 @@
# WASI-NN

## How to use

Enable WASI-NN in WAMR by specifying it in the cmake build configuration as follows,

```
set (WAMR_BUILD_WASI_NN 1)
```

The definition of the functions provided by WASI-NN is in the header file `core/iwasm/libraries/wasi-nn/wasi_nn.h`.

Simply including this file in your WASM application binds WASI-NN into your module.
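
As a rough, hypothetical illustration (not part of this PR), the usual call sequence a guest drives through that header is load, init_execution_context, set_input, compute, get_output; the exact signatures and enum constants are the ones defined in `wasi_nn.h` and may differ from this sketch:

```c
#include <stdint.h>
#include "wasi_nn.h"

/* Hypothetical sketch of the standard wasi-nn inference flow; the function
 * names follow the wasi-nn spec, but the signatures and enum values used
 * here are assumptions and should be checked against wasi_nn.h. */
static error
run_inference(graph_builder_array model, tensor *input,
              tensor_data out_buf, uint32_t *out_size)
{
    graph g;
    graph_execution_context ctx;

    /* Load the (TensorFlow Lite) model and target the CPU. */
    if (load(&model, tensorflowlite, cpu, &g) != success)
        return invalid_argument;
    /* Bind an execution context to the loaded graph. */
    if (init_execution_context(g, &ctx) != success)
        return invalid_argument;
    /* Provide input tensor 0, run the graph, then read back output tensor 0. */
    if (set_input(ctx, 0, input) != success || compute(ctx) != success)
        return invalid_argument;
    return get_output(ctx, 0, out_buf, out_size);
}
```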

## Tests

To run the tests we assume that the current directory is the root of the repository.


1. Build the Docker image:

```
docker build -t wasi-nn -f core/iwasm/libraries/wasi-nn/test/Dockerfile .
```

Contributor:
Not sure whether the test is just a simple interface test (like a unit test) or a sample? If it is a sample, please put it under <wamr_root>/samples, for example samples/wasi-nn.

Contributor (author):
I'm testing multiple neural network models. Once it's ready for a release we can add a real sample!

2. Run the container:

```
docker run wasi-nn
```

If all the tests have run properly, you will see the following message in the terminal:

```
Tests: passed!
```

## What is missing

* Only 1 model at a time is supported.
* `graph` and `graph-execution-context` are ignored.
* Only `tensorflow` (lite) is supported.
* Only `cpu` is supported.
55 changes: 55 additions & 0 deletions core/iwasm/libraries/wasi-nn/logger.h
@@ -0,0 +1,55 @@
/*
* Copyright (C) 2019 Intel Corporation. All rights reserved.
* SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
*/

#ifndef WASI_NN_LOGGER_H
#define WASI_NN_LOGGER_H

#include <stdio.h>
#include <string.h>

#define __FILENAME__ \
(strrchr(__FILE__, '/') ? strrchr(__FILE__, '/') + 1 : __FILE__)

/* Disable a level by removing the define */
#define ENABLE_ERR_LOG
#define ENABLE_WARN_LOG
#define ENABLE_DBG_LOG
#define ENABLE_INFO_LOG

// Definition of the levels
#ifdef ENABLE_ERR_LOG
#define NN_ERR_PRINTF(fmt, ...) \
printf("[%s:%d] " fmt, __FILENAME__, __LINE__, ##__VA_ARGS__); \
printf("\n"); \
fflush(stdout)
#else
#define NN_ERR_PRINTF(fmt, ...)
#endif
#ifdef ENABLE_WARN_LOG
#define NN_WARN_PRINTF(fmt, ...) \
printf("[%s:%d] " fmt, __FILENAME__, __LINE__, ##__VA_ARGS__); \
printf("\n"); \
fflush(stdout)
#else
#define NN_WARN_PRINTF(fmt, ...)
#endif
#ifdef ENABLE_DBG_LOG
#define NN_DBG_PRINTF(fmt, ...) \
printf("[%s:%d] " fmt, __FILENAME__, __LINE__, ##__VA_ARGS__); \
printf("\n"); \
fflush(stdout)
#else
#define NN_DBG_PRINTF(fmt, ...)
#endif
#ifdef ENABLE_INFO_LOG
#define NN_INFO_PRINTF(fmt, ...) \
printf("[%s:%d] " fmt, __FILENAME__, __LINE__, ##__VA_ARGS__); \
printf("\n"); \
fflush(stdout)
#else
#define NN_INFO_PRINTF(fmt, ...)
#endif

#endif
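These macros are plain printf wrappers that prefix each message with file and line and flush stdout; a minimal usage sketch (not from this PR), assuming the header is on the include path:

```c
#include "logger.h"

/* Each call prints "[file:line] message" plus a newline and flushes stdout;
 * a level is silenced by removing its ENABLE_*_LOG define in logger.h.
 * Braces matter here because the macros expand to multiple statements. */
static void
report_model(int n_tensors)
{
    NN_INFO_PRINTF("Loaded model with %d tensors", n_tensors);
    if (n_tensors == 0) {
        NN_ERR_PRINTF("Model has no tensors");
    }
}
```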
178 changes: 178 additions & 0 deletions core/iwasm/libraries/wasi-nn/test/CMakeLists.txt
@@ -0,0 +1,178 @@
# Copyright (C) 2019 Intel Corporation. All rights reserved.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception

cmake_minimum_required (VERSION 2.9)

project (iwasm)

set (CMAKE_VERBOSE_MAKEFILE OFF)
# Reset default linker flags
set (CMAKE_SHARED_LIBRARY_LINK_C_FLAGS "")
set (CMAKE_SHARED_LIBRARY_LINK_CXX_FLAGS "")
set (CMAKE_C_STANDARD 99)
set (CMAKE_CXX_STANDARD 14)

if (NOT DEFINED WAMR_BUILD_PLATFORM)
set (WAMR_BUILD_PLATFORM "linux")
endif ()

# Set WAMR_BUILD_TARGET, currently values supported:
# "X86_64", "AMD_64", "X86_32", "AARCH64[sub]", "ARM[sub]", "THUMB[sub]",
# "MIPS", "XTENSA", "RISCV64[sub]", "RISCV32[sub]"
if (NOT DEFINED WAMR_BUILD_TARGET)
if (CMAKE_SYSTEM_PROCESSOR MATCHES "^(arm64|aarch64)")
set (WAMR_BUILD_TARGET "AARCH64")
elseif (CMAKE_SYSTEM_PROCESSOR STREQUAL "riscv64")
set (WAMR_BUILD_TARGET "RISCV64")
elseif (CMAKE_SIZEOF_VOID_P EQUAL 8)
# Build as X86_64 by default in 64-bit platform
set (WAMR_BUILD_TARGET "X86_64")
elseif (CMAKE_SIZEOF_VOID_P EQUAL 4)
# Build as X86_32 by default in 32-bit platform
set (WAMR_BUILD_TARGET "X86_32")
else ()
message(SEND_ERROR "Unsupported build target platform!")
endif ()
endif ()

if (NOT CMAKE_BUILD_TYPE)
set(CMAKE_BUILD_TYPE Release)
endif ()

if (NOT DEFINED WAMR_BUILD_INTERP)
# Enable Interpreter by default
set (WAMR_BUILD_INTERP 1)
endif ()

if (NOT DEFINED WAMR_BUILD_AOT)
# Enable AOT by default.
set (WAMR_BUILD_AOT 1)
endif ()

if (NOT DEFINED WAMR_BUILD_JIT)
# Disable JIT by default.
set (WAMR_BUILD_JIT 0)
endif ()

if (NOT DEFINED WAMR_BUILD_FAST_JIT)
# Disable Fast JIT by default
set (WAMR_BUILD_FAST_JIT 0)
endif ()

if (NOT DEFINED WAMR_BUILD_LIBC_BUILTIN)
# Enable libc builtin support by default
set (WAMR_BUILD_LIBC_BUILTIN 1)
endif ()

if (NOT DEFINED WAMR_BUILD_LIBC_WASI)
# Enable libc wasi support by default
set (WAMR_BUILD_LIBC_WASI 1)
endif ()

if (NOT DEFINED WAMR_BUILD_FAST_INTERP)
# Enable fast interpreter
set (WAMR_BUILD_FAST_INTERP 1)
endif ()

if (NOT DEFINED WAMR_BUILD_MULTI_MODULE)
# Disable multiple modules by default
set (WAMR_BUILD_MULTI_MODULE 0)
endif ()

if (NOT DEFINED WAMR_BUILD_LIB_PTHREAD)
# Disable pthread library by default
set (WAMR_BUILD_LIB_PTHREAD 0)
endif ()

if (NOT DEFINED WAMR_BUILD_MINI_LOADER)
# Disable wasm mini loader by default
set (WAMR_BUILD_MINI_LOADER 0)
endif ()

if (NOT DEFINED WAMR_BUILD_SIMD)
# Enable SIMD by default
set (WAMR_BUILD_SIMD 1)
endif ()

if (NOT DEFINED WAMR_BUILD_REF_TYPES)
# Disable reference types by default
set (WAMR_BUILD_REF_TYPES 0)
endif ()

if (NOT DEFINED WAMR_BUILD_DEBUG_INTERP)
# Disable Debug feature by default
set (WAMR_BUILD_DEBUG_INTERP 0)
endif ()

if (WAMR_BUILD_DEBUG_INTERP EQUAL 1)
set (WAMR_BUILD_FAST_INTERP 0)
set (WAMR_BUILD_MINI_LOADER 0)
set (WAMR_BUILD_SIMD 0)
endif ()

if (COLLECT_CODE_COVERAGE EQUAL 1)
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fprofile-arcs -ftest-coverage")
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fprofile-arcs -ftest-coverage")
endif ()

set (WAMR_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../../../../..)

include (${WAMR_ROOT_DIR}/build-scripts/runtime_lib.cmake)
add_library(vmlib ${WAMR_RUNTIME_LIB_SOURCE})

set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,--gc-sections -pie -fPIE")

set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -Wextra -Wformat -Wformat-security -Wshadow")
# set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wconversion -Wsign-conversion")

set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wextra -Wformat -Wformat-security -Wno-unused")

if (WAMR_BUILD_TARGET MATCHES "X86_.*" OR WAMR_BUILD_TARGET STREQUAL "AMD_64")
if (NOT (CMAKE_C_COMPILER MATCHES ".*clang.*" OR CMAKE_C_COMPILER_ID MATCHES ".*Clang"))
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -mindirect-branch-register")
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -mindirect-branch-register")
# UNDEFINED BEHAVIOR, refer to https://en.cppreference.com/w/cpp/language/ub
if(CMAKE_BUILD_TYPE STREQUAL "Debug" AND NOT WAMR_BUILD_JIT EQUAL 1)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fsanitize=undefined \
-fno-sanitize=bounds,bounds-strict,alignment \
-fno-sanitize-recover")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=undefined \
-fno-sanitize=bounds,bounds-strict,alignment \
-fno-sanitize-recover")
endif()
else ()
# UNDEFINED BEHAVIOR, refer to https://en.cppreference.com/w/cpp/language/ub
if(CMAKE_BUILD_TYPE STREQUAL "Debug" AND NOT WAMR_BUILD_JIT EQUAL 1)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fsanitize=undefined \
-fno-sanitize=bounds,alignment \
-fno-sanitize-recover")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=undefined \
-fno-sanitize=bounds,alignment \
-fno-sanitize-recover")
endif()
endif ()
endif ()

# The following flags are to enhance security, but it may impact performance,
# we disable them by default.
#if (WAMR_BUILD_TARGET MATCHES "X86_.*" OR WAMR_BUILD_TARGET STREQUAL "AMD_64")
# set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -ftrapv -D_FORTIFY_SOURCE=2")
#endif ()
#set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fstack-protector-strong --param ssp-buffer-size=4")
#set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wl,-z,noexecstack,-z,relro,-z,now")

include (${SHARED_DIR}/utils/uncommon/shared_uncommon.cmake)

add_executable (iwasm ${WAMR_ROOT_DIR}/product-mini/platforms/${WAMR_BUILD_PLATFORM}/main.c ${UNCOMMON_SHARED_SOURCE})

install (TARGETS iwasm DESTINATION bin)

target_link_libraries (iwasm vmlib ${LLVM_AVAILABLE_LIBS} ${UV_A_LIBS} ${TENSORFLOW_LIB} -lm -ldl -lpthread)

add_library (libiwasm SHARED ${WAMR_RUNTIME_LIB_SOURCE})

install (TARGETS libiwasm DESTINATION lib)

set_target_properties (libiwasm PROPERTIES OUTPUT_NAME iwasm)

target_link_libraries (libiwasm ${LLVM_AVAILABLE_LIBS} ${UV_A_LIBS} -lm -ldl -lpthread)
32 changes: 32 additions & 0 deletions core/iwasm/libraries/wasi-nn/test/Dockerfile
@@ -0,0 +1,32 @@
# Copyright (C) 2019 Intel Corporation. All rights reserved.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception

FROM ubuntu:22.04

ENV DEBIAN_FRONTEND=noninteractive

RUN apt-get update && apt-get install -y \
cmake build-essential git wget python3.10 python3-pip

RUN wget -q https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-14/wasi-sdk-14.0-linux.tar.gz && \
tar xf wasi-sdk-*-linux.tar.gz -C /opt && rm -f wasi-sdk-*-linux.tar.gz && \
mv /opt/wasi-sdk-14.0 /opt/wasi-sdk

WORKDIR /home/wamr

COPY core core
COPY build-scripts build-scripts
COPY product-mini product-mini

RUN pip3 install -r core/iwasm/libraries/wasi-nn/test/requirements.txt

WORKDIR /home/wamr/core/iwasm/libraries/wasi-nn/test/build

RUN cmake -DWAMR_BUILD_WASI_NN=1 ..
RUN make -j $(grep -c ^processor /proc/cpuinfo)

WORKDIR /home/wamr/core/iwasm/libraries/wasi-nn/test

RUN ./build.sh

ENTRYPOINT [ "./build/iwasm", "--dir=.", "test_tensorflow.wasm" ]