Compare commits

...

41 Commits

Author SHA1 Message Date
liang.he
f090909a30
Merge 58a9bae9c6 into 418be9dfee 2025-11-27 04:15:44 +03:00
Yosh
418be9dfee
Update link to WASI Proposals.md (#4734) 2025-11-27 08:53:59 +08:00
liang.he
a75dc9be51
Remove local fuzzing server (#4729)
Some checks failed
The local fuzzing server has not been used for a long time and is
currently unmaintained. Remove it from the codebase for now; it can be
restored when needed.
2025-11-25 21:02:10 +08:00
dependabot[bot]
b4e4c5afee
build(deps): Bump actions/checkout from 5 to 6 (#4726)
Bumps [actions/checkout](https://github.com/actions/checkout) from 5 to 6.
- [Release notes](https://github.com/actions/checkout/releases)
- [Commits](https://github.com/actions/checkout/compare/v5...v6)

---
updated-dependencies:
- dependency-name: actions/checkout
  dependency-version: '6'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-25 21:01:45 +08:00
Kakhaber
85343e7830
Add support for esp32s2 target to idf_component.yml (#4728) 2025-11-25 21:01:31 +08:00
dependabot[bot]
6182505c00
build(deps): Bump github/codeql-action from 4.31.3 to 4.31.5 (#4727)
Some checks are pending
Bumps [github/codeql-action](https://github.com/github/codeql-action) from 4.31.3 to 4.31.5.
- [Release notes](https://github.com/github/codeql-action/releases)
- [Commits](https://github.com/github/codeql-action/compare/v4.31.3...v4.31.5)

---
updated-dependencies:
- dependency-name: github/codeql-action
  dependency-version: 4.31.5
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-25 07:04:23 +08:00
Krisztian
a192467067
Namespace generated headers with zephyr/ (#4670)
Some checks are pending
* Namespace generated headers with zephyr/

---------

Signed-off-by: Krisztian Szilvasi <34309983+kr-t@users.noreply.github.com>
2025-11-24 19:20:53 +08:00
YAMAMOTO Takashi
912c2a6e31
add a set of apis to configure wasi via InstantiationArgs2 (#4707)
* track if WASIArguments is configured by the user

  I plan to use this to decide which WASI arguments
  (the ones from the module or the ones from InstantiationArgs2)
  to use.

* add WASIArguments to InstantiationArgs2

* use the WASI configuration from InstantiationArgs2 if any

  Fall back to the via-module configuration for now.

* add a few APIs to configure WASI via InstantiationArgs2

* configure WASI via InstantiationArgs2 for platforms using libc_wasi.c

* rt-thread: migrate to libc_wasi_set_init_args

* common/libc_wasi.c: retire libc_wasi_init

* fix build without WASI
2025-11-24 19:20:21 +08:00
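A minimal usage sketch (not part of this change set) of the new InstantiationArgs2 WASI setters added by #4707. It assumes an InstantiationArgs2 object has already been created through the public API; the helper name and the argv/env/dir values are purely illustrative, and the instantiation call itself is omitted because its full signature is not shown here.

/* Hypothetical helper; only the setter names introduced by this change are real. */
static void
configure_wasi_example(struct InstantiationArgs2 *args)
{
    static char *argv[] = { "app.wasm" };
    static const char *env[] = { "LANG=C" };
    static const char *dirs[] = { "/tmp" };

    wasm_runtime_instantiation_args_set_defaults(args);
    wasm_runtime_instantiation_args_set_wasi_arg(args, argv, 1);
    wasm_runtime_instantiation_args_set_wasi_env(args, env, 1);
    wasm_runtime_instantiation_args_set_wasi_dir(args, dirs, 1, NULL, 0);
    wasm_runtime_instantiation_args_set_wasi_stdio(args, 0, 1, 2);
    /* wasm_runtime_instantiate_ex2(...) would then consume `args`. */
}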
Huang Qi
0cefefab1e
ci: add raspberrypi-pico-2 to nuttx build matrix (#4721)
Add Raspberry Pi Pico 2 (Cortex-M33) board configuration to the NuttX CI build matrix to ensure WAMR compatibility testing covers the Cortex-M33 platform. The board configuration path "boards/arm/rp23xx/raspberrypi-pico-2/configs/nsh" is inserted after the cortex-m7 configuration to maintain logical architecture ordering.
2025-11-24 19:19:52 +08:00
Huang Qi
0af3af4eed
chore: update NuttX version to 12.11 in CI workflows (#4720)
The NuttX project released version 12.11 with improvements and bug fixes.
Updating the CI workflows to use the latest stable version ensures that
WAMR testing and compilation verification runs against the most current
NuttX release.

Updated 4 repository references total across 2 workflow files to point to
the new release branch.

Signed-off-by: Huang Qi <huangqi3@xiaomi.com>
2025-11-24 19:19:40 +08:00
Huang Qi
1d255ce0b5
refactor: Improve debug experience with CFI directives and proper label naming in Thumb invokeNative (#4719)
This change enhances debugging support and improves assembly code quality:

1. Add CFI (Call Frame Information) directives (.cfi_startproc, .cfi_def_cfa_offset, .cfi_offset, .cfi_endproc) to provide proper call frame information for debuggers
2. Improve label naming by using dot-prefixed conventions (.Lreturn, .Lcall_func, etc.) following assembly best practices

These improvements enhance debugging capabilities and code maintainability while preserving functionality.

Signed-off-by: Huang Qi <huangqi3@xiaomi.com>
2025-11-24 19:19:18 +08:00
liang.he
53ffe451d0
Fast-interp should report unsupported opcode (#4723)
Some checks are pending
when encountering a SIMD opcode on non-POSIX platforms
2025-11-24 15:48:29 +08:00
liang.he
071e2aaed5
Merge commit from fork
* fix: remove SIMD prefix handling from interpreter and update opcode definitions

* test: add case for classic interpreter handling of unsupported SIMD opcodes
2025-11-24 14:37:46 +08:00
Zhenwei Jin
33fdc4a671
Merge commit from fork 2025-11-24 14:23:04 +08:00
liang.he@intel.com
58a9bae9c6 fix: exclude standard library functions from generated checked headers 2025-11-10 12:57:03 +00:00
liang.he@intel.com
096461cbaf Use sorted lists to maintain consistency between CI and local. 2025-11-10 12:45:35 +00:00
liang.he@intel.com
94dce59c7a add generated checked APIs 2025-11-10 12:45:35 +00:00
liang.he@intel.com
bef501b5a2 feat: A new job in CI to check if checked APIs are up to date 2025-11-10 12:45:34 +00:00
liang.he@intel.com
612b4bd614 fix: include stdbool.h for boolean type support in aot_comp_option.h 2025-11-10 12:45:34 +00:00
liang.he@intel.com
ab079c1394 refactor: enhance header generation script with default headers and formatting support 2025-11-10 12:45:34 +00:00
liang.he@intel.com
29d5070e7f refactor: update CMakeLists.txt for testing support and modify demo output 2025-11-10 12:45:34 +00:00
liang.he@intel.com
be4dc9fcf9 fix: correct indentation in RESULT_STRUCT_TEMPLATE for clarity 2025-11-10 12:45:34 +00:00
liang.he@intel.com
39e8343152 refactor: remove unused resolve_typedef function and simplify type resolution 2025-11-10 12:45:34 +00:00
liang.he@intel.com
15bc48b70b fix: update copyright notice and improve header file generation comments 2025-11-10 12:45:34 +00:00
liang.he@intel.com
b7126c18fc refactor: rename and break process_headers into small ones 2025-11-10 12:45:34 +00:00
liang.he@intel.com
42ec04cd9f Add docstring to introduce script usage, arguments, and output 2025-11-10 12:45:34 +00:00
liang.he@intel.com
1a1112f3d9 Add command-line options to accept paths of headers as a list of multiple file paths 2025-11-10 12:45:34 +00:00
liang.he@intel.com
7a9d37a20e feat: add checked API sample with Fibonacci example and CMake configuration 2025-11-10 12:45:34 +00:00
liang.he@intel.com
22a969e67c fix: correct error code assignment for boolean return type in generate_checked_function 2025-11-10 12:45:34 +00:00
liang.he@intel.com
170b23d27b fix: add duplicate typedef check and improve pointer handling in generate_checked_function 2025-11-10 12:45:34 +00:00
liang.he@intel.com
4cd5e2e689 fix: update generate_checked_function to resolve typedefs and enhance Result handling for new return types 2025-11-10 12:45:34 +00:00
liang.he@intel.com
edefd13a8d fix: enhance return type handling in generate_checked_function to support pointer types 2025-11-10 12:45:34 +00:00
liang.he@intel.com
2de30f7b6f Revert "WIP. fix bugs about returning a pointer"
This reverts commit 4f9f6422cd9c32b71890d5ef668a8e3c15e15aa8.
2025-11-10 12:45:34 +00:00
liang.he@intel.com
629d01b9bd WIP. fix bugs about returning a pointer 2025-11-10 12:45:34 +00:00
liang.he@intel.com
4e0b85eccf fix: update generate_checked_function to include static inline in function declaration 2025-11-10 12:45:34 +00:00
liang.he@intel.com
90bfd394da feat: enhance generate_checked_function to support variadic arguments and update Result handling 2025-11-10 12:45:34 +00:00
liang.he@intel.com
16d35155a5 refactor: streamline Result handling by consolidating return type assignments and excluding void type 2025-11-10 12:45:34 +00:00
liang.he@intel.com
e1a10571a6 feat: include original wasm_export.h and necessary headers in generated header file 2025-11-10 12:45:34 +00:00
liang.he@intel.com
a94ca0b5d3 refactor: Make Result struct definition locally and remove dynamic type addition 2025-11-10 12:45:34 +00:00
liang.he@intel.com
2860ead566 refactor: based on current file location to adjust header file paths 2025-11-10 12:45:34 +00:00
liang.he@intel.com
356b575148 feat: add generate_checked_functions script to dynamically create checked function wrappers 2025-11-10 12:45:34 +00:00
92 changed files with 14625 additions and 6696 deletions

View File

@@ -26,7 +26,7 @@ jobs:
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       - name: Build and save Docker image(wasm-debug-server:${{ inputs.ver_num }}) to tar file
         run: |

View File

@@ -104,7 +104,7 @@ jobs:
       contents: write # for uploading release artifacts
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - name: get cached LLVM libraries
         id: retrieve_llvm_libs

View File

@@ -45,7 +45,7 @@ jobs:
     steps:
       - name: checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       - name: install dependencies for non macos-14
         if: inputs.os != 'macos-14'

View File

@@ -55,7 +55,7 @@ jobs:
       contents: write # for uploading release artifacts
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - name: download and install wasi-sdk
         run: |

View File

@@ -45,7 +45,7 @@ jobs:
       contents: write # for uploading release artifacts
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - name: download wamr-app-framework
         run: |

View File

@@ -24,7 +24,7 @@ jobs:
       contents: write # for uploading release artifacts
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - name: Use Node.js 18.x
         uses: actions/setup-node@v6

View File

@@ -28,7 +28,7 @@ jobs:
         os: [ubuntu-22.04]
     steps:
       - name: checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       - name: install-wasi-sdk-wabt
         uses: ./.github/actions/install-wasi-sdk-wabt

View File

@@ -41,7 +41,7 @@ jobs:
       contents: write # for uploading release artifacts
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - name: get cached LLVM libraries
         id: retrieve_llvm_libs

View File

@@ -14,7 +14,7 @@ jobs:
     steps:
       - name: checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       - name: cmake execute to generate version.h
         run: cmake -B build_version -S .

View File

@@ -43,13 +43,13 @@ jobs:
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           submodules: recursive
       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v4.31.3
+        uses: github/codeql-action/init@v4.31.5
         with:
           languages: ${{ matrix.language }}
         # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
@@ -61,7 +61,7 @@ jobs:
           ./.github/scripts/codeql_buildscript.sh
       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v4.31.3
+        uses: github/codeql-action/analyze@v4.31.5
         with:
           category: "/language:${{matrix.language}}"
           upload: false
@@ -114,7 +114,7 @@ jobs:
           output: ${{ steps.step1.outputs.sarif-output }}/cpp.sarif
       - name: Upload CodeQL results to code scanning
-        uses: github/codeql-action/upload-sarif@v4.31.3
+        uses: github/codeql-action/upload-sarif@v4.31.5
         with:
           sarif_file: ${{ steps.step1.outputs.sarif-output }}
           category: "/language:${{matrix.language}}"

View File

@@ -22,7 +22,7 @@ jobs:
     runs-on: ubuntu-22.04
     steps:
       - name: checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           fetch-depth: 0

View File

@@ -101,7 +101,7 @@ jobs:
       llvm_cache_key: ${{ needs.build_llvm_libraries_on_ubuntu_2204.outputs.cache_key }}
     steps:
       - name: checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       # since jobs.id can't contain the dot character
       # it is hard to use `format` to assemble the cache key
@@ -270,7 +270,7 @@ jobs:
       llvm_cache_key: ${{ needs.build_llvm_libraries_on_ubuntu_2204.outputs.cache_key }}
     steps:
       - name: checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       # only download llvm cache when needed
       - name: Get LLVM libraries
@@ -328,7 +328,7 @@ jobs:
     steps:
       - name: checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       - name: Get LLVM libraries
         id: retrieve_llvm_libs
@@ -386,7 +386,7 @@ jobs:
     steps:
       - name: checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       - name: Get LLVM libraries
         id: retrieve_llvm_libs
@@ -442,7 +442,7 @@ jobs:
     steps:
       - name: checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       - name: Get LLVM libraries
         id: retrieve_llvm_libs
@@ -504,7 +504,7 @@ jobs:
     steps:
       - name: checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       - name: Get LLVM libraries
         id: retrieve_llvm_libs
@@ -675,7 +675,7 @@ jobs:
     steps:
       - name: checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       - name: Set-up OCaml
         uses: ocaml/setup-ocaml@v3

View File

@@ -86,7 +86,7 @@ jobs:
       llvm_cache_key: ${{ needs.build_llvm_libraries_on_intel_macos.outputs.cache_key }}
     steps:
       - name: checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       - name: Get LLVM libraries
         id: retrieve_llvm_libs
@@ -190,7 +190,7 @@ jobs:
       llvm_cache_key: ${{ needs.build_llvm_libraries_on_intel_macos.outputs.cache_key }}
     steps:
       - name: checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       # only download llvm cache when needed
       - name: Get LLVM libraries
@@ -243,7 +243,7 @@ jobs:
     steps:
       - name: checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       - name: Get LLVM libraries
         id: retrieve_llvm_libs
@@ -301,7 +301,7 @@ jobs:
       llvm_cache_key: ${{ needs.build_llvm_libraries_on_arm_macos.outputs.cache_key }}
     steps:
       - name: checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       - name: install-wasi-sdk-wabt
         uses: ./.github/actions/install-wasi-sdk-wabt

View File

@@ -64,6 +64,8 @@ jobs:
           "boards/arm/rp2040/raspberrypi-pico/configs/nsh",
           # cortex-m7
           "boards/arm/stm32h7/nucleo-h743zi/configs/nsh",
+          # cortex-m33
+          "boards/arm/rp23xx/raspberrypi-pico-2/configs/nsh",
           # riscv32gc
           "boards/risc-v/qemu-rv/rv-virt/configs/nsh",
           # riscv64gc
@@ -85,21 +87,21 @@ jobs:
     steps:
       - name: Checkout NuttX
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           repository: apache/nuttx
-          ref: releases/12.9
+          ref: releases/12.11
           path: nuttx
       - name: Checkout NuttX Apps
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           repository: apache/nuttx-apps
-          ref: releases/12.9
+          ref: releases/12.11
           path: apps
       - name: Checkout WAMR
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           repository: ${{ github.repository }}
           path: apps/interpreters/wamr/wamr
@@ -108,6 +110,7 @@ jobs:
         working-directory: nuttx
         run: |
           tools/configure.sh ${{ matrix.nuttx_board_config }}
+          kconfig-tweak --disable CONFIG_RP2040_UF2_BINARY
           kconfig-tweak --enable CONFIG_PSEUDOFS_SOFTLINKS
           kconfig-tweak --enable CONFIG_INTERPRETERS_WAMR
           kconfig-tweak --enable CONFIG_INTERPRETERS_IWASM_TASK
@@ -122,7 +125,7 @@ jobs:
         run: make -j$(nproc) EXTRAFLAGS=-Werror
       - name: Checkout Bloaty
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           repository: google/bloaty
           submodules: recursive

View File

@@ -116,7 +116,7 @@ jobs:
       make_options_feature: "-DWAMR_BUILD_MINI_LOADER=1"
     steps:
       - name: checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       - name: install SGX SDK and necessary libraries
         uses: ./.github/actions/install-linux-sgx
@@ -159,7 +159,7 @@ jobs:
     steps:
       - name: checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       - name: install-wasi-sdk-wabt
         uses: ./.github/actions/install-wasi-sdk-wabt
@@ -255,7 +255,7 @@ jobs:
     steps:
       - name: checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       - name: Get LLVM libraries
         if: matrix.running_mode == 'aot'

View File

@@ -85,7 +85,7 @@ jobs:
           "-DWAMR_BUILD_LIBC_UVWASI=0 -DWAMR_BUILD_LIBC_WASI=1",
         ]
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
       - name: clone uvwasi library
         if: ${{ !contains(matrix.build_options, '-DWAMR_BUILD_LIBC_UVWASI=0') }}
@@ -109,7 +109,7 @@ jobs:
       llvm_cache_key: ${{ needs.build_llvm_libraries_on_windows.outputs.cache_key }}
     steps:
       - name: checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       # since jobs.id can't contain the dot character
       # it is hard to use `format` to assemble the cache key
@@ -151,7 +151,7 @@ jobs:
         ]
     steps:
       - name: checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       - name: download and install wasi-sdk
         if: matrix.test_option == '$WASI_TEST_OPTIONS'

View File

@@ -80,7 +80,7 @@ jobs:
       # └─── application/ --> DUMMY. keep west_lite.yml here
       - name: Checkout code
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           path: modules/wasm-micro-runtime

View File

@@ -29,7 +29,7 @@ jobs:
       contents: write # create and push tags
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
        # Full git history is needed to get a proper list of commits and tags
        with:
          fetch-depth: 0

View File

@@ -37,7 +37,7 @@ jobs:
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       # on default, hadolint will fail on warnings and errors
       - name: Run hadolint on dockerfiles

View File

@@ -67,7 +67,7 @@ jobs:
       llvm_cache_key: ${{ needs.build_llvm_libraries_on_ubuntu.outputs.cache_key }}
     steps:
       - name: checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       # since jobs.id can't contain the dot character
       # it is hard to use `format` to assemble the cache key
@@ -233,7 +233,7 @@ jobs:
     steps:
       - name: checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       # only download llvm cache when needed
       - name: Get LLVM libraries
@@ -387,7 +387,7 @@ jobs:
           sanitizer: asan
     steps:
       - name: checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       - name: Get LLVM libraries
         id: retrieve_llvm_libs
@@ -440,7 +440,7 @@ jobs:
       llvm_cache_key: ${{ needs.build_llvm_libraries_on_ubuntu.outputs.cache_key }}
     steps:
       - name: checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       - name: install-wasi-sdk-wabt
         uses: ./.github/actions/install-wasi-sdk-wabt
@@ -634,7 +634,7 @@ jobs:
           sanitizer: ubsan
     steps:
       - name: checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       - name: install-wasi-sdk-wabt
         if: matrix.test_option == '$WASI_TEST_OPTIONS'

View File

@@ -55,7 +55,7 @@ jobs:
     outputs:
       upload_url: ${{ steps.create_release.outputs.upload_url }}
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
      - name: prepare the release note
        run: |

View File

@@ -34,7 +34,7 @@ jobs:
       contents: write # for creating realease and uploading release artifacts
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
        # Full git history is needed to get a proper list of commits and tags
        with:
          fetch-depth: 0

View File

@@ -143,21 +143,21 @@ jobs:
       # Note: we use an unreleased version nuttx for xtensa because
       # 12.4 doesn't contain necessary esp32s3 changes.
       - name: Checkout NuttX
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           repository: apache/nuttx
-          ref: ${{ matrix.target_config.target == 'xtensa' && '985d395b025cf2012b22f6bb4461959fa6d87645' || 'releases/12.9' }}
+          ref: ${{ matrix.target_config.target == 'xtensa' && '985d395b025cf2012b22f6bb4461959fa6d87645' || 'releases/12.11' }}
           path: nuttx
       - name: Checkout NuttX Apps
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           repository: apache/nuttx-apps
-          ref: ${{ matrix.target_config.target == 'xtensa' && '2ef3eb25c0cec944b13792185f7e5d5a05990d5f' || 'releases/12.9' }}
+          ref: ${{ matrix.target_config.target == 'xtensa' && '2ef3eb25c0cec944b13792185f7e5d5a05990d5f' || 'releases/12.11' }}
           path: apps
       - name: Checkout WAMR
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           repository: ${{ github.repository }}
           path: apps/interpreters/wamr/wamr

View File

@@ -34,7 +34,7 @@ jobs:
     steps:
       - name: "Checkout code"
-        uses: actions/checkout@71cf2267d89c5cb81562390fa70a37fa40b1305e # v3.1.0
+        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v3.1.0
         with:
           persist-credentials: false
@@ -60,6 +60,6 @@ jobs:
       # Upload the results to GitHub's code scanning dashboard.
       - name: "Upload to code-scanning"
-        uses: github/codeql-action/upload-sarif@c1a2b73420f0c02efb863cc6921c531bc1a54f4f
+        uses: github/codeql-action/upload-sarif@52f930e50a5971cb8a0163b8ae04f2344c26154c
         with:
           sarif_file: results.sarif

View File

@@ -0,0 +1,59 @@
# Copyright (C) 2019 Intel Corporation. All rights reserved.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception

name: Verify core/iwasm/include checked APIs to see if they are up to date

on:
  # will be triggered on PR events
  pull_request:
    types:
      - opened
      - synchronize
    paths:
      - "core/iwasm/include/**"
      - ".github/workflows/verify_checked_apis.yml"
      - "ci/generate_checked_functions.py"
  push:
    paths:
      - "core/iwasm/include/**"
      - ".github/workflows/verify_checked_apis.yml"
      - "ci/generate_checked_functions.py"

# Cancel any in-flight jobs for the same PR/branch so there's only one active
# at a time
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

permissions:
  contents: read

jobs:
  verify_checked_apis:
    name: Verify checked APIs
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.x"
      - name: Install dependencies
        run: |
          pip install pycparser
          sudo apt-get update
          sudo apt-get install -y clang-format-14
      - name: Generate checked APIs
        id: generate_checked_apis
        run: |
          python3 ci/generate_checked_functions.py
      - name: Check for differences
        run: |
          #it exits with 1 if there were differences and 0 means no differences
          git diff --exit-code

View File

@@ -30,7 +30,7 @@ jobs:
         os: [ubuntu-22.04, macos-13, macos-14]
     steps:
       - name: checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       - name: install-wasi-sdk-wabt
         uses: ./.github/actions/install-wasi-sdk-wabt

View File

@@ -0,0 +1,318 @@
"""
This script generates "checked" versions of functions from the specified header files.

Usage:
    python3 generate_checked_functions.py --headers <header1.h> <header2.h> ...

Arguments:
    --headers: A list of header file paths to process. Each header file will be parsed, and a corresponding
               "_checked.h" file will be generated with additional null pointer checks and error handling.
               If not provided, a default list of headers under "core/iwasm/include/" will be used.

Example:
    python3 generate_checked_functions.py
    # OR
    python3 generate_checked_functions.py --headers core/iwasm/include/wasm_export.h

Description:
    The script parses the provided header files using `pycparser` to extract function declarations and typedefs.
    For each function, it generates a "checked" version that includes:
    - Null pointer checks for pointer parameters.
    - Error handling using a `Result` struct.
    - Support for variadic arguments (e.g., ...).

    The generated "_checked.h" files include the original header file and define the `Result` struct, which
    encapsulates the return value and error codes. The `Result` struct is dynamically generated based on the
    return types of the functions in the header file.

Dependencies:
    - pycparser: Install it using `pip install pycparser`.
    - clang-format-14: Ensure it is installed for formatting the generated files.

Output:
    For each input header file, a corresponding "_checked.h" file is created in the same directory.
    The generated files are automatically formatted using clang-format-14.
"""

import argparse
from pathlib import Path
from pycparser import c_ast, parse_file
import subprocess

# Constants for repeated strings
CPP_ARGS = [
    "-E",
    "-D__attribute__(x)=",
    "-D__asm__(x)=",
    "-D__asm(x)=",
    "-D__builtin_va_list=int",
    "-D__extension__=",
    "-D__inline__=",
    "-D__restrict=",
    "-D__restrict__=",
    "-D_Static_assert(x, y)=",
    "-D__signed=",
    "-D__volatile__(x)=",
    "-Dstatic_assert(x, y)=",
]

RESULT_STRUCT_TEMPLATE = """
typedef struct {
    int error_code; // Error code (0 for success, non-zero for errors)
    union {
        // Add other types as needed
    } value;
} Result;
"""

COPYRIGHT = """
/*
 * Copyright (C) 2025 Intel Corporation. All rights reserved.
 * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
 */
"""

INCLUDE_HEADERS = ["<stdbool.h>", "<stdint.h>", "<stdlib.h>"]


def extract_typedefs(ast):
    """Extract all typedefs from the AST."""
    return {node.name: node.type for node in ast.ext if isinstance(node, c_ast.Typedef)}


def generate_result_struct(return_types):
    """Generate the Result struct based on return types."""
    result_struct = RESULT_STRUCT_TEMPLATE
    for return_type in return_types:
        if return_type == "void":
            continue
        result_struct = result_struct.replace(
            "// Add other types as needed",
            f" {return_type} {return_type}_value;\n // Add other types as needed",
        )
    return result_struct


def write_checked_header(output_path, result_struct, functions, typedefs):
    """Write the checked header file."""
    with open(output_path, "w") as f:
        # copyright
        f.write(COPYRIGHT)
        f.write("\n")
        f.write("/*\n")
        f.write(" * THIS FILE IS GENERATED AUTOMATICALLY, DO NOT EDIT!\n")
        f.write(" */\n")
        # include guard
        f.write(
            f"#ifndef {output_path.stem.upper()}_H\n#define {output_path.stem.upper()}_H\n\n"
        )
        for header in INCLUDE_HEADERS:
            f.write(f"#include {header}\n")
        f.write("\n")
        # include original header
        original_header = output_path.stem.replace("_checked", "") + ".h"
        f.write(f'#include "{original_header}"\n')
        f.write("\n")
        f.write(result_struct + "\n")
        for func in functions:
            new_func = generate_checked_function(func, typedefs)
            f.write(new_func + "\n\n")
        f.write(f"#endif // {output_path.stem.upper()}_H\n")


def generate_checked_function(func, typedefs):
    """Generate a checked version of the given function."""
    func_name = func.name  # Access the name directly from Decl
    new_func_name = f"{func_name}_checked"

    # Extract parameters
    params = func.type.args.params if func.type.args else []

    # Determine the return type
    return_pointer = False
    return_type = "void"  # Default to void if no return type is specified
    if isinstance(func.type.type, c_ast.TypeDecl):
        return_type = " ".join(func.type.type.type.names)
        resolved_type = typedefs.get(return_type, return_type)
        if isinstance(resolved_type, c_ast.PtrDecl):
            return_pointer = True

    # Start building the new function
    new_func = [f"static inline Result {new_func_name}("]
    param_list = []
    for param in params:
        if isinstance(param, c_ast.EllipsisParam):
            # Handle variadic arguments (e.g., ...)
            param_list.append("...")
            new_func.append(" ...,")
            continue

        param_name = param.name if param.name else ""
        param_list.append(param_name)
        param_type = (
            " ".join(param.type.type.names)
            if isinstance(param.type, c_ast.TypeDecl)
            else "void*"
        )
        new_func.append(f" {param_type} {param_name},")

    if param_list:
        new_func[-1] = new_func[-1].rstrip(",")  # Remove trailing comma
    new_func.append(") {")

    # Add null checks for pointer parameters
    new_func.append(f" Result res;")
    has_variadic = False
    for param in params:
        if isinstance(param, c_ast.EllipsisParam):
            # Restructure to use va_list
            new_func.append(" va_list args;")
            has_variadic = True
        elif isinstance(param.type, c_ast.PtrDecl):
            new_func.append(f" // Check for null pointer parameter: {param.name}")
            new_func.append(f" if ({param.name} == NULL) {{")
            new_func.append(f" res.error_code = -1;")
            new_func.append(f" return res;")
            new_func.append(f" }}")

    # Call the original function
    new_func.append(f" // Execute the original function")
    if return_type == "void":
        new_func.append(f" {func_name}({', '.join(param_list)});")
    elif has_variadic:
        new_func.append(" va_start(args, " + param_list[-2] + ");")
        new_func.append(
            f" {return_type} original_result = {func_name}({', '.join(param_list[:-1])}, args);"
        )
        new_func.append(" va_end(args);")
    else:
        new_func.append(
            f" {return_type} original_result = {func_name}({', '.join(param_list)});"
        )

    # Handle returned values
    new_func.append(f" // Assign return value and error code")
    if return_type == "void":
        new_func.append(f" res.error_code = 0;")
    elif return_type == "_Bool":
        new_func.append(f" res.error_code = original_result ? 0 : -2;")
        new_func.append(f" res.value._Bool_value = original_result;")
    # if return type is a pointer or typedef from pointer
    elif return_pointer:
        new_func.append(f" if (original_result != NULL) {{")
        new_func.append(f" res.error_code = 0;")
        new_func.append(f" res.value.{return_type}_value = original_result;")
        new_func.append(f" }} else {{")
        new_func.append(f" res.error_code = -2;")
        new_func.append(f" }}")
    else:
        new_func.append(f" if (original_result == 0) {{")
        new_func.append(f" res.error_code = 0;")
        new_func.append(f" res.value.{return_type}_value = original_result;")
        new_func.append(f" }} else {{")
        new_func.append(f" res.error_code = -2;")
        new_func.append(f" }}")

    new_func.append(f" return res;")
    new_func.append(f"}}")
    return "\n".join(new_func)


def parse_arguments():
    """Parse command-line arguments."""
    parser = argparse.ArgumentParser(
        description="Generate checked functions from header files."
    )
    parser.add_argument(
        "--headers",
        nargs="+",
        required=False,
        help="List of header file paths to process. Relative to the project root.",
        default=[
            "core/iwasm/include/aot_comp_option.h",
            "core/iwasm/include/aot_export.h",
            "core/iwasm/include/gc_export.h",
            "core/iwasm/include/lib_export.h",
            "core/iwasm/include/wasm_c_api.h",
            "core/iwasm/include/wasm_export.h",
        ],
    )
    return parser.parse_args()


def generate_checked_headers(header_paths):
    """Process each header file and generate checked versions."""
    output_header = []
    for input_header in header_paths:
        input_path = Path(input_header)
        output_path = input_path.with_name(input_path.stem + "_checked.h")

        ast = parse_file(
            str(input_path),
            use_cpp=True,
            cpp_path="gcc",
            cpp_args=CPP_ARGS,
        )

        typedefs = extract_typedefs(ast)
        functions = [
            node
            for node in ast.ext
            if isinstance(node, c_ast.Decl) and isinstance(node.type, c_ast.FuncDecl)
        ]
        # remove std headers functions
        functions = [
            f
            for f in functions
            if f.name
            not in (
                "__mempcpy",
                "__stpcpy",
                "memmem",
                "memmove",
                "mempcpy",
                "memset",
                "strcasestr",
                "strcat",
                "strchrnul",
                "strcmp",
                "strlcat",
                "strlcpy",
                "strlen",
            )
        ]
        functions = sorted(functions, key=lambda f: f.name)

        return_types = {
            " ".join(func.type.type.type.names)
            for func in functions
            if isinstance(func.type.type, c_ast.TypeDecl)
        }
        return_types = sorted(return_types)

        result_struct = generate_result_struct(return_types)
        write_checked_header(output_path, result_struct, functions, typedefs)
        output_header.append(output_path)
    return output_header


def main():
    args = parse_arguments()
    generated_headers = generate_checked_headers(args.headers)
    # format the generated files using clang-format-14
    for header in generated_headers:
        subprocess.run(["clang-format-14", "--style=file", "-i", str(header)])


if __name__ == "__main__":
    main()
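For orientation, here is a hedged illustration (not actual generated output) of the wrapper shape this script emits, using a hypothetical original function int wasm_do_thing(void *p); the member name int_value and the error codes -1/-2 follow the generator logic above.

/* Hypothetical example of a generated checked wrapper. */
typedef struct {
    int error_code; // Error code (0 for success, non-zero for errors)
    union {
        int int_value;
        // Add other types as needed
    } value;
} Result;

static inline Result
wasm_do_thing_checked(void *p)
{
    Result res;
    // Check for null pointer parameter: p
    if (p == NULL) {
        res.error_code = -1;
        return res;
    }
    // Execute the original function
    int original_result = wasm_do_thing(p);
    // Assign return value and error code
    if (original_result == 0) {
        res.error_code = 0;
        res.value.int_value = original_result;
    } else {
        res.error_code = -2;
    }
    return res;
}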

View File

@@ -2081,17 +2081,25 @@ aot_instantiate(AOTModule *module, AOTModuleInstance *parent,
 #if WASM_ENABLE_LIBC_WASI != 0
     if (!is_sub_inst) {
+        const WASIArguments *wasi_args = &args->wasi;
+        if (module->wasi_args.set_by_user) {
+            if (wasi_args->set_by_user) {
+                set_error_buf(error_buf, error_buf_size,
+                              "WASI configuration was given via both of module "
+                              "and InstantiationArgs2");
+                goto fail;
+            }
+            wasi_args = &module->wasi_args;
+        }
         if (!wasm_runtime_init_wasi(
-                (WASMModuleInstanceCommon *)module_inst,
-                module->wasi_args.dir_list, module->wasi_args.dir_count,
-                module->wasi_args.map_dir_list, module->wasi_args.map_dir_count,
-                module->wasi_args.env, module->wasi_args.env_count,
-                module->wasi_args.addr_pool, module->wasi_args.addr_count,
-                module->wasi_args.ns_lookup_pool,
-                module->wasi_args.ns_lookup_count, module->wasi_args.argv,
-                module->wasi_args.argc, module->wasi_args.stdio[0],
-                module->wasi_args.stdio[1], module->wasi_args.stdio[2],
-                error_buf, error_buf_size))
+                (WASMModuleInstanceCommon *)module_inst, wasi_args->dir_list,
+                wasi_args->dir_count, wasi_args->map_dir_list,
+                wasi_args->map_dir_count, wasi_args->env, wasi_args->env_count,
+                wasi_args->addr_pool, wasi_args->addr_count,
+                wasi_args->ns_lookup_pool, wasi_args->ns_lookup_count,
+                wasi_args->argv, wasi_args->argc, wasi_args->stdio[0],
+                wasi_args->stdio[1], wasi_args->stdio[2], error_buf,
+                error_buf_size))
             goto fail;
     }
 #endif

View File

@@ -12,7 +12,7 @@ invokeNative:
 .globl _invokeNative
 _invokeNative:
 #endif /* end of BH_PLATFORM_DARWIN */
+    .cfi_startproc
 /*
  * Arguments passed in:
  *
@@ -24,34 +24,40 @@ _invokeNative:
     push {r4, r5, r6, r7}
     push {lr}
     sub sp, sp, #4 /* make sp 8 byte aligned */
+    .cfi_def_cfa_offset 24
+    .cfi_offset lr, -20
+    .cfi_offset r4, -16
+    .cfi_offset r5, -12
+    .cfi_offset r6, -8
+    .cfi_offset r7, -4
     mov ip, r0 /* ip = function ptr */
     mov r4, r1 /* r4 = argv */
     mov r5, r2 /* r5 = argc */
     cmp r5, #1 /* at least one argument required: exec_env */
-    blt return
+    blt .Lreturn
     mov r6, #0 /* increased stack size */
     ldr r0, [r4] /* r0 = argv[0] = exec_env */
     add r4, r4, #4 /* r4 += 4 */
     cmp r5, #1
-    beq call_func
+    beq .Lcall_func
     ldr r1, [r4] /* r1 = argv[1] */
     add r4, r4, #4
     cmp r5, #2
-    beq call_func
+    beq .Lcall_func
     ldr r2, [r4] /* r2 = argv[2] */
     add r4, r4, #4
     cmp r5, #3
-    beq call_func
+    beq .Lcall_func
     ldr r3, [r4] /* r3 = argv[3] */
     add r4, r4, #4
     cmp r5, #4
-    beq call_func
+    beq .Lcall_func
     sub r5, r5, #4 /* argc -= 4, now we have r0 ~ r3 */
@@ -66,29 +72,31 @@ _invokeNative:
     mov sp, r7
     mov lr, r2 /* save r2 */
-loop_args: /* copy left arguments to stack */
+.Lloop_args: /* copy left arguments to stack */
     cmp r5, #0
-    beq call_func1
+    beq .Lcall_func1
     ldr r2, [r4]
     add r4, r4, #4
     str r2, [r7]
     add r7, r7, #4
     sub r5, r5, #1
-    b loop_args
+    b .Lloop_args
-call_func1:
+.Lcall_func1:
     mov r2, lr /* restore r2 */
-call_func:
+.Lcall_func:
     blx ip
     add sp, sp, r6 /* restore sp */
-return:
+.Lreturn:
     add sp, sp, #4 /* make sp 8 byte aligned */
     pop {r3}
     pop {r4, r5, r6, r7}
     mov lr, r3
     bx lr
+    .cfi_endproc
 #if defined(__linux__) && defined(__ELF__)
 .section .note.GNU-stack,"",%progbits
 #endif

View File

@ -1648,6 +1648,9 @@ void
wasm_runtime_instantiation_args_set_defaults(struct InstantiationArgs2 *args) wasm_runtime_instantiation_args_set_defaults(struct InstantiationArgs2 *args)
{ {
memset(args, 0, sizeof(*args)); memset(args, 0, sizeof(*args));
#if WASM_ENABLE_LIBC_WASI != 0
wasi_args_set_defaults(&args->wasi);
#endif
} }
WASMModuleInstanceCommon * WASMModuleInstanceCommon *
@ -1714,6 +1717,84 @@ wasm_runtime_instantiation_args_set_max_memory_pages(
p->v1.max_memory_pages = v; p->v1.max_memory_pages = v;
} }
#if WASM_ENABLE_LIBC_WASI != 0
void
wasm_runtime_instantiation_args_set_wasi_arg(struct InstantiationArgs2 *p,
char *argv[], int argc)
{
WASIArguments *wasi_args = &p->wasi;
wasi_args->argv = argv;
wasi_args->argc = (uint32)argc;
wasi_args->set_by_user = true;
}
void
wasm_runtime_instantiation_args_set_wasi_env(struct InstantiationArgs2 *p,
const char *env[],
uint32 env_count)
{
WASIArguments *wasi_args = &p->wasi;
wasi_args->env = env;
wasi_args->env_count = env_count;
wasi_args->set_by_user = true;
}
void
wasm_runtime_instantiation_args_set_wasi_dir(struct InstantiationArgs2 *p,
const char *dir_list[],
uint32 dir_count,
const char *map_dir_list[],
uint32 map_dir_count)
{
WASIArguments *wasi_args = &p->wasi;
wasi_args->dir_list = dir_list;
wasi_args->dir_count = dir_count;
wasi_args->map_dir_list = map_dir_list;
wasi_args->map_dir_count = map_dir_count;
wasi_args->set_by_user = true;
}
void
wasm_runtime_instantiation_args_set_wasi_stdio(struct InstantiationArgs2 *p,
int64 stdinfd, int64 stdoutfd,
int64 stderrfd)
{
WASIArguments *wasi_args = &p->wasi;
wasi_args->stdio[0] = (os_raw_file_handle)stdinfd;
wasi_args->stdio[1] = (os_raw_file_handle)stdoutfd;
wasi_args->stdio[2] = (os_raw_file_handle)stderrfd;
wasi_args->set_by_user = true;
}
void
wasm_runtime_instantiation_args_set_wasi_addr_pool(struct InstantiationArgs2 *p,
const char *addr_pool[],
uint32 addr_pool_size)
{
WASIArguments *wasi_args = &p->wasi;
wasi_args->addr_pool = addr_pool;
wasi_args->addr_count = addr_pool_size;
wasi_args->set_by_user = true;
}
void
wasm_runtime_instantiation_args_set_wasi_ns_lookup_pool(
struct InstantiationArgs2 *p, const char *ns_lookup_pool[],
uint32 ns_lookup_pool_size)
{
WASIArguments *wasi_args = &p->wasi;
wasi_args->ns_lookup_pool = ns_lookup_pool;
wasi_args->ns_lookup_count = ns_lookup_pool_size;
wasi_args->set_by_user = true;
}
#endif /* WASM_ENABLE_LIBC_WASI != 0 */
WASMModuleInstanceCommon *
wasm_runtime_instantiate_ex2(WASMModuleCommon *module,
                             const struct InstantiationArgs2 *args,

@ -3494,6 +3575,7 @@ wasm_runtime_set_wasi_args_ex(WASMModuleCommon *module, const char *dir_list[],
    wasi_args->stdio[0] = (os_raw_file_handle)stdinfd;
    wasi_args->stdio[1] = (os_raw_file_handle)stdoutfd;
    wasi_args->stdio[2] = (os_raw_file_handle)stderrfd;
+   wasi_args->set_by_user = true;

#if WASM_ENABLE_MULTI_MODULE != 0
#if WASM_ENABLE_INTERP != 0

@ -3524,6 +3606,7 @@ wasm_runtime_set_wasi_addr_pool(wasm_module_t module, const char *addr_pool[],
    if (wasi_args) {
        wasi_args->addr_pool = addr_pool;
        wasi_args->addr_count = addr_pool_size;
+       wasi_args->set_by_user = true;
    }
}

@ -3537,6 +3620,7 @@ wasm_runtime_set_wasi_ns_lookup_pool(wasm_module_t module,
    if (wasi_args) {
        wasi_args->ns_lookup_pool = ns_lookup_pool;
        wasi_args->ns_lookup_count = ns_lookup_pool_size;
+       wasi_args->set_by_user = true;
    }
}
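Taken together, these setters move WASI configuration from the module onto the `InstantiationArgs2` object. Below is a minimal sketch of how an embedder might drive the new API, mirroring the product-mini changes later in this comparison; the argv/preopen values are placeholders, the trailing parameters of `wasm_runtime_instantiate_ex2` are assumed to be the usual error-buffer pair, and `wasm_runtime_instantiation_args_destroy` is assumed to exist as the counterpart of `_create`.

```c
#include "wasm_export.h"

/* Sketch only: per-instantiation WASI configuration. */
static wasm_module_inst_t
instantiate_with_wasi(wasm_module_t module)
{
    struct InstantiationArgs2 *inst_args = NULL;
    char *wasi_argv[] = { "app.wasm", "--flag" }; /* placeholder argv */
    const char *dirs[] = { "." };                 /* placeholder preopen */
    char error_buf[128];
    wasm_module_inst_t inst = NULL;

    if (!wasm_runtime_instantiation_args_create(&inst_args))
        return NULL;

    wasm_runtime_instantiation_args_set_host_managed_heap_size(inst_args,
                                                               16 * 1024);
    /* the new per-instantiation WASI setters from this change */
    wasm_runtime_instantiation_args_set_wasi_arg(inst_args, wasi_argv, 2);
    wasm_runtime_instantiation_args_set_wasi_dir(inst_args, dirs, 1, NULL, 0);

    inst = wasm_runtime_instantiate_ex2(module, inst_args, error_buf,
                                        sizeof(error_buf));
    /* assumed counterpart of wasm_runtime_instantiation_args_create() */
    wasm_runtime_instantiation_args_destroy(inst_args);
    return inst;
}
```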

View File

@ -614,6 +614,9 @@ wasm_runtime_get_exec_env_tls(void);
struct InstantiationArgs2 {
    InstantiationArgs v1;
+#if WASM_ENABLE_LIBC_WASI != 0
+    WASIArguments wasi;
+#endif
};

void

@ -735,6 +738,43 @@ void
wasm_runtime_instantiation_args_set_max_memory_pages(
    struct InstantiationArgs2 *p, uint32 v);
/* See wasm_export.h for description */
WASM_RUNTIME_API_EXTERN void
wasm_runtime_instantiation_args_set_wasi_arg(struct InstantiationArgs2 *p,
char *argv[], int argc);
/* See wasm_export.h for description */
WASM_RUNTIME_API_EXTERN void
wasm_runtime_instantiation_args_set_wasi_env(struct InstantiationArgs2 *p,
const char *env[],
uint32 env_count);
/* See wasm_export.h for description */
WASM_RUNTIME_API_EXTERN void
wasm_runtime_instantiation_args_set_wasi_dir(struct InstantiationArgs2 *p,
const char *dir_list[],
uint32 dir_count,
const char *map_dir_list[],
uint32 map_dir_count);
/* See wasm_export.h for description */
WASM_RUNTIME_API_EXTERN void
wasm_runtime_instantiation_args_set_wasi_stdio(struct InstantiationArgs2 *p,
int64 stdinfd, int64 stdoutfd,
int64 stderrfd);
/* See wasm_export.h for description */
WASM_RUNTIME_API_EXTERN void
wasm_runtime_instantiation_args_set_wasi_addr_pool(struct InstantiationArgs2 *p,
const char *addr_pool[],
uint32 addr_pool_size);
/* See wasm_export.h for description */
WASM_RUNTIME_API_EXTERN void
wasm_runtime_instantiation_args_set_wasi_ns_lookup_pool(
struct InstantiationArgs2 *p, const char *ns_lookup_pool[],
uint32 ns_lookup_pool_size);
/* See wasm_export.h for description */
WASM_RUNTIME_API_EXTERN WASMModuleInstanceCommon *
wasm_runtime_instantiate_ex2(WASMModuleCommon *module,

View File

@ -7,6 +7,7 @@
#define __AOT_COMP_OPTION_H__
#include <stdint.h>
+#include <stdbool.h>
#ifdef __cplusplus
extern "C" {

View File

@ -0,0 +1,42 @@
/*
* Copyright (C) 2025 Intel Corporation. All rights reserved.
* SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
*/
/*
* THIS FILE IS GENERATED AUTOMATICALLY, DO NOT EDIT!
*/
#ifndef AOT_COMP_OPTION_CHECKED_H
#define AOT_COMP_OPTION_CHECKED_H
#include <stdbool.h>
#include <stdint.h>
#include <stdlib.h>
#include "aot_comp_option.h"
typedef struct {
int error_code; // Error code (0 for success, non-zero for errors)
union {
// Add other types as needed
} value;
} Result;
static inline Result
aot_call_stack_features_init_default_checked(void *features)
{
Result res;
// Check for null pointer parameter: features
if (features == NULL) {
res.error_code = -1;
return res;
}
// Execute the original function
aot_call_stack_features_init_default(features);
// Assign return value and error code
res.error_code = 0;
return res;
}
#endif // AOT_COMP_OPTION_CHECKED_H
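The generated wrapper turns the NULL check into a recoverable error code instead of a crash in the wrapped call. A small sketch of the intended calling pattern, assuming the `AOTCallStackFeatures` type declared in `aot_comp_option.h`:

```c
#include "aot_comp_option_checked.h"

/* Sketch: branch on Result.error_code rather than passing NULL through. */
static bool
init_call_stack_features(AOTCallStackFeatures *features)
{
    Result res = aot_call_stack_features_init_default_checked(features);
    /* -1 means the NULL-pointer check rejected the call; 0 means it ran */
    return res.error_code == 0;
}
```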

View File

@ -0,0 +1,334 @@
/*
* Copyright (C) 2025 Intel Corporation. All rights reserved.
* SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
*/
/*
* THIS FILE IS GENERATED AUTOMATICALLY, DO NOT EDIT!
*/
#ifndef AOT_EXPORT_CHECKED_H
#define AOT_EXPORT_CHECKED_H
#include <stdbool.h>
#include <stdint.h>
#include <stdlib.h>
#include "aot_export.h"
typedef struct {
int error_code; // Error code (0 for success, non-zero for errors)
union {
_Bool _Bool_value;
aot_comp_context_t aot_comp_context_t_value;
aot_comp_data_t aot_comp_data_t_value;
aot_obj_data_t aot_obj_data_t_value;
uint32_t uint32_t_value;
// Add other types as needed
} value;
} Result;
static inline Result
aot_call_stack_features_init_default_checked(void *features)
{
Result res;
// Check for null pointer parameter: features
if (features == NULL) {
res.error_code = -1;
return res;
}
// Execute the original function
aot_call_stack_features_init_default(features);
// Assign return value and error code
res.error_code = 0;
return res;
}
static inline Result
aot_compile_wasm_checked(aot_comp_context_t comp_ctx)
{
Result res;
// Execute the original function
_Bool original_result = aot_compile_wasm(comp_ctx);
// Assign return value and error code
res.error_code = original_result ? 0 : -2;
res.value._Bool_value = original_result;
return res;
}
static inline Result
aot_compiler_destroy_checked(void)
{
Result res;
// Execute the original function
aot_compiler_destroy();
// Assign return value and error code
res.error_code = 0;
return res;
}
static inline Result
aot_compiler_init_checked(void)
{
Result res;
// Execute the original function
_Bool original_result = aot_compiler_init();
// Assign return value and error code
res.error_code = original_result ? 0 : -2;
res.value._Bool_value = original_result;
return res;
}
static inline Result
aot_create_comp_context_checked(aot_comp_data_t comp_data,
aot_comp_option_t option)
{
Result res;
// Execute the original function
aot_comp_context_t original_result =
aot_create_comp_context(comp_data, option);
// Assign return value and error code
if (original_result != NULL) {
res.error_code = 0;
res.value.aot_comp_context_t_value = original_result;
}
else {
res.error_code = -2;
}
return res;
}
static inline Result
aot_create_comp_data_checked(void *wasm_module, void *target_arch,
_Bool gc_enabled)
{
Result res;
// Check for null pointer parameter: wasm_module
if (wasm_module == NULL) {
res.error_code = -1;
return res;
}
// Check for null pointer parameter: target_arch
if (target_arch == NULL) {
res.error_code = -1;
return res;
}
// Execute the original function
aot_comp_data_t original_result =
aot_create_comp_data(wasm_module, target_arch, gc_enabled);
// Assign return value and error code
if (original_result != NULL) {
res.error_code = 0;
res.value.aot_comp_data_t_value = original_result;
}
else {
res.error_code = -2;
}
return res;
}
static inline Result
aot_destroy_aot_file_checked(void *aot_file)
{
Result res;
// Check for null pointer parameter: aot_file
if (aot_file == NULL) {
res.error_code = -1;
return res;
}
// Execute the original function
aot_destroy_aot_file(aot_file);
// Assign return value and error code
res.error_code = 0;
return res;
}
static inline Result
aot_destroy_comp_context_checked(aot_comp_context_t comp_ctx)
{
Result res;
// Execute the original function
aot_destroy_comp_context(comp_ctx);
// Assign return value and error code
res.error_code = 0;
return res;
}
static inline Result
aot_destroy_comp_data_checked(aot_comp_data_t comp_data)
{
Result res;
// Execute the original function
aot_destroy_comp_data(comp_data);
// Assign return value and error code
res.error_code = 0;
return res;
}
static inline Result
aot_emit_aot_file_checked(aot_comp_context_t comp_ctx,
aot_comp_data_t comp_data, void *file_name)
{
Result res;
// Check for null pointer parameter: file_name
if (file_name == NULL) {
res.error_code = -1;
return res;
}
// Execute the original function
_Bool original_result = aot_emit_aot_file(comp_ctx, comp_data, file_name);
// Assign return value and error code
res.error_code = original_result ? 0 : -2;
res.value._Bool_value = original_result;
return res;
}
static inline Result
aot_emit_aot_file_buf_checked(aot_comp_context_t comp_ctx,
aot_comp_data_t comp_data, void *p_aot_file_size)
{
Result res;
// Check for null pointer parameter: p_aot_file_size
if (p_aot_file_size == NULL) {
res.error_code = -1;
return res;
}
// Execute the original function
aot_emit_aot_file_buf(comp_ctx, comp_data, p_aot_file_size);
// Assign return value and error code
res.error_code = 0;
return res;
}
static inline Result
aot_emit_aot_file_buf_ex_checked(aot_comp_context_t comp_ctx,
aot_comp_data_t comp_data,
aot_obj_data_t obj_data, void *aot_file_buf,
uint32_t aot_file_size)
{
Result res;
// Check for null pointer parameter: aot_file_buf
if (aot_file_buf == NULL) {
res.error_code = -1;
return res;
}
// Execute the original function
_Bool original_result = aot_emit_aot_file_buf_ex(
comp_ctx, comp_data, obj_data, aot_file_buf, aot_file_size);
// Assign return value and error code
res.error_code = original_result ? 0 : -2;
res.value._Bool_value = original_result;
return res;
}
static inline Result
aot_emit_llvm_file_checked(aot_comp_context_t comp_ctx, void *file_name)
{
Result res;
// Check for null pointer parameter: file_name
if (file_name == NULL) {
res.error_code = -1;
return res;
}
// Execute the original function
_Bool original_result = aot_emit_llvm_file(comp_ctx, file_name);
// Assign return value and error code
res.error_code = original_result ? 0 : -2;
res.value._Bool_value = original_result;
return res;
}
static inline Result
aot_emit_object_file_checked(aot_comp_context_t comp_ctx, void *file_name)
{
Result res;
// Check for null pointer parameter: file_name
if (file_name == NULL) {
res.error_code = -1;
return res;
}
// Execute the original function
_Bool original_result = aot_emit_object_file(comp_ctx, file_name);
// Assign return value and error code
res.error_code = original_result ? 0 : -2;
res.value._Bool_value = original_result;
return res;
}
static inline Result
aot_get_aot_file_size_checked(aot_comp_context_t comp_ctx,
aot_comp_data_t comp_data,
aot_obj_data_t obj_data)
{
Result res;
// Execute the original function
uint32_t original_result =
aot_get_aot_file_size(comp_ctx, comp_data, obj_data);
// Assign return value and error code
if (original_result == 0) {
res.error_code = 0;
res.value.uint32_t_value = original_result;
}
else {
res.error_code = -2;
}
return res;
}
static inline Result
aot_get_last_error_checked(void)
{
Result res;
// Execute the original function
aot_get_last_error();
// Assign return value and error code
res.error_code = 0;
return res;
}
static inline Result
aot_get_plt_table_size_checked(void)
{
Result res;
// Execute the original function
uint32_t original_result = aot_get_plt_table_size();
// Assign return value and error code
if (original_result == 0) {
res.error_code = 0;
res.value.uint32_t_value = original_result;
}
else {
res.error_code = -2;
}
return res;
}
static inline Result
aot_obj_data_create_checked(aot_comp_context_t comp_ctx)
{
Result res;
// Execute the original function
aot_obj_data_t original_result = aot_obj_data_create(comp_ctx);
// Assign return value and error code
if (original_result != NULL) {
res.error_code = 0;
res.value.aot_obj_data_t_value = original_result;
}
else {
res.error_code = -2;
}
return res;
}
static inline Result
aot_obj_data_destroy_checked(aot_obj_data_t obj_data)
{
Result res;
// Execute the original function
aot_obj_data_destroy(obj_data);
// Assign return value and error code
res.error_code = 0;
return res;
}
#endif // AOT_EXPORT_CHECKED_H
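For reference, a minimal sketch of an AOT compilation flow written against these checked wrappers, following the usual init / comp-data / comp-context / compile / emit order used by the aot_export.h API. The `wasm_module`, `option`, output path, and the "x86_64" target string are placeholders; note the wrapper rejects a NULL `target_arch`, so some concrete string must be supplied.

```c
#include "aot_export_checked.h"

/* Sketch only: drive the checked AOT API end to end. */
static int
compile_to_aot(void *wasm_module, aot_comp_option_t option, char *out_file)
{
    Result r;
    aot_comp_data_t comp_data = NULL;
    aot_comp_context_t comp_ctx = NULL;
    int ret = -1;

    if (aot_compiler_init_checked().error_code != 0)
        return -1;

    /* "x86_64" is a placeholder target; NULL would be rejected with -1 */
    r = aot_create_comp_data_checked(wasm_module, "x86_64", false);
    if (r.error_code != 0)
        goto destroy_compiler;
    comp_data = r.value.aot_comp_data_t_value;

    r = aot_create_comp_context_checked(comp_data, option);
    if (r.error_code != 0)
        goto destroy_comp_data;
    comp_ctx = r.value.aot_comp_context_t_value;

    if (aot_compile_wasm_checked(comp_ctx).error_code == 0
        && aot_emit_aot_file_checked(comp_ctx, comp_data, out_file).error_code
               == 0)
        ret = 0;

    aot_destroy_comp_context_checked(comp_ctx);
destroy_comp_data:
    aot_destroy_comp_data_checked(comp_data);
destroy_compiler:
    aot_compiler_destroy_checked();
    return ret;
}
```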

File diff suppressed because it is too large

View File

@ -0,0 +1,49 @@
/*
* Copyright (C) 2025 Intel Corporation. All rights reserved.
* SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
*/
/*
* THIS FILE IS GENERATED AUTOMATICALLY, DO NOT EDIT!
*/
#ifndef LIB_EXPORT_CHECKED_H
#define LIB_EXPORT_CHECKED_H
#include <stdbool.h>
#include <stdint.h>
#include <stdlib.h>
#include "lib_export.h"
typedef struct {
int error_code; // Error code (0 for success, non-zero for errors)
union {
uint32_t uint32_t_value;
// Add other types as needed
} value;
} Result;
static inline Result
get_base_lib_export_apis_checked(void *p_base_lib_apis)
{
Result res;
// Check for null pointer parameter: p_base_lib_apis
if (p_base_lib_apis == NULL) {
res.error_code = -1;
return res;
}
// Execute the original function
uint32_t original_result = get_base_lib_export_apis(p_base_lib_apis);
// Assign return value and error code
if (original_result == 0) {
res.error_code = 0;
res.value.uint32_t_value = original_result;
}
else {
res.error_code = -2;
}
return res;
}
#endif // LIB_EXPORT_CHECKED_H

File diff suppressed because it is too large

View File

@ -764,6 +764,38 @@ WASM_RUNTIME_API_EXTERN void
wasm_runtime_instantiation_args_set_max_memory_pages(
    struct InstantiationArgs2 *p, uint32_t v);
WASM_RUNTIME_API_EXTERN void
wasm_runtime_instantiation_args_set_wasi_arg(struct InstantiationArgs2 *p,
char *argv[], int argc);
WASM_RUNTIME_API_EXTERN void
wasm_runtime_instantiation_args_set_wasi_env(struct InstantiationArgs2 *p,
const char *env[],
uint32_t env_count);
WASM_RUNTIME_API_EXTERN void
wasm_runtime_instantiation_args_set_wasi_dir(struct InstantiationArgs2 *p,
const char *dir_list[],
uint32_t dir_count,
const char *map_dir_list[],
uint32_t map_dir_count);
WASM_RUNTIME_API_EXTERN void
wasm_runtime_instantiation_args_set_wasi_stdio(struct InstantiationArgs2 *p,
int64_t stdinfd,
int64_t stdoutfd,
int64_t stderrfd);
WASM_RUNTIME_API_EXTERN void
wasm_runtime_instantiation_args_set_wasi_addr_pool(struct InstantiationArgs2 *p,
const char *addr_pool[],
uint32_t addr_pool_size);
WASM_RUNTIME_API_EXTERN void
wasm_runtime_instantiation_args_set_wasi_ns_lookup_pool(
struct InstantiationArgs2 *p, const char *ns_lookup_pool[],
uint32_t ns_lookup_pool_size);
/**
 * Instantiate a WASM module, with specified instantiation arguments
 *

File diff suppressed because it is too large

View File

@ -848,6 +848,7 @@ typedef struct WASIArguments {
    char **argv;
    uint32 argc;
    os_raw_file_handle stdio[3];
+   bool set_by_user;
} WASIArguments;
#endif

View File

@ -6580,12 +6580,10 @@ wasm_interp_call_func_bytecode(WASMModuleInstance *module,
            HANDLE_OP(WASM_OP_CATCH_ALL)
            HANDLE_OP(EXT_OP_TRY)
#endif
-#if WASM_ENABLE_JIT != 0 && WASM_ENABLE_SIMD != 0
            /* SIMD isn't supported by interpreter, but when JIT is
               enabled, `iwasm --interp <wasm_file>` may be run to
               trigger the SIMD opcode in interpreter */
            HANDLE_OP(WASM_OP_SIMD_PREFIX)
-#endif
            HANDLE_OP(WASM_OP_UNUSED_0x16)
            HANDLE_OP(WASM_OP_UNUSED_0x17)
            HANDLE_OP(WASM_OP_UNUSED_0x27)

View File

@ -7534,6 +7534,9 @@ wasm_interp_call_func_bytecode(WASMModuleInstance *module,
            HANDLE_OP(EXT_OP_LOOP)
            HANDLE_OP(EXT_OP_IF)
            HANDLE_OP(EXT_OP_BR_TABLE_CACHE)
+#if WASM_ENABLE_SIMDE == 0
+            HANDLE_OP(WASM_OP_SIMD_PREFIX)
+#endif
            {
                wasm_set_exception(module, "unsupported opcode");
                goto got_exception;

View File

@ -9720,6 +9720,16 @@ preserve_local_for_block(WASMLoaderContext *loader_ctx, uint8 opcode,
    /* preserve locals before blocks to ensure that "tee/set_local" inside
       blocks will not influence the value of these locals */
+   uint32 frame_offset_cell =
+       (uint32)(loader_ctx->frame_offset - loader_ctx->frame_offset_bottom);
+   uint32 frame_ref_cell =
+       (uint32)(loader_ctx->frame_ref - loader_ctx->frame_ref_bottom);
+   if (frame_offset_cell < loader_ctx->stack_cell_num
+       || frame_ref_cell < loader_ctx->stack_cell_num) {
+       set_error_buf(error_buf, error_buf_size, "stack cell num error");
+       return false;
+   }
    while (i < loader_ctx->stack_cell_num) {
        int16 cur_offset = loader_ctx->frame_offset_bottom[i];
        uint8 cur_type = loader_ctx->frame_ref_bottom[i];
@ -12106,13 +12116,19 @@ re_scan:
            }
#endif
+           uint8 *frame_ref_before_pop = loader_ctx->frame_ref;
            POP_TYPE(
                wasm_type->types[wasm_type->param_count - i - 1]);
#if WASM_ENABLE_FAST_INTERP != 0
            /* decrease the frame_offset pointer accordingly to keep
-            * consistent with frame_ref stack */
-           cell_num = wasm_value_type_cell_num(
-               wasm_type->types[wasm_type->param_count - i - 1]);
+            * consistent with frame_ref stack. Use the actual
+            * popped cell count instead of
+            * wasm_value_type_cell_num() because when the stack top
+            * is VALUE_TYPE_ANY, wasm_loader_pop_frame_ref always
+            * pops exactly 1 cell regardless of the expected type
+            */
+           cell_num = (uint32)(frame_ref_before_pop
+                               - loader_ctx->frame_ref);
            loader_ctx->frame_offset -= cell_num;
            if (loader_ctx->frame_offset
View File

@ -790,13 +790,6 @@ typedef enum WASMAtomicEXTOpcode {
#endif

#define SET_GOTO_TABLE_ELEM(opcode) [opcode] = HANDLE_OPCODE(opcode)

-#if WASM_ENABLE_SIMDE != 0
-#define SET_GOTO_TABLE_SIMD_PREFIX_ELEM() \
-    SET_GOTO_TABLE_ELEM(WASM_OP_SIMD_PREFIX),
-#else
-#define SET_GOTO_TABLE_SIMD_PREFIX_ELEM()
-#endif
-
#if WASM_ENABLE_SIMDE != 0
#define DEF_EXT_V128_HANDLE() \
    SET_GOTO_TABLE_ELEM(EXT_OP_SET_LOCAL_FAST_V128), /* 0xdd */ \

@ -1038,7 +1031,7 @@ typedef enum WASMAtomicEXTOpcode {
        HANDLE_OPCODE(EXT_OP_TRY),                  /* 0xdb */ \
        SET_GOTO_TABLE_ELEM(WASM_OP_GC_PREFIX),     /* 0xfb */ \
        SET_GOTO_TABLE_ELEM(WASM_OP_MISC_PREFIX),   /* 0xfc */ \
-       SET_GOTO_TABLE_SIMD_PREFIX_ELEM()           /* 0xfd */ \
+       SET_GOTO_TABLE_ELEM(WASM_OP_SIMD_PREFIX),   /* 0xfd */ \
        SET_GOTO_TABLE_ELEM(WASM_OP_ATOMIC_PREFIX), /* 0xfe */ \
        DEF_DEBUG_BREAK_HANDLE() DEF_EXT_V128_HANDLE() \
    };

View File

@ -3276,17 +3276,25 @@ wasm_instantiate(WASMModule *module, WASMModuleInstance *parent,
#if WASM_ENABLE_LIBC_WASI != 0
    /* The sub-instance will get the wasi_ctx from main-instance */
    if (!is_sub_inst) {
+       const WASIArguments *wasi_args = &args->wasi;
+       if (module->wasi_args.set_by_user) {
+           if (wasi_args->set_by_user) {
+               set_error_buf(error_buf, error_buf_size,
+                             "WASI configuration was given via both of module "
+                             "and InstantiationArgs2");
+               goto fail;
+           }
+           wasi_args = &module->wasi_args;
+       }
        if (!wasm_runtime_init_wasi(
-               (WASMModuleInstanceCommon *)module_inst,
-               module->wasi_args.dir_list, module->wasi_args.dir_count,
-               module->wasi_args.map_dir_list, module->wasi_args.map_dir_count,
-               module->wasi_args.env, module->wasi_args.env_count,
-               module->wasi_args.addr_pool, module->wasi_args.addr_count,
-               module->wasi_args.ns_lookup_pool,
-               module->wasi_args.ns_lookup_count, module->wasi_args.argv,
-               module->wasi_args.argc, module->wasi_args.stdio[0],
-               module->wasi_args.stdio[1], module->wasi_args.stdio[2],
-               error_buf, error_buf_size)) {
+               (WASMModuleInstanceCommon *)module_inst, wasi_args->dir_list,
+               wasi_args->dir_count, wasi_args->map_dir_list,
+               wasi_args->map_dir_count, wasi_args->env, wasi_args->env_count,
+               wasi_args->addr_pool, wasi_args->addr_count,
+               wasi_args->ns_lookup_pool, wasi_args->ns_lookup_count,
+               wasi_args->argv, wasi_args->argc, wasi_args->stdio[0],
+               wasi_args->stdio[1], wasi_args->stdio[2], error_buf,
+               error_buf_size)) {
            goto fail;
        }
    }
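From the embedder's point of view, the check above means the two configuration paths are now mutually exclusive. A hedged sketch of the failing combination (names and surrounding setup are placeholders; the `wasm_runtime_set_wasi_args` arity follows the existing public header):

```c
#include "wasm_export.h"

/* Sketch: mixing module-level and per-instantiation WASI config is rejected. */
static wasm_module_inst_t
conflicting_wasi_config(wasm_module_t module,
                        struct InstantiationArgs2 *inst_args, char **argv,
                        int argc)
{
    char error_buf[128];

    /* module-level configuration (marks module->wasi_args.set_by_user) */
    wasm_runtime_set_wasi_args(module, NULL, 0, NULL, 0, NULL, 0, argv, argc);
    /* per-instantiation configuration (marks args->wasi.set_by_user) */
    wasm_runtime_instantiation_args_set_wasi_arg(inst_args, argv, argc);

    /* fails with "WASI configuration was given via both of module and
       InstantiationArgs2" */
    return wasm_runtime_instantiate_ex2(module, inst_args, error_buf,
                                        sizeof(error_buf));
}
```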

View File

@ -7,8 +7,24 @@
#ifndef _PLATFORM_INTERNAL_H
#define _PLATFORM_INTERNAL_H

+/*
+ * Modern Zephyr uses zephyr/ namespace.
+ *
+ * Note: Cannot use KERNEL_VERSION_NUMBER here as it's defined in version.h
+ * which we're trying to include. Must use feature detection instead.
+ */
+#ifdef __has_include
+#if __has_include(<zephyr/autoconf.h>)
+#include <zephyr/autoconf.h>
+#include <zephyr/version.h>
+#else
#include <autoconf.h>
#include <version.h>
+#endif
+#else
+#include <autoconf.h>
+#include <version.h>
+#endif

#if KERNEL_VERSION_NUMBER < 0x030200 /* version 3.2.0 */
#include <zephyr.h>

View File

@ -2,7 +2,7 @@
This document is intended to describe the current status of WebAssembly proposals and WASI proposals in WAMR.

-Only track proposals that are followed in the [WebAssembly proposals](https://github.com/WebAssembly/proposals) and [WASI proposals](https://github.com/WebAssembly/WASI/blob/main/Proposals.md).
+Only track proposals that are followed in the [WebAssembly proposals](https://github.com/WebAssembly/proposals) and [WASI proposals](https://github.com/WebAssembly/WASI/blob/main/docs/Proposals.md).

Normally, the document tracks proposals that are in phase 4. However, if a proposal in an earlier phase receives support, it will be added to the list below.

View File

@ -8,6 +8,7 @@ dependencies:
  idf: ">=4.4"
targets:
  - esp32
+ - esp32s2
  - esp32s3
  - esp32c3
  - esp32c6

View File

@ -162,16 +162,18 @@ libc_wasi_parse(char *arg, libc_wasi_parse_context_t *ctx)
    return LIBC_WASI_PARSE_RESULT_OK;
}

-void
-libc_wasi_init(wasm_module_t wasm_module, int argc, char **argv,
-               libc_wasi_parse_context_t *ctx)
-{
-    wasm_runtime_set_wasi_args(wasm_module, ctx->dir_list, ctx->dir_list_size,
-                               ctx->map_dir_list, ctx->map_dir_list_size,
-                               ctx->env_list, ctx->env_list_size, argv, argc);
-    wasm_runtime_set_wasi_addr_pool(wasm_module, ctx->addr_pool,
-                                    ctx->addr_pool_size);
-    wasm_runtime_set_wasi_ns_lookup_pool(wasm_module, ctx->ns_lookup_pool,
-                                         ctx->ns_lookup_pool_size);
-}
+static void
+libc_wasi_set_init_args(struct InstantiationArgs2 *args, int argc, char **argv,
+                        libc_wasi_parse_context_t *ctx)
+{
+    wasm_runtime_instantiation_args_set_wasi_arg(args, argv, argc);
+    wasm_runtime_instantiation_args_set_wasi_env(args, ctx->env_list,
+                                                 ctx->env_list_size);
+    wasm_runtime_instantiation_args_set_wasi_dir(
+        args, ctx->dir_list, ctx->dir_list_size, ctx->map_dir_list,
+        ctx->map_dir_list_size);
+    wasm_runtime_instantiation_args_set_wasi_addr_pool(args, ctx->addr_pool,
+                                                       ctx->addr_pool_size);
+    wasm_runtime_instantiation_args_set_wasi_ns_lookup_pool(
+        args, ctx->ns_lookup_pool, ctx->ns_lookup_pool_size);
+}

View File

@ -962,10 +962,6 @@ main(int argc, char *argv[])
    }
#endif

-#if WASM_ENABLE_LIBC_WASI != 0
-    libc_wasi_init(wasm_module, argc, argv, &wasi_parse_ctx);
-#endif
-
    if (!wasm_runtime_instantiation_args_create(&inst_args)) {
        printf("failed to create instantiate args\n");
        goto fail3;

@ -974,6 +970,9 @@ main(int argc, char *argv[])
                                                           stack_size);
    wasm_runtime_instantiation_args_set_host_managed_heap_size(inst_args,
                                                                heap_size);
+#if WASM_ENABLE_LIBC_WASI != 0
+    libc_wasi_set_init_args(inst_args, argc, argv, &wasi_parse_ctx);
+#endif

    /* instantiate the module */
    wasm_module_inst = wasm_runtime_instantiate_ex2(

View File

@ -380,7 +380,7 @@ iwasm(int argc, char **argv)
    wasm_runtime_instantiation_args_set_host_managed_heap_size(inst_args,
                                                                heap_size);
#if WASM_ENABLE_LIBC_WASI != 0
-    libc_wasi_init(wasm_module, argc, argv, &wasi_parse_ctx);
+    libc_wasi_set_init_args(wasm_module, argc, argv, &wasi_parse_ctx);
#endif
    rt_memset(error_buf, 0x00, sizeof(error_buf));

View File

@ -598,10 +598,6 @@ main(int argc, char *argv[])
        goto fail2;
    }

-#if WASM_ENABLE_LIBC_WASI != 0
-    libc_wasi_init(wasm_module, argc, argv, &wasi_parse_ctx);
-#endif
-
    if (!wasm_runtime_instantiation_args_create(&inst_args)) {
        printf("failed to create instantiate args\n");
        goto fail3;

@ -610,6 +606,9 @@ main(int argc, char *argv[])
                                                           stack_size);
    wasm_runtime_instantiation_args_set_host_managed_heap_size(inst_args,
                                                                heap_size);
+#if WASM_ENABLE_LIBC_WASI != 0
+    libc_wasi_set_init_args(inst_args, argc, argv, &wasi_parse_ctx);
+#endif

    /* instantiate the module */
    wasm_module_inst = wasm_runtime_instantiate_ex2(

View File

@ -0,0 +1,61 @@
# Copyright (C) 2019 Intel Corporation. All rights reserved.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
cmake_minimum_required (VERSION 3.14)
project(checked_api_sample)
# assertion required
set(CMAKE_BUILD_TYPE Debug)
set(CMAKE_C_STANDARD 23)
list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/../cmake)
find_package(WASISDK REQUIRED)
################ runtime settings ################
string (TOLOWER ${CMAKE_HOST_SYSTEM_NAME} WAMR_BUILD_PLATFORM)
include(CheckPIESupported)
# aot and interp by default
set(WAMR_BUILD_AOT 1)
set(WAMR_BUILD_INTERP 1)
set(WAMR_BUILD_JIT 0)
# wasm32-wasi
set(WAMR_BUILD_LIBC_BUILTIN 0)
set(WAMR_BUILD_LIBC_WASI 1)
# mvp
set(WAMR_BUILD_BULK_MEMORY 1)
set(WAMR_BUILD_REF_TYPES 1)
set(WAMR_BUILD_SIMD 1)
set(WAMR_BUILD_TAIL_CALL 1)
# trap information
set(WAMR_BUILD_DUMP_CALL_STACK 1)
# vmlib
set(WAMR_ROOT_DIR ${CMAKE_CURRENT_LIST_DIR}/../..)
include(${WAMR_ROOT_DIR}/build-scripts/runtime_lib.cmake)
add_library(vmlib SHARED ${WAMR_RUNTIME_LIB_SOURCE})
target_include_directories(vmlib INTERFACE ${WAMR_ROOT_DIR}/core/iwasm/include)
target_link_libraries (vmlib ${LLVM_AVAILABLE_LIBS} -lm -ldl)
################ host ################
include (${SHARED_DIR}/utils/uncommon/shared_uncommon.cmake)
add_executable(${PROJECT_NAME} src/demo.c ${UNCOMMON_SHARED_SOURCE})
target_link_libraries(${PROJECT_NAME} vmlib)
################ aot + wasm ################
include(ExternalProject)
ExternalProject_Add(wasm
SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/wasm-apps"
CONFIGURE_COMMAND ${CMAKE_COMMAND} -S ${CMAKE_CURRENT_SOURCE_DIR}/wasm-apps -B build
-DCMAKE_TOOLCHAIN_FILE=${WASISDK_TOOLCHAIN}
BUILD_COMMAND ${CMAKE_COMMAND} --build build
INSTALL_COMMAND ${CMAKE_COMMAND} --install build --prefix ${CMAKE_CURRENT_BINARY_DIR}
)
enable_testing()
add_test(NAME checked_api_sample_test
COMMAND ${PROJECT_NAME}
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
)

View File

@ -0,0 +1,110 @@
#include <stdio.h>
#include <stdlib.h>
#include "bh_platform.h"
#include "bh_read_file.h"
#include "wasm_export_checked.h"
#define VERIFY_API_RESULT(callee, result, fail_label) \
do { \
if (result.error_code != 0) { \
printf("%s failed with error code: %d\n", #callee, \
result.error_code); \
goto fail_label; \
} \
} while (0)
int
main(int argc, char *argv_main[])
{
Result api_result;
wasm_module_t module = NULL;
uint32 buf_size, stack_size = 8092, heap_size = 8092;
wasm_module_inst_t module_inst = NULL;
wasm_function_inst_t func = NULL;
wasm_exec_env_t exec_env = NULL;
int ret = EXIT_FAILURE;
RuntimeInitArgs init_args;
// 512Kb
static char global_heap_buf[512 * 1024];
char *wasm_path = "fib.wasm";
char *buffer;
char error_buf[128];
memset(&init_args, 0, sizeof(RuntimeInitArgs));
init_args.mem_alloc_type = Alloc_With_Pool;
init_args.mem_alloc_option.pool.heap_buf = global_heap_buf;
init_args.mem_alloc_option.pool.heap_size = sizeof(global_heap_buf);
api_result = wasm_runtime_full_init_checked(&init_args);
VERIFY_API_RESULT(wasm_runtime_full_init_checked, api_result, fail);
api_result = wasm_runtime_set_log_level_checked(WASM_LOG_LEVEL_VERBOSE);
VERIFY_API_RESULT(wasm_runtime_set_log_level_checked, api_result,
release_runtime);
buffer = bh_read_file_to_buffer(wasm_path, &buf_size);
if (buffer == NULL) {
printf("Open wasm app file [%s] failed.\n", wasm_path);
goto release_runtime;
}
api_result = wasm_runtime_load_checked((uint8 *)buffer, buf_size, error_buf,
sizeof(error_buf));
VERIFY_API_RESULT(wasm_runtime_load_checked, api_result, release_file);
module = api_result.value.wasm_module_t_value;
api_result = wasm_runtime_instantiate_checked(module, stack_size, heap_size,
error_buf, sizeof(error_buf));
VERIFY_API_RESULT(wasm_runtime_instantiate_checked, api_result,
release_module);
module_inst = api_result.value.wasm_module_inst_t_value;
api_result = wasm_runtime_create_exec_env_checked(module_inst, stack_size);
VERIFY_API_RESULT(wasm_runtime_create_exec_env_checked, api_result,
release_instance);
exec_env = api_result.value.wasm_exec_env_t_value;
api_result = wasm_runtime_lookup_function_checked(module_inst, "fib");
VERIFY_API_RESULT(wasm_runtime_lookup_function_checked, api_result,
release_exec_env);
func = api_result.value.wasm_function_inst_t_value;
wasm_val_t result[1] = { { .kind = WASM_I32 } };
wasm_val_t arguments[1] = {
{ .kind = WASM_I32, .of.i32 = 6 },
};
api_result = wasm_runtime_call_wasm_a_checked(exec_env, func, 1, result, 1,
arguments);
VERIFY_API_RESULT(wasm_runtime_call_wasm_a_checked, api_result,
release_runtime);
printf("Native finished calling wasm function: fib(%d), returned: %d\n",
arguments[0].of.i32, result[0].of.i32);
bh_assert(result[0].of.i32 == 8);
arguments[0].of.i32 = 2;
api_result = wasm_runtime_call_wasm_a_checked(exec_env, func, 1, result, 1,
arguments);
VERIFY_API_RESULT(wasm_runtime_call_wasm_a_checked, api_result,
release_runtime);
printf("Native finished calling wasm function: fib(%d), returned: %d\n",
arguments[0].of.i32, result[0].of.i32);
bh_assert(result[0].of.i32 == 1);
ret = EXIT_SUCCESS;
release_exec_env:
wasm_runtime_destroy_exec_env_checked(exec_env);
release_instance:
wasm_runtime_deinstantiate_checked(module_inst);
release_module:
wasm_runtime_unload_checked(module);
release_file:
wasm_runtime_free(buffer);
release_runtime:
wasm_runtime_destroy_checked();
fail:
return ret;
}

View File

@ -0,0 +1,17 @@
# Copyright (C) 2019 Intel Corporation. All rights reserved.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
cmake_minimum_required (VERSION 3.14)
project(checked_api_wasm_apps)
include(CMakePrintHelpers)
if(NOT CMAKE_BUILD_TYPE)
set(CMAKE_BUILD_TYPE Release)
endif()
################ wasm ################
add_executable(fib fib.c)
set_target_properties(fib PROPERTIES SUFFIX .wasm)
install(FILES ${CMAKE_CURRENT_BINARY_DIR}/fib.wasm DESTINATION .)

View File

@ -0,0 +1,31 @@
#include <stdio.h>
#include <stdlib.h>
int
fibonacci(int n)
{
if (n <= 0)
return 0;
if (n == 1)
return 1;
return fibonacci(n - 1) + fibonacci(n - 2);
}
__attribute__((export_name("fib"))) int
fib(int n)
{
int result = fibonacci(n);
return result;
}
int
main(int argc, char **argv)
{
int n = atoi(argv[1]);
printf("fibonacci(%d)=%d\n", n, fibonacci(n));
return 0;
}

View File

@ -48,36 +48,3 @@ $ ./build/wasm-mutator/wasm_mutator_fuzz ./build/CORPUS_DIR
$ ./build/aot-compiler/aot_compiler_fuzz ./build/CORPUS_DIR
````
## Fuzzing Server
```shell
1. Installation Dependent Environment
$ cd server
$ pip install -r requirements.txt
2. Database Migration
$ python3 app/manager.py db init
$ python3 app/manager.py db migrate
$ python3 app/manager.py db upgrade
3. Change localhost to your machine's IP address
$ cd ../portal
$ vim .env # Change localhost to your machine's IP address # http://<ip>:16667
4. Run Server and Portal
$ cd .. # Switch to the original directory
If you want to customize the front-end deployment port: # defaut 9999
$ vim .env # Please change the portal_port to the port you want to use
The server is deployed on port 16667 by default, If you want to change the server deployment port:
$ vim .env # Please change the server_port to the port you want to use
$ vim portal/.env # Please change the VITE_SERVER_URL to the port you want to use # http://ip:<port>
If your network needs to set up a proxy
$ vim .env # Change proxy to your proxy address
$ docker-compose up --build -d
Wait for completion, Access the port set by env
```

View File

@ -1,29 +0,0 @@
# yaml configuration
services:
web:
platform: linux/amd64
container_name: fuzz_web
build:
context: ./portal
dockerfile: Dockerfile
args:
- proxy=${proxy}
volumes:
- "./portal:/portal"
ports:
- "${portal_port}:80"
server:
build:
context: ../../..
dockerfile: ./tests/fuzz/wasm-mutator-fuzz/server/Dockerfile
args:
- proxy=${proxy}
ports:
- "${server_port}:16667"
container_name: fuzz_server
volumes:
- "./server/app/data.db:/wamr-test/tests/fuzz/wasm-mutator-fuzz/server/app/data.db"
- "./workspace:/wamr-test/tests/fuzz/wasm-mutator-fuzz/workspace"
environment:
- "TZ=Asia/Shanghai"
restart: on-failure

View File

@ -1 +0,0 @@
VITE_SERVER_URL=http://localhost:16667

View File

@ -1,24 +0,0 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*
node_modules
dist
dist-ssr
*.local
# Editor directories and files
.vscode/*
!.vscode/extensions.json
.idea
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?

View File

@ -1,24 +0,0 @@
FROM node:16 as builder
WORKDIR /portal
COPY . .
ARG proxy=""
RUN if [ "$proxy" != "" ]; \
then npm config set proxy "$proxy" && npm config set https-proxy "$proxy"; \
else echo Do not set proxy; \
fi
RUN npm install && chmod +x node_modules/.bin/tsc \
&& chmod +x node_modules/.bin/vite \
&& npm run build
FROM nginx:alpine
WORKDIR /portal
COPY --from=builder /portal/dist/ /usr/share/nginx/html/
RUN rm /etc/nginx/conf.d/default.conf
COPY nginx.conf /etc/nginx/nginx.conf
COPY default.conf.template /etc/nginx/conf.d
# hadolint ignore=DL3025
CMD /bin/sh -c "envsubst '80' < /etc/nginx/conf.d/default.conf.template > /etc/nginx/conf.d/default.conf" && nginx -g 'daemon off;'

View File

@ -1,53 +0,0 @@
server {
listen 80 default_server;
location ^~ / {
root /usr/share/nginx/html;
index index.html index.htm;
try_files $uri $uri/ /index.html;
}
location @router {
rewrite ^.*$ /index.html last; # important!
}
location ~* \.(?:manifest|appcache|html?|xml|json)$ {
root /usr/share/nginx/html;
if ($request_uri ~* .*[.](manifest|appcache|xml|json)$) {
add_header Cache-Control "public, max-age=2592000";
}
if ($request_filename ~* ^.*[.](html|htm)$) {
add_header Cache-Control "public, no-cache";
}
expires -1;
}
location ~* \.(?:js|css|map|jpg|png|svg|ico)$ {
root /usr/share/nginx/html;
try_files $uri =404;
expires 1y;
access_log off;
add_header Cache-Control "public";
}
location ~ ^.+\..+$ {
root /usr/share/nginx/html;
try_files $uri =404;
include /etc/nginx/mime.types;
}
error_page 500 502 503 504 /50x.html;
location = /50x.html {
root /usr/share/nginx/html;
}
}

View File

@ -1,13 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>WAMR fuzzing test system</title>
</head>
<body>
<div id="root"></div>
<script type="module" src="/src/main.tsx"></script>
</body>
</html>

View File

@ -1,15 +0,0 @@
error_log stderr;
pid /var/run/nginx.pid;
events {
worker_connections 1024;
}
http {
access_log /dev/stdout;
server_tokens off;
include /etc/nginx/mime.types;
include /etc/nginx/conf.d/*.conf;
}

View File

@ -1,59 +0,0 @@
# GHSA-67hx-6x53-jw92
[[PackageOverrides]]
name = "@babel/traverse"
ecosystem = "npm"
ignore = true
reason = "Accepted known vulnerabilities for testing purposes"
# GHSA-67hx-6x53-jw92
[[PackageOverrides]]
name = "babel-traverse"
ecosystem = "npm"
ignore = true
reason = "Accepted known vulnerabilities for testing purposes"
# GHSA-9c47-m6qq-7p4h
[[PackageOverrides]]
name = "json5"
ecosystem = "npm"
ignore = true
reason = "Dependency not critical for security"
# GHSA-7fh5-64p2-3v2j
[[PackageOverrides]]
name = "postcss"
ecosystem = "npm"
ignore = true
reason = "Vulnerabilities do not affect current use case"
# GHSA-gcx4-mw62-g8wm
[[PackageOverrides]]
name = "rollup"
ecosystem = "npm"
ignore = true
reason = "Legacy build tool under controlled environment"
# GHSA-c2qf-rxjj-qqgw
[[PackageOverrides]]
name = "semver"
ecosystem = "npm"
ignore = true
reason = "Version parsing is managed securely"
# GHSA-353f-5xf4-qw67
# GHSA-c24v-8rfc-w8vw
# GHSA-8jhw-289h-jh2g
# GHSA-64vr-g452-qvp3
# GHSA-9cwx-2883-4wfx
[[PackageOverrides]]
name = "vite"
ecosystem = "npm"
ignore = true
reason = "Development server not exposed to untrusted networks"
# GHSA-mwcw-c2x4-8c55
[[PackageOverrides]]
name = "nanoid"
ecosystem = "npm"
ignore = true
reason = "Accepted known vulnerabilities for testing purposes"

File diff suppressed because it is too large

View File

@ -1,27 +0,0 @@
{
"name": "my-react",
"private": true,
"version": "0.0.0",
"type": "module",
"scripts": {
"dev": "vite",
"build": "tsc && vite build",
"preview": "vite preview"
},
"dependencies": {
"@ant-design/icons": "^4.7.0",
"antd": "^4.22.8",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"react-highlight-words": "^0.18.0",
"react-router-dom": "^6.3.0",
"scripts": "^0.1.0"
},
"devDependencies": {
"@types/react": "^18.0.17",
"@types/react-dom": "^18.0.6",
"@vitejs/plugin-react": "^4.3.4",
"typescript": "^4.6.4",
"vite": "^6.2.2"
}
}

View File

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="31.88" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 257"><defs><linearGradient id="IconifyId1813088fe1fbc01fb466" x1="-.828%" x2="57.636%" y1="7.652%" y2="78.411%"><stop offset="0%" stop-color="#41D1FF"></stop><stop offset="100%" stop-color="#BD34FE"></stop></linearGradient><linearGradient id="IconifyId1813088fe1fbc01fb467" x1="43.376%" x2="50.316%" y1="2.242%" y2="89.03%"><stop offset="0%" stop-color="#FFEA83"></stop><stop offset="8.333%" stop-color="#FFDD35"></stop><stop offset="100%" stop-color="#FFA800"></stop></linearGradient></defs><path fill="url(#IconifyId1813088fe1fbc01fb466)" d="M255.153 37.938L134.897 252.976c-2.483 4.44-8.862 4.466-11.382.048L.875 37.958c-2.746-4.814 1.371-10.646 6.827-9.67l120.385 21.517a6.537 6.537 0 0 0 2.322-.004l117.867-21.483c5.438-.991 9.574 4.796 6.877 9.62Z"></path><path fill="url(#IconifyId1813088fe1fbc01fb467)" d="M185.432.063L96.44 17.501a3.268 3.268 0 0 0-2.634 3.014l-5.474 92.456a3.268 3.268 0 0 0 3.997 3.378l24.777-5.718c2.318-.535 4.413 1.507 3.936 3.838l-7.361 36.047c-.495 2.426 1.782 4.5 4.151 3.78l15.304-4.649c2.372-.72 4.652 1.36 4.15 3.788l-11.698 56.621c-.732 3.542 3.979 5.473 5.943 2.437l1.313-2.028l72.516-144.72c1.215-2.423-.88-5.186-3.54-4.672l-25.505 4.922c-2.396.462-4.435-1.77-3.759-4.114l16.646-57.705c.677-2.35-1.37-4.583-3.769-4.113Z"></path></svg>


View File

@ -1,50 +0,0 @@
#root {
background-color: rgba(230, 240, 240, 0.9);
max-width: 100%;
height: 100%;
margin: 0 auto;
padding: 2rem;
text-align: center;
}
.logo {
height: 6em;
padding: 1.5em;
will-change: filter;
}
.logo:hover {
filter: drop-shadow(0 0 2em #646cffaa);
}
.logo.react:hover {
filter: drop-shadow(0 0 2em #61dafbaa);
}
@keyframes logo-spin {
from {
transform: rotate(0deg);
}
to {
transform: rotate(360deg);
}
}
@media (prefers-reduced-motion: no-preference) {
a:nth-of-type(2) .logo {
animation: logo-spin infinite 20s linear;
}
}
.card {
padding: 2em;
}
.read-the-docs {
color: #888;
}
.col-item-value {
overflow: hidden;
line-height: 35px;
white-space: nowrap;
text-overflow: ellipsis;
}

View File

@ -1,110 +0,0 @@
// Copyright (C) 2019 Intel Corporation. All rights reserved.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
import { useEffect, useState } from "react";
import "./App.css";
import CardMenu from "./CardMenu";
import { Divider, Typography, Col, Row, Button } from "antd";
import { Empty, Spin } from "antd";
import Description from "./Descrpition";
const { Title } = Typography;
function App() {
const [dataList, setDataList] = useState<Array<any>>([]);
const [results, setResults] = useState<any>({});
const [id, setId] = useState<number>();
const [resultReload, setResultReload] = useState<number>(0);
const [tableLoading, setTableLoading] = useState<boolean>(false);
const [isLoaded, setIsLoaded] = useState<boolean>(false);
const [result, setResult] = useState<any>({});
useEffect(() => {
fetch(import.meta.env.VITE_SERVER_URL + "/get_list")
.then((res) => {
return res.json();
})
.then((body) => {
setDataList(body.results);
setIsLoaded(true);
});
const timer = setInterval(() => {
fetch(import.meta.env.VITE_SERVER_URL + "/get_list")
.then((res) => {
return res.json();
})
.then((body) => {
setDataList(body.results);
setIsLoaded(true);
});
}, 3000);
}, []);
useEffect(() => {
setTableLoading(true);
fetch(import.meta.env.VITE_SERVER_URL + `/get_list?id=${id}`)
.then((res) => {
return res.json();
})
.then((body) => {
setResults(body);
console.log(results);
setTableLoading(false);
});
}, [id, resultReload]);
const select_uuid = {
res: dataList,
setId,
setResult
};
if (!isLoaded) {
return (
<div className="App" style={{ width: document.body.clientWidth }}>
<Spin size="large" />
</div>
);
}
if (isLoaded && !dataList) {
return (
<div className="App" style={{ width: document.body.clientWidth }}>
<Empty />
</div>
);
}
return (
<div className="App">
<Typography>
<br />
<Title>WebAssembly Micro Runtime fuzzing test system</Title>
<Divider />
</Typography>
<Row gutter={16}>
<Col span={9}>
{/* {dataList && <RunTable {...select_uuid} />} */}
{<Description {...select_uuid} />}
</Col>
<Col span={15}>
{
<CardMenu
{...{
result: results,
detail_result: result,
tableLoading,
resultReload,
setResultReload
}}
/>
}
</Col>
</Row>
<Row gutter={16}>
<Col span={9}></Col>
</Row>
</div>
);
}
export default App;

View File

@ -1,551 +0,0 @@
// Copyright (C) 2019 Intel Corporation. All rights reserved.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
import {
Button,
Modal,
Table,
Card,
TableColumnsType,
Tooltip,
Tag,
Form,
message,
Input,
Progress
} from "antd";
import React, { useEffect, useState } from "react";
import "antd/dist/antd.css";
import type { ColumnsType } from "antd/es/table";
import { SyncOutlined, ArrowDownOutlined } from "@ant-design/icons";
import { useSearchParams } from "react-router-dom";
const { TextArea } = Input;
const tabList2 = [
{
key: "error",
tab: "error"
},
{
key: "stdout",
tab: "stdout"
},
{
key: "stderr",
tab: "stderr"
}
];
interface ErrorDataType {
id: number;
name: string;
fuzzing_id: number;
data: any;
status: string;
create_time: string;
update_time: string;
comment: any;
}
const CardMenu: React.FC<{
result: any;
detail_result: any;
tableLoading: boolean;
resultReload: number;
setResultReload: any;
}> = ({ result, detail_result, tableLoading, resultReload, setResultReload }) => {
const [selectedRowKeys, setSelectedRowKeys] = useState<React.Key[]>([]);
const [modalVisible, setModalVisible] = useState(false);
const [modal2Visible, setModal2Visible] = useState(false);
const [modal3Visible, setModal3Visible] = useState(false);
const [reloadLoading, setRelLoading] = useState(false);
const [errorTabData, setErrorTabData] = useState("");
const [downloadLoading, setDownLoading] = useState(false);
result.results &&
(result.results = result.results.map((t: any) => ({
key: t.id,
...t
})));
const error_columns: ColumnsType<ErrorDataType> = [
{
title: "ErrorName",
width: "13%",
dataIndex: "name",
render: (value) => {
return (
<Tooltip placement="topLeft" title={value}>
<div className="col-item-value">{value}</div>
</Tooltip>
);
}
},
{
title: "CreateTime",
dataIndex: "create_time",
width: "13%",
render: (value) => {
return (
<Tooltip placement="topLeft" title={value}>
<div className="col-item-value">{value}</div>
</Tooltip>
);
}
},
{
title: "UpdateTime",
dataIndex: "update_time",
width: "13.5%",
render: (value) => {
return (
<Tooltip placement="topLeft" title={value}>
<div className="col-item-value">{value}</div>
</Tooltip>
);
}
},
{
title: "Comment",
dataIndex: "comment",
width: "12%",
render: (value) => {
return (
<Tooltip placement="topLeft" title={value?.comment}>
<div className="col-item-value">{value?.comment}</div>
</Tooltip>
);
}
},
{
title: "Assign",
dataIndex: "comment",
width: "9%",
render: (value) => {
return (
<Tooltip placement="topLeft" title={value?.assign}>
<div className="col-item-value">{value?.assign}</div>
</Tooltip>
);
}
},
{
title: "Status",
dataIndex: "status",
width: "14%",
filters: [
{ text: "Pending", value: 2 },
{ text: "Error", value: 1 },
{ text: "OK", value: 0 }
],
onFilter: (value, record) => {
return record.status === value;
},
render: (value, Object) => {
var colors: string = "";
var val: string = "";
if (value === 1) {
colors = "red";
val = `Error(${Object.name.split("-")[0]})`;
} else if (value === 0) {
colors = "green";
val = "OK";
} else if (value === 2) {
colors = "";
val = "pending";
}
return (
<>
{/* <Tooltip placement="topLeft" title={Object?.wamr_commit}> */}
<div className="col-item-value">
<Tag color={colors}> {val} </Tag>
{/* <a
href={`https://github.com/bytecodealliance/wasm-micro-runtime/commit/${Object?.wamr_commit}`}
>
{Object?.wamr_commit}
</a>
</Tooltip> */}
</div>
</>
);
}
},
{
title: "Action",
dataIndex: "",
// width: "15%",
render: (value, Object) => {
return (
<>
<Button
type="primary"
onClick={() => {
console.log(Object.data);
fetch(import.meta.env.VITE_SERVER_URL + `/get_error_out?id=${Object.id}`)
.then((res) => {
return res.json();
})
.then((body) => {
setErrorTabData(body.result.std_out);
setModal3Visible(true);
});
}}
>
Priview
</Button>
<Button
key="0"
type="link"
onClick={async () => {
try {
const response = await fetch(
import.meta.env.VITE_SERVER_URL + `/get_error_txt?id=${Object.id}`,
{
method: "GET"
}
);
console.log(Object.name);
get_cases(response, Object.name);
} catch (err) {
message.error("Download timeout");
}
}}
>
<ArrowDownOutlined />
</Button>
</>
);
}
}
];
const onSelectChange = (newSelectedRowKeys: React.Key[]) => {
console.log("selectedRowKeys changed: ", selectedRowKeys);
setSelectedRowKeys(newSelectedRowKeys);
};
const start = (repo: string, branch: string, build_args: string) => {
setRelLoading(true);
fetch(import.meta.env.VITE_SERVER_URL + "/error_restart", {
method: "POST",
headers: {
Accept: "application/json",
"Content-Type": "application/json"
},
body: JSON.stringify({
id: selectedRowKeys,
repo: repo,
branch: branch,
build_args: build_args
})
})
.then((res) => {
return res.json();
})
.then((body) => {
setRelLoading(false);
if (body?.status === 1) {
setResultReload(resultReload + 1);
message.loading("pending");
} else {
message.error(body?.msg ? body?.msg : "Server Error");
}
});
};
const rowSelection = {
selectedRowKeys,
onChange: onSelectChange,
getCheckboxProps: (record: ErrorDataType) => ({
disabled: Number(record.status) === 2
})
};
const hasSelected = selectedRowKeys.length > 0;
const [form] = Form.useForm();
const set_comment = (comment: string, assign: string) => {
setRelLoading(true);
fetch(import.meta.env.VITE_SERVER_URL + "/set_commend", {
method: "POST",
headers: {
Accept: "application/json",
"Content-Type": "application/json"
},
body: JSON.stringify({
id: selectedRowKeys,
comment: {
comment: comment,
assign: assign
}
})
})
.then((res) => {
return res.json();
})
.then((body) => {
setRelLoading(false);
if (body?.status === 1) {
setResultReload(resultReload + 1);
message.success("success");
} else {
message.error("Server Error");
}
});
};
const get_cases = async (response: Response, name: string) => {
try {
if (response.headers.get("content-type") !== "application/json") {
response
.blob()
.then((blob) => {
const a = window.document.createElement("a");
const downUrl = window.URL.createObjectURL(
new Blob([blob], { type: "multipart/form-data" })
);
// set the name for the downloaded file
let filename = name;
if (
response.headers.get("content-disposition") &&
response.headers?.get("content-disposition")?.indexOf("filename=") !== -1
) {
filename =
response.headers?.get("content-disposition")?.split("filename=")[1] || name;
a.href = downUrl;
a.download = `${decodeURI(filename.split('"')[1])}` || name;
a.click();
window.URL.revokeObjectURL(downUrl);
} else {
a.href = downUrl;
a.download = name;
a.click();
window.URL.revokeObjectURL(downUrl);
}
})
.catch((error) => {
message.error(error);
});
} else {
let res = await response.json();
message.error(res.msg);
}
} catch (err) {
console.log(err);
message.error("Download timeout");
}
};
return (
<>
<br />
<Button></Button>
<Card
type={"inner"}
style={{
width: "100%",
height: document.body.clientHeight - 210,
textAlign: "left",
borderRadius: "10px",
overflow: "hidden"
}}
// headStyle={{ backgroundColor: "#87CEFAB7" }}
title="errors"
// extra={<a href="#">More</a>}
// tabList={tabList}
loading={tableLoading}
>
<div>
<div
style={{
marginBottom: 16,
textAlign: "left"
}}
>
<Button
loading={reloadLoading}
type="primary"
onClick={() => {
setModalVisible(true);
}}
disabled={!hasSelected}
>
Verify
</Button>
<> </>
<Button
loading={reloadLoading}
type="primary"
onClick={() => {
setModal2Visible(true);
}}
disabled={!hasSelected}
>
Comment
</Button>
<> </>
<Button
loading={downloadLoading}
type="primary"
onClick={async () => {
setDownLoading(true);
try {
const response = await fetch(import.meta.env.VITE_SERVER_URL + "/get_cases_zip", {
method: "POST",
headers: {
Accept: "application/json",
"Content-Type": "application/json"
},
body: JSON.stringify({
id: selectedRowKeys
})
});
get_cases(response, "cases.zip");
} catch (err) {
message.error("Download timeout");
}
setSelectedRowKeys([]);
setDownLoading(false);
}}
disabled={!hasSelected}
>
Download Selected
</Button>
<> </>
<Button
type="primary"
icon={<SyncOutlined spin={tableLoading} />}
onClick={() => {
setResultReload(resultReload + 1);
}}
/>
<span style={{ marginLeft: 8 }}>
{hasSelected ? `Selected ${selectedRowKeys.length} items` : ""}
</span>
</div>
<Modal
title="Priview"
centered
width={"60%"}
bodyStyle={{ height: 400 }}
visible={modal3Visible}
footer={
<>
{" "}
<Button key="close" onClick={() => setModal3Visible(false)}>
close
</Button>{" "}
</>
}
// onOk={() => setModal3Visible(false)}
onCancel={() => setModal3Visible(false)}
>
<div
style={{
whiteSpace: "pre-wrap",
height: "350px",
overflow: "auto"
}}
>
{errorTabData}
</div>
</Modal>
<Modal
title="verify"
centered
visible={modalVisible}
onOk={() => {
let repo = form.getFieldsValue(["repo", "branch", "build_args"]).repo;
let branch = form.getFieldsValue(["repo", "branch", "build_args"]).branch;
let build_args = form.getFieldsValue(["repo", "branch", "build_args"]).build_args;
if (repo === "" || branch === "") {
message.error("repo and branch cannot be empty");
return;
}
if (repo === undefined) {
repo = detail_result.repo;
}
if (branch === undefined) {
branch = detail_result.branch;
}
if (build_args === undefined) {
build_args = detail_result.build_args;
}
start(repo, branch, build_args);
setModalVisible(false);
setSelectedRowKeys([]);
}}
onCancel={() => {
setModalVisible(false);
}}
>
<Form form={form} name="domain" labelCol={{ span: 4 }} wrapperCol={{ span: 24 }}>
<Form.Item
label="repo"
name="repo"
rules={[{ required: true, message: "Please input your repo!" }]}
>
<TextArea defaultValue={detail_result.repo} placeholder="Please enter repo" />
</Form.Item>
<Form.Item
label="branch"
name="branch"
rules={[{ required: true, message: "Please input your branch!" }]}
>
<Input defaultValue={detail_result.branch} />
</Form.Item>
<Form.Item label="build_args" name="build_args">
<Input defaultValue={detail_result.build_args} placeholder="Please enter build" />
</Form.Item>
</Form>
</Modal>
<Modal
title="Write Comment and Assign"
centered
visible={modal2Visible}
onOk={() => {
const data_any = form.getFieldsValue(["comment", "assign"]);
const comment = data_any.comment;
const assign = data_any.assign;
set_comment(comment, assign);
setModal2Visible(false);
}}
onCancel={() => {
setModal2Visible(false);
}}
>
<Form
form={form}
name="domain"
// autoComplete="off"
labelCol={{ span: 4 }}
wrapperCol={{ span: 24 }}
>
<Form.Item label="comment" name="comment">
<TextArea placeholder="Please enter comment" />
</Form.Item>
<Form.Item label="assign" name="assign">
<Input placeholder="Please enter assign" />
</Form.Item>
</Form>
</Modal>
<Table
bordered
rowSelection={rowSelection}
columns={error_columns}
dataSource={result.results}
scroll={{ y: document.body.clientHeight - 450 }}
/>
</div>
</Card>
</>
);
};
export default CardMenu;

View File

@ -1,389 +0,0 @@
// Copyright (C) 2019 Intel Corporation. All rights reserved.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
import {
Descriptions,
DatePicker,
Card,
Space,
Button,
Badge,
Divider,
Row,
Statistic,
Col,
Modal,
Form,
Input,
message,
Upload,
UploadFile
} from "antd";
import { useEffect, useState } from "react";
import moment from "moment";
import "antd/dist/antd.css";
import { UploadOutlined } from "@ant-design/icons";
import type { DatePickerProps, RangePickerProps } from "antd/es/date-picker";
const { TextArea } = Input;
interface DataType {
id: number;
branch: string;
build_args: string;
start_time: string;
end_time: string;
status: string;
repo: string;
data: any;
wamr_commit: string;
fuzz_time: number;
end_error: number;
error: number;
}
interface select_uuid {
res: Array<DataType>;
setId: any;
setResult: any;
}
const normFile = (e: any) => {
console.log("Upload event:", e);
if (Array.isArray(e)) {
return e;
}
return e?.fileList;
};
const Description = ({ res, setId, setResult }: select_uuid) => {
// const formRef = react
const range = (start: number, end: number) => {
const result = [];
for (let i = start; i < end; i++) {
result.push(i);
}
return result;
};
const [modalVisible, setModalVisible] = useState<boolean>(false);
const [modal2Visible, setModal2Visible] = useState<boolean>(false);
const [form] = Form.useForm();
// const [fileList, setFileList] = useState<UploadFile[]>([]);
const disabledDate: RangePickerProps["disabledDate"] = (current) => {
return current && current < moment().subtract(1, "day").endOf("day");
};
// let fileList: UploadFile[] = [];
var fileList: Array<string> = [];
const new_fuzzing = (repo: string, branch: string, fuzz_time: number, build_args: string) => {
fetch(import.meta.env.VITE_SERVER_URL + "/new_fuzzing", {
method: "POST",
headers: {
Accept: "application/json",
"Content-Type": "application/json"
},
body: JSON.stringify({
repo: repo,
branch: branch,
fuzz_time: fuzz_time,
build_args: build_args
})
})
.then((res) => {
return res.json();
})
.then((body) => {
if (body.status === 0) {
message.error(body.msg);
} else {
message.success("new fuzzing success");
}
});
};
return (
<>
<Row gutter={16}>
<Col span={5}>
<Button
type="primary"
style={{}}
onClick={() => {
setModalVisible(true);
}}
>
New fuzzing test
</Button>
</Col>
<> </>
<Col span={8}>
<Button
type="primary"
style={{}}
onClick={() => {
setModal2Visible(true);
}}
>
Upload Case
</Button>
</Col>
</Row>
<Modal
title="Write Comment and Assign"
centered
visible={modalVisible}
onOk={() => {
const fields_value = form.getFieldsValue(["repo", "branch", "end_time", "build_args"]);
let repo = fields_value.repo;
let branch = fields_value.branch;
let fuzz_time = fields_value.end_time;
const build_args = fields_value.build_args;
if (repo !== "" || branch !== "") {
repo =
repo === undefined
? "https://github.com/bytecodealliance/wasm-micro-runtime.git"
: repo;
branch = branch === undefined ? "main" : branch;
if (fuzz_time) {
const this_time = Date.parse(new Date().toString());
fuzz_time = Date.parse(fuzz_time);
if (fuzz_time > this_time) {
fuzz_time = (fuzz_time - this_time) / 1000;
} else {
fuzz_time = 1;
}
}
new_fuzzing(repo, branch, fuzz_time, build_args);
setModalVisible(false);
} else {
message.error("please enter repo and branch");
}
}}
onCancel={() => {
setModalVisible(false);
}}
>
<Form
form={form}
name="domain"
// autoComplete="off"
labelCol={{ span: 4 }}
wrapperCol={{ span: 24 }}
initialValues={{ remember: true }}
>
<Form.Item
label="repo"
name="repo"
rules={[{ required: true, message: "Please input your repo!" }]}
>
<TextArea
defaultValue="https://github.com/bytecodealliance/wasm-micro-runtime.git"
placeholder="Please enter repo"
/>
</Form.Item>
<Form.Item
label="branch"
name="branch"
rules={[{ required: true, message: "Please input your branch!" }]}
>
<Input defaultValue="main" placeholder="Please enter branch" />
</Form.Item>
<Form.Item label="end_time" name="end_time">
<DatePicker
format="YYYY-MM-DD HH:mm:ss"
disabledDate={disabledDate}
// disabledTime={disabledDateTime}
showTime={{ defaultValue: moment("00:00:00", "HH:mm:ss") }}
/>
</Form.Item>
<Form.Item label="build_args" name="build_args">
<Input placeholder="Please enter build_args" />
</Form.Item>
</Form>
</Modal>
<Modal
title="Upload Cases"
footer={[]}
onCancel={() => {
form.resetFields();
setModal2Visible(false);
}}
onOk={() => {
// console.log(123123, fileList);
form.resetFields();
setModal2Visible(false);
}}
visible={modal2Visible}
>
<Form
form={form}
name="upload"
// action={import.meta.env.VITE_SERVER_URL + "/uplad_case"}
// method="post"
// encType="multipart/form-data"
autoComplete="off"
labelCol={{ span: 4 }}
wrapperCol={{ span: 24 }}
initialValues={{ remember: true }}
>
<Form.Item
name="upload"
label="upload"
valuePropName="fileList"
getValueFromEvent={normFile}
>
{/* <input type="file" /> */}
<Upload
name="file"
listType="picture"
action={import.meta.env.VITE_SERVER_URL + "/upload_case"}
// action=""
// fileList={fileList}
beforeUpload={(file) => {
return new Promise((resolve, reject) => {
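// Only files with a .wasm extension are accepted as seed cases; anything else is rejected before upload.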
let fileName = file.name;
const file_config = fileName.split(".");
if (file_config[file_config.length - 1] !== "wasm") {
message.error("Wrong file type");
return reject(false);
}
return resolve(true);
});
}}
onRemove={(file) => {
// import.meta.env.VITE_SERVER_URL + "/remove_case"
// console.log(file.name);
fetch(import.meta.env.VITE_SERVER_URL + "/remove_case", {
method: "POST",
headers: {
"Content-Type": "application/json"
},
body: JSON.stringify({
filename: file.name
})
});
}}
>
<Button icon={<UploadOutlined />}>Click to upload</Button>
</Upload>
</Form.Item>
</Form>
</Modal>
<br />
<Space
direction="vertical"
size="middle"
style={{
display: "flex",
height: document.body.clientHeight - 210,
overflow: "auto"
}}
>
{Object.keys(res).map((r: any) => (
<Card
type="inner"
title={res[r].repo + ": " + res[r].branch}
style={{
width: "99.9%",
textAlign: "left",
borderRadius: "10px",
overflow: "hidden"
}}
headStyle={{ backgroundColor: "#87CEFAB7" }}
>
<Descriptions
size="default"
column={2}
// title={"pid: " + (res[r].data?.pid ? res[r].data?.pid : "")}
extra={
Number(res[r].status) === 2 ? (
res[r].data?.error ? (
<Badge status="error" text={res[r].data?.error} />
) : (
<Badge status="processing" text="to be operated" />
)
) : Number(res[r].status) === 1 ? (
<Badge status="processing" text="Running" />
) : (
<Badge status="default" text="End" />
)
}
>
<Descriptions.Item label="Start time">{res[r].start_time}</Descriptions.Item>
<Descriptions.Item label="End time">{res[r].end_time}</Descriptions.Item>
<Descriptions.Item label="Build args">{res[r].build_args}</Descriptions.Item>
<Descriptions.Item label="WAMR commit">
<a
href={`https://github.com/bytecodealliance/wasm-micro-runtime/commit/${res[r]?.wamr_commit}`}
>
{res[r]?.wamr_commit}
</a>
</Descriptions.Item>
<Descriptions.Item label="">
<Row gutter={24}>
<Col span={10}>
<Button
type="primary"
onClick={() => {
setId(res[r].id);
setResult(res[r]);
}}
>
Detail
</Button>
</Col>
<Col span={10}>
<Button
disabled={Number(res[r].status) !== 1}
type="primary"
danger
onClick={() => {
fetch(import.meta.env.VITE_SERVER_URL + "/end_fuzzing", {
method: "POST",
headers: {
Accept: "application/json",
"Content-Type": "application/json"
},
body: JSON.stringify({
id: res[r].id
})
})
.then((res) => {
return res.json();
})
.then((body) => {
if (body.status === 0) {
message.error(body.msg);
} else {
message.success("Stop fuzzing success");
}
});
}}
>
Stop
</Button>
</Col>
</Row>
</Descriptions.Item>
</Descriptions>
<Divider />
<Row gutter={24}>
<Col span={6}>
<Statistic title="Total Error" value={res[r].error + res[r].end_error} />
</Col>
<Col span={6}>
<Statistic title="Fixed" value={res[r].end_error} />
</Col>
<Col span={8}>
<Statistic title="Remaining Errors" value={res[r].error} />
</Col>
</Row>
</Card>
))}
</Space>
</>
);
};
export default Description;

View File

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="35.93" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 228"><path fill="#00D8FF" d="M210.483 73.824a171.49 171.49 0 0 0-8.24-2.597c.465-1.9.893-3.777 1.273-5.621c6.238-30.281 2.16-54.676-11.769-62.708c-13.355-7.7-35.196.329-57.254 19.526a171.23 171.23 0 0 0-6.375 5.848a155.866 155.866 0 0 0-4.241-3.917C100.759 3.829 77.587-4.822 63.673 3.233C50.33 10.957 46.379 33.89 51.995 62.588a170.974 170.974 0 0 0 1.892 8.48c-3.28.932-6.445 1.924-9.474 2.98C17.309 83.498 0 98.307 0 113.668c0 15.865 18.582 31.778 46.812 41.427a145.52 145.52 0 0 0 6.921 2.165a167.467 167.467 0 0 0-2.01 9.138c-5.354 28.2-1.173 50.591 12.134 58.266c13.744 7.926 36.812-.22 59.273-19.855a145.567 145.567 0 0 0 5.342-4.923a168.064 168.064 0 0 0 6.92 6.314c21.758 18.722 43.246 26.282 56.54 18.586c13.731-7.949 18.194-32.003 12.4-61.268a145.016 145.016 0 0 0-1.535-6.842c1.62-.48 3.21-.974 4.76-1.488c29.348-9.723 48.443-25.443 48.443-41.52c0-15.417-17.868-30.326-45.517-39.844Zm-6.365 70.984c-1.4.463-2.836.91-4.3 1.345c-3.24-10.257-7.612-21.163-12.963-32.432c5.106-11 9.31-21.767 12.459-31.957c2.619.758 5.16 1.557 7.61 2.4c23.69 8.156 38.14 20.213 38.14 29.504c0 9.896-15.606 22.743-40.946 31.14Zm-10.514 20.834c2.562 12.94 2.927 24.64 1.23 33.787c-1.524 8.219-4.59 13.698-8.382 15.893c-8.067 4.67-25.32-1.4-43.927-17.412a156.726 156.726 0 0 1-6.437-5.87c7.214-7.889 14.423-17.06 21.459-27.246c12.376-1.098 24.068-2.894 34.671-5.345a134.17 134.17 0 0 1 1.386 6.193ZM87.276 214.515c-7.882 2.783-14.16 2.863-17.955.675c-8.075-4.657-11.432-22.636-6.853-46.752a156.923 156.923 0 0 1 1.869-8.499c10.486 2.32 22.093 3.988 34.498 4.994c7.084 9.967 14.501 19.128 21.976 27.15a134.668 134.668 0 0 1-4.877 4.492c-9.933 8.682-19.886 14.842-28.658 17.94ZM50.35 144.747c-12.483-4.267-22.792-9.812-29.858-15.863c-6.35-5.437-9.555-10.836-9.555-15.216c0-9.322 13.897-21.212 37.076-29.293c2.813-.98 5.757-1.905 8.812-2.773c3.204 10.42 7.406 21.315 12.477 32.332c-5.137 11.18-9.399 22.249-12.634 32.792a134.718 134.718 0 0 1-6.318-1.979Zm12.378-84.26c-4.811-24.587-1.616-43.134 6.425-47.789c8.564-4.958 27.502 2.111 47.463 19.835a144.318 144.318 0 0 1 3.841 3.545c-7.438 7.987-14.787 17.08-21.808 26.988c-12.04 1.116-23.565 2.908-34.161 5.309a160.342 160.342 0 0 1-1.76-7.887Zm110.427 27.268a347.8 347.8 0 0 0-7.785-12.803c8.168 1.033 15.994 2.404 23.343 4.08c-2.206 7.072-4.956 14.465-8.193 22.045a381.151 381.151 0 0 0-7.365-13.322Zm-45.032-43.861c5.044 5.465 10.096 11.566 15.065 18.186a322.04 322.04 0 0 0-30.257-.006c4.974-6.559 10.069-12.652 15.192-18.18ZM82.802 87.83a323.167 323.167 0 0 0-7.227 13.238c-3.184-7.553-5.909-14.98-8.134-22.152c7.304-1.634 15.093-2.97 23.209-3.984a321.524 321.524 0 0 0-7.848 12.897Zm8.081 65.352c-8.385-.936-16.291-2.203-23.593-3.793c2.26-7.3 5.045-14.885 8.298-22.6a321.187 321.187 0 0 0 7.257 13.246c2.594 4.48 5.28 8.868 8.038 13.147Zm37.542 31.03c-5.184-5.592-10.354-11.779-15.403-18.433c4.902.192 9.899.29 14.978.29c5.218 0 10.376-.117 15.453-.343c-4.985 6.774-10.018 12.97-15.028 18.486Zm52.198-57.817c3.422 7.8 6.306 15.345 8.596 22.52c-7.422 1.694-15.436 3.058-23.88 4.071a382.417 382.417 0 0 0 7.859-13.026a347.403 347.403 0 0 0 7.425-13.565Zm-16.898 8.101a358.557 358.557 0 0 1-12.281 19.815a329.4 329.4 0 0 1-23.444.823c-7.967 0-15.716-.248-23.178-.732a310.202 310.202 0 0 1-12.513-19.846h.001a307.41 307.41 0 0 1-10.923-20.627a310.278 310.278 0 0 1 10.89-20.637l-.001.001a307.318 
307.318 0 0 1 12.413-19.761c7.613-.576 15.42-.876 23.31-.876H128c7.926 0 15.743.303 23.354.883a329.357 329.357 0 0 1 12.335 19.695a358.489 358.489 0 0 1 11.036 20.54a329.472 329.472 0 0 1-11 20.722Zm22.56-122.124c8.572 4.944 11.906 24.881 6.52 51.026c-.344 1.668-.73 3.367-1.15 5.09c-10.622-2.452-22.155-4.275-34.23-5.408c-7.034-10.017-14.323-19.124-21.64-27.008a160.789 160.789 0 0 1 5.888-5.4c18.9-16.447 36.564-22.941 44.612-18.3ZM128 90.808c12.625 0 22.86 10.235 22.86 22.86s-10.235 22.86-22.86 22.86s-22.86-10.235-22.86-22.86s10.235-22.86 22.86-22.86Z"></path></svg>


View File

@ -1,70 +0,0 @@
:root {
font-family: Inter, Avenir, Helvetica, Arial, sans-serif;
font-size: 16px;
line-height: 24px;
font-weight: 400;
color-scheme: light dark;
color: rgba(255, 255, 255, 0.87);
background-color: #242424;
font-synthesis: none;
text-rendering: optimizeLegibility;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
-webkit-text-size-adjust: 100%;
}
a {
font-weight: 500;
color: #646cff;
text-decoration: inherit;
}
a:hover {
color: #535bf2;
}
body {
margin: 0;
display: flex;
place-items: center;
min-width: 320px;
min-height: 100vh;
}
h1 {
font-size: 3.2em;
line-height: 1.1;
}
button {
border-radius: 8px;
border: 1px solid transparent;
padding: 0.6em 1.2em;
font-size: 1em;
font-weight: 500;
font-family: inherit;
background-color: #1a1a1a;
cursor: pointer;
transition: border-color 0.25s;
}
button:hover {
border-color: #646cff;
}
button:focus,
button:focus-visible {
outline: 4px auto -webkit-focus-ring-color;
}
@media (prefers-color-scheme: light) {
:root {
color: #213547;
background-color: #ffffff;
}
a:hover {
color: #747bff;
}
button {
background-color: #f9f9f9;
}
}

View File

@ -1,13 +0,0 @@
// Copyright (C) 2019 Intel Corporation. All rights reserved.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
import React from "react";
import ReactDOM from "react-dom/client";
import App from "./App";
import "./index.css";
ReactDOM.createRoot(document.getElementById("root") as HTMLElement).render(
<React.StrictMode>
<App />
</React.StrictMode>
);

View File

@ -1 +0,0 @@
/// <reference types="vite/client" />

View File

@ -1,21 +0,0 @@
{
"compilerOptions": {
"target": "ESNext",
"useDefineForClassFields": true,
"lib": ["DOM", "DOM.Iterable", "ESNext"],
"allowJs": false,
"skipLibCheck": true,
"esModuleInterop": false,
"allowSyntheticDefaultImports": true,
"strict": true,
"forceConsistentCasingInFileNames": true,
"module": "ESNext",
"moduleResolution": "Node",
"resolveJsonModule": true,
"isolatedModules": true,
"noEmit": true,
"jsx": "react-jsx"
},
"include": ["src"],
"references": [{ "path": "./tsconfig.node.json" }]
}

View File

@ -1,9 +0,0 @@
{
"compilerOptions": {
"composite": true,
"module": "ESNext",
"moduleResolution": "Node",
"allowSyntheticDefaultImports": true
},
"include": ["vite.config.ts"]
}

View File

@ -1,7 +0,0 @@
import { defineConfig } from 'vite'
import react from '@vitejs/plugin-react'
// https://vitejs.dev/config/
export default defineConfig({
plugins: [react()]
})

View File

@ -1,4 +0,0 @@
data.db
Dockerfile copy*
migrations/
app/test.py

View File

@ -1,39 +0,0 @@
FROM ubuntu:20.04
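# Image for the local fuzzing server: install the build toolchain, copy tests/fuzz, fetch wasm-tools, and run the Flask app (main.py).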
WORKDIR /wamr-test/tests/fuzz/wasm-mutator-fuzz/server
COPY ./tests/fuzz/wasm-mutator-fuzz/server/requirements.txt /requirements.txt
ARG proxy=""
RUN if [ "$proxy" != "" ]; \
then export http_proxy="$proxy" && export https_proxy="$proxy"; \
else echo Do not set proxy; \
fi
ARG DEBIAN_FRONTEND=noninteractive
ENV TZ=Asia/Shanghai
# hadolint ignore=DL3008
RUN apt-get -o Acquire::http::proxy="$proxy" update \
&& apt-get -o Acquire::http::proxy="$proxy" install \
curl clang rustc cargo python3 python3-pip git \
gcc build-essential cmake g++-multilib libunwind-dev \
wget -y --no-install-recommends && rm -rf /var/lib/apt/lists/* \
&& pip install --no-cache-dir -U -r /requirements.txt --proxy=$proxy
COPY ./tests/fuzz /wamr-test/tests/fuzz
RUN if [ "$proxy" != "" ]; \
then git config --global http.proxy $proxy && git config --global https.proxy $proxy; \
else echo Do not set proxy for git; \
fi
WORKDIR /wamr-test/tests/fuzz/wasm-mutator-fuzz
RUN wget --progress=dot:giga -e "https_proxy=$proxy" \
https://github.com/bytecodealliance/wasm-tools/releases/download/v1.201.0/wasm-tools-1.201.0-x86_64-linux.tar.gz \
&& tar -xzf wasm-tools-1.201.0-x86_64-linux.tar.gz && mv wasm-tools-1.201.0-x86_64-linux wasm-tools
ENV PATH="/wamr-test/tests/fuzz/wasm-mutator-fuzz/wasm-tools:$PATH"
WORKDIR /wamr-test/tests/fuzz/wasm-mutator-fuzz/server/app
# hadolint ignore=DL3025
CMD nohup sh -c 'python3 main.py'

View File

@ -1,518 +0,0 @@
#!/usr/bin/env python
#
# Copyright (C) 2019 Intel Corporation. All rights reserved.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#
# coding=utf-8
from sched import scheduler
from flask import Flask, request, jsonify, send_file
from flask_sqlalchemy import SQLAlchemy
from flask_cors import CORS, cross_origin
from datetime import datetime, timedelta
from urllib.parse import quote
from pathlib import Path
from flask_caching import Cache
from flask_apscheduler import APScheduler
from zipfile import ZipFile, ZIP_DEFLATED
from io import BytesIO
from multiprocessing import Process
import os
import sys
import copy
import getopt
import signal
import psutil
import shutil
import subprocess
current_dir = Path(__file__).parent.resolve()
wasm_mutator_dir = current_dir.parent.parent
fuzz_dir = wasm_mutator_dir.parent
app = Flask(__name__)
# cors
cors = CORS(app)
app.config['CORS_HEADERS'] = 'Content-Type'
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
scheduler = APScheduler()
# sqlite URI
WIN = sys.platform.startswith('win')
if WIN:
prefix = 'sqlite:///'
else:
prefix = 'sqlite:////'
app.config['SQLALCHEMY_DATABASE_URI'] = os.getenv(
'DATABASE_URL', prefix + os.path.join(app.root_path, 'data.db'))
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.secret_key = os.urandom(12).hex()
db = SQLAlchemy(app)
def to_json(inst, cls):
ret_dict = {}
for i in cls.__table__.columns:
value = getattr(inst, i.name)
if isinstance(value, datetime):
value = value.strftime('%Y-%m-%d %H:%M:%S')
ret_dict[i.name] = value
return ret_dict
class Fuzzing(db.Model):
__tablename__ = 'fuzzing_task'
id = db.Column(db.Integer, autoincrement=True,
primary_key=True, nullable=False)
repo = db.Column(db.String(200), nullable=False, default='')
branch = db.Column(db.String(200), nullable=False, default='')
build_args = db.Column(db.String(200), nullable=False, default='')
fuzz_time = db.Column(db.Integer, default=0)
wamr_commit = db.Column(
db.String(200), nullable=False, default='')
data = db.Column(db.JSON)
start_time = db.Column(db.DateTime, nullable=False,
default=lambda: datetime.utcnow() + timedelta(hours=8))
end_time = db.Column(db.DateTime)
status = db.Column(db.Integer, default=2)
@property
def serialize(self):
return to_json(self, self.__class__)
class TaskError(db.Model):
__tablename__ = 'task_error'
id = db.Column(db.Integer, autoincrement=True,
primary_key=True, nullable=False)
fuzzing_id = db.Column(db.Integer, db.ForeignKey("fuzzing_task.id"))
name = db.Column(db.String(200), nullable=False, default='')
std_out = db.Column(db.Text, default='')
data = db.Column(db.JSON)
comment = db.Column(db.JSON)
create_time = db.Column(db.DateTime, nullable=False,
default=lambda: datetime.utcnow() + timedelta(hours=8))
update_time = db.Column(db.DateTime, nullable=False,
default=lambda: datetime.utcnow() + timedelta(hours=8))
status = db.Column(db.Integer, default=1)
@property
def serialize(self):
return to_json(self, self.__class__)
def to_data(data):
data['data']['id'] = data['id']
return data['data']
def error_count(data):
error = len(TaskError.query.filter(
TaskError.fuzzing_id == data.get('id'), TaskError.status.in_([1, 2])).all())
end_error = len(TaskError.query.filter(
TaskError.fuzzing_id == data.get('id'), TaskError.status == 0).all())
data['error'] = error
data['end_error'] = end_error
return data
def getstatusoutput(cmd):
try:
data = subprocess.check_output(
cmd, shell=True, text=True, stderr=subprocess.STDOUT, executable='/bin/bash')
exitcode = 0
except subprocess.CalledProcessError as ex:
data = ex.output
exitcode = ex.returncode
if data[-1:] == '\n':
data = data[:-1]
return exitcode, data
def get_wamr_commit(repo_root_dir):
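# Return the short commit hash of the wamr checkout under the given build directory, or "-" if it cannot be determined.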
wamr_repo_dir = repo_root_dir / 'wamr'
cmd = f'cd {wamr_repo_dir} && git log -1 --pretty=format:"%h"'
status, resp = getstatusoutput(cmd)
if status != 0:
return "-"
return resp
@app.route('/get_list', methods=["GET"])
@cross_origin()
def show_fuzz_list():
data = request.args
id = data.get('id')
if id:
all_error = TaskError.query.filter(
TaskError.fuzzing_id == id).with_entities(TaskError.id, TaskError.fuzzing_id,
TaskError.create_time, TaskError.data,
TaskError.name, TaskError.status,
TaskError.update_time, TaskError.comment).order_by(TaskError.status.desc(), TaskError.update_time.desc(), TaskError.id.desc()).all()
data_message = [{'id': error['id'], "fuzzing_id": error['fuzzing_id'],
"name": error['name'], "data": error['data'],
'create_time': error['create_time'].strftime('%Y-%m-%d %H:%M:%S'),
'update_time': error['update_time'].strftime('%Y-%m-%d %H:%M:%S'),
'status': error['status'], "comment": error["comment"]} for error in all_error]
return jsonify({"status": 1, "results": data_message, 'msg': "success", "count": len(data_message)})
else:
all_fuzz = Fuzzing.query.order_by(
Fuzzing.status.desc(), Fuzzing.end_time.desc(), Fuzzing.id.desc()).all()
data_message = list(map(lambda i: i.serialize, all_fuzz))
data_message = list(map(error_count, data_message))
return jsonify({"status": 1, "results": data_message, 'msg': "success", "count": len(data_message)})
@app.route('/new_fuzzing', methods=["POST"])
@cross_origin()
def New_fuzzing():
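# Create a fuzzing task: clone the requested repo/branch into workspace/build_<id>, build the fuzzer with the custom mutator, and launch libFuzzer in its own process group.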
data = request.json
repo = data.get('repo', '')
branch = data.get('branch', '')
build_args = data.get('build_args', '')
fuzz_time = data.get('fuzz_time', 0)
if not repo or not branch:
return jsonify({"status": 0, "result": "", 'msg': "repo and branch are required !"})
fuzz = Fuzzing(repo=repo, branch=branch,
build_args=build_args, fuzz_time=fuzz_time, start_time=datetime.utcnow() + timedelta(hours=8))
db.session.add(fuzz)
db.session.commit()
fuzz_cmd = wasm_mutator_dir / \
'workspace' / f'build_{fuzz.id}'
Path(fuzz_cmd).mkdir(exist_ok=True)
os.system(
f'cd {fuzz_cmd} && git clone --branch {branch} --depth=1 {repo} wamr')
if not Path(fuzz_cmd / 'wamr').exists():
print('------ error: clone repo failed, wamr folder does not exist ------')
# curd.set_error_status_to(list(map(lambda x: x.id, error_list)), db)
# Fuzzing.query.filter_by(id=fuzz.id).delete()
fuzz.data = {'error': "Clone repo Error"}
db.session.commit()
return jsonify({"status": 0, "result": "", "msg": "Clone repo Error"})
wamr_path_parent = fuzz_dir.parent.parent
wamr_path = wamr_path_parent / 'wamr'
wamr_path_to = wamr_path_parent / f'wamr_{fuzz.id}'
wamr_folder = Path(wamr_path).exists()
try:
if wamr_folder:
os.rename(wamr_path, wamr_path_to)
except Exception as e:
print(f'------ error: failed to rename wamr folder, error: {e} ------')
return jsonify({"status": 0, "result": "", "msg": "failed to rename wamr folder"})
try:
os.system(f'ln -s {fuzz_cmd / "wamr"} {wamr_path_parent}')
except Exception as e:
print('------ error: failed to link cloned repo as wamr ------')
if wamr_folder:
os.rename(wamr_path_to, wamr_path)
return jsonify({"status": 0, "result": "", "msg": "failed to link cloned repo as wamr"})
os.system(
f'cd {fuzz_cmd} && cmake .. -DCUSTOM_MUTATOR=1 {build_args} && make -j$(nproc)')
os.system(f'rm -rf {wamr_path}')
if wamr_folder:
os.rename(wamr_path_to, wamr_path)
os.system(
f"ln -s {wasm_mutator_dir / 'build' / 'CORPUS_DIR'} {fuzz_cmd}")
cmd_max_time = ''
if fuzz_time != 0:
cmd_max_time = f"-max_total_time={fuzz_time}"
cmd = f'cd {fuzz_cmd} && ./wasm_mutator_fuzz CORPUS_DIR {cmd_max_time} -ignore_crashes=1 -fork=2'
process_tcpdump = subprocess.Popen(
cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, preexec_fn=os.setsid)
commit_id = get_wamr_commit(fuzz_cmd)
fuzz.data = {"pid": process_tcpdump.pid}
fuzz.status = 1
fuzz.wamr_commit = commit_id
db.session.commit()
return jsonify({'status': 1, 'msg': 'success', 'result': ''})
@app.route('/end_fuzzing', methods=["POST"])
@cross_origin()
def End_fuzzing():
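# Stop a running task by killing its whole process group (the fuzzer was started with os.setsid) and mark it as finished.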
data = request.json
id = data.get('id')
if not id:
return jsonify({'status': 0, 'msg': 'id is required'})
fuzz_model = Fuzzing.query.get(id)
pid = fuzz_model.data.get('pid')
try:
os.killpg(pid, signal.SIGTERM)
except Exception as e:
pass
fuzz_model.status = 0
fuzz_model.end_time = datetime.utcnow() + timedelta(hours=8)
db.session.commit()
return jsonify({'status': 1, 'msg': 'success'})
@scheduler.task('interval', id="run_task", seconds=5, misfire_grace_time=60)
def scheduler_run_task():
fuzz_query = Fuzzing.query.filter(Fuzzing.status == 1).all()
for fuzz in fuzz_query:
# if fuzz.fuzz_time == 0:
# continue
if fuzz.data.get('pid', 0) not in psutil.pids() or psutil.Process(fuzz.data.get('pid', 0)).status() == "zombie":
fuzz.status = 0
fuzz.end_time = datetime.utcnow() + timedelta(hours=8)
db.session.commit()
for fuzz in fuzz_query:
all_error = TaskError.query.filter(
TaskError.fuzzing_id == fuzz.id).with_entities(TaskError.name).all()
fuzz_cmd = wasm_mutator_dir / \
'workspace' / f'build_{fuzz.id}'
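# libFuzzer writes findings as crash-*, oom-*, slow-unit-* and leak-* files in the build directory; only names not yet recorded become TaskError rows.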
dir_list = filter(lambda x: x.startswith(
'crash-') or x.startswith('oom-') or x.startswith('slow-unit-') or x.startswith('leak-'), os.listdir(fuzz_cmd))
all_error = [error['name'] for error in all_error]
dir_list = list(filter(lambda x: x not in all_error, dir_list))
for dir in dir_list:
cmd = f'cd {fuzz_cmd} && ./wasm_mutator_fuzz {dir}'
status, resp = getstatusoutput(cmd)
task_error = TaskError(name=dir, std_out=resp, fuzzing_id=fuzz.id,
create_time=datetime.utcnow() + timedelta(hours=8))
db.session.add(task_error)
db.session.commit()
@app.route("/get_error_out", methods=["GET"])
def get_error_out():
data = request.args
id = data.get('id')
if id:
error = TaskError.query.get(id)
data_message = error.serialize
return jsonify({"status": 1, "result": data_message, 'msg': "success"})
return jsonify({"status": 0, "results": [], 'msg': "Error"})
@app.route("/get_error_txt", methods=["GET"])
def get_error_txt():
data = request.args
id = data.get('id')
if not id:
return jsonify({"status": 0, "results": [], 'msg': "Error"})
error = TaskError.query.get(id)
fuzz_cmd = wasm_mutator_dir / \
'workspace' / f'build_{error.fuzzing_id}'
file_cmd = fuzz_cmd / error.name
response = send_file(file_cmd, as_attachment=True,
attachment_filename=error.name)
response.headers['Content-Disposition'] += "; filename*=utf-8''{}".format(
error.name)
return response
@app.route("/set_commend", methods=["POST"])
def set_commend():
data = request.json
id = data.get('id')
comment = data.get('comment')
if not id:
return jsonify({"status": 0, "results": [], 'msg': "Error"})
try:
TaskError.query.filter(TaskError.id.in_(
id)).update({"comment": comment, "update_time": datetime.utcnow() + timedelta(hours=8)})
db.session.commit()
except Exception as e:
return jsonify({"status": 0, "results": [], 'msg': "Update error"})
return jsonify({"status": 1, "results": [], 'msg': "Success"})
@app.route("/get_cases_zip", methods=["POST"])
def get_cases_zip():
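# Bundle the selected crash case files into an in-memory zip and return it as cases.zip.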
data = request.json
id_list = data.get('id')
task_query = TaskError.query.filter(TaskError.id.in_(id_list)).all()
memory_file = BytesIO()
with ZipFile(memory_file, "w", ZIP_DEFLATED) as zf:
for task_error in task_query:
fuzz_cmd = wasm_mutator_dir / \
'workspace' / f'build_{task_error.fuzzing_id}'
file_cmd = fuzz_cmd / task_error.name
zf.write(str(file_cmd), arcname=task_error.name)
memory_file.seek(0)
return send_file(memory_file, attachment_filename='cases.zip', as_attachment=True)
class processClass:
def __init__(self, fuzz_cmd, restart_cmd, error_query):
p = Process(target=self.run, args=(fuzz_cmd, restart_cmd, error_query))
p.daemon = True # Daemonize it
p.start() # Start the execution
def run(self, fuzz_cmd, restart_cmd, error_query):
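# Replay each recorded crash case against the freshly rebuilt runtime; exit code 0 marks the case as fixed (status 0), otherwise it stays open with the new output.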
for error in error_query:
shutil.copyfile(fuzz_cmd / error.name, restart_cmd / error.name)
commit = get_wamr_commit(restart_cmd)
cmd = f"cd {restart_cmd} && ./wasm_mutator_fuzz {error.name}"
status, resp = getstatusoutput(cmd)
data = copy.deepcopy(error.data)
if isinstance(data, dict):
data['wamr_commit'] = commit
else:
data = {'wamr_commit': commit}
error.data = data
error.status = 0 if status == 0 else 1
error.update_time = datetime.utcnow() + timedelta(hours=8)
error.std_out = resp if status != 0 else error.std_out
db.session.commit()
#
# This might take several minutes to complete
@app.route("/error_restart", methods=["POST"])
def error_restart():
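# Rebuild the runtime from the given repo/branch in workspace/error_restart_build_<id> and replay the selected crash cases in a background process.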
data = request.json
id_list = data.get('id')
repo = data.get('repo')
branch = data.get('branch')
build_args = data.get('build_args', '')
if not id_list or not repo:
return jsonify({"status": 0, "msg": 'parameter is incorrect'})
run_status = cache.get('runStatus')
if run_status:
return jsonify({"status": 0, "results": [], 'msg': "There are already tasks in progress"})
task_query = TaskError.query.filter(TaskError.id.in_(id_list)).all()
fuzzing_id = task_query[0].fuzzing_id
fuzz_cmd = wasm_mutator_dir / \
'workspace' / f'build_{fuzzing_id}'
restart_cmd = wasm_mutator_dir / \
'workspace' / f'error_restart_build_{fuzzing_id}'
if not Path(restart_cmd).exists():
Path(restart_cmd).mkdir(exist_ok=True)
os.system(
f'cd {restart_cmd} && git clone --branch {branch} --depth=1 {repo} wamr')
if not Path(restart_cmd / 'wamr').exists():
print('------ error: clone repo failed, wamr folder does not exist ------')
# fuzz.data = {'error': "Clone repo Error"}
db.session.commit()
return jsonify({"status": 0, "result": "", "msg": "Clone repo Error"})
wamr_path_parent = fuzz_dir.parent.parent
wamr_path = wamr_path_parent / 'wamr'
wamr_path_to = wamr_path_parent / f'wamr_restart_{fuzzing_id}'
wamr_folder = Path(wamr_path).exists()
try:
if wamr_folder:
os.rename(wamr_path, wamr_path_to)
except Exception as e:
print(f'------ error: failed to rename wamr folder, error: {e} ------')
return jsonify({"status": 0, "result": "", "msg": "failed to rename wamr folder"})
try:
os.system(f'ln -s {restart_cmd / "wamr"} {wamr_path_parent}')
except Exception as e:
print('------ error: failed to link cloned repo as wamr ------')
if wamr_folder:
os.rename(wamr_path_to, wamr_path)
return jsonify({"status": 0, "result": "", "msg": "failed to link cloned repo as wamr"})
os.system(
f'cd {restart_cmd} && cmake .. -DCUSTOM_MUTATOR=1 {build_args} && make -j$(nproc)')
os.system(f'rm -rf {wamr_path}')
if wamr_folder:
os.rename(wamr_path_to, wamr_path)
cache.delete('runStatus')
TaskError.query.filter(TaskError.id.in_(id_list)).update(
{'status': 2, "update_time": datetime.utcnow() + timedelta(hours=8)})
db.session.commit()
processClass(fuzz_cmd, restart_cmd, task_query)
return jsonify({"status": 1, "result": "", "msg": "Pending"})
@app.route('/upload_case', methods=['POST'])
def do_upload():
file = request.files['file']
filename = file.filename
upload_file_cmd = wasm_mutator_dir / "upload_path"
build_cmd = wasm_mutator_dir / "build" / "CORPUS_DIR"
if not Path(upload_file_cmd).exists():
Path(upload_file_cmd).mkdir(exist_ok=True)
file.save(str(upload_file_cmd / filename))
file.save(str(build_cmd / filename))
# os.system(f"copy {upload_file_cmd / file} {build_cmd / file}")
return jsonify({"status": 1, "result": "", "msg": "success"})
@app.route('/remove_case', methods=['POST'])
def remove_case():
file = request.json
filename = file.get('filename')
print(filename)
upload_file_cmd = wasm_mutator_dir / "upload_path" / filename
build_cmd = wasm_mutator_dir / "build" / "CORPUS_DIR" / filename
os.system(f'rm -rf "{upload_file_cmd}" "{build_cmd}"')
return jsonify({"status": 1, "result": "", "msg": "success"})
if __name__ == '__main__':
scheduler.init_app(app)
scheduler.start()
os.chdir(wasm_mutator_dir)
os.system('./smith_wasm.sh 100')
os.chdir(current_dir)
try:
opts, args = getopt.getopt(sys.argv[1:], "hp:d:", [
"help", "port=", "debug="])
except getopt.GetoptError:
print(
'main.py -p <port> -d <debug? True: False>')
print(
' or: main.py --host=<host> --port=<port> --debug=<True: False>')
print('''
host: default[0.0.0.0]
port: default[16667]
debug: default[False]
''')
sys.exit(2)
run_dict = {
"host": "0.0.0.0",
"port": 16667,
"debug": False
}
for opt, arg in opts:
if opt in ("-h", "--help"):
print(
'main.py -p <port> -d <debug? True: False>')
print(
' or: main.py --host=<host> --port=<port> --debug=<True: False>')
print('''
host: default[0.0.0.0]
port: default[16667]
debug: default[False]
''')
sys.exit()
elif opt == '--host':
run_dict['host'] = arg
elif opt in ("-p", "--port"):
run_dict['port'] = int(arg)
elif opt in ("-d", "--debug"):
run_dict['debug'] = arg.lower() in ('true', '1')
app.run(**run_dict)

View File

@ -1,18 +0,0 @@
#!/usr/bin/env python
#
# Copyright (C) 2019 Intel Corporation. All rights reserved.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from main import app, db
manager = Manager(app)
migrate = Migrate(app, db)
manager.add_command("db", MigrateCommand)
if __name__ == "__main__":
manager.run()

View File

@ -1,32 +0,0 @@
# GHSA-m2qf-hxjv-5gpq / PYSEC-2023-62
[[PackageOverrides]]
name = "Flask"
ecosystem = "PyPI"
ignore = true
reason = "Accepted known vulnerabilities for testing purposes"
# GHSA-m2qf-hxjv-5gpq / PYSEC-2023-62
[[PackageOverrides]]
name = "flask"
ecosystem = "PyPI"
ignore = true
reason = "Accepted known vulnerabilities for testing purposes"
# GHSA-84pr-m4jr-85g5
# GHSA-hxwh-jpp2-84pm / PYSEC-2024-71
[[PackageOverrides]]
name = "flask-cors"
ecosystem = "PyPI"
ignore = true
reason = "Accepted known vulnerabilities for testing purposes"
# GHSA-2g68-c3qc-8985
# GHSA-hrfv-mqp8-q5rw / PYSEC-2023-221
# GHSA-px8h-6qxv-m22q / PYSEC-2023-57
# GHSA-xg9f-g7g7-2323 / PYSEC-2023-58
# PYSEC-2022-203
[[PackageOverrides]]
name = "werkzeug"
ecosystem = "PyPI"
ignore = true
reason = "Accepted known vulnerabilities for testing purposes"

View File

@ -1,11 +0,0 @@
Flask==1.1.4
Flask_SQLAlchemy==2.5.1
flask-migrate==2.7.0
flask-script==2.0.6
flask-cors==3.0.10
flask-caching==2.0.0
werkzeug==1.0.1
markupsafe==2.0.1
flask-apscheduler==1.12.4
psutil==5.9.2
SQLAlchemy==1.4.39

View File

@ -1,141 +0,0 @@
# Copyright (C) 2019 Intel Corporation. All rights reserved.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
cmake_minimum_required (VERSION 3.14)
project(wasm_mutator)
set (CMAKE_BUILD_TYPE Debug)
set (CMAKE_C_COMPILER "clang")
set (CMAKE_CXX_COMPILER "clang++")
set (WAMR_BUILD_PLATFORM "linux")
# Reset default linker flags
set (CMAKE_SHARED_LIBRARY_LINK_C_FLAGS "")
set (CMAKE_SHARED_LIBRARY_LINK_CXX_FLAGS "")
set (CMAKE_C_STANDARD 99)
# Set WAMR_BUILD_TARGET, currently values supported:
# "X86_64", "AMD_64", "X86_32", "AARCH64[sub]", "ARM[sub]", "THUMB[sub]",
# "MIPS", "XTENSA", "RISCV64[sub]", "RISCV32[sub]"
if (NOT DEFINED WAMR_BUILD_TARGET)
if (CMAKE_SYSTEM_PROCESSOR MATCHES "^(arm64|aarch64)")
set (WAMR_BUILD_TARGET "AARCH64")
elseif (CMAKE_SYSTEM_PROCESSOR STREQUAL "riscv64")
set (WAMR_BUILD_TARGET "RISCV64")
elseif (CMAKE_SIZEOF_VOID_P EQUAL 8)
# Build as X86_64 by default in 64-bit platform
set (WAMR_BUILD_TARGET "X86_64")
elseif (CMAKE_SIZEOF_VOID_P EQUAL 4)
# Build as X86_32 by default in 32-bit platform
set (WAMR_BUILD_TARGET "X86_32")
else ()
message(SEND_ERROR "Unsupported build target platform!")
endif ()
endif ()
if(CUSTOM_MUTATOR EQUAL 1)
add_compile_definitions(CUSTOM_MUTATOR)
endif()
if (NOT DEFINED WAMR_BUILD_INTERP)
# Enable Interpreter by default
set (WAMR_BUILD_INTERP 1)
endif ()
if (NOT DEFINED WAMR_BUILD_AOT)
# Enable AOT by default.
set (WAMR_BUILD_AOT 1)
endif ()
if (NOT DEFINED WAMR_BUILD_JIT)
# Disable JIT by default.
set (WAMR_BUILD_JIT 0)
endif ()
if (NOT DEFINED WAMR_BUILD_LIBC_BUILTIN)
# Enable libc builtin support by default
set (WAMR_BUILD_LIBC_BUILTIN 1)
endif ()
if (NOT DEFINED WAMR_BUILD_LIBC_WASI)
# Enable libc wasi support by default
set (WAMR_BUILD_LIBC_WASI 1)
endif ()
if (NOT DEFINED WAMR_BUILD_FAST_INTERP)
# Enable fast interpreter
set (WAMR_BUILD_FAST_INTERP 1)
endif ()
if (NOT DEFINED WAMR_BUILD_MULTI_MODULE)
# Enable multiple modules
set (WAMR_BUILD_MULTI_MODULE 0)
endif ()
if (NOT DEFINED WAMR_BUILD_LIB_PTHREAD)
# Disable pthread library by default
set (WAMR_BUILD_LIB_PTHREAD 0)
endif ()
if (NOT DEFINED WAMR_BUILD_MINI_LOADER)
# Disable wasm mini loader by default
set (WAMR_BUILD_MINI_LOADER 0)
endif ()
if (NOT DEFINED WAMR_BUILD_SIMD)
# Enable SIMD by default
set (WAMR_BUILD_SIMD 1)
endif ()
if (NOT DEFINED WAMR_BUILD_REF_TYPES)
# Enable reference type by default
set (WAMR_BUILD_REF_TYPES 1)
endif ()
if (NOT DEFINED WAMR_BUILD_DEBUG_INTERP)
# Disable Debug feature by default
set (WAMR_BUILD_DEBUG_INTERP 0)
endif ()
if (WAMR_BUILD_DEBUG_INTERP EQUAL 1)
set (WAMR_BUILD_FAST_INTERP 0)
set (WAMR_BUILD_MINI_LOADER 0)
set (WAMR_BUILD_SIMD 0)
endif ()
set (REPO_ROOT_DIR ${CMAKE_CURRENT_LIST_DIR}/../../../..)
message(STATUS "REPO_ROOT_DIR: ${REPO_ROOT_DIR}")
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS}")
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
add_definitions(-DWAMR_USE_MEM_POOL=0 -DWASM_ENABLE_FUZZ_TEST=1)
# Enable fuzzer
add_compile_options(-fsanitize=fuzzer)
add_link_options(-fsanitize=fuzzer)
# if not calling from oss-fuzz helper, enable all support sanitizers
# oss-fuzz always defines `HELPER=True`
if (NOT "$ENV{HELPER}" STREQUAL "True")
add_compile_options(
-fsanitize=signed-integer-overflow
-fprofile-instr-generate -fcoverage-mapping
-fsanitize=address,undefined
)
add_link_options(-fsanitize=address)
endif ()
include(${REPO_ROOT_DIR}/core/shared/utils/uncommon/shared_uncommon.cmake)
include(${REPO_ROOT_DIR}/build-scripts/runtime_lib.cmake)
add_library(vmlib
${WAMR_RUNTIME_LIB_SOURCE}
)
add_executable(wasm_mutator_fuzz wasm_mutator_fuzz.cc)
target_link_libraries(wasm_mutator_fuzz vmlib -lm)

View File

@ -1,133 +0,0 @@
// Copyright (C) 2019 Intel Corporation. All rights reserved.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#include "wasm_runtime_common.h"
#include "wasm_export.h"
#include "bh_read_file.h"
#include <stdlib.h>
#include <stdio.h>
#include <errno.h>
#include <string.h>
#include <iostream>
#include <vector>
using namespace std;
extern "C" WASMModuleCommon *
wasm_runtime_load(uint8 *buf, uint32 size, char *error_buf,
uint32 error_buf_size);
extern "C" WASMModuleInstanceCommon *
wasm_runtime_instantiate(WASMModuleCommon *module, uint32 stack_size,
uint32 heap_size, char *error_buf,
uint32 error_buf_size);
extern "C" int
LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
{
/* libFuzzer doesn't allow us to modify the given Data, so we copy the data
* here */
std::vector<uint8_t> myData(Data, Data + Size);
/* init runtime environment */
wasm_runtime_init();
wasm_module_t module =
wasm_runtime_load((uint8_t *)myData.data(), Size, nullptr, 0);
if (module) {
wasm_runtime_unload(module);
}
/* destroy runtime environment */
wasm_runtime_destroy();
return 0; /* Values other than 0 and -1 are reserved for future use. */
}
/* Forward-declare the libFuzzer's mutator callback. */
extern "C" size_t
LLVMFuzzerMutate(uint8_t *Data, size_t Size, size_t MaxSize);
/* The custom mutator: */
#ifdef CUSTOM_MUTATOR
extern "C" size_t
LLVMFuzzerCustomMutator(uint8_t *Data, size_t Size, size_t MaxSize,
unsigned int Seed)
{
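/* Custom mutator flow: write the input to cur.wasm, shell out to wasm-tools mutate
 * (optionally preserving semantics), and read modified.wasm back into Data;
 * fall back to LLVMFuzzerMutate if the external tool fails. */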
if ((NULL != Data) && (Size > 10)) {
int mutate_ret = -1;
/* delete */
if (access("./cur.wasm", 0) == 0) {
remove("./cur.wasm");
}
/* 1.write data to cur.wasm */
FILE *fwrite_fp = fopen("./cur.wasm", "wb");
if (NULL == fwrite_fp) {
printf("Faild to open cur.wasm file!\n");
return 0;
}
fwrite(Data, sizeof(uint8_t), Size, fwrite_fp);
fclose(fwrite_fp);
fwrite_fp = NULL;
/* 2.wasm-tools mutate modify cur.wasm */
char cmd_tmp[150] = { 0 };
/* clang-format off */
const char *preserve_semantic = (Seed % 2) ? "--preserve-semantics" : "";
sprintf(cmd_tmp, "wasm-tools mutate cur.wasm --seed %d -o modified.wasm %s > /dev/null 2>&1", Seed, preserve_semantic);
/* clang-format on */
mutate_ret = system(cmd_tmp);
memset(cmd_tmp, 0, sizeof(cmd_tmp));
if (mutate_ret != 0) {
/* If source file not valid, use libfuzzer's own modifier */
return LLVMFuzzerMutate(Data, Size, MaxSize);
}
/* 3.read modified file */
int read_len = 0;
int file_len = 0;
int res = 0;
uint8_t *buf = NULL;
FILE *fread_fp = fopen("./modified.wasm", "rb");
if (NULL == fread_fp) {
printf("Faild to open modified.wasm file!\n");
exit(0);
}
fseek(fread_fp, 0, SEEK_END); /* location to file end */
file_len = ftell(fread_fp); /* get file size */
buf = (uint8_t *)malloc(file_len);
if (NULL != buf) {
fseek(fread_fp, 0, SEEK_SET); /* location to file start */
read_len = fread(buf, 1, file_len, fread_fp);
if ((read_len == file_len) && (read_len < MaxSize)) {
/* 4.fill Data buffer */
memcpy(Data, buf, read_len);
res = read_len;
}
else {
res = 0;
}
}
else {
res = 0;
}
memset(buf, 0, file_len);
free(buf);
fclose(fread_fp);
fread_fp = NULL;
return res;
}
else {
if (access("./modified.wasm", 0) == 0) {
remove("./modified.wasm");
}
memset(Data, 0, Size);
Size = 0;
return 0;
}
}
#endif // CUSTOM_MUTATOR

View File

@ -0,0 +1,70 @@
(module
(global $g0 (mut i32) (i32.const 0))
(global $g1 (mut i32) (i32.const 0))
(global $g2 (mut i32) (i32.const 0))
(global $g3 (mut i32) (i32.const 0))
(global $g4 (mut i32) (i32.const 0))
(global $g5 (mut i32) (i32.const 0))
(global $g6 (mut i32) (i32.const 0))
(global $g7 (mut i32) (i32.const 0))
(export "test" (func $0))
(func $0
(local i32)
global.get $g0
global.get $g1
global.get $g2
global.get $g3
global.get $g4
global.get $g5
global.get $g6
global.get $g7
global.get $g0
global.get $g1
global.get $g2
global.get $g3
global.get $g4
global.get $g5
global.get $g6
global.get $g7
global.get $g0
global.get $g1
global.get $g2
global.get $g3
global.get $g4
global.get $g4
global.get $g4
global.get $g4
global.get $g4
global.get $g4
global.get $g4
global.get $g4
global.get $g4
global.get $g0
;; has consumed 30 elements, left 2 elements on stack
block
block
f64.const 3.14
;; RESET current block stack and mark polymorphic
unreachable
;; PUSH ANY
select
loop (param i64) (result i32)
;; NOW, unmatched stacks. Enlarge frame_ref stack. Keep frame_offset stack unchanged.
global.get $g0
i32.eqz
;; OUT-OF-BOUNDS
if
unreachable
end
i32.wrap_i64
end
local.set 0
end
end
unreachable
)
)

View File

@ -0,0 +1,13 @@
(module
(type (;0;) (func))
(func (;0;) (type 0)
i32.const 0
i32.const 16
v128.load
i32.const 32
v128.load
i64x2.eq
v128.store)
(memory (;0;) 1 1)
(export "mem" (memory 0))
(export "main" (func 0)))

View File

@ -1786,6 +1786,38 @@
"stdout content": "", "stdout content": "",
"description": "load successfully" "description": "load successfully"
} }
},
{
"deprecated": false,
"ids": [
980000
],
"runtime": "iwasm-default",
"file": "frame_offset_overflow.wasm",
"mode": "fast-interp",
"options": "-f test",
"argument": "",
"expected return": {
"ret code": 1,
"stdout content": "Exception: unreachable",
"description": "no 'frame offset overflow'"
}
},
{
"deprecated": false,
"ids": [
980001
],
"runtime": "iwasm-llvm-jit",
"file": "v128.wasm",
"mode": "classic-interp",
"options": "-f main",
"argument": "",
"expected return": {
"ret code": 1,
"stdout content": "Exception: unsupported opcode",
"description": "classic-interp will exit gracefully when meeting simd opcodes"
}
}
]
}