Mirror of https://github.com/bytecodealliance/wasm-micro-runtime.git, synced 2025-09-04 00:43:57 +00:00
Compare commits: 8c7b9de5a4 ... e30c824119 (7 commits)

- e30c824119
- 6c3f6fd017
- 42851ca821
- 1a56951a6a
- 9cb1cc4af6
- 6374178746
- efc1ad820b
.github/scripts/generate_release_notes.py (new file, 205 lines)

@@ -0,0 +1,205 @@
# Copyright (C) 2019 Intel Corporation. All rights reserved.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception

# Get the last release tag from git and use it to find all merged PRs since
# that tag. Since their titles might not follow the same format, we use the
# gh CLI to search merged PRs by commit SHA.
#
# Once we have those PRs' information, extract their titles, labels and PR
# numbers and classify them into breaking changes, new features, enhancements,
# bug fixes, and others based on their labels.
#
# The release version is generated based on the last release tag. The tag
# should be in the format "WAMR-major.minor.patch", where major, minor,
# and patch are numbers. If there is a new feature in the merged PRs, the minor
# version is increased by 1 and the patch version is reset to 0.
# If there is no new feature, the patch version is increased by 1.
#
# New content is inserted at the beginning of the RELEASE_NOTES.md file,
# in a form like:
#
# ``` markdown
# ## WAMR-major.minor.patch
#
# ### Breaking Changes
#
# ### New Features
#
# ### Bug Fixes
#
# ### Enhancements
#
# ### Others
# ```
#
# The path of RELEASE_NOTES.md is passed in as a command line argument.

import json
import os
import subprocess
import sys


def run_cmd(cmd):
    result = subprocess.run(
        cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
    )
    if result.returncode != 0:
        print(f"Error running command: {cmd}\n{result.stderr}")
        sys.exit(1)
    return result.stdout.strip()


def get_last_release_tag():
    tags = run_cmd("git tag --sort=-creatordate").splitlines()
    for tag in tags:
        if tag.startswith("WAMR-"):
            return tag
    return None


def get_merged_prs_since(tag):
    # Get commits since the last release tag
    log_cmd = f'git log {tag}..HEAD --pretty=format:"%s %H"'
    logs = run_cmd(log_cmd).splitlines()

    print(f"Found {len(logs)} commits since last tag '{tag}'.")

    pr_numbers = []
    for line in logs:
        _, sha = line.rsplit(" ", 1)
        # Use the commit SHA to find the merged PR it belongs to
        pr_cmd = f"gh pr list --search {sha} --state merged --json number,title"
        pr_json = run_cmd(pr_cmd)
        pr_data = json.loads(pr_json)

        for pr in pr_data:
            pr_number = pr.get("number")
            print(f"Found PR #{pr_number} {pr['title']}")
            if pr_number and pr_number not in pr_numbers:
                pr_numbers.append(f"{pr_number}")

    return pr_numbers


def get_pr_info(pr_number):
    # Use the GitHub CLI to get PR info
    pr_json = run_cmd(f"gh pr view {pr_number} --json title,labels,url")
    pr_data = json.loads(pr_json)
    title = pr_data.get("title", "")
    labels = [label["name"] for label in pr_data.get("labels", [])]
    url = pr_data.get("url", "")
    return title, labels, url

def classify_pr(title, labels, url):
    entry = f"- {title} (#{url.split('/')[-1]})"
    if "breaking-change" in labels:
        return "Breaking Changes", entry
    elif "new feature" in labels:
        return "New Features", entry
    elif "enhancement" in labels:
        return "Enhancements", entry
    elif "bug-fix" in labels:
        return "Bug Fixes", entry
    else:
        return "Others", entry


def generate_release_notes(pr_numbers):
    sections = {
        "Breaking Changes": [],
        "New Features": [],
        "Bug Fixes": [],
        "Enhancements": [],
        "Others": [],
    }
    for pr_num in pr_numbers:
        title, labels, url = get_pr_info(pr_num)
        section, entry = classify_pr(title, labels, url)
        sections[section].append(entry)
    return sections


def generate_version_string(last_tag, sections):
    last_tag_parts = last_tag.split("-")[-1]
    major, minor, patch = map(int, last_tag_parts.split("."))

    if sections["New Features"]:
        minor += 1
        patch = 0
    else:
        patch += 1

    return f"WAMR-{major}.{minor}.{patch}"

def format_release_notes(version, sections):
    notes = [f"## {version}\n"]
    for section in [
        "Breaking Changes",
        "New Features",
        "Bug Fixes",
        "Enhancements",
        "Others",
    ]:
        notes.append(f"### {section}\n")
        if sections[section]:
            notes.extend(sections[section])
        else:
            notes.append("")
        notes.append("")
    return "\n".join(notes) + "\n"


def insert_release_notes(notes, RELEASE_NOTES_FILE):
    with open(RELEASE_NOTES_FILE, "r", encoding="utf-8") as f:
        old_content = f.read()
    with open(RELEASE_NOTES_FILE, "w", encoding="utf-8") as f:
        f.write(notes + old_content)

def set_action_output(name, value):
    """Set a step output for GitHub Actions by appending to the $GITHUB_OUTPUT file."""
    output_file = os.getenv("GITHUB_OUTPUT")
    if not output_file:
        # Not running inside GitHub Actions; nothing to do.
        return

    with open(output_file, "a", encoding="utf-8") as f:
        f.write(f"{name}={value}\n")

def main(RELEASE_NOTES_FILE):
    last_tag = get_last_release_tag()
    if not last_tag:
        print("No release tag found.")
        sys.exit(1)

    print(f"Last release tag: {last_tag}")

    pr_numbers = get_merged_prs_since(last_tag)
    if not pr_numbers:
        print("No merged PRs since last release.")
        sys.exit(0)

    print(f"Found {len(pr_numbers)} merged PRs since last release.")
    print(f"PR numbers: {', '.join(pr_numbers)}")

    sections = generate_release_notes(pr_numbers)

    next_version = generate_version_string(last_tag, sections)
    print(f"Next version will be: {next_version}")

    notes = format_release_notes(next_version, sections)
    insert_release_notes(notes, RELEASE_NOTES_FILE)
    print(f"Release notes for {next_version} generated and inserted.")

    set_action_output("next_version", next_version)


if __name__ == "__main__":
    if len(sys.argv) > 1:
        RELEASE_NOTES_FILE = sys.argv[1]
    else:
        RELEASE_NOTES_FILE = os.path.join(
            os.path.dirname(__file__), "../../RELEASE_NOTES.md"
        )

    main(RELEASE_NOTES_FILE)
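Not part of the diff above: a minimal sketch restating the version-bump rule that generate_release_notes.py applies (a new feature bumps the minor version and resets the patch; otherwise only the patch is bumped). The example tags below are illustrative, not taken from this diff.

```python
def next_wamr_version(last_tag: str, has_new_feature: bool) -> str:
    # Tag format "WAMR-major.minor.patch", as described in the script header.
    major, minor, patch = map(int, last_tag.split("-")[-1].split("."))
    if has_new_feature:
        minor += 1
        patch = 0
    else:
        patch += 1
    return f"WAMR-{major}.{minor}.{patch}"


# Illustrative checks with made-up tags:
assert next_wamr_version("WAMR-2.3.1", has_new_feature=True) == "WAMR-2.4.0"
assert next_wamr_version("WAMR-2.3.1", has_new_feature=False) == "WAMR-2.3.2"
```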
.github/workflows/codeql.yml (6 lines changed)

@@ -53,7 +53,7 @@ jobs:
       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3.29.10
+        uses: github/codeql-action/init@v3.29.11
         with:
           languages: ${{ matrix.language }}

@@ -70,7 +70,7 @@ jobs:
       - run: |
          ./.github/scripts/codeql_buildscript.sh
       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3.29.10
+        uses: github/codeql-action/analyze@v3.29.11
         with:
           category: "/language:${{matrix.language}}"
           upload: false

@@ -99,7 +99,7 @@ jobs:
           output: ${{ steps.step1.outputs.sarif-output }}/cpp.sarif

       - name: Upload CodeQL results to code scanning
-        uses: github/codeql-action/upload-sarif@v3.29.10
+        uses: github/codeql-action/upload-sarif@v3.29.11
         with:
           sarif_file: ${{ steps.step1.outputs.sarif-output }}
           category: "/language:${{matrix.language}}"
.github/workflows/prepare_release.yml (new file, 66 lines)

@@ -0,0 +1,66 @@
# Copyright (C) 2019 Intel Corporation. All rights reserved.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception

# This workflow prepares a release. It includes:
# - add new content into the RELEASE_NOTES.md
# - update the version in build-scripts/version.cmake and core/version.h
#
# The new content is added to the beginning of the RELEASE_NOTES.md file.
# It includes all merged PR titles since the last release (tag).
# Based on every PR's label, it will be categorized into different sections.
#
# The version number is updated to the next version. Based on the new content
# in the RELEASE_NOTES.md, it is determined as follows:
# 1. if there are breaking changes or new features, the next version bumps the minor version
# 2. if there are no breaking changes or new features, the next version bumps the patch version
#
# At the end, file a PR to update the files.

name: preparation for a release

on:
  workflow_dispatch:

# Cancel any in-flight jobs for the same PR/branch so there's only one active
# at a time
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  prepare_release:
    permissions:
      contents: write # update files
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: prepare the release note
        id: generate_release_notes
        run: |
          python3 ./.github/scripts/generate_release_notes.py ./RELEASE_NOTES.md

      - name: extract next version from previous step's output
        id: extract_version
        run: |
          echo "next_version=${{ steps.generate_release_notes.outputs.next_version }}" >> $GITHUB_ENV

      - name: update version files
        run: |
          python3 ./.github/scripts/update_version_files.py ${{ env.next_version }}

      - name: file a PR
        id: file_pr
        uses: peter-evans/create-pull-request@v4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          title: "Prepare for the next release"
          body: |
            This PR prepares for the next release.
            It updates the version files and adds new content to the RELEASE_NOTES.md.
          commit-message: "prepare for the next release"
          branch: prepare-release-${{ github.run_id }}
          paths: |
            RELEASE_NOTES.md
            build-scripts/version.cmake
            core/version.h
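The workflow above invokes .github/scripts/update_version_files.py, which is not part of this diff. Purely for orientation, here is a hypothetical sketch of what such a script might do, assuming core/version.h defines WAMR_VERSION_MAJOR / WAMR_VERSION_MINOR / WAMR_VERSION_PATCH macros; the real script, file layout, and macro names may differ.

```python
import re
import sys


def bump_version_header(path: str, version: str) -> None:
    """Rewrite the assumed WAMR_VERSION_* macros in a version header."""
    # version is expected in the "WAMR-major.minor.patch" form produced by
    # generate_release_notes.py.
    major, minor, patch = version.split("-")[-1].split(".")
    with open(path, encoding="utf-8") as f:
        text = f.read()
    for name, value in (("MAJOR", major), ("MINOR", minor), ("PATCH", patch)):
        text = re.sub(
            rf"(#define WAMR_VERSION_{name})\s+\d+", rf"\g<1> {value}", text
        )
    with open(path, "w", encoding="utf-8") as f:
        f.write(text)


if __name__ == "__main__":
    # e.g. python3 update_version_files_sketch.py WAMR-2.4.1
    bump_version_header("core/version.h", sys.argv[1])
```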
.github/workflows/supply_chain.yml (2 lines changed)

@@ -60,6 +60,6 @@ jobs:
       # Upload the results to GitHub's code scanning dashboard.
       - name: "Upload to code-scanning"
-        uses: github/codeql-action/upload-sarif@e96e340c1e95e91449de06aabfa9525b7b98113f
+        uses: github/codeql-action/upload-sarif@5b49155c7f37b5ec074ffd26b428e6b64b1bf412
         with:
           sarif_file: results.sarif
@@ -1 +1 @@
-requests==2.32.4
+requests==2.32.5
@@ -285,7 +285,7 @@ fd_prestats_get_entry(struct fd_prestats *pt, __wasi_fd_t fd,
                       struct fd_prestat **ret) REQUIRES_SHARED(pt->lock)
 {
     // Test for file descriptor existence.
-    if (fd >= pt->size)
+    if ((size_t)fd >= pt->size)
         return __WASI_EBADF;
     struct fd_prestat *prestat = &pt->prestats[fd];
     if (prestat->dir == NULL)

@@ -301,7 +301,7 @@ static __wasi_errno_t
 fd_prestats_remove_entry(struct fd_prestats *pt, __wasi_fd_t fd)
 {
     // Test for file descriptor existence.
-    if (fd >= pt->size)
+    if ((size_t)fd >= pt->size)
         return __WASI_EBADF;
     struct fd_prestat *prestat = &pt->prestats[fd];

@@ -356,7 +356,7 @@ fd_table_get_entry(struct fd_table *ft, __wasi_fd_t fd,
                    REQUIRES_SHARED(ft->lock)
 {
     // Test for file descriptor existence.
-    if (fd >= ft->size) {
+    if ((size_t)fd >= ft->size) {
         return __WASI_EBADF;
     }
@@ -57,7 +57,7 @@ cmake -DWAMR_BUILD_PLATFORM=linux -DWAMR_BUILD_TARGET=ARM
 - **WAMR_BUILD_LIBC_UVWASI**=1/0 (Experiment), build the [WASI](https://github.com/WebAssembly/WASI) libc subset for WASM app based on [uvwasi](https://github.com/nodejs/uvwasi) implementation, default to disable if not set

-  > Note: for platform which doesn't support **WAMR_BUILD_LIBC_WASI**, e.g. Windows, developer can try using **WAMR_BUILD_LIBC_UVWASI**.
+  > Note: WAMR doesn't support a safe sandbox on all platforms. For platforms that do not support **WAMR_BUILD_LIBC_WASI**, e.g. Windows, developers can try using an unsafe uvwasi-based WASI implementation by using **WAMR_BUILD_LIBC_UVWASI**.

 ### **Enable Multi-Module feature**
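For context on the option documented in the hunk above, a minimal sketch (not part of this diff) of configuring a WAMR build with the uvwasi-based WASI libc enabled, driven from Python; the source and build directory paths are assumptions about a typical checkout.

```python
import subprocess

# Configure and build a WAMR runtime with the experimental uvwasi-based WASI
# libc. WAMR_BUILD_LIBC_UVWASI=1 is the documented option; the -S/-B paths
# below are placeholders for an actual checkout layout.
subprocess.run(
    [
        "cmake",
        "-S", "product-mini/platforms/linux",  # assumed platform directory
        "-B", "build",
        "-DWAMR_BUILD_LIBC_UVWASI=1",
    ],
    check=True,
)
subprocess.run(["cmake", "--build", "build"], check=True)
```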