Add unit test run and coverage report generation (#3011)
* Add unit test run and coverage report generation
* fix
* fix: upload coverage report failure
* fix
* update
* fix
* fix
* update
.github/workflows/_build_linux.yml (vendored, 6 lines changed)
@@ -44,7 +44,7 @@ on:
       value: ${{ jobs.fd-build.outputs.wheel_path }}
 jobs:
   fd-build:
-    runs-on: [self-hosted, GPU-h1z1-4Cards]
+    runs-on: [self-hosted, GPU-Build]
     outputs:
       wheel_path: ${{ steps.set_output.outputs.wheel_path }}
     steps:
@@ -88,10 +88,10 @@ jobs:
       run: |
         set -x
         runner_name="${{ runner.name }}"
-        CARD_ID=$(echo "${runner_name}" | cut -d'-' -f2)
+        CARD_ID=$(echo "${runner_name}" | awk -F'-' '{print $NF}')
         gpu_id=$(echo "$CARD_ID" | fold -w1 | paste -sd,)

-        CACHE_DIR=${CACHE_DIR:-${{ github.workspace }}}
+        CACHE_DIR="${CACHE_DIR:-$(dirname "$(dirname "${{ github.workspace }}")")}"
         echo "CACHE_DIR is set to ${CACHE_DIR}"
         if [ ! -f "${CACHE_DIR}/gitconfig" ]; then
           touch "${CACHE_DIR}/gitconfig"
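The two changes in the second hunk adjust how the build job derives its GPU list and its cache root: the card ID is now taken from the last dash-separated field of the runner name instead of the second one, and CACHE_DIR now defaults to two directories above the workspace rather than the workspace itself. A minimal sketch of the GPU-list derivation, assuming a hypothetical runner name that ends in the card digits:

#!/bin/bash
# Sketch only: "GPU-Build-0123" is an assumed runner name, not a value taken from the workflow.
runner_name="GPU-Build-0123"

# Old logic (cut -d'-' -f2) would return "Build" here; awk '{print $NF}' takes the last field -> "0123".
CARD_ID=$(echo "${runner_name}" | awk -F'-' '{print $NF}')

# One digit per line, re-joined with commas -> "0,1,2,3", later passed as --gpus "device=0,1,2,3".
gpu_id=$(echo "$CARD_ID" | fold -w1 | paste -sd,)
echo "${gpu_id}"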
.github/workflows/_unit_test_coverage.yml (vendored, new file, 173 lines)
@@ -0,0 +1,173 @@
name: Run FastDeploy Unit Tests and Coverage
description: "Run FastDeploy Unit Tests and Coverage"

on:
  workflow_call:
    inputs:
      DOCKER_IMAGE:
        description: "Build Images"
        required: true
        type: string
        default: "ccr-2vdh3abv-pub.cnc.bj.baidubce.com/paddlepaddle/paddleqa:cuda126-py310"
      FASTDEPLOY_ARCHIVE_URL:
        description: "URL of the compressed FastDeploy code archive."
        required: true
        type: string
      FASTDEPLOY_WHEEL_URL:
        description: "URL of the FastDeploy Wheel."
        required: true
        type: string
      CACHE_DIR:
        description: "Cache Dir Use"
        required: false
        type: string
        default: ""

jobs:
  run_tests_with_coverage:
    runs-on: [self-hosted, GPU-h1z1-4Cards]
    outputs:
      diff_cov_file_url: ${{ steps.cov_upload.outputs.diff_cov_file_url }}
      unittest_failed_url: ${{ steps.unittest_failed.outputs.unittest_failed_url }}
    steps:
      - name: Code Prepare
        shell: bash
        env:
          docker_image: ${{ inputs.DOCKER_IMAGE }}
          fd_archive_url: ${{ inputs.FASTDEPLOY_ARCHIVE_URL }}
        run: |
          set -x
          REPO="https://github.com/${{ github.repository }}.git"
          FULL_REPO="${{ github.repository }}"
          REPO_NAME="${FULL_REPO##*/}"
          BASE_BRANCH="${{ github.base_ref }}"

          # Clean the repository directory before starting
          docker run --rm --net=host -v $(pwd):/workspace -w /workspace \
            -e "REPO_NAME=${REPO_NAME}" \
            ${docker_image} /bin/bash -c '
            if [ -d ${REPO_NAME} ]; then
              echo "Directory ${REPO_NAME} exists, removing it..."
              rm -rf ${REPO_NAME}*
            fi
          '

          wget -q ${fd_archive_url}
          tar -xf FastDeploy.tar.gz
          rm -rf FastDeploy.tar.gz
          cd FastDeploy
          git config --global user.name "FastDeployCI"
          git config --global user.email "fastdeploy_ci@example.com"
          git log -n 3 --oneline

      - name: Run FastDeploy Unit Tests and Coverage
        shell: bash
        env:
          docker_image: ${{ inputs.DOCKER_IMAGE }}
          fd_wheel_url: ${{ inputs.FASTDEPLOY_WHEEL_URL }}
          CACHE_DIR: ${{ inputs.CACHE_DIR }}
          BASE_REF: ${{ github.event.pull_request.base.ref }}
        run: |
          set -x
          runner_name="${{ runner.name }}"
          CARD_ID=$(echo "${runner_name}" | awk -F'-' '{print $NF}')
          gpu_id=$(echo "$CARD_ID" | fold -w1 | paste -sd,)

          CACHE_DIR="${CACHE_DIR:-$(dirname "$(dirname "${{ github.workspace }}")")}"
          echo "CACHE_DIR is set to ${CACHE_DIR}"
          if [ ! -f "${CACHE_DIR}/gitconfig" ]; then
            touch "${CACHE_DIR}/gitconfig"
          fi
          PARENT_DIR=$(dirname "$WORKSPACE")
          echo "PARENT_DIR:$PARENT_DIR"
          docker run --rm --net=host \
            --cap-add=SYS_PTRACE --privileged --shm-size=64G \
            -v $(pwd):/workspace -w /workspace \
            -v "${CACHE_DIR}/gitconfig:/etc/gitconfig:ro" \
            -v "${CACHE_DIR}/.cache:/root/.cache" \
            -v "${CACHE_DIR}/ConfigDir:/root/.config" \
            -e TZ="Asia/Shanghai" \
            -e "fd_wheel_url=${fd_wheel_url}" \
            -e "BASE_REF=${BASE_REF}" \
            --gpus "\"device=${gpu_id}\"" ${docker_image} /bin/bash -c '

            git config --global --add safe.directory /workspace/FastDeploy
            cd FastDeploy
            pip config set global.index-url http://pip.baidu.com/root/baidu/+simple/
            pip config set install.trusted-host pip.baidu.com
            pip config set global.extra-index-url https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple

            python -m pip install coverage
            python -m pip install diff-cover
            python -m pip install --pre paddlepaddle-gpu -i https://www.paddlepaddle.org.cn/packages/nightly/cu126/
            python -m pip install ${fd_wheel_url}
            export COVERAGE_FILE=/workspace/FastDeploy/coveragedata/.coverage
            export COVERAGE_RCFILE=/workspace/FastDeploy/scripts/.coveragerc
            TEST_EXIT_CODE=0
            bash scripts/coverage_run.sh || TEST_EXIT_CODE=8
            git diff origin/${BASE_REF}..HEAD --unified=0 > diff.txt
            echo "TEST_EXIT_CODE=${TEST_EXIT_CODE}" >> exit_code.env
            coverage combine coveragedata/
            coverage xml -o python_coverage_all.xml
            COVERAGE_EXIT_CODE=0
            diff-cover python_coverage_all.xml --diff-file=diff.txt --fail-under=90 || COVERAGE_EXIT_CODE=9
            echo "COVERAGE_EXIT_CODE=${COVERAGE_EXIT_CODE}" >> exit_code.env
            python scripts/generate_diff_coverage_xml.py diff.txt python_coverage_all.xml
          '
          if [ -f FastDeploy/exit_code.env ]; then
            cat FastDeploy/exit_code.env >> $GITHUB_ENV
          fi

      - name: Upload unit test result and diff coverage to bos
        id: cov_upload
        shell: bash
        run: |
          cd FastDeploy
          commit_id=${{ github.event.pull_request.head.sha }}
          pr_num=${{ github.event.pull_request.number }}
          target_path=paddle-github-action/PR/FastDeploy/${pr_num}/${commit_id}/SM${compile_arch//,/_}/CoverageData
          wget -q --no-proxy --no-check-certificate https://paddle-qa.bj.bcebos.com/CodeSync/develop/PaddlePaddle/PaddleTest/tools/bos_tools.py
          push_file=$(realpath bos_tools.py)
          python -m pip install bce-python-sdk==0.9.29
          diff_cov_file="diff_coverage.xml"
          if [ -f ${diff_cov_file} ];then
            python ${push_file} ${diff_cov_file} ${target_path}
            target_path_stripped="${target_path#paddle-github-action/}"
            DIFF_COV_FILE_URL=https://paddle-github-action.bj.bcebos.com/${target_path_stripped}/CoverageData/${diff_cov_file}
            echo "diff_cov_file_url=${DIFF_COV_FILE_URL}" >> $GITHUB_OUTPUT
          fi

      - name: Determine unit test success and whether the coverage rate reaches 90%
        shell: bash
        run: |
          if [ "$TEST_EXIT_CODE" -eq 8 ]; then
            echo "Unit tests failed (exit code 8)"
            exit "$TEST_EXIT_CODE"
          fi

          if [ "$COVERAGE_EXIT_CODE" -eq 9 ]; then
            echo "Coverage generation failed (exit code 9)"
            exit "$COVERAGE_EXIT_CODE"
          fi
          echo "All tests and coverage passed"
          exit 0

  diff_coverage_report:
    needs: run_tests_with_coverage
    if: always()
    runs-on: ubuntu-latest
    steps:
      - name: coverage diff file download
        shell: bash
        env:
          diff_cov_file_url: ${{ needs.run_tests_with_coverage.outputs.diff_cov_file_url }}
        run: |
          if [ -z "${diff_cov_file_url}" ]; then
            echo "No diff coverage file URL provided."
            exit 0
          fi
          wget "${diff_cov_file_url}" -O ./diff_coverage.xml || echo "Download cov file failed, but continuing..."
      - name: Upload diff coverage report
        if: ${{ needs.run_tests_with_coverage.outputs.diff_cov_file_url != null && needs.run_tests_with_coverage.outputs.diff_cov_file_url != '' }}
        uses: codecov/codecov-action@v5
        with:
          files: ./diff_coverage.xml
          name: python diff coverage
          verbose: true
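Taken together, the job enforces two gates: the unit tests must pass (otherwise TEST_EXIT_CODE=8) and the lines touched by the PR must be at least 90% covered (otherwise COVERAGE_EXIT_CODE=9). A minimal sketch of replaying that gate locally, assuming a local FastDeploy checkout with the wheel already installed and origin/develop as the base branch (both assumptions; CI uses the PR's base ref and the freshly built wheel):

# Sketch only: local replay of the CI coverage gate under the assumptions above.
cd FastDeploy
export COVERAGE_FILE=$PWD/coveragedata/.coverage
export COVERAGE_RCFILE=$PWD/scripts/.coveragerc

bash scripts/coverage_run.sh || echo "unit tests failed (exit code 8)"

git diff origin/develop..HEAD --unified=0 > diff.txt
coverage combine coveragedata/
coverage xml -o python_coverage_all.xml

# Exits non-zero when the changed lines are covered below 90%.
diff-cover python_coverage_all.xml --diff-file=diff.txt --fail-under=90
python scripts/generate_diff_coverage_xml.py diff.txt python_coverage_all.xml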
.github/workflows/pr_build_and_test.yml (vendored, 9 lines changed)
@@ -33,3 +33,12 @@ jobs:
       - name: Print wheel path
         run: |
           echo "The built wheel is located at: ${{ needs.build.outputs.wheel_path }}"
+
+  unittest_coverage:
+    name: Run FastDeploy Unit Tests and Coverage
+    needs: [clone,build]
+    uses: ./.github/workflows/_unit_test_coverage.yml
+    with:
+      DOCKER_IMAGE: ccr-2vdh3abv-pub.cnc.bj.baidubce.com/paddlepaddle/paddleqa:cuda126-py310
+      FASTDEPLOY_ARCHIVE_URL: ${{ needs.clone.outputs.repo_archive_url }}
+      FASTDEPLOY_WHEEL_URL: ${{ needs.build.outputs.wheel_path }}
scripts/.coveragerc (new file, 16 lines)
@@ -0,0 +1,16 @@
[run]
source = fastdeploy
parallel = True

[paths]
source =
    fastdeploy
    */site-packages/fastdeploy
    */lib/python3.10/site-packages/fastdeploy
    */fastdeploy

[report]
omit =
    */site-packages/*/tests/*
    */site-packages/setuptools/*
    */dist-packages/*
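Because the [run] section sets parallel = True, every coverage invocation writes its own data file next to COVERAGE_FILE, and the [paths] aliases fold the site-packages install locations back onto the fastdeploy source tree when those files are merged. A minimal sketch of the merge step, with illustrative data-file names:

# Sketch only: the data-file suffixes below are illustrative; coverage generates its own machine/PID suffixes.
ls coveragedata/
#   .coverage.runner-a.12345.X   .coverage.runner-a.12388.Y

export COVERAGE_RCFILE=scripts/.coveragerc
coverage combine coveragedata/            # merges the parallel files, applying the [paths] aliases
coverage xml -o python_coverage_all.xml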
scripts/coverage_run.sh (new file, 73 lines)
@@ -0,0 +1,73 @@
#!/bin/bash
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
echo "$DIR"

run_path="$DIR/../test/"
cd ${run_path}
ls

dirs=("layers" "operators" "worker")
failed_tests_file="failed_tests.log"
> "$failed_tests_file"
disabled_tests=(
    layers/test_sampler.py
    layers/test_append_attention.py
    layers/test_attention.py
    operators/test_rejection_top_p_sampling.py
    operators/test_perchannel_gemm.py
    operators/test_scaled_gemm_f8_i4_f16.py
    operators/test_topp_sampling.py
    operators/test_stop_generation.py
    operators/test_air_topp_sampling.py
    operators/test_fused_moe.py
)
is_disabled() {
    local test_file_rel="$1"
    for disabled in "${disabled_tests[@]}"; do
        if [[ "$test_file_rel" == "$disabled" ]]; then
            return 0
        fi
    done
    return 1
}

total=0
fail=0
success=0

for dir in "${dirs[@]}"; do
    if [ -d "$dir" ]; then
        echo "Running tests in directory: $dir"
        while IFS= read -r -d '' test_file; do
            total=$((total + 1))
            echo "Running $test_file"

            if is_disabled "$test_file"; then
                echo "Skipping disabled test: $test_file"
                continue
            fi

            python -m coverage run "$test_file"
            if [ $? -ne 0 ]; then
                echo "$test_file" >> "$failed_tests_file"
                fail=$((fail + 1))
            else
                success=$((success + 1))
            fi
        done < <(find "$dir" -type f -name "test_*.py" -print0)
    else
        echo "Directory $dir not found, skipping."
    fi
done

echo "===================================="
echo "Total test files run: $total"
echo "Successful tests: $success"
echo "Failed tests: $fail"
echo "Failed test cases are listed in $failed_tests_file"

if [ "$fail" -ne 0 ]; then
    echo "Failed test cases:"
    cat "$failed_tests_file"
    exit 8
fi
scripts/generate_diff_coverage_xml.py (new file, 71 lines)
@@ -0,0 +1,71 @@
import re
import sys
import xml.etree.ElementTree as ET
from collections import defaultdict


def get_changed_lines_from_file(diff_txt_path):
    """Parse diff.txt to get changed lines per file"""
    file_changes = defaultdict(set)
    current_file = None

    with open(diff_txt_path, encoding="utf-8") as f:
        for line in f:
            if line.startswith("+++ b/"):
                current_file = line[6:].strip()
            elif line.startswith("@@"):
                match = re.search(r"\+(\d+)(?:,(\d+))?", line)
                if match and current_file:
                    start_line = int(match.group(1))
                    line_count = int(match.group(2) or "1")
                    for i in range(start_line, start_line + line_count):
                        file_changes[current_file].add(i)
    return file_changes


def generate_diff_coverage(original_xml, diff_lines, output_xml):
    """Generate a new coverage.xml containing only changed lines"""
    tree = ET.parse(original_xml)
    root = tree.getroot()

    for package in root.findall(".//packages/package"):
        classes = package.find("classes")
        new_classes = ET.Element("classes")

        for cls in classes.findall("class"):
            filename = cls.attrib["filename"]
            if filename not in diff_lines:
                continue

            lines = cls.find("lines")
            new_lines = ET.Element("lines")

            for line in lines.findall("line"):
                line_num = int(line.attrib["number"])
                if line_num in diff_lines[filename]:
                    new_lines.append(line)

            if len(new_lines) > 0:
                new_cls = ET.Element("class", cls.attrib)
                new_cls.append(new_lines)
                new_classes.append(new_cls)

        package.remove(classes)
        package.append(new_classes)

    ET.indent(tree, space=" ")
    tree.write(output_xml, encoding="utf-8", xml_declaration=True)
    print(f"Generated diff coverage file: {output_xml}")


if __name__ == "__main__":
    if len(sys.argv) != 3:
        print("Usage: python generate_diff_coverage_xml.py diff.txt coverage.xml")
        sys.exit(1)

    diff_path = sys.argv[1]
    coverage_path = sys.argv[2]
    output_path = "diff_coverage.xml"

    diff_lines = get_changed_lines_from_file(diff_path)
    generate_diff_coverage(coverage_path, diff_lines, output_path)
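get_changed_lines_from_file reads only the "+++ b/<path>" file headers and the "+start,count" half of each "@@" hunk header, which is why the workflow produces diff.txt with --unified=0. A minimal sketch with a hand-made diff fragment (the module path is hypothetical):

# Sketch only: a hand-made diff fragment to show what the parser extracts.
cat > diff.txt <<'EOF'
+++ b/fastdeploy/example_module.py
@@ -10,2 +12,3 @@ def example():
EOF
# The "+12,3" half marks lines 12, 13 and 14 of fastdeploy/example_module.py as changed;
# the "-10,2" half is ignored, and an omitted count (e.g. "+15") defaults to a single line.

# python_coverage_all.xml must already exist (it is produced by "coverage xml" in the workflow).
python scripts/generate_diff_coverage_xml.py diff.txt python_coverage_all.xml   # writes diff_coverage.xml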