Mirror of https://github.com/PaddlePaddle/FastDeploy.git (synced 2025-09-26 20:41:53 +08:00)
fix test name (#3493)
* fix test name * update * update * fix * fix * update * update * update * update * update * fix * update
@@ -1,104 +1,85 @@
#!/bin/bash
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
echo "$DIR"

run_path="$DIR/../tests/"
cd ${run_path}
ls

exclude=("ci_use" "ce")
for d in */ ; do
    dir_name="${d%/}"
    if [[ -d "$dir_name" ]]; then
        skip=false
        for ex in "${exclude[@]}"; do
            if [[ "$dir_name" == "$ex" ]]; then
                skip=true
                break
            fi
        done
        if ! $skip; then
            dirs+=("$dir_name")
        fi
    fi
done
export PYTEST_INI="$DIR/../tests/pytest.ini"
cd "$run_path" || exit 1

failed_tests_file="failed_tests.log"
> "$failed_tests_file"
disabled_tests=(
    layers/test_append_attention.py
    layers/test_attention.py
    operators/test_rejection_top_p_sampling.py
    operators/test_perchannel_gemm.py
    operators/test_scaled_gemm_f8_i4_f16.py
    operators/test_topp_sampling.py
    operators/test_stop_generation.py
    operators/test_air_topp_sampling.py
    operators/test_fused_moe.py
    operators/test_stop_generation_multi_ends.py
    graph_optimization/test_cuda_graph.py
)

##################################
# Run special unit-test cases (not in unittest/pytest format)
##################################
special_tests=(
    "graph_optimization/test_cuda_graph_dynamic_subgraph.py"
    "graph_optimization/test_cuda_graph_spec_decode.py"
    "layers/test_quant_layer.py"
    "operators/test_token_penalty.py"
    "operators/test_split_fuse.py"
    "operators/test_flash_mask_attn.py"
    "operators/test_w4afp8_gemm.py"
    "model_loader/test_load_ernie_vl.py"
    "operators/test_tree_mask.py"
)
is_disabled() {
    local test_file_rel="$1"
    for disabled in "${disabled_tests[@]}"; do
        if [[ "$test_file_rel" == "$disabled" ]]; then
            return 0

failed_special=0
success_special=0

for test_file in "${special_tests[@]}"; do
    if [ -f "$test_file" ]; then
        echo "Running special test: $test_file"
        python -m coverage run --parallel-mode "$test_file"
        status=$?
        if [ "$status" -ne 0 ]; then
            echo "$test_file" >> "$failed_tests_file"
            failed_special=$((failed_special+1))
        else
            success_special=$((success_special+1))
        fi
    else
        echo "Warning: $test_file not found"
        failed_special=$((failed_special+1))
    fi
done
    return 1
}

total=0
fail=0
success=0

for dir in "${dirs[@]}"; do
    if [ -d "$dir" ]; then
        echo "Running tests in directory: $dir"
        while IFS= read -r -d '' test_file; do
            total=$((total + 1))
            echo "Running $test_file"

            if is_disabled "$test_file"; then
                echo "Skipping disabled test: $test_file"
                continue
            fi
            # TODO: Add a framework to manage unit test execution time
            timeout 600 python -m coverage run "$test_file"
            if [ $? -ne 0 ]; then
                echo "$test_file" >> "$failed_tests_file"
                fail=$((fail + 1))

                PORTS=($FLASK_PORT $FD_API_PORT $FD_ENGINE_QUEUE_PORT $FD_METRICS_PORT)
                echo "==== PORT CLEAN AFTER UT FAILED ===="

                for port in "${PORTS[@]}"; do
                    PIDS=$(lsof -t -i :$port)
                    if [ -n "$PIDS" ]; then
                        echo "Port $port is occupied by PID(s): $PIDS"
                        echo "$PIDS" | xargs -r kill -9
                        echo "Port $port cleared"
                    else
                        echo "Port $port is free"
                    fi
                done
            else
                success=$((success + 1))
            fi
        done < <(find "$dir" -type f -name "test_*.py" -print0)
    else
        echo "Directory $dir not found, skipping."
    fi
done

echo "===================================="
|
||||
echo "Total test files run: $total"
|
||||
echo "Successful tests: $success"
|
||||
echo "Failed tests: $fail"
|
||||
echo "Failed test cases are listed in $failed_tests_file"
|
||||
##################################
|
||||
# 执行 pytest,每个文件单独跑
|
||||
##################################
|
||||
# 收集 pytest 文件
|
||||
TEST_FILES=$(python -m pytest --collect-only -q -c pytest.ini --disable-warnings | grep -Eo '^.*test_.*\.py' | sort | uniq)
|
||||
|
||||
if [ "$fail" -ne 0 ]; then
|
||||
echo "Failed test cases:"
|
||||
cat "$failed_tests_file"
|
||||
exit 8
|
||||
|
||||
failed_pytest=0
success_pytest=0

for file in $TEST_FILES; do
    echo "Running pytest file: $file"
    python -m coverage run --parallel-mode -m pytest "$file"
    status=$?
    if [ "$status" -ne 0 ]; then
        echo "$file" >> "$failed_tests_file"
        failed_pytest=$((failed_pytest+1))
    else
        success_pytest=$((success_pytest+1))
    fi
done

##################################
# Summarize results
##################################
echo "===================================="
echo "Pytest total: $((failed_pytest + success_pytest))"
echo "Pytest successful: $success_pytest"
echo "Pytest failed: $failed_pytest"

echo "Special tests total: ${#special_tests[@]}"
echo "Special tests successful: $success_special"
echo "Special tests failed: $failed_special"

if [ "$failed_pytest" -ne 0 ] || [ "$failed_special" -ne 0 ]; then
    echo "Failed test cases are listed in $failed_tests_file"
    cat "$failed_tests_file"
    exit 8
fi

echo "All tests passed!"

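Two notes on the script above, with illustrative commands that are not part of this commit.

First, both the special-test loop and the per-file pytest loop run under python -m coverage run --parallel-mode, which leaves one .coverage.* data file per process; the excerpt never merges them. A minimal follow-up sketch, assuming coverage.py defaults and the same tests/ working directory:

    # Sketch only: merge the parallel-mode data files and emit a report.
    python -m coverage combine        # merges .coverage.* into a single .coverage file
    python -m coverage report -m      # per-file summary with missing line numbers

Second, the failure branch clears FLASK_PORT, FD_API_PORT, FD_ENGINE_QUEUE_PORT and FD_METRICS_PORT by piping every PID that lsof reports into kill -9. A hedged variant that also skips the runner's own shell and its parent, mirroring the current/parent-PID guard this commit adds to kill_process_on_port in the Python fixtures below, could look like:

    # Sketch only: same port cleanup, but never kill this script ($$) or its parent ($PPID).
    for port in "${PORTS[@]}"; do
        for pid in $(lsof -t -i :"$port"); do
            if [ "$pid" = "$$" ] || [ "$pid" = "$PPID" ]; then
                echo "Skipping own process (pid=$pid) on port $port"
                continue
            fi
            kill -9 "$pid" && echo "Killed pid $pid on port $port"
        done
    done
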
@@ -14,6 +14,7 @@

import os
import re
import shutil
import signal
import socket
import subprocess
@@ -52,8 +53,14 @@ def kill_process_on_port(port: int):
    """
    try:
        output = subprocess.check_output(f"lsof -i:{port} -t", shell=True).decode().strip()
        current_pid = os.getpid()
        parent_pid = os.getppid()
        for pid in output.splitlines():
            os.kill(int(pid), signal.SIGKILL)
            pid = int(pid)
            if pid in (current_pid, parent_pid):
                print(f"Skip killing current process (pid={pid}) on port {port}")
                continue
            os.kill(pid, signal.SIGKILL)
            print(f"Killed process on port {port}, pid={pid}")
    except subprocess.CalledProcessError:
        pass
@@ -65,6 +72,7 @@ def clean_ports():
    """
    for port in PORTS_TO_CLEAN:
        kill_process_on_port(port)
    time.sleep(2)


@pytest.fixture(scope="session", autouse=True)
@@ -78,7 +86,9 @@ def setup_and_run_server():
    """
    print("Pre-test port cleanup...")
    clean_ports()

    print("log dir clean ")
    if os.path.exists("log") and os.path.isdir("log"):
        shutil.rmtree("log")
    base_path = os.getenv("MODEL_PATH")
    if base_path:
        model_path = os.path.join(base_path, "ernie-4_5-21b-a3b-bf16-paddle")
@@ -15,6 +15,7 @@
import json
import os
import re
import shutil
import signal
import socket
import subprocess
@@ -53,8 +54,14 @@ def kill_process_on_port(port: int):
    """
    try:
        output = subprocess.check_output(f"lsof -i:{port} -t", shell=True).decode().strip()
        current_pid = os.getpid()
        parent_pid = os.getppid()
        for pid in output.splitlines():
            os.kill(int(pid), signal.SIGKILL)
            pid = int(pid)
            if pid in (current_pid, parent_pid):
                print(f"Skip killing current process (pid={pid}) on port {port}")
                continue
            os.kill(pid, signal.SIGKILL)
            print(f"Killed process on port {port}, pid={pid}")
    except subprocess.CalledProcessError:
        pass
@@ -66,6 +73,7 @@ def clean_ports():
    """
    for port in PORTS_TO_CLEAN:
        kill_process_on_port(port)
    time.sleep(2)


@pytest.fixture(scope="session", autouse=True)
@@ -79,6 +87,9 @@ def setup_and_run_server():
    """
    print("Pre-test port cleanup...")
    clean_ports()
    print("log dir clean ")
    if os.path.exists("log") and os.path.isdir("log"):
        shutil.rmtree("log")

    base_path = os.getenv("MODEL_PATH")
    if base_path:
@@ -150,6 +161,7 @@ def setup_and_run_server():
    try:
        os.killpg(process.pid, signal.SIGTERM)
        print(f"API server (pid={process.pid}) terminated")
        clean_ports()
    except Exception as e:
        print(f"Failed to terminate API server: {e}")

@@ -524,7 +536,8 @@ def test_chat_with_thinking(openai_client, capsys):
        stream=True,
        max_tokens=10,
    )
    completion_tokens = reasoning_tokens = 1
    completion_tokens = 1
    reasoning_tokens = 0
    total_tokens = 0
    for chunk_id, chunk in enumerate(response):
        if chunk_id == 0:  # the first chunk is an extra chunk
@@ -16,6 +16,7 @@ import concurrent.futures
import json
import os
import re
import shutil
import signal
import socket
import subprocess
@@ -55,8 +56,14 @@ def kill_process_on_port(port: int):
    """
    try:
        output = subprocess.check_output(f"lsof -i:{port} -t", shell=True).decode().strip()
        current_pid = os.getpid()
        parent_pid = os.getppid()
        for pid in output.splitlines():
            os.kill(int(pid), signal.SIGKILL)
            pid = int(pid)
            if pid in (current_pid, parent_pid):
                print(f"Skip killing current process (pid={pid}) on port {port}")
                continue
            os.kill(pid, signal.SIGKILL)
            print(f"Killed process on port {port}, pid={pid}")
    except subprocess.CalledProcessError:
        pass
@@ -68,6 +75,7 @@ def clean_ports():
    """
    for port in PORTS_TO_CLEAN:
        kill_process_on_port(port)
    time.sleep(2)


@pytest.fixture(scope="session", autouse=True)
@@ -82,6 +90,10 @@ def setup_and_run_server():
    print("Pre-test port cleanup...")
    clean_ports()

    print("log dir clean ")
    if os.path.exists("log") and os.path.isdir("log"):
        shutil.rmtree("log")

    base_path = os.getenv("MODEL_PATH")
    if base_path:
        model_path = os.path.join(base_path, "Qwen2-7B-Instruct")
@@ -139,6 +151,7 @@ def setup_and_run_server():
    print("\n===== Post-test server cleanup... =====")
    try:
        os.killpg(process.pid, signal.SIGTERM)
        clean_ports()
        print(f"API server (pid={process.pid}) terminated")
    except Exception as e:
        print(f"Failed to terminate API server: {e}")
tests/pytest.ini (new file, 27 lines)
@@ -0,0 +1,27 @@
[pytest]
# Directories and files to skip
addopts =
    --ignore=ci_use
    --ignore=ce
    --ignore=layers/test_append_attention.py
    --ignore=layers/test_attention.py
    --ignore=operators/test_rejection_top_p_sampling.py
    --ignore=operators/test_perchannel_gemm.py
    --ignore=operators/test_scaled_gemm_f8_i4_f16.py
    --ignore=operators/test_topp_sampling.py
    --ignore=operators/test_stop_generation.py
    --ignore=operators/test_air_topp_sampling.py
    --ignore=operators/test_fused_moe.py
    --ignore=operators/test_stop_generation_multi_ends.py
    --ignore=graph_optimization/test_cuda_graph.py
    --ignore=graph_optimization/test_cuda_graph_dynamic_subgraph.py
    --ignore=graph_optimization/test_cuda_graph_spec_decode
    --ignore=layers/test_quant_layer.py
    --ignore=operators/test_token_penalty.py
    --ignore=operators/test_split_fuse.py
    --ignore=operators/test_flash_mask_attn.py
    --ignore=operators/test_w4afp8_gemm.py
    --ignore=operators/test_tree_mask.py

# More detailed console output
console_output_style = progress
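For reference, the runner script earlier in this commit consumes this file when collecting and running tests; a minimal manual invocation, assuming it is executed from the tests/ directory, looks like:

    # Sketch only: list the files pytest would select under this config, then run one of them under coverage.
    python -m pytest --collect-only -q -c pytest.ini --disable-warnings
    python -m coverage run --parallel-mode -m pytest "$file"   # $file: one path from the collected list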