Bugfix test exception (#4171)

* feat(log):add_request_and_response_log

* modify default error type
This commit is contained in:
xiaolei373
2025-09-19 11:48:49 +08:00
committed by GitHub
parent c3b8ebeb18
commit ddf5606263
4 changed files with 18 additions and 12 deletions

View File

@@ -87,7 +87,7 @@ class OpenAIServingChat:
                 f"Only master node can accept completion request, please send request to master node: {self.master_ip}"
             )
             api_server_logger.error(err_msg)
-            return ErrorResponse(error=ErrorInfo(message=err_msg, type=ErrorType.SERVER_ERROR))
+            return ErrorResponse(error=ErrorInfo(message=err_msg, type=ErrorType.INTERNAL_ERROR))
         if self.models:
             is_supported, request.model = self.models.is_supported_model(request.model)
@@ -95,7 +95,7 @@ class OpenAIServingChat:
                 err_msg = f"Unsupported model: [{request.model}], support [{', '.join([x.name for x in self.models.model_paths])}] or default"
                 api_server_logger.error(err_msg)
                 return ErrorResponse(
-                    error=ErrorInfo(message=err_msg, type=ErrorType.SERVER_ERROR, code=ErrorCode.MODEL_NOT_SUPPORT)
+                    error=ErrorInfo(message=err_msg, type=ErrorType.INTERNAL_ERROR, code=ErrorCode.MODEL_NOT_SUPPORT)
                 )
         try:
@@ -145,7 +145,7 @@ class OpenAIServingChat:
             except Exception as e:
                 error_msg = f"request[{request_id}]full generator error: {str(e)}, {str(traceback.format_exc())}"
                 api_server_logger.error(error_msg)
-                return ErrorResponse(error=ErrorInfo(message=error_msg, type=ErrorType.SERVER_ERROR))
+                return ErrorResponse(error=ErrorInfo(message=error_msg, type=ErrorType.INTERNAL_ERROR))
         except Exception as e:
             error_msg = (
                 f"request[{request_id}] waiting error: {str(e)}, {str(traceback.format_exc())}, "
@@ -158,7 +158,7 @@ class OpenAIServingChat:
     def _create_streaming_error_response(self, message: str) -> str:
         api_server_logger.error(message)
-        error_response = ErrorResponse(error=ErrorInfo(message=message, type=ErrorType.SERVER_ERROR))
+        error_response = ErrorResponse(error=ErrorInfo(message=message, type=ErrorType.INTERNAL_ERROR))
         return error_response.model_dump_json()

     async def chat_completion_stream_generator(

View File

@@ -64,14 +64,14 @@ class OpenAIServingCompletion:
                 f"Only master node can accept completion request, please send request to master node: {self.master_ip}"
             )
             api_server_logger.error(err_msg)
-            return ErrorResponse(error=ErrorInfo(message=err_msg, type=ErrorType.SERVER_ERROR))
+            return ErrorResponse(error=ErrorInfo(message=err_msg, type=ErrorType.INTERNAL_ERROR))
         if self.models:
             is_supported, request.model = self.models.is_supported_model(request.model)
             if not is_supported:
                 err_msg = f"Unsupported model: [{request.model}], support [{', '.join([x.name for x in self.models.model_paths])}] or default"
                 api_server_logger.error(err_msg)
                 return ErrorResponse(
-                    error=ErrorInfo(message=err_msg, type=ErrorType.SERVER_ERROR, code=ErrorCode.MODEL_NOT_SUPPORT)
+                    error=ErrorInfo(message=err_msg, type=ErrorType.INTERNAL_ERROR, code=ErrorCode.MODEL_NOT_SUPPORT)
                 )
         created_time = int(time.time())
         if request.user is not None:
@@ -115,7 +115,7 @@ class OpenAIServingCompletion:
             except Exception as e:
                 error_msg = f"OpenAIServingCompletion create_completion: {e}, {str(traceback.format_exc())}"
                 api_server_logger.error(error_msg)
-                return ErrorResponse(error=ErrorInfo(message=error_msg, type=ErrorType.SERVER_ERROR))
+                return ErrorResponse(error=ErrorInfo(message=error_msg, type=ErrorType.INTERNAL_ERROR))
             if request_prompt_ids is not None:
                 request_prompts = request_prompt_ids
@@ -189,12 +189,12 @@ class OpenAIServingCompletion:
                     f"OpenAIServingCompletion completion_full_generator error: {e}, {str(traceback.format_exc())}"
                 )
                 api_server_logger.error(error_msg)
-                return ErrorResponse(error=ErrorInfo(message=error_msg, type=ErrorType.SERVER_ERROR))
+                return ErrorResponse(error=ErrorInfo(message=error_msg, type=ErrorType.INTERNAL_ERROR))
         except Exception as e:
             error_msg = f"OpenAIServingCompletion create_completion error: {e}, {str(traceback.format_exc())}"
             api_server_logger.error(error_msg)
-            return ErrorResponse(error=ErrorInfo(message=error_msg, type=ErrorType.SERVER_ERROR))
+            return ErrorResponse(error=ErrorInfo(message=error_msg, type=ErrorType.INTERNAL_ERROR))

     async def completion_full_generator(
         self,

View File

@@ -87,7 +87,7 @@ class OpenAIServingModels:
                 f"Only master node can accept models request, please send request to master node: {self.master_ip}"
             )
             api_server_logger.error(err_msg)
-            return ErrorResponse(error=ErrorInfo(message=err_msg, type=ErrorType.SERVER_ERROR))
+            return ErrorResponse(error=ErrorInfo(message=err_msg, type=ErrorType.INTERNAL_ERROR))
         model_infos = [
             ModelInfo(
                 id=model.name, max_model_len=self.max_model_len, root=model.model_path, permission=[ModelPermission()]

View File

@@ -8,6 +8,10 @@ from fastapi.responses import JSONResponse
 from fastdeploy.utils import ErrorCode, ExceptionHandler, ParameterError


+class DummyRequest:
+    url = "http://testserver/test"
+
+
 class TestParameterError(unittest.TestCase):
     def test_parameter_error_init(self):
         exc = ParameterError("param1", "error message")
@@ -30,7 +34,8 @@ class TestExceptionHandler(unittest.IsolatedAsyncioTestCase):
     async def test_handle_request_validation_missing_messages(self):
         """缺少 messages 参数时,应返回 missing_required_parameter"""
         exc = RequestValidationError([{"loc": ("body", "messages"), "msg": "Field required", "type": "missing"}])
-        resp: JSONResponse = await ExceptionHandler.handle_request_validation_exception(None, exc)
+        dummy_request = DummyRequest()
+        resp: JSONResponse = await ExceptionHandler.handle_request_validation_exception(dummy_request, exc)
         data = json.loads(resp.body.decode())
         self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST)
         self.assertEqual(data["error"]["param"], "messages")
@@ -42,7 +47,8 @@ class TestExceptionHandler(unittest.IsolatedAsyncioTestCase):
         exc = RequestValidationError(
             [{"loc": ("body", "top_p"), "msg": "Input should be less than or equal to 1", "type": "value_error"}]
         )
-        resp: JSONResponse = await ExceptionHandler.handle_request_validation_exception(None, exc)
+        dummy_request = DummyRequest()
+        resp: JSONResponse = await ExceptionHandler.handle_request_validation_exception(dummy_request, exc)
         data = json.loads(resp.body.decode())
         self.assertEqual(resp.status_code, HTTPStatus.BAD_REQUEST)
         self.assertEqual(data["error"]["param"], "top_p")