* multi-source download
* huggingface download revision
* add revision arg
* change default download
* change requirements.txt / add requirements
* modify English documentation
* modify model download path
* error optimization
* fall back gracefully on connection failure
* unit tests
* style / pre-commit fixes
71 lines
2.2 KiB
Python
import os
import unittest

from fastdeploy.utils import retrive_model_from_server


class TestAistudioDownload(unittest.TestCase):
    """
    Test cases for downloading models from different sources using FastDeploy utilities.
    """

    def test_retrive_model_from_server_unsupported_source(self):
        """
        Test case for retrieving a model from an unsupported source.
        """
        os.environ["FD_MODEL_SOURCE"] = "UNSUPPORTED_SOURCE"
        os.environ["FD_MODEL_CACHE"] = "./models"

        model_name_or_path = "baidu/ERNIE-4.5-0.3B-PT"
        with self.assertRaises(ValueError):
            retrive_model_from_server(model_name_or_path)

        os.environ.clear()

    def test_retrive_model_from_modelscope_server_model_not_exist(self):
        """
        Test case for retrieving a model from ModelScope server when it doesn't exist.
        """
        os.environ["FD_MODEL_SOURCE"] = "MODELSCOPE"
        os.environ["FD_MODEL_CACHE"] = "./model"

        model_name_or_path = "non_existing_model_modelscope"

        with self.assertRaises(Exception):
            retrive_model_from_server(model_name_or_path)

        os.environ.clear()

    def test_retrive_model_from_huggingface_server_model_not_exist(self):
        """
        Test case for retrieving a model from Hugging Face server when it doesn't exist.
        """
        os.environ["FD_MODEL_SOURCE"] = "HUGGINGFACE"
        os.environ["FD_MODEL_CACHE"] = "./models"

        model_name_or_path = "non_existing_model_hf"

        with self.assertRaises(Exception):
            retrive_model_from_server(model_name_or_path)

        os.environ.clear()

    def test_retrive_model_from_aistudio_server_(self):
        """
        Test case for retrieving a model from AI Studio server.
        """
        os.environ["FD_MODEL_SOURCE"] = "AISTUDIO"
        os.environ["FD_MODEL_CACHE"] = "./models"

        model_name_or_path = "baidu/ERNIE-4.5-0.3B-PT"
        revision = "aaa"
        expected_path = "./models/PaddlePaddle/ERNIE-4.5-0.3B-PT"
        result = retrive_model_from_server(model_name_or_path, revision)
        self.assertEqual(expected_path, result)

        os.environ.clear()


if __name__ == "__main__":
    unittest.main()
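
For context, a minimal usage sketch of the function exercised above, inferred from these test cases only; the model name and cache directory are the same placeholder values the tests use, and the behavior noted in the comments is limited to what the assertions check.

import os

from fastdeploy.utils import retrive_model_from_server

# Select the download source and local cache directory via environment
# variables, as the tests above do ("AISTUDIO", "MODELSCOPE" or "HUGGINGFACE";
# any other FD_MODEL_SOURCE value raises ValueError).
os.environ["FD_MODEL_SOURCE"] = "AISTUDIO"
os.environ["FD_MODEL_CACHE"] = "./models"

# Returns the local path of the downloaded model; the AI Studio test expects
# "./models/PaddlePaddle/ERNIE-4.5-0.3B-PT" for this model name. A revision
# can also be passed as a second positional argument, as the AI Studio test does.
local_path = retrive_model_from_server("baidu/ERNIE-4.5-0.3B-PT")
print(local_path)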