"""
# Copyright (c) 2025 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
import os
import re
import subprocess
import sys
from pathlib import Path

import paddle
from setuptools import Extension, find_packages, setup
from setuptools.command.build_ext import build_ext
from setuptools.command.install import install
from wheel.bdist_wheel import bdist_wheel

long_description = "FastDeploy: Large Language Model Serving.\n\n"
long_description += "GitHub: https://github.com/PaddlePaddle/FastDeploy\n"
long_description += "Email: dltp@baidu.com"

# Platform to CMake mapping
PLAT_TO_CMAKE = {
    "win32": "Win32",
    "win-amd64": "x64",
    "win-arm32": "ARM",
    "win-arm64": "ARM64",
}
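
# The mapping above translates setuptools platform names into the architecture
# values passed to CMake's "-A" option for MSVC generators (see CMakeBuild below).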


class CustomBdistWheel(bdist_wheel):
    """Custom wheel builder for pure Python packages."""

    def finalize_options(self):
        """Configure wheel as pure Python and platform-independent."""
        super().finalize_options()
        self.root_is_pure = True
        self.python_tag = "py3"
        self.abi_tag = "none"
        self.plat_name_supplied = True
        self.plat_name = "any"
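
# With the tags above, the built wheel carries the "py3-none-any" suffix,
# e.g. (illustrative filename, assuming the default version below and a
# CUDA build of paddle):
#   fastdeploy_gpu-2.1.0-py3-none-any.whl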


class CMakeExtension(Extension):
    """A setuptools Extension for CMake-based builds."""

    def __init__(self, name: str, sourcedir: str = "", version: str = None) -> None:
        """
        Initialize CMake extension.

        Args:
            name (str): Name of the extension.
            sourcedir (str): Source directory path.
            version (str): Optional version string (set to None to disable version info).
        """
        super().__init__(name, sources=[])
        self.sourcedir = os.fspath(Path(sourcedir).resolve())
        self.version = version
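
# The extension declares no Python sources; compilation is delegated entirely
# to CMake via the CMakeBuild command below. A minimal, illustrative usage
# (hypothetical names, mirroring the rdma_comm extension declared in setup()):
#   CMakeExtension("my_ext", sourcedir="path/to/cmake/project", version=None)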


class CMakeBuild(build_ext):
    """Custom build_ext command using CMake."""

    def get_ext_filename(self, ext_name):
        """Remove the Python version tag from the extension filename."""
        return ext_name.split(".")[0] + ".so"

    def build_extension(self, ext: CMakeExtension) -> None:
        """
        Build the CMake extension.

        Args:
            ext (CMakeExtension): The extension to build.
        """
        ext_fullpath = Path.cwd() / self.get_ext_fullpath(ext.name)
        extdir = ext_fullpath.parent.resolve()
        cfg = "Debug" if int(os.environ.get("DEBUG", 0)) else "Release"

        cmake_args = [
            f"-DCMAKE_LIBRARY_OUTPUT_DIRECTORY={extdir}{os.sep}",
            f"-DPYTHON_EXECUTABLE={sys.executable}",
            f"-DCMAKE_BUILD_TYPE={cfg}",
            "-DVERSION_INFO=",
            "-DPYBIND11_PYTHON_VERSION=",
            "-DPYTHON_VERSION=",
            f"-DPYTHON_INCLUDE_DIR={sys.prefix}/include/python{sys.version_info.major}.{sys.version_info.minor}",
            f"-DPYTHON_LIBRARY={sys.prefix}/lib/libpython{sys.version_info.major}.{sys.version_info.minor}.so",
        ]
        build_args = []

        cmake_generator = os.environ.get("CMAKE_GENERATOR", "")
        if self.compiler.compiler_type != "msvc":
            # Prefer Ninja when it is available and no other generator was requested.
            if not cmake_generator or cmake_generator == "Ninja":
                try:
                    import ninja

                    ninja_executable_path = Path(ninja.BIN_DIR) / "ninja"
                    cmake_args += [
                        "-GNinja",
                        f"-DCMAKE_MAKE_PROGRAM:FILEPATH={ninja_executable_path}",
                    ]
                except ImportError:
                    pass
        else:
            # MSVC: pick the target architecture unless an NMake/Ninja generator is in use.
            if "NMake" not in cmake_generator and "Ninja" not in cmake_generator:
                cmake_args += ["-A", PLAT_TO_CMAKE[self.plat_name]]
            if "NMake" not in cmake_generator and "Ninja" not in cmake_generator:
                cmake_args += [f"-DCMAKE_LIBRARY_OUTPUT_DIRECTORY_{cfg.upper()}={extdir}"]
                build_args += ["--config", cfg]

        if sys.platform.startswith("darwin"):
            # Respect ARCHFLAGS on macOS (e.g. when cross-compiling for arm64/x86_64).
            archs = re.findall(r"-arch (\S+)", os.environ.get("ARCHFLAGS", ""))
            if archs:
                cmake_args += ["-DCMAKE_OSX_ARCHITECTURES={}".format(";".join(archs))]

        if "CMAKE_BUILD_PARALLEL_LEVEL" not in os.environ and hasattr(self, "parallel") and self.parallel:
            build_args += [f"-j{self.parallel}"]

        build_temp = Path(self.build_temp) / ext.name
        build_temp.mkdir(parents=True, exist_ok=True)

        subprocess.run(["cmake", ext.sourcedir, *cmake_args], cwd=build_temp, check=True)
        subprocess.run(["cmake", "--build", ".", *build_args], cwd=build_temp, check=True)


class PostInstallCommand(install):
    """Run custom commands after the standard installation finishes."""

    def run(self):
        # Run the standard installation steps first.
        install.run(self)
        # Then run the custom post-install command.
        subprocess.check_call(["opentelemetry-bootstrap", "-a", "install"])
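
# This command is registered below as "build_optl" (and only when ENABLE_FD_RDMA=1,
# since cmdclass is passed conditionally). When invoked it performs the standard
# install and then bootstraps OpenTelemetry instrumentation, roughly:
#   python setup.py build_optl
#   (post-step equivalent: opentelemetry-bootstrap -a install)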


def load_requirements():
    """Load dependencies from the requirements file matching the compiled device."""
    requirements_file_name = "requirements.txt"
    if paddle.is_compiled_with_custom_device("iluvatar_gpu"):
        requirements_file_name = "requirements_iluvatar.txt"
    elif paddle.is_compiled_with_rocm():
        requirements_file_name = "requirements_dcu.txt"
    requirements_path = os.path.join(os.path.dirname(__file__), requirements_file_name)
    with open(requirements_path, "r") as f:
        return [line.strip() for line in f if line.strip() and not line.startswith("#")]


def get_device_type():
    """Get the device type (rocm/gpu/xpu/npu/iluvatar-gpu/gcu/cpu) that paddle is compiled with."""
    if paddle.is_compiled_with_rocm():
        return "rocm"
    elif paddle.is_compiled_with_cuda():
        return "gpu"
    elif paddle.is_compiled_with_xpu():
        return "xpu"
    elif paddle.is_compiled_with_custom_device("npu"):
        return "npu"
    elif paddle.is_compiled_with_custom_device("iluvatar_gpu"):
        return "iluvatar-gpu"
    elif paddle.is_compiled_with_custom_device("gcu"):
        return "gcu"
    else:
        return "cpu"


def get_name():
    """Get the package name, suffixed with the device type."""
    return "fastdeploy-" + get_device_type()


cmdclass_dict = {"bdist_wheel": CustomBdistWheel}
cmdclass_dict["build_ext"] = CMakeBuild
cmdclass_dict["build_optl"] = PostInstallCommand

FASTDEPLOY_VERSION = os.environ.get("FASTDEPLOY_VERSION", "2.1.0")

setup(
    name=get_name(),
    version=FASTDEPLOY_VERSION,
    author="PaddlePaddle",
    author_email="dltp@baidu.com",
    description="FastDeploy: Large Language Model Serving.",
    long_description=long_description,
    long_description_content_type="text/plain",
    url="https://github.com/PaddlePaddle/FastDeploy",
    packages=find_packages(),
    package_dir={"fastdeploy": "fastdeploy/"},
    package_data={
        "fastdeploy": [
            "model_executor/ops/gpu/*",
            "model_executor/ops/gpu/deep_gemm/include/**/*",
            "model_executor/ops/cpu/*",
            "model_executor/ops/xpu/*",
            "model_executor/ops/xpu/libs/*",
            "model_executor/ops/npu/*",
            "model_executor/ops/base/*",
            "model_executor/ops/iluvatar/*",
            "model_executor/models/*",
            "model_executor/layers/*",
            "input/mm_processor/utils/*",
            "model_executor/ops/gcu/*",
            "version.txt",
        ]
    },
    install_requires=load_requirements(),
    ext_modules=(
        [
            CMakeExtension(
                "rdma_comm",
                sourcedir="fastdeploy/cache_manager/transfer_factory/kvcache_transfer",
                version=None,
            )
        ]
        if os.getenv("ENABLE_FD_RDMA", "0") == "1"
        else []
    ),
    cmdclass=cmdclass_dict if os.getenv("ENABLE_FD_RDMA", "0") == "1" else {},
    zip_safe=False,
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: OS Independent",
    ],
    license="Apache 2.0",
    python_requires=">=3.7",
    extras_require={"test": ["pytest>=6.0"]},
)
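
# Typical build invocations (illustrative; env vars are the ones read above):
#   FASTDEPLOY_VERSION=<version> python setup.py bdist_wheel
#   ENABLE_FD_RDMA=1 python setup.py bdist_wheel   # also builds the rdma_comm CMake extension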