diff --git a/vllm-0.6.6.post1.tar.gz b/vllm-0.7.3.tar.gz
similarity index 38%
rename from vllm-0.6.6.post1.tar.gz
rename to vllm-0.7.3.tar.gz
index f0884126b4a05cb81c1825f94ee1b1ef5c6834a9..d58972a9f9feba5a553789bb581fc98945998cc5 100644
Binary files a/vllm-0.6.6.post1.tar.gz and b/vllm-0.7.3.tar.gz differ
diff --git a/vllm.spec b/vllm.spec
index cc5f9d67e618e67d32aae5998d94b5190f926481..df5f0c2d9ed4f41bdc5233ac3a70056d90d9fc33 100644
--- a/vllm.spec
+++ b/vllm.spec
@@ -1,9 +1,10 @@
+%global _pip_huawei_install pip install --trusted-host mirrors.huaweicloud.com -i https://mirrors.huaweicloud.com/repository/pypi/simple
 %global _name vllm
 %global _description A high-throughput and memory-efficient inference and serving engine for LLMs
 
 Name: vllm
-Version: 0.6.6.post1
-Release: 2
+Version: 0.7.3
+Release: 1
 Summary: Powerful engine for LLMs
 License: (Apache-2.0 AND BSD-3-Clause) OR BSD-3-CLause
 URL: https://github.com/vllm-project/vllm
@@ -16,8 +17,8 @@ BuildArch: noarch
 
 %package -n python3-%{_name}
 Summary: %{summary}
-Buildrequires: cmake python3-pip python3-devel python3-setuptools python3-pytest
-Buildrequires: python3-setuptools_scm python3-wheel python3-grpcio
+Buildrequires: cmake python3-pip python3-devel python3-pytest
+Buildrequires: python3-wheel python3-grpcio
 Buildrequires: python3-pytorch
 
 %{?python_provide:%python_provide python3-%{_name}}
@@ -28,6 +29,8 @@ Buildrequires: python3-pytorch
 
 %prep
 %autosetup -n %{name}-%{version} -N
+%{_pip_huawei_install} setuptools==68.0.0
+%{_pip_huawei_install} setuptools-scm==6.0.0
 
 %build
 export SETUPTOOLS_SCM_PRETEND_VERSION=%{version}
@@ -69,6 +72,9 @@ mv %{buildroot}/filelist.lst .
 %files -n python3-%{_name} -f filelist.lst
 
 %changelog
+* Tue Apr 8 2025 renwenjie - 0.7.3-1
+- Change the baseline version to 0.7.3
+
 * Thu Mar 27 2025 renwenjie - 0.6.6.post1-2
 - solve 'no module named vllm._version' problem and adapted for openeuler22.03