# NOTE: removed web-page extraction artifact ("代码拉取完成,页面将自动刷新" — "code pull
# complete, page will auto-refresh"); it is not part of the Dockerfile and would
# break the build if left before FROM.
# Base: Huawei Ascend CANN 8.0.0 toolkit image on openEuler 22.03 with Python 3.10,
# built for Atlas 910B NPUs. Explicitly tagged (no :latest) for reproducibility.
FROM quay.io/ascend/cann:8.0.0-910b-openeuler22.03-py3.10
# OS packages: git/wget are needed for the source fetches below; vim/net-tools are
# debugging conveniences. Metadata/cache cleanup happens in the SAME layer that
# created it — a later `rm -rf` would not shrink the image.
# NOTE(review): `yum update -y` upgrades every installed package (hadolint DL3005);
# kept for parity with the original, but prefer bumping the base-image tag instead.
RUN yum update -y && \
    yum install -y \
        git \
        net-tools \
        python3-pip \
        vim \
        wget && \
    yum clean all && \
    rm -rf /var/cache/yum /tmp/*
# Install vLLM from source at a pinned tag. VLLM_TARGET_DEVICE="empty" builds the
# pure-Python package with no device backend — the Ascend backend is supplied by
# vllm-ascend below.
ARG VLLM_REPO=https://github.com/vllm-project/vllm.git
ARG VLLM_TAG=v0.7.3
ARG VLLM_ASCEND_REPO=https://github.com/vllm-project/vllm-ascend.git
ARG VLLM_ASCEND_TAG=v0.7.3rc2
# --depth 1: only the tagged commit is needed; a full-history clone just bloats the layer.
RUN git clone --depth 1 $VLLM_REPO --branch $VLLM_TAG /workspace/vllm
# --extra-index-url spelled in full ("--extra-index" only worked as an option
# abbreviation) pulls CPU-only torch wheels; --no-cache-dir keeps the pip
# download cache out of the layer (hadolint DL3042).
RUN VLLM_TARGET_DEVICE="empty" python3 -m pip install --no-cache-dir /workspace/vllm/ --extra-index-url https://download.pytorch.org/whl/cpu/
# On x86, triton is installed as a vLLM dependency, but it does not work
# correctly on Ascend NPUs, so it must be removed here.
RUN python3 -m pip uninstall -y triton
# Shallow clone of the pinned vllm-ascend tag (ARGs declared with the vLLM ARGs above).
RUN git clone --depth 1 $VLLM_ASCEND_REPO --branch $VLLM_ASCEND_TAG /workspace/vllm-ascend
# Install vllm-ascend — the Ascend NPU backend plugin for vLLM.
# Full --extra-index-url flag name and --no-cache-dir (DL3042), matching the
# vLLM install step above.
RUN python3 -m pip install --no-cache-dir /workspace/vllm-ascend/ --extra-index-url https://download.pytorch.org/whl/cpu/
# Install torch-npu via the helper script shipped inside the vllm-ascend repo.
# NOTE(review): opaque vendor script — presumably selects the torch-npu wheel
# matching the CANN 8.0.0 base image; verify against the vllm-ascend v0.7.3rc2
# release notes. Must run AFTER the vllm-ascend clone above.
RUN bash /workspace/vllm-ascend/pta_install.sh
# Runtime conveniences: modelscope (fast model downloads) and ray (multi-node
# serving). --no-cache-dir keeps the pip cache out of the layer (DL3042).
# NOTE(review): versions are unpinned (hadolint DL3013) — pin them if
# reproducible builds are required.
RUN python3 -m pip install --no-cache-dir modelscope ray
# Interactive image: default to a shell (exec form so it runs as PID 1, DL3025).
CMD ["/bin/bash"]
# NOTE: removed trailing web-page extraction artifact (a Gitee content-moderation
# notice in Chinese); it is not part of the Dockerfile, which ends at CMD above.