24 changes: 24 additions & 0 deletions docker/Dockerfile
@@ -1,9 +1,12 @@
ARG CUDA_VERSION=12.9.1
FROM nvidia/cuda:${CUDA_VERSION}-cudnn-devel-ubuntu22.04 AS base
ARG TARGETARCH

ARG BUILD_TYPE=all
ARG BRANCH_TYPE=remote
ARG DEEPEP_COMMIT=9af0e0d0e74f3577af1979c9b9e1ac2cad0104ee
ARG FLASHMLA_COMMIT=1408756a88e52a25196b759eaf8db89d2b51b5a1
ARG FAST_HADAMARD_TRANSFORM_COMMIT=f3cdeed95b0f3284b5df3da9b3311d3d0600ce2b
ARG CMAKE_BUILD_PARALLEL_LEVEL=2
ARG SGL_KERNEL_VERSION=0.3.12
ENV DEBIAN_FRONTEND=noninteractive \
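The new build arguments expose the FlashMLA and fast-hadamard-transform pins (plus the sgl-kernel version and compile parallelism) as overridable values. A minimal sketch of overriding them at build time, assuming the image is built from the repository root and tagged sglang:dev (the tag and commit value are illustrative, not taken from this PR):

```bash
# Override the pinned FlashMLA revision and raise the compile parallelism.
# <flashmla-commit-sha> is a placeholder, not a value from this PR.
docker build \
  -f docker/Dockerfile \
  --build-arg FLASHMLA_COMMIT=<flashmla-commit-sha> \
  --build-arg CMAKE_BUILD_PARALLEL_LEVEL=8 \
  -t sglang:dev .
```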
@@ -136,6 +139,27 @@ RUN cd /sgl-workspace/DeepEP && \
esac && \
NVSHMEM_DIR=${NVSHMEM_DIR} TORCH_CUDA_ARCH_LIST="${CHOSEN_TORCH_CUDA_ARCH_LIST}" pip install .

# Install flashmla
RUN if [ "$TARGETARCH" = "amd64" ]; then \
git clone https://github.com/deepseek-ai/FlashMLA.git flash-mla && \
cd flash-mla && \
git checkout ${FLASHMLA_COMMIT} && \
git submodule update --init --recursive && \
pip install -v . ; \
fi

# Install fast-hadamard-transform
RUN if [ "$TARGETARCH" = "amd64" ]; then \
git clone https://github.com/Dao-AILab/fast-hadamard-transform && \
cd fast-hadamard-transform && \
git checkout ${FAST_HADAMARD_TRANSFORM_COMMIT} && \
pip install . ; \
fi

# Install tilelang
RUN if [ "$TARGETARCH" = "amd64" ]; then \
pip install tilelang==0.1.6.post1 ; \
fi

# Python tools
RUN python3 -m pip install --no-cache-dir \
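Once built, the amd64-only additions can be sanity-checked with a short import probe. This is a sketch, assuming the sglang:dev tag from the build example above and that the three projects expose the module names flash_mla, fast_hadamard_transform, and tilelang (the module names are assumptions, not taken from this diff):

```bash
# Import the newly installed kernels inside the image; requires the NVIDIA
# container runtime so the CUDA extensions can load.
docker run --rm --gpus all sglang:dev \
  python3 -c "import flash_mla, fast_hadamard_transform, tilelang; print('kernel extras OK')"
```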