
Commit 0196cd0

[Enhancement]: Update dockerfile and add github release note (open-mmlab#1952)
* update dockerfile and add github release note
* fix typo in get_started
* fix comment
1 parent 8cab33c commit 0196cd0

5 files changed (+46, -7)


.github/release.yml (+32)

@@ -0,0 +1,32 @@
+changelog:
+  categories:
+    - title: 🚀 Features
+      labels:
+        - feature
+        - enhancement
+    - title: 💥 Improvements
+      labels:
+        - improvement
+    - title: 🐞 Bug fixes
+      labels:
+        - bug
+        - Bug:P0
+        - Bug:P1
+        - Bug:P2
+        - Bug:P3
+    - title: 📚 Documentations
+      labels:
+        - documentation
+    - title: 🌐 Other
+      labels:
+        - '*'
+      exclude:
+        labels:
+          - feature
+          - enhancement
+          - bug
+          - documentation
+          - Bug:P0
+          - Bug:P1
+          - Bug:P2
+          - Bug:P3
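
This release.yml drives GitHub's auto-generated release notes: merged pull requests are grouped into the sections above by label, and the catch-all 🌐 Other category excludes labels that already belong to a named section. As a rough usage sketch (the tag name below is a placeholder, not something defined by this commit), the config is picked up whenever release notes are generated, for example via the GitHub CLI:

# Draft a release whose notes are grouped according to .github/release.yml.
# "v1.0.0rc4" is a hypothetical tag used only for illustration.
gh release create v1.0.0rc4 --draft --generate-notes --title "MMDeploy v1.0.0rc4"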

.github/workflows/prebuild.yml (+1, -1)

@@ -61,7 +61,7 @@ jobs:
   linux_build_cxx11abi:
     runs-on: [self-hosted, linux-3090]
     container:
-      image: openmmlab/mmdeploy:build-ubuntu18.04-cuda11.3
+      image: openmmlab/mmdeploy:build-ubuntu16.04-cuda11.3
       options: "--gpus=all --ipc=host"
       volumes:
         - /data2/actions-runner/prebuild:/__w/mmdeploy/prebuild
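
The linux_build_cxx11abi job now runs inside the ubuntu16.04-based build image. If you need to confirm which C++ ABI a given toolchain defaults to (a general-purpose check, not something this commit adds), the libstdc++ macro can be inspected from the shell:

# Prints the value of _GLIBCXX_USE_CXX11_ABI for the default g++;
# <string> includes bits/c++config.h, where the macro is defined.
echo '#include <string>' | g++ -x c++ -E -dM - | grep _GLIBCXX_USE_CXX11_ABI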

docker/prebuild/Dockerfile (+11, -4)

@@ -27,9 +27,11 @@ WORKDIR /root/workspace
 ENV FORCE_CUDA="1"

 # install toolset
-RUN yum install devtoolset-${TOOLSET_VERSION}-gcc devtoolset-${TOOLSET_VERSION}-gcc-c++ -y
+RUN yum install centos-release-scl devtoolset-${TOOLSET_VERSION}-gcc* -y

-ENV PATH=/opt/rh/devtoolset-${TOOLSET_VERSION}/root/usr/bin:$PATH
+ENV TOOLSET_DIR=/opt/rh/devtoolset-${TOOLSET_VERSION}/root/usr
+ENV PATH=$TOOLSET_DIR/bin:$PATH
+ENV LD_LIBRARY_PATH=$TOOLSET_DIR/lib:$TOOLSET_DIR/lib64:/usr/local/lib64

 # install cuda cudnn
 RUN curl -fsSL -v -o ./cuda_install.run -O $CUDA_URL &&\
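
Installing centos-release-scl first makes the devtoolset packages resolvable from the SCL repository, and the new TOOLSET_DIR variables put the compiler and its runtime on PATH and LD_LIBRARY_PATH without an `scl enable` wrapper. A minimal smoke test for the resulting image might look like the following; the image tag is a placeholder, since this Dockerfile's tag is not named in the diff:

# <prebuild-image> stands for whatever tag docker/prebuild/Dockerfile is built as.
docker run --rm <prebuild-image> bash -lc \
  'gcc --version && g++ --version && echo "$LD_LIBRARY_PATH"'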
@@ -46,6 +48,10 @@ RUN curl -fsSL -v -o ./onnxruntime.tgz -O https://github.com/microsoft/onnxrunti
     tar -xzvf onnxruntime.tgz &&\
     rm onnxruntime.tgz &&\
     mv onnxruntime* /opt/onnxruntime &&\
+    curl -fsSL -v -o ./onnxruntime.tgz -O https://github.com/microsoft/onnxruntime/releases/download/v${ONNXRUNTIME_VERSION}/onnxruntime-linux-x64-gpu-${ONNXRUNTIME_VERSION}.tgz &&\
+    tar -xzvf onnxruntime.tgz &&\
+    rm onnxruntime.tgz &&\
+    mv onnxruntime* /opt/onnxruntime-gpu &&\
     curl -fsSL -v -o ./tensorrt.tgz -O $TENSORRT_URL &&\
     tar -xzvf ./tensorrt.tgz &&\
     rm -f ./tensorrt.tgz &&\
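
In addition to the CPU build under /opt/onnxruntime, the GPU build of ONNX Runtime is now unpacked to /opt/onnxruntime-gpu. A quick sanity check on the two trees (the exact library file names are the usual ONNX Runtime ones and may differ between releases):

# The GPU tree typically also ships the CUDA/TensorRT provider libraries.
ls /opt/onnxruntime/lib/ /opt/onnxruntime-gpu/lib/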
@@ -56,10 +62,11 @@ RUN curl -fsSL -v -o ./onnxruntime.tgz -O https://github.com/microsoft/onnxrunti
 ENV CUDA_TOOLKIT_ROOT_DIR=/usr/local/cuda
 ENV CUDNN_DIR=/opt/cudnn
 ENV ONNXRUNTIME_DIR=/opt/onnxruntime
+ENV ONNXRUNTIME_GPU_DIR=/opt/onnxruntime-gpu
 ENV TENSORRT_DIR=/opt/TensorRT

 ENV LD_LIBRARY_PATH=$CUDA_TOOLKIT_ROOT_DIR/lib64:$CUDNN_DIR/lib64:$LD_LIBRARY_PATH
-ENV LD_LIBRARY_PATH=${ONNXRUNTIME_DIR}/lib:$TENSORRT_DIR/lib:$LD_LIBRARY_PATH
+ENV LD_LIBRARY_PATH=${ONNXRUNTIME_GPU_DIR}/lib:$TENSORRT_DIR/lib:$LD_LIBRARY_PATH
 ENV PATH=$TENSORRT_DIR/bin:$PATH

 ### install ppl.cv
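
With ONNXRUNTIME_GPU_DIR added and LD_LIBRARY_PATH now pointing at the GPU tree instead of the CPU one, only the GPU libraries are resolvable by default at run time. A hedged way to confirm which copy the loader will pick up inside the container:

# Shows every onnxruntime entry on the runtime search path; expect only
# the /opt/onnxruntime-gpu paths here after this change.
echo "$LD_LIBRARY_PATH" | tr ':' '\n' | grep onnxruntime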
@@ -106,7 +113,7 @@ RUN curl -fsSL -v -o ~/miniconda.sh -O https://repo.anaconda.com/miniconda/Mini
     /opt/conda/envs/torch${TORCH_VERSION}/bin/pip install --no-cache-dir torch==${TORCH_VERSION}+cu${CUDA_INT} \
     torchvision==${TORCHVISION_VERSION}+cu${CUDA_INT} -f https://download.pytorch.org/whl/torch_stable.html &&\
     /opt/conda/bin/conda init bash &&\
-    /opt/conda/bin/conda clean -ya &&\
+    /opt/conda/bin/conda clean -ya

 ENV CONDA=/opt/conda
 ENV PATH=$CONDA/bin:$PATH

docs/en/get_started.md (+1, -1)

@@ -125,7 +125,7 @@ pip install mmdeploy==1.0.0rc3
 # 2.1 support onnxruntime
 pip install mmdeploy-runtime==1.0.0rc3
 # 2.2 support onnxruntime-gpu, tensorrt
-pip install mmdeploy-runtime-cuda==1.0.0rc3
+pip install mmdeploy-runtime-gpu==1.0.0rc3

 # 3. install inference engine
 # 3.1 install TensorRT
docs/zh_cn/get_started.md (+1, -1)

@@ -120,7 +120,7 @@ pip install mmdeploy==1.0.0rc3
 # 2.1 支持 onnxruntime 推理
 pip install mmdeploy-runtime==1.0.0rc3
 # 2.2 支持 onnxruntime-gpu tensorrt 推理
-pip install mmdeploy-runtime-cuda==1.0.0rc3
+pip install mmdeploy-runtime-gpu==1.0.0rc3

 # 3. 安装推理引擎
 # 3.1 安装推理引擎 TensorRT
