
Commit 8cab33c

Update get_started.md (open-mmlab#1947)
* update generate_build_config to support cxx11abi tag
* test prebuild ci
* update docs/zh_cn/get_started.md
* update docs/en/get_started.md
* fix prebuild ci
* update prebuilt_package_windows.md
* update prebuild ci deps
* fix prebuild ci
* try to fix prebuild ci
* fix prebuild ci
* remove trigger [no ci]
1 parent d76c7b6 commit 8cab33c
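
The main functional change is a new `--cxx11abi` switch for `tools/package_tools/generate_build_config.py`, exercised by the new `linux_build_cxx11abi` job in `.github/workflows/prebuild.yml`. For quick reference, the two invocations added there look like this (copied from the workflow diff below; both run from a `pack` working directory inside the repository checkout):

```bash
# cxx11abi CPU package: ONNX Runtime backend only ("Build sdk cpu backend" step)
python ../tools/package_tools/generate_build_config.py --backend 'ort' \
  --system linux --output config.yml --device cpu --build-sdk --build-sdk-monolithic \
  --sdk-dynamic-net --cxx11abi
python ../tools/package_tools/mmdeploy_builder.py --config config.yml

# cxx11abi CUDA package: ONNX Runtime + TensorRT ("Build sdk cuda backend" step)
python ../tools/package_tools/generate_build_config.py --backend 'ort;trt' \
  --system linux --output config.yml --device cuda --build-sdk --build-sdk-monolithic \
  --sdk-dynamic-net --cxx11abi --onnxruntime-dir=$ONNXRUNTIME_GPU_DIR --cudnn-dir /usr
python ../tools/package_tools/mmdeploy_builder.py --config config.yml
```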

File tree

9 files changed (+223, -228 lines)


.github/workflows/prebuild.yml

+63, -20
@@ -28,6 +28,7 @@ jobs:
          export MMDEPLOY_VERSION=$(python3 -c "import sys; sys.path.append('mmdeploy');from version import __version__;print(__version__)")
          echo $MMDEPLOY_VERSION
          echo "MMDEPLOY_VERSION=$MMDEPLOY_VERSION" >> $GITHUB_ENV
+          echo "OUTPUT_DIR=$MMDEPLOY_VERSION-$GITHUB_RUN_ID" >> $GITHUB_ENV
      - name: Build MMDeploy
        run: |
          source activate mmdeploy-3.6
@@ -50,17 +51,55 @@ jobs:
          cd pack
          python ../tools/package_tools/generate_build_config.py --backend 'ort;trt' \
            --system linux --output config.yml --device cuda --build-sdk --build-sdk-monolithic \
-            --build-sdk-python --sdk-dynamic-net
+            --build-sdk-python --sdk-dynamic-net --onnxruntime-dir=$ONNXRUNTIME_GPU_DIR
+          python ../tools/package_tools/mmdeploy_builder.py --config config.yml
+      - name: Move artifact
+        run: |
+          mkdir -p /__w/mmdeploy/prebuild/$OUTPUT_DIR
+          cp -r pack/* /__w/mmdeploy/prebuild/$OUTPUT_DIR
+
+  linux_build_cxx11abi:
+    runs-on: [self-hosted, linux-3090]
+    container:
+      image: openmmlab/mmdeploy:build-ubuntu18.04-cuda11.3
+      options: "--gpus=all --ipc=host"
+      volumes:
+        - /data2/actions-runner/prebuild:/__w/mmdeploy/prebuild
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v3
+        with:
+          submodules: recursive
+      - name: Get mmdeploy version
+        run: |
+          export MMDEPLOY_VERSION=$(python3 -c "import sys; sys.path.append('mmdeploy');from version import __version__;print(__version__)")
+          echo $MMDEPLOY_VERSION
+          echo "MMDEPLOY_VERSION=$MMDEPLOY_VERSION" >> $GITHUB_ENV
+          echo "OUTPUT_DIR=$MMDEPLOY_VERSION-$GITHUB_RUN_ID" >> $GITHUB_ENV
+      - name: Build sdk cpu backend
+        run: |
+          mkdir pack; cd pack
+          python ../tools/package_tools/generate_build_config.py --backend 'ort' \
+            --system linux --output config.yml --device cpu --build-sdk --build-sdk-monolithic \
+            --sdk-dynamic-net --cxx11abi
+          python ../tools/package_tools/mmdeploy_builder.py --config config.yml
+      - name: Build sdk cuda backend
+        run: |
+          cd pack
+          python ../tools/package_tools/generate_build_config.py --backend 'ort;trt' \
+            --system linux --output config.yml --device cuda --build-sdk --build-sdk-monolithic \
+            --sdk-dynamic-net --cxx11abi --onnxruntime-dir=$ONNXRUNTIME_GPU_DIR --cudnn-dir /usr
          python ../tools/package_tools/mmdeploy_builder.py --config config.yml
      - name: Move artifact
        run: |
-          mkdir -p /__w/mmdeploy/prebuild/$MMDEPLOY_VERSION
-          rm -rf /__w/mmdeploy/prebuild/$MMDEPLOY_VERSION/*
-          mv pack/* /__w/mmdeploy/prebuild/$MMDEPLOY_VERSION
+          mkdir -p /__w/mmdeploy/prebuild/$OUTPUT_DIR
+          cp -r pack/* /__w/mmdeploy/prebuild/$OUTPUT_DIR

  linux_test:
    runs-on: [self-hosted, linux-3090]
-    needs: linux_build
+    needs:
+      - linux_build
+      - linux_build_cxx11abi
    container:
      image: openmmlab/mmdeploy:ubuntu20.04-cuda11.3
      options: "--gpus=all --ipc=host"
@@ -75,13 +114,14 @@ jobs:
          export MMDEPLOY_VERSION=$(python3 -c "import sys; sys.path.append('mmdeploy');from version import __version__;print(__version__)")
          echo $MMDEPLOY_VERSION
          echo "MMDEPLOY_VERSION=$MMDEPLOY_VERSION" >> $GITHUB_ENV
+          echo "OUTPUT_DIR=$MMDEPLOY_VERSION-$GITHUB_RUN_ID" >> $GITHUB_ENV
      - name: Test python
        run: |
-          cd /__w/mmdeploy/prebuild/$MMDEPLOY_VERSION
+          cd /__w/mmdeploy/prebuild/$OUTPUT_DIR
          bash $GITHUB_WORKSPACE/tools/package_tools/test/test_sdk_python.sh
      - name: Test c/cpp
        run: |
-          cd /__w/mmdeploy/prebuild/$MMDEPLOY_VERSION
+          cd /__w/mmdeploy/prebuild/$OUTPUT_DIR
          bash $GITHUB_WORKSPACE/tools/package_tools/test/test_sdk.sh

  linux_upload:
@@ -99,20 +139,21 @@ jobs:
          export MMDEPLOY_VERSION=$(python3 -c "import sys; sys.path.append('mmdeploy');from version import __version__;print(__version__)")
          echo $MMDEPLOY_VERSION
          echo "MMDEPLOY_VERSION=$MMDEPLOY_VERSION" >> $GITHUB_ENV
+          echo "OUTPUT_DIR=$MMDEPLOY_VERSION-$GITHUB_RUN_ID" >> $GITHUB_ENV
      - name: Upload mmdeploy
        run: |
-          cd $PREBUILD_DIR/$MMDEPLOY_VERSION/mmdeploy
+          cd $PREBUILD_DIR/$OUTPUT_DIR/mmdeploy
          pip install twine
          # twine upload * --repository testpypi -u __token__ -p ${{ secrets.test_pypi_password }}
          twine upload * -u __token__ -p ${{ secrets.pypi_password }}
      - name: Upload mmdeploy_runtime
        run: |
-          cd $PREBUILD_DIR/$MMDEPLOY_VERSION/mmdeploy_runtime
+          cd $PREBUILD_DIR/$OUTPUT_DIR/mmdeploy_runtime
          # twine upload * --repository testpypi -u __token__ -p ${{ secrets.test_pypi_password }}
          twine upload * -u __token__ -p ${{ secrets.pypi_password }}
      - name: Zip mmdeploy sdk
        run: |
-          cd $PREBUILD_DIR/$MMDEPLOY_VERSION/sdk
+          cd $PREBUILD_DIR/$OUTPUT_DIR/sdk
          for folder in *
          do
            tar czf $folder.tar.gz $folder
@@ -121,7 +162,7 @@ jobs:
        uses: softprops/action-gh-release@v1
        with:
          files: |
-            $PREBUILD_DIR/$MMDEPLOY_VERSION/sdk/*.tar.gz
+            $PREBUILD_DIR/$OUTPUT_DIR/sdk/*.tar.gz


  windows_build:
@@ -137,6 +178,7 @@ jobs:
          $env:MMDEPLOY_VERSION=(python -c "import sys; sys.path.append('mmdeploy');from version import __version__;print(__version__)")
          echo $env:MMDEPLOY_VERSION
          echo "MMDEPLOY_VERSION=$env:MMDEPLOY_VERSION" >> $env:GITHUB_ENV
+          echo "OUTPUT_DIR=$env:MMDEPLOY_VERSION-$env:GITHUB_RUN_ID" >> $env:GITHUB_ENV
      - name: Build MMDeploy
        run: |
          . D:\DEPS\cienv\prebuild_gpu_env.ps1
@@ -165,9 +207,8 @@ jobs:
          python ../tools/package_tools/mmdeploy_builder.py --config config.yml
      - name: Move artifact
        run: |
-          New-Item "D:/DEPS/ciartifact/$env:MMDEPLOY_VERSION" -ItemType Directory -Force
-          Remove-Item "D:/DEPS/ciartifact/$env:MMDEPLOY_VERSION/*" -Force -Recurse
-          Move-Item pack/* "D:/DEPS/ciartifact/$env:MMDEPLOY_VERSION"
+          New-Item "D:/DEPS/ciartifact/$env:OUTPUT_DIR" -ItemType Directory -Force
+          Move-Item pack/* "D:/DEPS/ciartifact/$env:OUTPUT_DIR"

  windows_test:
    runs-on: [self-hosted, win10-3080]
@@ -181,15 +222,16 @@ jobs:
          $env:MMDEPLOY_VERSION=(python -c "import sys; sys.path.append('mmdeploy');from version import __version__;print(__version__)")
          echo $env:MMDEPLOY_VERSION
          echo "MMDEPLOY_VERSION=$env:MMDEPLOY_VERSION" >> $env:GITHUB_ENV
+          echo "OUTPUT_DIR=$env:MMDEPLOY_VERSION-$env:GITHUB_RUN_ID" >> $env:GITHUB_ENV
      - name: Test python
        run: |
-          cd "D:/DEPS/ciartifact/$env:MMDEPLOY_VERSION"
+          cd "D:/DEPS/ciartifact/$env:OUTPUT_DIR"
          . D:\DEPS\cienv\prebuild_cpu_env.ps1
          conda activate ci-test
          & "$env:GITHUB_WORKSPACE/tools/package_tools/test/test_sdk_python.ps1"
      - name: Test c/cpp
        run: |
-          cd "D:/DEPS/ciartifact/$env:MMDEPLOY_VERSION"
+          cd "D:/DEPS/ciartifact/$env:OUTPUT_DIR"
          . D:\DEPS\cienv\prebuild_cpu_env.ps1
          & "$env:GITHUB_WORKSPACE/tools/package_tools/test/test_sdk.ps1"

@@ -207,21 +249,22 @@ jobs:
          $env:MMDEPLOY_VERSION=(python -c "import sys; sys.path.append('mmdeploy');from version import __version__;print(__version__)")
          echo $env:MMDEPLOY_VERSION
          echo "MMDEPLOY_VERSION=$env:MMDEPLOY_VERSION" >> $env:GITHUB_ENV
+          echo "OUTPUT_DIR=$env:MMDEPLOY_VERSION-$env:GITHUB_RUN_ID" >> $env:GITHUB_ENV
      - name: Upload mmdeploy
        run: |
-          cd "D:/DEPS/ciartifact/$env:MMDEPLOY_VERSION/mmdeploy"
+          cd "D:/DEPS/ciartifact/$env:OUTPUT_DIR/mmdeploy"
          conda activate mmdeploy-3.8
          # twine upload * --repository testpypi -u __token__ -p ${{ secrets.test_pypi_password }}
          twine upload * -u __token__ -p ${{ secrets.pypi_password }}
      - name: Upload mmdeploy_runtime
        run: |
-          cd "D:/DEPS/ciartifact/$env:MMDEPLOY_VERSION/mmdeploy_runtime"
+          cd "D:/DEPS/ciartifact/$env:OUTPUT_DIR/mmdeploy_runtime"
          conda activate mmdeploy-3.8
          # twine upload * --repository testpypi -u __token__ -p ${{ secrets.test_pypi_password }}
          twine upload * -u __token__ -p ${{ secrets.pypi_password }}
      - name: Zip mmdeploy sdk
        run: |
-          cd "D:/DEPS/ciartifact/$env:MMDEPLOY_VERSION/sdk"
+          cd "D:/DEPS/ciartifact/$env:OUTPUT_DIR/sdk"
          $folders = $(ls).Name
          foreach ($folder in $folders) {
            Compress-Archive -Path $folder -DestinationPath "$folder.zip"
@@ -230,4 +273,4 @@ jobs:
        uses: softprops/action-gh-release@v1
        with:
          files: |
-            D:/DEPS/ciartifact/$env:MMDEPLOY_VERSION/sdk/*.zip
+            D:/DEPS/ciartifact/$env:OUTPUT_DIR/sdk/*.zip
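
The other recurring change in this workflow is that per-run artifacts now live under `$OUTPUT_DIR` (`<mmdeploy version>-<GitHub run id>`) instead of `$MMDEPLOY_VERSION`, presumably so that runs no longer overwrite each other and the old `rm -rf`/`Remove-Item` cleanup can be dropped. A sketch of the layout the test and upload jobs expect; the version and run id values here are made-up examples:

```bash
# Hypothetical example values; OUTPUT_DIR is set per run as "$MMDEPLOY_VERSION-$GITHUB_RUN_ID".
OUTPUT_DIR=1.0.0rc3-4242424242
ls /__w/mmdeploy/prebuild/$OUTPUT_DIR
# mmdeploy/          -> wheels published by the "Upload mmdeploy" step (twine upload)
# mmdeploy_runtime/  -> wheels published by the "Upload mmdeploy_runtime" step
# sdk/               -> one folder per SDK flavor, tarred to <folder>.tar.gz and attached to the GitHub release
```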

docs/en/02-how-to-run/prebuilt_package_windows.md

+31, -60
@@ -21,26 +21,27 @@

______________________________________________________________________

-This tutorial takes `mmdeploy-1.0.0rc3-windows-amd64-onnxruntime1.8.1.zip` and `mmdeploy-1.0.0rc3-windows-amd64-cuda11.1-tensorrt8.2.3.0.zip` as examples to show how to use the prebuilt packages.
+This tutorial takes `mmdeploy-1.0.0rc3-windows-amd64.zip` and `mmdeploy-1.0.0rc3-windows-amd64-cuda11.3.zip` as examples to show how to use the prebuilt packages. The former supports onnxruntime cpu inference, and the latter supports onnxruntime-gpu and tensorrt inference.

The directory structure of the prebuilt package is as follows, where the `dist` folder is about model converter, and the `sdk` folder is related to model inference.

```
.
-|-- dist
-`-- sdk
-    |-- bin
-    |-- example
-    |-- include
-    |-- lib
-    `-- python
+├── build_sdk.ps1
+├── example
+├── include
+├── install_opencv.ps1
+├── lib
+├── README.md
+├── set_env.ps1
+└── thirdparty
```

## Prerequisite

In order to use the prebuilt package, you need to install some third-party dependent libraries.

-1. Follow the [get_started](../get_started.md) documentation to create a virtual python environment and install pytorch, torchvision and mmcv-full. To use the C interface of the SDK, you need to install [vs2019+](https://visualstudio.microsoft.com/), [OpenCV](https://github.com/opencv/opencv/releases).
+1. Follow the [get_started](../get_started.md) documentation to create a virtual python environment and install pytorch, torchvision and mmcv. To use the C interface of the SDK, you need to install [vs2019+](https://visualstudio.microsoft.com/), [OpenCV](https://github.com/opencv/opencv/releases).

   :point_right: It is recommended to use `pip` instead of `conda` to install pytorch and torchvision

@@ -80,9 +81,8 @@ In order to use `ONNX Runtime` backend, you should also do the following steps.
5. Install `mmdeploy` (Model Converter) and `mmdeploy_runtime` (SDK Python API).

   ```bash
-   # download mmdeploy-1.0.0rc3-windows-amd64-onnxruntime1.8.1.zip
-   pip install .\mmdeploy-1.0.0rc3-windows-amd64-onnxruntime1.8.1\dist\mmdeploy-1.0.0rc3-py38-none-win_amd64.whl
-   pip install .\mmdeploy-1.0.0rc3-windows-amd64-onnxruntime1.8.1\sdk\python\mmdeploy_runtime-1.0.0rc3-cp38-none-win_amd64.whl
+   pip install mmdeploy==1.0.0rc3
+   pip install mmdeploy-runtime==1.0.0rc3
   ```

   :point_right: If you have installed it before, please uninstall it first.
@@ -100,16 +100,17 @@ In order to use `ONNX Runtime` backend, you should also do the following steps.
   ![sys-path](https://user-images.githubusercontent.com/16019484/181463801-1d7814a8-b256-46e9-86f2-c08de0bc150b.png)
   :exclamation: Restart powershell to make the environment variables setting take effect. You can check whether the settings are in effect by `echo $env:PATH`.

+8. Download the SDK C/cpp library, mmdeploy-1.0.0rc3-windows-amd64.zip
+
### TensorRT

In order to use `TensorRT` backend, you should also do the following steps.

5. Install `mmdeploy` (Model Converter) and `mmdeploy_runtime` (SDK Python API).

   ```bash
-   # download mmdeploy-1.0.0rc3-windows-amd64-cuda11.1-tensorrt8.2.3.0.zip
-   pip install .\mmdeploy-1.0.0rc3-windows-amd64-cuda11.1-tensorrt8.2.3.0\dist\mmdeploy-1.0.0rc3-py38-none-win_amd64.whl
-   pip install .\mmdeploy-1.0.0rc3-windows-amd64-cuda11.1-tensorrt8.2.3.0\sdk\python\mmdeploy_runtime-1.0.0rc3-cp38-none-win_amd64.whl
+   pip install mmdeploy==1.0.0rc3
+   pip install mmdeploy-runtime-gpu==1.0.0rc3
   ```

   :point_right: If you have installed it before, please uninstall it first.
@@ -128,6 +129,8 @@ In order to use `TensorRT` backend, you should also do the following steps.

7. Install pycuda by `pip install pycuda`

+8. Download the SDK C/cpp library, mmdeploy-1.0.0rc3-windows-amd64-cuda11.3.zip
+
## Model Convert

### ONNX Runtime Example
@@ -138,7 +141,7 @@ After preparation work, the structure of the current working directory should be

```
..
-|-- mmdeploy-1.0.0rc3-windows-amd64-onnxruntime1.8.1
+|-- mmdeploy-1.0.0rc3-windows-amd64
|-- mmclassification
|-- mmdeploy
`-- resnet18_8xb32_in1k_20210831-fbbb1da6.pth
@@ -186,7 +189,7 @@ After installation of mmdeploy-tensorrt prebuilt package, the structure of the c

```
..
-|-- mmdeploy-1.0.0rc3-windows-amd64-cuda11.1-tensorrt8.2.3.0
+|-- mmdeploy-1.0.0rc3-windows-amd64-cuda11.3
|-- mmclassification
|-- mmdeploy
`-- resnet18_8xb32_in1k_20210831-fbbb1da6.pth
@@ -299,7 +302,7 @@ python .\mmdeploy\demo\python\image_classification.py cpu .\work_dir\onnx\resnet

#### TensorRT

-```
+```bash
python .\mmdeploy\demo\python\image_classification.py cuda .\work_dir\trt\resnet\ .\mmclassification\demo\demo.JPEG
```

@@ -309,71 +312,39 @@ The following describes how to use the SDK's C API for inference

#### ONNXRuntime

-1. Build examples
-
-   Under `mmdeploy-1.0.0rc3-windows-amd64-onnxruntime1.8.1\sdk\example` directory
-
-   ```
-   // Path should be modified according to the actual location
-   mkdir build
-   cd build
-   cmake ..\cpp -A x64 -T v142 `
-     -DOpenCV_DIR=C:\Deps\opencv\build\x64\vc15\lib `
-     -DMMDeploy_DIR=C:\workspace\mmdeploy-1.0.0rc3-windows-amd64-onnxruntime1.8.1\sdk\lib\cmake\MMDeploy `
-     -DONNXRUNTIME_DIR=C:\Deps\onnxruntime\onnxruntime-win-gpu-x64-1.8.1
-
-   cmake --build . --config Release
-   ```
+1. Add environment variables

-2. Add environment variables or copy the runtime libraries to the same level directory of exe
+   Refer to the README.md in the sdk folder

-   :point_right: The purpose is to make the exe find the relevant dll
+2. Build examples

-   If choose to add environment variables, add the runtime libraries path of `mmdeploy` (`mmdeploy-1.0.0rc3-windows-amd64-onnxruntime1.8.1\sdk\bin`) to the `PATH`.
-
-   If choose to copy the dynamic libraries, copy the dll in the bin directory to the same level directory of the just compiled exe (build/Release).
+   Refer to the README.md in the sdk folder

3. Inference:

   It is recommended to use `CMD` here.

-   Under `mmdeploy-1.0.0rc3-windows-amd64-onnxruntime1.8.1\\sdk\\example\\build\\Release` directory:
+   Under `mmdeploy-1.0.0rc3-windows-amd64\\example\\cpp\\build\\Release` directory:

   ```
   .\image_classification.exe cpu C:\workspace\work_dir\onnx\resnet\ C:\workspace\mmclassification\demo\demo.JPEG
   ```

#### TensorRT

-1. Build examples
-
-   Under `mmdeploy-1.0.0rc3-windows-amd64-cuda11.1-tensorrt8.2.3.0\\sdk\\example` directory
-
-   ```
-   // Path should be modified according to the actual location
-   mkdir build
-   cd build
-   cmake ..\cpp -A x64 -T v142 `
-     -DOpenCV_DIR=C:\Deps\opencv\build\x64\vc15\lib `
-     -DMMDeploy_DIR=C:\workspace\mmdeploy-1.0.0rc3-windows-amd64-cuda11.1-tensorrt8.2.3.0\sdk\lib\cmake\MMDeploy `
-     -DTENSORRT_DIR=C:\Deps\tensorrt\TensorRT-8.2.3.0 `
-     -DCUDNN_DIR=C:\Deps\cudnn\8.2.1
-   cmake --build . --config Release
-   ```
-
-2. Add environment variables or copy the runtime libraries to the same level directory of exe
+1. Add environment variables

-   :point_right: The purpose is to make the exe find the relevant dll
+   Refer to the README.md in the sdk folder

-   If choose to add environment variables, add the runtime libraries path of `mmdeploy` (`mmdeploy-1.0.0rc3-windows-amd64-cuda11.1-tensorrt8.2.3.0\sdk\bin`) to the `PATH`.
+2. Build examples

-   If choose to copy the dynamic libraries, copy the dll in the bin directory to the same level directory of the just compiled exe (build/Release).
+   Refer to the README.md in the sdk folder

3. Inference

   It is recommended to use `CMD` here.

-   Under `mmdeploy-1.0.0rc3-windows-amd64-cuda11.1-tensorrt8.2.3.0\\sdk\\example\\build\\Release` directory
+   Under `mmdeploy-1.0.0rc3-windows-amd64-cuda11.3\\example\\cpp\\build\\Release` directory

   ```
   .\image_classification.exe cuda C:\workspace\work_dir\trt\resnet C:\workspace\mmclassification\demo\demo.JPEG
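
Taken together, the documentation change means the Python side is now installed straight from PyPI and only the C/C++ SDK comes from the prebuilt zip. A condensed recap of the new flow, using the commands and package names from the updated page (treating the GitHub release assets as the download location is an assumption based on the workflow above):

```bash
# Model Converter and SDK Python API now come from PyPI
pip install mmdeploy==1.0.0rc3
pip install mmdeploy-runtime==1.0.0rc3       # CPU / ONNX Runtime
# pip install mmdeploy-runtime-gpu==1.0.0rc3 # GPU / ONNX Runtime + TensorRT

# The C/C++ SDK is a separate download, e.g. mmdeploy-1.0.0rc3-windows-amd64.zip or
# mmdeploy-1.0.0rc3-windows-amd64-cuda11.3.zip; environment setup and example builds
# are described in the README.md shipped inside the package.
```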
