diff --git a/.bazelrc b/.bazelrc index ef005ed0d364..3d90722dc4dd 100644 --- a/.bazelrc +++ b/.bazelrc @@ -5,11 +5,18 @@ startup --host_jvm_args="-Xmx8g" build --announce_rc build --color=yes -build:production --config=lsan --strip=never --copt=-O3 +build:production --config=lsan --copt=-O3 # C/C++ CONFIGS build --cxxopt=-std=c++14 -build --compilation_mode=dbg +# Create debug information only for magma binaries (not for external dependencies). +# --compilation_mode=dbg would also create debug information of external dependencies +# and increase the size of artifacts drastically. +# Needs --strip=never so that debug information is not removed by the linker. +# See https://bazel.build/docs/user-manual#compilation-mode and +# https://bazel.build/docs/user-manual#strip +build --strip=never +build --per_file_copt=^lte/gateway/c/.*$@-g # DEFAULT TEST CONFIGURATION # Please read the GH issue #13073 before adding "test" options. @@ -57,6 +64,9 @@ build --test_env=PATH=/bin:/usr/bin:/usr/local/bin:/usr/sbin build --test_env=MAGMA_ROOT build --test_env=S1AP_TESTER_ROOT +# Needed for go tests to generate the test result XML in the correct format +build --test_env=GO_TEST_WRAP_TESTV=1 + # MME specific compile time defines # Compile mme libraries with unit test flag test --per_file_copt=^lte/gateway/c/core/.*$@-DMME_UNIT_TEST # See GH issue #13073 diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index a7d3924b5a84..0063cc977e52 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -3,7 +3,6 @@ ################################################################ FROM ghcr.io/magma/magma/bazel-base:latest as devcontainer -ARG HOME=/home/vscode # [Option] Install zsh ARG INSTALL_ZSH="true" # [Option] Upgrade OS packages to their latest versions @@ -53,7 +52,7 @@ RUN echo "Install general purpose packages" && \ gdb \ lcov \ libclang-11-dev \ - lldb \ + lldb-11 \ llvm-11-dev \ make \ ninja-build \ @@ -61,6 +60,7 @@ RUN echo "Install general purpose 
packages" && \ perl \ pkg-config \ python3-pip \ + python3-venv \ redis-server \ ruby \ rubygems \ @@ -82,7 +82,11 @@ RUN GO_TARBALL="go${GOLANG_VERSION}.linux-amd64.tar.gz" \ && curl https://artifactory.magmacore.org/artifactory/generic/${GO_TARBALL} --remote-name --location \ && tar -xzf ${GO_TARBALL} \ && rm ${GO_TARBALL} -ENV PATH=${PATH}:/usr/local/go/bin:${HOME}/go/bin +ENV PATH=${PATH}:/usr/local/go/bin + +# /home/vscode/go/bin doesn't exist initially, but for example orc8r/cloud/go/Makefile +# populates that folder and expects those binaries to be in PATH +ENV PATH=${PATH}:/home/vscode/go/bin RUN echo "Install 3rd party dependencies" && \ apt-get update && \ @@ -117,18 +121,18 @@ RUN echo "Install 3rd party dependencies" && \ grpc-dev ##### Useful for logfile modification e.g. pruning all /magma/... prefix from GCC warning logs -RUN GOBIN="/usr/bin/" go install github.com/ezekg/xo@0f7f076932dd +RUN GOBIN="/usr/bin/" go install github.com/ezekg/xo@0f7f076932dd && \ + rm --recursive --interactive=never /root/.cache/go-build ##### GRPC and it's dependencies RUN git clone --recurse-submodules -b v1.35.0 https://github.com/grpc/grpc && \ - cd grpc && \ - mkdir -p cmake/build && \ - cd cmake/build && \ + mkdir -p grpc/cmake/build && \ + cd grpc/cmake/build && \ cmake -DgRPC_INSTALL=ON -DgRPC_BUILD_TESTS=OFF -DBUILD_SHARED_LIBS=ON ../.. && \ make -j"$(nproc)" && \ make install && \ - cd / && \ - rm -rf grpc + cd ../../.. && \ + rm --recursive --interactive=never grpc ##### libprotobuf-mutator is used for randomized proto unit tests / property tests RUN git clone -b v1.0 https://github.com/google/libprotobuf-mutator && \ @@ -137,8 +141,8 @@ RUN git clone -b v1.0 https://github.com/google/libprotobuf-mutator && \ cmake .. -GNinja -DCMAKE_C_COMPILER=gcc -DCMAKE_CXX_COMPILER=g++ -DCMAKE_BUILD_TYPE=Debug && \ ninja && \ ninja install && \ - cd / && \ - rm -rf libprotobuf-mutator + cd ../.. 
&& \ + rm --recursive --interactive=never libprotobuf-mutator ##### Prometheus CPP RUN git clone https://github.com/jupp0r/prometheus-cpp.git && \ @@ -150,7 +154,8 @@ RUN git clone https://github.com/jupp0r/prometheus-cpp.git && \ cmake .. && \ make -j"$(nproc)" && \ make install && \ - rm -rf /prometheus-cpp + cd ../.. && \ + rm --recursive --interactive=never prometheus-cpp # install magma dependencies RUN apt-get install -y --no-install-recommends \ @@ -169,8 +174,8 @@ RUN git clone https://git.osmocom.org/libgtpnl && \ make -j"$(nproc)" && \ make install && \ ldconfig && \ - cd / && \ - rm -rf libgtpnl + cd .. && \ + rm --recursive --interactive=never libgtpnl ##### Build and install libgtest and gmock RUN cd /usr/src/googletest && \ @@ -193,84 +198,14 @@ RUN git clone https://github.com/include-what-you-use/include-what-you-use && \ cmake -G "Unix Makefiles" -DCMAKE_PREFIX_PATH=/usr/lib/llvm-11 ../include-what-you-use/ && \ make && \ make install && \ - cd / && \ - rm -rf include-what-you-use && \ - rm -rf build_liwyu + cd .. 
&& \ + rm --recursive --interactive=never include-what-you-use build_iwyu ##### Go language server support for vscode -RUN GOBIN="/usr/bin/" go install -v golang.org/x/tools/gopls@v0.8.3 +RUN GOBIN="/usr/bin/" go install -v golang.org/x/tools/gopls@v0.8.3 && \ + rm --recursive --interactive=never /root/.cache/go-build #### Update shared library configuration RUN ldconfig -v - -##### Install Python requirements - -### create virtualenv -ARG PYTHON_VENV=${HOME}/build/python -ENV PYTHON_VENV_EXECUTABLE=${PYTHON_VENV}/bin/python${PYTHON_VERSION} -# PYTHON_VENV must by in sync with "python.defaultInterpreterPath", "python.analysis.extraPaths" and magtivate path in "postCreateCommand" in .devcontainer/devcontainer.json - -RUN virtualenv --system-site-packages --python=/usr/bin/python${PYTHON_VERSION} ${PYTHON_VENV} -RUN ${PYTHON_VENV_EXECUTABLE} -m pip install --quiet --upgrade --no-cache-dir "setuptools==49.6.0" - -### install eggs (lte, orc8r) -COPY /lte/gateway/python/ ${MAGMA_ROOT}/lte/gateway/python/ -WORKDIR ${MAGMA_ROOT}/lte/gateway/python/ -RUN ${PYTHON_VENV_EXECUTABLE} -m pip install --quiet --upgrade --no-build-isolation --no-cache-dir --verbose --editable .[dev] && \ - rm -rf lte.egg-info - -COPY /orc8r/gateway/python/ ${MAGMA_ROOT}/orc8r/gateway/python/ -WORKDIR ${MAGMA_ROOT}/orc8r/gateway/python/ -RUN ${PYTHON_VENV_EXECUTABLE} -m pip install --quiet --upgrade --no-build-isolation --no-cache-dir --verbose --editable .[dev] && \ - rm -rf orc8r.egg-info - -### install formatter autopep8 -RUN ${PYTHON_VENV_EXECUTABLE} -m pip install --no-cache-dir autopep8 - -#### protos -ARG GEN_DIR=lib/python${PYTHON_VERSION}/gen - -COPY /protos/ ${MAGMA_ROOT}/protos/ -COPY /lte/protos/ ${MAGMA_ROOT}/lte/protos/ -COPY /orc8r/protos/ ${MAGMA_ROOT}/orc8r/protos/ -COPY /feg/protos/ ${MAGMA_ROOT}/feg/protos/ -COPY /dp/protos/ ${MAGMA_ROOT}/dp/protos/ -WORKDIR ${MAGMA_ROOT} -RUN ${PYTHON_VENV_EXECUTABLE} -m pip install --no-cache-dir "mypy-protobuf==2.4" && \ - mkdir 
${PYTHON_VENV}/${GEN_DIR} && \ - for PROTO_SRC in orc8r lte feg dp; \ - do \ - ${PYTHON_VENV_EXECUTABLE} protos/gen_protos.py ${PROTO_SRC}/protos ${MAGMA_ROOT},orc8r/protos/prometheus ${MAGMA_ROOT} ${PYTHON_VENV}/${GEN_DIR} && \ - ${PYTHON_VENV_EXECUTABLE} protos/gen_prometheus_proto.py ${MAGMA_ROOT} ${PYTHON_VENV}/${GEN_DIR}; \ - done && \ - echo "${PYTHON_VENV}/${GEN_DIR}" > ${PYTHON_VENV}/lib/python${PYTHON_VERSION}/site-packages/magma_gen.pth - -### swagger -ENV SWAGGER_CODEGEN_DIR=/var/tmp/codegen -ENV SWAGGER_CODEGEN_JAR=${SWAGGER_CODEGEN_DIR}/swagger-codegen-cli.jar -ARG CODEGEN_VERSION=2.2.3 - -RUN mkdir -p ${SWAGGER_CODEGEN_DIR}; \ - wget --no-verbose https://repo1.maven.org/maven2/io/swagger/swagger-codegen-cli/${CODEGEN_VERSION}/swagger-codegen-cli-${CODEGEN_VERSION}.jar -O ${SWAGGER_CODEGEN_JAR} - -# Copy swagger specs over to the build directory, -# so that eventd can access them at runtime -COPY lte/swagger/*.yml ${PYTHON_VENV}/${GEN_DIR}/lte/swagger/specs/ -COPY orc8r/swagger/*.yml ${PYTHON_VENV}/${GEN_DIR}/orc8r/swagger/specs/ -RUN for SWAGGER_SRC in lte orc8r; \ - do \ - # Generate the files - ls ${PYTHON_VENV}/${GEN_DIR}/${SWAGGER_SRC}/swagger/specs/*.yml \ - | xargs -t -I% /usr/bin/java -jar ${SWAGGER_CODEGEN_JAR} generate \ - -i % \ - -o ${PYTHON_VENV}/${GEN_DIR}/${SWAGGER_SRC}/swagger \ - -l python \ - -D models && \ - # Flatten and clean up directory - mv ${PYTHON_VENV}/${GEN_DIR}/${SWAGGER_SRC}/swagger/swagger_client/* ${PYTHON_VENV}/${GEN_DIR}/${SWAGGER_SRC}/swagger/ && \ - rmdir ${PYTHON_VENV}/${GEN_DIR}/${SWAGGER_SRC}/swagger/swagger_client && \ - rm -r ${PYTHON_VENV}/${GEN_DIR}/${SWAGGER_SRC}/swagger/test; \ - done - WORKDIR $MAGMA_ROOT diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index b7ed37c30e64..e3b7fb2a4a20 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -82,10 +82,9 @@ "python.terminal.activateEnvironment": true, "python.analysis.extraPaths": [ 
"${containerWorkspaceFolder}/orc8r/gateway/python/", - "${containerWorkspaceFolder}/lte/gateway/python/", - "/home/vscode/build/python/lib/python3.8/site-packages" // has to be in sync with $PYTHON_VENV and $PYTHON_VERSION from .devcontainer/Dockerfile + "${containerWorkspaceFolder}/lte/gateway/python/" ], - "python.defaultInterpreterPath": "/home/vscode/build/python/bin/python3.8", // has to be in sync with $PYTHON_VENV and $PYTHON_VERSION from .devcontainer/Dockerfile + "python.defaultInterpreterPath": "/home/vscode/python_ide_env/bin/python3", "python.formatting.provider": "autopep8", "python.formatting.autopep8Args": [ // This should be the same set of flags as ones specified in `lte/gateway/precommit.py` diff --git a/.dockerignore b/.dockerignore index 8ef5a542c841..61335e9e5961 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,6 +1,5 @@ * !.cache/test_certs -!fb/config/ !.devcontainer/ !third_party/ @@ -29,12 +28,6 @@ !lte/gateway/deploy !lte/gateway/docker/deploy -!devmand/cloud/ -!devmand/gateway/ -!devmand/protos/ - -!fb/src/dpi/ - !orc8r/cloud/configs/ !orc8r/lib/ !orc8r/cloud/go/ diff --git a/.github/workflows/agw-coverage.yml b/.github/workflows/agw-coverage.yml new file mode 100644 index 000000000000..455e76a20007 --- /dev/null +++ b/.github/workflows/agw-coverage.yml @@ -0,0 +1,198 @@ +# Copyright 2022 The Magma Authors. +# +# This source code is licensed under the BSD-style license found in the +# LICENSE file in the root directory of this source tree. +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +name: AGW Generate Coverage + +on: + push: + branches: + - master + - 'v1.*' + pull_request: + branches: + - master + - 'v1.*' + types: [ opened, reopened, synchronize ] + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} + cancel-in-progress: true + +env: + BAZEL_BASE_IMAGE: "ghcr.io/magma/magma/bazel-base:latest" + # Regarding the CACHE_KEY see https://github.com/magma/magma/pull/14041 + CACHE_KEY: bazel-base-image-sha-c4de1e5 + REMOTE_DOWNLOAD_OPTIMIZATION_C_CPP: true + REMOTE_DOWNLOAD_OPTIMIZATION_PYTHON: false + +jobs: + path_filter: + runs-on: ubuntu-latest + outputs: + should_not_skip: ${{ steps.changes.outputs.filesChanged }} + steps: + # Need to get git on push event + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 + if: github.event_name == 'push' + - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 # pin@v2.11.1 + id: changes + with: + filters: | + filesChanged: + - '.github/workflows/agw-coverage.yml' + - 'orc8r/**' + - 'lte/**' + - '.bazelrc' + - 'WORKSPACE.bazel' + - 'bazel/**' + + c-cpp-codecov: + needs: path_filter + if: ${{ needs.path_filter.outputs.should_not_skip == 'true' }} + name: C / C++ code coverage + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 + - name: Maximize build space + uses: ./.github/workflows/composite/maximize-build-space + - name: Setup Bazel Base Image + uses: addnab/docker-run-action@4f65fabd2431ebc8d299f8e5a018d79a769ae185 # pin@v3 + with: + image: ${{ env.BAZEL_BASE_IMAGE }} + options: --pull always + # Run a simple echo command to pull down the image. This makes it a bit more clear how much time is spent on building Magma and not pulling down the image. + run: | + echo "Pulled the bazel base image!" 
+ bazel # pull down bazel, if bazel download fails we can fail before we do all the lengthy work below + - name: Run C/C++ coverage with Bazel + if: always() + id: bazel-cc-codecoverage + uses: addnab/docker-run-action@4f65fabd2431ebc8d299f8e5a018d79a769ae185 # pin@v3 + with: + image: ${{ env.BAZEL_BASE_IMAGE }} + # TODO: Remove work-around mount of Github workspace to /magma (https://github.com/addnab/docker-run-action/issues/11) + options: -v ${{ github.workspace }}:/workspaces/magma/ -v ${{ github.workspace }}/lte/gateway/configs:/etc/magma + run: | + bazel/scripts/remote_cache_bazelrc_setup.sh "${{ env.CACHE_KEY }}" "${{ env.REMOTE_DOWNLOAD_OPTIMIZATION_C_CPP }}" "${{ secrets.BAZEL_REMOTE_PASSWORD }}" + # Collecting coverage with Bazel can be slow. We can follow this thread to see if this can be improved: https://github.com/bazelbuild/bazel/issues/8178 + # Coverage in bazel is flaky for remote caches - the flags below are helping. See GH13026 for details. + bazel coverage \ + --profile=Bazel_test_cc_coverage_profile \ + --experimental_split_coverage_postprocessing --experimental_fetch_all_coverage_outputs --remote_download_outputs=all \ + //orc8r/gateway/c/...:* //lte/gateway/c/...:* + # copy out coverage information into magma so that it's accessible from the CI node + cp bazel-out/_coverage/_coverage_report.dat . 
+ - name: Upload code coverage + if: always() + id: c-cpp-codecov-upload + uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 # pin@v3.1.1 + with: + flags: c_cpp + - name: Publish bazel profile + uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # pin@v3 + if: | + always() && + github.repository_owner == 'magma' && + github.ref_name == 'master' + with: + name: Bazel test C and C++ coverage profile + path: Bazel_test_cc_coverage_profile + - name: Build space left after run + shell: bash + run: | + echo "Available storage:" + df -h + - name: Notify Bazel C/C++ coverage failure to slack + if: | + failure() && + (steps.bazel-cc-codecoverage.conclusion == 'failure' || + steps.c-cpp-codecov-upload.outcome == 'failure') && + github.event_name == 'push' + uses: rtCamp/action-slack-notify@12e36fc18b0689399306c2e0b3e0f2978b7f1ee7 # pin@v2.2.0 + env: + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} + SLACK_TITLE: "C / C++ code coverage with Bazel" + SLACK_USERNAME: "${{ github.workflow }}" + SLACK_ICON_EMOJI: ":boom:" + SLACK_COLOR: "#FF0000" + SLACK_FOOTER: ' ' + MSG_MINIMAL: actions url,commit + + python-codecov: + needs: path_filter + if: ${{ needs.path_filter.outputs.should_not_skip == 'true' }} + name: Python code coverage + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 + - name: Maximize build space + uses: ./.github/workflows/composite/maximize-build-space + - name: Setup Bazel Base Image + uses: addnab/docker-run-action@4f65fabd2431ebc8d299f8e5a018d79a769ae185 # pin@v3 + with: + image: ${{ env.BAZEL_BASE_IMAGE }} + options: --pull always + # Run a simple echo command to pull down the image. This makes it a bit more clear how much time is spent on building Magma and not pulling down the image. + run: | + echo "Pulled the bazel base image!" 
+ bazel # pull down bazel, if bazel download fails we can fail before we do all the lengthy work below + - name: Run Python coverage with Bazel + if: always() + id: bazel-python-codecoverage + uses: addnab/docker-run-action@4f65fabd2431ebc8d299f8e5a018d79a769ae185 # pin@v3 + with: + image: ${{ env.BAZEL_BASE_IMAGE }} + # TODO: Remove work-around mount of Github workspace to /magma (https://github.com/addnab/docker-run-action/issues/11) + options: -v ${{ github.workspace }}:/workspaces/magma/ -v ${{ github.workspace }}/lte/gateway/configs:/etc/magma + run: | + bazel/scripts/remote_cache_bazelrc_setup.sh "${{ env.CACHE_KEY }}" "${{ env.REMOTE_DOWNLOAD_OPTIMIZATION_PYTHON }}" "${{ secrets.BAZEL_REMOTE_PASSWORD }}" + # Collecting coverage with Bazel can be slow. We can follow this thread to see if this can be improved: https://github.com/bazelbuild/bazel/issues/8178 + # Coverage in bazel is flaky for remote caches - the flags below are helping. See GH13026 for details. + bazel coverage \ + --profile=Bazel_test_python_coverage_profile \ + //orc8r/gateway/python/...:* //lte/gateway/python/...:* + # copy out coverage information into magma so that it's accessible from the CI node + cp bazel-out/_coverage/_coverage_report.dat . 
+ - name: Upload code coverage + if: always() + id: python-codecov-upload + uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 # pin@v3.1.1 + with: + flags: lte-test + - name: Publish bazel profile + uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # pin@v3 + if: | + always() && + github.repository_owner == 'magma' && + github.ref_name == 'master' + with: + name: Bazel test python coverage profile + path: Bazel_test_python_coverage_profile + - name: Build space left after run + shell: bash + run: | + echo "Available storage:" + df -h + - name: Notify Bazel Python coverage failure to slack + if: | + failure() && + (steps.bazel-python-codecoverage.conclusion == 'failure' || + steps.python-codecov-upload.outcome == 'failure') && + github.event_name == 'push' + uses: rtCamp/action-slack-notify@12e36fc18b0689399306c2e0b3e0f2978b7f1ee7 # pin@v2.2.0 + env: + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} + SLACK_TITLE: "Python code coverage with Bazel" + SLACK_USERNAME: "${{ github.workflow }}" + SLACK_ICON_EMOJI: ":boom:" + SLACK_COLOR: "#FF0000" + SLACK_FOOTER: ' ' + MSG_MINIMAL: actions url,commit diff --git a/.github/workflows/agw-docker-load-test.yml b/.github/workflows/agw-docker-load-test.yml index 76851a0c9a5e..3809cf199769 100644 --- a/.github/workflows/agw-docker-load-test.yml +++ b/.github/workflows/agw-docker-load-test.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-name: AGW docker load test +name: AGW Test Load Docker AMI on: workflow_run: @@ -35,14 +35,14 @@ jobs: WORK_DIR: "${{ github.workspace }}/experimental/cloudstrapper/playbooks" AGW_DOCKER_AMI: "ami-0150e153a94c122b5" steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Run apt run: sudo apt-get update && sudo apt -y upgrade - name: setup pyenv uses: "gabrielfalcao/pyenv-action@5327db2939908b2ef8f62d284403d678c4b611d0" # pin@v8 with: default: 3.8.10 - - uses: actions/setup-python@7f80679172b057fc5e90d70d197929d454754a5a # pin@v2 + - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # pin@v4.3.0 with: python-version: '3.8.10' - name: Install Dependencies diff --git a/.github/workflows/agw-workflow.yml b/.github/workflows/agw-workflow.yml index 0ff10128d974..99fc993286f2 100644 --- a/.github/workflows/agw-workflow.yml +++ b/.github/workflows/agw-workflow.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-name: agw-workflow +name: AGW Lint & Test on: push: @@ -28,9 +28,6 @@ concurrency: env: DEVCONTAINER_IMAGE: "ghcr.io/magma/magma/devcontainer:latest" - BAZEL_BASE_IMAGE: "ghcr.io/magma/magma/bazel-base:sha-4a878d8" - CACHE_KEY: bazel-base-image - REMOTE_DOWNLOAD_OPTIMIZATION: true jobs: path_filter: @@ -39,9 +36,9 @@ jobs: should_not_skip: ${{ steps.changes.outputs.filesChanged }} steps: # Need to get git on push event - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 if: github.event_name == 'push' - - uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721 # pin@v2 + - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 # pin@v2.11.1 id: changes with: filters: | @@ -78,8 +75,8 @@ jobs: MAGMA_DEV_MODE: 1 SKIP_SUDO_TESTS: 1 steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 - - uses: actions/setup-python@7f80679172b057fc5e90d70d197929d454754a5a # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 + - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # pin@v4.3.0 with: python-version: '3.8.10' - name: Run apt-focal-install-aioeventlet @@ -94,7 +91,6 @@ jobs: - name: Install libraries and dependecies run: | mkdir -p /var/tmp/test_results - mkdir -p /var/tmp/codecovs sudo -E apt-get update -y sudo -E apt-get install -y libsystemd-dev pkg-config curl zip unzip net-tools sudo -E apt-get install -y virtualenv python-babel python-dev build-essential autogen autoconf libtool python3-apt python3-requests python3-pip python-protobuf @@ -122,10 +118,6 @@ jobs: with: name: Unit Test Results path: /var/tmp/test_results - - uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # pin@v2 - with: - files: /var/tmp/codecovs/cover_lte.xml,/var/tmp/codecovs/cover_orc8r.xml - flags: lte-test - name: Extract commit title if: failure() && 
github.event_name == 'push' id: commit @@ -152,7 +144,7 @@ jobs: env: MAGMA_ROOT: "${{ github.workspace }}" steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Run li agent tests timeout-minutes: 5 uses: addnab/docker-run-action@4f65fabd2431ebc8d299f8e5a018d79a769ae185 # pin@v3 @@ -190,7 +182,7 @@ jobs: env: MAGMA_ROOT: "${{ github.workspace }}" steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Run sctpd tests with Debug build type uses: addnab/docker-run-action@4f65fabd2431ebc8d299f8e5a018d79a769ae185 # pin@v3 with: @@ -246,124 +238,12 @@ jobs: SLACK_COLOR: "#FF0000" SLACK_FOOTER: ' ' - c-cpp-codecov: - needs: path_filter - if: ${{ needs.path_filter.outputs.should_not_skip == 'true' }} - name: C / C++ code coverage - runs-on: ubuntu-latest - env: - MAGMA_ROOT: "${{ github.workspace }}" - BRANCH: "${{ github.base_ref }}" - REVISION: "${{ github.sha }}" - steps: - - name: Check Out Repo - # This is necessary for overlays into the Docker container below. - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 - - name: Maximize build space - uses: ./.github/workflows/composite/maximize-build-space - - name: Setup Devcontainer Image - uses: addnab/docker-run-action@4f65fabd2431ebc8d299f8e5a018d79a769ae185 # pin@v3 - with: - image: ${{ env.DEVCONTAINER_IMAGE }} - # Run a simple echo command to pull down the image. This makes it a bit more clear how much time is spent on building Magma and not pulling down the image. - run: | - echo "Pulled the devontainer image!" - - name: Setup Bazel Base Image - uses: addnab/docker-run-action@4f65fabd2431ebc8d299f8e5a018d79a769ae185 # pin@v3 - with: - image: ${{ env.BAZEL_BASE_IMAGE }} - options: --pull always - # Run a simple echo command to pull down the image. 
This makes it a bit more clear how much time is spent on building Magma and not pulling down the image. - run: | - echo "Pulled the bazel base image!" - bazel # pull down bazel, if bazel download fails we can fail before we do all the lengthy work below - - name: Run codecov with CMake (MME) - if: always() - uses: addnab/docker-run-action@4f65fabd2431ebc8d299f8e5a018d79a769ae185 # pin@v3 - with: - image: ${{ env.DEVCONTAINER_IMAGE }} - # TODO: Remove work-around mount of Github workspace to /magma (https://github.com/addnab/docker-run-action/issues/11) - options: -v ${{ github.workspace }}:/workspaces/magma/ -v ${{ github.workspace }}/lte/gateway/configs:/etc/magma - run: | - cd $MAGMA_ROOT/lte/gateway - make coverage - cp $C_BUILD/coverage.info $MAGMA_ROOT - - name: Run coverage with Bazel (COMMON / SESSIOND / SCTPD / LIAGENT / CONNECTIOND) - if: always() - id: bazel-codecoverage - uses: addnab/docker-run-action@4f65fabd2431ebc8d299f8e5a018d79a769ae185 # pin@v3 - with: - image: ${{ env.BAZEL_BASE_IMAGE }} - # TODO: Remove work-around mount of Github workspace to /magma (https://github.com/addnab/docker-run-action/issues/11) - options: -v ${{ github.workspace }}:/workspaces/magma/ -v ${{ github.workspace }}/lte/gateway/configs:/etc/magma - run: | - cd $MAGMA_ROOT - bazel/scripts/remote_cache_bazelrc_setup.sh "${{ env.CACHE_KEY }}" "${{ env.REMOTE_DOWNLOAD_OPTIMIZATION }}" "${{ secrets.BAZEL_REMOTE_PASSWORD }}" - # Collecting coverage with Bazel can be slow. We can follow this thread to see if this can be improved: https://github.com/bazelbuild/bazel/issues/8178 - # Coverage in bazel is flaky for remote caches - the flags below are helping. See GH13026 for details. - # TODO: GH11936 Omit OAI coverage until it is tested. 
We need to determine what the behavior is for doing both CMake and Bazel based coverage at the same time - bazel coverage \ - --profile=Bazel_test_coverage_profile \ - --experimental_split_coverage_postprocessing --experimental_fetch_all_coverage_outputs --remote_download_outputs=all \ - -- //orc8r/gateway/c/...:* //lte/gateway/c/...:* -//lte/gateway/c/core/...:* - # copy out coverage information into magma so that it's accessible from the CI node - cp bazel-out/_coverage/_coverage_report.dat $MAGMA_ROOT - - name: Upload code coverage - if: always() - id: c-cpp-codecov-upload - uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # pin@v2 - with: - flags: c_cpp - - name: Publish bazel profile - uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # pin@v3 - if: | - always() && - github.repository_owner == 'magma' && - github.ref_name == 'master' - with: - name: Bazel test coverage profile - path: Bazel_test_coverage_profile - - name: Extract commit title - # yamllint enable - if: failure() && github.event_name == 'push' - id: commit - run: | - str="$(jq '.head_commit.message' $GITHUB_EVENT_PATH)" # get the head_commit message - echo ::set-output name=title::${str%%\\n*} | tr -d '"' - - name: Notify failure to slack - if: steps.c-cpp-codecov-upload.outcome=='failure' && github.event_name == 'push' - uses: rtCamp/action-slack-notify@12e36fc18b0689399306c2e0b3e0f2978b7f1ee7 # pin@v2.2.0 - env: - SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_CI }} - SLACK_TITLE: "Github action c-cpp-codecov-upload failed" - SLACK_USERNAME: "AGW workflow" - SLACK_MESSAGE: "${{ steps.commit.outputs.title}}" - SLACK_ICON_EMOJI: ":boom:" - SLACK_COLOR: "#FF0000" - SLACK_FOOTER: ' ' - - name: Build space left after run - shell: bash - run: | - echo "Available storage:" - df -h - - name: Notify Bazel failure to slack - if: failure() && steps.bazel-codecoverage.conclusion == 'failure' && github.event_name == 'push' - uses: rtCamp/action-slack-notify@v2.2.0 - env: 
- SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_BAZEL_CI }} - SLACK_TITLE: "C / C++ code coverage with Bazel" - SLACK_USERNAME: "agw-workflow" - SLACK_ICON_EMOJI: ":boom:" - SLACK_COLOR: "#FF0000" - SLACK_FOOTER: ' ' - MSG_MINIMAL: actions url,commit - lint-clang-format: needs: path_filter if: ${{ needs.path_filter.outputs.should_not_skip == 'true' }} runs-on: ubuntu-latest steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Check clang-format for orc8r/gateway/c uses: DoozyX/clang-format-lint-action@9ea72631b74e61ce337d0839a90e76180e997283 # pin@v0.13 with: @@ -389,7 +269,7 @@ jobs: env: MAGMA_ROOT: "${{ github.workspace }}" steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Run session_manager tests timeout-minutes: 20 uses: addnab/docker-run-action@4f65fabd2431ebc8d299f8e5a018d79a769ae185 # pin@v3 @@ -423,7 +303,7 @@ jobs: name: jsonlint-mconfig runs-on: ubuntu-latest steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 with: ref: ${{github.event.pull_request.head.ref}} repository: ${{github.event.pull_request.head.repo.full_name}} diff --git a/.github/workflows/amis-workflow.yml b/.github/workflows/amis-workflow.yml index f578c863a07b..e69de29bb2d1 100644 --- a/.github/workflows/amis-workflow.yml +++ b/.github/workflows/amis-workflow.yml @@ -1,209 +0,0 @@ -# Copyright 2022 The Magma Authors. -# -# This source code is licensed under the BSD-style license found in the -# LICENSE file in the root directory of this source tree. 
-# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -name: amis-workflow - -on: - workflow_dispatch: null - schedule: - - cron: "0 2 * * 0" - -env: - MAGMA_VERSION: "1.7.0" - -jobs: - publish-amis-to-marketplace: - name: publish-amis-to-marketplace job - runs-on: ubuntu-latest - env: - MAGMA_ROOT: "${{ github.workspace }}" - CODE_DIR: "${{ github.workspace }}/experimental/cloudstrapper" - VARS_DIR: "${{ github.workspace }}/experimental/cloudstrapper/playbooks/roles/vars" - WORK_DIR: "${{ github.workspace }}/experimental/cloudstrapper/playbooks" - steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 - - name: Run apt - run: sudo apt-get update && sudo apt -y upgrade - - name: setup pyenv - uses: "gabrielfalcao/pyenv-action@5327db2939908b2ef8f62d284403d678c4b611d0" # pin@v8 - with: - default: 3.8.10 - - uses: actions/setup-python@7f80679172b057fc5e90d70d197929d454754a5a # pin@v2 - with: - python-version: '3.8.10' - - name: Install Dependencies - run: | - pip install ansible awscli boto3 - sudo apt-get update - - name: Determine AWS credentials to use - run: | - if ${{ github.event_name == 'workflow_dispatch' }}; then - echo AWS_ACCOUNT="LF" >> $GITHUB_ENV - echo BASE_REFERENCE=${{ github.base_ref }} >> $GITHUB_ENV - elif ${{ github.event_name == 'schedule' }}; then - echo AWS_ACCOUNT="FB" >> $GITHUB_ENV - echo BASE_REFERENCE="master" >> $GITHUB_ENV - fi - - name: Propagate AWS credentials to ansible and create version - run: | - if [ "${{ env.AWS_ACCOUNT }}" = "FB" ]; then - sed -i -e "s@awsAccessKey:@& ${{ secrets.FB_AWS_ACCESS_KEY }}@1" ${{ env.VARS_DIR }}/secrets.yaml - sed -i -e "s@awsSecretKey:@& ${{ secrets.FB_AWS_SECRET_ACCESS_KEY }}@1" ${{ env.VARS_DIR 
}}/secrets.yaml - echo VERSION="${{ env.MAGMA_VERSION }}-${{ github.sha }}" >> $GITHUB_ENV - echo PACKAGE_VERSION="${{ env.MAGMA_VERSION }}" >> $GITHUB_ENV - # When focal-ci is clean we will be able to dynamically fetch latest ci debian packages - elif [ "${{ env.AWS_ACCOUNT }}" = "LF" ]; then - sed -i -e "s@awsAccessKey:@& ${{ secrets.LF_AWS_ACCESS_KEY }}@1" ${{ env.VARS_DIR }}/secrets.yaml - sed -i -e "s@awsSecretKey:@& ${{ secrets.LF_AWS_SECRET_ACCESS_KEY }}@1" ${{ env.VARS_DIR }}/secrets.yaml - GIT_BRANCH_VERSION=${GITHUB_BASE_REF:-${GITHUB_REF#refs/heads/}} - echo PACKAGE_VERSION="${GIT_BRANCH_VERSION:1}" >> $GITHUB_ENV - echo VERSION="${GIT_BRANCH_VERSION:1}" >> $GITHUB_ENV - fi - echo PACKAGE_REPO_HOST="artifactory.magmacore.org\\\/artifactory\\\/debian" >> $GITHUB_ENV - echo GIT_REF="${{ github.sha }}" >> $GITHUB_ENV - - name: Propagate AWS region information - run: | - sed -i -e '/^awsAgwRegion: /s/:.*$/: us-east-1/' ${{ env.VARS_DIR }}/cluster.yaml - sed -i -e '/^awsOrc8rRegion: /s/:.*$/: us-east-1/' ${{ env.VARS_DIR }}/cluster.yaml - sed -i -e '/^awsAgwAz: /s/:.*$/: us-east-1b/' ${{ env.VARS_DIR }}/cluster.yaml - sed -i -e '/^buildAwsRegion: /s/:.*$/: us-east-1/' ${{ env.VARS_DIR }}/build.yaml - sed -i -e '/^buildAwsAz: /s/:.*$/: us-east-1b/' ${{ env.VARS_DIR }}/build.yaml - - name: Propagate names for AWS essential components - run: | - sed -i -e '/^secgroupDefault: /s/:.*$/: publish-amis-to-marketplace-secgroup/' ${{ env.VARS_DIR }}/defaults.yaml - sed -i -e '/^bucketDefault: /s/:.*$/: publish-amis-to-marketplace-bucket2/' ${{ env.VARS_DIR }}/defaults.yaml - sed -i -e '/^stackEssentialsDefault: /s/:.*$/: publish-amis-to-marketplace-stack/' ${{ env.VARS_DIR }}/defaults.yaml - sed -i -e '/^keyBoot: /s/:.*$/: publish-amis-to-marketplace-keyboot/' ${{ env.VARS_DIR }}/defaults.yaml - sed -i -e '/^keyHost: /s/:.*$/: publish-amis-to-marketplace-keyhost/' ${{ env.VARS_DIR }}/defaults.yaml - - name: Setup AWS essentials components - run: | - ansible-playbook 
${{ env.WORK_DIR }}/aws-prerequisites.yaml -e "dirLocalInventory=${{ env.VARS_DIR }}" --tags keyCreate,essentialsCreate - - name: Propagate Tags for Cloudstrapper instances - run: | - sed -i -e '/^devOpsCloudstrapper: /s/:.*$/: publishAmisToMarketplaceDevopsCloudstrapper/' ${{ env.VARS_DIR }}/defaults.yaml - sed -i -e '/^primaryCloudstrapper: /s/:.*$/: publishAmisToMarketplacePrimaryCloudstrapper/' ${{ env.VARS_DIR }}/defaults.yaml - sed -i -e '/^stackDevOpsCloudstrapper: /s/:.*$/: publish-amis-to-marketplace-stack-devopscloustrapper/' ${{ env.VARS_DIR }}/defaults.yaml - sed -i -e '/^stackCloudstrapper: /s/:.*$/: publish-amis-to-marketplace-stack-cloustrapper/' ${{ env.VARS_DIR }}/defaults.yaml - sed -i -e '/^devOpsAmi: /s/:.*$/: cloudstrapper-'"$VERSION"'/' ${{ env.VARS_DIR }}/defaults.yaml - sed -i -e '/^buildUbuntuAmi: /s/:.*$/: ami-09e67e426f25ce0d7/' ${{ env.VARS_DIR }}/build.yaml - sed -i -e '/^buildAgwVersion: /s/:.*$/: '"$GIT_REF"'/' ${{ env.VARS_DIR }}/build.yaml - sed -i -e '/^buildAgwPackage: /s/:.*$/: '"$PACKAGE_VERSION"'/' ${{ env.VARS_DIR }}/build.yaml - sed -i -e '/^taggedVersion: /s/:.*$/: '"$VERSION"'/' ${{ env.VARS_DIR }}/build.yaml - - name: Generate Cloudstrapper AMI - timeout-minutes: 120 - run: | - echo "DEFAULTS" - cat ${{ env.VARS_DIR }}/defaults.yaml - echo "BUILD" - cat ${{ env.VARS_DIR }}/build.yaml - echo "CLUSTER" - cat ${{ env.VARS_DIR }}/cluster.yaml - ansible-playbook ${{ env.WORK_DIR }}/devops-provision.yaml -e "dirLocalInventory=${{ env.VARS_DIR }}" - echo "Waiting one minute for the instance to boot up." 
- sleep 60 - ansible-playbook ${{ env.WORK_DIR }}/devops-configure.yaml -e "devops=tag_Name_publishAmisToMarketplaceDevopsCloudstrapper" -e "dirLocalInventory=${{ env.VARS_DIR }}" -i ${{ env.VARS_DIR }}/common_instance_aws_ec2.yaml -u ubuntu --skip-tags usingGitSshKey,buildMagma,pubMagma,helm,pubHelm - ansible-playbook ${{ env.WORK_DIR }}/devops-init.yaml -e "dirLocalInventory=${{ env.VARS_DIR }}" - - name: Notify success to Slack - if: success() && github.ref == 'refs/heads/master' - uses: rtCamp/action-slack-notify@12e36fc18b0689399306c2e0b3e0f2978b7f1ee7 # pin@v2.2.0 - env: - SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL_ARTIFACTS }} - SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_OSS }} - SLACK_TITLE: "*Cloudstrapper AWS AMI artifacts have been published*" - SLACK_MESSAGE: "${{ steps.commit.outputs.title}}" - SLACK_USERNAME: "Cloud workflow" - SLACK_ICON_EMOJI: ":heavy_check_mark:" - SLACK_COLOR: "#00FF00" - SLACK_FOOTER: ' ' - - name: Propagate variables for AGW AMI build - run: | - sed -i -e '/^buildAgwAmiName: /s/:.*$/: agw-ami-'"$VERSION"'/' ${{ env.VARS_DIR }}/build.yaml - sed -i -e '/^buildGwTagName: /s/:.*$/: publishAmisToMarketplaceAgw/' ${{ env.VARS_DIR }}/build.yaml - sed -i -e '/^packageRepoHost: /s/:.*$/: '"${{ env.PACKAGE_REPO_HOST }}"'/' ${{ env.VARS_DIR }}/build.yaml - sed -i -e '/^awsAgwAmi: /s/:.*$/: ami-09e67e426f25ce0d7/' ${{ env.VARS_DIR }}/cluster.yaml - # TODO Overwriting the previous buildAgwVersion to the current branch - sed -i -e '/^buildAgwVersion: /s/:.*$/: '"refs\/heads\/${{ env.BASE_REFERENCE }}"'/' ${{ env.VARS_DIR }}/build.yaml - - name: Generate AGW AMI - timeout-minutes: 30 - run: | - ansible-playbook ${{ env.WORK_DIR }}/agw-provision.yaml -e "idSite=DevOps" -e "idGw=publishAmisToMarketplaceAgw" -e "dirLocalInventory=${{ env.VARS_DIR }}" --tags infra,inventory -e "agwDevops=1" --skip-tags createBridge,cleanupBridge,cleanupNet - echo "Waiting one minute for the instance to boot up." 
- sleep 60 - ansible-playbook ${{ env.WORK_DIR }}/ami-configure.yaml -i "${{ env.VARS_DIR }}/common_instance_aws_ec2.yaml" -e "dirLocalInventory=${{ env.VARS_DIR }}" -e "aminode=tag_Name_publishAmisToMarketplaceAgw" -e "ansible_python_interpreter=/usr/bin/python3" -u ubuntu - ansible-playbook ${{ env.WORK_DIR }}/ami-init.yaml -e "dirLocalInventory=${{ env.VARS_DIR }}" - - name: Export qcow2 image for fb aws account in s3 - timeout-minutes: 30 - run: | - if [ "${{ env.AWS_ACCOUNT }}" = "FB" ]; then - ansible-playbook ${{ env.WORK_DIR }}/devops-convert-to-qcow2.yaml -e "dirLocalInventory=${{ env.VARS_DIR }}" -e "agwAmiName=agw-ami-$VERSION" - fi - - name: Clean AWS resources - if: always() - run: | - ansible-playbook ${{ env.WORK_DIR }}/cleanup.yaml -e "dirLocalInventory=${{ env.VARS_DIR }}" --tags agw,subnet,secgroup,vpc,keys --skip-tags orc8r -e "{"deleteStacks": [stackDevOpspublishAmisToMarketplaceAgw, publish-amis-to-marketplace-stack-devopscloustrapper, publish-amis-to-marketplace-stack, stackDevOpsNetwork]}" - - name: Notify success to Slack - if: success() && github.ref == 'refs/heads/master' - uses: rtCamp/action-slack-notify@12e36fc18b0689399306c2e0b3e0f2978b7f1ee7 # pin@v2.2.0 - env: - SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL_ARTIFACTS }} - SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_OSS }} - SLACK_TITLE: "*AGW AWS AMI artifacts have been published*" - SLACK_MESSAGE: "${{ steps.commit.outputs.title}}" - SLACK_USERNAME: "Cloud workflow" - SLACK_ICON_EMOJI: ":heavy_check_mark:" - SLACK_COLOR: "#00FF00" - SLACK_FOOTER: ' ' - publish-docker-ami: - name: publish-docker-ami - runs-on: ubuntu-latest - if: github.event_name != 'workflow_dispatch' - env: - MAGMA_ROOT: "${{ github.workspace }}" - CODE_DIR: "${{ github.workspace }}/experimental/cloudstrapper" - VARS_DIR: "${{ github.workspace }}/experimental/cloudstrapper/playbooks/roles/vars" - WORK_DIR: "${{ github.workspace }}/experimental/cloudstrapper/playbooks" - SHA: "${{ github.sha }}" - steps: - - uses: 
actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 - - name: Run apt - run: sudo apt-get update && sudo apt -y upgrade - - name: setup pyenv - uses: "gabrielfalcao/pyenv-action@5327db2939908b2ef8f62d284403d678c4b611d0" # pin@v8 - with: - default: 3.8.10 - - uses: actions/setup-python@7f80679172b057fc5e90d70d197929d454754a5a # pin@v2 - with: - python-version: '3.8.10' - - name: Install Dependencies - run: | - pip install ansible awscli boto3 boto - sudo apt-get update - - name: Export AWS Credentials - run: | - sed -i -e "s@awsAccessKey:@& ${{ secrets.FB_AWS_ACCESS_KEY }}@1" ${{ env.VARS_DIR }}/secrets.yaml - sed -i -e "s@awsSecretKey:@& ${{ secrets.FB_AWS_SECRET_ACCESS_KEY }}@1" ${{ env.VARS_DIR }}/secrets.yaml - - name: Launch ec2 instance - run: | - ansible-playbook ${{ env.WORK_DIR }}/docker-ami-provision.yaml -e "dirLocalInventory=${{ env.VARS_DIR }}" -e "tag_host=dockerami" -e "awsAgwRegion=us-east-1" - sleep 400s - ls -R ${{ env.VARS_DIR }} - - name: Install needed components on the remote host - env: - ANSIBLE_HOST_KEY_CHECKING: false - run: | - ansible-playbook ${{ env.WORK_DIR }}/docker-ami-configure.yaml --key-file "${{ env.VARS_DIR }}/dockerAMI.pem" -e "buildMagmaVersion=${{env.SHA}}" -e "dirLocalInventory=${{ env.VARS_DIR }}" -e "dockerHost=tag_agw_dockerami" -e "awsAgwRegion=us-east-1" -i ${{ env.VARS_DIR }}/common_instance_aws_ec2.yaml -u ubuntu - - name: Snapshot the instance - run: | - ansible-playbook ${{ env.WORK_DIR }}/docker-ami-init.yaml -e "dirLocalInventory=${{ env.VARS_DIR }}" -e "awsAgwRegion=us-east-1" -e "tag_host=dockerami" -e "taggedVersion=${{ env.MAGMA_VERSION }}-${SHA:0:8}" -vv - - name: Terminate instance - if: always() - run: | - ansible-playbook ${{ env.WORK_DIR }}/docker-cleanup.yaml -e "dirLocalInventory=${{ env.VARS_DIR }}" -e "tag_host=dockerami" -e "awsAgwRegion=us-east-1" diff --git a/.github/workflows/autolabel-pullrequests.yml b/.github/workflows/autolabel-pullrequests.yml index 3b5d654ca748..f78b5b561175 
100644 --- a/.github/workflows/autolabel-pullrequests.yml +++ b/.github/workflows/autolabel-pullrequests.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -name: AutoLabel PR +name: PR Generate Labels on: # Use pull_request_target to gain write permissions. # Ref: https://github.blog/2020-08-03-github-actions-improvements-for-fork-and-pull-request-workflows/ diff --git a/.github/workflows/backport-pull-request.yml b/.github/workflows/backport-pull-request.yml index 89f2f10ae51b..b2ce58802fe4 100644 --- a/.github/workflows/backport-pull-request.yml +++ b/.github/workflows/backport-pull-request.yml @@ -11,7 +11,7 @@ # Based on https://github.com/sqren/backport-github-action/blob/main/README.md under MIT license. -name: backport-pull-request +name: PR Backport on: pull_request_target: types: diff --git a/.github/workflows/bazel.yml b/.github/workflows/bazel.yml index dd92cde0bf9d..284333059b97 100644 --- a/.github/workflows/bazel.yml +++ b/.github/workflows/bazel.yml @@ -9,10 +9,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-name: "Bazel Build & Test" +name: AGW Build, Format & Test Bazel on: # yamllint disable-line rule:truthy workflow_dispatch: + inputs: + publish_bazel_profile: + description: 'Publish bazel profile data (default: false)' + required: false + default: false + type: boolean pull_request: types: - opened @@ -23,8 +29,9 @@ on: - master env: - BAZEL_BASE_IMAGE: "ghcr.io/magma/magma/bazel-base:sha-4a878d8" - CACHE_KEY: bazel-base-image + BAZEL_BASE_IMAGE: "ghcr.io/magma/magma/bazel-base:latest" + # see GH14041 + CACHE_KEY: bazel-base-image-sha-c4de1e5 REMOTE_DOWNLOAD_OPTIMIZATION: true concurrency: @@ -39,7 +46,7 @@ jobs: if: github.repository_owner == 'magma' || github.event_name == 'workflow_dispatch' steps: # Need to get git on push event - - uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721 # pin@v2 + - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 # pin@v2.11.1 if: github.event_name == 'pull_request' id: changes with: @@ -84,7 +91,7 @@ jobs: steps: - name: Check Out Repo # This is necessary for overlays into the Docker container below. 
- uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Maximize build space uses: ./.github/workflows/composite/maximize-build-space - name: Setup Bazel Base Image @@ -116,18 +123,29 @@ jobs: printf '\r%s\r' '###############################' 1>&2 bazel build \ ${{ matrix.bazel-target }} \ - ${{ matrix.bazel-config }} \ --config=mme_unit_test \ + ${{ matrix.bazel-config }} \ --profile=Bazel_build_all_profile printf '\r%s\r' '###############################' 1>&2 printf '\r%s\r' 'Executing bazel test ${{ matrix.bazel-config }}' 1>&2 printf '\r%s\r' '###############################' 1>&2 + TEST_FAILED="false" bazel test \ ${{ matrix.bazel-target }} \ ${{ matrix.bazel-config }} \ --test_output=errors \ - --profile=Bazel_test_all_profile + --profile=Bazel_test_all_profile || TEST_FAILED="true" + # Create Bazel unit-test results + # Can't be a separate step, because the container's '/tmp' folder is not preserved between steps + mkdir bazel_unit_test_results/ + UNIQUE_FILENAME_INDEX=0 + TEST_REPORT_PATHS=( $(find bazel-testlogs/ -name 'test.xml') ) + for TEST_REPORT_PATH in "${TEST_REPORT_PATHS[@]}" + do + cp "${TEST_REPORT_PATH}" "bazel_unit_test_results/test_result_${UNIQUE_FILENAME_INDEX}.xml" + UNIQUE_FILENAME_INDEX=$((UNIQUE_FILENAME_INDEX + 1)) + done if [ -z "${{ matrix.bazel-config }}" ]; then @@ -144,18 +162,42 @@ jobs: printf '\r%s\r' '###############################' 1>&2 bazel/scripts/test_python_service_imports.sh; fi + + if [[ "${TEST_FAILED}" == "true" ]]; + then + echo "ERROR: 'bazel test' failed!" 
+ exit 1 + fi + - name: Create merged test-result XML file + if: always() + run: | + mkdir -p lte/gateway/test_results/ + python3 lte/gateway/python/scripts/runtime_report.py -i "[^\/]+\.xml" -w "bazel_unit_test_results" -o "lte/gateway/test_results/merged_unit_test_reports.xml" - name: Publish bazel build profile uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # pin@v3 - if: always() + if: ${{ always() && github.event.inputs.publish_bazel_profile == 'true' }} with: name: Bazel build all profile ${{ matrix.bazel-config }} path: Bazel_build_all_profile - name: Publish bazel test profile uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # pin@v3 - if: always() + if: ${{ always() && github.event.inputs.publish_bazel_profile == 'true' }} with: name: Bazel test all profile ${{ matrix.bazel-config }} path: Bazel_test_all_profile + - name: Upload Bazel unit-test results ${{ matrix.bazel-config }} + uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # pin@v3 + if: always() + with: + name: Bazel unit-test results ${{ matrix.bazel-config }} + path: lte/gateway/test_results/merged_unit_test_reports.xml + - name: Publish Bazel unit-test results ${{ matrix.bazel-config }} + if: always() + uses: EnricoMi/publish-unit-test-result-action/composite@46ab8d49369d898e381a607119161771bc65c2a6 # pin@v2.2.0 + with: + check_name: Bazel unit-test results ${{ matrix.bazel-config }} + junit_files: lte/gateway/test_results/**/*.xml + check_run_annotations: all tests - name: Build space left after run shell: bash run: | @@ -165,7 +207,7 @@ jobs: if: failure() && github.event_name == 'push' && github.repository_owner == 'magma' uses: rtCamp/action-slack-notify@12e36fc18b0689399306c2e0b3e0f2978b7f1ee7 # pin@v2.2.0 env: - SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_BAZEL_CI }} + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} SLACK_TITLE: "Bazel Build & Test Job `bazel build //...; bazel test //...` ${{ matrix.bazel-config }}" 
SLACK_USERNAME: "Bazel Build & Test" SLACK_ICON_EMOJI: ":boom:" @@ -185,7 +227,7 @@ jobs: success: ${{ steps.setoutput.outputs.success }} steps: - id: setoutput - run: echo "::set-output name=success::true" + run: echo "success=true" >> $GITHUB_OUTPUT report_result_bazel_build_and_test: name: Bazel build and test status @@ -229,7 +271,7 @@ jobs: steps: - name: Check Out Repo # This is necessary for overlays into the Docker container below. - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Maximize build space uses: ./.github/workflows/composite/maximize-build-space - name: Setup Bazel Base Image @@ -249,12 +291,12 @@ jobs: run: | cd /workspaces/magma bazel/scripts/remote_cache_bazelrc_setup.sh "${{ env.CACHE_KEY }}" "${{ env.REMOTE_DOWNLOAD_OPTIMIZATION }}" "${{ secrets.BAZEL_REMOTE_PASSWORD }}" - bazel build lte/gateway/release:sctpd_deb_pkg \ + bazel build lte/gateway/release:sctpd_deb_pkg lte/gateway/release:magma_deb_pkg \ --config=production \ --profile=Bazel_build_package_profile - name: Publish bazel profile uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # pin@v3 - if: always() + if: ${{ always() && github.event.inputs.publish_bazel_profile == 'true' }} with: name: Bazel build package profile path: Bazel_build_package_profile @@ -267,7 +309,7 @@ jobs: if: failure() && github.event_name == 'push' && github.repository_owner == 'magma' uses: rtCamp/action-slack-notify@12e36fc18b0689399306c2e0b3e0f2978b7f1ee7 # pin@v2.2.0 env: - SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_BAZEL_CI }} + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} SLACK_TITLE: "Bazel Package Job" SLACK_USERNAME: "Bazel Build & Test" SLACK_ICON_EMOJI: ":boom:" @@ -280,7 +322,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check Out Repo - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + uses: 
actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Execute check shell: bash run: | @@ -289,7 +331,7 @@ jobs: if: failure() && github.event_name == 'push' && github.repository_owner == 'magma' uses: rtCamp/action-slack-notify@12e36fc18b0689399306c2e0b3e0f2978b7f1ee7 # pin@v2.2.0 env: - SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_BAZEL_CI }} + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} SLACK_TITLE: "Bazel Python Check Job `./bazel/scripts/check_py_bazel.sh`" SLACK_USERNAME: "Bazel Build & Test" SLACK_ICON_EMOJI: ":boom:" @@ -302,7 +344,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check Out Repo - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Execute check shell: bash run: | @@ -311,7 +353,7 @@ jobs: if: failure() && github.event_name == 'push' && github.repository_owner == 'magma' uses: rtCamp/action-slack-notify@12e36fc18b0689399306c2e0b3e0f2978b7f1ee7 # pin@v2.2.0 env: - SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_BAZEL_CI }} + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} SLACK_TITLE: "Bazel C/C++ Check Job `./bazel/scripts/check_c_cpp_bazel.sh`" SLACK_USERNAME: "Bazel Build & Test" SLACK_ICON_EMOJI: ":boom:" diff --git a/.github/workflows/build_all.yml b/.github/workflows/build_all.yml index a01d1fbe441c..69cfc72f7989 100644 --- a/.github/workflows/build_all.yml +++ b/.github/workflows/build_all.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-name: build-all +name: Magma Build & Publish on: workflow_dispatch: null @@ -36,7 +36,7 @@ jobs: ISSUE_NUMBER: "${{ github.event.number }}" runs-on: ubuntu-latest steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 # Version is github job run number when running on master # Or is branch name when on release branch - name: Set Helm chart version @@ -115,15 +115,16 @@ jobs: runs-on: macos-12 outputs: artifacts: ${{ steps.publish_packages.outputs.artifacts }} + magma_package: ${{ steps.publish_packages.outputs.magma_package }} steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 with: fetch-depth: 0 - name: Cache magma-dev-box - uses: actions/cache@0865c47f36e68161719c5b124609996bb5c40129 # pin@v3 + uses: actions/cache@9b0c1fce7a93df8e3bb8926b0d6e9d89e92f20a7 # pin@v3.0.11 with: path: ~/.vagrant.d/boxes/magmacore-VAGRANTSLASH-magma_dev - key: vagrant-box-magma-dev-v1.2.20220801 + key: vagrant-box-magma-dev-v1.2.20221012 - name: Log in to vagrant cloud run: | if [[ -n "${{ secrets.VAGRANT_TOKEN }}" ]] @@ -133,7 +134,7 @@ jobs: else echo "Vagrant cloud token is not configured. Skipping login." 
fi - - uses: actions/setup-python@7f80679172b057fc5e90d70d197929d454754a5a # pin@v2 + - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # pin@v4.3.0 with: python-version: '3.8.10' - name: Install pre requisites @@ -179,6 +180,14 @@ jobs: if [[ "$HTTP_STATUS" != "2"* ]]; then PUBLISH_ERROR="true" fi + # extract magma debian package version + match="magma_[0-9]+\.[0-9]+\.[0-9]+-[0-9]+-[a-z0-9]+_[a-z0-9]+.deb" + if [[ $i =~ $match ]]; then + magma_package=${i#magma_} + magma_package=${magma_package%_[a-z0-9]*.deb} + magma_package='magma='${magma_package} + echo "::set-output name=magma_package::${magma_package}" + fi done # set output if [[ "$PUBLISH_ERROR" != "true" ]]; then @@ -233,13 +242,13 @@ jobs: MAGMA_VERSION: ${{ needs.agw-build.outputs.magma_version }} runs-on: ubuntu-latest steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 with: fetch-depth: 0 - - uses: actions/download-artifact@f023be2c48cc18debc3bacd34cb396e0295e2869 # pin@v2 + - uses: actions/download-artifact@9782bd6a9848b53b110e712e20e42d89988822b7 # pin@v3.0.1 with: name: sentry-exec - - uses: geekyeggo/delete-artifact@b73cb986740e466292a536d0e32e2666c56fdeb3 # pin@v1 + - uses: geekyeggo/delete-artifact@54ab544f12cdb7b71613a16a2b5a37a9ade990af # pin@v2.0.0 with: name: sentry-exec - run: ls -R @@ -291,10 +300,10 @@ jobs: env: MAGMA_ROOT: "${{ github.workspace }}" steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Run apt-get update run: sudo apt-get update - - uses: actions/setup-python@7f80679172b057fc5e90d70d197929d454754a5a # pin@v2 + - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # pin@v4.3.0 with: python-version: '3.8.10' - name: Run build.py @@ -372,7 +381,7 @@ jobs: MAGMA_ROOT: "${{ github.workspace }}" DOCKER_BUILDKIT: 
1 steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Run agw docker compose id: agw-docker-compose continue-on-error: true @@ -467,7 +476,7 @@ jobs: runs-on: ubuntu-latest if: github.event_name == 'push' && github.repository_owner == 'magma' steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Install SwaggerHub CLI run: npm install --global swaggerhub-cli - name: Publish SwaggerHub API @@ -516,7 +525,7 @@ jobs: env: MAGMA_ROOT: "${{ github.workspace }}" steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Run docker compose id: cwag-docker-compose continue-on-error: true @@ -630,7 +639,7 @@ jobs: env: MAGMA_ROOT: "${{ github.workspace }}" steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Run docker compose build env: DOCKER_REGISTRY: cwf_ @@ -719,8 +728,8 @@ jobs: env: MAGMA_ROOT: "${{ github.workspace }}" steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 - - uses: actions/setup-python@7f80679172b057fc5e90d70d197929d454754a5a # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 + - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # pin@v4.3.0 with: python-version: '3.8.10' - name: generate test certs and snowflake @@ -821,7 +830,7 @@ jobs: MAGMA_ROOT: "${{ github.workspace }}" NMS_ROOT: "${{ github.workspace }}/nms" steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Run docker 
compose id: nms-docker-compose # yamllint disable rule:line-length @@ -906,8 +915,8 @@ jobs: nms-build ] steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 - - uses: actions/setup-python@7f80679172b057fc5e90d70d197929d454754a5a # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 + - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # pin@v4.3.0 with: python-version: '3.8.10' - name: Publish to @@ -936,7 +945,7 @@ jobs: env: MAGMA_ROOT: "${{ github.workspace }}" steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Prepare tools working-directory: "${{ github.workspace }}/dp" run: | @@ -991,3 +1000,15 @@ jobs: SLACK_ICON_EMOJI: ":heavy_check_mark:" SLACK_COLOR: "#00FF00" SLACK_FOOTER: ' ' + trigger-debian-integ-test: + if: always() && github.event_name == 'push' && github.repository_owner == 'magma' && github.ref == 'refs/heads/master' + runs-on: ubuntu-latest + needs: agw-build + steps: + - name: Trigger debian integ test workflow + uses: peter-evans/repository-dispatch@v2 + with: + token: ${{ secrets.GITHUB_TOKEN }} + repository: magma/magma + event-type: build-all-artifact + client-payload: '{ "artifact": "${{ needs.agw-build.outputs.magma_package }}" }' diff --git a/.github/workflows/build_magma_dep.yml b/.github/workflows/build_magma_dep.yml index bcf02d7c9af7..a78f0d947453 100644 --- a/.github/workflows/build_magma_dep.yml +++ b/.github/workflows/build_magma_dep.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-name: "Build Nettle and Upload to Artifactory" +name: Magma Build & Publish Nettle on: workflow_dispatch: inputs: @@ -36,7 +36,7 @@ jobs: runs-on: ubuntu-20.04 if: contains('["maxhbr", "nstng", "Neudrino", "tmdzk", "alexzurbonsen", "MoritzThomasHuebner"]', github.actor) steps: - - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # pin@v3 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Install fpm shell: bash run: | diff --git a/.github/workflows/check-rebase.yml b/.github/workflows/check-rebase.yml index 99928ba5ff97..982775b761db 100644 --- a/.github/workflows/check-rebase.yml +++ b/.github/workflows/check-rebase.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -name: "Check Rebase" +name: PR Check Rebase on: pull_request: @@ -27,12 +27,12 @@ jobs: BASE_SHA: "${{ github.event.pull_request.base.sha }}" steps: - name: Checkout Head - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 with: repository: "${{env.HEAD_FULL_NAME}}" fetch-depth: 0 - name: Checkout Base - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 with: repository: "${{env.BASE_FULL_NAME}}" fetch-depth: 0 diff --git a/.github/workflows/cloud-workflow.yml b/.github/workflows/cloud-workflow.yml index aa529ad7e790..def96a7ef919 100644 --- a/.github/workflows/cloud-workflow.yml +++ b/.github/workflows/cloud-workflow.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-name: cloud-workflow +name: Orc8r Lint & Test on: push: @@ -33,14 +33,21 @@ jobs: should_not_skip: ${{ steps.changes.outputs.filesChanged }} steps: # Need to get git on push event - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 if: github.event_name == 'push' - - uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721 # pin@v2 + - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 # pin@v2.11.1 id: changes with: filters: | filesChanged: - - [".github/workflows/cloud-workflow.yml", "lte/protos/**", "cwf/cloud/**", "feg/cloud/**", "lte/cloud/**", "orc8r/**"] + - ".github/workflows/cloud-workflow.yml" + - "lte/protos/**" + - "cwf/cloud/**" + - "feg/cloud/**" + - "lte/cloud/**" + - "orc8r/**" + - "dp/cloud/**" + - "dp/protos/**" - name: Save should_not_skip output if: always() run: | @@ -62,8 +69,8 @@ jobs: MAGMA_ROOT: "${{ github.workspace }}" GO111MODULE: 'on' steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 - - uses: actions/setup-python@7f80679172b057fc5e90d70d197929d454754a5a # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 + - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # pin@v4.3.0 with: python-version: '3.8.10' - name: deploy-sync-checkin @@ -84,7 +91,7 @@ jobs: run: | cd ${MAGMA_ROOT}/orc8r/cloud/docker python3 build.py --coverage - - uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # pin@v2 + - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 # pin@v3.1.1 if: always() && steps.cloud-lint-cov.outcome=='success' id: cloud-lint-codecov with: @@ -104,7 +111,7 @@ jobs: with: name: Unit Test Results path: "${{ env.MAGMA_ROOT}}/orc8r/cloud/test-results/*" - - uses: actions/setup-go@b22fbbc2921299758641fab08929b4ac52b32923 # pin@v3 + - uses: 
actions/setup-go@c4a742cab115ed795e34d4513e2cf7d472deb55f # pin@v3.3.1 if: always() id: gateway_test_init with: diff --git a/.github/workflows/codeowners-syntax.yml b/.github/workflows/codeowners-syntax.yml index 20f4615d8afa..2c84cb8b1876 100644 --- a/.github/workflows/codeowners-syntax.yml +++ b/.github/workflows/codeowners-syntax.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -name: "Codeowners Validator" +name: PR Check Codeowners on: pull_request: @@ -29,7 +29,7 @@ jobs: runs-on: ubuntu-latest steps: # Checks-out your repository, which is validated in the next step - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: GitHub CODEOWNERS Validator uses: mszostok/codeowners-validator@2f6e3bb39aa6837d7dcf8eff2de5d6c046d0c9a9 # pin@v0.6.0 with: diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 93deff70d473..f370561eaed5 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -22,7 +22,7 @@ # the `language` matrix defined below to confirm you have the correct set of # supported CodeQL languages. -name: "CodeQL" +name: Magma Analyze With CodeQL on: push: @@ -53,11 +53,11 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@a6611b86918424d4588efe7d6dbe18fe52d42518 # pin@v1 + uses: github/codeql-action/init@ec3cf9c605b848da5f1e41e8452719eb1ccfb9a6 # pin@v2.1.29 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. 
@@ -68,7 +68,7 @@ jobs: # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild - uses: github/codeql-action/autobuild@a6611b86918424d4588efe7d6dbe18fe52d42518 # pin@v1 + uses: github/codeql-action/autobuild@ec3cf9c605b848da5f1e41e8452719eb1ccfb9a6 # pin@v2.1.29 # â„¹ï¸ Command-line programs to run using the OS shell. # 📚 https://git.io/JvXDl @@ -79,7 +79,7 @@ jobs: # make bootstrap # make release - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@a6611b86918424d4588efe7d6dbe18fe52d42518 # pin@v1 + uses: github/codeql-action/analyze@ec3cf9c605b848da5f1e41e8452719eb1ccfb9a6 # pin@v2.1.29 env: NODE_OPTIONS: --max-old-space-size=5120 diff --git a/.github/workflows/comment-pr-on-check-failure.yml b/.github/workflows/comment-pr-on-check-failure.yml index 0ce2ce08bf0e..3847e3d571fc 100644 --- a/.github/workflows/comment-pr-on-check-failure.yml +++ b/.github/workflows/comment-pr-on-check-failure.yml @@ -9,13 +9,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -name: Update PR on check failure +name: PR Generate Comment On Workflow Failure on: workflow_run: workflows: - - DCO check - - Python Format Check - - Markdown lint check + - PR Check DCO + - AGW Build & Format Python + - Docs Lint & Check Generated Files In Sync types: - completed @@ -101,9 +101,9 @@ jobs: fs.writeFileSync('${{github.workspace}}/pr.zip', Buffer.from(download.data)); - run: unzip pr.zip - name: DCO comment message - if: ${{ github.event.workflow.name == 'DCO check' }} + if: ${{ github.event.workflow.name == 'PR Check DCO' }} run: | - echo "Oops! Looks like you failed the \`DCO check\`. Be sure to sign all your commits. + echo "Oops! Looks like you failed the \`PR Check DCO\`. Be sure to sign all your commits. 
### Howto - [Magma guidelines on signing commits](https://magma.github.io/magma/docs/next/contributing/contribute_workflow#guidelines) - [About the \`signoff\` feature](https://stackoverflow.com/questions/1962094/what-is-the-sign-off-feature-in-git-for) @@ -111,9 +111,9 @@ jobs: - [Howto: sign-off multiple past commits](https://gist.github.com/kwk/d70f20d17b18c4f3296d) - $CHECK_GUIDELINE" >> $GITHUB_WORKSPACE/msg - name: Python format comment message - if: ${{ github.event.workflow.name == 'Python Format Check' }} + if: ${{ github.event.workflow.name == 'AGW Build & Format Python' }} run: | - echo "Oops! Looks like you failed the \`Python Format Check\`. + echo "Oops! Looks like you failed the \`AGW Build & Format Python\`. ### Howto - Instructions on running the formatter and linter locally are provided in the [format AGW doc](https://docs.magmacore.org/docs/next/lte/dev_unit_testing#format-agw) - $CHECK_GUIDELINE" >> $GITHUB_WORKSPACE/msg @@ -126,11 +126,11 @@ jobs: - For PRs with only one commit, the commit message must also be semantic. See [Changing a commit message](https://docs.github.com/en/github/committing-changes-to-your-project/creating-and-editing-commits/changing-a-commit-message) for a howto - $CHECK_GUIDELINE" >> $GITHUB_WORKSPACE/msg - name: Markdown lint comment message - if: ${{ github.event.workflow.name == 'Markdown lint check' }} + if: ${{ github.event.workflow.name == 'Docs Lint & Check Generated Files In Sync' }} run: | - echo "Oops! Looks like you failed the \`Markdown lint check\`. + echo "Oops! Looks like you failed the \`Docs Lint & Check Generated Files In Sync\`. 
### Howto - - [Instructions on formatting your Markdown changes](https://magma.github.io/magma/docs/next/docs/docs_overview#precommit) + - [Instructions on formatting your Markdown changes](https://github.com/magma/magma/wiki/Contributing-Documentation#precommit) - $CHECK_GUIDELINE" >> $GITHUB_WORKSPACE/msg - name: Comment on PR uses: actions/github-script@47f7cf65b5ced0830a325f705cad64f2f58dddf7 # pin@v3.1.0 diff --git a/.github/workflows/composite/docker-builder/action.yml b/.github/workflows/composite/docker-builder/action.yml index 90863bdb01ef..47ec51cda532 100644 --- a/.github/workflows/composite/docker-builder/action.yml +++ b/.github/workflows/composite/docker-builder/action.yml @@ -33,7 +33,7 @@ runs: using: composite steps: - name: Check Out Repo - uses: actions/checkout@v2 + uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Set up Docker meta id: meta uses: docker/metadata-action@v3 @@ -47,7 +47,7 @@ runs: id: buildx uses: docker/setup-buildx-action@v1 - name: Login to GHCR - uses: docker/login-action@v1 + uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # pin v2.1.0 with: registry: ${{ inputs.REGISTRY }} username: ${{ github.repository_owner }} @@ -55,7 +55,7 @@ runs: if: ${{ github.event_name == 'push' && github.ref_name == 'master' && github.token != null }} - name: Build and push Docker image id: docker_build - uses: docker/build-push-action@v2 + uses: docker/build-push-action@c56af957549030174b10d6867f20e78cfd7debc5 # pin@v3.2.0 with: context: . 
file: ${{ inputs.DOCKERFILE }} diff --git a/.github/workflows/composite/dp-integ-tests/action.yml b/.github/workflows/composite/dp-integ-tests/action.yml index 69941ab34ec5..c3bb9d03e911 100644 --- a/.github/workflows/composite/dp-integ-tests/action.yml +++ b/.github/workflows/composite/dp-integ-tests/action.yml @@ -24,7 +24,7 @@ inputs: runs: using: composite steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Set env shell: bash diff --git a/.github/workflows/cwag-workflow.yml b/.github/workflows/cwag-workflow.yml index dfdc7067b69d..06edb1c569cc 100644 --- a/.github/workflows/cwag-workflow.yml +++ b/.github/workflows/cwag-workflow.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -name: cwag-workflow +name: CWAG Format & Test on: push: @@ -34,9 +34,9 @@ jobs: should_not_skip: ${{ steps.changes.outputs.filesChanged }} steps: # Need to get git on push event - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 if: github.event_name == 'push' - - uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721 # pin@v2 + - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 # pin@v2.11.1 id: changes with: filters: | @@ -51,14 +51,14 @@ jobs: GO111MODULE: on MAGMA_ROOT: "${{ github.workspace }}" steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 - - uses: actions/setup-go@b22fbbc2921299758641fab08929b4ac52b32923 # pin@v3 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 + - uses: actions/setup-go@c4a742cab115ed795e34d4513e2cf7d472deb55f # pin@v3.3.1 with: go-version: '1.18.3' - name: Run golang_before_install.sh script run: ./.github/workflows/scripts/golang_before_install.sh - name: Run go mod 
download with retry - uses: nick-invision/retry@e88a9994b039653512d697de1bce46b00bfe11b5 # pin@v2 + uses: nick-fields/retry@48bc5d4b1ce856c44a7766114e4da81c980a8a92 # pin@v2.8.2 with: command: cd ${MAGMA_ROOT}/cwf/gateway && go mod download timeout_minutes: 10 diff --git a/.github/workflows/cwf-integ-test.yml b/.github/workflows/cwf-integ-test.yml index 00c9cf038e72..6c7fb6e35eae 100644 --- a/.github/workflows/cwf-integ-test.yml +++ b/.github/workflows/cwf-integ-test.yml @@ -9,29 +9,21 @@ # See the License for the specific language governing permissions and # limitations under the License. -name: CWF integ test +name: CWAG Build & Test Integration on: workflow_dispatch: null - workflow_run: - workflows: - - build-all + push: branches: - master - 'v1.*' - types: - - completed -env: - SHA: ${{ github.event.workflow_run.head_commit.id || github.sha }} jobs: docker-build: if: github.repository_owner == 'magma' || github.event_name == 'workflow_dispatch' runs-on: ubuntu-latest steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 - with: - ref: ${{ env.SHA }} + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Run docker compose run: | cd cwf/gateway/docker @@ -58,24 +50,24 @@ jobs: str="$(jq '.head_commit.message' $GITHUB_EVENT_PATH)" # get the head_commit message echo ::set-output name=title::${str%%\\n*} | tr -d '"' - name: Notify failure to slack - if: failure() && github.event.workflow_run.event == 'push' + if: failure() && github.event_name == 'push' env: SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} SLACK_USERNAME: "CWF integ test" SLACK_AVATAR: ":boom:" uses: Ilshidur/action-slack@689ad44a9c9092315abd286d0e3a9a74d31ab78a # pin@2.1.0 with: - args: 'CWF integration test: docker build step failed on [${{ env.SHA }}](${{github.event.repository.owner.html_url}}/magma/commits/${{ env.SHA }}): ${{ steps.commit.outputs.title}}' + args: 'CWF integration test: docker build step failed on [${{ github.sha 
}}](${{github.event.repository.owner.html_url}}/magma/commits/${{ github.sha }}): ${{ steps.commit.outputs.title}}' cwf-integ-test: if: github.repository_owner == 'magma' || github.event_name == 'workflow_dispatch' runs-on: macos-12 needs: docker-build steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 with: - ref: ${{ env.SHA }} + ref: ${{ github.sha }} - name: Cache Vagrant Boxes - uses: actions/cache@0865c47f36e68161719c5b124609996bb5c40129 # pin@v3 + uses: actions/cache@9b0c1fce7a93df8e3bb8926b0d6e9d89e92f20a7 # pin@v3.0.11 with: path: ~/.vagrant.d/boxes key: vagrant-boxes-cwf-v20220722 @@ -88,7 +80,7 @@ jobs: else echo "Vagrant cloud token is not configured. Skipping login." fi - - uses: actions/setup-python@7f80679172b057fc5e90d70d197929d454754a5a # pin@v2 + - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # pin@v4.3.0 with: python-version: '3.8.10' - name: Install pre requisites @@ -96,10 +88,10 @@ jobs: pip3 install --upgrade pip pip3 install ansible fabric3 jsonpickle requests PyYAML firebase_admin vagrant plugin install vagrant-vbguest vagrant-reload vagrant-disksize - - uses: actions/download-artifact@f023be2c48cc18debc3bacd34cb396e0295e2869 # pin@v2 + - uses: actions/download-artifact@9782bd6a9848b53b110e712e20e42d89988822b7 # pin@v3.0.1 with: name: docker-images - - uses: geekyeggo/delete-artifact@b73cb986740e466292a536d0e32e2666c56fdeb3 # pin@v1 + - uses: geekyeggo/delete-artifact@54ab544f12cdb7b71613a16a2b5a37a9ade990af # pin@v2.0.0 with: name: docker-images - name: Copy docker images into /tmp/cwf-images @@ -128,10 +120,11 @@ jobs: path: cwf/gateway/tests.xml - name: Publish Unit Test Results if: always() - uses: EnricoMi/publish-unit-test-result-action/composite@7377632048da85434c30810c38353542d3162dc4 # pin@v1 + uses: EnricoMi/publish-unit-test-result-action/composite@46ab8d49369d898e381a607119161771bc65c2a6 # 
pin@v2.2.0 with: + check_name: CWF integration test results + junit_files: cwf/gateway/tests.xml check_run_annotations: all tests - files: cwf/gateway/tests.xml - name: Fetch logs if: always() run: | @@ -146,22 +139,22 @@ jobs: name: test-logs path: cwf/gateway/logs - name: Publish results to Firebase - if: always() && github.event.workflow_run.event == 'push' + if: always() && github.event_name == 'push' env: FIREBASE_SERVICE_CONFIG: ${{ secrets.FIREBASE_SERVICE_CONFIG }} - REPORT_FILENAME: "cwf_integ_test_${{ env.SHA }}.html" + REPORT_FILENAME: "cwf_integ_test_${{ github.sha }}.html" run: | npm install -g xunit-viewer [ -f "cwf/gateway/tests.xml" ] && { xunit-viewer -r cwf/gateway/tests.xml -o "$REPORT_FILENAME"; } [ -f "$REPORT_FILENAME" ] && { python ci-scripts/firebase_upload_file.py -f "$REPORT_FILENAME" -o out_url.txt; } [ -f "out_url.txt" ] && { URL=$(cat out_url.txt); } - python ci-scripts/firebase_publish_report.py -id ${{ env.SHA }} --verdict ${{ job.status }} --run_id ${{ github.run_id }} cwf --url $URL + python ci-scripts/firebase_publish_report.py -id ${{ github.sha }} --verdict ${{ job.status }} --run_id ${{ github.run_id }} cwf --url $URL - name: Notify failure to slack - if: failure() && github.event.workflow_run.event == 'push' + if: failure() && github.event_name == 'push' env: SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} SLACK_USERNAME: "CWF integ test" SLACK_AVATAR: ":boom:" uses: Ilshidur/action-slack@689ad44a9c9092315abd286d0e3a9a74d31ab78a # pin@2.1.0 with: - args: 'CWF integration test: tests failed on [${{ env.SHA }}](${{github.event.repository.owner.html_url}}/magma/commits/${{ env.SHA }}): ${{ steps.commit.outputs.title}}' + args: 'CWF integration test: tests failed on [${{ github.sha }}](${{github.event.repository.owner.html_url}}/magma/commits/${{ github.sha }}): ${{ steps.commit.outputs.title}}' diff --git a/.github/workflows/cwf-operator.yml b/.github/workflows/cwf-operator.yml index b6cd0679ea3b..309068a5bb3c 100644 --- 
a/.github/workflows/cwf-operator.yml +++ b/.github/workflows/cwf-operator.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -name: cwf-operator +name: CWAG Lint & Test Operator on: push: @@ -33,9 +33,9 @@ jobs: should_not_skip: ${{ steps.changes.outputs.filesChanged }} steps: # Need to get git on push event - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 if: github.event_name == 'push' - - uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721 # pin@v2 + - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 # pin@v2.11.1 id: changes with: filters: | @@ -50,14 +50,14 @@ jobs: GO111MODULE: on MAGMA_ROOT: "${{ github.workspace }}" steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 - - uses: actions/setup-go@b22fbbc2921299758641fab08929b4ac52b32923 # pin@v3 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 + - uses: actions/setup-go@c4a742cab115ed795e34d4513e2cf7d472deb55f # pin@v3.3.1 with: go-version: '1.18.3' - name: Run golang_before_install.sh script run: ./.github/workflows/scripts/golang_before_install.sh - name: Run go mod download with retry - uses: nick-invision/retry@e88a9994b039653512d697de1bce46b00bfe11b5 # pin@v2 + uses: nick-fields/retry@48bc5d4b1ce856c44a7766114e4da81c980a8a92 # pin@v2.8.2 with: command: cd ${MAGMA_ROOT}/cwf/k8s/cwf_operator && go mod download timeout_minutes: 10 diff --git a/.github/workflows/dco-check.yml b/.github/workflows/dco-check.yml index 27269138d9ef..1467a3a3fef8 100644 --- a/.github/workflows/dco-check.yml +++ b/.github/workflows/dco-check.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-name: DCO check +name: PR Check DCO on: pull_request: types: [ opened, reopened, synchronize ] diff --git a/.github/workflows/deploy-build-from-pr.yml b/.github/workflows/deploy-build-from-pr.yml index 0300ddce757f..0b932bd38493 100644 --- a/.github/workflows/deploy-build-from-pr.yml +++ b/.github/workflows/deploy-build-from-pr.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -name: Deploy PR build +name: Magma Publish Artifacts on: workflow_run: workflows: @@ -26,10 +26,9 @@ jobs: WORKFLOW_NAME: "${{ github.event.workflow.name }}" WORKFLOW_STATUS: "${{ github.event.workflow_run.conclusion }}" steps: - - uses: hmarr/debug-action@1201a20fc9d278ddddd5f0f46922d06513892491 # pin@v2 # Could be improved, only need the tag push docker and helm rotation script here - name: checkout code - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 # Retrieve Generated artifacts and delete them to keep cache usage low - name: Download builds uses: actions/github-script@47f7cf65b5ced0830a325f705cad64f2f58dddf7 # pin@v3.1.0 diff --git a/.github/workflows/docker-builder-devcontainer.yml b/.github/workflows/docker-builder-devcontainer.yml index c921bec851b9..30ee6f1fe834 100644 --- a/.github/workflows/docker-builder-devcontainer.yml +++ b/.github/workflows/docker-builder-devcontainer.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-name: "Build Docker image for Bazel Base and DevContainer" +name: Magma Build Docker Image Bazel Base & DevContainer on: push: branches: @@ -44,7 +44,7 @@ jobs: build_dockerfile_bazel_base: runs-on: ubuntu-latest steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - uses: ./.github/workflows/composite/docker-builder with: REGISTRY: ${{ env.REGISTRY }} @@ -56,7 +56,7 @@ jobs: needs: build_dockerfile_bazel_base runs-on: ubuntu-latest steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - uses: ./.github/workflows/composite/docker-builder with: REGISTRY: ${{ env.REGISTRY }} diff --git a/.github/workflows/docker-builder-python-precommit.yml b/.github/workflows/docker-builder-python-precommit.yml index 2471dff3ece6..44cee9a29156 100644 --- a/.github/workflows/docker-builder-python-precommit.yml +++ b/.github/workflows/docker-builder-python-precommit.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -name: "Build Docker image for Python Precommit" +name: AGW Build Docker Image Python Precommit on: push: branches: @@ -40,7 +40,7 @@ jobs: build_dockerfile: runs-on: ubuntu-latest steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - uses: ./.github/workflows/composite/docker-builder with: REGISTRY: ${{ env.REGISTRY }} diff --git a/.github/workflows/docker-promote.yml b/.github/workflows/docker-promote.yml new file mode 100644 index 000000000000..d91dfc186693 --- /dev/null +++ b/.github/workflows/docker-promote.yml @@ -0,0 +1,53 @@ +# Copyright 2022 The Magma Authors. 
+# +# This source code is licensed under the BSD-style license found in the +# LICENSE file in the root directory of this source tree. +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +name: Magma Promote Docker Images + +on: + workflow_dispatch: + inputs: + branch_tag: + description: 'Branch version number' + required: true + release_tag: + description: 'Release version number' + required: true + +jobs: + docker-promote: + runs-on: ubuntu-latest + env: + BRANCH_TAG: ${{ inputs.branch_tag }} + RELEASE_TAG: ${{ inputs.release_tag }} + MAGMA_ARTIFACTORY: artifactory.magmacore.org + steps: + - uses: tspascoal/get-user-teams-membership@39b5264024b7c3bd7480de2f2c8d3076eed49ec5 # pin@v1.0.4 + name: Check if user has rights to promote + id: checkUserMember + with: + username: ${{ github.actor }} + team: 'approvers-ci' + GITHUB_TOKEN: ${{ secrets.github_token }} + - if: ${{ steps.checkUserMember.outputs.isTeamMember == 'false' }} + run: | + echo "User is not a member of the team" + exit 1 + - uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # pin v2.1.0 + name: Login to Artifactory + with: + registry: docker.${{ env.MAGMA_ARTIFACTORY }} + username: ${{ secrets.ARTIFACTORY_USERNAME }} + password: ${{ secrets.ARTIFACTORY_PASSWORD }} + - run: | + wget https://github.com/magma/magma/raw/master/orc8r/tools/docker/promote.sh + chmod 755 promote.sh + # Promote Docker images + ./promote.sh diff --git a/.github/workflows/docs-workflow.yml b/.github/workflows/docs-workflow.yml index 382122d2634d..2d2e8f78f1a7 100644 --- a/.github/workflows/docs-workflow.yml +++ b/.github/workflows/docs-workflow.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # 
limitations under the License. -name: Markdown lint check +name: Docs Lint & Check Generated Files In Sync on: push: @@ -31,9 +31,9 @@ jobs: should_not_skip: ${{ steps.changes.outputs.filesChanged }} steps: # Need to get git on push event - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 if: github.event_name == 'push' - - uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721 # pin@v2 + - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 # pin@v2.11.1 id: changes with: filters: | @@ -62,8 +62,8 @@ jobs: env: MAGMA_ROOT: "${{ github.workspace }}" steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 - - uses: actions/setup-python@7f80679172b057fc5e90d70d197929d454754a5a # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 + - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # pin@v4.3.0 with: python-version: '3.8.10' - name: Run docs precommit @@ -79,8 +79,8 @@ jobs: env: MAGMA_ROOT: "${{ github.workspace }}" steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 - - uses: actions/setup-python@7f80679172b057fc5e90d70d197929d454754a5a # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 + - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # pin@v4.3.0 with: python-version: '3.8.10' - name: Run docs `make` diff --git a/.github/workflows/docusaurus-workflow.yml b/.github/workflows/docusaurus-workflow.yml index b9785c435050..745fdcba5677 100644 --- a/.github/workflows/docusaurus-workflow.yml +++ b/.github/workflows/docusaurus-workflow.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-name: docusaurus-workflow +name: Docs Build & Deploy on: push: @@ -19,7 +19,7 @@ jobs: docusaurus-build-and-deploy: runs-on: ubuntu-latest steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Export vars run: | echo "DOCUSAURUS_URL=https://magma.github.io" >> $GITHUB_ENV diff --git a/.github/workflows/dp-workflow.yml b/.github/workflows/dp-workflow.yml index 724751683961..5efb4f767685 100644 --- a/.github/workflows/dp-workflow.yml +++ b/.github/workflows/dp-workflow.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -name: dp-workflow +name: DP Lint & Test on: push: @@ -31,15 +31,14 @@ jobs: runs-on: ubuntu-latest outputs: cc: ${{ steps.filter.outputs.cc }} - am: ${{ steps.filter.outputs.am }} rc: ${{ steps.filter.outputs.rc }} db: ${{ steps.filter.outputs.db }} helm: ${{ steps.filter.outputs.helm }} integration_tests_orc8r: ${{ steps.filter.outputs.integration_tests_orc8r }} steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 if: github.event_name == 'push' - - uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721 # pin@v2 + - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 # pin@v2.11.1 id: filter with: filters: | @@ -52,14 +51,6 @@ jobs: - 'dp/protos/**' - 'dp/cloud/go/protos/**' - 'dp/cloud/go/active_mode_controller/protos' - am: - - '.github/workflows/dp-workflow.yml' - - 'dp/cloud/go/active_mode_controller/**' - - 'dp/cloud/docker/go/active_mode_controller/**' - - 'dp/cloud/python/magma/db_service/**' - - 'dp/protos/**' - - 'dp/cloud/go/protos/**' - - 'dp/cloud/go/active_mode_controller/protos' rc: - '.github/workflows/dp-workflow.yml' - 'dp/cloud/python/magma/radio_controller/**' @@ -107,10 +98,8 @@ jobs: run: working-directory: 
dp/cloud/python/magma/configuration_controller steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 - - - name: Set up Python ${{ env.PYTHON_VERSION }} - uses: actions/setup-python@7f80679172b057fc5e90d70d197929d454754a5a # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 + - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # pin@v4.3.0 with: python-version: ${{ env.PYTHON_VERSION }} @@ -135,51 +124,12 @@ jobs: coverage report - name: Upload coverage to Codecov - uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # pin@v2 + uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 # pin@v3.1.1 with: flags: unittests,configuration-controller name: codecov-configuration-controller verbose: true - active_mode_controller_unit_tests: - needs: path_filter - if: ${{ needs.path_filter.outputs.am == 'true' }} - name: "Active mode controller unit tests" - runs-on: ubuntu-latest - - defaults: - run: - working-directory: dp/cloud/go/active_mode_controller - env: - GO_VERSION: 1.18.3 - - steps: - - name: Checkout code - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 - - - name: Set up Go ${{ env.GO_VERSION }} - uses: actions/setup-go@b22fbbc2921299758641fab08929b4ac52b32923 # pin@v3 - with: - go-version: ${{ env.GO_VERSION }} - - - name: Run Go linter - uses: golangci/golangci-lint-action@5c56cd6c9dc07901af25baab6f2b0d9f3b7c3018 # pin@v2 - with: - version: v1.46.2 - working-directory: dp/cloud/go/active_mode_controller - skip-go-installation: true - - - name: Run Go tests - run: | - go test ./... 
-v -race -coverprofile=coverage.txt -covermode=atomic - - - name: Upload coverage to Codecov - uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # pin@v2 - with: - flags: unittests,active-mode-controller - name: codecov-active-mode-controller - verbose: true - radio_controller_unit_tests: needs: path_filter if: ${{ needs.path_filter.outputs.rc == 'true' }} @@ -197,10 +147,8 @@ jobs: working-directory: dp/cloud/python/magma/radio_controller steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 - - - name: Set up Python ${{ env.PYTHON_VERSION }} - uses: actions/setup-python@7f80679172b057fc5e90d70d197929d454754a5a # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 + - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # pin@v4.3.0 with: python-version: ${{ env.PYTHON_VERSION }} @@ -252,7 +200,7 @@ jobs: coverage report - name: Upload coverage to Codecov - uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # pin@v2 + uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 # pin@v3.1.1 with: flags: unittests,radio-controller name: codecov-radio-controller @@ -281,10 +229,8 @@ jobs: POSTGRES_PASSWORD: postgres steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 - - - name: Set up Python ${{ env.PYTHON_VERSION }} - uses: actions/setup-python@7f80679172b057fc5e90d70d197929d454754a5a # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 + - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # pin@v4.3.0 with: python-version: ${{ env.PYTHON_VERSION }} @@ -319,10 +265,8 @@ jobs: working-directory: dp/cloud/python/magma/db_service steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 - - - name: Set up Python ${{ env.PYTHON_VERSION }} - uses: actions/setup-python@7f80679172b057fc5e90d70d197929d454754a5a # pin@v2 + - uses: 
actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 + - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # pin@v4.3.0 with: python-version: ${{ env.PYTHON_VERSION }} @@ -347,7 +291,7 @@ jobs: coverage report - name: Upload coverage to Codecov - uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # pin@v2 + uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 # pin@v3.1.1 with: flags: unittests,db-service name: codecov-db-service @@ -360,8 +304,7 @@ jobs: if: ${{ needs.path_filter.outputs.integration_tests_orc8r == 'true' }} continue-on-error: false steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 - + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Run DP integration test (with orc8r) uses: ./.github/workflows/composite/dp-integ-tests with: @@ -376,8 +319,7 @@ jobs: run: working-directory: dp steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 - + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Set env run: | echo "MINIKUBE_DP_MAX_MEMORY=$(grep MemTotal /proc/meminfo | awk '{printf "%dm",$2/1024 - 1}')" >> $GITHUB_ENV diff --git a/.github/workflows/federated-integ-test.yml b/.github/workflows/federated-integ-test.yml index 90e5b4867778..b3413ea2f058 100644 --- a/.github/workflows/federated-integ-test.yml +++ b/.github/workflows/federated-integ-test.yml @@ -9,18 +9,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -name: Federated integ test +name: Magma Build, Publish & Test Federated Integration on: # yamllint disable-line rule:truthy workflow_dispatch: null - workflow_run: - workflows: - - build-all + push: branches: - master - 'v1.*' - types: - - completed jobs: # Build images on ubuntu which is faster than MacOs. 
@@ -30,7 +26,7 @@ jobs: env: MAGMA_ROOT: "${{ github.workspace }}" steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Build Orc8r docker images run: | cd orc8r/cloud/docker @@ -45,7 +41,7 @@ jobs: docker save orc8r_controller:latest | gzip > fed_orc8r_controller.tar.gz docker save orc8r_fluentd:latest | gzip > fed_orc8r_fluentd.tar.gz docker save orc8r_test:latest | gzip > fed_orc8r_test.tar.gz - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # pin@v3 with: name: docker-build-orc8r-images path: images @@ -56,7 +52,7 @@ jobs: env: MAGMA_ROOT: "${{ github.workspace }}" steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: pre requisites to build feg run: | cd ${{ env.MAGMA_ROOT }} && mkdir -p .cache/test_certs/ && mkdir -p .cache/feg/ @@ -72,7 +68,7 @@ jobs: cd images docker save feg_gateway_go:latest | gzip > fed_feg_gateway_go.tar.gz docker save feg_gateway_python:latest | gzip > fed_feg_gateway_python.tar.gz - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # pin@v3 with: name: docker-build-feg-images path: images @@ -80,15 +76,13 @@ jobs: federated-integ-test: if: github.repository_owner == 'magma' || github.event_name == 'workflow_dispatch' runs-on: macos-12 + needs: [docker-build-orc8r, docker-build-feg] env: - SHA: ${{ github.event.workflow_run.head_commit.id || github.sha }} MAGMA_ROOT: "${{ github.workspace }}" AGW_ROOT: "${{ github.workspace }}/lte/gateway" steps: - - uses: actions/checkout@v2 - with: - ref: ${{ env.SHA }} - - uses: actions/setup-python@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 + - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # pin@v4.3.0 with: python-version: '3.8.5' - name: Install pre requisites @@ -108,17 
+102,17 @@ jobs: sudo sh -c "echo '* 192.168.0.0/16' > /etc/vbox/networks.conf" sudo sh -c "echo '* 3001::/64' >> /etc/vbox/networks.conf" - name: Cache magma-dev-box - uses: actions/cache@0865c47f36e68161719c5b124609996bb5c40129 # pin@v3 + uses: actions/cache@9b0c1fce7a93df8e3bb8926b0d6e9d89e92f20a7 # pin@v3.0.11 with: path: ~/.vagrant.d/boxes/magmacore-VAGRANTSLASH-magma_dev - key: vagrant-box-magma-dev-v1.2.20220801 + key: vagrant-box-magma-dev-v1.2.20221012 - name: Cache magma-test-box - uses: actions/cache@0865c47f36e68161719c5b124609996bb5c40129 # pin@v3 + uses: actions/cache@9b0c1fce7a93df8e3bb8926b0d6e9d89e92f20a7 # pin@v3.0.11 with: path: ~/.vagrant.d/boxes/magmacore-VAGRANTSLASH-magma_test - key: vagrant-box-magma-test + key: vagrant-box-magma-test-v1.2.20221012 - name: Cache magma-trfserver-box - uses: actions/cache@0865c47f36e68161719c5b124609996bb5c40129 # pin@v3 + uses: actions/cache@9b0c1fce7a93df8e3bb8926b0d6e9d89e92f20a7 # pin@v3.0.11 with: path: ~/.vagrant.d/boxes/magmacore-VAGRANTSLASH-magma_trfserver key: vagrant-box-magma-trfserver-v20220722 @@ -142,15 +136,15 @@ jobs: export MAGMA_DEV_MEMORY_MB=9216 fab build_agw # Download to local and delete artifacts from remote - - uses: actions/download-artifact@v2 + - uses: actions/download-artifact@9782bd6a9848b53b110e712e20e42d89988822b7 # pin@v3.0.1 with: name: docker-build-orc8r-images path: ${{ env.AGW_ROOT }} - - uses: actions/download-artifact@v2 + - uses: actions/download-artifact@9782bd6a9848b53b110e712e20e42d89988822b7 # pin@v3.0.1 with: name: docker-build-feg-images path: ${{ env.AGW_ROOT }} - - uses: geekyeggo/delete-artifact@v1 + - uses: geekyeggo/delete-artifact@54ab544f12cdb7b71613a16a2b5a37a9ade990af # pin@v2.0.0 with: name: | docker-build-orc8r-images | @@ -166,7 +160,6 @@ jobs: vagrant ssh magma -c 'cat magma/lte/gateway/image.tar | docker load' rm image.tar done - mkdir -p /tmp/fed_integ_test-images - name: Run the federated integ test run: | cd lte/gateway @@ -180,45 +173,46 @@ jobs: 
fab get_test_summaries:dst_path="test-results" ls -R - name: Upload test results - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # pin@v3 if: always() with: name: test-results path: lte/gateway/test-results/**/*.xml - name: Get test logs - if: failure() + if: always() run: | cd lte/gateway fab get_test_logs:dst_path=./logs.tar.gz - name: Upload test logs - uses: actions/upload-artifact@v2 - if: failure() + uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # pin@v3 + if: always() with: name: test-logs path: lte/gateway/logs.tar.gz - name: Publish Unit Test Results if: always() - uses: EnricoMi/publish-unit-test-result-action/composite@7377632048da85434c30810c38353542d3162dc4 # pin@v1 + uses: EnricoMi/publish-unit-test-result-action/composite@46ab8d49369d898e381a607119161771bc65c2a6 # pin@v2.2.0 with: - files: lte/gateway/test-results/**/*.xml + check_name: FEG integration test results + junit_files: lte/gateway/test-results/**/*.xml check_run_annotations: all tests - name: Publish results to Firebase - if: always() && github.event.workflow_run.event == 'push' + if: always() && github.event_name == 'push' env: FIREBASE_SERVICE_CONFIG: ${{ secrets.FIREBASE_SERVICE_CONFIG }} - REPORT_FILENAME: "feg_integ_test_${{ env.SHA }}.html" + REPORT_FILENAME: "feg_integ_test_${{ github.sha }}.html" run: | npm install -g xunit-viewer [ -d "lte/gateway/test-results/" ] && { xunit-viewer -r lte/gateway/test-results/ -o "$REPORT_FILENAME"; } [ -f "$REPORT_FILENAME" ] && { python ci-scripts/firebase_upload_file.py -f "$REPORT_FILENAME" -o out_url.txt; } [ -f "out_url.txt" ] && { URL=$(cat out_url.txt); } - python ci-scripts/firebase_publish_report.py -id ${{ env.SHA }} --verdict ${{ job.status }} --run_id ${{ github.run_id }} feg --url $URL + python ci-scripts/firebase_publish_report.py -id ${{ github.sha }} --verdict ${{ job.status }} --run_id ${{ github.run_id }} feg --url $URL - name: Notify failure to 
slack - if: failure() && github.event.workflow_run.event == 'push' + if: failure() && github.event_name == 'push' env: SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} SLACK_USERNAME: "FEG integ test" SLACK_AVATAR: ":boom:" uses: Ilshidur/action-slack@2.1.0 with: - args: "Federated integration test test failed on [${{ env.SHA }}](${{github.event.repository.owner.html_url}}/magma/commit/${{ env.SHA }}): ${{ steps.commit.outputs.title}}" + args: "Federated integration test test failed on [${{ github.sha }}](${{github.event.repository.owner.html_url}}/magma/commit/${{ github.sha }}): ${{ steps.commit.outputs.title}}" diff --git a/.github/workflows/feg-workflow.yml b/.github/workflows/feg-workflow.yml index cc52be6408b3..5976eba9314a 100644 --- a/.github/workflows/feg-workflow.yml +++ b/.github/workflows/feg-workflow.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -name: feg-workflow +name: FeG Lint & Test on: push: @@ -33,9 +33,9 @@ jobs: should_not_skip: ${{ steps.changes.outputs.filesChanged }} steps: # Need to get git on push event - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 if: github.event_name == 'push' - - uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721 # pin@v2 + - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 # pin@v2.11.1 id: changes with: filters: | @@ -61,15 +61,15 @@ jobs: GO111MODULE: on MAGMA_ROOT: "${{ github.workspace }}" steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 - - uses: actions/setup-go@b22fbbc2921299758641fab08929b4ac52b32923 # pin@v3 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 + - uses: actions/setup-go@c4a742cab115ed795e34d4513e2cf7d472deb55f # pin@v3.3.1 with: go-version: '1.18.3' - run: go version - name: Run golang_before_install.sh script run: 
./.github/workflows/scripts/golang_before_install.sh - name: Run go mod download with retry - uses: nick-invision/retry@e88a9994b039653512d697de1bce46b00bfe11b5 # pin@v2 + uses: nick-fields/retry@48bc5d4b1ce856c44a7766114e4da81c980a8a92 # pin@v2.8.2 if: always() id: feg-lint-init with: @@ -87,7 +87,7 @@ jobs: run: | cd ${MAGMA_ROOT}/feg/gateway make -C ${MAGMA_ROOT}/feg/gateway cover - - uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # pin@v2 + - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 # pin@v3.1.1 if: always() id: feg-lint-codecov with: diff --git a/.github/workflows/fossa-workflow.yml b/.github/workflows/fossa-workflow.yml index 426bf773c6a9..82ba96a1b1e6 100644 --- a/.github/workflows/fossa-workflow.yml +++ b/.github/workflows/fossa-workflow.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -name: fossa-workflow +name: Orc8r Analyze With Fossa on: push: @@ -31,7 +31,7 @@ jobs: MAGMA_ROOT: "${{ github.workspace }}" runs-on: ubuntu-latest steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Download fossa analyze script # yamllint disable rule:line-length run: | diff --git a/.github/workflows/gcc-problems.yml b/.github/workflows/gcc-problems.yml index 7ec82771d24a..9d812547bb61 100644 --- a/.github/workflows/gcc-problems.yml +++ b/.github/workflows/gcc-problems.yml @@ -19,15 +19,16 @@ # - Option: by improving our build system and enabling faster build-all-targets ####### -name: "GCC Warnings & Errors" +name: AGW Generate GCC Warnings & Errors on: push: branches: - master - 'v1.*' env: - BAZEL_BASE_IMAGE: "ghcr.io/magma/magma/bazel-base:sha-4a878d8" - CACHE_KEY: bazel-base-image + BAZEL_BASE_IMAGE: "ghcr.io/magma/magma/bazel-base:latest" + # see GH14041 + CACHE_KEY: bazel-base-image-sha-c4de1e5 REMOTE_DOWNLOAD_OPTIMIZATION: 
true concurrency: @@ -43,9 +44,9 @@ jobs: should_not_skip: ${{ steps.changes.outputs.filesChanged }} steps: # Need to get git on push event - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 if: github.event_name == 'push' - - uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721 # pin@v2 + - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 # pin@v2.11.1 id: changes with: filters: | @@ -64,7 +65,7 @@ jobs: steps: - name: Check Out Repo # This is necessary for overlays into the Docker container below. - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Maximize build space uses: ./.github/workflows/composite/maximize-build-space - name: Setup Bazel Base Image @@ -135,7 +136,7 @@ jobs: if: failure() && github.event_name == 'push' uses: rtCamp/action-slack-notify@v2.2.0 env: - SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_BAZEL_CI }} + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} SLACK_TITLE: "Build all Bazelified C/C++ targets" SLACK_USERNAME: "GCC Warnings & Errors" SLACK_ICON_EMOJI: ":boom:" diff --git a/.github/workflows/golang-build-test.yml b/.github/workflows/golang-build-test.yml index b190e09fa930..7b58ee63c77d 100644 --- a/.github/workflows/golang-build-test.yml +++ b/.github/workflows/golang-build-test.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-name: Golang Build & Test +name: AGW Build & Test Experimental Go Code on: push: branches: @@ -34,7 +34,7 @@ jobs: env: MAGMA_ROOT: "${{ github.workspace }}" steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Run golang_check_version.sh script run: ./.github/workflows/scripts/golang_check_version.sh @@ -44,9 +44,9 @@ jobs: should_not_skip: ${{ steps.changes.outputs.filesChanged }} steps: # Need to get git on push event - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 if: github.event_name == 'push' - - uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721 # pin@v2 + - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 # pin@v2.11.1 id: changes with: filters: | @@ -73,11 +73,11 @@ jobs: runs-on: ${{ matrix.os }} steps: - name: Install Go - uses: actions/setup-go@b22fbbc2921299758641fab08929b4ac52b32923 # pin@v3 + uses: actions/setup-go@c4a742cab115ed795e34d4513e2cf7d472deb55f # pin@v3.3.1 with: go-version: ${{ matrix.go-version }} - name: Checkout code - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Test run: | cd src/go/ @@ -110,7 +110,7 @@ jobs: runs-on: ${{ matrix.os }} steps: - name: Install Go - uses: actions/setup-go@b22fbbc2921299758641fab08929b4ac52b32923 # pin@v3 + uses: actions/setup-go@c4a742cab115ed795e34d4513e2cf7d472deb55f # pin@v3.3.1 with: go-version: ${{ matrix.go-version }} - name: Setup gotestsum @@ -118,7 +118,7 @@ jobs: with: gotestsum_version: 1.7.0 - name: Checkout code - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Test run: | cd src/go/ @@ -156,17 +156,17 @@ jobs: runs-on: ubuntu-latest steps: - name: Install 
Go - uses: actions/setup-go@b22fbbc2921299758641fab08929b4ac52b32923 # pin@v3 + uses: actions/setup-go@c4a742cab115ed795e34d4513e2cf7d472deb55f # pin@v3.3.1 with: go-version: ${{ matrix.go-version }} - name: Install QEMU - uses: docker/setup-qemu-action@27d0a4f181a40b142cce983c5393082c365d1480 # pin@v1 + uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # pin@v2.1.0 - name: Setup gotestsum uses: autero1/action-gotestsum@2e48af62f5248bd3b014f598cd1aa69a01dd36e3 # pin@v1.0.0 with: gotestsum_version: 1.7.0 - name: Checkout code - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Run tests via qemu/binfmt run: | cd src/go/ @@ -206,17 +206,17 @@ jobs: runs-on: ubuntu-latest steps: - name: Install Go - uses: actions/setup-go@b22fbbc2921299758641fab08929b4ac52b32923 # pin@v3 + uses: actions/setup-go@c4a742cab115ed795e34d4513e2cf7d472deb55f # pin@v3.3.1 with: go-version: ${{ matrix.go-version }} - name: Checkout code - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Test run: | cd src/go/ go test -race -coverprofile=coverage.txt -covermode=atomic ./... - name: Codecov.io Upload - uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # pin@v2 + uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 # pin@v3.1.1 with: flags: src_go verbose: true diff --git a/.github/workflows/helm-chart-dependency-check.yml b/.github/workflows/helm-chart-dependency-check.yml index 7e56c5655ea4..56ef88b51eb1 100644 --- a/.github/workflows/helm-chart-dependency-check.yml +++ b/.github/workflows/helm-chart-dependency-check.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-name: "Check dependencies of helm charts" +name: Magma Check Helm Dependencies on: push: @@ -36,7 +36,7 @@ jobs: runs-on: ubuntu-latest name: Check dependency of helm chart ${{ matrix.charts[0] }} steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Check Orc8r run: | echo "DIGEST=$(cat $MAGMA_ROOT/orc8r/cloud/helm/orc8r/Chart.lock | grep digest | cut -d ":" -f 2-3 | xargs)" >> $GITHUB_ENV diff --git a/.github/workflows/helm-deploy-on-demand.yml b/.github/workflows/helm-deploy-on-demand.yml index c8e6fd6a277c..593bb38d00a3 100644 --- a/.github/workflows/helm-deploy-on-demand.yml +++ b/.github/workflows/helm-deploy-on-demand.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -name: helm-build-on-demand +name: Magma Build & Publish Helm Charts # Temporary on demand Job until we refactor helm build job in build-all on: @@ -26,7 +26,7 @@ jobs: ISSUE_NUMBER: "${{ github.event.number }}" runs-on: ubuntu-latest steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Launch build and publish script run: | orc8r/tools/helm/package.sh --deployment-type all diff --git a/.github/workflows/helm-promote.yml b/.github/workflows/helm-promote.yml new file mode 100644 index 000000000000..3f87e16ce193 --- /dev/null +++ b/.github/workflows/helm-promote.yml @@ -0,0 +1,49 @@ +# Copyright 2022 The Magma Authors. +# +# This source code is licensed under the BSD-style license found in the +# LICENSE file in the root directory of this source tree. +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +name: Magma Promote Helm Charts + +on: + workflow_dispatch: + inputs: + magma_version: + description: 'Magma version number' + required: true + +jobs: + helm-promote: + runs-on: ubuntu-latest + env: + MAGMA_VERSION: ${{ inputs.magma_version }} + MAGMA_ARTIFACTORY: https://artifactory.magmacore.org:443/artifactory + HELM_CHART_MUSEUM_TOKEN: ${{ secrets.HELM_CHART_MUSEUM_TOKEN }} + HELM_CHART_MUSEUM_USERNAME: ${{ secrets.HELM_CHART_MUSEUM_USERNAME }} + steps: + - uses: tspascoal/get-user-teams-membership@39b5264024b7c3bd7480de2f2c8d3076eed49ec5 # pin@v1.0.4 + name: Check if user has rights to promote + id: checkUserMember + with: + username: ${{ github.actor }} + team: 'approvers-ci' + GITHUB_TOKEN: ${{ secrets.github_token }} + - if: ${{ steps.checkUserMember.outputs.isTeamMember == 'false' }} + run: | + echo "User is not a member of the team" + exit 1 + - run: | + wget https://github.com/magma/magma/raw/master/orc8r/tools/helm/promote.sh + chmod 755 promote.sh + # Promote Helm charts + ./promote.sh orc8r-${MAGMA_VERSION}.tgz + ./promote.sh cwf-orc8r-${MAGMA_VERSION}.tgz + ./promote.sh feg-orc8r-${MAGMA_VERSION}.tgz + ./promote.sh lte-orc8r-${MAGMA_VERSION}.tgz + ./promote.sh domain-proxy-${MAGMA_VERSION}.tgz diff --git a/.github/workflows/insync-checkin.yml b/.github/workflows/insync-checkin.yml index 9c8068128f81..157ebf8b6408 100644 --- a/.github/workflows/insync-checkin.yml +++ b/.github/workflows/insync-checkin.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-name: insync-check +name: Orc8r Check Generated Files In Sync on: push: @@ -33,8 +33,8 @@ jobs: env: MAGMA_ROOT: "${{ github.workspace }}" steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 - - uses: actions/setup-python@7f80679172b057fc5e90d70d197929d454754a5a # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 + - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # pin@v4.3.0 with: python-version: '3.8.10' - name: Run build.py diff --git a/.github/workflows/lte-integ-test-bazel-magma-deb.yml b/.github/workflows/lte-integ-test-bazel-magma-deb.yml new file mode 100644 index 000000000000..f50a28759cf0 --- /dev/null +++ b/.github/workflows/lte-integ-test-bazel-magma-deb.yml @@ -0,0 +1,133 @@ +# Copyright 2022 The Magma Authors. +# +# This source code is licensed under the BSD-style license found in the +# LICENSE file in the root directory of this source tree. +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +name: AGW Build & Test LTE Integration With Bazel Debian Build + +on: + workflow_dispatch: null + push: + branches: + - master + - 'v1.*' + +env: + BAZEL_BASE_IMAGE: "ghcr.io/magma/magma/bazel-base:latest" + # see GH14041 + CACHE_KEY: bazel-base-image-sha-c4de1e5 + REMOTE_DOWNLOAD_OPTIMIZATION: true + +jobs: + lte-integ-test-bazel-magma-deb: + if: github.repository_owner == 'magma' || github.event_name == 'workflow_dispatch' + runs-on: macos-12 + steps: + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 + + - name: Cache magma-deb-box + uses: actions/cache@9b0c1fce7a93df8e3bb8926b0d6e9d89e92f20a7 # pin@v3.0.11 + with: + path: ~/.vagrant.d/boxes/ubuntu-VAGRANTSLASH-focal64 + key: vagrant-box-magma-deb-focal64-20220804.0.0 + - name: Cache magma-test-box + uses: actions/cache@9b0c1fce7a93df8e3bb8926b0d6e9d89e92f20a7 # pin@v3.0.11 + with: + path: ~/.vagrant.d/boxes/magmacore-VAGRANTSLASH-magma_test + key: vagrant-box-magma-test-v1.2.20221012 + - name: Cache magma-trfserver-box + uses: actions/cache@9b0c1fce7a93df8e3bb8926b0d6e9d89e92f20a7 # pin@v3.0.11 + with: + path: ~/.vagrant.d/boxes/magmacore-VAGRANTSLASH-magma_trfserver + key: vagrant-box-magma-trfserver-v20220722 + + - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # pin@v4.3.0 + with: + python-version: '3.8.10' + - name: Install pre requisites + run: | + pip3 install --upgrade pip + pip3 install ansible fabric3 jsonpickle requests PyYAML + vagrant plugin install vagrant-vbguest vagrant-disksize vagrant-reload + - name: Install docker + uses: docker-practice/actions-setup-docker@5d9a5f65f510c01ec5f0bd81d5c95768b1ec032a # pin@v1 + - name: Open up network interfaces for VM + run: | + sudo mkdir -p /etc/vbox/ + echo '* 192.168.0.0/16' | sudo tee /etc/vbox/networks.conf + echo '* 3001::/64' | sudo tee -a /etc/vbox/networks.conf + + - name: Build .deb packages + run: | + docker run \ + -v ${{ github.workspace }}:/workspaces/magma/ \ + -v ${{ github.workspace 
}}/lte/gateway/configs:/etc/magma \ + -i \ + ${{ env.BAZEL_BASE_IMAGE }} \ + bash -c \ + 'cd /workspaces/magma && \ + bazel/scripts/remote_cache_bazelrc_setup.sh "${{ env.CACHE_KEY }}" "${{ env.REMOTE_DOWNLOAD_OPTIMIZATION }}" "${{ secrets.BAZEL_REMOTE_PASSWORD }}" && \ + bazel build lte/gateway/release:sctpd_deb_pkg lte/gateway/release:magma_deb_pkg \ + --config=production \ + --profile=Bazel_build_package_profile && \ + mv /workspaces/magma/bazel-bin/lte/gateway/release/magma*.deb /workspaces/magma/' + - name: Delete all docker containers + run: | + docker system prune -f -a --volumes + + - name: Run the integ test + env: + MAGMA_DEV_CPUS: 3 + MAGMA_DEV_MEMORY_MB: 9216 + MAGMA_PACKAGE: magma_1.8.0_amd64.deb + run: | + cd lte/gateway + fab integ_test_deb_installation + + - name: Get test results + if: always() + run: | + cd lte/gateway + fab get_test_summaries:dst_path="test-results",sudo_tests=False,dev_vm_name="magma_deb" + ls -R + - name: Upload test results + uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # pin@v3 + if: always() + with: + name: test-results + path: lte/gateway/test-results/**/*.xml + - name: Publish Unit Test Results + if: always() + uses: EnricoMi/publish-unit-test-result-action/composite@46ab8d49369d898e381a607119161771bc65c2a6 # pin@v2.2.0 + with: + check_name: LTE Debian integration test results + junit_files: lte/gateway/test-results/**/*.xml + check_run_annotations: all tests + + - name: Get test logs + if: always() + run: | + cd lte/gateway + fab get_test_logs:gateway_host_name=magma_deb,dst_path=./logs.tar.gz + - name: Upload test logs + uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # pin@v3 + if: always() + with: + name: test-logs + path: lte/gateway/logs.tar.gz + + - name: Notify failure to slack + if: failure() && github.repository_owner == 'magma' + uses: Ilshidur/action-slack@689ad44a9c9092315abd286d0e3a9a74d31ab78a # pin@2.1.0 + env: + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} + 
SLACK_USERNAME: "Bazel Debian Integration Tests" + SLACK_AVATAR: ":boom:" + with: + args: "Bazel Debian LTE integration test failed in run: https://github.com/${{github.repository}}/actions/runs/${{github.run_id}}" diff --git a/.github/workflows/lte-integ-test-bazel.yml b/.github/workflows/lte-integ-test-bazel.yml index 6ba913bfd5db..5a8271b5df3b 100644 --- a/.github/workflows/lte-integ-test-bazel.yml +++ b/.github/workflows/lte-integ-test-bazel.yml @@ -9,17 +9,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -name: LTE integ test bazel +name: AGW Build & Test LTE Integration With Bazel Dev Build on: workflow_dispatch: null - workflow_run: - workflows: - - build-all + push: branches: - master - types: - - completed env: CACHE_KEY: magma-dev-vm @@ -29,24 +25,20 @@ jobs: lte-integ-test-bazel: if: github.repository_owner == 'magma' || github.event_name == 'workflow_dispatch' runs-on: macos-12 - env: - SHA: ${{ github.event.workflow_run.head_commit.id || github.sha }} steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 - with: - ref: ${{ env.SHA }} + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Cache magma-dev-box - uses: actions/cache@0865c47f36e68161719c5b124609996bb5c40129 # pin@v3 + uses: actions/cache@9b0c1fce7a93df8e3bb8926b0d6e9d89e92f20a7 # pin@v3.0.11 with: path: ~/.vagrant.d/boxes/magmacore-VAGRANTSLASH-magma_dev - key: vagrant-box-magma-dev-v1.2.20220801 + key: vagrant-box-magma-dev-v1.2.20221012 - name: Cache magma-test-box - uses: actions/cache@0865c47f36e68161719c5b124609996bb5c40129 # pin@v3 + uses: actions/cache@9b0c1fce7a93df8e3bb8926b0d6e9d89e92f20a7 # pin@v3.0.11 with: path: ~/.vagrant.d/boxes/magmacore-VAGRANTSLASH-magma_test - key: vagrant-box-magma-test + key: vagrant-box-magma-test-v1.2.20221012 - name: Cache magma-trfserver-box - uses: actions/cache@0865c47f36e68161719c5b124609996bb5c40129 # pin@v3 + uses: 
actions/cache@9b0c1fce7a93df8e3bb8926b0d6e9d89e92f20a7 # pin@v3.0.11 with: path: ~/.vagrant.d/boxes/magmacore-VAGRANTSLASH-magma_trfserver key: vagrant-box-magma-trfserver-v20220722 @@ -59,7 +51,7 @@ jobs: else echo "Vagrant cloud token is not configured. Skipping login." fi - - uses: actions/setup-python@7f80679172b057fc5e90d70d197929d454754a5a # pin@v2 + - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # pin@v4.3.0 with: python-version: '3.8.10' - name: Install pre requisites @@ -70,49 +62,36 @@ jobs: - name: Open up network interfaces for VM run: | sudo mkdir -p /etc/vbox/ - sudo touch /etc/vbox/networks.conf - sudo sh -c "echo '* 192.168.0.0/16' > /etc/vbox/networks.conf" - sudo sh -c "echo '* 3001::/64' >> /etc/vbox/networks.conf" - - name: Prepare the integ test + echo '* 192.168.0.0/16' | sudo tee /etc/vbox/networks.conf + echo '* 3001::/64' | sudo tee -a /etc/vbox/networks.conf + - name: Provision the magma-dev VM run: | cd lte/gateway export MAGMA_DEV_CPUS=3 export MAGMA_DEV_MEMORY_MB=9216 - fab bazel_integ_test_pre_build + fab provision_magma_dev_vm - name: Build all services and scripts with bazel run: | cd lte/gateway vagrant ssh -c 'cd ~/magma; bazel/scripts/remote_cache_bazelrc_setup.sh "${{ env.CACHE_KEY }}" "${{ env.REMOTE_DOWNLOAD_OPTIMIZATION }}" "${{ secrets.BAZEL_REMOTE_PASSWORD }}";' magma - vagrant ssh -c 'sudo sed -i "s@#precedence ::ffff:0:0/96 100@precedence ::ffff:0:0/96 100@" /etc/gai.conf;' magma - vagrant ssh -c 'cd ~/magma; bazel build --profile=bazel_profile_lte_integ_tests `bazel query "kind(.*_binary, //orc8r/... union //lte/... union //feg/...)"`;' magma - vagrant ssh -c 'sudo sed -i "s@precedence ::ffff:0:0/96 100@#precedence ::ffff:0:0/96 100@" /etc/gai.conf;' magma + REGEX='"service|util_script"' + QUERY="'attr(tags, "${REGEX}", kind(.*_binary, //orc8r/... union //lte/... union //feg/... 
except //lte/gateway/c/core:mme_oai))'" + vagrant ssh -c 'cd ~/magma; bazel build --profile=bazel_profile_lte_integ_tests `bazel query '"${QUERY}"'`;' magma - name: Linking bazel-built script executables to '/usr/local/bin/' run: | cd lte/gateway vagrant ssh -c 'cd ~/magma; bazel/scripts/link_scripts_for_bazel_integ_tests.sh;' magma - - name: Run the sudo tests - id: sudo_tests - run: | - cd lte/gateway - vagrant ssh -c 'cd ~/magma; bazel/scripts/run_sudo_tests.sh --retry-on-failure --retry-attempts 1;' magma - name: Run the integ test - if: ${{ success() || steps.sudo_tests.conclusion == 'failure' }} run: | cd lte/gateway export MAGMA_DEV_CPUS=3 export MAGMA_DEV_MEMORY_MB=9216 fab bazel_integ_test_post_build - - name: Publish bazel profile - uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # pin@v3 - if: always() - with: - name: Bazel profile lte integ tests - path: bazel_profile_lte_integ_tests - name: Get test results if: always() run: | cd lte/gateway - fab get_test_summaries:dst_path="test-results" + fab get_test_summaries:dst_path="test-results",sudo_tests=False ls -R - name: Upload test results uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # pin@v3 @@ -121,27 +100,34 @@ jobs: name: test-results path: lte/gateway/test-results/**/*.xml - name: Get test logs - if: failure() + if: always() run: | cd lte/gateway fab get_test_logs:dst_path=./logs.tar.gz - name: Upload test logs uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # pin@v3 - if: failure() + if: always() with: name: test-logs path: lte/gateway/logs.tar.gz - name: Publish Unit Test Results if: always() - uses: EnricoMi/publish-unit-test-result-action/composite@7377632048da85434c30810c38353542d3162dc4 # pin@v1 + uses: EnricoMi/publish-unit-test-result-action/composite@46ab8d49369d898e381a607119161771bc65c2a6 # pin@v2.2.0 with: - files: lte/gateway/test-results/**/*.xml + check_name: LTE Bazel integration test results + junit_files:
lte/gateway/test-results/**/*.xml check_run_annotations: all tests + - name: Publish bazel profile + uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # pin@v3 + if: always() + with: + name: Bazel profile lte integ tests + path: bazel_profile_lte_integ_tests - name: Notify failure to slack if: failure() && github.repository_owner == 'magma' uses: Ilshidur/action-slack@689ad44a9c9092315abd286d0e3a9a74d31ab78a # pin@2.1.0 env: - SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_BAZEL_CI }} + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} SLACK_USERNAME: "LTE integ tests bazel" SLACK_AVATAR: ":boom:" with: diff --git a/.github/workflows/lte-integ-test-containerized.yml b/.github/workflows/lte-integ-test-containerized.yml new file mode 100644 index 000000000000..b118387515da --- /dev/null +++ b/.github/workflows/lte-integ-test-containerized.yml @@ -0,0 +1,80 @@ +# Copyright 2022 The Magma Authors. +# +# This source code is licensed under the BSD-style license found in the +# LICENSE file in the root directory of this source tree. +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +name: AGW Test LTE Integration With Make Containerized Build + +on: + workflow_dispatch: null + workflow_run: + workflows: + - build-all + branches: + - master + types: + - completed + +jobs: + lte-integ-test-containerized: + if: (github.event.workflow_run.conclusion == 'success' && github.repository_owner == 'magma') || github.event_name == 'workflow_dispatch' + runs-on: macos-12 + strategy: + fail-fast: false + matrix: + test_targets: [ "precommit", "extended_tests" ] + steps: + - name: Cache magma-dev-box + uses: actions/cache@9b0c1fce7a93df8e3bb8926b0d6e9d89e92f20a7 # pin@v3.0.11 + with: + path: ~/.vagrant.d/boxes/magmacore-VAGRANTSLASH-magma_dev + key: vagrant-box-magma-dev-v1.2.20221012 + - name: Cache magma-test-box + uses: actions/cache@9b0c1fce7a93df8e3bb8926b0d6e9d89e92f20a7 # pin@v3.0.11 + with: + path: ~/.vagrant.d/boxes/magmacore-VAGRANTSLASH-magma_test + key: vagrant-box-magma-test-v1.2.20221012 + - name: Cache magma-trfserver-box + uses: actions/cache@9b0c1fce7a93df8e3bb8926b0d6e9d89e92f20a7 # pin@v3.0.11 + with: + path: ~/.vagrant.d/boxes/magmacore-VAGRANTSLASH-magma_trfserver + key: vagrant-box-magma-trfserver-v20220722 + - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # pin@v4.3.0 + with: + python-version: '3.8.10' + - name: Install pre requisites + run: | + pip3 install --upgrade pip + pip3 install ansible fabric3 jsonpickle requests PyYAML + vagrant plugin install vagrant-vbguest vagrant-disksize vagrant-reload + - name: Open up network interfaces for VM + run: | + sudo mkdir -p /etc/vbox/ + echo '* 192.168.0.0/16' | sudo tee /etc/vbox/networks.conf + echo '* 3001::/64' | sudo tee -a /etc/vbox/networks.conf + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 + - name: Run the integration test + env: + DOCKER_REGISTRY: docker-ci.artifactory.magmacore.org/ + MAGMA_DEV_CPUS: 3 + MAGMA_DEV_MEMORY_MB: 9216 + working-directory: lte/gateway + run: | + fab --show=debug --set 
DOCKER_REGISTRY=${DOCKER_REGISTRY} integ_test_containerized:test_mode=${{ matrix.test_targets }} + - name: Get test results + if: always() + working-directory: lte/gateway + run: | + fab get_test_summaries:dst_path="test-results",sudo_tests=False,dev_vm_name="magma_deb" + - name: Upload test results + uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # pin@v3 + if: always() + with: + name: test-results-${{ matrix.test_targets }} + path: lte/gateway/test-results/**/*.xml diff --git a/.github/workflows/lte-integ-test-magma-deb.yml b/.github/workflows/lte-integ-test-magma-deb.yml new file mode 100644 index 000000000000..7bb14a87df0a --- /dev/null +++ b/.github/workflows/lte-integ-test-magma-deb.yml @@ -0,0 +1,110 @@ +# Copyright 2022 The Magma Authors. +# +# This source code is licensed under the BSD-style license found in the +# LICENSE file in the root directory of this source tree. +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +name: AGW Test LTE Integration With Make Debian Build + +on: + workflow_dispatch: null + repository_dispatch: + types: [build-all-artifact] + +jobs: + lte-integ-test-magma-deb: + if: github.repository_owner == 'magma' || github.event_name == 'workflow_dispatch' + runs-on: macos-12 + steps: + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 + - name: Cache magma-deb-box + uses: actions/cache@9b0c1fce7a93df8e3bb8926b0d6e9d89e92f20a7 # pin@v3.0.11 + with: + path: ~/.vagrant.d/boxes/ubuntu-VAGRANTSLASH-focal64 + key: vagrant-box-magma-deb-focal64-20220804.0.0 + - name: Cache magma-test-box + uses: actions/cache@9b0c1fce7a93df8e3bb8926b0d6e9d89e92f20a7 # pin@v3.0.11 + with: + path: ~/.vagrant.d/boxes/magmacore-VAGRANTSLASH-magma_test + key: vagrant-box-magma-test-v1.2.20221012 + - name: Cache magma-trfserver-box + uses: actions/cache@9b0c1fce7a93df8e3bb8926b0d6e9d89e92f20a7 # pin@v3.0.11 + with: + path: ~/.vagrant.d/boxes/magmacore-VAGRANTSLASH-magma_trfserver + key: vagrant-box-magma-trfserver-v20220722 + - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # pin@v4.3.0 + with: + python-version: '3.8.10' + - name: Install pre requisites + run: | + pip3 install --upgrade pip + pip3 install ansible fabric3 jsonpickle requests PyYAML firebase_admin + vagrant plugin install vagrant-vbguest vagrant-disksize vagrant-reload + - name: Open up network interfaces for VM + run: | + sudo mkdir -p /etc/vbox/ + echo '* 192.168.0.0/16' | sudo tee /etc/vbox/networks.conf + echo '* 3001::/64' | sudo tee -a /etc/vbox/networks.conf + - name: Run the integ test + env: + MAGMA_DEV_CPUS: 3 + MAGMA_DEV_MEMORY_MB: 9216 + MAGMA_PACKAGE: ${{ github.event.client_payload.artifact }} + working-directory: 'lte/gateway/' + run: | + fab integ_test_deb_installation + - name: Get test results + if: always() + working-directory: 'lte/gateway/' + run: | + fab get_test_summaries:dst_path="test-results",sudo_tests=False,dev_vm_name="magma_deb" + ls -R + - 
name: Upload test results + uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # pin@v3 + if: always() + with: + name: test-results + path: lte/gateway/test-results/**/*.xml + - name: Publish Unit Test Results + if: always() + uses: EnricoMi/publish-unit-test-result-action/composite@46ab8d49369d898e381a607119161771bc65c2a6 # pin@v2.2.0 + with: + check_name: LTE Debian integration test results + junit_files: lte/gateway/test-results/**/*.xml + check_run_annotations: all tests + - name: Get test logs + if: always() + working-directory: 'lte/gateway/' + run: | + fab get_test_logs:gateway_host_name=magma_deb,dst_path=./logs.tar.gz + - name: Upload test logs + uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # pin@v3 + if: always() + with: + name: test-logs + path: lte/gateway/logs.tar.gz + - name: Publish results to Firebase + if: always() && github.event_name == 'repository_dispatch' + env: + FIREBASE_SERVICE_CONFIG: ${{ secrets.FIREBASE_SERVICE_CONFIG }} + REPORT_FILENAME: "lte_integ_test_magma_deb_${{ github.sha }}.html" + run: | + npm install -g xunit-viewer + [ -d "lte/gateway/test-results/" ] && { xunit-viewer -r lte/gateway/test-results/ -o "$REPORT_FILENAME"; } + [ -f "$REPORT_FILENAME" ] && { python ci-scripts/firebase_upload_file.py -f "$REPORT_FILENAME" -o out_url.txt; } + [ -f "out_url.txt" ] && { URL=$(cat out_url.txt); } + python ci-scripts/firebase_publish_report.py -id ${{ github.sha }} --verdict ${{ job.status }} --run_id ${{ github.run_id }} make_debian_lte_integ_test --url $URL + - name: Notify failure to slack + if: failure() && github.repository_owner == 'magma' + uses: Ilshidur/action-slack@689ad44a9c9092315abd286d0e3a9a74d31ab78a # pin@2.1.0 + env: + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} + SLACK_USERNAME: "LTE Debian integration test" + SLACK_AVATAR: ":boom:" + with: + args: "LTE Debian integration test failed on [${{ github.sha }}](${{github.event.repository.owner.html_url}}/magma/commit/${{ 
github.sha }}): ${{ steps.commit.outputs.title}}" diff --git a/.github/workflows/lte-integ-test.yml b/.github/workflows/lte-integ-test.yml index def198cfaa16..38ec6cf03587 100644 --- a/.github/workflows/lte-integ-test.yml +++ b/.github/workflows/lte-integ-test.yml @@ -9,41 +9,33 @@ # See the License for the specific language governing permissions and # limitations under the License. -name: LTE integ test +name: AGW Build & Test LTE Integration With Make Dev Build on: workflow_dispatch: null - workflow_run: - workflows: - - build-all + push: branches: - master - 'v1.*' - types: - - completed jobs: lte-integ-test: if: github.repository_owner == 'magma' || github.event_name == 'workflow_dispatch' runs-on: macos-12 - env: - SHA: ${{ github.event.workflow_run.head_commit.id || github.sha }} steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 - with: - ref: ${{ env.SHA }} + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: Cache magma-dev-box - uses: actions/cache@0865c47f36e68161719c5b124609996bb5c40129 # pin@v3 + uses: actions/cache@9b0c1fce7a93df8e3bb8926b0d6e9d89e92f20a7 # pin@v3.0.11 with: path: ~/.vagrant.d/boxes/magmacore-VAGRANTSLASH-magma_dev - key: vagrant-box-magma-dev-v1.2.20220801 + key: vagrant-box-magma-dev-v1.2.20221012 - name: Cache magma-test-box - uses: actions/cache@0865c47f36e68161719c5b124609996bb5c40129 # pin@v3 + uses: actions/cache@9b0c1fce7a93df8e3bb8926b0d6e9d89e92f20a7 # pin@v3.0.11 with: path: ~/.vagrant.d/boxes/magmacore-VAGRANTSLASH-magma_test - key: vagrant-box-magma-test + key: vagrant-box-magma-test-v1.2.20221012 - name: Cache magma-trfserver-box - uses: actions/cache@0865c47f36e68161719c5b124609996bb5c40129 # pin@v3 + uses: actions/cache@9b0c1fce7a93df8e3bb8926b0d6e9d89e92f20a7 # pin@v3.0.11 with: path: ~/.vagrant.d/boxes/magmacore-VAGRANTSLASH-magma_trfserver key: vagrant-box-magma-trfserver-v20220722 @@ -56,7 +48,7 @@ jobs: else echo "Vagrant cloud token is not 
configured. Skipping login." fi - - uses: actions/setup-python@7f80679172b057fc5e90d70d197929d454754a5a # pin@v2 + - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # pin@v4.3.0 with: python-version: '3.8.10' - name: Install pre requisites @@ -89,39 +81,40 @@ jobs: name: test-results path: lte/gateway/test-results/**/*.xml - name: Get test logs - if: failure() + if: always() run: | cd lte/gateway fab get_test_logs:dst_path=./logs.tar.gz - name: Upload test logs uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # pin@v3 - if: failure() + if: always() with: name: test-logs path: lte/gateway/logs.tar.gz - name: Publish Unit Test Results if: always() - uses: EnricoMi/publish-unit-test-result-action/composite@7377632048da85434c30810c38353542d3162dc4 # pin@v1 + uses: EnricoMi/publish-unit-test-result-action/composite@46ab8d49369d898e381a607119161771bc65c2a6 # pin@v2.2.0 with: - files: lte/gateway/test-results/**/*.xml + check_name: LTE integration test results + junit_files: lte/gateway/test-results/**/*.xml check_run_annotations: all tests - name: Publish results to Firebase - if: always() && github.event.workflow_run.event == 'push' + if: always() && github.event_name == 'push' env: FIREBASE_SERVICE_CONFIG: ${{ secrets.FIREBASE_SERVICE_CONFIG }} - REPORT_FILENAME: "lte_integ_test_${{ env.SHA }}.html" + REPORT_FILENAME: "lte_integ_test_${{ github.sha }}.html" run: | npm install -g xunit-viewer [ -d "lte/gateway/test-results/" ] && { xunit-viewer -r lte/gateway/test-results/ -o "$REPORT_FILENAME"; } [ -f "$REPORT_FILENAME" ] && { python ci-scripts/firebase_upload_file.py -f "$REPORT_FILENAME" -o out_url.txt; } [ -f "out_url.txt" ] && { URL=$(cat out_url.txt); } - python ci-scripts/firebase_publish_report.py -id ${{ env.SHA }} --verdict ${{ job.status }} --run_id ${{ github.run_id }} lte --url $URL + python ci-scripts/firebase_publish_report.py -id ${{ github.sha }} --verdict ${{ job.status }} --run_id ${{ github.run_id }} lte 
--url $URL - name: Notify failure to slack - if: failure() && github.event.workflow_run.event == 'push' + if: failure() && github.event_name == 'push' env: SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} SLACK_USERNAME: "LTE integ test" SLACK_AVATAR: ":boom:" uses: Ilshidur/action-slack@689ad44a9c9092315abd286d0e3a9a74d31ab78a # pin@2.1.0 with: - args: "LTE integration test test failed on [${{ env.SHA }}](${{github.event.repository.owner.html_url}}/magma/commit/${{ env.SHA }}): ${{ steps.commit.outputs.title}}" + args: "LTE integration test test failed on [${{ github.sha }}](${{github.event.repository.owner.html_url}}/magma/commit/${{ github.sha }}): ${{ steps.commit.outputs.title}}" diff --git a/.github/workflows/nms-workflow.yml b/.github/workflows/nms-workflow.yml index 005cd134faa2..d574990173b7 100644 --- a/.github/workflows/nms-workflow.yml +++ b/.github/workflows/nms-workflow.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-name: nms-workflow +name: NMS Lint & Test on: push: @@ -36,9 +36,9 @@ jobs: should_not_skip: ${{ steps.changes.outputs.filesChanged }} steps: # Need to get git on push event - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 if: github.event_name == 'push' - - uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721 # pin@v2 + - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 # pin@v2.11.1 id: changes with: filters: | @@ -63,8 +63,8 @@ jobs: run: working-directory: "${{ github.workspace }}/nms" steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 + - uses: actions/setup-node@8c91899e586c5b171469028077307d293428b516 # pin@v3.5.1 with: node-version: 16 - name: install yarn @@ -100,8 +100,8 @@ jobs: env: MAGMA_ROOT: "${{ github.workspace }}" steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 - - uses: actions/setup-node@1f8c6b94b26d0feae1e387ca63ccbdc44d27b561 # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 + - uses: actions/setup-node@8c91899e586c5b171469028077307d293428b516 # pin@v3.5.1 with: node-version: 16 - name: apt install yarn @@ -147,7 +147,7 @@ jobs: env: MAGMA_ROOT: "${{ github.workspace }}" steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - uses: borales/actions-yarn@d8ce577a6f5d99a459fc7fdf2a86844617e353e4 # pin@v3.0.0 with: cmd: install # will run `yarn install` command @@ -186,7 +186,7 @@ jobs: PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: "true" PUPPETEER_EXECUTABLE_PATH: "/usr/bin/google-chrome-stable" steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: 
actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 - name: apt install yarn run: | cd ${MAGMA_ROOT}/nms diff --git a/.github/workflows/pr_bot.yml b/.github/workflows/pr_bot.yml index d658f1aa2a99..088af47b0650 100644 --- a/.github/workflows/pr_bot.yml +++ b/.github/workflows/pr_bot.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -name: PR Hello +name: PR Generate Hello on: # Use pull_request_target to gain write permissions. # Ref: https://github.blog/2020-08-03-github-actions-improvements-for-fork-and-pull-request-workflows/ @@ -28,7 +28,7 @@ jobs: var msg = `Thanks for opening a PR! :100: [A couple initial guidelines](https://github.com/magma/magma/wiki/Contributing-Code#commit-and-pull-request-guidelines) - - All commits must be signed off. This is [enforced by \`DCO check\`](https://github.com/magma/magma/blob/master/.github/workflows/dco-check.yml). + - All commits must be signed off. This is [enforced by \`PR DCO check\`](https://github.com/magma/magma/blob/master/.github/workflows/dco-check.yml). - All PR titles must follow the semantic commits format. This is [enforced by \`Semantic PR\`](https://github.com/magma/magma/blob/master/.github/workflows/semantic-pr.yml). ### Howto diff --git a/.github/workflows/python-workflow.yml b/.github/workflows/python-workflow.yml index eef9f21e5f03..e39c101e491d 100644 --- a/.github/workflows/python-workflow.yml +++ b/.github/workflows/python-workflow.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-name: Python Format Check +name: AGW Build & Format Python on: push: branches: @@ -36,9 +36,9 @@ jobs: files_changed: ${{ steps.changes.outputs.filesChanged_files }} steps: # Need to get git on push event - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 if: github.event_name == 'push' - - uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721 # pin@v2 + - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 # pin@v2.11.1 id: changes with: filters: | @@ -65,11 +65,11 @@ jobs: name: Python Format Check runs-on: ubuntu-latest steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 with: fetch-depth: 0 - name: Build the python-precommit Docker base image - uses: docker/build-push-action@ac9327eae2b366085ac7f6a2d02df8aa8ead720a # pin@v2 + uses: docker/build-push-action@c56af957549030174b10d6867f20e78cfd7debc5 # pin@v3.2.0 with: context: . file: ./lte/gateway/docker/python-precommit/Dockerfile diff --git a/.github/workflows/reviewdog-workflow.yml b/.github/workflows/reviewdog-workflow.yml index 760c636a809b..880cddd4a42c 100644 --- a/.github/workflows/reviewdog-workflow.yml +++ b/.github/workflows/reviewdog-workflow.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-name: reviewdog-workflow +name: PR Lint Reviewdog on: pull_request_target: types: @@ -35,9 +35,9 @@ jobs: changed_terraform: ${{ steps.changes.outputs.terraform }} steps: # Need to get git on push event - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 if: github.event_name == 'push' - - uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721 # pin@v2 + - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 # pin@v2.11.1 id: changes with: filters: | @@ -69,7 +69,7 @@ jobs: ## runs-on: ubuntu-latest steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 with: ref: ${{ github.event.pull_request.head.ref }} repository: ${{ github.event.pull_request.head.repo.full_name }} @@ -94,13 +94,13 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out code into the Go module directory - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 with: ref: ${{ github.event.pull_request.head.ref }} repository: ${{ github.event.pull_request.head.repo.full_name }} fetch-depth: 0 - name: golangci-lint - uses: reviewdog/action-golangci-lint@1530051a4d9af7e1c94afb2ea38fe7ba13e180ee # pin@v2 + uses: reviewdog/action-golangci-lint@53f8eabb87b40b1a2c63ec75b0d418bd0f4aa919 # pin@v2.2.2 with: golangci_lint_flags: '--config=../../.golangci.yml' reporter: github-pr-review @@ -111,7 +111,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out code - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 with: ref: ${{ github.event.pull_request.head.ref }} repository: ${{ github.event.pull_request.head.repo.full_name }} @@ -132,15 +132,15 @@ jobs: runs-on: 
ubuntu-latest steps: - name: Check out code. - uses: actions/checkout@v2 + uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 with: ref: ${{ github.event.pull_request.head.ref }} repository: ${{ github.event.pull_request.head.repo.full_name }} fetch-depth: 0 - name: setup node - uses: actions/setup-node@v2 + uses: actions/setup-node@8c91899e586c5b171469028077307d293428b516 # pin@v3.5.1 with: - node-version: '16' + node-version: 16 - name: install dependencies run: yarn install working-directory: 'nms/' @@ -157,7 +157,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out code. - uses: actions/checkout@v2 + uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 with: ref: ${{ github.event.pull_request.head.ref }} repository: ${{ github.event.pull_request.head.repo.full_name }} @@ -176,7 +176,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out code. - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 with: ref: ${{ github.event.pull_request.head.ref }} repository: ${{ github.event.pull_request.head.repo.full_name }} @@ -198,7 +198,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out code. 
- uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 with: ref: ${{ github.event.pull_request.head.ref }} repository: ${{ github.event.pull_request.head.repo.full_name }} @@ -214,7 +214,7 @@ jobs: name: shellcheck runs-on: ubuntu-latest steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 with: ref: ${{ github.event.pull_request.head.ref }} repository: ${{ github.event.pull_request.head.repo.full_name }} @@ -233,7 +233,7 @@ jobs: name: tflint runs-on: ubuntu-latest steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 with: ref: ${{ github.event.pull_request.head.ref }} repository: ${{ github.event.pull_request.head.repo.full_name }} @@ -253,7 +253,7 @@ jobs: name: wemake-python-styleguide runs-on: ubuntu-latest steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 with: ref: ${{ github.event.pull_request.head.ref }} repository: ${{ github.event.pull_request.head.repo.full_name }} @@ -270,7 +270,7 @@ jobs: name: yamllint runs-on: ubuntu-latest steps: - - uses: actions/checkout@7884fcad6b5d53d10323aee724dc68d8b9096a2e # pin@v2 + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 with: ref: ${{ github.event.pull_request.head.ref }} repository: ${{ github.event.pull_request.head.repo.full_name }} diff --git a/.github/workflows/semantic-pr.yml b/.github/workflows/semantic-pr.yml index 554d1769e4cf..3911f8a63c8d 100644 --- a/.github/workflows/semantic-pr.yml +++ b/.github/workflows/semantic-pr.yml @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-name: "Check for semantic PR title" +name: PR Check Title Or Commit Message on: # Semantic PR module only works with pull_request_target @@ -50,7 +50,7 @@ jobs: needs: check-reverted-pr if: ${{ needs.check-reverted-pr.outputs.is_reverted_pr == 'false' }} steps: - - uses: amannn/action-semantic-pull-request@db6e259b93f286e3416eef27aaae88935d16cf2e # pin@v3.4.0 + - uses: amannn/action-semantic-pull-request@01d5fd8a8ebb9aafe902c40c53f0f4744f7381eb # pin@v5.0.2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: @@ -144,7 +144,7 @@ jobs: steps: - if: needs.check-semantic-pr.result == 'failure' run: echo 'STATUS=:x:' >> $GITHUB_ENV - - uses: peter-evans/find-comment@1769778a0c5bd330272d749d12c036d65e70d39d # pin@v2.0.0 + - uses: peter-evans/find-comment@b657a70ff16d17651703a84bee1cb9ad9d2be2ea # pin@v2.0.1 id: fc with: issue-number: ${{ github.event.number }} diff --git a/.github/workflows/sudo-python-tests.yml b/.github/workflows/sudo-python-tests.yml new file mode 100644 index 000000000000..c091a3891b27 --- /dev/null +++ b/.github/workflows/sudo-python-tests.yml @@ -0,0 +1,106 @@ +# Copyright 2022 The Magma Authors. +# +# This source code is licensed under the BSD-style license found in the +# LICENSE file in the root directory of this source tree. +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +name: AGW Test Sudo Python + +on: + workflow_dispatch: null + push: + branches: + - master + +env: + CACHE_KEY: magma-dev-vm + REMOTE_DOWNLOAD_OPTIMIZATION: false + +jobs: + sudo-python-tests: + if: github.repository_owner == 'magma' || github.event_name == 'workflow_dispatch' + runs-on: macos-12 + steps: + - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # pin@v3.1.0 + - name: Cache magma-dev-box + uses: actions/cache@9b0c1fce7a93df8e3bb8926b0d6e9d89e92f20a7 # pin@v3.0.11 + with: + path: ~/.vagrant.d/boxes/magmacore-VAGRANTSLASH-magma_dev + key: vagrant-box-magma-dev-v1.2.20221012 + - name: Log in to vagrant cloud + run: | + if [[ -n "${{ secrets.VAGRANT_TOKEN }}" ]] + then + echo "Logging in to vagrant cloud to mitigate rate limiting." + vagrant cloud auth login --token "${{ secrets.VAGRANT_TOKEN }}" + else + echo "Vagrant cloud token is not configured. Skipping login." + fi + - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # pin@v4.3.0 + with: + python-version: '3.8.10' + - name: Install pre requisites + run: | + pip3 install --upgrade pip + pip3 install ansible fabric3 jsonpickle requests PyYAML firebase_admin + vagrant plugin install vagrant-vbguest vagrant-disksize vagrant-reload + - name: Open up network interfaces for VM + run: | + sudo mkdir -p /etc/vbox/ + echo '* 192.168.0.0/16' | sudo tee /etc/vbox/networks.conf + echo '* 3001::/64' | sudo tee -a /etc/vbox/networks.conf + - name: Provision the magma-dev VM + run: | + cd lte/gateway + export MAGMA_DEV_CPUS=3 + export MAGMA_DEV_MEMORY_MB=9216 + fab provision_magma_dev_vm + - name: Run the sudo python tests + run: | + cd lte/gateway + vagrant ssh -c 'cd ~/magma; bazel/scripts/remote_cache_bazelrc_setup.sh "${{ env.CACHE_KEY }}" "${{ env.REMOTE_DOWNLOAD_OPTIMIZATION }}" "${{ secrets.BAZEL_REMOTE_PASSWORD }}";' magma + vagrant ssh -c 'cd ~/magma; bazel/scripts/run_sudo_tests.sh --retry-on-failure --retry-attempts 1;' magma + - name: Get test results + if: always() + 
run: | + cd lte/gateway + fab get_test_summaries:dst_path="test-results",integration_tests=False + ls -R + - name: Upload test results + uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # pin@v3 + if: always() + with: + name: test-results + path: lte/gateway/test-results/**/*.xml + - name: Publish Unit Test Results + if: always() + uses: EnricoMi/publish-unit-test-result-action/composite@46ab8d49369d898e381a607119161771bc65c2a6 # pin@v2.2.0 + with: + check_name: Sudo Python test results + junit_files: lte/gateway/test-results/**/*.xml + check_run_annotations: all tests + - name: Publish results to Firebase + if: always() && github.event_name == 'push' + env: + FIREBASE_SERVICE_CONFIG: ${{ secrets.FIREBASE_SERVICE_CONFIG }} + REPORT_FILENAME: "sudo_python_tests_${{ github.sha }}.html" + run: | + npm install -g xunit-viewer + [ -d "lte/gateway/test-results/" ] && { xunit-viewer -r lte/gateway/test-results/ -o "$REPORT_FILENAME"; } + [ -f "$REPORT_FILENAME" ] && { python ci-scripts/firebase_upload_file.py -f "$REPORT_FILENAME" -o out_url.txt; } + [ -f "out_url.txt" ] && { URL=$(cat out_url.txt); } + python ci-scripts/firebase_publish_report.py -id ${{ github.sha }} --verdict ${{ job.status }} --run_id ${{ github.run_id }} sudo_python_tests --url $URL + - name: Notify failure to slack + if: failure() && github.repository_owner == 'magma' + uses: Ilshidur/action-slack@689ad44a9c9092315abd286d0e3a9a74d31ab78a # pin@2.1.0 + env: + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} + SLACK_USERNAME: "Sudo python tests" + SLACK_AVATAR: ":boom:" + with: + args: "Sudo python tests failed in run: https://github.com/${{github.repository}}/actions/runs/${{github.run_id}}" diff --git a/.github/workflows/unit-test-workflow.yml b/.github/workflows/unit-test-workflow.yml index 12239017140e..aa18142b3fb9 100644 --- a/.github/workflows/unit-test-workflow.yml +++ b/.github/workflows/unit-test-workflow.yml @@ -9,7 +9,7 @@ # See the License for the specific language 
governing permissions and # limitations under the License. -name: Cloud Unit Test Results +name: PR Generate Unit Test Results on: workflow_run: workflows: diff --git a/WORKSPACE.bazel b/WORKSPACE.bazel index dc313daec155..d0ca3609a8b4 100644 --- a/WORKSPACE.bazel +++ b/WORKSPACE.bazel @@ -18,6 +18,13 @@ http_archive( url = "https://github.com/bazelbuild/rules_python/releases/download/0.5.0/rules_python-0.5.0.tar.gz", ) +http_archive( + name = "rules_pyvenv", + sha256 = "216dd65adfd78a334e8ecb4f700ffcc3578351bfc89ca55127e5b656626f6916", + strip_prefix = "rules_pyvenv-1.0", + url = "https://github.com/cedarai/rules_pyvenv/archive/refs/tags/1.0.tar.gz", +) + ### BUILDIFIER DEPENDENCIES # See https://github.com/bazelbuild/buildtools/blob/master/buildifier/README.md # buildifier is written in Go and hence needs rules_go to be built. @@ -150,12 +157,13 @@ load("//bazel:python_repositories.bzl", "python_repositories") python_repositories() +# TODO: GH13522 upgrade to >0.7.0 when landed - see issue http_archive( name = "rules_pkg", - sha256 = "8a298e832762eda1830597d64fe7db58178aa84cd5926d76d5b744d6558941c2", + sha256 = "bdac8d3d178467c89f246e1e894b59c26c784569e91798901fb81291de834708", + strip_prefix = "rules_pkg-7f7bcf9c93bed9ee693b5bfedde5d72f9a2d6ea4", urls = [ - "https://mirror.bazel.build/github.com/bazelbuild/rules_pkg/releases/download/0.7.0/rules_pkg-0.7.0.tar.gz", - "https://github.com/bazelbuild/rules_pkg/releases/download/0.7.0/rules_pkg-0.7.0.tar.gz", + "https://github.com/bazelbuild/rules_pkg/archive/7f7bcf9c93bed9ee693b5bfedde5d72f9a2d6ea4.zip", ], ) diff --git a/lte/gateway/deploy/roles/uselocalpkgrepo/templates/repospec.list.j2 b/bazel/deb_build.bzl similarity index 62% rename from lte/gateway/deploy/roles/uselocalpkgrepo/templates/repospec.list.j2 rename to bazel/deb_build.bzl index edd4e2265b6e..798f0f13e3cd 100644 --- a/lte/gateway/deploy/roles/uselocalpkgrepo/templates/repospec.list.j2 +++ b/bazel/deb_build.bzl @@ -1,5 +1,4 @@ 
-################################################################################ -# Copyright 2020 The Magma Authors. +# Copyright 2022 The Magma Authors. # This source code is licensed under the BSD-style license found in the # LICENSE file in the root directory of this source tree. @@ -9,6 +8,16 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -################################################################################ -deb [arch=amd64] https://{{ repohost }} {{ distro }} {{ component }} +""" +Constants for building debian packages. +""" + +PY_VERSION = "python3.8" + +PY_PKG_LOC = "dist-packages" + +PY_DEST = "/usr/local/lib/{version}/{pkg_loc}".format( + pkg_loc = PY_PKG_LOC, + version = PY_VERSION, +) diff --git a/bazel/external/requirements.in b/bazel/external/requirements.in index 1e6bad371143..21128f29fcb4 100644 --- a/bazel/external/requirements.in +++ b/bazel/external/requirements.in @@ -1,7 +1,7 @@ # requirements.in setuptools==49.6.0 -grpcio-tools -grpcio +grpcio-tools<1.49.0 +grpcio<1.49.0 redis>=3.5 protobuf six @@ -25,7 +25,8 @@ bravado_core jsonschema==3.2.0 psutil systemd-python -cryptography +# cryptography<38.0.0 because of runtime issues when starting magmad +cryptography<38.0.0 # h2>=3,<4 is requirement of aioh2 (loaded via bazel) h2>=3,<4 # priority==1.3.0 is requirement of aioh2 (loaded via bazel) diff --git a/bazel/external/requirements.txt b/bazel/external/requirements.txt index 259aaafcc69a..4cf8dc2a0a0d 100644 --- a/bazel/external/requirements.txt +++ b/bazel/external/requirements.txt @@ -99,13 +99,13 @@ attrs==22.1.0 \ # aiohttp # jsonschema # pytest -bravado-core==5.17.0 \ - --hash=sha256:b3b06ae86d3c80de5694340e55df7c9097857ff965b76642979e2a961f332abf \ - --hash=sha256:fa53e796ea574f905635a43871439a44713c2ef128c62a8fcc1d0ca8765cf855 +bravado-core==5.17.1 \ + 
--hash=sha256:0da9c6f3814734622a55db3f62d08db6e188b25f3ebd087de370c91afb66a7f4 \ + --hash=sha256:e231567cdc471337d23dfc950c45c5914ade8a78cde7ccf2ebb9433fcda29f40 # via -r requirements.in -certifi==2022.6.15 \ - --hash=sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d \ - --hash=sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412 +certifi==2022.9.14 \ + --hash=sha256:36973885b9542e6bd01dea287b2b4b3b21236307c56324fcc3f1160f2d655ed5 \ + --hash=sha256:e232343de1ab72c2aa521b625c80f699e356830fd0e2c620b465b304b17b0516 # via # -r requirements.in # requests @@ -382,160 +382,159 @@ frozenlist==1.3.1 \ # via # aiohttp # aiosignal -greenlet==1.1.2 \ - --hash=sha256:0051c6f1f27cb756ffc0ffbac7d2cd48cb0362ac1736871399a739b2885134d3 \ - --hash=sha256:00e44c8afdbe5467e4f7b5851be223be68adb4272f44696ee71fe46b7036a711 \ - --hash=sha256:013d61294b6cd8fe3242932c1c5e36e5d1db2c8afb58606c5a67efce62c1f5fd \ - --hash=sha256:049fe7579230e44daef03a259faa24511d10ebfa44f69411d99e6a184fe68073 \ - --hash=sha256:14d4f3cd4e8b524ae9b8aa567858beed70c392fdec26dbdb0a8a418392e71708 \ - --hash=sha256:166eac03e48784a6a6e0e5f041cfebb1ab400b394db188c48b3a84737f505b67 \ - --hash=sha256:17ff94e7a83aa8671a25bf5b59326ec26da379ace2ebc4411d690d80a7fbcf23 \ - --hash=sha256:1e12bdc622676ce47ae9abbf455c189e442afdde8818d9da983085df6312e7a1 \ - --hash=sha256:21915eb821a6b3d9d8eefdaf57d6c345b970ad722f856cd71739493ce003ad08 \ - --hash=sha256:288c6a76705dc54fba69fbcb59904ae4ad768b4c768839b8ca5fdadec6dd8cfd \ - --hash=sha256:2bde6792f313f4e918caabc46532aa64aa27a0db05d75b20edfc5c6f46479de2 \ - --hash=sha256:32ca72bbc673adbcfecb935bb3fb1b74e663d10a4b241aaa2f5a75fe1d1f90aa \ - --hash=sha256:356b3576ad078c89a6107caa9c50cc14e98e3a6c4874a37c3e0273e4baf33de8 \ - --hash=sha256:40b951f601af999a8bf2ce8c71e8aaa4e8c6f78ff8afae7b808aae2dc50d4c40 \ - --hash=sha256:572e1787d1460da79590bf44304abbc0a2da944ea64ec549188fa84d89bba7ab \ - 
--hash=sha256:58df5c2a0e293bf665a51f8a100d3e9956febfbf1d9aaf8c0677cf70218910c6 \ - --hash=sha256:64e6175c2e53195278d7388c454e0b30997573f3f4bd63697f88d855f7a6a1fc \ - --hash=sha256:7227b47e73dedaa513cdebb98469705ef0d66eb5a1250144468e9c3097d6b59b \ - --hash=sha256:7418b6bfc7fe3331541b84bb2141c9baf1ec7132a7ecd9f375912eca810e714e \ - --hash=sha256:7cbd7574ce8e138bda9df4efc6bf2ab8572c9aff640d8ecfece1b006b68da963 \ - --hash=sha256:7ff61ff178250f9bb3cd89752df0f1dd0e27316a8bd1465351652b1b4a4cdfd3 \ - --hash=sha256:833e1551925ed51e6b44c800e71e77dacd7e49181fdc9ac9a0bf3714d515785d \ - --hash=sha256:8639cadfda96737427330a094476d4c7a56ac03de7265622fcf4cfe57c8ae18d \ - --hash=sha256:8c5d5b35f789a030ebb95bff352f1d27a93d81069f2adb3182d99882e095cefe \ - --hash=sha256:8c790abda465726cfb8bb08bd4ca9a5d0a7bd77c7ac1ca1b839ad823b948ea28 \ - --hash=sha256:8d2f1fb53a421b410751887eb4ff21386d119ef9cde3797bf5e7ed49fb51a3b3 \ - --hash=sha256:903bbd302a2378f984aef528f76d4c9b1748f318fe1294961c072bdc7f2ffa3e \ - --hash=sha256:93f81b134a165cc17123626ab8da2e30c0455441d4ab5576eed73a64c025b25c \ - --hash=sha256:95e69877983ea39b7303570fa6760f81a3eec23d0e3ab2021b7144b94d06202d \ - --hash=sha256:9633b3034d3d901f0a46b7939f8c4d64427dfba6bbc5a36b1a67364cf148a1b0 \ - --hash=sha256:97e5306482182170ade15c4b0d8386ded995a07d7cc2ca8f27958d34d6736497 \ - --hash=sha256:9f3cba480d3deb69f6ee2c1825060177a22c7826431458c697df88e6aeb3caee \ - --hash=sha256:aa5b467f15e78b82257319aebc78dd2915e4c1436c3c0d1ad6f53e47ba6e2713 \ - --hash=sha256:abb7a75ed8b968f3061327c433a0fbd17b729947b400747c334a9c29a9af6c58 \ - --hash=sha256:aec52725173bd3a7b56fe91bc56eccb26fbdff1386ef123abb63c84c5b43b63a \ - --hash=sha256:b11548073a2213d950c3f671aa88e6f83cda6e2fb97a8b6317b1b5b33d850e06 \ - --hash=sha256:b1692f7d6bc45e3200844be0dba153612103db241691088626a33ff1f24a0d88 \ - --hash=sha256:b336501a05e13b616ef81ce329c0e09ac5ed8c732d9ba7e3e983fcc1a9e86965 \ - --hash=sha256:b8c008de9d0daba7b6666aa5bbfdc23dcd78cafc33997c9b7741ff6353bafb7f \ - 
--hash=sha256:b92e29e58bef6d9cfd340c72b04d74c4b4e9f70c9fa7c78b674d1fec18896dc4 \ - --hash=sha256:be5f425ff1f5f4b3c1e33ad64ab994eed12fc284a6ea71c5243fd564502ecbe5 \ - --hash=sha256:dd0b1e9e891f69e7675ba5c92e28b90eaa045f6ab134ffe70b52e948aa175b3c \ - --hash=sha256:e30f5ea4ae2346e62cedde8794a56858a67b878dd79f7df76a0767e356b1744a \ - --hash=sha256:e6a36bb9474218c7a5b27ae476035497a6990e21d04c279884eb10d9b290f1b1 \ - --hash=sha256:e859fcb4cbe93504ea18008d1df98dee4f7766db66c435e4882ab35cf70cac43 \ - --hash=sha256:eb6ea6da4c787111adf40f697b4e58732ee0942b5d3bd8f435277643329ba627 \ - --hash=sha256:ec8c433b3ab0419100bd45b47c9c8551248a5aee30ca5e9d399a0b57ac04651b \ - --hash=sha256:eff9d20417ff9dcb0d25e2defc2574d10b491bf2e693b4e491914738b7908168 \ - --hash=sha256:f0214eb2a23b85528310dad848ad2ac58e735612929c8072f6093f3585fd342d \ - --hash=sha256:f276df9830dba7a333544bd41070e8175762a7ac20350786b322b714b0e654f5 \ - --hash=sha256:f3acda1924472472ddd60c29e5b9db0cec629fbe3c5c5accb74d6d6d14773478 \ - --hash=sha256:f70a9e237bb792c7cc7e44c531fd48f5897961701cdaa06cf22fc14965c496cf \ - --hash=sha256:f9d29ca8a77117315101425ec7ec2a47a22ccf59f5593378fc4077ac5b754fce \ - --hash=sha256:fa877ca7f6b48054f847b61d6fa7bed5cebb663ebc55e018fda12db09dcc664c \ - --hash=sha256:fdcec0b8399108577ec290f55551d926d9a1fa6cad45882093a7a07ac5ec147b +greenlet==1.1.3 \ + --hash=sha256:0118817c9341ef2b0f75f5af79ac377e4da6ff637e5ee4ac91802c0e379dadb4 \ + --hash=sha256:048d2bed76c2aa6de7af500ae0ea51dd2267aec0e0f2a436981159053d0bc7cc \ + --hash=sha256:07c58e169bbe1e87b8bbf15a5c1b779a7616df9fd3e61cadc9d691740015b4f8 \ + --hash=sha256:095a980288fe05adf3d002fbb180c99bdcf0f930e220aa66fcd56e7914a38202 \ + --hash=sha256:0b181e9aa6cb2f5ec0cacc8cee6e5a3093416c841ba32c185c30c160487f0380 \ + --hash=sha256:1626185d938d7381631e48e6f7713e8d4b964be246073e1a1d15c2f061ac9f08 \ + --hash=sha256:184416e481295832350a4bf731ba619a92f5689bf5d0fa4341e98b98b1265bd7 \ + 
--hash=sha256:1dd51d2650e70c6c4af37f454737bf4a11e568945b27f74b471e8e2a9fd21268 \ + --hash=sha256:1ec2779774d8e42ed0440cf8bc55540175187e8e934f2be25199bf4ed948cd9e \ + --hash=sha256:2cf45e339cabea16c07586306a31cfcc5a3b5e1626d365714d283732afed6809 \ + --hash=sha256:2fb0aa7f6996879551fd67461d5d3ab0c3c0245da98be90c89fcb7a18d437403 \ + --hash=sha256:44b4817c34c9272c65550b788913620f1fdc80362b209bc9d7dd2f40d8793080 \ + --hash=sha256:466ce0928e33421ee84ae04c4ac6f253a3a3e6b8d600a79bd43fd4403e0a7a76 \ + --hash=sha256:4f166b4aca8d7d489e82d74627a7069ab34211ef5ebb57c300ec4b9337b60fc0 \ + --hash=sha256:510c3b15587afce9800198b4b142202b323bf4b4b5f9d6c79cb9a35e5e3c30d2 \ + --hash=sha256:5b756e6730ea59b2745072e28ad27f4c837084688e6a6b3633c8b1e509e6ae0e \ + --hash=sha256:5fbe1ab72b998ca77ceabbae63a9b2e2dc2d963f4299b9b278252ddba142d3f1 \ + --hash=sha256:6200a11f003ec26815f7e3d2ded01b43a3810be3528dd760d2f1fa777490c3cd \ + --hash=sha256:65ad1a7a463a2a6f863661329a944a5802c7129f7ad33583dcc11069c17e622c \ + --hash=sha256:694ffa7144fa5cc526c8f4512665003a39fa09ef00d19bbca5c8d3406db72fbe \ + --hash=sha256:6f5d4b2280ceea76c55c893827961ed0a6eadd5a584a7c4e6e6dd7bc10dfdd96 \ + --hash=sha256:7532a46505470be30cbf1dbadb20379fb481244f1ca54207d7df3bf0bbab6a20 \ + --hash=sha256:76a53bfa10b367ee734b95988bd82a9a5f0038a25030f9f23bbbc005010ca600 \ + --hash=sha256:77e41db75f9958f2083e03e9dd39da12247b3430c92267df3af77c83d8ff9eed \ + --hash=sha256:7a43bbfa9b6cfdfaeefbd91038dde65ea2c421dc387ed171613df340650874f2 \ + --hash=sha256:7b41d19c0cfe5c259fe6c539fd75051cd39a5d33d05482f885faf43f7f5e7d26 \ + --hash=sha256:7c5227963409551ae4a6938beb70d56bf1918c554a287d3da6853526212fbe0a \ + --hash=sha256:870a48007872d12e95a996fca3c03a64290d3ea2e61076aa35d3b253cf34cd32 \ + --hash=sha256:88b04e12c9b041a1e0bcb886fec709c488192638a9a7a3677513ac6ba81d8e79 \ + --hash=sha256:8c287ae7ac921dfde88b1c125bd9590b7ec3c900c2d3db5197f1286e144e712b \ + --hash=sha256:903fa5716b8fbb21019268b44f73f3748c41d1a30d71b4a49c84b642c2fed5fa \ + 
--hash=sha256:9537e4baf0db67f382eb29255a03154fcd4984638303ff9baaa738b10371fa57 \ + --hash=sha256:9951dcbd37850da32b2cb6e391f621c1ee456191c6ae5528af4a34afe357c30e \ + --hash=sha256:9b2f7d0408ddeb8ea1fd43d3db79a8cefaccadd2a812f021333b338ed6b10aba \ + --hash=sha256:9c88e134d51d5e82315a7c32b914a58751b7353eb5268dbd02eabf020b4c4700 \ + --hash=sha256:9fae214f6c43cd47f7bef98c56919b9222481e833be2915f6857a1e9e8a15318 \ + --hash=sha256:a3a669f11289a8995d24fbfc0e63f8289dd03c9aaa0cc8f1eab31d18ca61a382 \ + --hash=sha256:aa741c1a8a8cc25eb3a3a01a62bdb5095a773d8c6a86470bde7f607a447e7905 \ + --hash=sha256:b0877a9a2129a2c56a2eae2da016743db7d9d6a05d5e1c198f1b7808c602a30e \ + --hash=sha256:bcb6c6dd1d6be6d38d6db283747d07fda089ff8c559a835236560a4410340455 \ + --hash=sha256:caff52cb5cd7626872d9696aee5b794abe172804beb7db52eed1fd5824b63910 \ + --hash=sha256:cbc1eb55342cbac8f7ec159088d54e2cfdd5ddf61c87b8bbe682d113789331b2 \ + --hash=sha256:cd16a89efe3a003029c87ff19e9fba635864e064da646bc749fc1908a4af18f3 \ + --hash=sha256:ce5b64dfe8d0cca407d88b0ee619d80d4215a2612c1af8c98a92180e7109f4b5 \ + --hash=sha256:d58a5a71c4c37354f9e0c24c9c8321f0185f6945ef027460b809f4bb474bfe41 \ + --hash=sha256:db41f3845eb579b544c962864cce2c2a0257fe30f0f1e18e51b1e8cbb4e0ac6d \ + --hash=sha256:db5b25265010a1b3dca6a174a443a0ed4c4ab12d5e2883a11c97d6e6d59b12f9 \ + --hash=sha256:dd0404d154084a371e6d2bafc787201612a1359c2dee688ae334f9118aa0bf47 \ + --hash=sha256:de431765bd5fe62119e0bc6bc6e7b17ac53017ae1782acf88fcf6b7eae475a49 \ + --hash=sha256:df02fdec0c533301497acb0bc0f27f479a3a63dcdc3a099ae33a902857f07477 \ + --hash=sha256:e8533f5111704d75de3139bf0b8136d3a6c1642c55c067866fa0a51c2155ee33 \ + --hash=sha256:f2f908239b7098799b8845e5936c2ccb91d8c2323be02e82f8dcb4a80dcf4a25 \ + --hash=sha256:f8bfd36f368efe0ab2a6aa3db7f14598aac454b06849fb633b762ddbede1db90 \ + --hash=sha256:ffe73f9e7aea404722058405ff24041e59d31ca23d1da0895af48050a07b6932 # via eventlet -grpcio==1.47.0 \ - 
--hash=sha256:0425b5577be202d0a4024536bbccb1b052c47e0766096e6c3a5789ddfd5f400d \ - --hash=sha256:06c0739dff9e723bca28ec22301f3711d85c2e652d1c8ae938aa0f7ad632ef9a \ - --hash=sha256:08307dc5a6ac4da03146d6c00f62319e0665b01c6ffe805cfcaa955c17253f9c \ - --hash=sha256:090dfa19f41efcbe760ae59b34da4304d4be9a59960c9682b7eab7e0b6748a79 \ - --hash=sha256:0a24b50810aae90c74bbd901c3f175b9645802d2fbf03eadaf418ddee4c26668 \ - --hash=sha256:0cd44d78f302ff67f11a8c49b786c7ccbed2cfef6f4fd7bb0c3dc9255415f8f7 \ - --hash=sha256:0d8a7f3eb6f290189f48223a5f4464c99619a9de34200ce80d5092fb268323d2 \ - --hash=sha256:14d2bc74218986e5edf5527e870b0969d63601911994ebf0dce96288548cf0ef \ - --hash=sha256:1bb9afa85e797a646bfcd785309e869e80a375c959b11a17c9680abebacc0cb0 \ - --hash=sha256:1ec63bbd09586e5cda1bdc832ae6975d2526d04433a764a1cc866caa399e50d4 \ - --hash=sha256:2061dbe41e43b0a5e1fd423e8a7fb3a0cf11d69ce22d0fac21f1a8c704640b12 \ - --hash=sha256:324e363bad4d89a8ec7124013371f268d43afd0ac0fdeec1b21c1a101eb7dafb \ - --hash=sha256:35dfd981b03a3ec842671d1694fe437ee9f7b9e6a02792157a2793b0eba4f478 \ - --hash=sha256:43857d06b2473b640467467f8f553319b5e819e54be14c86324dad83a0547818 \ - --hash=sha256:4706c78b0c183dca815bbb4ef3e8dd2136ccc8d1699f62c585e75e211ad388f6 \ - --hash=sha256:4d9ad7122f60157454f74a850d1337ba135146cef6fb7956d78c7194d52db0fe \ - --hash=sha256:544da3458d1d249bb8aed5504adf3e194a931e212017934bf7bfa774dad37fb3 \ - --hash=sha256:55782a31ec539f15b34ee56f19131fe1430f38a4be022eb30c85e0b0dcf57f11 \ - --hash=sha256:55cd8b13c5ef22003889f599b8f2930836c6f71cd7cf3fc0196633813dc4f928 \ - --hash=sha256:5dbba95fab9b35957b4977b8904fc1fa56b302f9051eff4d7716ebb0c087f801 \ - --hash=sha256:5f57b9b61c22537623a5577bf5f2f970dc4e50fac5391090114c6eb3ab5a129f \ - --hash=sha256:64e097dd08bb408afeeaee9a56f75311c9ca5b27b8b0278279dc8eef85fa1051 \ - --hash=sha256:664a270d3eac68183ad049665b0f4d0262ec387d5c08c0108dbcfe5b351a8b4d \ - --hash=sha256:668350ea02af018ca945bd629754d47126b366d981ab88e0369b53bc781ffb14 \ - 
--hash=sha256:67cd275a651532d28620eef677b97164a5438c5afcfd44b15e8992afa9eb598c \ - --hash=sha256:68b5e47fcca8481f36ef444842801928e60e30a5b3852c9f4a95f2582d10dcb2 \ - --hash=sha256:7191ffc8bcf8a630c547287ab103e1fdf72b2e0c119e634d8a36055c1d988ad0 \ - --hash=sha256:815089435d0f113719eabf105832e4c4fa1726b39ae3fb2ca7861752b0f70570 \ - --hash=sha256:8dbef03853a0dbe457417c5469cb0f9d5bf47401b49d50c7dad3c495663b699b \ - --hash=sha256:91cd292373e85a52c897fa5b4768c895e20a7dc3423449c64f0f96388dd1812e \ - --hash=sha256:9298d6f2a81f132f72a7e79cbc90a511fffacc75045c2b10050bb87b86c8353d \ - --hash=sha256:96cff5a2081db82fb710db6a19dd8f904bdebb927727aaf4d9c427984b79a4c1 \ - --hash=sha256:9e63e0619a5627edb7a5eb3e9568b9f97e604856ba228cc1d8a9f83ce3d0466e \ - --hash=sha256:a278d02272214ec33f046864a24b5f5aab7f60f855de38c525e5b4ef61ec5b48 \ - --hash=sha256:a6b2432ac2353c80a56d9015dfc5c4af60245c719628d4193ecd75ddf9cd248c \ - --hash=sha256:b821403907e865e8377af3eee62f0cb233ea2369ba0fcdce9505ca5bfaf4eeb3 \ - --hash=sha256:b88bec3f94a16411a1e0336eb69f335f58229e45d4082b12d8e554cedea97586 \ - --hash=sha256:bfdb8af4801d1c31a18d54b37f4e49bb268d1f485ecf47f70e78d56e04ff37a7 \ - --hash=sha256:c79996ae64dc4d8730782dff0d1daacc8ce7d4c2ba9cef83b6f469f73c0655ce \ - --hash=sha256:cc34d182c4fd64b6ff8304a606b95e814e4f8ed4b245b6d6cc9607690e3ef201 \ - --hash=sha256:d0d481ff55ea6cc49dab2c8276597bd4f1a84a8745fedb4bc23e12e9fb9d0e45 \ - --hash=sha256:e9723784cf264697024778dcf4b7542c851fe14b14681d6268fb984a53f76df1 \ - --hash=sha256:f4508e8abd67ebcccd0fbde6e2b1917ba5d153f3f20c1de385abd8722545e05f \ - --hash=sha256:f515782b168a4ec6ea241add845ccfebe187fc7b09adf892b3ad9e2592c60af1 \ - --hash=sha256:f89de64d9eb3478b188859214752db50c91a749479011abd99e248550371375f \ - --hash=sha256:fcd5d932842df503eb0bf60f9cc35e6fe732b51f499e78b45234e0be41b0018d +grpcio==1.48.1 \ + --hash=sha256:1471e6f25a8e47d9f88499f48c565fc5b2876e8ee91bfb0ff33eaadd188b7ea6 \ + 
--hash=sha256:19f9c021ae858d3ef6d5ec4c0acf3f0b0a61e599e5aa36c36943c209520a0e66 \ + --hash=sha256:1c924d4e0493fd536ba3b82584b370e8b3c809ef341f9f828cff2dc3c761b3ab \ + --hash=sha256:1d065f40fe74b52b88a6c42d4373a0983f1b0090f952a0747f34f2c11d6cbc64 \ + --hash=sha256:1ff1be0474846ed15682843b187e6062f845ddfeaceb2b28972073f474f7b735 \ + --hash=sha256:2563357697f5f2d7fd80c1b07a57ef4736551327ad84de604e7b9f6c1b6b4e20 \ + --hash=sha256:2b6c336409937fd1cd2bf78eb72651f44d292d88da5e63059a4e8bd01b9d7411 \ + --hash=sha256:3340cb2224cc397954def015729391d85fb31135b5a7efca363e73e6f1b0e908 \ + --hash=sha256:346bef672a1536d59437210f16af35389d715d2b321bfe4899b3d6476a196706 \ + --hash=sha256:3d319a0c89ffac9b8dfc75bfe727a4c835d18bbccc14203b20eb5949c6c7d87d \ + --hash=sha256:460f5bec23fffa3c041aeba1f93a0f06b7a29e6a4da3658a52e1a866494920ab \ + --hash=sha256:4786323555a9f2c6380cd9a9922bcfd42165a51d68d242eebfcdfdc667651c96 \ + --hash=sha256:53b6306f9473020bc47ddf64ca704356466e63d5f88f5c2a7bf0a4692e7f03c4 \ + --hash=sha256:53fa2fc1a1713195fa7acf7443a6f59b6ac7837607690f813c66cc18a9cb8135 \ + --hash=sha256:598c8c42420443c55431eba1821c7a2f72707f1ff674a4de9e0bb03282923cfb \ + --hash=sha256:5a6a750c8324f3974e95265d3f9a0541573c537af1f67b3f6f46bf9c0b2e1b36 \ + --hash=sha256:5d81cd3c161291339ed3b469250c2f5013c3083dea7796e93aedff8f05fdcec1 \ + --hash=sha256:626822d799d8fab08f07c8d95ef5c36213d24143f7cad3f548e97413db9f4110 \ + --hash=sha256:660217eccd2943bf23ea9a36e2a292024305aec04bf747fbcff1f5032b83610e \ + --hash=sha256:741eeff39a26d26da2b6d74ff0559f882ee95ee4e3b20c0b4b829021cb917f96 \ + --hash=sha256:7cee20a4f873d61274d70c28ff63d19677d9eeea869c6a9cbaf3a00712336b6c \ + --hash=sha256:8bbaa6647986b874891bc682a1093df54cbdb073b5d4b844a2b480c47c7ffafd \ + --hash=sha256:934aad7350d9577f4275e787f3d91d3c8ff4efffa8d6b807d343d3c891ff53eb \ + --hash=sha256:9477967e605ba08715dcc769b5ee0f0d8b22bda40ef25a0df5a8759e5a4d21a5 \ + --hash=sha256:97dc35a99c61d5f35ec6457d3df0a4695ba9bb04a35686e1c254462b15c53f98 \ + 
--hash=sha256:9d116106cf220c79e91595523c893f1cf09ec0c2ea49de4fb82152528b7e6833 \ + --hash=sha256:9fba1d0ba7cf56811728f1951c800a9aca6677e86433c5e353f2cc2c4039fda6 \ + --hash=sha256:a15409bc1d05c52ecb00f5e42ab8ff280e7149f2eb854728f628fb2a0a161a5b \ + --hash=sha256:a1b81849061c67c2ffaa6ed27aa3d9b0762e71e68e784e24b0330b7b1c67470a \ + --hash=sha256:a5edbcb8289681fcb5ded7542f2b7dd456489e83007a95e32fcaf55e9f18603e \ + --hash=sha256:a661d4b9b314327dec1e92ed57e591e8e5eb055700e0ba9e9687f734d922dcb6 \ + --hash=sha256:b005502c59835f9ba3c3f8742f64c19eeb3db41eae1a89b035a559b39b421803 \ + --hash=sha256:b01faf7934c606d5050cf055c1d03943180f23d995d68d04cf50c80d1ef2c65a \ + --hash=sha256:b0fa666fecdb1b118d37823937e9237afa17fe734fc4dbe6dd642e1e4cca0246 \ + --hash=sha256:c54734a6eb3be544d332e65c846236d02e5fc71325e8c53af91e83a46b87b506 \ + --hash=sha256:c6b6969c529521c86884a13745a4b68930db1ef2e051735c0f479d0a7adb25b6 \ + --hash=sha256:ca382028cdfd2d79b7704b2acb8ae1fb54e9e1a03a6765e1895ba89a6fcfaba1 \ + --hash=sha256:ca5209ef89f7607be47a308fa92308cf079805ed556ecda672f00039a26e366f \ + --hash=sha256:d03009a26f7edca9f0a581aa5d3153242b815b858cb4790e34a955afb303c6ba \ + --hash=sha256:d751f8beb383c4a5a95625d7ccc1ab183b98b02c6a88924814ea7fbff530872d \ + --hash=sha256:dad2501603f954f222a6e555413c454a5f8d763ab910fbab3855bcdfef6b3148 \ + --hash=sha256:dbba883c2b6d63949bc98ab1950bc22cf7c8d4e8cb68de6edde49d3cccd8fd26 \ + --hash=sha256:e02f6ba10a3d4e289fa7ae91b301783a750d118b60f17924ca05e506c7d29bc8 \ + --hash=sha256:f0ef1dafb4eadeaca58aec8c721a5a73d551064b0c63d57fa003e233277c642e \ + --hash=sha256:f29627d66ae816837fd32c9450dc9c54780962cd74d034513ed829ba3ab46652 \ + --hash=sha256:f3a99ed422c38bd1bc893cb2cb2cea6d64173ec30927f699e95f5f58bdf625cf # via # -r requirements.in # grpcio-tools -grpcio-tools==1.47.0 \ - --hash=sha256:058060fbc5a60a1c6cc2cbb3d99f730825ba249917978d48b7d0fd8f2caf01da \ - --hash=sha256:05b495ed997a9afc9016c696ed7fcd35678a7276fe0bd8b95743a382363ad2b4 \ - 
--hash=sha256:0b32002ff4ae860c85feb2aca1b752eb4518e7781c5770b869e7b2dfa9d92cbe \ - --hash=sha256:0eced69e159b3fdd7597d85950f56990e0aa81c11a20a7785fb66f0e47c46b57 \ - --hash=sha256:156b5f6654fea51983fd9257d47f1ad7bfb2a1d09ed471e610a7b34b97d40802 \ - --hash=sha256:18548f35b0657422d5d40e6fa89994469f4bb77df09f8133ecdccec0e31fc72c \ - --hash=sha256:1a0a91941f6f2a4d97e843a5d9ad7ccccf702af2d9455932f18cf922e65af95e \ - --hash=sha256:2364ac3bd7266752c9971dbef3f79d21cd958777823512faa93473cbd973b8f1 \ - --hash=sha256:2a6a6e5e08866d643b84c89140bbe504f864f11b87bfff7a5f2af94c5a2be18d \ - --hash=sha256:2c5c50886e6e79af5387c6514eb19f1f6b1a0b4eb787f1b7a8f21a74e2444102 \ - --hash=sha256:3edb04d102e0d6f0149d93fe8cf69a38c20a2259a913701a4c35c119049c8404 \ - --hash=sha256:3fccc282ee97211a33652419dcdfd24a9a60bbd2d56f5c5dd50c7186a0f4d978 \ - --hash=sha256:441a0a378117447c089b944f325f11039329d8aa961ecdb8226c5dd84af6f003 \ - --hash=sha256:45ceb73a97e2d7ff719fc12c02f1ef13014c47bad60a864313da88ccd90cdf36 \ - --hash=sha256:498c0bae4975683a5a33b72cf1bd64703b34c826871fd3ee8d295407cd5211ec \ - --hash=sha256:4eced9e0674bfb5c528a3bf2ea2b8596da133148b3e0718915792074204ea226 \ - --hash=sha256:51352070f13ea3346b5f5ca825f2203528b8218fffc6ac6d951216f812272d8b \ - --hash=sha256:53c47b08ee2f59a89e8df5f3c09850d7fac264754cbaeabae65f6fbf78d80536 \ - --hash=sha256:5c8ab9b541a869d3b4ef34c291fbfb6ec78ad728e04737fddd91eac3c2193459 \ - --hash=sha256:6804cbd92b9069ae9189d65300e456bcc3945f6ae196d2af254e9635b9c3ef0d \ - --hash=sha256:6c66094fd79ee98bcb504e9f1a3fa6e7ebfd246b4e3d8132227e5020b5633988 \ - --hash=sha256:6d41ec06f2ccc8adcd400a63508ea8e008fb03f270e0031ff2de047def2ada9d \ - --hash=sha256:74f607b9084b5325a997d9ae57c0814955e19311111568d029b2a6a66f4869ec \ - --hash=sha256:7589d6f56e633378047274223f0a75534b2cd7c598f9f2894cb4854378b8b00b \ - --hash=sha256:759064fc8439bbfe5402b2fd3b0685f4ffe07d7cc6a64908c2f88a7c80449ce4 \ - --hash=sha256:7be45d69f0eed912df2e92d94958d1a3e72617469ec58ffcac3e2eb153a7057e \ - 
--hash=sha256:7fd10683f4f03400536e7a026de9929430ee198c2cbdf2c584edfa909ccc8993 \ - --hash=sha256:818fca1c7dd4ad1c9c01f91ba37006964f4c57c93856fa4ebd7d5589132844d6 \ - --hash=sha256:84e38f46af513a6f62a3d482160fcb94063dbc9fdd1452d09f8010422f144de1 \ - --hash=sha256:93d08c02bd82e423353399582f22493a191db459c3f34031b583f13bcf42b95e \ - --hash=sha256:94114e01c4508d904825bd984e3d2752c0b0e6eb714ac08b99f73421691cf931 \ - --hash=sha256:9ab78cd16b4ac7c6b79c8be194c67e03238f6378694133ce3ce9b123caf24ed5 \ - --hash=sha256:9dd6e26e3e0555deadcb52b087c6064e4fd02c09180b42e96c66260137d26b50 \ - --hash=sha256:a93263955da8d6e449d7ceb84af4e84b82fa760fd661b4ef4549929d9670ab8e \ - --hash=sha256:ac5c6aef72618ebc5ee9ad725dd53e1c145ef420b79d21a7c43ca80658d3d8d4 \ - --hash=sha256:ae53ae35a9761ceea50a502addb7186c5188969d63ad21cf12e00d939db5b967 \ - --hash=sha256:b2fa3c545c8aa1e8c33ca04b1424be3ff77da631faf37db3350d7459c3bdedde \ - --hash=sha256:c2c280197d68d5a28f5b90adf755bd9e28c99f3e47ad4edcfe20497cf3456e1d \ - --hash=sha256:ca548afcfa0ffc47c3cf9eeede81adde15c321bfe897085e90ce8913615584ae \ - --hash=sha256:ccc8ce33bd31bf12649541b5857fabfee7dd84b04138336a27bf46a28d150c11 \ - --hash=sha256:dc6567d652c6b70d8c03f4e450a694e62b4d69a400752f8b9c3c8b659dd6b06a \ - --hash=sha256:dd5d330230038374e64fc652fc4c1b25d457a8b67b9069bfce83a17ab675650b \ - --hash=sha256:e1de1f139f05ab6bbdabc58b06f6ebb5940a92214bbc7246270299387d0af2ae \ - --hash=sha256:f19191460435f8bc72450cf26ac0559726f98c49ad9b0969db3db8ba51be98c8 \ - --hash=sha256:f64b5378484be1d6ce59311f86174be29c8ff98d8d90f589e1c56d5acae67d3c \ - --hash=sha256:fb44ae747fd299b6513420cb6ead50491dc3691d17da48f28fcc5ebf07f47741 +grpcio-tools==1.48.1 \ + --hash=sha256:00b6592b04732648238dcabffd86202211e1df90e26bb4dbe2fa4882e8bf662c \ + --hash=sha256:04c263723dcc1ef4c3ae0a0d0c0a7d572bca49153e02fc026ed485fc2373128f \ + --hash=sha256:09c295fae4fc646376b02f81a763d32900bdd16b85a8555b912d2f7472a83ad3 \ + 
--hash=sha256:1178f2ea531f80cc2027ec64728df6ffc8e98cf1df61652a496eafd612127183 \ + --hash=sha256:1464248c6089d3c19ba82d29b515b11a43737ef73b08f73895c16959a4d537df \ + --hash=sha256:1cb5802dc1eb33e1c5ccc86d48af6c95cfb10975851f2e4b3e5547d2e7502829 \ + --hash=sha256:247035b1d59135c13b74b203a23069564ca8202d27b21578ced6426c06e08e98 \ + --hash=sha256:24bf144bafd8cdcdedcd738a3234346c4d8a3e82aee74429842fffb56920bcbf \ + --hash=sha256:289bb31d4ab25c49125f49bd7b048e8ce02b147ad296e8fce087814d034faf21 \ + --hash=sha256:2bf1d3b44132bf5ec8397fdee70102edff7cf311483d58926cae3a564310d121 \ + --hash=sha256:306bd078d20739026e481ef9f353e5b3025a761b443e96a47e5c5db7e3cdcd8a \ + --hash=sha256:31877d879023bd81344d31eba6e301f1239c2ccfc5fd28d62fac2cb117194a81 \ + --hash=sha256:42d0b2f488158cd12ed5848fc8e4b536214ff5a4f5d6ecc5ebe5edbbf064ae50 \ + --hash=sha256:491e87352e0be4c0e72dbecb0a52a53aa75f6a4d1dc9e53067c1cbc8833c4f5a \ + --hash=sha256:4dfcfb3f89b0e8c98e1a65f9c0e04f5bfe69ea7eaaa22b4b99c2e3895512afc8 \ + --hash=sha256:5037a919a2689ed6f8c2a3b9cf6f1431caf0a93c730bde38f0960143f0072540 \ + --hash=sha256:5090ef3fa0288977b25fa1c5c0d15ca5d751ff206f906966c64eb3200ed6fa13 \ + --hash=sha256:55b7de7f48c6f500c1252e6b96279c93d5bc24bcef618fd6587563e8c854527f \ + --hash=sha256:590a1bc3e5997e78b57085178d8de202de977d1f01984ac907a3ffca225bb729 \ + --hash=sha256:610da6278696e8f6d7831ecb9e513099f1f52dae3f40876a6049f634d4933919 \ + --hash=sha256:647ab95b7cecb5de392da3d70f0d49731cb90f1bd590d37a83ed6cfe06cb2a31 \ + --hash=sha256:6c2a2796e4b6d7ae5378d8fd848a64b9541524ad5f6eea4583b879fa53d97883 \ + --hash=sha256:739e3c49091175c117a26be4099145cdf85638a991d9bad8a5f9f6614ea31d61 \ + --hash=sha256:75fbf37461c337184624bec30db3a59c47514812b2f8f43a99818e44fb63243a \ + --hash=sha256:7d4ada8dc77f5db280fa476cd28263f7d7de96c4fce10f5559cde620a3363a95 \ + --hash=sha256:7dc607ee397eed06b126cadf6a86bde46bcc7da9b9a1d1ad5338b9afa6085d07 \ + --hash=sha256:7e3230dd4f1bd79251ef086222041954ca5c27650a3d7f6589f972292461c8bf \ + 
--hash=sha256:832935ea7ebb20f99bd8184fd70e0d111202043165207423f926dad22b255583 \ + --hash=sha256:87676b55aaf171c4922682e307e4fb8d6007c5e79bcdf1653cdf0868da17ae2a \ + --hash=sha256:8d7fc42b0443e8d16d740e463ea0eb77b68dec6423783738bf1e480e6475e32c \ + --hash=sha256:8ec634738c4d6220d96b293caff3115a0573c52d97581654c0a47b09d70ea958 \ + --hash=sha256:920d6381ea5bc8ebf3303e57bcc7ed3a0735508f881e70a5e08a88d1a98ab34d \ + --hash=sha256:92a7182bd96b8105a5372e8e656ab8bde8f607c5092278aba2ec2f83b8c9ab28 \ + --hash=sha256:951c3701157627f90ec6831ba502babb10aa0dd465a2cb00fe450a061ef10251 \ + --hash=sha256:a0e2c5ba0a4a181eb379415affba9dc122f3f2eecdd459cc98fe311fdad9cf96 \ + --hash=sha256:a53b033e38e60671c0c22a296ae9fd2b025a196e153c6d6b9a3aa20b11bedfcf \ + --hash=sha256:b53f48cb31a6a59ea01a371ab58bd8fa83fae9c1cfafab8acb954a70f757c6b1 \ + --hash=sha256:b5da8227eeefc957c41d7c59919d35e8a1d8275941e9839566b29eb368940bee \ + --hash=sha256:c4ec8447af615563a5da884b9ed93b41716b80bb01ac9183f09046ac48dbacb5 \ + --hash=sha256:d454f4ef35b070b5bf39b65fec30b26b1f22da5b9efa4a716f7518308713ceab \ + --hash=sha256:d9b854dec3e81fa5c2d601facb3b801a60bd5cbe295ef7e788a3e5824799183e \ + --hash=sha256:e47c663303ebc7b409f4da9073e6bef05e97b74ee47caafc08e0a2e223b95100 \ + --hash=sha256:eb04964c52721cb841a863a6a523bd06328a42d718371bc55d257f93390b6799 \ + --hash=sha256:ebe91db084c22143bc50967670f201bb5bf776b46227b62a97b0a75b0046aac5 \ + --hash=sha256:f432ae6593373cf46a0144aa6392875c5b3fd9d48bf27b5ced503c466ef496ea \ + --hash=sha256:fe071e1e13088752b2e96c6caadb719d39c80d768e441e3fc73b08f8b87560ae # via -r requirements.in h2==3.2.0 \ --hash=sha256:61e0f6601fa709f35cdb730863b4e5ec7ad449792add80d1410d4174ed139af5 \ @@ -592,9 +591,9 @@ hyperframe==5.2.0 \ --hash=sha256:5187962cb16dcc078f23cb5a4b110098d546c3f41ff2d4038a9896893bbd0b40 \ --hash=sha256:a9f5c17f2cc3c719b917c4f33ed1c61bd1f8dfac4b1bd23b7c80b3400971b41f # via h2 -idna==3.3 \ - --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ - 
--hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d +idna==3.4 \ + --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ + --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # via # jsonschema # requests @@ -976,7 +975,7 @@ oslo-config==8.8.0 \ --hash=sha256:96933d3011dae15608a11616bfb00d947e22da3cb09b6ff37ddd7576abd4764c \ --hash=sha256:b1e2a398450ea35a8e5630d8b23057b8939838c4433cd25a20cc3a36d5df9e3b # via -r requirements.in -oslo-i18n==5.1.0 \ +oslo.i18n==5.1.0 \ --hash=sha256:6bf111a6357d5449640852de4640eae4159b5562bbba4c90febb0034abc095d0 \ --hash=sha256:75086cfd898819638ca741159f677e2073a78ca86a9c9be8d38b46800cdf2dc9 # via oslo-config @@ -998,7 +997,7 @@ pbr==5.10.0 \ --hash=sha256:cfcc4ff8e698256fc17ea3ff796478b050852585aa5bae79ecd05b2ab7b39b9a \ --hash=sha256:da3e18aac0a3c003e9eea1a81bd23e5a3a75d745670dcf736317b7d966887fdf # via - # oslo-i18n + # oslo.i18n # stevedore pluggy==1.0.0 \ --hash=sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159 \ @@ -1008,70 +1007,69 @@ priority==1.3.0 \ --hash=sha256:6bc1961a6d7fcacbfc337769f1a382c8e746566aaa365e78047abe9f66b2ffbe \ --hash=sha256:be4fcb94b5e37cdeb40af5533afe6dd603bd665fe9c8b3052610fc1001d5d1eb # via -r requirements.in -prometheus-client==0.3.1 \ +prometheus_client==0.3.1 \ --hash=sha256:17bc24c09431644f7c65d7bce9f4237252308070b6395d6d8e87767afe867e24 # via -r requirements.in -protobuf==3.20.1 \ - --hash=sha256:06059eb6953ff01e56a25cd02cca1a9649a75a7e65397b5b9b4e929ed71d10cf \ - --hash=sha256:097c5d8a9808302fb0da7e20edf0b8d4703274d140fd25c5edabddcde43e081f \ - --hash=sha256:284f86a6207c897542d7e956eb243a36bb8f9564c1742b253462386e96c6b78f \ - --hash=sha256:32ca378605b41fd180dfe4e14d3226386d8d1b002ab31c969c366549e66a2bb7 \ - --hash=sha256:3cc797c9d15d7689ed507b165cd05913acb992d78b379f6014e013f9ecb20996 \ - --hash=sha256:62f1b5c4cd6c5402b4e2d63804ba49a327e0c386c99b1675c8a0fefda23b2067 \ - 
--hash=sha256:69ccfdf3657ba59569c64295b7d51325f91af586f8d5793b734260dfe2e94e2c \ - --hash=sha256:6f50601512a3d23625d8a85b1638d914a0970f17920ff39cec63aaef80a93fb7 \ - --hash=sha256:7403941f6d0992d40161aa8bb23e12575637008a5a02283a930addc0508982f9 \ - --hash=sha256:755f3aee41354ae395e104d62119cb223339a8f3276a0cd009ffabfcdd46bb0c \ - --hash=sha256:77053d28427a29987ca9caf7b72ccafee011257561259faba8dd308fda9a8739 \ - --hash=sha256:7e371f10abe57cee5021797126c93479f59fccc9693dafd6bd5633ab67808a91 \ - --hash=sha256:9016d01c91e8e625141d24ec1b20fed584703e527d28512aa8c8707f105a683c \ - --hash=sha256:9be73ad47579abc26c12024239d3540e6b765182a91dbc88e23658ab71767153 \ - --hash=sha256:adc31566d027f45efe3f44eeb5b1f329da43891634d61c75a5944e9be6dd42c9 \ - --hash=sha256:adfc6cf69c7f8c50fd24c793964eef18f0ac321315439d94945820612849c388 \ - --hash=sha256:af0ebadc74e281a517141daad9d0f2c5d93ab78e9d455113719a45a49da9db4e \ - --hash=sha256:cb29edb9eab15742d791e1025dd7b6a8f6fcb53802ad2f6e3adcb102051063ab \ - --hash=sha256:cd68be2559e2a3b84f517fb029ee611546f7812b1fdd0aa2ecc9bc6ec0e4fdde \ - --hash=sha256:cdee09140e1cd184ba9324ec1df410e7147242b94b5f8b0c64fc89e38a8ba531 \ - --hash=sha256:db977c4ca738dd9ce508557d4fce0f5aebd105e158c725beec86feb1f6bc20d8 \ - --hash=sha256:dd5789b2948ca702c17027c84c2accb552fc30f4622a98ab5c51fcfe8c50d3e7 \ - --hash=sha256:e250a42f15bf9d5b09fe1b293bdba2801cd520a9f5ea2d7fb7536d4441811d20 \ - --hash=sha256:ff8d8fa42675249bb456f5db06c00de6c2f4c27a065955917b28c4f15978b9c3 +protobuf==3.20.2 \ + --hash=sha256:03d76b7bd42ac4a6e109742a4edf81ffe26ffd87c5993126d894fe48a120396a \ + --hash=sha256:09e25909c4297d71d97612f04f41cea8fa8510096864f2835ad2f3b3df5a5559 \ + --hash=sha256:18e34a10ae10d458b027d7638a599c964b030c1739ebd035a1dfc0e22baa3bfe \ + --hash=sha256:291fb4307094bf5ccc29f424b42268640e00d5240bf0d9b86bf3079f7576474d \ + --hash=sha256:2c0b040d0b5d5d207936ca2d02f00f765906622c07d3fa19c23a16a8ca71873f \ + 
--hash=sha256:384164994727f274cc34b8abd41a9e7e0562801361ee77437099ff6dfedd024b \ + --hash=sha256:3cb608e5a0eb61b8e00fe641d9f0282cd0eedb603be372f91f163cbfbca0ded0 \ + --hash=sha256:5d9402bf27d11e37801d1743eada54372f986a372ec9679673bfcc5c60441151 \ + --hash=sha256:712dca319eee507a1e7df3591e639a2b112a2f4a62d40fe7832a16fd19151750 \ + --hash=sha256:7a5037af4e76c975b88c3becdf53922b5ffa3f2cddf657574a4920a3b33b80f3 \ + --hash=sha256:8228e56a865c27163d5d1d1771d94b98194aa6917bcfb6ce139cbfa8e3c27334 \ + --hash=sha256:84a1544252a933ef07bb0b5ef13afe7c36232a774affa673fc3636f7cee1db6c \ + --hash=sha256:84fe5953b18a383fd4495d375fe16e1e55e0a3afe7b4f7b4d01a3a0649fcda9d \ + --hash=sha256:9c673c8bfdf52f903081816b9e0e612186684f4eb4c17eeb729133022d6032e3 \ + --hash=sha256:9f876a69ca55aed879b43c295a328970306e8e80a263ec91cf6e9189243c613b \ + --hash=sha256:a9e5ae5a8e8985c67e8944c23035a0dff2c26b0f5070b2f55b217a1c33bbe8b1 \ + --hash=sha256:b4fdb29c5a7406e3f7ef176b2a7079baa68b5b854f364c21abe327bbeec01cdb \ + --hash=sha256:c184485e0dfba4dfd451c3bd348c2e685d6523543a0f91b9fd4ae90eb09e8422 \ + --hash=sha256:c9cdf251c582c16fd6a9f5e95836c90828d51b0069ad22f463761d27c6c19019 \ + --hash=sha256:e39cf61bb8582bda88cdfebc0db163b774e7e03364bbf9ce1ead13863e81e359 \ + --hash=sha256:e8fbc522303e09036c752a0afcc5c0603e917222d8bedc02813fd73b4b4ed804 \ + --hash=sha256:f34464ab1207114e73bba0794d1257c150a2b89b7a9faf504e00af7c9fd58978 \ + --hash=sha256:f52dabc96ca99ebd2169dadbe018824ebda08a795c7684a0b7d203a290f3adb0 # via # -r requirements.in # grpcio-tools -psutil==5.9.1 \ - --hash=sha256:068935df39055bf27a29824b95c801c7a5130f118b806eee663cad28dca97685 \ - --hash=sha256:0904727e0b0a038830b019551cf3204dd48ef5c6868adc776e06e93d615fc5fc \ - --hash=sha256:0f15a19a05f39a09327345bc279c1ba4a8cfb0172cc0d3c7f7d16c813b2e7d36 \ - --hash=sha256:19f36c16012ba9cfc742604df189f2f28d2720e23ff7d1e81602dbe066be9fd1 \ - --hash=sha256:20b27771b077dcaa0de1de3ad52d22538fe101f9946d6dc7869e6f694f079329 \ - 
--hash=sha256:28976df6c64ddd6320d281128817f32c29b539a52bdae5e192537bc338a9ec81 \ - --hash=sha256:29a442e25fab1f4d05e2655bb1b8ab6887981838d22effa2396d584b740194de \ - --hash=sha256:3054e923204b8e9c23a55b23b6df73a8089ae1d075cb0bf711d3e9da1724ded4 \ - --hash=sha256:32c52611756096ae91f5d1499fe6c53b86f4a9ada147ee42db4991ba1520e574 \ - --hash=sha256:3a76ad658641172d9c6e593de6fe248ddde825b5866464c3b2ee26c35da9d237 \ - --hash=sha256:44d1826150d49ffd62035785a9e2c56afcea66e55b43b8b630d7706276e87f22 \ - --hash=sha256:4b6750a73a9c4a4e689490ccb862d53c7b976a2a35c4e1846d049dcc3f17d83b \ - --hash=sha256:56960b9e8edcca1456f8c86a196f0c3d8e3e361320071c93378d41445ffd28b0 \ - --hash=sha256:57f1819b5d9e95cdfb0c881a8a5b7d542ed0b7c522d575706a80bedc848c8954 \ - --hash=sha256:58678bbadae12e0db55186dc58f2888839228ac9f41cc7848853539b70490021 \ - --hash=sha256:645bd4f7bb5b8633803e0b6746ff1628724668681a434482546887d22c7a9537 \ - --hash=sha256:799759d809c31aab5fe4579e50addf84565e71c1dc9f1c31258f159ff70d3f87 \ - --hash=sha256:79c9108d9aa7fa6fba6e668b61b82facc067a6b81517cab34d07a84aa89f3df0 \ - --hash=sha256:91c7ff2a40c373d0cc9121d54bc5f31c4fa09c346528e6a08d1845bce5771ffc \ - --hash=sha256:9272167b5f5fbfe16945be3db475b3ce8d792386907e673a209da686176552af \ - --hash=sha256:944c4b4b82dc4a1b805329c980f270f170fdc9945464223f2ec8e57563139cf4 \ - --hash=sha256:a6a11e48cb93a5fa606306493f439b4aa7c56cb03fc9ace7f6bfa21aaf07c453 \ - --hash=sha256:a8746bfe4e8f659528c5c7e9af5090c5a7d252f32b2e859c584ef7d8efb1e689 \ - --hash=sha256:abd9246e4cdd5b554a2ddd97c157e292ac11ef3e7af25ac56b08b455c829dca8 \ - --hash=sha256:b14ee12da9338f5e5b3a3ef7ca58b3cba30f5b66f7662159762932e6d0b8f680 \ - --hash=sha256:b88f75005586131276634027f4219d06e0561292be8bd6bc7f2f00bdabd63c4e \ - --hash=sha256:c7be9d7f5b0d206f0bbc3794b8e16fb7dbc53ec9e40bbe8787c6f2d38efcf6c9 \ - --hash=sha256:d2d006286fbcb60f0b391741f520862e9b69f4019b4d738a2a45728c7e952f1b \ - --hash=sha256:db417f0865f90bdc07fa30e1aadc69b6f4cad7f86324b02aa842034efe8d8c4d \ - 
--hash=sha256:e7e10454cb1ab62cc6ce776e1c135a64045a11ec4c6d254d3f7689c16eb3efd2 \ - --hash=sha256:f65f9a46d984b8cd9b3750c2bdb419b2996895b005aefa6cbaba9a143b1ce2c5 \ - --hash=sha256:fea896b54f3a4ae6f790ac1d017101252c93f6fe075d0e7571543510f11d2676 +psutil==5.9.2 \ + --hash=sha256:14b29f581b5edab1f133563272a6011925401804d52d603c5c606936b49c8b97 \ + --hash=sha256:256098b4f6ffea6441eb54ab3eb64db9ecef18f6a80d7ba91549195d55420f84 \ + --hash=sha256:39ec06dc6c934fb53df10c1672e299145ce609ff0611b569e75a88f313634969 \ + --hash=sha256:404f4816c16a2fcc4eaa36d7eb49a66df2d083e829d3e39ee8759a411dbc9ecf \ + --hash=sha256:42638876b7f5ef43cef8dcf640d3401b27a51ee3fa137cb2aa2e72e188414c32 \ + --hash=sha256:4642fd93785a29353d6917a23e2ac6177308ef5e8be5cc17008d885cb9f70f12 \ + --hash=sha256:4fb54941aac044a61db9d8eb56fc5bee207db3bc58645d657249030e15ba3727 \ + --hash=sha256:561dec454853846d1dd0247b44c2e66a0a0c490f937086930ec4b8f83bf44f06 \ + --hash=sha256:5d39e3a2d5c40efa977c9a8dd4f679763c43c6c255b1340a56489955dbca767c \ + --hash=sha256:614337922702e9be37a39954d67fdb9e855981624d8011a9927b8f2d3c9625d9 \ + --hash=sha256:67b33f27fc0427483b61563a16c90d9f3b547eeb7af0ef1b9fe024cdc9b3a6ea \ + --hash=sha256:68b35cbff92d1f7103d8f1db77c977e72f49fcefae3d3d2b91c76b0e7aef48b8 \ + --hash=sha256:7cbb795dcd8ed8fd238bc9e9f64ab188f3f4096d2e811b5a82da53d164b84c3f \ + --hash=sha256:8f024fbb26c8daf5d70287bb3edfafa22283c255287cf523c5d81721e8e5d82c \ + --hash=sha256:91aa0dac0c64688667b4285fa29354acfb3e834e1fd98b535b9986c883c2ce1d \ + --hash=sha256:94e621c6a4ddb2573d4d30cba074f6d1aa0186645917df42c811c473dd22b339 \ + --hash=sha256:9770c1d25aee91417eba7869139d629d6328a9422ce1cdd112bd56377ca98444 \ + --hash=sha256:b1928b9bf478d31fdffdb57101d18f9b70ed4e9b0e41af751851813547b2a9ab \ + --hash=sha256:b2f248ffc346f4f4f0d747ee1947963613216b06688be0be2e393986fe20dbbb \ + --hash=sha256:b315febaebae813326296872fdb4be92ad3ce10d1d742a6b0c49fb619481ed0b \ + 
--hash=sha256:b3591616fa07b15050b2f87e1cdefd06a554382e72866fcc0ab2be9d116486c8 \ + --hash=sha256:b4018d5f9b6651f9896c7a7c2c9f4652e4eea53f10751c4e7d08a9093ab587ec \ + --hash=sha256:d75291912b945a7351d45df682f9644540d564d62115d4a20d45fa17dc2d48f8 \ + --hash=sha256:dc9bda7d5ced744622f157cc8d8bdd51735dafcecff807e928ff26bdb0ff097d \ + --hash=sha256:e3ac2c0375ef498e74b9b4ec56df3c88be43fe56cac465627572dbfb21c4be34 \ + --hash=sha256:e4c4a7636ffc47b7141864f1c5e7d649f42c54e49da2dd3cceb1c5f5d29bfc85 \ + --hash=sha256:ed29ea0b9a372c5188cdb2ad39f937900a10fb5478dc077283bf86eeac678ef1 \ + --hash=sha256:f40ba362fefc11d6bea4403f070078d60053ed422255bd838cd86a40674364c9 \ + --hash=sha256:f4cb67215c10d4657e320037109939b1c1d2fd70ca3d76301992f89fe2edb1f1 \ + --hash=sha256:f7929a516125f62399d6e8e026129c8835f6c5a3aab88c3fff1a05ee8feb840d \ + --hash=sha256:fd331866628d18223a4265371fd255774affd86244fc307ef66eaf00de0633d5 \ + --hash=sha256:feb861a10b6c3bb00701063b37e4afc754f8217f0f09c42280586bd6ac712b5c # via -r requirements.in py==1.11.0 \ --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ @@ -1174,32 +1172,32 @@ pyparsing==3.0.9 \ pyroute2==0.6.13 \ --hash=sha256:b03d49a581945fec2b1ec7d1d5125c6f40ba04ed11affc90c4caddc019e25792 # via -r requirements.in -pyroute2-core==0.6.13 \ +pyroute2.core==0.6.13 \ --hash=sha256:227dfd9f19888ddd1341966822ffd5880db9e3c89375096418c660ff4d1a11d0 # via # pyroute2 - # pyroute2-ethtool - # pyroute2-ipdb - # pyroute2-ipset - # pyroute2-ndb - # pyroute2-nftables - # pyroute2-nslink -pyroute2-ethtool==0.6.13 \ + # pyroute2.ethtool + # pyroute2.ipdb + # pyroute2.ipset + # pyroute2.ndb + # pyroute2.nftables + # pyroute2.nslink +pyroute2.ethtool==0.6.13 \ --hash=sha256:0a687fea0fcd77d9074c7c18ba35d9b9f70e4217ebe68a687e200408473a3bd4 # via pyroute2 -pyroute2-ipdb==0.6.13 \ +pyroute2.ipdb==0.6.13 \ --hash=sha256:bbbbb75d13be96e4549cf70eb94fd70b2e1736ea301ac6b683f56aa1acd84d5a # via pyroute2 -pyroute2-ipset==0.6.13 \ 
+pyroute2.ipset==0.6.13 \ --hash=sha256:28a254f622a18976d0683603d5aefda5ab7c8528fa9e36beb85bce52026f7866 # via pyroute2 -pyroute2-ndb==0.6.13 \ +pyroute2.ndb==0.6.13 \ --hash=sha256:09b1f55f26043ce64c933e8224fd08444a498f381e5dc483bc9f428cbaf0901a # via pyroute2 -pyroute2-nftables==0.6.13 \ +pyroute2.nftables==0.6.13 \ --hash=sha256:c94bd740d50b03a1a8d9654f769e77afc77a75e05fc5887dd0551e3970f86592 # via pyroute2 -pyroute2-nslink==0.6.13 \ +pyroute2.nslink==0.6.13 \ --hash=sha256:86ed506cadccb154cd27aebb3dbf73ebb723c391104e7f0f3bc2c4a39c62366c # via pyroute2 pyrsistent==0.18.1 \ @@ -1228,9 +1226,9 @@ pyrsistent==0.18.1 \ pystemd==0.10.0 \ --hash=sha256:d74a814bfda01085db1a8ad90be3cb27daf23a51ab6b03e7e29ec811fa2ae859 # via -r requirements.in -pytest==7.1.2 \ - --hash=sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c \ - --hash=sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45 +pytest==7.1.3 \ + --hash=sha256:1377bda3466d70b55e3f5cecfa55bb7cfcf219c7964629b967c37cf0bda818b7 \ + --hash=sha256:4f365fec2dff9c1162f834d9f18af1ba13062db0c708bf7b946f8a5c76180c39 # via # -r requirements.in # pytest-cov @@ -1256,6 +1254,7 @@ pytz==2022.2.1 \ # bravado-core # spyne pyyaml==6.0 \ + --hash=sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf \ --hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \ --hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \ --hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \ @@ -1267,26 +1266,32 @@ pyyaml==6.0 \ --hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \ --hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \ --hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \ + --hash=sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782 \ --hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \ 
--hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \ --hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \ --hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 \ --hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \ + --hash=sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1 \ --hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \ --hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \ --hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \ --hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \ --hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \ --hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \ + --hash=sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d \ --hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \ --hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \ + --hash=sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7 \ --hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \ --hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \ --hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \ + --hash=sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358 \ --hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \ --hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \ --hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \ --hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \ + --hash=sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f \ 
--hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \ --hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5 # via @@ -1306,7 +1311,7 @@ redis-collections==0.11.0 \ --hash=sha256:0f6cda00666fdd26e3b8ca47da13a653eaf4cc4e45470a3b09f17d65061fea8a \ --hash=sha256:d23e8c0f6bf50de10c98a14a3b636ff1bb21119386f884f2641c906832bc4ec9 # via -r requirements.in -repoze-lru==0.7 \ +repoze.lru==0.7 \ --hash=sha256:0429a75e19380e4ed50c0694e26ac8819b4ea7851ee1fc7583c8572db80aff77 \ --hash=sha256:f77bf0e1096ea445beadd35f3479c5cff2aa1efe604a133e67150bc8630a62ea # via routes @@ -1315,6 +1320,7 @@ requests==2.28.1 \ --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349 # via # -r requirements.in + # bravado-core # docker # oslo-config rfc3986==2.0.0 \ @@ -1433,15 +1439,16 @@ stevedore==4.0.0 \ strict-rfc3339==0.7 \ --hash=sha256:5cad17bedfc3af57b399db0fed32771f18fc54bbd917e85546088607ac5e1277 # via jsonschema -swagger-spec-validator==2.7.4 \ - --hash=sha256:2aee5e1fc0503be9f8299378b10c92169572781573c6de3315e831fd0559ba73 \ - --hash=sha256:4e373a4db5262e7257fde17d84c5c0178327b8057985ab1be63f580bfa009855 +swagger-spec-validator==2.7.6 \ + --hash=sha256:73f33e631a58f407265f2f813d194f2762a2b86f9aa905e7eee3df9b7f9428d3 \ + --hash=sha256:ff55d671f4cf8a386e7ecda60267d6cdd2cfbe0b3521a8ccf09b0669cbb72ab6 # via bravado-core systemd-python==234 \ --hash=sha256:fd0e44bf70eadae45aadc292cb0a7eb5b0b6372cd1b391228047d33895db83e7 # via -r requirements.in -termcolor==1.1.0 \ - --hash=sha256:1d6d69ce66211143803fbc56652b41d73b4a400a2891d7bf7a1cdf4c02de613b +termcolor==2.0.1 \ + --hash=sha256:6b2cf769e93364a2676e1de56a7c0cff2cf5bd07f37e9cc80b0dd6320ebfe388 \ + --hash=sha256:7e597f9de8e001a3208c4132938597413b9da45382b6f1d150cff8d062b7aaa3 # via fire tinyrpc==1.1.4 \ --hash=sha256:c99f412e5d9849c2deb468ea37fee2faf12fbc95bdd3616ae5c276ea195ed6bd @@ -1474,9 +1481,9 @@ webob==1.8.7 \ 
--hash=sha256:73aae30359291c14fa3b956f8b5ca31960e420c28c1bec002547fb04928cf89b \ --hash=sha256:b64ef5141be559cfade448f044fa45c2260351edcb6a8ef6b7e00c7dcef0c323 # via -r requirements.in -websocket-client==1.3.3 \ - --hash=sha256:5d55652dc1d0b3c734f044337d929aaf83f4f9138816ec680c1aefefb4dc4877 \ - --hash=sha256:d58c5f284d6a9bf8379dab423259fe8f85b70d5fa5d2916d5791a84594b122b1 +websocket-client==1.4.1 \ + --hash=sha256:398909eb7e261f44b8f4bd474785b6ec5f5b499d4953342fe9755e01ef624090 \ + --hash=sha256:f9611eb65c8241a67fb373bef040b3cf8ad377a9f6546a12b620b6511e8ea9ef # via docker werkzeug==2.2.2 \ --hash=sha256:7ea2d48322cc7c0f8b3a215ed73eabd7b5d75d0b50e31ab006286ccff9e00b8f \ diff --git a/bazel/external/requirements_README.md b/bazel/external/requirements_README.md index 04f2c53ad0cd..9d709ab31b9a 100644 --- a/bazel/external/requirements_README.md +++ b/bazel/external/requirements_README.md @@ -10,7 +10,7 @@ Requirements.txt holds all Python dependencies which are required by Python-base `cd $MAGMA/bazel/external` - `pip-compile --generate-hashes --output-file=requirements.txt requirements.in` + `pip-compile --upgrade --generate-hashes --output-file=requirements.txt requirements.in` The changes are then automatically included in the next Bazel build process. diff --git a/bazel/external/system_libraries.BUILD b/bazel/external/system_libraries.BUILD index 222fdf1c5cd6..da0e03eb457f 100644 --- a/bazel/external/system_libraries.BUILD +++ b/bazel/external/system_libraries.BUILD @@ -120,3 +120,8 @@ cc_library( name = "libsqlite3-dev", linkopts = ["-lsqlite3"], ) + +cc_library( + name = "libsystemd", + linkopts = ["-lsystemd"], +) diff --git a/bazel/runfiles.bzl b/bazel/runfiles.bzl new file mode 100644 index 000000000000..fa0768a58090 --- /dev/null +++ b/bazel/runfiles.bzl @@ -0,0 +1,128 @@ +# Copyright 2022 The Magma Authors. + +# This source code is licensed under the BSD-style license found in the +# LICENSE file in the root directory of this source tree.
+ +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Inspired by https://github.com/aspect-build/rules_container/blob/main/language/runfiles.bzl by @thesayyn +# Contributed to https://github.com/aspect-build/rules_container under Apache-2.0 + +""" +This file represents a workaround for the current state of https://github.com/bazelbuild/rules_pkg. +Dependencies are not packaged (even internal dependencies). The rule here expands all +internal and external dependencies into a PackageFilesInfo so that the files can be used in +package rules. + +The dependencies are not put into service specific paths. This means, that, e.g., pip dependencies and +proto files that are used by multiple services are only added once in the packaging process. + +This rule is currently only applied to python dependencies (go and c/c++ dependencies are linked statically +into the binaries). 
+ +Additionally this rule +* renames the relative path of files so that they can be found correctly in the target system (usually + packaged into the "dist-packages" folder of the used python interpreter) +* excludes files that are not needed during runtime +""" + +load("@rules_pkg//:providers.bzl", "PackageFilesInfo") + +STRIP_PATHS = [ + "lte/gateway/python/", + "orc8r/gateway/python/", + "lte/swagger/specs_root/", + "orc8r/swagger/specs_root/", +] + +# beware: order matters here, e.g., "lte/protos/oai/" needs to be before "lte/protos/" +STRIP_PATHS_PROTOS = [ + "dp/protos/", + "feg/protos/", + "lte/protos/oai/", + "lte/protos/", + "orc8r/protos/", + "orc8r/swagger/magmad_events_v1", +] + +EXCLUDES = [ + # external protobuf is only needed during compile time + "../com_google_protobuf", + # external grpc is only needed during compile time + "../com_github_grpc_grpc", + # bazel compiled grpc library + "_solib_k8/libexternal_Scom_Ugithub_Ugrpc_Ugrpc_Slibgrpc.so", +] + +def _is_excluded(file): + for exclude in EXCLUDES: + if file.short_path.startswith(exclude): + return True + return False + +def _runfile_path(file): + path = file.short_path + if path.startswith("../"): + return _strip_external(path) + return _strip_internal(path, file) + +def _strip_external(path): + path_clean = path.replace("../", "") + + # removes the first folder + path_wo_first_folder = path_clean.partition("/")[2] + + # special case: grpc is packaged in subfolders (stripped here) + if path_wo_first_folder.startswith("src/python/grpcio/"): + return path_wo_first_folder.replace("src/python/grpcio/", "") + + return path_wo_first_folder + +def _strip_internal(path, file): + for prefix in STRIP_PATHS: + if path.startswith(prefix): + # lte/gateway/python/magma/foo/bar.py -> magma/foo/bar.py + return path.replace(prefix, "", 1) + + for prefix in STRIP_PATHS_PROTOS: + if path.startswith(prefix): + # lte/protos/target_name/lte/protos/foo_pb2.py -> lte/protos/foo_pb2.py + return path.replace(prefix, "", 
1).replace(file.owner.name + "/", "", 1) + + print("Unhandled path: " + path) # buildifier: disable=print + + return "FIXME" # needs to be handled + +def _runfiles_impl(ctx): + py_infos = [target[PyInfo] for target in ctx.attr.targets] + def_infos = [target[DefaultInfo] for target in ctx.attr.targets] + + files = depset(transitive = [py_info.transitive_sources for py_info in py_infos] + [def_info.default_runfiles.files for def_info in def_infos]) + file_map = {} + mapped_files = [] + + for file in files.to_list(): + if not _is_excluded(file): + file_map[_runfile_path(file)] = file + mapped_files = mapped_files + [file] + + files = depset(transitive = [files]) + + return [ + PackageFilesInfo( + dest_src_map = file_map, + attributes = {"mode": "0755"}, + ), + DefaultInfo(files = depset(mapped_files)), + ] + +expand_runfiles = rule( + implementation = _runfiles_impl, + attrs = { + "targets": attr.label_list(providers = [PyInfo]), + }, +) diff --git a/bazel/scripts/check_c_cpp_bazel.sh b/bazel/scripts/check_c_cpp_bazel.sh index 06de77109169..2ef63c9f7d00 100755 --- a/bazel/scripts/check_c_cpp_bazel.sh +++ b/bazel/scripts/check_c_cpp_bazel.sh @@ -37,9 +37,6 @@ DENY_LIST_NOT_YET_BAZELIFIED=( # this needs to be refactored when make is not used anymore "./lte/gateway/python/magma/pipelined/ebpf/ebpf_ul_handler.c" "./lte/gateway/python/magma/pipelined/ebpf/ebpf_dl_handler.c" - # TODO: GH12771 add MME_BENCHMARK support and bazelify files - "./lte/gateway/c/core/oai/tasks/mme_app/experimental/mme_app_serialization.hpp" - "./lte/gateway/c/core/oai/tasks/mme_app/experimental/mme_app_serialization.cpp" ) DENY_LIST=( "${DENY_LIST_NOT_RELEVANT[@]}" "${DENY_LIST_NOT_YET_BAZELIFIED[@]}" ) diff --git a/bazel/scripts/check_py_bazel.sh b/bazel/scripts/check_py_bazel.sh index c4570ffb3b9f..f7d5f667fc3d 100755 --- a/bazel/scripts/check_py_bazel.sh +++ b/bazel/scripts/check_py_bazel.sh @@ -58,29 +58,30 @@ DENY_LIST_NOT_YET_BAZELIFIED=( "./lte/gateway/python/integ_tests/cloud_tests" 
"./lte/gateway/python/integ_tests/federated_tests" "./lte/gateway/python/integ_tests/s1aptests/workflow" + "./lte/gateway/python/integ_tests/s1aptests/test_enable_ipv6_iface.py" + "./lte/gateway/python/integ_tests/s1aptests/test_ipv6_non_nat_dp_ul_tcp.py" + "./lte/gateway/python/integ_tests/s1aptests/test_disable_ipv6_iface.py" + # as of 2022.09.19 commented out in make "./lte/gateway/python/integ_tests/s1aptests/test_agw_offload_mixed_idle_active_multiue.py" "./lte/gateway/python/integ_tests/s1aptests/test_attach_detach_two_pdns_with_tcptraffic.py" - "./lte/gateway/python/integ_tests/s1aptests/test_stateless_multi_ue_mixedstate_mme_restart.py" - "./lte/gateway/python/integ_tests/s1aptests/test_attach_ul_udp_data_multi_ue.py" - "./lte/gateway/python/integ_tests/s1aptests/test_attach_standalone_act_dflt_ber_ctxt_rej_ded_bearer_activation.py" + "./lte/gateway/python/integ_tests/s1aptests/test_attach_dl_tcp_data_multi_ue.py" "./lte/gateway/python/integ_tests/s1aptests/test_attach_dl_udp_data_multi_ue.py" - "./lte/gateway/python/integ_tests/s1aptests/test_ipv4v6_non_nat_ul_tcp.py" - "./lte/gateway/python/integ_tests/s1aptests/test_attach_ul_tcp_data_multi_ue.py" "./lte/gateway/python/integ_tests/s1aptests/test_attach_dl_ul_tcp_data_multi_ue.py" + "./lte/gateway/python/integ_tests/s1aptests/test_attach_standalone_act_dflt_ber_ctxt_rej_ded_bearer_activation.py" + "./lte/gateway/python/integ_tests/s1aptests/test_attach_ul_tcp_data_multi_ue.py" + "./lte/gateway/python/integ_tests/s1aptests/test_attach_ul_udp_data_multi_ue.py" + "./lte/gateway/python/integ_tests/s1aptests/test_attach_with_multiple_mme_restarts.py" "./lte/gateway/python/integ_tests/s1aptests/test_data_flow_after_service_request.py" - "./lte/gateway/python/integ_tests/s1aptests/test_outoforder_erab_setup_rsp_default_bearer.py" - "./lte/gateway/python/integ_tests/s1aptests/test_ipv6_non_nat_dp_dl_tcp.py" - "./lte/gateway/python/integ_tests/s1aptests/test_ipv6_non_nat_dp_dl_udp.py" - 
"./lte/gateway/python/integ_tests/s1aptests/test_ipv6_non_nat_ded_bearer_ul_tcp.py" "./lte/gateway/python/integ_tests/s1aptests/test_ipv4v6_non_nat_ded_bearer_dl_tcp.py" "./lte/gateway/python/integ_tests/s1aptests/test_ipv4v6_non_nat_ded_bearer_ul_tcp.py" + "./lte/gateway/python/integ_tests/s1aptests/test_ipv4v6_non_nat_ul_tcp.py" "./lte/gateway/python/integ_tests/s1aptests/test_ipv6_non_nat_ded_bearer_dl_tcp.py" - "./lte/gateway/python/integ_tests/s1aptests/test_ipv6_non_nat_dp_ul_tcp.py" + "./lte/gateway/python/integ_tests/s1aptests/test_ipv6_non_nat_ded_bearer_ul_tcp.py" + "./lte/gateway/python/integ_tests/s1aptests/test_ipv6_non_nat_dp_dl_tcp.py" + "./lte/gateway/python/integ_tests/s1aptests/test_ipv6_non_nat_dp_dl_udp.py" + "./lte/gateway/python/integ_tests/s1aptests/test_outoforder_erab_setup_rsp_default_bearer.py" "./lte/gateway/python/integ_tests/s1aptests/test_scalability_attach_detach_multi_ue.py" - "./lte/gateway/python/integ_tests/s1aptests/test_attach_dl_tcp_data_multi_ue.py" - "./lte/gateway/python/integ_tests/s1aptests/test_attach_with_multiple_mme_restarts.py" - "./lte/gateway/python/integ_tests/s1aptests/test_enable_ipv6_iface.py" - "./lte/gateway/python/integ_tests/s1aptests/test_disable_ipv6_iface.py" + "./lte/gateway/python/integ_tests/s1aptests/test_stateless_multi_ue_mixedstate_mme_restart.py" # TODO: GH12754 move to (lte|orc8r)/gateway/python/scripts/ "./orc8r/gateway/python/magma/common/health/docker_health_service.py" "./orc8r/gateway/python/magma/common/health/health_service.py" @@ -93,7 +94,6 @@ DENY_LIST_NOT_YET_BAZELIFIED=( "./lte/gateway/python/magma/pipelined/tests/script/gtp-packet.py" "./lte/gateway/python/magma/pipelined/tests/script/ip-packet.py" # TODO: GH9878 needs to be further analyzed - "./lte/gateway/python/load_tests" "./lte/gateway/python/scripts" "./orc8r/gateway/python/scripts" ) diff --git a/bazel/scripts/link_scripts_for_bazel_integ_tests.sh b/bazel/scripts/link_scripts_for_bazel_integ_tests.sh index 
dc53028b262e..4c8e78c4e464 100755 --- a/bazel/scripts/link_scripts_for_bazel_integ_tests.sh +++ b/bazel/scripts/link_scripts_for_bazel_integ_tests.sh @@ -21,9 +21,9 @@ set -euo pipefail get_python_scripts() { echo "Collecting script targets..." - mapfile -t PYTHON_SCRIPTS < <(bazel query "kind(.*_binary, \ + mapfile -t PYTHON_SCRIPTS < <(bazel query "attr(tags, 'util_script', kind(.*_binary, \ //orc8r/gateway/python/scripts/... union \ - //lte/gateway/python/scripts/... )") + //lte/gateway/python/scripts/... ))") } format_targets_to_paths() { diff --git a/bazel/scripts/run_integ_tests.sh b/bazel/scripts/run_integ_tests.sh index 78b8fe7f4533..dc6785f2c998 100755 --- a/bazel/scripts/run_integ_tests.sh +++ b/bazel/scripts/run_integ_tests.sh @@ -299,10 +299,11 @@ run_test() { } create_xml_report() { - rm -f "${MERGED_REPORT_FOLDER}/"*.xml - mkdir -p "${MERGED_REPORT_FOLDER}" - python3 lte/gateway/python/scripts/runtime_report.py -i "[^\/]+\.xml" -w "${INTEGTEST_REPORT_FOLDER}" -o "${MERGED_REPORT_FOLDER}/integtests_report.xml" - rm -f "${INTEGTEST_REPORT_FOLDER}/"*.xml + local MERGED_REPORT_XML="integtests_report.xml" + rm -f "${MERGED_REPORT_FOLDER}/${MERGED_REPORT_XML}" + mkdir -p "${INTEGTEST_REPORT_FOLDER}" + python3 lte/gateway/python/scripts/runtime_report.py -i "[^\/]+\.xml" -w "${INTEGTEST_REPORT_FOLDER}" -o "${MERGED_REPORT_FOLDER}/${MERGED_REPORT_XML}" + sudo rm -f "${INTEGTEST_REPORT_FOLDER}/"*.xml } print_summary() { @@ -345,8 +346,8 @@ RETRY_ATTEMPTS=2 RERUN_PREVIOUSLY_FAILED="false" FAILED_LIST=() FAILED_LIST_FILE="/tmp/last_failed_integration_tests.txt" -INTEGTEST_REPORT_FOLDER="/var/tmp/test_results" -MERGED_REPORT_FOLDER="${INTEGTEST_REPORT_FOLDER}/integtest_merged_report" +MERGED_REPORT_FOLDER="/var/tmp/test_results" +INTEGTEST_REPORT_FOLDER="${MERGED_REPORT_FOLDER}/integtest_reports" BOLD='\033[1m' RED='\033[0;31m' diff --git a/bazel/scripts/run_load_tests.sh b/bazel/scripts/run_load_tests.sh new file mode 100755 index 000000000000..505bfd323ac4 --- 
/dev/null +++ b/bazel/scripts/run_load_tests.sh @@ -0,0 +1,66 @@ +#!/usr/bin/env bash + +################################################################################ +# Copyright 2022 The Magma Authors. + +# This source code is licensed under the BSD-style license found in the +# LICENSE file in the root directory of this source tree. + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +################################################################################ + +set -euo pipefail + +############################################################################### +# FUNCTION DECLARATIONS +############################################################################### + +build_load_tests() { + echo "Building load tests..." + bazel build //lte/gateway/python/load_tests:all + echo "Finished building load tests." + echo "#############################" +} + +run_load_tests() { + echo "Running load tests..." + for LOAD_TEST in "${LOAD_TEST_LIST[@]}"; + do + echo "#############################" + echo "Running load test: ${LOAD_TEST}" + # shellcheck disable=SC2086 + sudo -E PATH="${PATH}" "${MAGMA_ROOT}/bazel-bin/lte/gateway/python/load_tests/"${LOAD_TEST} + done + echo "#############################" + echo "Finished running load tests." + echo "The result JSON files (result_.json) can be found in /var/tmp." 
+} + +############################################################################### +# SCRIPT SECTION +############################################################################### + +declare -a LOAD_TEST_LIST=("loadtest_mobilityd allocate" \ + "loadtest_mobilityd release" \ + "loadtest_pipelined activate_flows" \ + "loadtest_pipelined deactivate_flows" \ + "loadtest_sessiond create" \ + "loadtest_sessiond end" \ + "loadtest_subscriberdb add" \ + "loadtest_subscriberdb list" \ + "loadtest_subscriberdb delete" \ + "loadtest_subscriberdb get" \ + "loadtest_subscriberdb update" \ + "loadtest_policydb enable_static_rules" \ + "loadtest_policydb disable_static_rules" \ + "loadtest_directoryd update_record" \ + "loadtest_directoryd delete_record" \ + "loadtest_directoryd get_record" \ + "loadtest_directoryd get_all_records") + +build_load_tests +run_load_tests diff --git a/bazel/scripts/run_sudo_tests.sh b/bazel/scripts/run_sudo_tests.sh index 4b44783e3a05..7f02c3fccf46 100755 --- a/bazel/scripts/run_sudo_tests.sh +++ b/bazel/scripts/run_sudo_tests.sh @@ -76,10 +76,11 @@ run_test() { } create_xml_report() { - rm -f "${MERGED_REPORT_FOLDER}/"*.xml - mkdir -p "${MERGED_REPORT_FOLDER}" - python3 lte/gateway/python/scripts/runtime_report.py -i "[^\/]+\.xml" -w "${SUDO_TEST_REPORT_FOLDER}" -o "${MERGED_REPORT_FOLDER}/sudo_tests_report.xml" - rm -f "${SUDO_TEST_REPORT_FOLDER}/"*.xml + local MERGED_REPORT_XML="sudotests_report.xml" + rm -f "${MERGED_REPORT_FOLDER}/${MERGED_REPORT_XML}" + mkdir -p "${SUDO_TEST_REPORT_FOLDER}" + python3 lte/gateway/python/scripts/runtime_report.py -i "[^\/]+\.xml" -w "${SUDO_TEST_REPORT_FOLDER}" -o "${MERGED_REPORT_FOLDER}/${MERGED_REPORT_XML}" + sudo rm -f "${SUDO_TEST_REPORT_FOLDER}/"*.xml } print_summary() { @@ -104,8 +105,8 @@ NUM_SUCCESS=0 NUM_RUN=1 RETRY_ON_FAILURE="false" RETRY_ATTEMPTS=2 -SUDO_TEST_REPORT_FOLDER="/var/tmp/test_results" -MERGED_REPORT_FOLDER="${SUDO_TEST_REPORT_FOLDER}/sudo_merged_report" 
+MERGED_REPORT_FOLDER="/var/tmp/test_results" +SUDO_TEST_REPORT_FOLDER="${MERGED_REPORT_FOLDER}/sudotest_reports" BOLD='\033[1m' RED='\033[0;31m' diff --git a/bazel/scripts/test_python_service_imports.sh b/bazel/scripts/test_python_service_imports.sh index 0683acc4e80c..78ba0b4643a1 100755 --- a/bazel/scripts/test_python_service_imports.sh +++ b/bazel/scripts/test_python_service_imports.sh @@ -37,7 +37,7 @@ collect_services() { SERVICES=( "${SERVICE_PATH}" ) else echo "Multiple services specified:" - mapfile -t SERVICES < <(bazel query "attr(main, main.py, kind(py_binary, //${SERVICE_PATH}...))") + mapfile -t SERVICES < <(bazel query "attr(tags, service, kind(py_binary, //${SERVICE_PATH}...))") fi if [[ "${#SERVICES[@]}" -eq 0 ]]; then diff --git a/dp/cloud/docker/go/active_mode_controller/Dockerfile b/bazel/test/BUILD.bazel similarity index 79% rename from dp/cloud/docker/go/active_mode_controller/Dockerfile rename to bazel/test/BUILD.bazel index a76e52add951..99eaca117c62 100644 --- a/dp/cloud/docker/go/active_mode_controller/Dockerfile +++ b/bazel/test/BUILD.bazel @@ -9,6 +9,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -# TODO remove AMC from deployment scripts -FROM alpine:3.14.3 as final -CMD ["sleep", "infinity"] +load("//bazel/test:runfiles_test.bzl", "runfiles_test_suite") + +runfiles_test_suite( + name = "runfiles_test_suite_target", +) diff --git a/bazel/test/runfiles_test.bzl b/bazel/test/runfiles_test.bzl new file mode 100644 index 000000000000..a40e92090e59 --- /dev/null +++ b/bazel/test/runfiles_test.bzl @@ -0,0 +1,123 @@ +# Copyright 2022 The Magma Authors. + +# This source code is licensed under the BSD-style license found in the +# LICENSE file in the root directory of this source tree. 
+ +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Tests for runfiles.bzl. +See https://bazel.build/rules/testing for general bazel rule testing documentation. +""" + +load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts") +load("@rules_pkg//:providers.bzl", "PackageFilesInfo") +load("//bazel:runfiles.bzl", "expand_runfiles") + +# test suite + +def runfiles_test_suite(name): + _setup_empty_targets_returns_empty_providers_test() + _setup_targets_are_correctly_expanded_test() + + native.test_suite( + name = name, + tests = [ + ":empty_targets_returns_empty_providers_test", + ":targets_are_correctly_expanded_test", + ], + ) + +# setup for rule to be tested + +def _setup_empty_targets_returns_empty_providers_test(): + expand_runfiles( + name = "expand_empty_targets", + tags = ["manual"], # should only be build here + ) + + rule_empty_targets_returns_empty_providers_test( + name = "empty_targets_returns_empty_providers_test", + target_under_test = ":expand_empty_targets", + ) + +def _setup_targets_are_correctly_expanded_test(): + # testing an actually magma target instead of an artificial one + # mconfigs proto should be sufficiently stable + expand_runfiles( + name = "expand_targets", + tags = ["manual"], # should only be build here + targets = ["//lte/protos:mconfigs_python_proto"], + ) + + rule_targets_are_correctly_expanded_test( + name = "targets_are_correctly_expanded_test", + target_under_test = ":expand_targets", + ) + +# asserts + +def _empty_targets_returns_empty_providers_test_impl(ctx): + env = analysistest.begin(ctx) + + target_under_test = analysistest.target_under_test(env) + + asserts.equals( + env, + expected = {"mode": "0755"}, + actual = 
target_under_test[PackageFilesInfo].attributes, + ) + asserts.equals( + env, + expected = {}, + actual = target_under_test[PackageFilesInfo].dest_src_map, + ) + asserts.equals( + env, + expected = depset([]), + actual = target_under_test[DefaultInfo].files, + ) + + return analysistest.end(env) + +expected_mapping = ( + "{" + + '"orc8r/protos/common_pb2.py": , ' + + '"lte/protos/mconfig/mconfigs_pb2.py": ' + + "}" +) + +expected_depset = ( + "depset([" + + ", " + + "" + + "])" +) + +def _targets_are_correctly_expanded_test_impl(ctx): + env = analysistest.begin(ctx) + + target_under_test = analysistest.target_under_test(env) + + asserts.equals( + env, + expected = expected_mapping, + actual = str(target_under_test[PackageFilesInfo].dest_src_map), + ) + asserts.equals( + env, + expected = expected_depset, + actual = str(target_under_test[DefaultInfo].files), + ) + + return analysistest.end(env) + +# creating rules for asserts + +rule_empty_targets_returns_empty_providers_test = analysistest.make(_empty_targets_returns_empty_providers_test_impl) + +rule_targets_are_correctly_expanded_test = analysistest.make(_targets_are_correctly_expanded_test_impl) diff --git a/bazel/test_constants.bzl b/bazel/test_constants.bzl index 65d4a510322f..79cc4ff22ab1 100644 --- a/bazel/test_constants.bzl +++ b/bazel/test_constants.bzl @@ -34,6 +34,9 @@ TAG_MANUAL = ["manual"] # Note: for now a sudo test is also tagged as "manual". TAG_SUDO_TEST = ["sudo_test"] + TAG_MANUAL +# Used for integration tests. These tests need to be executed manually +# by a user with sudo privileges. These tags represent test categories, +# which are used to determine the appropriate environment for them. 
TAG_PRECOMMIT_TEST = ["precommit_test"] + TAG_MANUAL TAG_EXTENDED_TEST = ["extended_test"] + TAG_MANUAL TAG_EXTENDED_TEST_SETUP = ["extended_setup"] + TAG_MANUAL @@ -42,3 +45,9 @@ TAG_NON_SANITY_TEST = ["nonsanity_test"] + TAG_MANUAL TAG_NON_SANITY_TEST_SETUP = ["nonsanity_setup"] + TAG_MANUAL TAG_NON_SANITY_TEST_TEARDOWN = ["nonsanity_teardown"] + TAG_MANUAL TAG_TRAFFIC_SERVER_TEST = ["traffic_server_test"] + +# Tag for utility scripts that are used in the Magma VM. +TAG_UTIL_SCRIPT = ["util_script"] + +# Tag for Magma services. +TAG_SERVICE = ["service"] diff --git a/ci-scripts/JenkinsFile-CWAG-integ-test b/ci-scripts/JenkinsFile-CWAG-integ-test deleted file mode 100644 index 06cde243d2b6..000000000000 --- a/ci-scripts/JenkinsFile-CWAG-integ-test +++ /dev/null @@ -1,185 +0,0 @@ -#!/bin/groovy - -def GIT_URL = "github.com/magma/magma" -def GIT_BRANCH = "master" -def slack_channel = "#magma-ci-bot" - -pipeline { - agent { - label "virtualbox" - } - options { - timestamps() - ansiColor('xterm') - } - stages { - stage("Prepate Env"){ - steps{ - script { - echo "Clean Env" - cleanWs() - sh "docker system prune -f" - echo "Purge old vagrant boxes" - sh "sudo vagrant box prune --force" - sh "sudo chown -R jenkins:libvirt ~jenkins/.vagrant.d" - echo "Clone Sources" - checkout( - changelog: false, - poll: false, - scm: [$class: 'GitSCM', - branches: [[name: '${sha1}']], - doGenerateSubmoduleConfigurations: false, - extensions: [], - submoduleCfg: [], - userRemoteConfigs: [[refspec: '+refs/pull/*:refs/remotes/origin/pr/*', url: "https://" + GIT_URL + ".git"]]] - ) - } - } - } - stage("cwag-precommit"){ - environment { - MAGMA_ROOT = sh(script: 'pwd', , returnStdout: true).trim() - GO111MODULE = "on" - GOPROXY = "https://proxy.golang.org" - } - steps{ - script { - sh "./.github/workflows/scripts/golang_before_install.sh" - dir("cwf/gateway"){ - sh "go mod download" - sh "make -C ${MAGMA_ROOT}/cwf/gateway precommit" - sh "make -C ${MAGMA_ROOT}/cwf/gateway/integ_tests 
precommit" - } - } - } - } - stage("cwf-integ-test"){ - environment { - CIRCLE_REPOSITORY_URL = "https://${GIT_URL}.git" - CIRCLE_BRANCH = "$GIT_BRANCH" - CIRCLE_SHA1 = "HEAD" - PYTHONPATH = "$WORKSPACE/orc8r" - VAGRANT_DEFAULT_PROVIDER = "virtualbox" - } - steps{ - script { - echo "Setup required packages" - sh """ - export PATH; - export PYENV_ROOT="\$HOME/.pyenv" - export PATH="\$PYENV_ROOT/bin:\$PATH" - eval "\$(pyenv init -)" - eval "\$(pyenv virtualenv-init -)" - pyenv global 3.7.0 - pip3 install --upgrade pip - pip3 install fabric3 jsonpickle requests PyYAML awscli docker-compose - """ - dir('cwf/gateway') { - // sh "sleep 10" - sh "vagrant global-status" - sh "vagrant global-status 2>/dev/null | awk '/workspace/{print \$1}' | xargs -I {} vagrant destroy -f {}" - try { - sh "vboxmanage controlvm cwag-dev poweroff || true; vboxmanage unregistervm cwag-dev --delete" - } catch (Exception e) { - echo "cwag-dev didn't exists as expected" - } - try { - sh "vboxmanage controlvm magma-trfserver poweroff || true; vboxmanage unregistervm magma-trfserver --delete" - } catch (Exception e) { - echo "magma-trfserver didn't exists as expected" - } - try { - sh "vboxmanage controlvm cwag-test poweroff || true; vboxmanage unregistervm cwag-test --delete" - } catch (Exception e) { - echo "cwag-test didn't exists as expected" - } - - dir('docker') { - sh """ - docker-compose \ - -f docker-compose.yml \ - -f docker-compose.override.yml \ - -f docker-compose.nginx.yml \ - -f docker-compose.integ-test.yml \ - build --parallel - """ - } - def test_xml = "tests.xml" - def test_html = "tests.html" - def test_folder = "cwf-artifacts" - def test_results = 0 - timeout(time: 110, unit: 'MINUTES') { - try { - sh """ - export PYENV_ROOT="\$HOME/.pyenv" - export PATH="\$PYENV_ROOT/shims:\$PATH" - fab integ_test:destroy_vm=True,transfer_images=True,test_result_xml=$test_xml - """ - } catch (Exception e) { - test_results = 1 - } - } - // Move JUnit test result to /tmp/test-results directory 
- sh "mkdir $test_folder" - try { - sh "junit2html $test_xml $test_html" - sh "cp $test_xml $test_folder" - sh "cp $test_html $test_folder" - } catch (Exception e) { - echo "It might fail because XML file was not generated at previous step" - currentBuild.result = 'FAILURE' - } - - // On failure, transfer logs of key services from docker containers and - // copy to the log directory. This will get stored as an artifact. - // Following lines to be translated in to Jenkins DSL yet - if (test_results) { - currentBuild.result = 'FAILURE' - def services = "sessiond session_proxy pcrf ocs pipelined ingress" - try { - sh """ - export PYENV_ROOT="\$HOME/.pyenv" - export PATH="\$PYENV_ROOT/shims:\$PATH" - fab transfer_artifacts:services='$services',get_core_dump=True - """ - // Copy the log files out from the node - sh "cp *.log $test_folder" - if( fileExists("coredump.tar.gz") ) { - sh "cp coredump.tar.gz $test_folder" - } - } catch (Exception e) {} - } - timeout(time: 10, unit: 'MINUTES') { - archiveArtifacts("$test_folder/*") - } - } - } - } - } - } - post { - success { - script { - def color = "good" - def message = "MAGMA " + JOB_NAME + " build (" + BUILD_ID + "): passed (" + BUILD_URL + ")" - echo message - sendSocialMediaMessage(slack_channel,color, message) - } - } - unsuccessful { - script { - def color = "danger" - def message = "MAGMA " + JOB_NAME + " build (" + BUILD_ID + "): failed (" + BUILD_URL + ")" - echo message - sendSocialMediaMessage(slack_channel,color, message) - } - } - } -} - -//------------------------------------------------------------------------------- -// Abstraction function to send social media messages: -// like on Slack or Mattermost -def sendSocialMediaMessage(pipeChannel, pipeColor, pipeMessage) { - slackSend channel: pipeChannel, color: pipeColor, message: pipeMessage -} diff --git a/ci-scripts/JenkinsFile-CWAG-integ-test-libvirt b/ci-scripts/JenkinsFile-CWAG-integ-test-libvirt deleted file mode 100644 index 14fc83bbb75a..000000000000 
--- a/ci-scripts/JenkinsFile-CWAG-integ-test-libvirt +++ /dev/null @@ -1,212 +0,0 @@ -#!/bin/groovy - -def GIT_URL = "github.com/magma/magma" -def GIT_BRANCH = "master" -def slack_channel = "#magma-ci-bot" - -pipeline { - agent { - label "libvirt" - } - options { - timestamps() - ansiColor('xterm') - } - stages { - stage("Prepate Env"){ - steps{ - script { - echo "Clean Env" - cleanWs() - sh "docker system prune -f" - echo "Purge old vagrant boxes" - sh "sudo vagrant box prune --force" - sh "sudo chown -R jenkins:libvirt ~jenkins/.vagrant.d" - echo "Clone Sources" - checkout( - changelog: false, - poll: false, - scm: [$class: 'GitSCM', - branches: [[name: "${sha1}"]], - doGenerateSubmoduleConfigurations: false, - extensions: [], - submoduleCfg: [], - userRemoteConfigs: [[refspec: '+refs/pull/*:refs/remotes/origin/pr/*', url: "https://" + GIT_URL + ".git"]]] - ) - } - } - } - stage("cwag-precommit"){ - environment { - MAGMA_ROOT = sh(script: 'pwd', , returnStdout: true).trim() - GO111MODULE = "on" - GOPROXY = "https://proxy.golang.org" - } - steps{ - script { - sh "./.github/workflows/scripts/golang_before_install.sh" - dir("cwf/gateway"){ - sh "go mod download" - sh "make -C ${MAGMA_ROOT}/cwf/gateway precommit" - sh "make -C ${MAGMA_ROOT}/cwf/gateway/integ_tests precommit" - } - } - } - } - stage("cwf-integ-test"){ - environment { - CIRCLE_REPOSITORY_URL = "https://${GIT_URL}.git" - CIRCLE_BRANCH = "$GIT_BRANCH" - CIRCLE_SHA1 = "HEAD" - PYTHONPATH = "$WORKSPACE/orc8r" - VAGRANT_DEFAULT_PROVIDER = "libvirt" - } - steps{ - script { - echo "Setup required packages" - sh """ - export PATH; - export PYENV_ROOT="\$HOME/.pyenv" - export PATH="\$PYENV_ROOT/bin:\$PATH" - eval "\$(pyenv init -)" - eval "\$(pyenv virtualenv-init -)" - pyenv global 3.7.0 - pip3 install --upgrade pip - pip3 install fabric3 jsonpickle requests PyYAML awscli docker-compose - """ - // sh "sleep 10" - sh "vagrant global-status" - sh "sudo virsh list --all --name" - sh "vagrant global-status 
2>/dev/null | awk '/workspace/{print \$1}' | xargs -I {} vagrant destroy -f {}" - try { - sh('sudo virsh list --all --name | grep magma | xargs --no-run-if-empty -n1 sudo virsh destroy') - sh('sudo virsh list --all --name | grep cwag | xargs --no-run-if-empty -n1 sudo virsh destroy') - } - catch (Exception e) { - echo "Fine. Let it go..." - } - try { - sh('sudo virsh list --all --name | grep magma | xargs --no-run-if-empty -n1 sudo virsh undefine') - sh('sudo virsh list --all --name | grep cwag | xargs --no-run-if-empty -n1 sudo virsh undefine') - // Clean LTE VMs - sh('cd lte/gateway;virsh undefine gateway_magma || true; vagrant destroy --force magma') - sh('cd lte/gateway;virsh undefine gateway_magma_test || true; vagrant destroy --force magma_test') - sh('cd lte/gateway;virsh undefine gateway_magma_trfserver || true; vagrant destroy --force magma_trfserver') - // Clean CWF VMs - sh('cd cwf/gateway;vagrant destroy --force cwag') - sh('cd cwf/gateway;vagrant destroy --force cwag_test') - sh('cd lte/gateway;vagrant destroy --force magma_trfserver') - } - catch (Exception e) { - echo "Fine. Let it go..." - } - - try { - sh('sudo virsh list --all --name') - sh('sudo virsh list --all --name | grep _magma | xargs --no-run-if-empty -n1 sudo virsh destroy') - sh('sudo virsh list --all --name | grep cwag | xargs --no-run-if-empty -n1 sudo virsh destroy') - }catch (Exception e) { - echo "Fine. Let it go..." - } - try { - sh('sudo virsh list --all --name | grep _magma | xargs --no-run-if-empty -n1 sudo virsh undefine') - sh('sudo virsh list --all --name | grep cwag | xargs --no-run-if-empty -n1 sudo virsh undefine') - sh('cd lte/gateway;virsh undefine gateway_magma || true; vagrant destroy --force magma') - sh('cd cwf/gateway;virsh undefine gateway_cwag || true; vagrant destroy --force cwag') - } - catch (Exception e) { - echo "Fine. Let it go..." 
- } - dir('cwf/gateway') { - dir('docker') { - sh """ - docker-compose \ - -f docker-compose.yml \ - -f docker-compose.override.yml \ - -f docker-compose.nginx.yml \ - -f docker-compose.integ-test.yml \ - build --parallel - """ - } - def test_xml = "tests.xml" - def test_html = "tests.html" - def test_folder = "cwf-artifacts" - def test_results = 0 - timeout(time: 110, unit: 'MINUTES') { - try { - sh """ - sudo su - export PYENV_ROOT="/home/jenkins/.pyenv" - export PATH="\$PYENV_ROOT/shims:\$PATH" - fab integ_test:destroy_vm=True,transfer_images=True,test_result_xml=$test_xml - """ - } catch (Exception e) { - test_results = 1 - } - } - // Move JUnit test result to /tmp/test-results directory - sh "mkdir $test_folder" - try { - sh "junit2html $test_xml $test_html" - sh "cp $test_xml $test_folder" - sh "cp $test_html $test_folder" - } catch (Exception e) { - echo "It might fail because XML file was not generated at previous step" - currentBuild.result = 'FAILURE' - } - - // On failure, transfer logs of key services from docker containers and - // copy to the log directory. This will get stored as an artifact. 
- // Following lines to be translated in to Jenkins DSL yet - if (test_results) { - currentBuild.result = 'FAILURE' - def services = "sessiond session_proxy pcrf ocs pipelined ingress" - try { - timeout(time: 10, unit: 'MINUTES') { - sh """ - export PYENV_ROOT="\$HOME/.pyenv" - export PATH="\$PYENV_ROOT/shims:\$PATH" - fab transfer_artifacts:services='$services',get_core_dump=True - """ - } - // Copy the log files out from the node - sh "cp *.log $test_folder" - if( fileExists("coredump.tar.gz") ) { - sh "cp coredump.tar.gz $test_folder" - } - } catch (Exception e) {} - } - timeout(time: 10, unit: 'MINUTES') { - archiveArtifacts("$test_folder/*") - } - } - } - } - } - } - post { - success { - script { - def color = "good" - def message = "MAGMA " + JOB_NAME + " build (" + BUILD_ID + "): passed (" + BUILD_URL + ")" - echo message - sendSocialMediaMessage(slack_channel,color, message) - } - } - unsuccessful { - script { - def color = "danger" - def message = "MAGMA " + JOB_NAME + " build (" + BUILD_ID + "): failed (" + BUILD_URL + ")" - echo message - sendSocialMediaMessage(slack_channel,color, message) - } - } - } -} - -//------------------------------------------------------------------------------- -// Abstraction function to send social media messages: -// like on Slack or Mattermost -def sendSocialMediaMessage(pipeChannel, pipeColor, pipeMessage) { - slackSend channel: pipeChannel, color: pipeColor, message: pipeMessage -} diff --git a/ci-scripts/JenkinsFile-GitLab b/ci-scripts/JenkinsFile-GitLab deleted file mode 100644 index 4820ab6b6071..000000000000 --- a/ci-scripts/JenkinsFile-GitLab +++ /dev/null @@ -1,682 +0,0 @@ -#!/bin/groovy -/* - * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The OpenAirInterface Software Alliance licenses this file to You under - * the terms found in the LICENSE file in the root of this - * source tree. - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - *------------------------------------------------------------------------------- - * For more information about the OpenAirInterface (OAI) Software Alliance: - * contact@openairinterface.org - */ - - -def GIT_URL = "github.com/magma/magma" - -def GIT_BRANCH = "master" -def GIT_COMMIT - -def OAI_GIT_URL = "https://github.com/OPENAIRINTERFACE/openair-epc-fed.git" -def OAI_GIT_BRANCH = "master" -// Location of the executor node -def nodeExecutor = "libvirt" - -def GITHUB_USER = "magmabot" -def slack_channel = "#magma-ci-bot" - - -pipeline { - agent { - label "libvirt" - } - parameters { - booleanParam(name: 'REGRESSION_TEST', defaultValue: false, description: 'Test master branch for regressions and submit a Github issue') - } - - options { - timestamps() - ansiColor('xterm') - } - - stages { - stage ("Verify Parameters") { - steps { - script { - cleanWs() - sh "docker system prune -f" - echo "Purge old vagrant boxes" - sh "sudo vagrant box prune --force" - sh "sudo chown -R jenkins:libvirt ~jenkins/.vagrant.d" - - JOB_TIMESTAMP = sh returnStdout: true, script: 'date --utc --rfc-3339=seconds | sed -e "s#+00:00##"' - JOB_TIMESTAMP = JOB_TIMESTAMP.trim() - - echo '\u2705 \u001B[32mVerify Parameters\u001B[0m' - } - } - } - stage ("Retrieve and Prepare Source Code") { - steps { - script { - def branch - if (params.REGRESSION_TEST) { - branch = 'master' - } else { - branch = sha1 - } - checkout( - changelog: false, - poll: false, - scm: [$class: 'GitSCM', - branches: [[name: "$branch"]], - 
doGenerateSubmoduleConfigurations: false, - extensions: [], - submoduleCfg: [], - userRemoteConfigs: [[refspec: '+refs/pull/*:refs/remotes/origin/pr/*', url: "https://" + GIT_URL + ".git"]]] - ) - sh "git clean -x -d -e .cache -e lte/gateway/.vagrant -f > /dev/null 2>&1" - sh("mkdir -p openair-epc-fed archives") - dir("openair-epc-fed") { - checkout( - changelog: false, - poll: false, - scm: [$class: 'GitSCM', - branches: [[name: OAI_GIT_BRANCH]], - doGenerateSubmoduleConfigurations: false, - doGenerateSubmoduleConfigurations: false, - extensions: [[$class: 'SubmoduleOption', - disableSubmodules: false, - parentCredentials: false, - recursiveSubmodules: true, - reference: '', - trackingSubmodules: false]], - submoduleCfg: [], - userRemoteConfigs: [[url: OAI_GIT_URL]] - ] - ) - } - } - } - post { - failure { - script { - def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): Merge Conflicts -- Cannot perform CI" - echo message - currentBuild.result = 'FAILURE' - } - } - } - } - stage ("Provisioning") { - parallel { - stage ("Provision the AGW VM") { - steps { - script { - try { - sh('sudo virsh list --all --name') - sh('sudo virsh list --all --name | grep _magma | xargs --no-run-if-empty -n1 sudo virsh destroy || true') - sh('sudo virsh list --all --name | grep _magma | xargs --no-run-if-empty -n1 sudo virsh undefine || true') - sh('cd lte/gateway && vagrant destroy --force magma') - } - catch (Exception e) { - echo "Fine. Let it go..." - } - myShCmdWithLog('cd lte/gateway && vagrant up --provider libvirt magma', 'archives/magma_vagrant_up.log') - // Check that magma services are all down. Should be the case after wake-up - try { - sh('cd lte/gateway && vagrant ssh magma -c "sudo service magma@* status"') - } catch (Exception e) { - echo "Fine. Let it go..." 
- } - sh('which zip || sudo apt-get install -y zip') - sh('dpkg -l apt-utils || apt-get install -y apt-utils') - } - } - } - stage ("Build Orchestrator") { - steps { - script { - echo "Not building orc8r at the moment" - } - } - } - } - } - stage ("Building") { - parallel { - stage ("Build AGW1 - noS11") { - steps { - script { - // Manual removal of build dirs - try { - sh('cd lte/gateway && vagrant ssh magma -c "sudo rm -Rf build/c build/python"') - - } catch (Exception e) { - echo "OK after a git clean..." - } - try { - myShCmdWithLog('cd lte/gateway && vagrant ssh magma -c "cd magma/lte/gateway && make clean"', 'archives/magma_vagrant_make_clean.log') - } catch (Exception e) { - echo "OK after a git clean..." - } - // Manually creating the c build dir - try { - sh('cd lte/gateway && vagrant ssh magma -c "sudo mkdir -p build/c;sudo chown -R vagrant build"') - } catch (Exception e) { - echo "It should not fail here but we still go on" - } - sh ('sudo chown -R jenkins:libvirt .cache' ) - timeout (time: 120, unit: 'MINUTES') { - // removing the magma/.cache/gateway folder will slow down build from 3 minutes to 27 minutes - myShCmdWithLog('''cd lte/gateway && vagrant ssh magma -c "cd magma/lte/gateway && make run "''', 'archives/magma_vagrant_make_run.log') - } - sh "sleep 30" - // check magma status --> non-blocking (even if OK it might fail from a bash script point of view) - try { - sh('cd lte/gateway && vagrant ssh magma -c "sudo service magma@* start; sudo service magma@* status"') - } catch (Exception e) { - echo "Checking magma@* status failed but still moving on!" 
- } - } - } - } - // Running CPPCHECK in parallel to gain time - stage ('Static Code Analysis') { - steps { - script { - // Running on xenial to have 1.72 version of cppcheck - sh('docker rm -f ci-cn-cppcheck || true') - sh('docker run --name ci-cn-cppcheck -v `pwd`:/code -d ubuntu:xenial /bin/bash -c "sleep infinity"') - sh('docker exec -i ci-cn-cppcheck /bin/bash -c "apt-get update && apt-get upgrade --yes" 2>&1 > archives/cppcheck_install.log') - sh('docker exec -i ci-cn-cppcheck /bin/bash -c "apt-get install --yes git cppcheck bzip2" 2>&1 >> archives/cppcheck_install.log') - - sh('docker exec -i ci-cn-cppcheck /bin/bash -c "cd /code && cppcheck -j8 --enable=warning --force --xml --xml-version=2 -i test ." 2> cppcheck.xml 1> archives/cppcheck_build.log') - sh('docker rm -f ci-cn-cppcheck') - } - } - post { - success { - sh "echo 'CPPCHECK: OK' >> archives/cppcheck_install.log" - } - unsuccessful { - sh "echo 'CPPCHECK: KO' >> archives/cppcheck_install.log" - } - } - } - stage ('Code Formatting Checker') { - steps { - script { - sh('docker rm -f ci-cn-clang-formatter || true') - sh('docker run --name ci-cn-clang-formatter -v `pwd`:/code -d ubuntu:bionic /bin/bash -c "sleep infinity"') - sh('docker exec -i ci-cn-clang-formatter /bin/bash -c "apt-get update && apt-get upgrade --yes" 2>&1 > archives/clang_format_install.log') - sh('docker exec -i ci-cn-clang-formatter /bin/bash -c "apt-get install --yes git tree bzip2" 2>&1 >> archives/clang_format_install.log') - - //sh('docker cp /tmp/converged_mme.tar.bz2 ci-cn-clang-formatter:/home') - //sh('docker exec -i ci-cn-clang-formatter /bin/bash -c "cd /home && tar -xjf converged_mme.tar.bz2"') - //sh('docker exec -i ci-cn-clang-formatter /bin/bash -c "rm -f /home/converged_mme.tar.bz2"') - //sh('docker exec -i ci-cn-clang-formatter /bin/bash -c "cd /home && git checkout -f ' + TEMP_COMMIT + '"') - - // We install a dedicated version (installed on our CI server). 
- sh('docker cp /opt/clang-format/9.0.0/bin/clang-format ci-cn-clang-formatter:/usr/local/bin') - sh('docker exec -i ci-cn-clang-formatter /bin/bash -c "cd /code && ./ci-scripts/checkCodingFormattingRules.sh"') - } - } - post { - always { - script { - sh('docker cp ci-cn-clang-formatter:/code/oai_rules_result.txt archives/.') - // May not have been generated - try { - sh('docker cp ci-cn-clang-formatter:/code/oai_rules_result_list.txt archives/.') - } catch (Exception e) { - echo "Failed to copy src/oai_rules_result_list.txt! It may not have been generated. That's OK!" - } - // no need to keep the clang-formatter container - sh('docker rm -f ci-cn-clang-formatter') - } - } - } - } - stage ("Run Orchestrator") { - steps { - script { - echo "Not at the moment" - } - } - } - stage ("Provision the Test VM") { - steps { - script { - try { - sh('cd lte/gateway && virsh undefine gateway_magma_test || true; vagrant destroy --force magma_test') - } - catch (Exception e) { - echo "Fine. Let it go..." 
- } - myShCmdWithLog('cd lte/gateway && vagrant up --provider libvirt magma_test', 'archives/magma_vagrant_test_up.log') - myShCmdWithLog('cd lte/gateway && vagrant ssh magma_test -c "cd magma/lte/gateway/python/ && make"', 'archives/magma_vagrant_test_make1.log') - myShCmdWithLog('cd lte/gateway && vagrant ssh magma_test -c "cd magma/lte/gateway/python/integ_tests/ && make"', 'archives/magma_vagrant_test_make2.log') - } - } - } - stage ("Provision the Traffic VM") { - steps { - script { - sh "sleep 60" - try { - myShCmdWithLog('cd lte/gateway && virsh undefine gateway_magma_trfserver || true; vagrant destroy --force magma_trfserver', 'archives/magma_vagrant_trfserver_up.log') - } catch (Exception e) { - echo "Ignoring issues cleaning up any lingering magma_trfserver" - } - sh('cd lte/gateway && vagrant up --provider libvirt magma_trfserver') - try { - sh('cd lte/gateway && vagrant ssh magma_trfserver -c "sudo apt update"') - } catch (Exception e) { - echo "Known issue with magma-custom.io public key?" - } - sh('cd lte/gateway && vagrant ssh magma_trfserver -c "sudo apt install --yes psmisc net-tools iproute"') - sh('cd lte/gateway && vagrant ssh magma_trfserver -c "sudo ip route add 192.168.128.0/24 via 192.168.129.1 dev eth2"') - } - } - } - } - } - stage ("Testing -- noS11") { - parallel { - stage ("Start Traffic Server -- noS11") { - steps { - script { - echo "Disabling TCP checksumming on Traffic VM" - sh('cd lte/gateway && vagrant ssh magma_trfserver -c "sudo ethtool --offload eth1 rx off tx off && sudo ethtool --offload eth2 rx off tx off"') - echo "Starting the Traffic server in foreground" - try { - sh('cd lte/gateway && vagrant ssh magma_trfserver -c "sudo traffic_server.py 192.168.60.144 62462"') - } catch (Exception e) { - echo "Moving on!" 
- } - } - } - } - stage ("Test-AGW1-noS11") { - steps { - script { - echo "Disabling TCP checksumming on all VMs" - sh('cd lte/gateway && vagrant ssh magma -c "sudo ethtool --offload eth1 rx off tx off && sudo ethtool --offload eth2 rx off tx off"') - sh('cd lte/gateway && vagrant ssh magma_test -c "sudo ethtool --offload eth1 rx off tx off && sudo ethtool --offload eth2 rx off tx off"') - // Adding capture on the S1 interface - sh('cd lte/gateway && vagrant ssh magma -c "nohup sudo tcpdump -i eth1 port 36412 -w ~/magma/archives/magma_run_s1ap_tester.pcap > /dev/null & sleep 1"') - - // Making sure the Traffic server is up and running - sh "sleep 20" - - echo "Starting the integration Tests - S1AP Tester" - // We have removed the traffic testcases from mandatory suite. - try { - sh "pip -y uninstall -y fabric" - } catch (Exception e) {} - try { - sh "pip3 -y uninstall -y fabric" - } catch (Exception e) {} - sh "cd lte/gateway && virtualenv -p python3 .venv" - sh '''#!/bin/bash - cd lte/gateway && source .venv/bin/activate && pip install fabric3 jsonpickle requests PyYAML - ''' - timeout (time: 110, unit: 'MINUTES') { - myShCmdWithLog('cd lte/gateway && source .venv/bin/activate && fab run_integ_tests', 'archives/magma_run_s1ap_tester_s11.log') - } - - timeout (time: 45, unit: 'SECONDS') { - try { - myShCmdWithLogAppend('cd lte/gateway && vagrant ssh magma_test -c "cd magma/lte/gateway/python/integ_tests/ && source ~/build/python/bin/activate && make integ_test TESTS=s1aptests/test_attach_dl_udp_data.py"', 'archives/magma_run_s1ap_tester.log') - } catch (Exception e) { - echo "s1aptests/test_attach_dl_udp_data testcase may fail" - } - } - timeout (time: 45, unit: 'SECONDS') { - try { - myShCmdWithLogAppend('cd lte/gateway && vagrant ssh magma_test -c "cd magma/lte/gateway/python/integ_tests/ && source ~/build/python/bin/activate && make integ_test TESTS=s1aptests/test_attach_dl_tcp_data.py"', 'archives/magma_run_s1ap_tester.log') - } catch (Exception e) { - echo 
"s1aptests/test_attach_dl_tcp_data testcase may fail" - } - } - - echo "Stopping the Traffic server in background" - try { - sh('cd lte/gateway && vagrant ssh magma_trfserver -c "sudo killall python3"') - } catch (Exception e) { - echo "Maybe Traffic server crashed" - } - } - } - post { - always { - script { - // Stopping capture - sh('cd lte/gateway && vagrant ssh magma -c "sudo pkill tcpdump"') - def retrieveOAIcovFiles = true - try { - myShCmdWithLog('cd lte/gateway && vagrant ssh magma -c "cd magma/lte/gateway && make coverage_oai"', 'archives/magma_vagrant_make_coverage_oai.log') - } catch (Exception e) { - echo "Let's keep running to have some logs, but not the OAI coverage files" - retrieveOAIcovFiles = false - } - if (retrieveOAIcovFiles) { - try { - sh('cd lte/gateway/c/oai && zip -r -qq ${WORKSPACE}/archives/code_coverage.zip code_coverage/') - } catch (Exception e) { - echo "Maybe we could not generate the coverage HTML report" - } - } - sh('cd lte/gateway && vagrant ssh magma -c "cd magma/lte/gateway && make stop"') - // Retrieving the sys logs and mme log for more debugging. - sh('cd lte/gateway && vagrant ssh magma -c "sudo cat /var/log/syslog" > ${WORKSPACE}/archives/magma_dev_syslog.log') - sh('cd lte/gateway && vagrant ssh magma -c "sudo cat /var/log/envoy.log" > ${WORKSPACE}/archives/magma_dev_envoy.log') - sh('cd lte/gateway && vagrant ssh magma -c "sudo cat /var/log/mme.log" > ${WORKSPACE}/archives/magma_dev_mme.log') - sh('cd lte/gateway && vagrant ssh magma_test -c "sudo cat /var/log/syslog" > ${WORKSPACE}/archives/magma_test_syslog.log') - } - } - success { - sh "echo 'AGW-VM-S1AP-TESTS: OK' >> archives/magma_run_s1ap_tester.log" - } - unsuccessful { - script { - try { - sh('cd lte/gateway && vagrant ssh magma_trfserver -c "sudo killall python3"') - } catch (Exception e) { - echo "Why it fails to kill the traffic server?" 
- } - sh "echo 'AGW-VM-S1AP-TESTS: KO' >> archives/magma_run_s1ap_tester.log" - } - } - } - } - } - } - stage ("Re-Build MME-S11") { - steps { - script { - echo "S11 tests are disabled until March 1" - } - } - } -/* // Adapt the interface and the container IP address for S11 --> SPGW-C - sh('sed -i -f ci-scripts/adapt-mme-yaml.sed lte/gateway/configs/mme.yml') - sh('cd lte/gateway && vagrant ssh magma -c "cd magma/lte/gateway && make clean"') - // Re-building w/ S11 enabled - sh "echo 'make FEATURES=\"mme\" run' > lte/gateway/make_mme_run.sh" - timeout (time: 15, unit: 'MINUTES') { - // removing the magma/.cache/gateway folder with speed down build from 3 minutes to 27 minutes - myShCmdWithLog('cd lte/gateway && vagrant ssh magma -c "cd magma/lte/gateway && chmod 755 make_mme_run.sh && sudo chown -R vagrant /home/vagrant/build && ./make_mme_run.sh"', 'archives/magma_vagrant_make_run2.log') - } - sh "sleep 60" - sh "echo 'make FEATURES=\"mme\" status' > lte/gateway/make_mme_status.sh" - try { - //sh('cd lte/gateway && vagrant ssh magma -c "cd magma/lte/gateway && chmod 755 make_mme_status.sh && ./make_mme_status.sh') - myShCmdWithLog('cd lte/gateway && vagrant ssh magma -c "sudo service magma@* status"', 'archives/magma_status2.log') - } catch (Exception e) { - echo "Status may return an error" - } - } - } - stage ("Deploy SPGW-CUPS") { - steps { - script { - sh('cd openair-epc-fed && git clean -ff') - sh('cd openair-epc-fed && ./scripts/syncComponents.sh --spgwc-branch 2020.w36 --spgwu-tiny-branch 2020.w36') - // Build containers if they aren't present - sh('cd openair-epc-fed && ls -l component && docker image inspect oai-spgwc:develop > /dev/null || docker build -f component/oai-spgwc/ci-scripts/Dockerfile.ubuntu18.04 -t oai-spgwc:develop ./component/oai-spgwc') - sh('cd openair-epc-fed && docker image inspect oai-spgw-tiny:develop > /dev/null || docker build -f component/oai-spgwu-tiny/ci-scripts/Dockerfile.ubuntu18.04 -t oai-spgwu-tiny:develop 
./component/oai-spgwu-tiny') - sh('docker network create --attachable --subnet 192.168.61.128/26 --ip-range 192.168.61.128/26 magma-oai-public-net') - // We are fixing IP addresses to easy scripting - sh('docker rm -f magma-oai-spgwc magma-oai-spgwc-tiny || true') - sh('docker run --privileged --name magma-oai-spgwc --network magma-oai-public-net --ip 192.168.61.130 -d oai-spgwc:develop /bin/bash -c "sleep infinity"') - sh('docker run --privileged --name magma-oai-spgwu-tiny --network magma-oai-public-net --ip 192.168.61.131 -d oai-spgwu-tiny:develop /bin/bash -c "sleep infinity"') - // Configure the containers - sh('cd openair-epc-fed && python3 component/oai-spgwc/ci-scripts/generateConfigFiles.py --kind=SPGW-C --s11c=eth0 --sxc=eth0 --from_docker_file --apn=oai.ipv4') - sh('cd openair-epc-fed && python3 component/oai-spgwu-tiny/ci-scripts/generateConfigFiles.py --kind=SPGW-U --sxc_ip_addr=192.168.61.130 --sxu=eth0 --s1u=eth0 --from_docker_file') - sh('cd openair-epc-fed && docker cp ./spgwc-cfg.sh magma-oai-spgwc:/openair-spgwc') - sh('docker exec -i magma-oai-spgwc /bin/bash -c "cd /openair-spgwc && chmod 777 spgwc-cfg.sh && ./spgwc-cfg.sh"') - sh('cd openair-epc-fed && docker cp ./spgwu-cfg.sh magma-oai-spgwu-tiny:/openair-spgwu-tiny') - sh('docker exec -i magma-oai-spgwu-tiny /bin/bash -c "cd /openair-spgwu-tiny && chmod 777 spgwu-cfg.sh && ./spgwu-cfg.sh"') - // adapting the UE IP pool to magma test setup - sh('docker cp ./ci-scripts/adapt-spgwc-pool-ip.sed magma-oai-spgwc:/openair-spgwc') - sh('docker exec -i magma-oai-spgwc /bin/bash -c "sed -i -f adapt-spgwc-pool-ip.sed etc/spgw_c.conf"') - sh('docker cp ./ci-scripts/adapt-spgwu-pool-ip.sed magma-oai-spgwu-tiny:/openair-spgwu-tiny') - sh('docker exec -i magma-oai-spgwu-tiny /bin/bash -c "sed -i -f adapt-spgwu-pool-ip.sed etc/spgw_u.conf"') - - // Start cNFs - sh('docker exec -d magma-oai-spgwc /bin/bash -c "nohup ./bin/oai_spgwc -o -c ./etc/spgw_c.conf > spgwc_check_run.log 2>&1"') - sh('docker exec -d 
magma-oai-spgwu-tiny /bin/bash -c "nohup ./bin/oai_spgwu -o -c ./etc/spgw_u.conf > spgwu_check_run.log 2>&1"') - } - } - } - stage ("Test-AGW1-w-S11") { - steps { - script { - // Adding capture on the S1 and S11 interfaces - sh('cd lte/gateway && vagrant ssh magma -c "nohup sudo tcpdump -i any port 36412 or port 2123 -w ~/magma/archives/magma_run_s1ap_tester_s11.pcap > /dev/null & sleep 1"') - // making sure the TRF server is up - echo "Remove unnecessary route" - sh('cd lte/gateway && vagrant ssh magma_trfserver -c "sudo ip route del 192.168.128.0/24 via 192.168.129.1 dev eth2"') - sh('cd lte/gateway && vagrant reload magma_test') - // making sure the TRF server is up - sh "sleep 60" - echo "Starting the integration Tests - S1AP Tester" - timeout (time: 110, unit: 'MINUTES') { - myShCmdWithLog('cd lte/gateway && vagrant ssh magma_test -c "cd magma/lte/gateway/python/integ_tests/ && source ~/build/python/bin/activate && make integ_test TESTS=s1aptests/test_attach_detach.py" > ${WORKSPACE}/archives/magma_run_s1ap_tester_s11.log', 'archives/magma_run_s1ap_tester_s11.log') - myShCmdWithLogAppend('cd lte/gateway && vagrant ssh magma_test -c "cd magma/lte/gateway/python/integ_tests/ && source ~/build/python/bin/activate && make integ_test TESTS=s1aptests/test_attach_detach_multi_ue.py" >> ${WORKSPACE}/archives/magma_run_s1ap_tester_s11.log', 'archives/magma_run_s1ap_tester_s11.log') - myShCmdWithLogAppend('cd lte/gateway && vagrant ssh magma_test -c "cd magma/lte/gateway/python/integ_tests/ && source ~/build/python/bin/activate && make integ_test TESTS=s1aptests/test_attach_detach_looped.py" >> ${WORKSPACE}/archives/magma_run_s1ap_tester_s11.log', 'archives/magma_run_s1ap_tester_s11.log') - myShCmdWithLogAppend('cd lte/gateway && vagrant ssh magma_test -c "cd magma/lte/gateway/python/integ_tests/ && source ~/build/python/bin/activate && make integ_test TESTS=s1aptests/test_attach_emergency.py" >> ${WORKSPACE}/archives/magma_run_s1ap_tester_s11.log', 
'archives/magma_run_s1ap_tester_s11.log') - myShCmdWithLogAppend('cd lte/gateway && vagrant ssh magma_test -c "cd magma/lte/gateway/python/integ_tests/ && source ~/build/python/bin/activate && make integ_test TESTS=s1aptests/test_attach_combined_eps_imsi.py" >> ${WORKSPACE}/archives/magma_run_s1ap_tester_s11.log', 'archives/magma_run_s1ap_tester_s11.log') - myShCmdWithLogAppend('cd lte/gateway && vagrant ssh magma_test -c "cd magma/lte/gateway/python/integ_tests/ && source ~/build/python/bin/activate && make integ_test TESTS=s1aptests/test_attach_via_guti.py" >> ${WORKSPACE}/archives/magma_run_s1ap_tester_s11.log', 'archives/magma_run_s1ap_tester_s11.log') - myShCmdWithLogAppend('cd lte/gateway && vagrant ssh magma_test -c "cd magma/lte/gateway/python/integ_tests/ && source ~/build/python/bin/activate && make integ_test TESTS=s1aptests/test_attach_detach_after_ue_context_release.py" >> ${WORKSPACE}/archives/magma_run_s1ap_tester_s11.log', 'archives/magma_run_s1ap_tester_s11.log') - myShCmdWithLogAppend('cd lte/gateway && vagrant ssh magma_test -c "cd magma/lte/gateway/python/integ_tests/ && source ~/build/python/bin/activate && make integ_test TESTS=s1aptests/test_no_auth_response.py" >> ${WORKSPACE}/archives/magma_run_s1ap_tester_s11.log', 'archives/magma_run_s1ap_tester_s11.log') - } - - - // echo "Stopping the Traffic server in background" - // sh('cd lte/gateway && vagrant ssh magma_trfserver -c "sudo killall python3"') - - echo "Stopping the SPGW-CUPS" - sh('docker exec -i magma-oai-spgwc /bin/bash -c "killall --signal SIGINT oai_spgwc || echo oai_spgwc not running"') - sh('docker exec -i magma-oai-spgwu-tiny /bin/bash -c "killall --signal SIGINT oai_spgwu || echo oai_spgwu not running"') - sh "sleep 10" - try { - sh('docker exec -i magma-oai-spgwc /bin/bash -c "killall --signal SIGKILL oai_spgwc"') - } catch (Exception e) { - echo "oai_spgwc may already be killed" - } - try { - sh('docker exec -i magma-oai-spgwu-tiny /bin/bash -c "killall --signal SIGKILL 
oai_spgwu"') - } catch (Exception e) { - echo "oai_spgwu may already be killed" - } - } - } - post { - always { - script { - sh('cd lte/gateway && vagrant ssh magma -c "cd magma/lte/gateway && make stop"') - // Stopping capture - sh('cd lte/gateway && vagrant ssh magma -c "sudo pkill tcpdump"') - // Retrieving the sys logs and mme log for more debugging. - sh('cd lte/gateway && vagrant ssh magma -c "sudo cat /var/log/syslog" > ${WORKSPACE}/archives/magma_dev_syslog_s11.log') - try { - sh('cd lte/gateway && vagrant ssh magma -c "sudo cat /var/log/mme.log" > ${WORKSPACE}/archives/magma_dev_mme_s11.log') - sh('docker cp magma-oai-spgwc:/openair-spgwc/spgwc_check_run.log archives') - } catch (Exception e) { - echo "MME log may not be available" - } - sh('cd lte/gateway && vagrant ssh magma_test -c "sudo cat /var/log/syslog" > ${WORKSPACE}/archives/magma_test_syslog_s11.log') - // Retrieving the container logs - sh('docker cp magma-oai-spgwc:/openair-spgwc/spgwc_check_run.log archives') - sh('docker cp magma-oai-spgwu-tiny:/openair-spgwu-tiny/spgwu_check_run.log archives') - } - } - success { - sh "echo 'AGW-VM-S1AP-TESTS: OK' >> archives/magma_run_s1ap_tester_s11.log" - } - unsuccessful { - script { - try { - sh('docker exec -i magma-oai-spgwc /bin/bash -c "killall --signal SIGKILL oai_spgwc"') - } catch (Exception e) { - echo "spgwc may already be stopped" - } - try { - sh('docker exec -i magma-oai-spgwu-tiny /bin/bash -c "killall --signal SIGKILL oai_spgwu"') - } catch (Exception e) { - echo "spgwu may already be stopped" - } - sh "echo 'AGW-VM-S1AP-TESTS: KO' >> archives/magma_run_s1ap_tester_s11.log" - } - } - } - }*/ - } - post { - always { - script { - sh('git checkout -- lte/gateway/python/integ_tests/defs.mk lte/gateway/configs/mme.yml') - - // Stopping the VMs and the Containers - sh('cd lte/gateway && vagrant halt magma') - sh('cd lte/gateway && vagrant halt magma_test') - sh('cd lte/gateway && vagrant halt magma_trfserver') - sh('cd lte/gateway && vagrant 
global-status') - - try { - sh('docker rm -f magma-oai-spgwc magma-oai-spgwu-tiny') - } catch (Exception e) { - echo "We may not have started the CUPS containers" - } - try { - sh('docker network rm magma-oai-public-net') - } catch (Exception e) { - echo "We may not have created the CUPS docker network" - } - - // Generate HTML report - sh "python3 ci-scripts/generateHtmlReport.py --job_name=${JOB_NAME} --job_id=${BUILD_ID} --job_url=${BUILD_URL} --git_url=${GIT_URL} --git_src_branch=${GIT_BRANCH} --git_src_commit=${GIT_COMMIT}" - sh "sed -i -e 's#TEMPLATE_TIME#${JOB_TIMESTAMP}#' test_results_magma_converged_mme.html" - if (fileExists('test_results_magma_converged_mme.html')) { - archiveArtifacts artifacts: 'test_results_magma_converged_mme.html' - } - - // Zipping all archived log files - sh "zip -r -qq magma_logs.zip archives" - if (fileExists('magma_logs.zip')) { - archiveArtifacts artifacts: 'magma_logs.zip' - } - sh('git stash && git stash clear') - } - } - success { - script { - def color = "good" - def message = "MAGMA " + JOB_NAME + " build (" + BUILD_ID + "): passed (" + BUILD_URL + ")" - echo message - sendSocialMediaMessage(slack_channel,color, message) - } - } - unsuccessful { - script { - def color = "danger" - def message = "MAGMA " + JOB_NAME + " build (" + BUILD_ID + "): failed (" + BUILD_URL + ")" - echo message - sendSocialMediaMessage(slack_channel,color, message) - if (params.REGRESSION_TEST) { - createOrUpdateGithubIssue(GIT_URL, GITHUB_USER, message) - } - } - } - } -} - -def myShCmdWithLog(cmd, logFile) { - sh """#!/bin/bash - set -o pipefail - ${cmd} 2>&1 | tee $WORKSPACE/${logFile} - """ -} - -def myShCmdWithLogAppend(cmd, logFile) { - sh """#!/bin/bash - set -o pipefail - ${cmd} 2>&1 | tee -a $WORKSPACE/${logFile} - """ -} - -def createOrUpdateGithubIssue(git_url, github_user, message) { - issueTitle = "[CI] Regression tests failed" - githubProject = git_url.split('/')[1] + "/" + git_url.split('/')[2] - issueId = 
getIssueByTitle(githubProject, github_user, issueTitle) - if (issueId != false) { - updateGitHubIssue(githubProject, github_user, issueId, message) - println("GitHub issue #${issueId} updated") - } else { - createGitHubIssue(githubProject, github_user, issueTitle, message) - println("GitHub issue created") - } -} - -def getIssueByTitle(githubProject, github_user, title) { - withCredentials([string(credentialsId: 'magma_bot_github_api_token', variable: 'TOKEN')]) { - try { - id = sh(returnStdout: true, script: """curl -G -u "$github_user:$TOKEN" \ - "https://api.github.com/search/issues" \ - -H "Accept: application/vnd.github.v3+json" \ - --data-urlencode "q=repo:${githubProject} author:$github_user state:open in:title ${title}" \ - | jq .items[0].number""") .trim() - } catch (Exception e) { - println("Failed looking up github issue") - return false - } - if (id && id != "null") { - println("Found matching github issue $id") - return id - } else { - return false - } - } -} - -def updateGitHubIssue(githubProject, github_user, issueId, message) { - message = message.replace('\n', '\\n') - withCredentials([string(credentialsId: 'magma_bot_github_api_token', variable: 'TOKEN')]) { - sh(returnStdout: true, script: """curl -X "POST" -u "$github_user:$TOKEN" \ - "https://api.github.com/repos/${githubProject}/issues/${issueId}/comments" \ - -H "Accept: application/vnd.github.v3+json" \ - -d '{"body": "${message}"}' """) - } -} - -def createGitHubIssue(githubProject, github_user, title, message) { - message = message.replace('\n', '\\n') - withCredentials([string(credentialsId: 'magma_bot_github_api_token', variable: 'TOKEN')]) { - sh(returnStdout: true, script: """curl -X "POST" -u "$github_user:$TOKEN" \ - "https://api.github.com/repos/magma/magma/issues" \ - -H "Accept: application/vnd.github.v3+json" \ - -d '{ - "title": "${title}", - "body": "${message}", - "labels": [ - "type: bug" - ] - }' """) - } -} - 
-//------------------------------------------------------------------------------- -// Abstraction function to send social media messages: -// like on Slack or Mattermost -def sendSocialMediaMessage(pipeChannel, pipeColor, pipeMessage) { - slackSend channel: pipeChannel, color: pipeColor, message: pipeMessage -} diff --git a/ci-scripts/JenkinsFile-LTE-integ-test b/ci-scripts/JenkinsFile-LTE-integ-test deleted file mode 100644 index 0962c2113ffd..000000000000 --- a/ci-scripts/JenkinsFile-LTE-integ-test +++ /dev/null @@ -1,244 +0,0 @@ -#!/bin/groovy - -def GIT_URL = "github.com/magma/magma" -def GIT_BRANCH = "master" -def slack_channel = "#magma-ci-bot" -def test_results = 0 -pipeline { - agent { - label "libvirt" - } - options { - timestamps() - ansiColor('xterm') - } - stages { - stage("Prepate Env"){ - steps{ - script { - echo "Clean Env" - cleanWs() - echo "Clone Sources" - checkout( - changelog: false, - poll: false, - scm: [$class: 'GitSCM', - branches: [[name: '$sha1']], - doGenerateSubmoduleConfigurations: false, - extensions: [], - submoduleCfg: [], - userRemoteConfigs: [[refspec: '+refs/pull/*:refs/remotes/origin/pr/*', url: "https://" + GIT_URL + ".git"]]] - ) - } - } - } - stage("Clean Resources"){ - environment { - PYTHONPATH = "${WORKSPACE}/orc8r" - } - steps{ - script { - echo "Clean Docker Cache" - sh "sudo docker system prune -f" - - echo "Purge old vagrant boxes" - sh "sudo vagrant box prune --force" - sh "sudo chown -R jenkins:libvirt ~jenkins/.vagrant.d" - - - echo "Clean VMs" - sh "vagrant global-status" - sh "sudo virsh list --all --name" - sh "vagrant global-status 2>/dev/null | awk '/workspace/{print \$1}' | xargs -I {} vagrant destroy -f {}" - try { - sh('sudo virsh list --all --name | grep magma | xargs --no-run-if-empty -n1 sudo virsh destroy') - sh('sudo virsh list --all --name | grep cwag | xargs --no-run-if-empty -n1 sudo virsh destroy') - } - catch (Exception e) { - echo "Fine. Let it go..." 
- } - try { - sh('sudo virsh list --all --name | grep magma | xargs --no-run-if-empty -n1 sudo virsh undefine') - sh('sudo virsh list --all --name | grep cwag | xargs --no-run-if-empty -n1 sudo virsh undefine') - // Clean LTE VMs - sh('cd lte/gateway;virsh undefine gateway_magma || true; vagrant destroy --force magma') - sh('cd lte/gateway;virsh undefine gateway_magma_test || true; vagrant destroy --force magma_test') - sh('cd lte/gateway;virsh undefine gateway_magma_trfserver || true; vagrant destroy --force magma_trfserver') - // Clean CWF VMs - sh('cd cwf/gateway;vagrant destroy --force cwag') - sh('cd cwf/gateway;vagrant destroy --force cwag_test') - sh('cd lte/gateway;vagrant destroy --force magma_trfserver') - } - catch (Exception e) { - echo "Fine. Let it go..." - } - } - } - } - stage("Setup Packages"){ - environment { - PYTHONPATH = "${WORKSPACE}/orc8r" - } - steps{ - script { - echo "Setup required packages" - sh """ - export PATH; - export PYENV_ROOT="\$HOME/.pyenv" - export PATH="\$PYENV_ROOT/bin:\$PATH" - eval "\$(pyenv init -)" - eval "\$(pyenv virtualenv-init -)" - pyenv global 3.7.0 - pip3 install --upgrade pip - pip3 install fabric3 jsonpickle requests PyYAML awscli docker-compose - """ - } - } - } - - stage ("Provisioning") { - parallel { - stage("Setup Magma VM"){ - environment { - PYTHONPATH = "${WORKSPACE}/orc8r" - } - steps{ - script { - dir('lte/gateway') { - timeout(time: 70, unit: 'MINUTES') { - sh 'vagrant up --provider libvirt magma' - sh 'vagrant status' - sh 'sudo chown -R jenkins:libvirt "${WORKSPACE}/.cache"' - - sh 'vagrant ssh magma -c "cd magma/lte/gateway && make"' - try { - sh 'vagrant ssh magma -c "cd magma/lte/gateway && make test"' - } catch (Exception e) { - test_results=1 - } - try { - sh 'vagrant ssh magma -c "cd magma/lte/gateway/python && make coverage"' - } catch (Exception e) { - test_results=1 - } - sh 'vagrant ssh magma -c "cd magma/lte/gateway && make run"' - sh 'vagrant ssh magma -c "sudo ethtool --offload eth1 rx 
off tx off && sudo ethtool --offload eth2 rx off tx off"' - } - } - } - } - } - stage("Setup TrfServer VM"){ - environment { - PYTHONPATH = "${WORKSPACE}/orc8r" - } - steps{ - script { - dir('lte/gateway') { - timeout(time: 10, unit: 'MINUTES') { - sh 'vagrant up --provider libvirt magma_trfserver' - sh 'vagrant ssh magma_trfserver -c "sudo ethtool --offload eth1 rx off tx off && sudo ethtool --offload eth2 rx off tx off"' - sh 'vagrant ssh magma_trfserver -c "sudo nohup sudo /usr/local/bin/traffic_server.py 192.168.60.144 62462 > /dev/null 2>&1 &"' - } - } - } - } - } - - stage("Setup Magma Test VM"){ - environment { - PYTHONPATH = "${WORKSPACE}/orc8r" - } - steps{ - script { - dir('lte/gateway') { - timeout(time: 10, unit: 'MINUTES') { - sh 'vagrant up --provider libvirt magma_test' - sh 'vagrant ssh magma_test -c "cd magma/lte/gateway/python && make"' - sh 'vagrant ssh magma_test -c "cd magma/lte/gateway/python/integ_tests && make"' - sh 'vagrant ssh magma_test -c "sudo ethtool --offload eth1 rx off tx off && sudo ethtool --offload eth2 rx off tx off"' - sh 'vagrant ssh magma_test -c "cat /etc/hosts"' - sh 'vagrant ssh magma_test -c "echo \'127.0.1.1 magma-test.magma.com magma-test\' | sudo tee --append /etc/hosts"' - } - } - } - } - } - } - } - stage("LTE integration tests"){ - environment { - PYTHONPATH = "${WORKSPACE}/orc8r" - } - steps{ - script { - dir('lte/gateway') { - // Wait a bit to ensure AGW services are started - sh "sleep 30" - timeout(time: 120, unit: 'MINUTES') { - try { - sh """ - export PYENV_ROOT="\$HOME/.pyenv" - export PATH="\$PYENV_ROOT/shims:\$PATH" - fab run_integ_tests - """ - } catch (Exception e) { - test_results=1 - } - - try { - sh 'vagrant ssh magma -c "cd magma/lte/gateway && make coverage_oai"' - } catch (Exception e) { - test_results=1 - } - } - def test_folder = "lte-artifacts" - sh "mkdir ${test_folder}" - if (test_results) { - currentBuild.result = 'FAILURE' - def tar_file_name = "lte-test-logs.tar.gz" - // # On failure, 
transfer logs into current directory - def log_path = "${test_folder}/${tar_file_name}" - timeout(time: 2, unit: 'MINUTES') { - sh """ - export PYENV_ROOT="\$HOME/.pyenv" - export PATH="\$PYENV_ROOT/shims:\$PATH" - fab get_test_logs:dst_path=\"${log_path}\" - """ - } - timeout(time: 10, unit: 'MINUTES') { - archiveArtifacts("${test_folder}/*") - } - } - - } - } - } - } - } - post { - success { - script { - def color = "good" - def message = "MAGMA " + JOB_NAME + " build (" + BUILD_ID + "): passed (" + BUILD_URL + ")" - echo message - sendSocialMediaMessage(slack_channel,color, message) - } - } - unsuccessful { - script { - def color = "danger" - def message = "MAGMA " + JOB_NAME + " build (" + BUILD_ID + "): failed (" + BUILD_URL + ")" - echo message - sendSocialMediaMessage(slack_channel,color, message) - } - } - } -} - -//------------------------------------------------------------------------------- -// Abstraction function to send social media messages: -// like on Slack or Mattermost -def sendSocialMediaMessage(pipeChannel, pipeColor, pipeMessage) { - slackSend channel: pipeChannel, color: pipeColor, message: pipeMessage -} diff --git a/ci-scripts/JenkinsFile-OAI-Container-GitHub b/ci-scripts/JenkinsFile-OAI-Container-GitHub index 417e26e2f4b0..580b18ba6f20 100644 --- a/ci-scripts/JenkinsFile-OAI-Container-GitHub +++ b/ci-scripts/JenkinsFile-OAI-Container-GitHub @@ -295,16 +295,15 @@ pipeline { } catch (Exception e) { echo 'OK if not present' } - // In case of the full build, we need the host RHEL8 credentials. 
- if (params.REGRESSION_TEST) { - sh('mkdir -p tmp/ca tmp/entitlement') - sh('cp /etc/pki/entitlement/*pem tmp/entitlement') - sh('sudo cp /etc/rhsm/ca/redhat-uep.pem tmp/ca') - } + // Copying the host RHEL8 credentials all the time + sh('mkdir -p ./etc-pki-entitlement ./rhsm-conf ./rhsm-ca') + sh('cp /etc/pki/entitlement/*pem ./etc-pki-entitlement') + sh('sudo cp /etc/rhsm/rhsm.conf ./rhsm-conf') + sh('sudo cp /etc/rhsm/ca/*.pem ./rhsm-ca') // Create the image to use // Once again, we are not using the full dockerfile from scratch: too long --> when it is a pull request // On the daily master build, we are doing from scratch - sh('sudo podman build --no-cache --target magma-mme --tag magma-mme:' + rhel8_image_tag + ' --file ' + rhel8_docker_file + ' . > archives/build_magma_mme_rhel8.log 2>&1') + sh('sudo podman build --no-cache --squash --target magma-mme --tag magma-mme:' + rhel8_image_tag + ' --file ' + rhel8_docker_file + ' . > archives/build_magma_mme_rhel8.log 2>&1') sh('wget --quiet https://raw.githubusercontent.com/' + trustedGHuser + '/magma/' + trustedBranch + '/ci-scripts/flatten_image.py -O ci-scripts/ci-flatten_image.py') sh('python3 ./ci-scripts/ci-flatten_image.py --tag magma-mme:' + rhel8_image_tag) sh('sudo podman image prune --force > /dev/null 2>&1') diff --git a/ci-scripts/JenkinsFile-autolabel-pull-request b/ci-scripts/JenkinsFile-autolabel-pull-request deleted file mode 100644 index d2d0373dfb9d..000000000000 --- a/ci-scripts/JenkinsFile-autolabel-pull-request +++ /dev/null @@ -1,100 +0,0 @@ -#!/bin/groovy -/* - * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The OpenAirInterface Software Alliance licenses this file to You under - * the terms found in the LICENSE file in the root of this - * source tree. 
- * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - *------------------------------------------------------------------------------- - * For more information about the OpenAirInterface (OAI) Software Alliance: - * contact@openairinterface.org - */ - - -def GIT_URL = "github.com/magma/magma" - -def GIT_BRANCH = "master" -def GIT_COMMIT -def GITHUB_USER = "magmabot" -def REPO = "magma/magma" - -// Location of the executor node -def nodeExecutor = params.nodeExecutor -def slack_channel = "#magma-ci-bot" - -def changedFiles -def hasLteChanges -def hasCwagChanges - -pipeline { - agent { - label "CWAG" - } - options { - timestamps() - ansiColor('xterm') - } - - stages { - stage ("Retrieve and Prepare Source Code") { - steps { - script { - cleanWs() - checkout changelog: false, poll: false, scm: [$class: 'GitSCM', branches: [[name: 'master', name: '${sha1}']], doGenerateSubmoduleConfigurations: false, extensions: [], submoduleCfg: [], userRemoteConfigs: [[refspec: '+refs/pull/*:refs/remotes/origin/pr/*', url: "https://" + GIT_URL + ".git"]]] - GIT_COMMIT = sh returnStdout: true, script: 'git log -1 --pretty="%H"' - } - } - post { - failure { - script { - def message = "OAI " + JOB_NAME + " build (" + BUILD_ID + "): Merge Conflicts -- Cannot perform CI" - echo message - currentBuild.result = 'FAILURE' - } - } - } - } - stage ("Check for changes") { - steps { - script { - changedFiles = sh(returnStdout: true, script: "git --no-pager diff --name-only HEAD `git merge-base HEAD origin/master`") - hasLteChanges = changedFiles.split().any { it.startsWith("lte/")} - hasCwagChanges = changedFiles.split().any { it.startsWith("cwf/")} - echo "Changed files: $changedFiles" - echo "Has LTE: $hasLteChanges" - 
echo "Has CWAG: $hasCwagChanges" - } - } - } - stage ("Apply labels") { - steps { - script { - withCredentials([string(credentialsId: 'magma_bot_github_api_token', variable: 'TOKEN')]) { - if (hasLteChanges) { - sh("""curl --user "${GITHUB_USER}:${TOKEN}" \ - -X POST -H "Accept: application/vnd.github.v3+json" \ - https://api.github.com/repos/${REPO}/issues/${params.ghprbPullId}/labels \ - --data '{"labels": ["component: agw"]}' """) - echo "Applied component: agw label" - } - if (hasCwagChanges) { - sh("""curl --user "${GITHUB_USER}:${TOKEN}" \ - -X POST -H "Accept: application/vnd.github.v3+json" \ - https://api.github.com/repos/${REPO}/issues/${params.ghprbPullId}/labels \ - --data '{"labels": ["component: cwag"]}' """) - echo "Applied component: cwag label" - } - } - } - } - } - } -} - diff --git a/ci-scripts/JenkinsFile-build-vagrant-boxes b/ci-scripts/JenkinsFile-build-vagrant-boxes deleted file mode 100644 index 7b62ea65c43e..000000000000 --- a/ci-scripts/JenkinsFile-build-vagrant-boxes +++ /dev/null @@ -1,152 +0,0 @@ -#!/usr/local/bin/groovy -/* -* Licensed to the OpenAirInterface (OAI) Software Alliance under one or more -* contributor license agreements. See the NOTICE file distributed with -* this work for additional information regarding copyright ownership. -* The OpenAirInterface Software Alliance licenses this file to You under -* the terms found in the LICENSE file in the root of this -* source tree. -* -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and -* limitations under the License. 
-*------------------------------------------------------------------------------- -* For more information about the OpenAirInterface (OAI) Software Alliance: -* contact@openairinterface.org -*/ - - -def GIT_URL = "github.com/magma/magma" -def GIT_COMMIT -def BASE_BOXES = ["magma-dev", "magma-test", "magma-trfserver"] -def PACKER_VER = "1.6.5" -def PACKER_CACHE = "/var/jenkins/packer_cache" - -pipeline { - agent { - label "libvirt" - } - parameters { - string(name: 'branch', defaultValue: 'master') - } - options { - disableConcurrentBuilds() - timestamps() - ansiColor('xterm') - } - stages { - stage ("Retrieve and Prepare Source Code") { - steps { - script { - cleanWs() - checkout( - changelog: false, - poll: false, - scm: [ - $class: 'GitSCM', - branches: [[name: params.branch]], - doGenerateSubmoduleConfigurations: false, - extensions: [], - submoduleCfg: [], - userRemoteConfigs: [[refspec: '+refs/pull/*:refs/remotes/origin/pr/*', url: "https://" + GIT_URL + ".git"]]] - ) - GIT_COMMIT = sh returnStdout: true, script: 'git log -1 --pretty="%H"' - } - } - } - stage ("Setup") { - steps { - script { - sh("which unzip || sudo apt-get install -y unzip") - sh("which jq || sudo apt-get install -y jq") - - packerVersion = sh(returnStdout: true, script: "~/bin/packer version || echo notfound").trim() - println("Found '$packerVersion' wanted '$PACKER_VER'") - if (packerVersion.trim() != "Packer v${PACKER_VER}") { - sh("curl https://releases.hashicorp.com/packer/${PACKER_VER}/packer_${PACKER_VER}_linux_amd64.zip > packer.zip") - sh("mkdir -p ~/bin && unzip -o packer.zip -d ~/bin") - } - dir ('orc8r/tools/packer') { - iso_url = sh(returnStdout: true, script: 'cat magma-dev-libvirt.json | jq -r ".builders[0].iso_url"').trim() - iso_filename = sh(returnStdout: true, script: "basename $iso_url").trim() - print("Downloading $iso_url if not present in cache") - sh("mkdir -p ${PACKER_CACHE}") - sh('test -e "${PACKER_CACHE}/${iso_filename}" || wget --progress=dot -O 
${PACKER_CACHE}/${iso_filename} ${iso_url}') - } - } - } - } - stage ("Build Vagrant boxes - libvirt") { - environment { - // Use dummy secret for build - ATLAS_TOKEN = "123" - PACKER_OPTS="-except=vagrant-cloud" - VAGRANT_CLOUD_TOKEN = credentials('magma_vagrant_token') - PACKER_CACHE_DIR = "${PACKER_CACHE}" - provider = "libvirt" - } - matrix { - axes { - axis { - name 'base_box' - values "magma-dev", "magma-test", "magma-trfserver" - } - } - stages { - stage ("Build and upload box") { - steps { - script { - println "Building box ${base_box}-${provider}" - dir ('orc8r/tools/packer') { - sh "mkdir -p ../packer-${base_box}-${provider}" - sh "cp -R *.json *.seed http scripts ../packer-${base_box}-${provider}" - } - dir ("orc8r/tools/packer-${base_box}-${provider}") { - sh "PACKER_LOG=1 ~/bin/packer build $PACKER_OPTS ${base_box}-${provider}.json" - sh "./vagrant-box-upload.sh builds/${base_box}-${provider}.box" - } - } - } - } - } - } - } - stage ("Build Vagrant boxes - virtualbox") { - environment { - // Use dummy secret for build - ATLAS_TOKEN = "123" - PACKER_OPTS="-except=vagrant-cloud" - VAGRANT_CLOUD_TOKEN = credentials('magma_vagrant_token') - provider = "virtualbox" - PACKER_CACHE_DIR = "${PACKER_CACHE}" - } - matrix { - axes { - axis { - name 'base_box' - values "magma-dev", "magma-test", "magma-trfserver" - } - } - stages { - stage ("Build and upload box") { - steps { - script { - println "Building box ${base_box}-${provider}" - dir ('orc8r/tools/packer') { - sh "mkdir -p ../packer-${base_box}-${provider}" - sh "cp -R *.json *.seed http scripts ../packer-${base_box}-${provider}" - } - dir ("orc8r/tools/packer-${base_box}-${provider}") { - sh "PACKER_LOG=1 ~/bin/packer build $PACKER_OPTS ${base_box}-${provider}.json" - sh "./vagrant-box-upload.sh builds/${base_box}-${provider}.box" - } - } - } - } - } - } - } - } -} diff --git a/ci-scripts/adapt-mme-yaml.sed b/ci-scripts/adapt-mme-yaml.sed deleted file mode 100644 index e23f6124c287..000000000000 --- 
a/ci-scripts/adapt-mme-yaml.sed +++ /dev/null @@ -1,19 +0,0 @@ -################################################################################ -# Licensed to the OpenAirInterface (OAI) Software Alliance under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The OpenAirInterface Software Alliance licenses this file to You under -# the terms found in the LICENSE file in the root of this -# source tree. -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#------------------------------------------------------------------------------- -# For more information about the OpenAirInterface (OAI) Software Alliance: -# contact@openairinterface.org -################################################################################ -s@s11_iface_name: "eth[0-9]"@s11_iface_name: "eth0"@ -s@remote_sgw_ip: "[0-9.]*"@remote_sgw_ip: "192.168.61.130"@ diff --git a/ci-scripts/adapt-spgwc-pool-ip.sed b/ci-scripts/adapt-spgwc-pool-ip.sed deleted file mode 100644 index 2f64d70c365c..000000000000 --- a/ci-scripts/adapt-spgwc-pool-ip.sed +++ /dev/null @@ -1,20 +0,0 @@ -################################################################################ -# Licensed to the OpenAirInterface (OAI) Software Alliance under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The OpenAirInterface Software Alliance licenses this file to You under -# the terms found in the LICENSE file in the root of this -# source tree. 
-# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#------------------------------------------------------------------------------- -# For more information about the OpenAirInterface (OAI) Software Alliance: -# contact@openairinterface.org -################################################################################ -s@oai.ipv4@magma.ipv4@ -s@12.1.1.2 @192.168.128.2 @ -s@12.1.1.128@192.168.128.128@ diff --git a/ci-scripts/adapt-spgwu-pool-ip.sed b/ci-scripts/adapt-spgwu-pool-ip.sed deleted file mode 100644 index 4b1deb6ec7b9..000000000000 --- a/ci-scripts/adapt-spgwu-pool-ip.sed +++ /dev/null @@ -1,19 +0,0 @@ -################################################################################ -# Licensed to the OpenAirInterface (OAI) Software Alliance under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The OpenAirInterface Software Alliance licenses this file to You under -# the terms found in the LICENSE file in the root of this -# source tree. -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#------------------------------------------------------------------------------- -# For more information about the OpenAirInterface (OAI) Software Alliance: -# contact@openairinterface.org -################################################################################ -s@12.1.1.0@192.168.128.0@ -s@SNAT = "yes"@SNAT = "no"@ diff --git a/ci-scripts/checkCodingFormattingRules.sh b/ci-scripts/checkCodingFormattingRules.sh deleted file mode 100755 index 6fdea9d85287..000000000000 --- a/ci-scripts/checkCodingFormattingRules.sh +++ /dev/null @@ -1,193 +0,0 @@ -#!/bin/bash -#/* -# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more -# * contributor license agreements. See the NOTICE file distributed with -# * this work for additional information regarding copyright ownership. -# * The OpenAirInterface Software Alliance licenses this file to You under -# * the terms found in the LICENSE file in the root of this -# * source tree. -# * -# * Unless required by applicable law or agreed to in writing, software -# * distributed under the License is distributed on an "AS IS" BASIS, -# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# * See the License for the specific language governing permissions and -# * limitations under the License. 
-# *------------------------------------------------------------------------------- -# * For more information about the OpenAirInterface (OAI) Software Alliance: -# * contact@openairinterface.org -# */ - -function usage { - echo "OAI Coding / Formatting Guideline Check script" - echo " Original Author: Raphael Defosseux" - echo "" - echo " Requirement: clang-format / git shall be installed" - echo "" - echo " By default (no options) the complete repository will be checked" - echo " In case of merge/pull request, provided source and target branch," - echo " the script will check only the modified files" - echo "" - echo "Usage:" - echo "------" - echo " checkCodingFormattingRules.sh [OPTIONS]" - echo "" - echo "Options:" - echo "--------" - echo " --src-branch #### OR -sb ####" - echo " Specify the source branch of the merge request." - echo "" - echo " --target-branch #### OR -tb ####" - echo " Specify the target branch of the merge request (usually develop)." - echo "" - echo " --help OR -h" - echo " Print this help message." 
- echo "" -} - -if [ $# -ne 4 ] && [ $# -ne 1 ] && [ $# -ne 0 ] -then - echo "Syntax Error: not the correct number of arguments" - echo "" - usage - exit 1 -fi - -if [ $# -eq 0 ] -then - echo " ---- Checking the whole repository ----" - echo "" - if [ -f oai_rules_result.txt ] - then - rm -f oai_rules_result.txt - fi - if [ -f oai_rules_result_list.txt ] - then - rm -f oai_rules_result_list.txt - fi - EXTENSION_LIST=("h" "hpp" "c" "cpp") - NB_TO_FORMAT=0 - NB_TOTAL=0 - for EXTENSION in "${EXTENSION_LIST[@]}" - do - echo "Checking for all files with .${EXTENSION} extension" - FILE_LIST=`tree -n --noreport -i -f -P *.${EXTENSION} | sed -e 's#^\./##' | grep -v test | grep "\.${EXTENSION}"` - for FILE_TO_CHECK in "${FILE_LIST[@]}" - do - TO_FORMAT=`clang-format -output-replacements-xml ${FILE_TO_CHECK} 2>&1 | grep -v replacements | grep -c replacement` - NB_TOTAL=$((NB_TOTAL + 1)) - if [ $TO_FORMAT -ne 0 ] - then - NB_TO_FORMAT=$((NB_TO_FORMAT + 1)) - # In case of full repo, being silent - #echo "$FILE_TO_CHECK" - echo "$FILE_TO_CHECK" >> ./oai_rules_result_list.txt - fi - done - done - echo "Nb Files that do NOT follow OAI rules: $NB_TO_FORMAT over $NB_TOTAL checked!" 
- echo "NB_FILES_FAILING_CHECK=$NB_TO_FORMAT" > ./oai_rules_result.txt - echo "NB_FILES_CHECKED=$NB_TOTAL" >> ./oai_rules_result.txt - exit 0 -fi - -checker=0 -while [[ $# -gt 0 ]] -do -key="$1" - -case $key in - -h|--help) - shift - usage - exit 0 - ;; - -sb|--src-branch) - SOURCE_BRANCH="$2" - let "checker|=0x1" - shift - shift - ;; - -tb|--target-branch) - TARGET_BRANCH="$2" - let "checker|=0x2" - shift - shift - ;; - *) - echo "Syntax Error: unknown option: $key" - echo "" - usage - exit 1 -esac - -done - - -if [ $checker -ne 3 ] -then - echo "Source Branch is : $SOURCE_BRANCH" - echo "Target Branch is : $TARGET_BRANCH" - echo "" - echo "Syntax Error: missing option" - echo "" - usage - exit 1 -fi - -# Merge request scenario - -MERGE_COMMMIT=`git log -n1 --pretty=format:%H` -if [ -f .git/refs/remotes/origin/$TARGET_BRANCH ] -then - TARGET_INIT_COMMIT=`cat .git/refs/remotes/origin/$TARGET_BRANCH` -else - TARGET_INIT_COMMIT=`git log -n1 --pretty=format:%H origin/$TARGET_BRANCH` -fi - -echo " ---- Checking the modified files by the merge request ----" -echo "" -echo "Source Branch is : $SOURCE_BRANCH" -echo "Target Branch is : $TARGET_BRANCH" -echo "Merged Commit is : $MERGE_COMMMIT" -echo "Target Init is : $TARGET_INIT_COMMIT" -echo "" -echo " ----------------------------------------------------------" -echo "" - -# Retrieve the list of modified files since the latest develop commit -MODIFIED_FILES=`git log $TARGET_INIT_COMMIT..$MERGE_COMMMIT --oneline --name-status | egrep "^M|^A" | sed -e "s@^M\t*@@" -e "s@^A\t*@@" | sort | uniq | grep -v test` -NB_TO_FORMAT=0 -NB_TOTAL=0 - -if [ -f oai_rules_result.txt ] -then - rm -f oai_rules_result.txt -fi -if [ -f oai_rules_result_list.txt ] -then - rm -f oai_rules_result_list.txt -fi -for FULLFILE in $MODIFIED_FILES -do - filename=$(basename -- "$FULLFILE") - EXT="${filename##*.}" - if [ $EXT = "c" ] || [ $EXT = "h" ] || [ $EXT = "cpp" ] || [ $EXT = "hpp" ] - then - SRC_FILE=`echo $FULLFILE | sed -e "s#src/##"` - 
TO_FORMAT=`clang-format -output-replacements-xml ${SRC_FILE} 2>&1 | grep -v replacements | grep -c replacement` - NB_TOTAL=$((NB_TOTAL + 1)) - if [ $TO_FORMAT -ne 0 ] - then - NB_TO_FORMAT=$((NB_TO_FORMAT + 1)) - echo $FULLFILE - echo $FULLFILE >> ./oai_rules_result_list.txt - fi - fi -done -echo "" -echo " ----------------------------------------------------------" -echo "Nb Files that do NOT follow OAI rules: $NB_TO_FORMAT over $NB_TOTAL checked!" -echo "NB_FILES_FAILING_CHECK=$NB_TO_FORMAT" > ./oai_rules_result.txt -echo "NB_FILES_CHECKED=$NB_TOTAL" >> ./oai_rules_result.txt - -exit 0 diff --git a/ci-scripts/doGitLabMerge.sh b/ci-scripts/doGitLabMerge.sh deleted file mode 100755 index 23445903db2a..000000000000 --- a/ci-scripts/doGitLabMerge.sh +++ /dev/null @@ -1,145 +0,0 @@ -#!/bin/bash -#/* -# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more -# * contributor license agreements. See the NOTICE file distributed with -# * this work for additional information regarding copyright ownership. -# * The OpenAirInterface Software Alliance licenses this file to You under -# * the terms found in the LICENSE file in the root of this -# * source tree. -# * -# * Unless required by applicable law or agreed to in writing, software -# * distributed under the License is distributed on an "AS IS" BASIS, -# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# * See the License for the specific language governing permissions and -# * limitations under the License. 
-# *------------------------------------------------------------------------------- -# * For more information about the OpenAirInterface (OAI) Software Alliance: -# * contact@openairinterface.org -# */ - -function usage { - echo "OAI GitLab merge request applying script" - echo " Original Author: Raphael Defosseux" - echo "" - echo "Usage:" - echo "------" - echo "" - echo " doGitLabMerge.sh [OPTIONS] [MANDATORY_OPTIONS]" - echo "" - echo "Mandatory Options:" - echo "------------------" - echo "" - echo " --src-branch #### OR -sb ####" - echo " Specify the source branch of the merge request." - echo "" - echo " --src-commit #### OR -sc ####" - echo " Specify the source commit ID (SHA-1) of the merge request." - echo "" - echo " --target-branch #### OR -tb ####" - echo " Specify the target branch of the merge request (usually develop)." - echo "" - echo " --target-commit #### OR -tc ####" - echo " Specify the target commit ID (SHA-1) of the merge request." - echo "" - echo "Options:" - echo "--------" - echo " --help OR -h" - echo " Print this help message." 
- echo "" -} - -if [ $# -ne 8 ] && [ $# -ne 1 ] -then - echo "Syntax Error: not the correct number of arguments" - echo "" - usage - exit 1 -fi - -checker=0 -while [[ $# -gt 0 ]] -do -key="$1" - -case $key in - -h|--help) - shift - usage - exit 0 - ;; - -sb|--src-branch) - SOURCE_BRANCH="$2" - let "checker|=0x1" - shift - shift - ;; - -sc|--src-commit) - SOURCE_COMMIT_ID="$2" - let "checker|=0x2" - shift - shift - ;; - -tb|--target-branch) - TARGET_BRANCH="$2" - let "checker|=0x4" - shift - shift - ;; - -tc|--target-commit) - TARGET_COMMIT_ID="$2" - let "checker|=0x8" - shift - shift - ;; - *) - echo "Syntax Error: unknown option: $key" - echo "" - usage - exit 1 -esac - -done - -if [[ $TARGET_COMMIT_ID == "latest" ]] -then - TARGET_COMMIT_ID=`git log -n1 --pretty=format:%H origin/$TARGET_BRANCH` -fi - -echo "Source Branch is : $SOURCE_BRANCH" -echo "Source Commit ID is : $SOURCE_COMMIT_ID" -echo "Target Branch is : $TARGET_BRANCH" -echo "Target Commit ID is : $TARGET_COMMIT_ID" - -if [ $checker -ne 15 ] -then - echo "" - echo "Syntax Error: missing option" - echo "" - usage - exit 1 -fi - -git config user.email "jenkins@openairinterface.org" -git config user.name "OAI Jenkins" - -git checkout -f $SOURCE_COMMIT_ID > checkout.txt 2>&1 -STATUS=`egrep -c "fatal: reference is not a tree" checkout.txt` -rm -f checkout.txt -if [ $STATUS -ne 0 ] -then - echo "fatal: reference is not a tree --> $SOURCE_COMMIT_ID" - STATUS=-1 - exit $STATUS -fi - -git log -n1 --pretty=format:\"%s\" > .git/CI_COMMIT_MSG - -git merge --ff $TARGET_COMMIT_ID -m "Temporary merge for CI" - -STATUS=`git status | egrep -c "You have unmerged paths.|fix conflicts"` -if [ $STATUS -ne 0 ] -then - echo "There are merge conflicts.. 
Cannot perform further build tasks" - STATUS=-1 -fi -exit $STATUS diff --git a/ci-scripts/docker/Dockerfile.mme.ci.rhel8 b/ci-scripts/docker/Dockerfile.mme.ci.rhel8 index 7db91fe30b62..c08d75397cc1 100644 --- a/ci-scripts/docker/Dockerfile.mme.ci.rhel8 +++ b/ci-scripts/docker/Dockerfile.mme.ci.rhel8 @@ -3,7 +3,7 @@ ################################################################ FROM magma-dev-mme:ci-base-image as magma-mme-builder -ARG FEATURES=mme_oai +ENV FEATURES=mme_oai ENV MAGMA_ROOT=/magma ENV BUILD_TYPE=RelWithDebInfo ENV C_BUILD=/build/c @@ -16,12 +16,16 @@ COPY ./ $MAGMA_ROOT # Build MME executables RUN export PKG_CONFIG_PATH=$PKG_CONFIG_PATH:/usr/local/lib/pkgconfig/ && \ + # Remove entitlements + rm -Rf $MAGMA_ROOT/etc-pki-entitlement $MAGMA_ROOT/rhsm-conf $MAGMA_ROOT/rhsm-ca && \ cd $MAGMA_ROOT/lte/gateway && \ echo $FEATURES && \ make build_oai && \ make build_sctpd && \ cp $C_BUILD/core/oai/oai_mme/mme $C_BUILD/core/oai/oai_mme/oai_mme && \ + echo 'Shared libraries for oai_mme' && \ ldd $C_BUILD/core/oai/oai_mme/oai_mme && \ + echo 'Shared libraries for sctpd' && \ ldd $C_BUILD/sctpd/src/sctpd # Prepare config file @@ -54,14 +58,20 @@ RUN cd $MAGMA_ROOT/lte/gateway/docker/mme/configs/ && \ ################################################################ # Target Image ################################################################ -FROM registry.access.redhat.com/ubi8/ubi:latest as magma-mme +FROM registry.access.redhat.com/ubi8/ubi-minimal:latest as magma-mme ENV MAGMA_ROOT=/magma ENV C_BUILD=/build/c +# Copy RHEL certificates for builder image +COPY ./etc-pki-entitlement /etc/pki/entitlement +# Copy the subscription manager configurations +COPY ./rhsm-conf /etc/rhsm +COPY ./rhsm-ca /etc/rhsm/ca + # Install a few tools (may not be necessary later on) ENV TZ=Europe/Paris -RUN yum update -y && \ - yum -y install --enablerepo="ubi-8-codeready-builder" \ +RUN microdnf update -y && \ + microdnf -y install \ libubsan \ libasan \ liblsan \ @@ -70,49 
+80,45 @@ RUN yum update -y && \ procps-ng \ tcpdump \ openssl \ + boost \ + libicu \ + libidn \ + libconfig \ + lksctp-tools \ net-tools \ tzdata && \ - echo "/usr/local/lib" > /etc/ld.so.conf.d/local-lib.conf && \ - echo "/usr/local/lib64" >> /etc/ld.so.conf.d/local-lib.conf && \ - yum clean all -y && \ - rm -rf /var/cache/yum + microdnf clean all -y && \ + rm -rf /var/cache/yum /var/cache/dnf && \ + rm -f /etc/pki/entitlement/*pem /etc/rhsm/ca/*pem # Copy runtime-used shared libraries from builder WORKDIR /lib64 COPY --from=magma-mme-builder \ - /lib64/libsctp.so.1 \ - /lib64/libconfig.so.9 \ - /lib64/libboost_program_options.so.1.66.0 \ - /lib64/libboost_filesystem.so.1.66.0 \ - /lib64/libboost_system.so.1.66.0 \ - /lib64/libboost_regex.so.1.66.0 \ +# From epel8, cannot be installed on minimal UBI + /lib64/libyaml-cpp.so.0.6 \ /lib64/libgflags.so.2.1 \ /lib64/libglog.so.0 \ - /lib64/libczmq.so.3 \ - /lib64/libicudata.so.60 \ - /lib64/libicui18n.so.60 \ - /lib64/libicuuc.so.60 \ - /lib64/libidn.so.11 \ - /usr/local/lib64/libdouble-conversion.so.3 \ - /lib64/ - -WORKDIR /usr/local/lib -COPY --from=magma-mme-builder \ - /usr/local/lib/libnettle.so.4 \ - /usr/local/lib/libgnutls.so.28 \ + /lib64/libdouble-conversion.so.3 \ + /lib64/libunwind.so.8 \ + /lib64/libzmq.so.5 \ + /lib64/libczmq.so.4 \ + /lib64/libsodium.so.23 \ + /lib64/libpgm-5.2.so.0 \ +# From GRPC src build /usr/local/lib/libgrpc.so \ /usr/local/lib/libgrpc++.so \ /usr/local/lib/libgpr.so \ - /usr/local/lib/libyaml-cpp.so.0.6 \ + /usr/local/lib/libaddress_sorting.so \ /usr/local/lib/libcares.so.2 \ - /usr/local/lib/libaddress_sorting.so \ - /usr/local/lib/libunwind.so.8 \ - /usr/local/lib/libfdproto.so.6 \ - /usr/local/lib/libfdcore.so.6 \ /usr/local/lib/libprotobuf.so.17 \ +# From Free Diameter src build + /usr/local/lib/libfdcore.so.6 \ + /usr/local/lib/libfdproto.so.6 \ +# From nettle/gnutls src build + /usr/local/lib/libgnutls.so.28 \ + /usr/local/lib/libnettle.so.4 \ /usr/local/lib/libhogweed.so.2 
\ - /usr/local/lib/libzmq.so.5 \ - /usr/local/lib/ + /lib64/ # Copy all fdx files from freeDiameter installation WORKDIR /usr/local/lib/freeDiameter @@ -128,12 +134,21 @@ COPY --from=magma-mme-builder \ $C_BUILD/sctpd/src/sctpd \ ./ +# Copy the configuration file templates and mean to modify/generate certificates +WORKDIR /magma-mme/etc +COPY --from=magma-mme-builder \ + $MAGMA_ROOT/lte/gateway/docker/mme/configs/mme.conf \ + $MAGMA_ROOT/lte/gateway/docker/mme/configs/mme_fd.conf \ + /magma-mme/etc/ + # Create running dirs WORKDIR /var/opt/magma/configs # Adding mme configuration for stateful run RUN echo "use_stateless: false" > mme.yml && \ openssl rand -out /root/.rnd 128 && \ + echo 'Shared libraries for oai_mme' && \ ldd /magma-mme/bin/oai_mme && \ + echo 'Shared libraries for sctpd' && \ ldd /magma-mme/bin/sctpd WORKDIR /etc/magma diff --git a/ci-scripts/docker/Dockerfile.mme.ci.ubuntu18 b/ci-scripts/docker/Dockerfile.mme.ci.ubuntu18 index 21f7fe593507..a429259acdae 100644 --- a/ci-scripts/docker/Dockerfile.mme.ci.ubuntu18 +++ b/ci-scripts/docker/Dockerfile.mme.ci.ubuntu18 @@ -3,7 +3,7 @@ ################################################################ FROM magma-dev-mme:ci-base-image as magma-mme-builder -ARG FEATURES=mme_oai +ENV FEATURES=mme_oai ENV MAGMA_ROOT=/magma ENV BUILD_TYPE=RelWithDebInfo ENV C_BUILD=/build/c diff --git a/ci-scripts/firebase_publish_report.py b/ci-scripts/firebase_publish_report.py index 6fa6a354e80e..9e1517b8bc30 100644 --- a/ci-scripts/firebase_publish_report.py +++ b/ci-scripts/firebase_publish_report.py @@ -16,11 +16,17 @@ import os import sys import time +from typing import Optional from firebase_admin import credentials, db, initialize_app -def publish_report(worker_id, build_id, verdict, report): +def publish_report( + worker_id: str, + build_id: str, + verdict: str, + report: str, +): """Publish report to Firebase realtime database""" # Read Firebase service account config from envirenment firebase_config = 
os.environ["FIREBASE_SERVICE_CONFIG"] @@ -44,65 +50,68 @@ def publish_report(worker_id, build_id, verdict, report): reports_ref.set(report_dict) -def url_to_html_redirect(run_id, url): +def url_to_html_redirect(run_id: str, url: Optional[str]): """Convert URL into a redirecting HTML page""" report_url = url - if url is None: - report_url = 'https://github.com/magma/magma/actions/runs/' + run_id + if not url: + report_url = f'https://github.com/magma/magma/actions/runs/{run_id}' - return '' + return ( + f'' + ) def lte_integ_test(args): """Prepare and publish LTE Integ Test report""" - report = url_to_html_redirect(args.run_id, args.url) - # Possible args.verdict values are success, failure, or canceled - verdict = 'inconclusive' + prepare_and_publish('lte_integ_test', args, 'test_status.txt') - # As per the recent change, CI process runs all integ tests ignoring the - # failing test cases, because of which CI report always shows lte integ - # test as success. Here we read the CI status from file for more accurate - # lte integ test execution status - if os.path.exists("test_status.txt"): - with open('test_status.txt', 'r') as file: - status_file_content = file.read().rstrip() - expected_verdict_list = ["pass", "fail"] - if status_file_content in expected_verdict_list: - verdict = status_file_content - publish_report('lte_integ_test', args.build_id, verdict, report) + +def make_debian_lte_integ_test(args): + """Prepare and publish LTE Integ Test report""" + prepare_and_publish( + 'make_debian_lte_integ_test', args, 'test_status.txt', + ) def feg_integ_test(args): """Prepare and publish FEG Integ Test report""" + prepare_and_publish('feg_integ_test', args, 'test_status.txt') + + +def cwf_integ_test(args): + """Prepare and publish CWF Integ Test report""" + prepare_and_publish('cwf_integ_test', args) + + +def sudo_python_tests(args): + """Prepare and publish Sudo Python Test report""" + prepare_and_publish('sudo_python_tests', args) + + +def 
prepare_and_publish(test_type: str, args, path: Optional[str] = None): + """Prepare and publish test report""" report = url_to_html_redirect(args.run_id, args.url) - # Possible args.verdict values are success, failure, or canceled + # Possible args.verdict values are success, failure, or inconclusive verdict = 'inconclusive' - # As per the recent change, CI process runs all integ tests ignoring the - # failing test cases, because of which CI report always shows feg integ - # test as success. Here we read the CI status from file for more accurate - # feg integ test execution status - if os.path.exists("test_status.txt"): - with open('test_status.txt', 'r') as file: + if path and os.path.exists(path): + # As per the recent change, CI process runs all integ tests ignoring + # the failing test cases, because of which CI report always shows lte + # integ test as success. Here we read the CI status from file for more + # accurate lte integ test execution status + with open(path, 'r') as file: status_file_content = file.read().rstrip() expected_verdict_list = ["pass", "fail"] if status_file_content in expected_verdict_list: verdict = status_file_content - publish_report('feg_integ_test', args.build_id, verdict, report) - - -def cwf_integ_test(args): - """Prepare and publish CWF Integ Test report""" - report = url_to_html_redirect(args.run_id, args.url) - # Possible args.verdict values are success, failure, or canceled - verdict = 'inconclusive' - if args.verdict.lower() == 'success': - verdict = 'pass' - elif args.verdict.lower() == 'failure': - verdict = 'fail' - publish_report('cwf_integ_test', args.build_id, verdict, report) + else: + if args.verdict.lower() == 'success': + verdict = 'pass' + elif args.verdict.lower() == 'failure': + verdict = 'fail' + publish_report(test_type, args.build_id, verdict, report) # Create the top-level parser @@ -111,6 +120,7 @@ def cwf_integ_test(args): formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) + # Add arguments 
parser.add_argument("--build_id", "-id", required=True, help="build ID") parser.add_argument("--verdict", required=True, help="Test verdict") @@ -119,20 +129,20 @@ def cwf_integ_test(args): # Add subcommands subparsers = parser.add_subparsers(title='subcommands', dest='cmd') -# Create the parser for the "lte" command -parser_lte = subparsers.add_parser('lte') -parser_lte.add_argument("--url", default="none", help="Report URL", nargs='?') -parser_lte.set_defaults(func=lte_integ_test) - -# Create the parser for the "feg" command -parser_feg = subparsers.add_parser('feg') -parser_feg.add_argument("--url", default="none", help="Report URL", nargs='?') -parser_feg.set_defaults(func=feg_integ_test) - -# Create the parser for the "cwf" command -parser_cwf = subparsers.add_parser('cwf') -parser_cwf.add_argument("--url", default="none", help="Report URL", nargs='?') -parser_cwf.set_defaults(func=cwf_integ_test) +tests = { + 'lte': lte_integ_test, + 'feg': feg_integ_test, + 'cwf': cwf_integ_test, + 'sudo_python_tests': sudo_python_tests, + 'make_debian_lte_integ_test': make_debian_lte_integ_test, +} + +for key, value in tests.items(): + test_parser = subparsers.add_parser(key) + test_parser.add_argument( + "--url", default="none", help="Report URL", nargs='?', + ) + test_parser.set_defaults(func=value) # Read arguments from the command line args = parser.parse_args() diff --git a/ci-scripts/flatten_image.py b/ci-scripts/flatten_image.py index 88ce13eea52b..863e42b70ff5 100644 --- a/ci-scripts/flatten_image.py +++ b/ci-scripts/flatten_image.py @@ -57,6 +57,8 @@ def perform_flattening(tag): if podman_check.strip(): cli = 'sudo podman' image_prefix = 'localhost/' + # No more need to flatten with --squash option + return 0 else: cmd = 'which docker || true' docker_check = subprocess.check_output(cmd, shell=True, universal_newlines=True) # noqa: S602 diff --git a/ci-scripts/generateHtmlReport.py b/ci-scripts/generateHtmlReport.py deleted file mode 100644 index 
1d4914595e5a..000000000000 --- a/ci-scripts/generateHtmlReport.py +++ /dev/null @@ -1,766 +0,0 @@ -# /* -# * Licensed to the OpenAirInterface (OAI) Software Alliance under one or more -# * contributor license agreements. See the NOTICE file distributed with -# * this work for additional information regarding copyright ownership. -# * The OpenAirInterface Software Alliance licenses this file to You under -# * the terms found in the LICENSE file in the root of this -# * source tree. -# * -# * Unless required by applicable law or agreed to in writing, software -# * distributed under the License is distributed on an "AS IS" BASIS, -# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# * See the License for the specific language governing permissions and -# * limitations under the License. -# *------------------------------------------------------------------------------- -# * For more information about the OpenAirInterface (OAI) Software Alliance: -# * contact@openairinterface.org -# */ -# --------------------------------------------------------------------- - -import os -import re -import subprocess -import sys - - -class HtmlReport(): - def __init__(self): - self.job_name = '' - self.job_id = '' - self.job_url = '' - self.job_start_time = 'TEMPLATE_TIME' - self.git_url = '' - self.git_src_branch = '' - self.git_src_commit = '' - self.git_src_commit_msg = None - self.git_merge_request = False - self.git_target_branch = '' - self.git_target_commit = '' - - def generate(self): - cwd = os.getcwd() - self.file = open(cwd + '/test_results_magma_converged_mme.html', 'w') - self.generateHeader() - - self.coding_formatting_log() - - self.analyze_sca_log() - - self.addPagination() - - # no-S11 part - self.buildSummaryHeader('agw1-no-s11') - self.makeRunRow('agw1-no-s11') - self.statusCheckRow('agw1-no-s11') - self.buildSummaryFooter() - - self.testSummaryHeader('agw1-no-s11') - self.s1apTesterTable('agw1-no-s11') - self.testSummaryFooter() - - # 
with-S11 part - self.buildSummaryHeader('agw1-with-s11') - self.makeRunRow('agw1-with-s11') - self.statusCheckRow('agw1-with-s11') - self.buildSummaryFooter() - - self.testSummaryHeader('agw1-with-s11') - self.s1apTesterTable('agw1-with-s11') - self.testSummaryFooter() - - self.generateFooter() - self.file.close() - - def generateHeader(self): - # HTML Header - self.file.write('\n') - self.file.write('\n') - self.file.write('\n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' MAGMA/OAI Core Network Test Results for ' + self.job_name + ' job build #' + self.job_id + '\n') - self.file.write('\n') - self.file.write('
\n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write('
\n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' Job Summary -- Job: ' + self.job_name + ' -- Build-ID: ' + self.job_id + '\n') - self.file.write('
\n') - self.file.write('
\n') - - # Build Info Summary - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - #date_formatted = re.sub('\..*', '', self.created) - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - if self.git_merge_request: - self.file.write(' \n') - else: - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - if self.git_merge_request: - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - if (self.git_src_commit_msg is not None): - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - else: - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - if (self.git_src_commit_msg is not None): - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write('
Build Start Time' + self.job_start_time + '
Build TriggerMerge RequestPush Event
GIT Repository' + self.git_url + '
Source Branch' + self.git_src_branch + '
Source Commit ID' + self.git_src_commit + '
Source Commit Message' + self.git_src_commit_msg + '
Target Branch' + self.git_target_branch + '
Target Commit ID' + self.git_target_commit + '
Branch' + self.git_src_branch + '
Commit ID' + self.git_src_commit + '
Commit Message' + self.git_src_commit_msg + '
\n') - self.file.write('
\n') - - def generateFooter(self): - self.file.write(' \n') - self.file.write('
End of Build Report -- Copyright 2020 OpenAirInterface. All Rights Reserved.
\n') - self.file.write('
\n') - self.file.write('\n') - - def coding_formatting_log(self): - cwd = os.getcwd() - self.file.write('

OAI Coding / Formatting Guidelines Check

\n') - if os.path.isfile(cwd + '/oai_rules_result.txt'): - cmd = 'grep NB_FILES_FAILING_CHECK ' + cwd + '/oai_rules_result.txt | sed -e "s#NB_FILES_FAILING_CHECK=##"' - nb_fail = subprocess.check_output(cmd, shell=True, universal_newlines=True) - cmd = 'grep NB_FILES_CHECKED ' + cwd + '/oai_rules_result.txt | sed -e "s#NB_FILES_CHECKED=##"' - nb_total = subprocess.check_output(cmd, shell=True, universal_newlines=True) - if int(nb_fail.strip()) == 0: - self.file.write('
\n') - if self.git_merge_request: - self.file.write(' All modified files in Merge-Request follow OAI rules. -> (' + nb_total.strip() + ' were checked)\n') - else: - self.file.write(' All files in repository follow OAI rules. -> (' + nb_total.strip() + ' were checked)\n') - self.file.write('
\n') - else: - self.file.write('
\n') - if self.git_merge_request: - self.file.write(' ' + nb_fail.strip() + ' modified files in Merge-Request DO NOT follow OAI rules. -> (' + nb_total.strip() + ' were checked)\n') - else: - self.file.write(' ' + nb_fail.strip() + ' files in repository DO NOT follow OAI rules. -> (' + nb_total.strip() + ' were checked)\n') - self.file.write('
\n') - - if os.path.isfile(cwd + '/oai_rules_result_list.txt'): - self.file.write(' \n') - self.file.write('
\n') - self.file.write('

Please apply the following command to this(ese) file(s):

\n') - self.file.write('

clang-format -i filename(s)

\n') - self.file.write(' \n') - self.file.write(' \n') - with open(cwd + '/oai_rules_result_list.txt', 'r') as filelist: - for line in filelist: - self.file.write(' \n') - filelist.close() - self.file.write('
Filename
' + line.strip() + '
\n') - self.file.write('
\n') - else: - self.file.write('
\n') - self.file.write(' Was NOT performed (with CLANG-FORMAT tool). \n') - self.file.write('
\n') - - self.file.write('
\n') - - def analyze_sca_log(self): - cwd = os.getcwd() - if os.path.isfile(cwd + '/archives/cppcheck_build.log'): - self.file.write('

Static Code Analysis

\n') - if os.path.isfile(cwd + '/archives/cppcheck.xml'): - nb_errors = 0 - nb_warnings = 0 - nb_uninitvar = 0 - nb_uninitStructMember = 0 - nb_memleak = 0 - nb_doubleFree = 0 - nb_resourceLeak = 0 - nb_nullPointer = 0 - nb_arrayIndexOutOfBounds = 0 - nb_bufferAccessOutOfBounds = 0 - nb_unknownEvaluationOrder = 0 - with open(cwd + '/archives/cppcheck.xml', 'r') as xmlfile: - for line in xmlfile: - result = re.search('severity="warning"', line) - if result is not None: - nb_warnings += 1 - result = re.search('severity="error"', line) - if result is not None: - nb_errors += 1 - result = re.search('uninitvar', line) - if result is not None: - nb_uninitvar += 1 - result = re.search('uninitStructMember', line) - if result is not None: - nb_uninitStructMember += 1 - result = re.search('memleak', line) - if result is not None: - nb_memleak += 1 - result = re.search('doubleFree', line) - if result is not None: - nb_doubleFree += 1 - result = re.search('resourceLeak', line) - if result is not None: - nb_resourceLeak += 1 - result = re.search('nullPointer', line) - if result is not None: - nb_nullPointer += 1 - result = re.search('arrayIndexOutOfBounds', line) - if result is not None: - nb_arrayIndexOutOfBounds += 1 - result = re.search('bufferAccessOutOfBounds', line) - if result is not None: - nb_bufferAccessOutOfBounds += 1 - result = re.search('unknownEvaluationOrder', line) - if result is not None: - nb_unknownEvaluationOrder += 1 - xmlfile.close() - if (nb_errors == 0) and (nb_warnings == 0): - self.file.write('
\n') - self.file.write(' CPPCHECK found NO error and NO warning \n') - self.file.write('
\n') - elif (nb_errors == 0): - self.file.write('
\n') - self.file.write(' CPPCHECK found NO error and ' + str(nb_warnings) + ' warnings \n') - self.file.write('
\n') - else: - self.file.write('
\n') - self.file.write(' CPPCHECK found ' + str(nb_errors) + ' errors and ' + str(nb_warnings) + ' warnings \n') - self.file.write('
\n') - if (nb_errors > 0) or (nb_warnings > 0): - self.file.write(' \n') - self.file.write('
\n') - self.file.write('
\n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - nb_others = nb_uninitvar + nb_uninitStructMember + nb_memleak + nb_doubleFree + nb_resourceLeak + nb_nullPointer + nb_arrayIndexOutOfBounds + nb_arrayIndexOutOfBounds + nb_bufferAccessOutOfBounds + nb_unknownEvaluationOrder - nb_others = nb_errors - nb_others - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write(' \n') - self.file.write('
Error / Warning TypeNb ErrorsNb Warnings
Uninitialized variable' + str(nb_uninitvar) + 'N/A
Uninitialized struct member' + str(nb_uninitStructMember) + 'N/A
Memory leak' + str(nb_memleak) + 'N/A
Memory is freed twice' + str(nb_doubleFree) + 'N/A
Resource leak' + str(nb_resourceLeak) + 'N/A
Possible null pointer dereference' + str(nb_nullPointer) + 'N/A
Array access out of bounds' + str(nb_arrayIndexOutOfBounds) + 'N/A
Buffer is accessed out of bounds' + str(nb_bufferAccessOutOfBounds) + 'N/A
Expression depends on order of evaluation of side effects' + str(nb_unknownEvaluationOrder) + 'N/A
Others' + str(nb_others) + '' + str(nb_warnings) + '
Total' + str(nb_errors) + '' + str(nb_warnings) + '
\n') - self.file.write('
\n') - self.file.write('

Full details in artifact (cppcheck.xml)

\n') - self.file.write('

Graphical Interface tool : cppcheck-gui -l cppcheck.xml

\n') - self.file.write('
\n') - self.file.write('
\n') - else: - self.file.write('
\n') - self.file.write(' Was NOT performed (with CPPCHECK tool). \n') - self.file.write('
\n') - - def addPagination(self): - self.file.write('\n') - self.file.write('