diff --git a/.dockerignore b/.dockerignore index 39efdabca19a..528c0ed9793a 100644 --- a/.dockerignore +++ b/.dockerignore @@ -20,11 +20,12 @@ keys/setup !prover/ !yarn.lock !package.json -!Cargo.lock -!Cargo.toml +!core/Cargo.lock +!core/Cargo.toml !contracts/ !setup_2\^26.key !setup_2\^24.key +!setup_compact.key # It's required to remove .git from contracts, # otherwise yarn tries to use .git parent directory that # doesn't exist. diff --git a/.githooks/pre-commit b/.githooks/pre-commit index 1f0c6b945b65..df99db16605f 100755 --- a/.githooks/pre-commit +++ b/.githooks/pre-commit @@ -15,8 +15,6 @@ check_fmt () { fi } -check_fmt - -cd prover/ - -check_fmt +( cd core/ && check_fmt ) +( cd prover/ && check_fmt ) +( cd zkstack_cli/ && check_fmt ) diff --git a/.github/release-please/config.json b/.github/release-please/config.json index 358e249a18bd..28c3583af29e 100644 --- a/.github/release-please/config.json +++ b/.github/release-please/config.json @@ -5,26 +5,27 @@ "bump-minor-pre-major": true, "bump-patch-for-minor-pre-major": true, "include-component-in-tag": true, + "release-type": "simple", "packages": { "core": { - "release-type": "simple", - "component": "core", + "component": "core" + }, + "prover": { + "component": "prover", "extra-files": [ { "type": "generic", - "path": "bin/external_node/Cargo.toml" + "path": "Cargo.toml" } ] }, - "prover": { - "release-type": "simple", - "component": "prover" - }, "zkstack_cli": { - "release-type": "simple", "component": "zkstack_cli", - "plugins": [ - "cargo-workspace" + "extra-files": [ + { + "type": "generic", + "path": "Cargo.toml" + } ] } } diff --git a/.github/release-please/manifest.json b/.github/release-please/manifest.json index c43a992917e1..0ba598bb0516 100644 --- a/.github/release-please/manifest.json +++ b/.github/release-please/manifest.json @@ -1,5 +1,5 @@ { - "core": "25.4.0", + "core": "26.1.0", "prover": "17.1.1", "zkstack_cli": "0.1.2" } diff --git 
a/.github/workflows/build-prover-fri-gpu-gar-and-circuit-prover-gpu-gar.yml b/.github/workflows/build-circuit-prover-gpu-gar.yml similarity index 67% rename from .github/workflows/build-prover-fri-gpu-gar-and-circuit-prover-gpu-gar.yml rename to .github/workflows/build-circuit-prover-gpu-gar.yml index 30990889caf6..a8e86d545c9f 100644 --- a/.github/workflows/build-prover-fri-gpu-gar-and-circuit-prover-gpu-gar.yml +++ b/.github/workflows/build-circuit-prover-gpu-gar.yml @@ -27,7 +27,7 @@ jobs: - name: Download Setup data run: | - gsutil -m rsync -r gs://matterlabs-setup-data-us/${{ inputs.setup_keys_id }} docker/prover-gpu-fri-gar + gsutil -m rsync -r gs://matterlabs-setup-data-us/${{ inputs.setup_keys_id }} docker/circuit-prover-gpu-gar - name: Login to us-central1 GAR run: | @@ -47,32 +47,6 @@ jobs: run: | gcloud auth print-access-token --lifetime=7200 --impersonate-service-account=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com | docker login -u oauth2accesstoken --password-stdin https://europe-docker.pkg.dev - - name: Build and push prover-gpu-fri-gar - uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0 - with: - context: docker/prover-gpu-fri-gar - build-args: | - PROVER_IMAGE=${{ inputs.protocol_version }}-${{ inputs.image_tag_suffix }} - push: true - tags: | - us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/prover-fri-gpu-gar:2.0-${{ inputs.protocol_version }}-${{ inputs.image_tag_suffix }} - - - name: Build and push prover-gpu-fri-gar to Asia GAR - run: | - docker buildx imagetools create \ - --tag asia-docker.pkg.dev/matterlabs-infra/matterlabs-docker/prover-fri-gpu-gar:2.0-${{ inputs.protocol_version }}-${{ inputs.image_tag_suffix }} \ - us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/prover-fri-gpu-gar:2.0-${{ inputs.protocol_version }}-${{ inputs.image_tag_suffix }} - - - name: Build and push prover-gpu-fri-gar to Europe GAR - run: | - docker buildx imagetools create \ - --tag 
europe-docker.pkg.dev/matterlabs-infra/matterlabs-docker/prover-fri-gpu-gar:2.0-${{ inputs.protocol_version }}-${{ inputs.image_tag_suffix }} \ - us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/prover-fri-gpu-gar:2.0-${{ inputs.protocol_version }}-${{ inputs.image_tag_suffix }} - - - name: Move Setup data from prover-gpu-fri-gar to circuit-prover-gpu-gar - run: | - mv -v docker/prover-gpu-fri-gar/*.bin docker/circuit-prover-gpu-gar/ - - name: Build and push circuit-prover-gpu-gar uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0 with: diff --git a/.github/workflows/build-contract-verifier-template.yml b/.github/workflows/build-contract-verifier-template.yml index 1481e542de57..973ed9edcb4d 100644 --- a/.github/workflows/build-contract-verifier-template.yml +++ b/.github/workflows/build-contract-verifier-template.yml @@ -13,44 +13,37 @@ on: description: "Optional suffix to override tag name generation" type: string required: false - action: - description: "Action with docker image" - type: string - default: "push" - required: false compilers: description: 'JSON of required compilers and their versions' type: string required: false default: '[{ "zksolc": ["1.3.14", "1.3.16", "1.3.17", "1.3.1", "1.3.7", "1.3.18", "1.3.19", "1.3.21"] } , { "zkvyper": ["1.3.13"] }]' -jobs: - build-images: - name: Build and Push Docker Images - env: - IMAGE_TAG_SUFFIX: ${{ inputs.image_tag_suffix }} - runs-on: ${{ fromJSON('["matterlabs-ci-runner-high-performance", "matterlabs-ci-runner-arm"]')[contains(matrix.platforms, 'arm')] }} - strategy: - matrix: - components: - - contract-verifier - - verified-sources-fetcher - platforms: - - linux/amd64 + action: + type: string + default: non-push + required: false +jobs: + prepare-contracts: + name: Prepare contracts + runs-on: matterlabs-ci-runner-high-performance steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4 + - uses: 
actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 with: submodules: "recursive" - - name: setup-env + - name: Prepare ENV + shell: bash run: | echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV echo CI=1 >> $GITHUB_ENV echo $(pwd)/bin >> $GITHUB_PATH + echo $HOME/.local/bin >> $GITHUB_PATH echo CI=1 >> .env echo IN_DOCKER=1 >> .env - name: Download contracts + shell: bash run: | commit_sha=$(git submodule status contracts | awk '{print $1}' | tr -d '-') page=1 @@ -80,8 +73,41 @@ jobs: tar -C ./contracts -zxf l2-contracts.tar.gz tar -C ./contracts -zxf system-contracts.tar.gz - - name: pre-download compilers + - name: Install Apt dependencies + if: env.BUILD_CONTRACTS == 'true' + shell: bash + run: | + sudo apt-get update && sudo apt-get install -y libssl-dev pkg-config + + - name: Install Node + if: env.BUILD_CONTRACTS == 'true' + uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3 + with: + node-version: 20 + cache: 'npm' + + - name: Install Yarn + if: env.BUILD_CONTRACTS == 'true' + run: npm install -g yarn + + - name: Setup rust + if: env.BUILD_CONTRACTS == 'true' + uses: actions-rust-lang/setup-rust-toolchain@1fbea72663f6d4c03efaab13560c8a24cfd2a7cc # v1.9.0 + with: + toolchain: nightly-2024-08-01 + + - name: Install foundry-zksync + if: env.BUILD_CONTRACTS == 'true' + run: | + mkdir ./foundry-zksync + curl -LO https://github.com/matter-labs/foundry-zksync/releases/download/nightly-27360d4c8d12beddbb730dae07ad33a206b38f4b/foundry_nightly_linux_amd64.tar.gz + tar zxf foundry_nightly_linux_amd64.tar.gz -C ./foundry-zksync + chmod +x ./foundry-zksync/forge ./foundry-zksync/cast + echo "$PWD/foundry-zksync" >> $GITHUB_PATH + + - name: Pre-download compilers if: env.BUILD_CONTRACTS == 'true' + shell: bash run: | # Download needed versions of vyper compiler # Not sanitized due to unconventional path and tags @@ -98,59 +124,128 @@ jobs: chmod +x "./hardhat-nodejs/compilers-v2/$compiler/${compiler}-v${version}" done - - name: start-services 
+ - name: Install zkstack + if: env.BUILD_CONTRACTS == 'true' + run: | + ./zkstack_cli/zkstackup/install --path ./zkstack_cli/zkstackup/zkstackup + zkstackup --local || true + + - name: build contracts + if: env.BUILD_CONTRACTS == 'true' + shell: bash run: | - echo "IMAGE_TAG_SUFFIX=${{ env.IMAGE_TAG_SUFFIX }}" >> .env - run_retried docker compose pull zk postgres - docker compose up -d zk postgres - ci_run pre_download_compilers.sh - ci_run sccache --start-server + cp etc/tokens/{test,localhost}.json + zkstack dev contracts - - name: init + - name: Upload contracts + uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0 + with: + name: contacts-verifier + path: | + ./contracts + + build-images: + name: Build and Push Docker Images + needs: prepare-contracts + permissions: + packages: write + contents: read + runs-on: ${{ fromJSON('["matterlabs-ci-runner-high-performance", "matterlabs-ci-runner-arm"]')[contains(matrix.platforms, 'arm')] }} + strategy: + matrix: + components: + - contract-verifier + - verified-sources-fetcher + platforms: + - linux/amd64 + + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + with: + submodules: "recursive" + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1 + + - name: Setup env + shell: bash run: | - ci_run git config --global --add safe.directory /usr/src/zksync - ci_run git config --global --add safe.directory /usr/src/zksync/sdk/binaryen - ci_run git config --global --add safe.directory /usr/src/zksync/contracts/system-contracts - ci_run git config --global --add safe.directory /usr/src/zksync/contracts - ci_run ./bin/zk || true - ci_run run_retried curl -LO https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^26.key - - - name: install zkstack + echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV + echo CI=1 >> $GITHUB_ENV + echo $(pwd)/bin >> $GITHUB_PATH + echo CI=1 >> .env + echo 
IN_DOCKER=1 >> .env + + - name: Download setup key + shell: bash run: | - ci_run ./zkstack_cli/zkstackup/install -g --path ./zkstack_cli/zkstackup/zkstackup || true - ci_run zkstackup -g --local + if [ -f "/setup_2^26.key" ]; then + cp '/setup_2^26.key' './setup_2^26.key' + else + run_retried curl -LO https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^26.key + fi - - name: build contracts - if: env.BUILD_CONTRACTS == 'true' + - name: Set env vars + shell: bash run: | - ci_run cp etc/tokens/{test,localhost}.json - ci_run zkstack dev contracts + echo PLATFORM=$(echo ${{ matrix.platforms }} | tr '/' '-') >> $GITHUB_ENV + echo IMAGE_TAG_SHA=$(git rev-parse --short HEAD) >> $GITHUB_ENV + # Support for custom tag suffix + if [ -n "${{ inputs.image_tag_suffix }}" ]; then + echo IMAGE_TAG_SHA_TS="${{ inputs.image_tag_suffix }}" >> $GITHUB_ENV + else + echo IMAGE_TAG_SHA_TS=$(git rev-parse --short HEAD)-$(date +%s) >> $GITHUB_ENV + fi - - name: Login to Docker registries + - name: Download contracts + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 + with: + name: contacts-verifier + path: | + ./contracts + + - name: login to Docker registries if: ${{ inputs.action == 'push' }} + shell: bash run: | - ci_run docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} - ci_run gcloud auth configure-docker us-docker.pkg.dev -q - - - name: update-images - env: - DOCKER_ACTION: ${{ inputs.action }} - COMPONENT: ${{ matrix.components }} - PLATFORM: ${{ matrix.platforms }} - run: | - ci_run run_retried rustup default nightly-2024-05-07 - platform=$(echo $PLATFORM | tr '/' '-') - ci_run zk docker $DOCKER_ACTION --custom-tag=${IMAGE_TAG_SUFFIX} --platform=${PLATFORM} $COMPONENT - - name: Show sccache stats - if: always() + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} + gcloud auth configure-docker us-docker.pkg.dev -q + + - name: Login to GitHub Container Registry + 
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 + if: ${{ inputs.action == 'push' }} + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build and push + uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0 + with: + context: . + load: true + platforms: ${{ matrix.platforms }} + file: docker/${{ matrix.components }}/Dockerfile + build-args: | + SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage + SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com + SCCACHE_GCS_RW_MODE=READ_WRITE + RUSTC_WRAPPER=sccache + tags: | + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }} + matterlabs/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }} + ghcr.io/${{ github.repository_owner }}/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }} + + - name: Push docker image + if: ${{ inputs.action == 'push' }} run: | - ci_run sccache --show-stats || true - ci_run cat /tmp/sccache_log.txt || true + docker push us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }} + docker push matterlabs/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }} + docker push ghcr.io/${{ github.repository_owner }}/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }} create_manifest: name: Create release manifest - # TODO: After migraton switch to CI - runs-on: matterlabs-default-infra-runners + runs-on: matterlabs-ci-runner needs: build-images if: ${{ inputs.action == 'push' }} strategy: diff --git a/.github/workflows/build-core-template.yml b/.github/workflows/build-core-template.yml index 15d4432191dd..122bbd747147 100644 --- a/.github/workflows/build-core-template.yml +++ b/.github/workflows/build-core-template.yml @@ -13,11 +13,6 @@ on: 
description: "Optional suffix to override tag name generation" type: string required: false - action: - description: "Action with docker image" - type: string - default: "push" - required: false compilers: description: 'JSON of required compilers and their versions' type: string @@ -28,38 +23,32 @@ on: type: boolean required: false default: false -jobs: - build-images: - name: Build and Push Docker Images - env: - IMAGE_TAG_SUFFIX: ${{ inputs.image_tag_suffix }}${{ (inputs.en_alpha_release && matrix.components == 'external-node') && '-alpha' || '' }} - runs-on: ${{ fromJSON('["matterlabs-ci-runner-high-performance", "matterlabs-ci-runner-arm"]')[contains(matrix.platforms, 'arm')] }} - strategy: - matrix: - components: - - server-v2 - - external-node - - snapshots-creator - platforms: - - linux/amd64 - include: - - components: external-node - platforms: linux/arm64 + action: + type: string + required: false + default: "do nothing" +jobs: + prepare-contracts: + name: Prepare contracts + runs-on: matterlabs-ci-runner-high-performance steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4 + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 with: submodules: "recursive" - - name: setup-env + - name: Prepare ENV + shell: bash run: | echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV echo CI=1 >> $GITHUB_ENV echo $(pwd)/bin >> $GITHUB_PATH + echo $HOME/.local/bin >> $GITHUB_PATH echo CI=1 >> .env echo IN_DOCKER=1 >> .env - name: Download contracts + shell: bash run: | commit_sha=$(git submodule status contracts | awk '{print $1}' | tr -d '-') page=1 @@ -71,15 +60,11 @@ jobs: if [ $(jq length <<<"$tags") -eq 0 ]; then echo "No tag found on all pages." 
echo "BUILD_CONTRACTS=true" >> "$GITHUB_ENV" - # TODO Remove it when we migrate to foundry inside contracts repository - mkdir -p contracts/l1-contracts/artifacts/ exit 0 fi filtered_tag=$(jq -r --arg commit_sha "$commit_sha" 'map(select(.commit.sha == $commit_sha)) | .[].name' <<<"$tags") if [[ ! -z "$filtered_tag" ]]; then echo "BUILD_CONTRACTS=false" >> "$GITHUB_ENV" - # TODO Remove it when we migrate to foundry inside contracts repository - mkdir -p contracts/l1-contracts/out break fi ((page++)) @@ -93,8 +78,41 @@ jobs: tar -C ./contracts -zxf l2-contracts.tar.gz tar -C ./contracts -zxf system-contracts.tar.gz - - name: pre-download compilers + - name: Install Apt dependencies if: env.BUILD_CONTRACTS == 'true' + shell: bash + run: | + sudo apt-get update && sudo apt-get install -y libssl-dev pkg-config + + - name: Install Node + if: env.BUILD_CONTRACTS == 'true' + uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3 + with: + node-version: 20 + cache: 'npm' + + - name: Install Yarn + if: env.BUILD_CONTRACTS == 'true' + run: npm install -g yarn + + - name: Setup rust + if: env.BUILD_CONTRACTS == 'true' + uses: actions-rust-lang/setup-rust-toolchain@1fbea72663f6d4c03efaab13560c8a24cfd2a7cc # v1.9.0 + with: + toolchain: nightly-2024-08-01 + + - name: Install foundry-zksync + if: env.BUILD_CONTRACTS == 'true' + run: | + mkdir ./foundry-zksync + curl -LO https://github.com/matter-labs/foundry-zksync/releases/download/nightly-27360d4c8d12beddbb730dae07ad33a206b38f4b/foundry_nightly_linux_amd64.tar.gz + tar zxf foundry_nightly_linux_amd64.tar.gz -C ./foundry-zksync + chmod +x ./foundry-zksync/forge ./foundry-zksync/cast + echo "$PWD/foundry-zksync" >> $GITHUB_PATH + + - name: Pre-download compilers + if: env.BUILD_CONTRACTS == 'true' + shell: bash run: | # Download needed versions of vyper compiler # Not sanitized due to unconventional path and tags @@ -111,60 +129,132 @@ jobs: chmod +x 
"./hardhat-nodejs/compilers-v2/$compiler/${compiler}-v${version}" done - - name: start-services - run: | - echo "IMAGE_TAG_SUFFIX=${{ env.IMAGE_TAG_SUFFIX }}" >> .env - run_retried docker compose pull zk postgres - docker compose up -d zk postgres - ci_run pre_download_compilers.sh - ci_run sccache --start-server - - - name: init - run: | - ci_run git config --global --add safe.directory /usr/src/zksync - ci_run git config --global --add safe.directory /usr/src/zksync/sdk/binaryen - ci_run git config --global --add safe.directory /usr/src/zksync/contracts/system-contracts - ci_run git config --global --add safe.directory /usr/src/zksync/contracts - ci_run ./bin/zk || true - ci_run run_retried curl -LO https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^26.key - - name: Install zkstack if: env.BUILD_CONTRACTS == 'true' run: | - ci_run ./zkstack_cli/zkstackup/install -g --path ./zkstack_cli/zkstackup/zkstackup || true - ci_run zkstackup -g --local + ./zkstack_cli/zkstackup/install --path ./zkstack_cli/zkstackup/zkstackup + zkstackup --local || true - name: build contracts if: env.BUILD_CONTRACTS == 'true' + shell: bash run: | - ci_run cp etc/tokens/{test,localhost}.json - ci_run zkstack dev contracts --system-contracts --l1-contracts --l2-contracts + cp etc/tokens/{test,localhost}.json + zkstack dev contracts - - name: Login to Docker registries - if: ${{ inputs.action == 'push' }} + - name: Upload contracts + uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0 + with: + name: contacts + path: | + ./contracts + + build-images: + name: Build and Push Docker Images + needs: prepare-contracts + env: + IMAGE_TAG_SUFFIX: ${{ inputs.image_tag_suffix }}${{ (inputs.en_alpha_release && matrix.components == 'external-node') && '-alpha' || '' }} + runs-on: ${{ fromJSON('["matterlabs-ci-runner-high-performance", "matterlabs-ci-runner-arm"]')[contains(matrix.platforms, 'arm')] }} + permissions: + packages: write + contents: read 
+ strategy: + matrix: + components: + - server-v2 + - external-node + - snapshots-creator + platforms: + - linux/amd64 + include: + - components: external-node + platforms: linux/arm64 + + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + with: + submodules: "recursive" + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1 + + - name: Setup env + shell: bash + run: | + echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV + echo CI=1 >> $GITHUB_ENV + echo $(pwd)/bin >> $GITHUB_PATH + echo CI=1 >> .env + echo IN_DOCKER=1 >> .env + + - name: Download setup key + shell: bash run: | - ci_run docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} - ci_run gcloud auth configure-docker us-docker.pkg.dev -q - - - name: update-images - env: - DOCKER_ACTION: ${{ inputs.action }} - COMPONENT: ${{ matrix.components }} - PLATFORM: ${{ matrix.platforms }} + if [ -f "/setup_2^26.key" ]; then + cp '/setup_2^26.key' './setup_2^26.key' + else + run_retried curl -LO https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^26.key + fi + + - name: Set env vars + shell: bash run: | - ci_run run_retried rustup default nightly-2024-05-07 - platform=$(echo $PLATFORM | tr '/' '-') - ci_run zk docker $DOCKER_ACTION --custom-tag=${IMAGE_TAG_SUFFIX} --platform=${PLATFORM} $COMPONENT - - name: Show sccache stats - if: always() + echo PLATFORM=$(echo ${{ matrix.platforms }} | tr '/' '-') >> $GITHUB_ENV + if [ -n "${{ inputs.image_tag_suffix }}" ]; then + echo IMAGE_TAG_SHA_TS="${{ env.IMAGE_TAG_SUFFIX }}" >> $GITHUB_ENV + else + echo IMAGE_TAG_SHA_TS=$(git rev-parse --short HEAD)-$(date +%s) >> $GITHUB_ENV + fi + + - name: Download contracts + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 + with: + name: contacts + path: | + ./contracts + + - name: login to Docker registries + if: ${{ inputs.action == 'push' }} + shell: bash + run: | 
+ docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} + gcloud auth configure-docker us-docker.pkg.dev -q + + - name: Login to GitHub Container Registry + uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 + if: ${{ inputs.action == 'push' }} + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build docker image + uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0 + with: + context: . + load: true + platforms: ${{ matrix.platforms }} + file: docker/${{ matrix.components }}/Dockerfile + build-args: | + SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage + SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com + SCCACHE_GCS_RW_MODE=READ_WRITE + RUSTC_WRAPPER=sccache + tags: | + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }} + ghcr.io/${{ github.repository_owner }}/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }} + matterlabs/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }} + + - name: Push docker image + if: ${{ inputs.action == 'push' }} run: | - ci_run sccache --show-stats || true - ci_run cat /tmp/sccache_log.txt || true + docker push us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }} + docker push matterlabs/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }} + docker push ghcr.io/${{ github.repository_owner }}/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }} create_manifest: name: Create release manifest - # TODO: After migraton switch to CI - runs-on: matterlabs-default-infra-runners + runs-on: matterlabs-ci-runner needs: build-images if: ${{ inputs.action == 'push' }} strategy: diff --git a/.github/workflows/build-docker-from-tag.yml 
b/.github/workflows/build-docker-from-tag.yml index e48539c90738..3d637a224a99 100644 --- a/.github/workflows/build-docker-from-tag.yml +++ b/.github/workflows/build-docker-from-tag.yml @@ -49,7 +49,7 @@ jobs: build-push-core-images: name: Build and push image needs: [setup] - uses: ./.github/workflows/new-build-core-template.yml + uses: ./.github/workflows/build-core-template.yml if: contains(github.ref_name, 'core') secrets: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} @@ -74,7 +74,7 @@ jobs: build-push-contract-verifier: name: Build and push image needs: [setup] - uses: ./.github/workflows/new-build-contract-verifier-template.yml + uses: ./.github/workflows/build-contract-verifier-template.yml if: contains(github.ref_name, 'contract_verifier') secrets: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} @@ -86,7 +86,7 @@ jobs: build-push-prover-images: name: Build and push image needs: [setup] - uses: ./.github/workflows/new-build-prover-template.yml + uses: ./.github/workflows/build-prover-template.yml if: contains(github.ref_name, 'prover') with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} @@ -99,7 +99,7 @@ jobs: build-push-witness-generator-image-avx512: name: Build and push image needs: [setup] - uses: ./.github/workflows/new-build-witness-generator-template.yml + uses: ./.github/workflows/build-witness-generator-template.yml if: contains(github.ref_name, 'prover') with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }}-avx512 @@ -110,10 +110,20 @@ jobs: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} - build-gar-prover-fri-gpu-and-circuit-prover-gpu-gar: + build-circuit-prover-gpu-gar: name: Build GAR prover FRI GPU needs: [setup, build-push-prover-images] - uses: ./.github/workflows/build-prover-fri-gpu-gar-and-circuit-prover-gpu-gar.yml + uses: ./.github/workflows/build-circuit-prover-gpu-gar.yml + if: contains(github.ref_name, 'prover') + with: + setup_keys_id: ${{ 
needs.setup.outputs.prover_fri_gpu_key_id }} + image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} + protocol_version: ${{ needs.build-push-prover-images.outputs.protocol_version }} + + build-gar-proof-fri-gpu-compressor-gar: + name: Build GAR prover FRI GPU + needs: [setup, build-push-prover-images] + uses: ./.github/workflows/build-proof-fri-gpu-compressor-gar.yml if: contains(github.ref_name, 'prover') with: setup_keys_id: ${{ needs.setup.outputs.prover_fri_gpu_key_id }} diff --git a/.github/workflows/build-proof-fri-gpu-compressor-gar.yml b/.github/workflows/build-proof-fri-gpu-compressor-gar.yml new file mode 100644 index 000000000000..aeaaf5ff190d --- /dev/null +++ b/.github/workflows/build-proof-fri-gpu-compressor-gar.yml @@ -0,0 +1,71 @@ +name: Build Proof FRI GPU Compressor with builtin setup fflonk key + +on: + workflow_call: + inputs: + image_tag_suffix: + description: "Commit sha or git tag for Docker tag" + required: true + type: string + setup_keys_id: + description: "Commit sha for downloading setup data from bucket dir" + required: true + type: string + protocol_version: + description: "Protocol version to be included in the images tag" + required: true + type: string + +jobs: + build: + name: Build proof FRI GPU Compressor gar + runs-on: [matterlabs-ci-runner-high-performance] + steps: + - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4 + with: + submodules: "recursive" + + - name: Download FFLONK key and setup data + run: | + gsutil -m rsync -r gs://matterlabs-setup-data-us/${{ inputs.setup_keys_id }} docker/proof-fri-gpu-compressor-gar + gsutil -m cp -r gs://matterlabs-setup-keys-us/setup-keys/setup_compact.key docker/proof-fri-gpu-compressor-gar + + - name: Login to us-central1 GAR + run: | + gcloud auth print-access-token --lifetime=7200 --impersonate-service-account=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com | docker login -u oauth2accesstoken --password-stdin https://us-docker.pkg.dev + + - name: 
Set up QEMU + uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf # v3.2.0 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1 + + - name: Login to Asia GAR + run: | + gcloud auth print-access-token --lifetime=7200 --impersonate-service-account=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com | docker login -u oauth2accesstoken --password-stdin https://asia-docker.pkg.dev + + - name: Login to Europe GAR + run: | + gcloud auth print-access-token --lifetime=7200 --impersonate-service-account=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com | docker login -u oauth2accesstoken --password-stdin https://europe-docker.pkg.dev + + - name: Build and push proof-fri-gpu-compressor-gar + uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0 + with: + context: docker/proof-fri-gpu-compressor-gar + build-args: | + PROOF_COMPRESSOR_IMAGE=${{ inputs.protocol_version }}-${{ inputs.image_tag_suffix }} + push: true + tags: | + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/proof-fri-gpu-compressor-gar:2.0-${{ inputs.protocol_version }}-${{ inputs.image_tag_suffix }} + + - name: Build and push proof-fri-gpu-compressor-gar to Asia GAR + run: | + docker buildx imagetools create \ + --tag asia-docker.pkg.dev/matterlabs-infra/matterlabs-docker/proof-fri-gpu-compressor-gar:2.0-${{ inputs.protocol_version }}-${{ inputs.image_tag_suffix }} \ + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/proof-fri-gpu-compressor-gar:2.0-${{ inputs.protocol_version }}-${{ inputs.image_tag_suffix }} + + - name: Build and push proof-fri-gpu-compressor-gar to Europe GAR + run: | + docker buildx imagetools create \ + --tag europe-docker.pkg.dev/matterlabs-infra/matterlabs-docker/proof-fri-gpu-compressor-gar:2.0-${{ inputs.protocol_version }}-${{ inputs.image_tag_suffix }} \ + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/proof-fri-gpu-compressor-gar:2.0-${{ 
inputs.protocol_version }}-${{ inputs.image_tag_suffix }} diff --git a/.github/workflows/build-prover-template.yml b/.github/workflows/build-prover-template.yml index 762ec496943c..dc097b240b70 100644 --- a/.github/workflows/build-prover-template.yml +++ b/.github/workflows/build-prover-template.yml @@ -32,35 +32,23 @@ on: type: string default: "75;80;89" required: false + # Details: https://arnon.dk/matching-sm-architectures-arch-and-gencode-for-various-nvidia-cards/ + # L4: 89 + # T4: 75 + # A100: 80 outputs: protocol_version: description: "Protocol version of the binary" - value: ${{ jobs.build-images.outputs.protocol_version }} + value: ${{ jobs.get-protocol-version.outputs.protocol_version }} jobs: - build-images: - name: Build and Push Docker Images - env: - IMAGE_TAG_SUFFIX: ${{ inputs.image_tag_suffix }} - RUNNER_COMPOSE_FILE: "docker-compose-runner-nightly.yml" - ERA_BELLMAN_CUDA_RELEASE: ${{ inputs.ERA_BELLMAN_CUDA_RELEASE }} - CUDA_ARCH: ${{ inputs.CUDA_ARCH }} + get-protocol-version: + name: Get protocol version runs-on: [matterlabs-ci-runner-high-performance] - strategy: - matrix: - component: - - witness-generator - - prover-gpu-fri - - witness-vector-generator - - circuit-prover-gpu - - prover-fri-gateway - - prover-job-monitor - - proof-fri-gpu-compressor - - prover-autoscaler outputs: protocol_version: ${{ steps.protocolversion.outputs.protocol_version }} steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4 + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 with: submodules: "recursive" @@ -72,130 +60,142 @@ jobs: echo CI=1 >> .env echo IN_DOCKER=1 >> .env - - name: start-services - run: | - echo "IMAGE_TAG_SUFFIX=${{ env.IMAGE_TAG_SUFFIX }}" >> .env - run_retried docker compose pull zk postgres - docker compose up -d zk postgres - ci_run sccache --start-server - - - name: init - run: | - ci_run git config --global --add safe.directory /usr/src/zksync - ci_run git config --global --add 
safe.directory /usr/src/zksync/contracts/system-contracts - ci_run git config --global --add safe.directory /usr/src/zksync/contracts - ci_run zk - - - name: download CRS for GPU compressor - if: matrix.component == 'proof-fri-gpu-compressor' - run: | - ci_run run_retried curl -LO https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^24.key - - - name: login to Docker registries - if: github.event_name != 'pull_request' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/')) - run: | - ci_run docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} - ci_run gcloud auth configure-docker us-docker.pkg.dev -q + - name: setup rust + uses: actions-rust-lang/setup-rust-toolchain@1fbea72663f6d4c03efaab13560c8a24cfd2a7cc # v1.9.0 + with: + toolchain: nightly-2024-08-01 - # We need to run this only when ERA_BELLMAN_CUDA_RELEASE is not available - # In our case it happens only when PR is created from fork - - name: Wait for runner IP to be not rate-limited against GH API - if: inputs.is_pr_from_fork == true + - name: Prepare sccache-cache env vars + shell: bash run: | - api_endpoint="https://api.github.com/users/zksync-era-bot" - wait_time=60 - max_retries=60 - retry_count=0 - - while [[ $retry_count -lt $max_retries ]]; do - response=$(run_retried curl -s -w "%{http_code}" -o temp.json "$api_endpoint") - http_code=$(echo "$response" | tail -n1) - - if [[ "$http_code" == "200" ]]; then - echo "Request successful. Not rate-limited." - cat temp.json - rm temp.json - exit 0 - elif [[ "$http_code" == "403" ]]; then - rate_limit_exceeded=$(jq -r '.message' temp.json | grep -i "API rate limit exceeded") - if [[ -n "$rate_limit_exceeded" ]]; then - retry_count=$((retry_count+1)) - echo "API rate limit exceeded. Retry $retry_count of $max_retries. Retrying in $wait_time seconds..." - sleep $wait_time - else - echo "Request failed with HTTP status $http_code." 
- cat temp.json - rm temp.json - exit 1 - fi - else - echo "Request failed with HTTP status $http_code." - cat temp.json - rm temp.json - exit 1 - fi - done - - echo "Reached the maximum number of retries ($max_retries). Exiting." - rm temp.json - exit 1 + echo SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage >> $GITHUB_ENV + echo SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com >> $GITHUB_ENV + echo SCCACHE_ERROR_LOG=/tmp/sccache_log.txt >> $GITHUB_ENV + echo SCCACHE_GCS_RW_MODE=READ_WRITE >> $GITHUB_ENV + echo RUSTC_WRAPPER=sccache >> $GITHUB_ENV - name: protocol-version id: protocolversion # TODO: use -C flag, when it will become stable. shell: bash run: | - ci_run bash -c "cd prover && cargo build --release --bin prover_version" - PPV=$(ci_run prover/target/release/prover_version) + cd prover + cargo build --release --bin prover_version + PPV=$(target/release/prover_version) echo Protocol version is ${PPV} echo "protocol_version=${PPV}" >> $GITHUB_OUTPUT - echo "PROTOCOL_VERSION=${PPV}" >> $GITHUB_ENV - - - name: update-images - env: - DOCKER_ACTION: ${{ inputs.action }} - COMPONENT: ${{ matrix.component }} - run: | - PASSED_ENV_VARS="ERA_BELLMAN_CUDA_RELEASE,CUDA_ARCH,PROTOCOL_VERSION" \ - ci_run zk docker $DOCKER_ACTION $COMPONENT - - name: Show sccache stats - if: always() - run: | - ci_run sccache --show-stats || true - ci_run cat /tmp/sccache_log.txt || true - - copy-images: - name: Copy images between docker registries - needs: build-images + build-images: + name: Build and Push Docker Images + needs: get-protocol-version env: - IMAGE_TAG_SUFFIX: ${{ inputs.image_tag_suffix }} - PROTOCOL_VERSION: ${{ needs.build-images.outputs.protocol_version }} - # TODO: After migraton switch to CI - runs-on: matterlabs-default-infra-runners - if: ${{ inputs.action == 'push' }} + PROTOCOL_VERSION: ${{ needs.get-protocol-version.outputs.protocol_version }} + runs-on: [matterlabs-ci-runner-high-performance] + permissions: + packages: 
write + contents: read strategy: matrix: - component: - - witness-vector-generator + components: + - witness-generator + - prover-fri-gateway + - prover-job-monitor + - proof-fri-gpu-compressor + - prover-autoscaler + - circuit-prover-gpu steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + with: + submodules: "recursive" + - name: Set up Docker Buildx uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1 - - name: Login to us-central1 GAR + - name: setup-env run: | - gcloud auth print-access-token --lifetime=7200 --impersonate-service-account=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com | docker login -u oauth2accesstoken --password-stdin https://us-docker.pkg.dev + echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV + echo CI=1 >> $GITHUB_ENV + echo $(pwd)/bin >> $GITHUB_PATH + echo CI=1 >> .env + echo IN_DOCKER=1 >> .env - - name: Login and push to Asia GAR + - name: Set env vars + shell: bash run: | - gcloud auth print-access-token --lifetime=7200 --impersonate-service-account=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com | docker login -u oauth2accesstoken --password-stdin https://asia-docker.pkg.dev - docker buildx imagetools create \ - --tag asia-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ needs.build-images.outputs.protocol_version }}-${{ inputs.image_tag_suffix }} \ - us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ needs.build-images.outputs.protocol_version }}-${{ inputs.image_tag_suffix }} + # Support for custom tag suffix + if [ -n "${{ inputs.image_tag_suffix }}" ]; then + echo IMAGE_TAG_SHA_TS="${{ inputs.image_tag_suffix }}" >> $GITHUB_ENV + else + echo IMAGE_TAG_SHA_TS=$(git rev-parse --short HEAD)-$(date +%s) >> $GITHUB_ENV + fi - - name: Login and push to Europe GAR + - name: download CRS for GPU compressor + if: matrix.components == 'proof-fri-gpu-compressor' + run: | + run_retried curl -LO 
https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^24.key + # We need to run this only when ERA_BELLMAN_CUDA_RELEASE is not available + # In our case it happens only when PR is created from fork + - name: Wait for runner IP to be not rate-limited against GH API + if: ( inputs.is_pr_from_fork == true && matrix.components == 'proof-fri-gpu-compressor' ) + run: ./.github/scripts/rate_limit_check.sh + + - name: Hack to set env vars inside docker container + shell: bash + run: | + sed -i '/^FROM matterlabs\/zksync-build-base:latest as builder/a ENV SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage\nENV SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com\nENV SCCACHE_GCS_RW_MODE=READ_WRITE\nENV RUSTC_WRAPPER=sccache' ./docker/${{ matrix.components }}/Dockerfile + #TODO: remove AS version =) + sed -i '/^FROM matterlabs\/zksync-build-base:latest AS builder/a ENV SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage\nENV SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com\nENV SCCACHE_GCS_RW_MODE=READ_WRITE\nENV RUSTC_WRAPPER=sccache' ./docker/${{ matrix.components }}/Dockerfile + cat ./docker/${{ matrix.components }}/Dockerfile + + - name: login to Docker registries + if: github.event_name != 'pull_request' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/')) + shell: bash + run: | + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} + gcloud auth configure-docker us-docker.pkg.dev -q + + - name: Login to GitHub Container Registry + uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 + if: github.event_name != 'pull_request' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/')) + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build and push + uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0 + 
with: + context: . + load: true + build-args: | + CUDA_ARCH=${{ inputs.CUDA_ARCH }} + SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage + SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com + SCCACHE_GCS_RW_MODE=READ_WRITE + RUSTC_WRAPPER=sccache + PROTOCOL_VERSION=${{ env.PROTOCOL_VERSION }} + ERA_BELLMAN_CUDA_RELEASE=${{ inputs.ERA_BELLMAN_CUDA_RELEASE }} + file: docker/${{ matrix.components }}/Dockerfile + tags: | + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ env.IMAGE_TAG_SHA_TS }} + matterlabs/${{ matrix.components }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ env.IMAGE_TAG_SHA_TS }} + ghcr.io/${{ github.repository_owner }}/${{ matrix.components }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ env.IMAGE_TAG_SHA_TS }} + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }} + matterlabs/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }} + ghcr.io/${{ github.repository_owner }}/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }} + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:latest + ghcr.io/${{ github.repository_owner }}/${{ matrix.components }}:latest + matterlabs/${{ matrix.components }}:latest + + - name: Push docker image + if: ${{ inputs.action == 'push' }} run: | - gcloud auth print-access-token --lifetime=7200 --impersonate-service-account=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com | docker login -u oauth2accesstoken --password-stdin https://europe-docker.pkg.dev - docker buildx imagetools create \ - --tag europe-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ needs.build-images.outputs.protocol_version }}-${{ inputs.image_tag_suffix }} \ - us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ needs.build-images.outputs.protocol_version }}-${{ inputs.image_tag_suffix }} + docker push 
us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ env.IMAGE_TAG_SHA_TS }} + docker push matterlabs/${{ matrix.components }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ env.IMAGE_TAG_SHA_TS }} + docker push ghcr.io/${{ github.repository_owner }}/${{ matrix.components }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ env.IMAGE_TAG_SHA_TS }} + docker push us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }} + docker push ghcr.io/${{ github.repository_owner }}/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }} + docker push matterlabs/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }} + docker push us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:latest + docker push ghcr.io/${{ github.repository_owner }}/${{ matrix.components }}:latest + docker push matterlabs/${{ matrix.components }}:latest diff --git a/.github/workflows/build-witness-generator-template.yml b/.github/workflows/build-witness-generator-template.yml index 95053b89d3d8..7b25510ade12 100644 --- a/.github/workflows/build-witness-generator-template.yml +++ b/.github/workflows/build-witness-generator-template.yml @@ -18,14 +18,8 @@ on: type: string required: false action: - description: "Action with docker image" type: string - default: "push" - required: false - is_pr_from_fork: - description: "Indicates whether the workflow is invoked from a PR created from fork" - type: boolean - default: false + default: non-push required: false WITNESS_GENERATOR_RUST_FLAGS: description: "Rust flags for witness_generator compilation" @@ -35,26 +29,16 @@ on: outputs: protocol_version: description: "Protocol version of the binary" - value: ${{ jobs.build-images.outputs.protocol_version }} + value: ${{ jobs.get-protocol-version.outputs.protocol_version }} jobs: - build-images: - name: Build and Push Docker Images - env: - IMAGE_TAG_SUFFIX: ${{ inputs.image_tag_suffix }} - 
RUNNER_COMPOSE_FILE: "docker-compose-runner-nightly.yml" - ERA_BELLMAN_CUDA_RELEASE: ${{ inputs.ERA_BELLMAN_CUDA_RELEASE }} - WITNESS_GENERATOR_RUST_FLAGS: ${{ inputs.WITNESS_GENERATOR_RUST_FLAGS }} - ZKSYNC_USE_CUDA_STUBS: true - runs-on: [matterlabs-ci-runner-c3d] - strategy: - matrix: - component: - - witness-generator + get-protocol-version: + name: Get protocol version + runs-on: [matterlabs-ci-runner-high-performance] outputs: protocol_version: ${{ steps.protocolversion.outputs.protocol_version }} steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4 + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 with: submodules: "recursive" @@ -66,100 +50,103 @@ jobs: echo CI=1 >> .env echo IN_DOCKER=1 >> .env - - name: start-services - run: | - echo "IMAGE_TAG_SUFFIX=${{ env.IMAGE_TAG_SUFFIX }}" >> .env - run_retried docker compose pull zk postgres - docker compose up -d zk postgres - ci_run sccache --start-server - - - name: init - run: | - ci_run git config --global --add safe.directory /usr/src/zksync - ci_run git config --global --add safe.directory /usr/src/zksync/contracts/system-contracts - ci_run git config --global --add safe.directory /usr/src/zksync/contracts - ci_run zk - - - name: download CRS for GPU compressor - if: matrix.component == 'proof-fri-gpu-compressor' - run: | - ci_run run_retried curl -LO https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^24.key - - - name: login to Docker registries - if: github.event_name != 'pull_request' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/')) - run: | - ci_run docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} - ci_run gcloud auth configure-docker us-docker.pkg.dev -q + - name: setup rust + uses: actions-rust-lang/setup-rust-toolchain@1fbea72663f6d4c03efaab13560c8a24cfd2a7cc # v1.9.0 + with: + toolchain: nightly-2024-08-01 - # We need to run this only when ERA_BELLMAN_CUDA_RELEASE 
is not available - # In our case it happens only when PR is created from fork - - name: Wait for runner IP to be not rate-limited against GH API - if: inputs.is_pr_from_fork == true + - name: Prepare sccache-cache env vars + shell: bash run: | - api_endpoint="https://api.github.com/users/zksync-era-bot" - wait_time=60 - max_retries=60 - retry_count=0 - - while [[ $retry_count -lt $max_retries ]]; do - response=$(run_retried curl -s -w "%{http_code}" -o temp.json "$api_endpoint") - http_code=$(echo "$response" | tail -n1) - - if [[ "$http_code" == "200" ]]; then - echo "Request successful. Not rate-limited." - cat temp.json - rm temp.json - exit 0 - elif [[ "$http_code" == "403" ]]; then - rate_limit_exceeded=$(jq -r '.message' temp.json | grep -i "API rate limit exceeded") - if [[ -n "$rate_limit_exceeded" ]]; then - retry_count=$((retry_count+1)) - echo "API rate limit exceeded. Retry $retry_count of $max_retries. Retrying in $wait_time seconds..." - sleep $wait_time - else - echo "Request failed with HTTP status $http_code." - cat temp.json - rm temp.json - exit 1 - fi - else - echo "Request failed with HTTP status $http_code." - cat temp.json - rm temp.json - exit 1 - fi - done - - echo "Reached the maximum number of retries ($max_retries). Exiting." - rm temp.json - exit 1 + echo SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage >> $GITHUB_ENV + echo SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com >> $GITHUB_ENV + echo SCCACHE_ERROR_LOG=/tmp/sccache_log.txt >> $GITHUB_ENV + echo SCCACHE_GCS_RW_MODE=READ_WRITE >> $GITHUB_ENV + echo RUSTC_WRAPPER=sccache >> $GITHUB_ENV - name: protocol-version id: protocolversion # TODO: use -C flag, when it will become stable. 
shell: bash run: | - ci_run bash -c "cd prover && cargo build --release --bin prover_version" - PPV=$(ci_run prover/target/release/prover_version) + cd prover + cargo build --release --bin prover_version + PPV=$(target/release/prover_version) echo Protocol version is ${PPV} echo "protocol_version=${PPV}" >> $GITHUB_OUTPUT - echo "PROTOCOL_VERSION=${PPV}" >> $GITHUB_ENV - - name: setup-rust-flags-env - if: matrix.component == 'witness-generator' + build-images: + name: Build and Push Docker Images + needs: get-protocol-version + permissions: + packages: write + contents: read + env: + PROTOCOL_VERSION: ${{ needs.get-protocol-version.outputs.protocol_version }} + runs-on: [matterlabs-ci-runner-c3d] + strategy: + matrix: + components: + - witness-generator + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + with: + submodules: "recursive" + + - name: setup-env run: | - echo RUST_FLAGS="${{ env.WITNESS_GENERATOR_RUST_FLAGS }}" >> $GITHUB_ENV + echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV + echo CI=1 >> $GITHUB_ENV + echo $(pwd)/bin >> $GITHUB_PATH + echo CI=1 >> .env + echo IN_DOCKER=1 >> .env - - name: update-images - env: - DOCKER_ACTION: ${{ inputs.action }} - COMPONENT: ${{ matrix.component }} + - name: Set env vars + shell: bash run: | - PASSED_ENV_VARS="ERA_BELLMAN_CUDA_RELEASE,PROTOCOL_VERSION,RUST_FLAGS" \ - ci_run zk docker $DOCKER_ACTION $COMPONENT + # Support for custom tag suffix + if [ -n "${{ inputs.image_tag_suffix }}" ]; then + echo IMAGE_TAG_SHA_TS="${{ inputs.image_tag_suffix }}" >> $GITHUB_ENV + else + echo IMAGE_TAG_SHA_TS=$(git rev-parse --short HEAD)-$(date +%s) >> $GITHUB_ENV + fi - - name: Show sccache stats - if: always() + - name: login to Docker registries + if: github.event_name != 'pull_request' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/')) + shell: bash run: | - ci_run sccache --show-stats || true - ci_run cat /tmp/sccache_log.txt || true + docker login -u ${{ 
secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} + gcloud auth configure-docker us-docker.pkg.dev -q + + - name: Login to GitHub Container Registry + uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 + if: github.event_name != 'pull_request' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/')) + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build and push + uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0 + with: + context: . + push: ${{ inputs.action == 'push' }} + build-args: | + SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage + SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com + SCCACHE_GCS_RW_MODE=READ_WRITE + RUSTC_WRAPPER=sccache + PROTOCOL_VERSION=${{ env.PROTOCOL_VERSION }} + ERA_BELLMAN_CUDA_RELEASE=${{ inputs.ERA_BELLMAN_CUDA_RELEASE }} + RUST_FLAGS=${{ inputs.WITNESS_GENERATOR_RUST_FLAGS }} + file: docker/${{ matrix.components }}/Dockerfile + tags: | + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ env.IMAGE_TAG_SHA_TS }} + matterlabs/${{ matrix.components }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ env.IMAGE_TAG_SHA_TS }} + ghcr.io/${{ github.repository_owner }}/${{ matrix.components }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ env.IMAGE_TAG_SHA_TS }} + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }} + ghcr.io/${{ github.repository_owner }}/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }} + matterlabs/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }} + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:latest + ghcr.io/${{ github.repository_owner }}/${{ matrix.components }}:latest + matterlabs/${{ matrix.components }}:latest diff --git a/.github/workflows/cargo-license.yaml 
b/.github/workflows/cargo-license.yaml index 72eb8d0d865b..8b0c095c628c 100644 --- a/.github/workflows/cargo-license.yaml +++ b/.github/workflows/cargo-license.yaml @@ -7,5 +7,6 @@ jobs: - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - uses: EmbarkStudios/cargo-deny-action@8371184bd11e21dcf8ac82ebf8c9c9f74ebf7268 # v2.0.1 with: + manifest-path: "./core/Cargo.toml" command: check command-arguments: "--hide-inclusion-graph" diff --git a/.github/workflows/ci-core-reusable.yml b/.github/workflows/ci-core-reusable.yml index d76bb776968d..f44b1f54dc02 100644 --- a/.github/workflows/ci-core-reusable.yml +++ b/.github/workflows/ci-core-reusable.yml @@ -65,15 +65,12 @@ jobs: - name: Install zkstack run: | ci_run ./zkstack_cli/zkstackup/install -g --path ./zkstack_cli/zkstackup/zkstackup - ci_run zkstackup -g --local + ci_run zkstackup -g --local --cargo-features gateway - name: Build contracts run: | ci_run zkstack dev contracts - - name: Contracts unit tests - run: ci_run yarn l1-contracts test - - name: Download compilers for contract verifier tests run: ci_run zkstack contract-verifier init --zksolc-version=v1.5.3 --zkvyper-version=v1.5.4 --solc-version=0.8.26 --vyper-version=v0.3.10 --era-vm-solc-version=0.8.26-1.0.1 --only --chain era @@ -82,7 +79,7 @@ jobs: ci_run zkstack dev test rust # Benchmarks are not tested by `cargo nextest` unless specified explicitly, and even then `criterion` harness is incompatible # with how `cargo nextest` runs tests. Thus, we run criterion-based benchmark tests manually. 
- ci_run cargo test --release -p vm-benchmark --bench oneshot --bench batch + ci_run cargo test --manifest-path ./core/Cargo.toml --release -p vm-benchmark --bench oneshot --bench batch loadtest: runs-on: [ matterlabs-ci-runner-high-performance ] @@ -131,7 +128,7 @@ jobs: - name: Install zkstack run: | ci_run ./zkstack_cli/zkstackup/install -g --path ./zkstack_cli/zkstackup/zkstackup || true - ci_run zkstackup -g --local + ci_run zkstackup -g --local --cargo-features gateway - name: Create and initialize legacy chain @@ -150,7 +147,7 @@ jobs: --legacy-bridge \ --evm-emulator false - ci_run zkstack ecosystem init --dev --verbose + ci_run zkstack ecosystem init --dev --support-l2-legacy-shared-bridge-test true --verbose # `sleep 60` because we need to wait until server added all the tokens - name: Run server @@ -174,13 +171,22 @@ jobs: integration-tests: runs-on: [ matterlabs-ci-runner-ultra-performance ] + strategy: + # ---------------------------------------------- + # Note, that while the contracts do support gateway chain + # in reality it won't exist for quite some time and so + # we will test both cases here + # ---------------------------------------------- + matrix: + use_gateway_chain: [ "WITH_GATEWAY", "WITHOUT_GATEWAY" ] + # In some cases it's useful to continue one job even if another fails. 
+ fail-fast: false steps: - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4 with: submodules: "recursive" fetch-depth: 0 - - name: Setup environment run: | echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV @@ -200,7 +206,7 @@ jobs: - name: Install zkstack run: | ci_run ./zkstack_cli/zkstackup/install -g --path ./zkstack_cli/zkstackup/zkstackup || true - ci_run zkstackup -g --local + ci_run zkstackup -g --local --cargo-features gateway - name: Create log directories run: | @@ -270,7 +276,8 @@ jobs: --l1-rpc-url=http://localhost:8545 \ --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ --server-db-name=zksync_server_localhost_validium \ - --chain validium + --chain validium \ + --validium-type no-da - name: Create and initialize chain with Custom Token run: | @@ -292,7 +299,8 @@ jobs: --l1-rpc-url=http://localhost:8545 \ --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ --server-db-name=zksync_server_localhost_custom_token \ - --chain custom_token + --chain custom_token \ + --validium-type no-da - name: Create and register chain with transactions signed "offline" run: | @@ -314,7 +322,7 @@ jobs: governor_pk=$(awk '/governor:/ {flag=1} flag && /private_key:/ {print $2; exit}' ./configs/wallets.yaml) ci_run zkstack dev send-transactions \ - --file ./transactions/chain/offline_chain/register-hyperchain-txns.json \ + --file ./transactions/chain/offline_chain/register-zk-chain-txns.json \ --l1-rpc-url http://127.0.0.1:8545 \ --private-key $governor_pk @@ -350,13 +358,67 @@ jobs: --l1-rpc-url=http://localhost:8545 \ --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ --server-db-name=zksync_server_localhost_consensus \ - --chain consensus + --chain consensus \ + --validium-type no-da - name: Export chain list to environment variable run: | CHAINS="era,validium,custom_token,consensus" echo "CHAINS=$CHAINS" >> $GITHUB_ENV + # ---------------------------------------------------------------- + # Only 
create/initialize the gateway chain *if* use_gateway_chain=WITH_GATEWAY + # ---------------------------------------------------------------- + - name: Initialize gateway chain + if: matrix.use_gateway_chain == 'WITH_GATEWAY' + run: | + ci_run zkstack chain create \ + --chain-name gateway \ + --chain-id 505 \ + --prover-mode no-proofs \ + --wallet-creation localhost \ + --l1-batch-commit-data-generator-mode rollup \ + --base-token-address 0x0000000000000000000000000000000000000001 \ + --base-token-price-nominator 1 \ + --base-token-price-denominator 1 \ + --set-as-default false \ + --ignore-prerequisites \ + --evm-emulator false + + ci_run zkstack chain init \ + --deploy-paymaster \ + --l1-rpc-url=http://localhost:8545 \ + --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ + --server-db-name=zksync_server_localhost_gateway \ + --chain gateway \ + --validium-type no-da + + ci_run zkstack chain convert-to-gateway --chain gateway --ignore-prerequisites + + - name: Run gateway + if: matrix.use_gateway_chain == 'WITH_GATEWAY' + run: | + ci_run zkstack server --ignore-prerequisites --chain gateway &> ${{ env.SERVER_LOGS_DIR }}/gateway.log & + ci_run zkstack server wait --ignore-prerequisites --verbose --chain gateway + + - name: Migrate chains to gateway + if: matrix.use_gateway_chain == 'WITH_GATEWAY' + run: | + ci_run zkstack chain migrate-to-gateway --chain era --gateway-chain-name gateway + ci_run zkstack chain migrate-to-gateway --chain validium --gateway-chain-name gateway + ci_run zkstack chain migrate-to-gateway --chain custom_token --gateway-chain-name gateway + ci_run zkstack chain migrate-to-gateway --chain consensus --gateway-chain-name gateway + + - name: Migrate back era + if: matrix.use_gateway_chain == 'WITH_GATEWAY' + run: | + ci_run zkstack chain migrate-from-gateway --chain era --gateway-chain-name gateway + + - name: Migrate to gateway again + if: matrix.use_gateway_chain == 'WITH_GATEWAY' + run: | + ci_run zkstack chain 
migrate-to-gateway --chain era --gateway-chain-name gateway + - name: Build test dependencies run: | ci_run zkstack dev test build @@ -402,20 +464,22 @@ jobs: - name: Init external nodes run: | + GATEWAY_RPC_URL="${{ matrix.use_gateway_chain == 'WITH_GATEWAY' && '--gateway-rpc-url=http://localhost:3550' || '' }}" + ci_run zkstack external-node configs --db-url=postgres://postgres:notsecurepassword@localhost:5432 \ - --db-name=zksync_en_localhost_era_rollup --l1-rpc-url=http://localhost:8545 --chain era + --db-name=zksync_en_localhost_era_rollup --l1-rpc-url=http://localhost:8545 $GATEWAY_RPC_URL --chain era ci_run zkstack external-node init --ignore-prerequisites --chain era ci_run zkstack external-node configs --db-url=postgres://postgres:notsecurepassword@localhost:5432 \ - --db-name=zksync_en_localhost_era_validium1 --l1-rpc-url=http://localhost:8545 --chain validium + --db-name=zksync_en_localhost_era_validium1 --l1-rpc-url=http://localhost:8545 $GATEWAY_RPC_URL --chain validium ci_run zkstack external-node init --ignore-prerequisites --chain validium ci_run zkstack external-node configs --db-url=postgres://postgres:notsecurepassword@localhost:5432 \ - --db-name=zksync_en_localhost_era_custom_token --l1-rpc-url=http://localhost:8545 --chain custom_token + --db-name=zksync_en_localhost_era_custom_token --l1-rpc-url=http://localhost:8545 $GATEWAY_RPC_URL --chain custom_token ci_run zkstack external-node init --ignore-prerequisites --chain custom_token ci_run zkstack external-node configs --db-url=postgres://postgres:notsecurepassword@localhost:5432 \ - --db-name=zksync_en_localhost_era_consensus --l1-rpc-url=http://localhost:8545 --chain consensus + --db-name=zksync_en_localhost_era_consensus --l1-rpc-url=http://localhost:8545 $GATEWAY_RPC_URL --chain consensus ci_run zkstack external-node init --ignore-prerequisites --chain consensus - name: Run recovery tests (from snapshot) @@ -432,7 +496,7 @@ jobs: ci_run zkstack external-node run --ignore-prerequisites 
--chain validium &> ${{ env.EXTERNAL_NODE_LOGS_DIR }}/validium.log & ci_run zkstack external-node run --ignore-prerequisites --chain custom_token &> ${{ env.EXTERNAL_NODE_LOGS_DIR }}/custom_token.log & ci_run zkstack external-node run --ignore-prerequisites --chain consensus --enable-consensus &> ${{ env.EXTERNAL_NODE_LOGS_DIR }}/consensus.log & - + ci_run zkstack external-node wait --ignore-prerequisites --verbose --chain era ci_run zkstack external-node wait --ignore-prerequisites --verbose --chain validium ci_run zkstack external-node wait --ignore-prerequisites --verbose --chain custom_token @@ -445,6 +509,14 @@ jobs: - name: Fee projection tests run: | ci_run killall -INT zksync_server || true + + # Only start & wait for the gateway server if use_gateway_chain == WITH_GATEWAY + if [ "${{ matrix.use_gateway_chain }}" == "WITH_GATEWAY" ]; then + ci_run zkstack server --ignore-prerequisites --chain gateway &> ${{ env.SERVER_LOGS_DIR }}/gateway.log & + ci_run zkstack server wait --ignore-prerequisites --verbose --chain gateway + fi + + # Always run the chain-specific fee tests ci_run ./bin/run_on_all_chains.sh "zkstack dev test fees --no-deps --no-kill" ${{ env.CHAINS }} ${{ env.FEES_LOGS_DIR }} - name: Run revert tests @@ -452,6 +524,13 @@ jobs: ci_run killall -INT zksync_server || true ci_run killall -INT zksync_external_node || true + # Only start & wait for the gateway server if use_gateway_chain == WITH_GATEWAY + if [ "${{ matrix.use_gateway_chain }}" == "WITH_GATEWAY" ]; then + ci_run zkstack server --ignore-prerequisites --chain gateway &> ${{ env.SERVER_LOGS_DIR }}/gateway.log & + ci_run zkstack server wait --ignore-prerequisites --verbose --chain gateway + fi + + # Always run the chain-specific revert tests ci_run ./bin/run_on_all_chains.sh "zkstack dev test revert --no-deps --external-node --no-kill --ignore-prerequisites" ${{ env.CHAINS }} ${{ env.INTEGRATION_TESTS_LOGS_DIR }} # Upgrade tests should run last, because as soon as they @@ -459,12 +538,20 
@@ jobs: # TODO make upgrade tests safe to run multiple times - name: Run upgrade test run: | - ci_run zkstack dev test upgrade --no-deps --chain era + ci_run killall -INT zksync_server || true + # Only start & wait for the gateway server if use_gateway_chain == WITH_GATEWAY + if [ "${{ matrix.use_gateway_chain }}" == "WITH_GATEWAY" ]; then + ci_run zkstack server --ignore-prerequisites --chain gateway &> ${{ env.SERVER_LOGS_DIR }}/gateway.log & + ci_run zkstack server wait --ignore-prerequisites --verbose --chain gateway + fi + + # Always run the upgrade test against era + ci_run zkstack dev test upgrade --no-deps --chain era - name: Upload logs uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0 if: always() with: - name: logs + name: logs_${{matrix.use_gateway_chain}} path: logs diff --git a/.github/workflows/ci-prover-e2e.yml b/.github/workflows/ci-prover-e2e.yml index 77d3e2da5758..0a09aee51315 100644 --- a/.github/workflows/ci-prover-e2e.yml +++ b/.github/workflows/ci-prover-e2e.yml @@ -43,10 +43,19 @@ jobs: run: | git fetch # Checkout the commit with the DualVerifier contract to test FFLONK interface - git checkout b4d5b984 + git checkout bcdd1cb05e8f4d9ec2dd41e2cc668cdfe30ee535 git submodule update --init --recursive git rev-parse HEAD + - name: Set new genesis for fflonk + # Note, that while `Verifier` is not explicitly a part of the genensis state, + # it affects it indirectly as it is a part of the repo. 
+ working-directory: ./etc/env/file_based + if: matrix.compressor-mode == 'fflonk' + run: | + sudo sed -i 's/^genesis_root: .*/genesis_root: 0xc3fa60b6769a0c2f222053d7cbd1d6f63be7777e3c8d029cbd61cc075526ab81/' genesis.yaml + sudo sed -i "s/^genesis_batch_commitment: .*/genesis_batch_commitment: 0x17689e705b5749ed0bbd53c845988d17c419697c2cb29eabab8785f1cb775b4a/" genesis.yaml + - name: Init run: | ci_run chmod -R +x ./bin @@ -88,13 +97,13 @@ jobs: - name: Run server run: | ci_run zkstack server --uring --chain=proving_chain --components=api,tree,eth,state_keeper,commitment_generator,proof_data_handler,vm_runner_protective_reads,vm_runner_bwip &>prover_logs_${{matrix.compressor-mode}}/server.log & - - name: Run Gateway + - name: Run prover gateway run: | ci_run zkstack prover run --component=gateway --docker=false &>prover_logs_${{matrix.compressor-mode}}/gateway.log & - name: Run Prover Job Monitor run: | ci_run zkstack prover run --component=prover-job-monitor --docker=false &>prover_logs_${{matrix.compressor-mode}}/prover-job-monitor.log & - - name: Wait for batch to be passed through gateway + - name: Wait for batch to be passed through prover gateway env: DATABASE_URL: postgres://postgres:notsecurepassword@localhost:5432/zksync_prover_localhost_proving_chain BATCH_NUMBER: 1 @@ -126,10 +135,10 @@ jobs: - name: Wait for batch to be executed on L1 env: - DATABASE_URL: postgres://postgres:notsecurepassword@localhost:5432/zksync_prover_localhost_proving_chain - BATCH_NUMBER: 1 - INTERVAL: 30 - TIMEOUT: 1200 + DATABASE_URL: postgres://postgres:notsecurepassword@localhost:5432/zksync_prover_localhost_proving_chain + BATCH_NUMBER: 1 + INTERVAL: 30 + TIMEOUT: 1200 run: | PASSED_ENV_VARS="BATCH_NUMBER,DATABASE_URL,URL,INTERVAL,TIMEOUT" \ ci_run ./bin/prover_checkers/batch_l1_status_checker diff --git a/.github/workflows/ci-prover-reusable.yml b/.github/workflows/ci-prover-reusable.yml index 7f719b2240db..26679cb2232f 100644 --- a/.github/workflows/ci-prover-reusable.yml 
+++ b/.github/workflows/ci-prover-reusable.yml @@ -1,6 +1,7 @@ name: Workflow template for CI jobs for Prover Components on: workflow_call: + jobs: lint: runs-on: [ matterlabs-ci-runner-highmem-long ] diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 849fccc2e22c..a0b7ee1bc40e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -23,6 +23,7 @@ jobs: docs: ${{ steps.changed-files.outputs.docs_any_changed }} all: ${{ steps.changed-files.outputs.all_any_changed }} steps: + - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4 with: fetch-depth: 2 @@ -41,9 +42,7 @@ jobs: - '!prover/extract-setup-data-keys.sh' - 'docker/prover*/**' - '.github/workflows/build-prover-template.yml' - - '.github/workflows/new-build-prover-template.yml' - '.github/workflows/build-witness-generator-template.yml' - - '.github/workflows/new-build-witness-generator-template.yml' - '.github/workflows/ci-prover-reusable.yml' - 'docker-compose-runner-nightly.yml' - '!**/*.md' @@ -55,13 +54,11 @@ jobs: - 'docker/external-node/**' - 'docker/server/**' - '.github/workflows/build-core-template.yml' - - '.github/workflows/new-build-core-template.yml' - '.github/workflows/build-contract-verifier-template.yml' - - '.github/workflows/new-build-contract-verifier-template.yml' - '.github/workflows/ci-core-reusable.yml' - '.github/workflows/ci-core-lint-reusable.yml' - - 'Cargo.toml' - - 'Cargo.lock' + - './core/Cargo.toml' + - './core/Cargo.lock' - 'zkstack_cli/**' - '!**/*.md' - '!**/*.MD' @@ -121,7 +118,7 @@ jobs: name: Build core images needs: changed_files if: ${{ (needs.changed_files.outputs.core == 'true' || needs.changed_files.outputs.all == 'true') && !contains(github.ref_name, 'release-please--branches') }} - uses: ./.github/workflows/new-build-core-template.yml + uses: ./.github/workflows/build-core-template.yml with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} action: "build" @@ -146,7 +143,7 @@ jobs: name: Build contract 
verifier needs: changed_files if: ${{ (needs.changed_files.outputs.core == 'true' || needs.changed_files.outputs.all == 'true') && !contains(github.ref_name, 'release-please--branches') }} - uses: ./.github/workflows/new-build-contract-verifier-template.yml + uses: ./.github/workflows/build-contract-verifier-template.yml with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} action: "build" @@ -158,7 +155,7 @@ jobs: name: Build prover images needs: changed_files if: ${{ (needs.changed_files.outputs.prover == 'true' || needs.changed_files.outputs.all == 'true') && !contains(github.ref_name, 'release-please--branches') }} - uses: ./.github/workflows/new-build-prover-template.yml + uses: ./.github/workflows/build-prover-template.yml with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} action: "build" @@ -172,7 +169,7 @@ jobs: name: Build prover images with avx512 instructions needs: changed_files if: ${{ (needs.changed_files.outputs.prover == 'true' || needs.changed_files.outputs.all == 'true') && !contains(github.ref_name, 'release-please--branches') }} - uses: ./.github/workflows/new-build-witness-generator-template.yml + uses: ./.github/workflows/build-witness-generator-template.yml with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }}-avx512 action: "build" diff --git a/.github/workflows/new-build-contract-verifier-template.yml b/.github/workflows/new-build-contract-verifier-template.yml deleted file mode 100644 index 7d75f81fb73c..000000000000 --- a/.github/workflows/new-build-contract-verifier-template.yml +++ /dev/null @@ -1,270 +0,0 @@ -name: Build contract verifier -on: - workflow_call: - secrets: - DOCKERHUB_USER: - description: "DOCKERHUB_USER" - required: true - DOCKERHUB_TOKEN: - description: "DOCKERHUB_TOKEN" - required: true - inputs: - image_tag_suffix: - description: "Optional suffix to override tag name generation" - type: string - required: false - compilers: - description: 'JSON of required compilers and 
their versions' - type: string - required: false - default: '[{ "zksolc": ["1.3.14", "1.3.16", "1.3.17", "1.3.1", "1.3.7", "1.3.18", "1.3.19", "1.3.21"] } , { "zkvyper": ["1.3.13"] }]' - action: - type: string - default: non-push - required: false - -jobs: - prepare-contracts: - name: Prepare contracts - runs-on: matterlabs-ci-runner-high-performance - steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - with: - submodules: "recursive" - - - name: Prepare ENV - shell: bash - run: | - echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV - echo CI=1 >> $GITHUB_ENV - echo $(pwd)/bin >> $GITHUB_PATH - echo $HOME/.local/bin >> $GITHUB_PATH - echo CI=1 >> .env - echo IN_DOCKER=1 >> .env - - - name: Download contracts - shell: bash - run: | - commit_sha=$(git submodule status contracts | awk '{print $1}' | tr -d '-') - page=1 - filtered_tag="" - while [ true ]; do - echo "Page: $page" - tags=$(run_retried curl -s -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github+json" \ - "https://api.github.com/repos/matter-labs/era-contracts/tags?per_page=100&page=${page}" | jq .) - if [ $(jq length <<<"$tags") -eq 0 ]; then - echo "No tag found on all pages." - echo "BUILD_CONTRACTS=true" >> "$GITHUB_ENV" - exit 0 - fi - filtered_tag=$(jq -r --arg commit_sha "$commit_sha" 'map(select(.commit.sha == $commit_sha)) | .[].name' <<<"$tags") - if [[ ! 
-z "$filtered_tag" ]]; then - echo "BUILD_CONTRACTS=false" >> "$GITHUB_ENV" - break - fi - ((page++)) - done - echo "Contracts tag is: ${filtered_tag}" - mkdir -p ./contracts - run_retried curl -s -LO https://github.com/matter-labs/era-contracts/releases/download/${filtered_tag}/l1-contracts.tar.gz - run_retried curl -s -LO https://github.com/matter-labs/era-contracts/releases/download/${filtered_tag}/l2-contracts.tar.gz - run_retried curl -s -LO https://github.com/matter-labs/era-contracts/releases/download/${filtered_tag}/system-contracts.tar.gz - tar -C ./contracts -zxf l1-contracts.tar.gz - tar -C ./contracts -zxf l2-contracts.tar.gz - tar -C ./contracts -zxf system-contracts.tar.gz - - - name: Install Apt dependencies - if: env.BUILD_CONTRACTS == 'true' - shell: bash - run: | - sudo apt-get update && sudo apt-get install -y libssl-dev pkg-config - - - name: Install Node - if: env.BUILD_CONTRACTS == 'true' - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3 - with: - node-version: 20 - cache: 'npm' - - - name: Install Yarn - if: env.BUILD_CONTRACTS == 'true' - run: npm install -g yarn - - - name: Setup rust - if: env.BUILD_CONTRACTS == 'true' - uses: actions-rust-lang/setup-rust-toolchain@1fbea72663f6d4c03efaab13560c8a24cfd2a7cc # v1.9.0 - with: - toolchain: nightly-2024-08-01 - - - name: Install foundry-zksync - if: env.BUILD_CONTRACTS == 'true' - run: | - mkdir ./foundry-zksync - curl -LO https://github.com/matter-labs/foundry-zksync/releases/download/nightly-15bec2f861b3b4c71e58f85e2b2c9dd722585aa8/foundry_nightly_linux_amd64.tar.gz - tar zxf foundry_nightly_linux_amd64.tar.gz -C ./foundry-zksync - chmod +x ./foundry-zksync/forge ./foundry-zksync/cast - echo "$PWD/foundry-zksync" >> $GITHUB_PATH - - - name: Pre-download compilers - if: env.BUILD_CONTRACTS == 'true' - shell: bash - run: | - # Download needed versions of vyper compiler - # Not sanitized due to unconventional path and tags - mkdir -p 
./hardhat-nodejs/compilers-v2/vyper/linux - wget -nv -O ./hardhat-nodejs/compilers-v2/vyper/linux/0.3.10 https://github.com/vyperlang/vyper/releases/download/v0.3.10/vyper.0.3.10+commit.91361694.linux - wget -nv -O ./hardhat-nodejs/compilers-v2/vyper/linux/0.3.3 https://github.com/vyperlang/vyper/releases/download/v0.3.3/vyper.0.3.3+commit.48e326f0.linux - chmod +x ./hardhat-nodejs/compilers-v2/vyper/linux/0.3.10 - chmod +x ./hardhat-nodejs/compilers-v2/vyper/linux/0.3.3 - - COMPILERS_JSON='${{ inputs.compilers }}' - echo "$COMPILERS_JSON" | jq -r '.[] | to_entries[] | .key as $compiler | .value[] | "\(.),\($compiler)"' | while IFS=, read -r version compiler; do - mkdir -p "./hardhat-nodejs/compilers-v2/$compiler" - wget -nv -O "./hardhat-nodejs/compilers-v2/$compiler/${compiler}-v${version}" "https://github.com/matter-labs/${compiler}-bin/releases/download/v${version}/${compiler}-linux-amd64-musl-v${version}" - chmod +x "./hardhat-nodejs/compilers-v2/$compiler/${compiler}-v${version}" - done - - - name: Install zkstack - if: env.BUILD_CONTRACTS == 'true' - run: | - ./zkstack_cli/zkstackup/install --path ./zkstack_cli/zkstackup/zkstackup - zkstackup --local || true - - - name: build contracts - if: env.BUILD_CONTRACTS == 'true' - shell: bash - run: | - cp etc/tokens/{test,localhost}.json - zkstack dev contracts - - - name: Upload contracts - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0 - with: - name: contacts-verifier - path: | - ./contracts - - build-images: - name: Build and Push Docker Images - needs: prepare-contracts - runs-on: ${{ fromJSON('["matterlabs-ci-runner-high-performance", "matterlabs-ci-runner-arm"]')[contains(matrix.platforms, 'arm')] }} - strategy: - matrix: - components: - - contract-verifier - - verified-sources-fetcher - platforms: - - linux/amd64 - - steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - with: - submodules: "recursive" - - - name: Set up Docker Buildx - uses: 
docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1 - - - name: Setup env - shell: bash - run: | - echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV - echo CI=1 >> $GITHUB_ENV - echo $(pwd)/bin >> $GITHUB_PATH - echo CI=1 >> .env - echo IN_DOCKER=1 >> .env - - - name: Download setup key - shell: bash - run: | - if [ -f "/setup_2^26.key" ]; then - cp '/setup_2^26.key' './setup_2^26.key' - else - run_retried curl -LO https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^26.key - fi - - - name: Set env vars - shell: bash - run: | - echo PLATFORM=$(echo ${{ matrix.platforms }} | tr '/' '-') >> $GITHUB_ENV - echo IMAGE_TAG_SHA=$(git rev-parse --short HEAD) >> $GITHUB_ENV - # Support for custom tag suffix - if [ -n "${{ inputs.image_tag_suffix }}" ]; then - echo IMAGE_TAG_SHA_TS="${{ inputs.image_tag_suffix }}" >> $GITHUB_ENV - else - echo IMAGE_TAG_SHA_TS=$(git rev-parse --short HEAD)-$(date +%s) >> $GITHUB_ENV - fi - - - name: Download contracts - uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - with: - name: contacts-verifier - path: | - ./contracts - - - name: login to Docker registries - if: ${{ inputs.action == 'push' }} - shell: bash - run: | - docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} - gcloud auth configure-docker us-docker.pkg.dev -q - - - name: Build and push - uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0 - with: - context: . 
- load: true - platforms: ${{ matrix.platforms }} - file: docker/${{ matrix.components }}/Dockerfile - build-args: | - SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage - SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com - SCCACHE_GCS_RW_MODE=READ_WRITE - RUSTC_WRAPPER=sccache - tags: | - us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }} - matterlabs/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }} - - - name: Push docker image - if: ${{ inputs.action == 'push' }} - run: | - docker push us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }} - docker push matterlabs/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }} - - - create_manifest: - name: Create release manifest - runs-on: matterlabs-ci-runner - needs: build-images - if: ${{ inputs.action == 'push' }} - strategy: - matrix: - component: - - name: contract-verifier - platform: linux/amd64 - - name: verified-sources-fetcher - platform: linux/amd64 - env: - IMAGE_TAG_SUFFIX: ${{ inputs.image_tag_suffix }} - steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4 - - - name: login to Docker registries - run: | - docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} - gcloud auth configure-docker us-docker.pkg.dev -q - - - name: Create Docker manifest - run: | - docker_repositories=("matterlabs/${{ matrix.component.name }}" "us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component.name }}") - platforms=${{ matrix.component.platform }} - for repo in "${docker_repositories[@]}"; do - platform_tags="" - for platform in ${platforms//,/ }; do - platform=$(echo $platform | tr '/' '-') - platform_tags+=" --amend ${repo}:${IMAGE_TAG_SUFFIX}-${platform}" - done - for manifest in "${repo}:${IMAGE_TAG_SUFFIX}" 
"${repo}:2.0-${IMAGE_TAG_SUFFIX}" "${repo}:latest" "${repo}:latest2.0"; do - docker manifest create ${manifest} ${platform_tags} - docker manifest push ${manifest} - done - done diff --git a/.github/workflows/new-build-core-template.yml b/.github/workflows/new-build-core-template.yml deleted file mode 100644 index 557d8455a31d..000000000000 --- a/.github/workflows/new-build-core-template.yml +++ /dev/null @@ -1,281 +0,0 @@ -name: Build Core images -on: - workflow_call: - secrets: - DOCKERHUB_USER: - description: "DOCKERHUB_USER" - required: true - DOCKERHUB_TOKEN: - description: "DOCKERHUB_TOKEN" - required: true - inputs: - image_tag_suffix: - description: "Optional suffix to override tag name generation" - type: string - required: false - compilers: - description: 'JSON of required compilers and their versions' - type: string - required: false - default: '[{ "zksolc": ["1.3.14", "1.3.16", "1.3.17", "1.3.1", "1.3.7", "1.3.18", "1.3.19", "1.3.21"] } , { "zkvyper": ["1.3.13"] }]' - en_alpha_release: - description: 'Flag that determins if EN release should be marked as alpha' - type: boolean - required: false - default: false - action: - type: string - required: false - default: "do nothing" - -jobs: - prepare-contracts: - name: Prepare contracts - runs-on: matterlabs-ci-runner-high-performance - steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - with: - submodules: "recursive" - - - name: Prepare ENV - shell: bash - run: | - echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV - echo CI=1 >> $GITHUB_ENV - echo $(pwd)/bin >> $GITHUB_PATH - echo $HOME/.local/bin >> $GITHUB_PATH - echo CI=1 >> .env - echo IN_DOCKER=1 >> .env - - - name: Download contracts - shell: bash - run: | - commit_sha=$(git submodule status contracts | awk '{print $1}' | tr -d '-') - page=1 - filtered_tag="" - while [ true ]; do - echo "Page: $page" - tags=$(run_retried curl -s -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github+json" \ - 
"https://api.github.com/repos/matter-labs/era-contracts/tags?per_page=100&page=${page}" | jq .) - if [ $(jq length <<<"$tags") -eq 0 ]; then - echo "No tag found on all pages." - echo "BUILD_CONTRACTS=true" >> "$GITHUB_ENV" - exit 0 - fi - filtered_tag=$(jq -r --arg commit_sha "$commit_sha" 'map(select(.commit.sha == $commit_sha)) | .[].name' <<<"$tags") - if [[ ! -z "$filtered_tag" ]]; then - echo "BUILD_CONTRACTS=false" >> "$GITHUB_ENV" - break - fi - ((page++)) - done - echo "Contracts tag is: ${filtered_tag}" - mkdir -p ./contracts - run_retried curl -s -LO https://github.com/matter-labs/era-contracts/releases/download/${filtered_tag}/l1-contracts.tar.gz - run_retried curl -s -LO https://github.com/matter-labs/era-contracts/releases/download/${filtered_tag}/l2-contracts.tar.gz - run_retried curl -s -LO https://github.com/matter-labs/era-contracts/releases/download/${filtered_tag}/system-contracts.tar.gz - tar -C ./contracts -zxf l1-contracts.tar.gz - tar -C ./contracts -zxf l2-contracts.tar.gz - tar -C ./contracts -zxf system-contracts.tar.gz - - - name: Install Apt dependencies - if: env.BUILD_CONTRACTS == 'true' - shell: bash - run: | - sudo apt-get update && sudo apt-get install -y libssl-dev pkg-config - - - name: Install Node - if: env.BUILD_CONTRACTS == 'true' - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3 - with: - node-version: 20 - cache: 'npm' - - - name: Install Yarn - if: env.BUILD_CONTRACTS == 'true' - run: npm install -g yarn - - - name: Setup rust - if: env.BUILD_CONTRACTS == 'true' - uses: actions-rust-lang/setup-rust-toolchain@1fbea72663f6d4c03efaab13560c8a24cfd2a7cc # v1.9.0 - with: - toolchain: nightly-2024-08-01 - - - name: Install foundry-zksync - if: env.BUILD_CONTRACTS == 'true' - run: | - mkdir ./foundry-zksync - curl -LO https://github.com/matter-labs/foundry-zksync/releases/download/nightly-15bec2f861b3b4c71e58f85e2b2c9dd722585aa8/foundry_nightly_linux_amd64.tar.gz - tar zxf 
foundry_nightly_linux_amd64.tar.gz -C ./foundry-zksync - chmod +x ./foundry-zksync/forge ./foundry-zksync/cast - echo "$PWD/foundry-zksync" >> $GITHUB_PATH - - - name: Pre-download compilers - if: env.BUILD_CONTRACTS == 'true' - shell: bash - run: | - # Download needed versions of vyper compiler - # Not sanitized due to unconventional path and tags - mkdir -p ./hardhat-nodejs/compilers-v2/vyper/linux - wget -nv -O ./hardhat-nodejs/compilers-v2/vyper/linux/0.3.10 https://github.com/vyperlang/vyper/releases/download/v0.3.10/vyper.0.3.10+commit.91361694.linux - wget -nv -O ./hardhat-nodejs/compilers-v2/vyper/linux/0.3.3 https://github.com/vyperlang/vyper/releases/download/v0.3.3/vyper.0.3.3+commit.48e326f0.linux - chmod +x ./hardhat-nodejs/compilers-v2/vyper/linux/0.3.10 - chmod +x ./hardhat-nodejs/compilers-v2/vyper/linux/0.3.3 - - COMPILERS_JSON='${{ inputs.compilers }}' - echo "$COMPILERS_JSON" | jq -r '.[] | to_entries[] | .key as $compiler | .value[] | "\(.),\($compiler)"' | while IFS=, read -r version compiler; do - mkdir -p "./hardhat-nodejs/compilers-v2/$compiler" - wget -nv -O "./hardhat-nodejs/compilers-v2/$compiler/${compiler}-v${version}" "https://github.com/matter-labs/${compiler}-bin/releases/download/v${version}/${compiler}-linux-amd64-musl-v${version}" - chmod +x "./hardhat-nodejs/compilers-v2/$compiler/${compiler}-v${version}" - done - - - name: Install zkstack - if: env.BUILD_CONTRACTS == 'true' - run: | - ./zkstack_cli/zkstackup/install --path ./zkstack_cli/zkstackup/zkstackup - zkstackup --local || true - - - name: build contracts - if: env.BUILD_CONTRACTS == 'true' - shell: bash - run: | - cp etc/tokens/{test,localhost}.json - zkstack dev contracts - - - name: Upload contracts - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0 - with: - name: contacts - path: | - ./contracts - - build-images: - name: Build and Push Docker Images - needs: prepare-contracts - env: - IMAGE_TAG_SUFFIX: ${{ inputs.image_tag_suffix }}${{ 
(inputs.en_alpha_release && matrix.components == 'external-node') && '-alpha' || '' }} - runs-on: ${{ fromJSON('["matterlabs-ci-runner-high-performance", "matterlabs-ci-runner-arm"]')[contains(matrix.platforms, 'arm')] }} - strategy: - matrix: - components: - - server-v2 - - external-node - - snapshots-creator - platforms: - - linux/amd64 - include: - - components: external-node - platforms: linux/arm64 - - steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - with: - submodules: "recursive" - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1 - - - name: Setup env - shell: bash - run: | - echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV - echo CI=1 >> $GITHUB_ENV - echo $(pwd)/bin >> $GITHUB_PATH - echo CI=1 >> .env - echo IN_DOCKER=1 >> .env - - - name: Download setup key - shell: bash - run: | - if [ -f "/setup_2^26.key" ]; then - cp '/setup_2^26.key' './setup_2^26.key' - else - run_retried curl -LO https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^26.key - fi - - - name: Set env vars - shell: bash - run: | - echo PLATFORM=$(echo ${{ matrix.platforms }} | tr '/' '-') >> $GITHUB_ENV - if [ -n "${{ inputs.image_tag_suffix }}" ]; then - echo IMAGE_TAG_SHA_TS="${{ env.IMAGE_TAG_SUFFIX }}" >> $GITHUB_ENV - else - echo IMAGE_TAG_SHA_TS=$(git rev-parse --short HEAD)-$(date +%s) >> $GITHUB_ENV - fi - - - name: Download contracts - uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 - with: - name: contacts - path: | - ./contracts - - - name: login to Docker registries - if: ${{ inputs.action == 'push' }} - shell: bash - run: | - docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} - gcloud auth configure-docker us-docker.pkg.dev -q - - - name: Build docker image - uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0 - with: - context: . 
- load: true - platforms: ${{ matrix.platforms }} - file: docker/${{ matrix.components }}/Dockerfile - build-args: | - SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage - SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com - SCCACHE_GCS_RW_MODE=READ_WRITE - RUSTC_WRAPPER=sccache - tags: | - us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }} - matterlabs/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }} - - - name: Push docker image - if: ${{ inputs.action == 'push' }} - run: | - docker push us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }} - docker push matterlabs/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }} - - create_manifest: - name: Create release manifest - runs-on: matterlabs-ci-runner - needs: build-images - if: ${{ inputs.action == 'push' }} - strategy: - matrix: - component: - - name: server-v2 - platform: linux/amd64 - - name: external-node - platform: linux/amd64,linux/arm64 - - name: snapshots-creator - platform: linux/amd64 - - env: - IMAGE_TAG_SUFFIX: ${{ inputs.image_tag_suffix }}${{ (inputs.en_alpha_release && matrix.component.name == 'external-node') && '-alpha' || '' }} - steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4 - - - name: login to Docker registries - run: | - docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} - gcloud auth configure-docker us-docker.pkg.dev -q - - - name: Create Docker manifest - run: | - docker_repositories=("matterlabs/${{ matrix.component.name }}" "us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component.name }}") - platforms=${{ matrix.component.platform }} - for repo in "${docker_repositories[@]}"; do - platform_tags="" - for platform in ${platforms//,/ }; do - platform=$(echo $platform | tr '/' '-') - 
platform_tags+=" --amend ${repo}:${IMAGE_TAG_SUFFIX}-${platform}" - done - for manifest in "${repo}:${IMAGE_TAG_SUFFIX}" "${repo}:2.0-${IMAGE_TAG_SUFFIX}" "${repo}:latest" "${repo}:latest2.0"; do - docker manifest create ${manifest} ${platform_tags} - docker manifest push ${manifest} - done - done diff --git a/.github/workflows/new-build-prover-template.yml b/.github/workflows/new-build-prover-template.yml deleted file mode 100644 index 3a721e4425a8..000000000000 --- a/.github/workflows/new-build-prover-template.yml +++ /dev/null @@ -1,225 +0,0 @@ -name: Build Prover images -on: - workflow_call: - secrets: - DOCKERHUB_USER: - description: "DOCKERHUB_USER" - required: true - DOCKERHUB_TOKEN: - description: "DOCKERHUB_TOKEN" - required: true - inputs: - ERA_BELLMAN_CUDA_RELEASE: - description: "ERA_BELLMAN_CUDA_RELEASE" - type: string - required: true - image_tag_suffix: - description: "Optional suffix to override tag name generation" - type: string - required: false - action: - description: "Action with docker image" - type: string - default: "push" - required: false - is_pr_from_fork: - description: "Indicates whether the workflow is invoked from a PR created from fork" - type: boolean - default: false - required: false - CUDA_ARCH: - description: "CUDA Arch to build" - type: string - default: "75;80;89" - required: false - # Details: https://arnon.dk/matching-sm-architectures-arch-and-gencode-for-various-nvidia-cards/ - # L4: 89 - # T4: 75 - # A100: 80 - outputs: - protocol_version: - description: "Protocol version of the binary" - value: ${{ jobs.get-protocol-version.outputs.protocol_version }} - -jobs: - get-protocol-version: - name: Get protocol version - runs-on: [matterlabs-ci-runner-high-performance] - outputs: - protocol_version: ${{ steps.protocolversion.outputs.protocol_version }} - steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - with: - submodules: "recursive" - - - name: setup-env - run: | - echo ZKSYNC_HOME=$(pwd) 
>> $GITHUB_ENV - echo CI=1 >> $GITHUB_ENV - echo $(pwd)/bin >> $GITHUB_PATH - echo CI=1 >> .env - echo IN_DOCKER=1 >> .env - - - name: setup rust - uses: actions-rust-lang/setup-rust-toolchain@1fbea72663f6d4c03efaab13560c8a24cfd2a7cc # v1.9.0 - with: - toolchain: nightly-2024-08-01 - - - name: Prepare sccache-cache env vars - shell: bash - run: | - echo SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage >> $GITHUB_ENV - echo SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com >> $GITHUB_ENV - echo SCCACHE_ERROR_LOG=/tmp/sccache_log.txt >> $GITHUB_ENV - echo SCCACHE_GCS_RW_MODE=READ_WRITE >> $GITHUB_ENV - echo RUSTC_WRAPPER=sccache >> $GITHUB_ENV - - - name: protocol-version - id: protocolversion - # TODO: use -C flag, when it will become stable. - shell: bash - run: | - cd prover - cargo build --release --bin prover_version - PPV=$(target/release/prover_version) - echo Protocol version is ${PPV} - echo "protocol_version=${PPV}" >> $GITHUB_OUTPUT - - build-images: - name: Build and Push Docker Images - needs: get-protocol-version - env: - PROTOCOL_VERSION: ${{ needs.get-protocol-version.outputs.protocol_version }} - runs-on: [matterlabs-ci-runner-high-performance] - strategy: - matrix: - components: - - witness-generator - - prover-gpu-fri - - witness-vector-generator - - prover-fri-gateway - - prover-job-monitor - - proof-fri-gpu-compressor - - prover-autoscaler - - circuit-prover-gpu - steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - with: - submodules: "recursive" - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1 - - - name: setup-env - run: | - echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV - echo CI=1 >> $GITHUB_ENV - echo $(pwd)/bin >> $GITHUB_PATH - echo CI=1 >> .env - echo IN_DOCKER=1 >> .env - - - name: Set env vars - shell: bash - run: | - # Support for custom tag suffix - if [ -n "${{ inputs.image_tag_suffix }}" ]; then - echo 
IMAGE_TAG_SHA_TS="${{ inputs.image_tag_suffix }}" >> $GITHUB_ENV - else - echo IMAGE_TAG_SHA_TS=$(git rev-parse --short HEAD)-$(date +%s) >> $GITHUB_ENV - fi - - - name: download CRS for GPU compressor - if: matrix.components == 'proof-fri-gpu-compressor' - run: | - run_retried curl -LO https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^24.key - # We need to run this only when ERA_BELLMAN_CUDA_RELEASE is not available - # In our case it happens only when PR is created from fork - - name: Wait for runner IP to be not rate-limited against GH API - if: ( inputs.is_pr_from_fork == true && matrix.components == 'proof-fri-gpu-compressor' ) - run: ./.github/scripts/rate_limit_check.sh - - - name: Hack to set env vars inside docker container - shell: bash - run: | - sed -i '/^FROM matterlabs\/zksync-build-base:latest as builder/a ENV SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage\nENV SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com\nENV SCCACHE_GCS_RW_MODE=READ_WRITE\nENV RUSTC_WRAPPER=sccache' ./docker/${{ matrix.components }}/Dockerfile - #TODO: remove AS version =) - sed -i '/^FROM matterlabs\/zksync-build-base:latest AS builder/a ENV SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage\nENV SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com\nENV SCCACHE_GCS_RW_MODE=READ_WRITE\nENV RUSTC_WRAPPER=sccache' ./docker/${{ matrix.components }}/Dockerfile - cat ./docker/${{ matrix.components }}/Dockerfile - - - name: login to Docker registries - if: github.event_name != 'pull_request' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/')) - shell: bash - run: | - docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} - gcloud auth configure-docker us-docker.pkg.dev -q - - - name: Build and push - uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0 - with: - context: . 
- load: true - build-args: | - CUDA_ARCH=${{ inputs.CUDA_ARCH }} - SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage - SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com - SCCACHE_GCS_RW_MODE=READ_WRITE - RUSTC_WRAPPER=sccache - PROTOCOL_VERSION=${{ env.PROTOCOL_VERSION }} - ERA_BELLMAN_CUDA_RELEASE=${{ inputs.ERA_BELLMAN_CUDA_RELEASE }} - file: docker/${{ matrix.components }}/Dockerfile - tags: | - us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ env.IMAGE_TAG_SHA_TS }} - matterlabs/${{ matrix.components }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ env.IMAGE_TAG_SHA_TS }} - us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }} - matterlabs/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }} - us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:latest - matterlabs/${{ matrix.components }}:latest - - - name: Push docker image - if: ${{ inputs.action == 'push' }} - run: | - docker push us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ env.IMAGE_TAG_SHA_TS }} - docker push matterlabs/${{ matrix.components }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ env.IMAGE_TAG_SHA_TS }} - docker push us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }} - docker push matterlabs/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }} - docker push us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:latest - docker push matterlabs/${{ matrix.components }}:latest - - copy-images: - name: Copy images between docker registries - needs: [build-images, get-protocol-version] - env: - PROTOCOL_VERSION: ${{ needs.get-protocol-version.outputs.protocol_version }} - runs-on: matterlabs-ci-runner - if: ${{ inputs.action == 'push' }} - strategy: - matrix: - component: - - 
witness-vector-generator - steps: - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1 - - - name: Login to us-central1 GAR - run: | - gcloud auth print-access-token --lifetime=7200 --impersonate-service-account=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com | docker login -u oauth2accesstoken --password-stdin https://us-docker.pkg.dev - - - name: Login and push to Asia GAR - run: | - gcloud auth print-access-token --lifetime=7200 --impersonate-service-account=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com | docker login -u oauth2accesstoken --password-stdin https://asia-docker.pkg.dev - docker buildx imagetools create \ - --tag asia-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ inputs.image_tag_suffix }} \ - us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ inputs.image_tag_suffix }} - docker buildx imagetools create \ - --tag asia-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ inputs.image_tag_suffix }} \ - us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ inputs.image_tag_suffix }} - - - name: Login and push to Europe GAR - run: | - gcloud auth print-access-token --lifetime=7200 --impersonate-service-account=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com | docker login -u oauth2accesstoken --password-stdin https://europe-docker.pkg.dev - docker buildx imagetools create \ - --tag europe-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ inputs.image_tag_suffix }} \ - us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ inputs.image_tag_suffix }} - docker buildx imagetools create \ - --tag europe-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component 
}}:2.0-${{ inputs.image_tag_suffix }} \ - us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component }}:2.0-${{ inputs.image_tag_suffix }} diff --git a/.github/workflows/new-build-witness-generator-template.yml b/.github/workflows/new-build-witness-generator-template.yml deleted file mode 100644 index a96d217da832..000000000000 --- a/.github/workflows/new-build-witness-generator-template.yml +++ /dev/null @@ -1,138 +0,0 @@ -name: Build witness generator image with custom compiler flags -on: - workflow_call: - secrets: - DOCKERHUB_USER: - description: "DOCKERHUB_USER" - required: true - DOCKERHUB_TOKEN: - description: "DOCKERHUB_TOKEN" - required: true - inputs: - ERA_BELLMAN_CUDA_RELEASE: - description: "ERA_BELLMAN_CUDA_RELEASE" - type: string - required: true - image_tag_suffix: - description: "Optional suffix to override tag name generation" - type: string - required: false - action: - type: string - default: non-push - required: false - WITNESS_GENERATOR_RUST_FLAGS: - description: "Rust flags for witness_generator compilation" - type: string - default: "" - required: false - outputs: - protocol_version: - description: "Protocol version of the binary" - value: ${{ jobs.get-protocol-version.outputs.protocol_version }} - -jobs: - get-protocol-version: - name: Get protocol version - runs-on: [matterlabs-ci-runner-high-performance] - outputs: - protocol_version: ${{ steps.protocolversion.outputs.protocol_version }} - steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - with: - submodules: "recursive" - - - name: setup-env - run: | - echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV - echo CI=1 >> $GITHUB_ENV - echo $(pwd)/bin >> $GITHUB_PATH - echo CI=1 >> .env - echo IN_DOCKER=1 >> .env - - - name: setup rust - uses: actions-rust-lang/setup-rust-toolchain@1fbea72663f6d4c03efaab13560c8a24cfd2a7cc # v1.9.0 - with: - toolchain: nightly-2024-08-01 - - - name: Prepare sccache-cache env vars - shell: bash - run: | - echo 
SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage >> $GITHUB_ENV - echo SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com >> $GITHUB_ENV - echo SCCACHE_ERROR_LOG=/tmp/sccache_log.txt >> $GITHUB_ENV - echo SCCACHE_GCS_RW_MODE=READ_WRITE >> $GITHUB_ENV - echo RUSTC_WRAPPER=sccache >> $GITHUB_ENV - - - name: protocol-version - id: protocolversion - # TODO: use -C flag, when it will become stable. - shell: bash - run: | - cd prover - cargo build --release --bin prover_version - PPV=$(target/release/prover_version) - echo Protocol version is ${PPV} - echo "protocol_version=${PPV}" >> $GITHUB_OUTPUT - - build-images: - name: Build and Push Docker Images - needs: get-protocol-version - env: - PROTOCOL_VERSION: ${{ needs.get-protocol-version.outputs.protocol_version }} - runs-on: [matterlabs-ci-runner-c3d] - strategy: - matrix: - components: - - witness-generator - steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - with: - submodules: "recursive" - - - name: setup-env - run: | - echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV - echo CI=1 >> $GITHUB_ENV - echo $(pwd)/bin >> $GITHUB_PATH - echo CI=1 >> .env - echo IN_DOCKER=1 >> .env - - - name: Set env vars - shell: bash - run: | - # Support for custom tag suffix - if [ -n "${{ inputs.image_tag_suffix }}" ]; then - echo IMAGE_TAG_SHA_TS="${{ inputs.image_tag_suffix }}" >> $GITHUB_ENV - else - echo IMAGE_TAG_SHA_TS=$(git rev-parse --short HEAD)-$(date +%s) >> $GITHUB_ENV - fi - - - name: login to Docker registries - if: github.event_name != 'pull_request' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/')) - shell: bash - run: | - docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} - gcloud auth configure-docker us-docker.pkg.dev -q - - - name: Build and push - uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0 - with: - context: . 
- push: ${{ inputs.action == 'push' }} - build-args: | - SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage - SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com - SCCACHE_GCS_RW_MODE=READ_WRITE - RUSTC_WRAPPER=sccache - PROTOCOL_VERSION=${{ env.PROTOCOL_VERSION }} - ERA_BELLMAN_CUDA_RELEASE=${{ inputs.ERA_BELLMAN_CUDA_RELEASE }} - RUST_FLAGS=${{ inputs.WITNESS_GENERATOR_RUST_FLAGS }} - file: docker/${{ matrix.components }}/Dockerfile - tags: | - us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ env.IMAGE_TAG_SHA_TS }} - matterlabs/${{ matrix.components }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ env.IMAGE_TAG_SHA_TS }} - us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }} - matterlabs/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }} - us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:latest - matterlabs/${{ matrix.components }}:latest diff --git a/.github/workflows/protobuf.yaml b/.github/workflows/protobuf.yaml index f0565919ded1..62748c79251a 100644 --- a/.github/workflows/protobuf.yaml +++ b/.github/workflows/protobuf.yaml @@ -31,7 +31,7 @@ env: jobs: compatibility: - runs-on: [ubuntu-22.04-github-hosted-16core] + runs-on: [ubuntu-24.04-github-hosted-16core] steps: - uses: mozilla-actions/sccache-action@89e9040de88b577a072e3760aaf59f585da083af # v0.0.5 @@ -46,13 +46,15 @@ jobs: run: git checkout $(git merge-base $BASE $HEAD) --recurse-submodules working-directory: ./before + - name: compile before - run: cargo check --all-targets - working-directory: ./before/ + run: cargo check --manifest-path ./core/Cargo.toml --all-targets + working-directory: ./before + - name: build before.binpb run: > perl -ne 'print "$1\n" if /PROTOBUF_DESCRIPTOR="(.*)"/' - `find ./before/target/debug/build/*/output` + `find ./before/core/target/debug/build/*/output` | xargs cat > ./before.binpb # after 
@@ -61,13 +63,15 @@ jobs: ref: ${{ env.HEAD }} path: after submodules: recursive + - name: compile after - run: cargo check --all-targets + run: cargo check --manifest-path ./core/Cargo.toml --all-targets working-directory: ./after + - name: build after.binpb run: > perl -ne 'print "$1\n" if /PROTOBUF_DESCRIPTOR="(.*)"/' - `find ./after/target/debug/build/*/output` + `find ./after/core/target/debug/build/*/output` | xargs cat > ./after.binpb # compare @@ -75,6 +79,8 @@ jobs: with: github_token: ${{ github.token }} - name: buf breaking - run: > - buf breaking './after.binpb' --against './before.binpb' --exclude-path 'zksync/config/experimental.proto' - --config '{"version":"v1","breaking":{"use":["WIRE_JSON","WIRE"]}}' --error-format 'github-actions' + run: | + pwd + ls -la + buf breaking './after.binpb' --against './before.binpb' --exclude-path 'zksync/config/experimental.proto' \ + --config '{"version":"v1","breaking":{"use":["WIRE_JSON","WIRE"]}}' --error-format 'github-actions' diff --git a/.github/workflows/publish-crates.yml b/.github/workflows/publish-crates.yml new file mode 100644 index 000000000000..ebfb96a544c8 --- /dev/null +++ b/.github/workflows/publish-crates.yml @@ -0,0 +1,42 @@ +name: Publish crates + +on: + workflow_dispatch: + inputs: + component: + description: 'Component to release. Possible values are: core, prover or zkstack_cli.' + required: true + default: 'zkstack_cli' + run-build: + type: boolean + description: 'Build the workspace before release.' + required: false + default: true + run-tests: + type: boolean + description: 'Run tests before release.' + required: false + default: false + org-owner: + type: string + description: 'Organization to add as owner of the crates.' 
+ required: false + default: 'github:matter-labs:crates-io' + + +jobs: + + publish-crates: + name: Publish to crates.io + runs-on: matterlabs-ci-runner-high-performance + steps: + - name: Publish crates + uses: matter-labs/zksync-ci-common/.github/actions/publish-crates@v1 + with: + slack_webhook: ${{ secrets.SLACK_WEBHOOK_RELEASES }} # Slack webhook for notifications + cargo_registry_token: ${{ secrets.CRATES_IO_TOKEN }} # Crates.io token for publishing + workspace_path: ${{ inputs.component }} + org_owner: ${{ inputs.org-owner }} + run_build: ${{ inputs.run-build }} + run_tests: ${{ inputs.run-tests }} + gh_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/release-please-cargo-lock.yml b/.github/workflows/release-please-cargo-lock.yml deleted file mode 100644 index 8c8036dfa47a..000000000000 --- a/.github/workflows/release-please-cargo-lock.yml +++ /dev/null @@ -1,55 +0,0 @@ -on: - push: - branches: - - release-please--branches--main--components--core - -name: release-please-update-cargo-lock -jobs: - update_cargo_lock: - # TODO: After migraton switch to CI - runs-on: [matterlabs-default-infra-runners] - - steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4 - with: - submodules: "recursive" - persist-credentials: false - - - name: Check last commit - id: condition - run: | - COMMIT=$(git log -1 --pretty=%B) - if [[ "$COMMIT" == "Update Cargo.lock" ]]; then - echo "Cargo.lock is already updated" - echo "::set-output name=skip_steps::true" - else - echo "Cargo.lock should be updated" - echo "::set-output name=skip_steps::false" - fi - - - name: Setup environment - if: steps.condition.outputs.skip_steps != 'true' - run: | - echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV - echo $(pwd)/bin >> $GITHUB_PATH - echo IN_DOCKER=1 >> .env - - - name: Start services - if: steps.condition.outputs.skip_steps != 'true' - run: docker compose up -d zk - - - name: Cargo check - if: steps.condition.outputs.skip_steps != 'true' - run: ci_run cargo 
check - - - name: Push changes - if: steps.condition.outputs.skip_steps != 'true' - env: - GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }} - run: | - git config --global user.email "zksync-era-bot@users.noreply.github.com" - git config --global user.name "zksync-era-bot" - git remote set-url origin 'https://${{ secrets.RELEASE_TOKEN }}@github.com/matter-labs/zksync-era.git' - git add ./Cargo.lock - git commit -m "Update Cargo.lock" - git push diff --git a/.github/workflows/release-please.yml b/.github/workflows/release-please.yml index 4a8f527f45c6..6a3935e9da84 100644 --- a/.github/workflows/release-please.yml +++ b/.github/workflows/release-please.yml @@ -1,29 +1,32 @@ +name: Release-please + +# Give permissions to the release-please to open, update PRs +# and commit to PRs the repository to update Cargo.lock +permissions: + contents: write + pull-requests: write + id-token: write + attestations: write + +# Run the workflow on push to the main branch or manually on: push: branches: - main workflow_dispatch: -permissions: - contents: write - pull-requests: write - -name: release-please jobs: - release-please: - runs-on: ubuntu-latest - steps: - - name: Run release-please - id: release - uses: google-github-actions/release-please-action@e4dc86ba9405554aeba3c6bb2d169500e7d3b4ee # v4.1.1 - with: - token: ${{ secrets.RELEASE_TOKEN }} - config-file: .github/release-please/config.json - manifest-file: .github/release-please/manifest.json - - name: Send Release Info - if: ${{ steps.release.outputs.releases_created == 'true' }} - uses: matter-labs/format-release-please-for-slack-action@69e6fe9e4ec531b7b5fb0d826f73c190db83cf42 # v2.1.0 - with: - release-please-output: ${{ toJSON(steps.release.outputs) }} - slack-webhook-url: ${{ secrets.SLACK_WEBHOOK_RELEASES }} + # Prepare the release PR with changelog updates and create github releases + release-please: + uses: matter-labs/zksync-ci-common/.github/workflows/release-please.yaml@v1 + secrets: + slack_webhook: ${{ 
secrets.SLACK_WEBHOOK_RELEASES }} # Slack webhook for notifications + gh_token: ${{ secrets.RELEASE_TOKEN }} # GitHub token for release-please + with: + config: '.github/release-please/config.json' # Path to the configuration file + manifest: '.github/release-please/manifest.json' # Path to the manifest file + update-cargo-lock: true # Update Cargo.lock file in the release PR + publish-to-crates-io: true # Enable publishing to crates.io + upgrade-dependencies: true # Upgrade cross-workspace dependencies + version-suffix: 'non-semver-compat' # Version suffix for the crates.io release diff --git a/.github/workflows/release-test-stage.yml b/.github/workflows/release-test-stage.yml index 2e6c7882aa98..cff6c5ee0b3f 100644 --- a/.github/workflows/release-test-stage.yml +++ b/.github/workflows/release-test-stage.yml @@ -61,7 +61,7 @@ jobs: build-push-core-images: name: Build and push images needs: [setup, changed_files] - uses: ./.github/workflows/new-build-core-template.yml + uses: ./.github/workflows/build-core-template.yml if: needs.changed_files.outputs.core == 'true' || needs.changed_files.outputs.all == 'true' with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} @@ -85,7 +85,7 @@ jobs: build-push-contract-verifier: name: Build and push images needs: [setup, changed_files] - uses: ./.github/workflows/new-build-contract-verifier-template.yml + uses: ./.github/workflows/build-contract-verifier-template.yml if: needs.changed_files.outputs.core == 'true' || needs.changed_files.outputs.all == 'true' with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} @@ -97,7 +97,7 @@ jobs: build-push-prover-images: name: Build and push images needs: [setup, changed_files] - uses: ./.github/workflows/new-build-prover-template.yml + uses: ./.github/workflows/build-prover-template.yml if: needs.changed_files.outputs.prover == 'true' || needs.changed_files.outputs.all == 'true' with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} @@ -110,7 +110,7 
@@ jobs: build-push-witness-generator-image-avx512: name: Build and push prover images with avx512 instructions needs: [setup, changed_files] - uses: ./.github/workflows/new-build-witness-generator-template.yml + uses: ./.github/workflows/build-witness-generator-template.yml if: needs.changed_files.outputs.prover == 'true' || needs.changed_files.outputs.all == 'true' with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }}-avx512 @@ -121,10 +121,20 @@ jobs: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} - build-gar-prover-fri-gpu-and-circuit-prover-gpu-gar: + build-circuit-prover-gpu-gar: name: Build GAR prover FRI GPU needs: [setup, build-push-prover-images] - uses: ./.github/workflows/build-prover-fri-gpu-gar-and-circuit-prover-gpu-gar.yml + uses: ./.github/workflows/build-circuit-prover-gpu-gar.yml + if: needs.changed_files.outputs.prover == 'true' || needs.changed_files.outputs.all == 'true' + with: + setup_keys_id: ${{ needs.setup.outputs.prover_fri_gpu_key_id }} + image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} + protocol_version: ${{ needs.build-push-prover-images.outputs.protocol_version }} + + build-gar-proof-fri-gpu-compressor-gar: + name: Build GAR proof FRI GPU compressor + needs: [setup, build-push-prover-images] + uses: ./.github/workflows/build-proof-fri-gpu-compressor-gar.yml if: needs.changed_files.outputs.prover == 'true' || needs.changed_files.outputs.all == 'true' with: setup_keys_id: ${{ needs.setup.outputs.prover_fri_gpu_key_id }} diff --git a/.github/workflows/vm-perf-comparison.yml b/.github/workflows/vm-perf-comparison.yml index 3520419f1337..223404fa0b4f 100644 --- a/.github/workflows/vm-perf-comparison.yml +++ b/.github/workflows/vm-perf-comparison.yml @@ -47,28 +47,32 @@ jobs: run: | run_retried docker compose pull zk docker compose up -d zk - + - name: run benchmarks on base branch shell: bash run: | ci_run zkstackup -g --local - ci_run zkstack dev contracts 
--system-contracts - ci_run cargo bench --package vm-benchmark --bench instructions -- --verbose || echo "Instructions benchmark is missing" - ci_run cargo run --package vm-benchmark --release --bin instruction_counts | tee base-opcodes + ci_run zkstack dev contracts + ci_run cargo bench --manifest-path ./core/Cargo.toml \ + --package vm-benchmark --bench instructions -- --verbose || echo "Instructions benchmark is missing" + ci_run cargo run --manifest-path ./core/Cargo.toml \ + --package vm-benchmark --release --bin instruction_counts | tee base-opcodes - name: checkout PR run: | git checkout --force FETCH_HEAD --recurse-submodules + git submodule update --init --recursive - name: run benchmarks on PR shell: bash id: comparison run: | ci_run zkstackup -g --local - ci_run zkstack dev contracts --system-contracts - ci_run cargo bench --package vm-benchmark --bench instructions -- --verbose + ci_run zkstack dev contracts + ci_run cargo bench --manifest-path ./core/Cargo.toml --package vm-benchmark --bench instructions -- --verbose - ci_run cargo bench --package vm-benchmark --bench instructions -- --print > instructions.log 2>/dev/null + ci_run cargo bench --manifest-path ./core/Cargo.toml \ + --package vm-benchmark --bench instructions -- --print > instructions.log 2>/dev/null # Output all lines from the benchmark result starting from the "## ..." comparison header. # Since the output spans multiple lines, we use a heredoc declaration. 
EOF=$(dd if=/dev/urandom bs=15 count=1 status=none | base64) @@ -76,7 +80,8 @@ jobs: sed -n '/^## /,$p' instructions.log >> $GITHUB_OUTPUT echo "$EOF" >> $GITHUB_OUTPUT - ci_run cargo run --package vm-benchmark --release --bin instruction_counts -- --diff base-opcodes > opcodes.log + ci_run cargo run --manifest-path ./core/Cargo.toml \ + --package vm-benchmark --release --bin instruction_counts -- --diff base-opcodes > opcodes.log echo "opcodes<<$EOF" >> $GITHUB_OUTPUT sed -n '/^## /,$p' opcodes.log >> $GITHUB_OUTPUT echo "$EOF" >> $GITHUB_OUTPUT diff --git a/.github/workflows/vm-perf-to-prometheus.yml b/.github/workflows/vm-perf-to-prometheus.yml index 93d33116794f..0868e0902342 100644 --- a/.github/workflows/vm-perf-to-prometheus.yml +++ b/.github/workflows/vm-perf-to-prometheus.yml @@ -45,8 +45,8 @@ jobs: - name: run benchmarks run: | - ci_run cargo bench --package vm-benchmark --bench oneshot + ci_run cargo bench --manifest-path ./core/Cargo.toml --package vm-benchmark --bench oneshot # Run only benches with 1,000 transactions per batch to not spend too much time - ci_run cargo bench --package vm-benchmark --bench batch '/1000$' - ci_run cargo bench --package vm-benchmark --bench instructions -- --verbose - ci_run cargo bench --package vm-benchmark --bench instructions -- --print + ci_run cargo bench --manifest-path ./core/Cargo.toml --package vm-benchmark --bench batch '/1000$' + ci_run cargo bench --manifest-path ./core/Cargo.toml --package vm-benchmark --bench instructions -- --verbose + ci_run cargo bench --manifest-path ./core/Cargo.toml --package vm-benchmark --bench instructions -- --print diff --git a/.github/workflows/zk-environment-publish.yml b/.github/workflows/zk-environment-publish.yml index e9dfac1c6bb2..569fa66ebeb4 100644 --- a/.github/workflows/zk-environment-publish.yml +++ b/.github/workflows/zk-environment-publish.yml @@ -33,7 +33,7 @@ jobs: outputs: zk_environment: ${{ steps.changed-files-yaml.outputs.zk_env_any_changed }} 
zk_environment_cuda_11_8: ${{ steps.changed-files-yaml.outputs.zk_env_cuda_11_8_any_changed }} - zk_environment_cuda_12_0: ${{ steps.changed-files-yaml.outputs.zk_env_cuda_12_any_changed }} + zk_environment_cuda_12: ${{ steps.changed-files-yaml.outputs.zk_env_cuda_12_any_changed }} runs-on: ubuntu-latest steps: - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 @@ -52,7 +52,7 @@ jobs: - docker/zk-environment/22.04_amd64_cuda_11_8.Dockerfile - .github/workflows/zk-environment-publish.yml zk_env_cuda_12: - - docker/zk-environment/22.04_amd64_cuda_12_0.Dockerfile + - docker/zk-environment/22.04_amd64_cuda_12.Dockerfile - .github/workflows/zk-environment-publish.yml get_short_sha: @@ -197,7 +197,7 @@ jobs: runs-on: [matterlabs-ci-runner-high-performance] strategy: matrix: - cuda_version: ['11_8', '12_0'] + cuda_version: ['11_8', '12'] steps: - name: Evaluate condition id: condition diff --git a/.gitignore b/.gitignore index 471a601cc34b..716024531d4a 100644 --- a/.gitignore +++ b/.gitignore @@ -26,12 +26,6 @@ zksync_pk.key dist todo -Cargo.lock -!/Cargo.lock -!/infrastructure/zksync-crypto/Cargo.lock -!/prover/Cargo.lock -!/zkstack_cli/Cargo.lock - /etc/env/target/* /etc/env/.current /etc/env/configs/* @@ -111,6 +105,8 @@ hyperchain-*.yml # Prover keys that should not be commited prover/crates/bin/vk_setup_data_generator_server_fri/data/setup_* prover/data/keys/setup_* +prover/data/keys/fflonk_setup_snark_data.bin +prover/data/keys/plonk_setup_snark_data.bin # ZK Stack CLI chains/era/configs/* diff --git a/Cargo.toml b/Cargo.toml deleted file mode 100644 index 4efbf85b0930..000000000000 --- a/Cargo.toml +++ /dev/null @@ -1,330 +0,0 @@ -[workspace] -members = [ - # Binaries - "core/bin/block_reverter", - "core/bin/contract-verifier", - "core/bin/custom_genesis_export", - "core/bin/external_node", - "core/bin/merkle_tree_consistency_checker", - "core/bin/snapshots_creator", - "core/bin/selector_generator", - "core/bin/system-constants-generator", 
- "core/bin/verified_sources_fetcher", - "core/bin/zksync_server", - "core/bin/genesis_generator", - "core/bin/zksync_tee_prover", - # Node services - "core/node/node_framework", - "core/node/proof_data_handler", - "core/node/block_reverter", - "core/node/commitment_generator", - "core/node/house_keeper", - "core/node/genesis", - "core/node/shared_metrics", - "core/node/db_pruner", - "core/node/fee_model", - "core/node/da_dispatcher", - "core/node/eth_sender", - "core/node/vm_runner", - "core/node/test_utils", - "core/node/state_keeper", - "core/node/reorg_detector", - "core/node/consistency_checker", - "core/node/metadata_calculator", - "core/node/node_sync", - "core/node/node_storage_init", - "core/node/consensus", - "core/node/contract_verification_server", - "core/node/api_server", - "core/node/base_token_adjuster", - "core/node/external_proof_integration_api", - "core/node/logs_bloom_backfill", - "core/node/da_clients", - # Libraries - "core/lib/db_connection", - "core/lib/zksync_core_leftovers", - "core/lib/basic_types", - "core/lib/config", - "core/lib/constants", - "core/lib/contract_verifier", - "core/lib/contracts", - "core/lib/circuit_breaker", - "core/lib/dal", - "core/lib/env_config", - "core/lib/da_client", - "core/lib/eth_client", - "core/lib/eth_signer", - "core/lib/l1_contract_interface", - "core/lib/mempool", - "core/lib/merkle_tree", - "core/lib/mini_merkle_tree", - "core/lib/node_framework_derive", - "core/lib/object_store", - "core/lib/prover_interface", - "core/lib/queued_job_processor", - "core/lib/state", - "core/lib/storage", - "core/lib/tee_verifier", - "core/lib/types", - "core/lib/protobuf_config", - "core/lib/utils", - "core/lib/vlog", - "core/lib/multivm", - "core/lib/vm_interface", - "core/lib/vm_executor", - "core/lib/web3_decl", - "core/lib/snapshots_applier", - "core/lib/crypto_primitives", - "core/lib/external_price_api", - "core/lib/test_contracts", - # Test infrastructure - "core/tests/loadnext", - "core/tests/vm-benchmark", -] 
-resolver = "2" - -exclude = [] - -# for `perf` profiling -[profile.perf] -inherits = "release" -debug = true - -[workspace.package] -version = "0.1.0" -edition = "2021" -authors = ["The Matter Labs Team "] -homepage = "https://zksync.io/" -repository = "https://github.com/matter-labs/zksync-era" -license = "MIT OR Apache-2.0" -keywords = ["blockchain", "zksync"] -categories = ["cryptography"] - -[workspace.dependencies] -# "External" dependencies -anyhow = "1" -assert_matches = "1.5" -async-trait = "0.1" -async-recursion = "1" -axum = "0.7.5" -backon = "0.4.4" -bigdecimal = "0.4.5" -bincode = "1" -blake2 = "0.10" -bytes = "1" -chrono = "0.4" -clap = "4.2.2" -codegen = "0.2.0" -const-decoder = "0.4.0" -criterion = "0.4.0" -ctrlc = "3.1" -dashmap = "5.5.3" -derive_more = "1.0.0" -envy = "0.4" -ethabi = "18.0.0" -flate2 = "1.0.28" -fraction = "0.15.3" -futures = "0.3" -futures-util = "0.3" -glob = "0.3" -google-cloud-auth = "0.16.0" -google-cloud-storage = "0.20.0" -governor = "0.4.2" -hex = "0.4" -http = "1.1" -http-body-util = "0.1.2" -httpmock = "0.7.0" -hyper = "1.3" -insta = "1.29.0" -itertools = "0.10" -jsonrpsee = { version = "0.23", default-features = false } -leb128 = "0.2.5" -lru = { version = "0.12.1", default-features = false } -mini-moka = "0.10.0" -num = "0.4.0" -num_cpus = "1.13" -num_enum = "0.7.2" -octocrab = "0.41" -once_cell = "1" -opentelemetry = "0.24.0" -opentelemetry_sdk = "0.24.0" -opentelemetry-otlp = "0.17.0" -opentelemetry-semantic-conventions = "0.16.0" -opentelemetry-appender-tracing = "0.5" -pin-project-lite = "0.2.13" -pretty_assertions = "1" -prost = "0.12.6" -rand = "0.8" -rayon = "1.3.1" -regex = "1" -reqwest = "0.12" -rlp = "0.5" -rocksdb = "0.21" -rustc_version = "0.4.0" -rustls = "0.23" -secp256k1 = { version = "0.27.0", features = ["recovery", "global-context"] } -secrecy = "0.8.0" -semver = "1" -sentry = "0.31" -serde = "1" -serde_json = "1" -serde_with = "1" -serde_yaml = "0.9" -sha2 = "0.10.8" -sha3 = "0.10.8" -sqlx = "0.8.1" 
-static_assertions = "1.1" -structopt = "0.3.20" -strum = "0.26" -tempfile = "3.0.2" -test-casing = "0.1.2" -test-log = "0.2.15" -thiserror = "1" -thread_local = "1.1" -tikv-jemallocator = "0.5" -tiny-keccak = "2" -tokio = "1" -tower = "0.4.13" -tower-http = "0.5.2" -tracing = "0.1" -tracing-subscriber = "0.3" -tracing-opentelemetry = "0.25.0" -time = "0.3.36" # Has to be same as used by `tracing-subscriber` -url = "2" -web3 = "0.19.0" -yab = "0.1.0" - -# Proc-macro -syn = "2.0" -quote = "1.0" -proc-macro2 = "1.0" -trybuild = "1.0" - -# "Internal" dependencies -vise = "0.2.0" -vise-exporter = "0.2.0" -foundry-compilers = { version = "0.11.6", git = "https://github.com/Moonsong-Labs/compilers.git", rev = "7c69695e5c75451f158dd2456bf8c94a7492ea0b" } - -# DA clients' dependencies -# Avail -base58 = "0.2.0" -scale-encode = "0.5.0" -blake2b_simd = "1.0.2" -subxt-metadata = "0.34.0" -parity-scale-codec = { version = "3.6.9", default-features = false } -subxt-signer = { version = "0.34", default-features = false } - -# Celestia -celestia-types = "0.6.1" -bech32 = "0.11.0" -ripemd = "0.1.3" -tonic = { version = "0.11.0", default-features = false } -pbjson-types = "0.6.0" - -# Eigen -tokio-stream = "0.1.16" -rust-kzg-bn254 = "0.2.1" -ark-bn254 = "0.5.0" -num-bigint = "0.4.6" -serial_test = { version = "3.1.1", features = ["file_locks"] } - -# Here and below: -# We *always* pin the latest version of protocol to disallow accidental changes in the execution logic. -# However, for the historical version of protocol crates, we have lax requirements. Otherwise, -# Bumping a crypto dependency like `boojum` would require us to republish all the historical packages. 
-circuit_encodings = "0.150.19" -circuit_sequencer_api = "0.150.19" -circuit_definitions = "0.150.19" -crypto_codegen = { package = "zksync_solidity_vk_codegen",version = "0.30.12" } -kzg = { package = "zksync_kzg", version = "0.150.19" } -zk_evm = { version = "=0.133.0" } -zk_evm_1_3_1 = { package = "zk_evm", version = "0.131.0-rc.2" } -zk_evm_1_3_3 = { package = "zk_evm", version = "0.133" } -zk_evm_1_4_0 = { package = "zk_evm", version = "0.140" } -zk_evm_1_4_1 = { package = "zk_evm", version = "0.141" } -zk_evm_1_5_0 = { package = "zk_evm", version = "0.150.19" } -fflonk = "0.30.12" - -# New VM; pinned to a specific commit because of instability -zksync_vm2 = { git = "https://github.com/matter-labs/vm2.git", rev = "457d8a7eea9093af9440662e33e598c13ba41633" } - -# Consensus dependencies. -zksync_concurrency = "=0.7.0" -zksync_consensus_bft = "=0.7.0" -zksync_consensus_crypto = "=0.7.0" -zksync_consensus_executor = "=0.7.0" -zksync_consensus_network = "=0.7.0" -zksync_consensus_roles = "=0.7.0" -zksync_consensus_storage = "=0.7.0" -zksync_consensus_utils = "=0.7.0" -zksync_protobuf = "=0.7.0" -zksync_protobuf_build = "=0.7.0" - -# "Local" dependencies -zksync_multivm = { version = "0.1.0", path = "core/lib/multivm" } -zksync_vlog = { version = "0.1.0", path = "core/lib/vlog" } -zksync_vm_interface = { version = "0.1.0", path = "core/lib/vm_interface" } -zksync_vm_executor = { version = "0.1.0", path = "core/lib/vm_executor" } -zksync_basic_types = { version = "0.1.0", path = "core/lib/basic_types" } -zksync_circuit_breaker = { version = "0.1.0", path = "core/lib/circuit_breaker" } -zksync_config = { version = "0.1.0", path = "core/lib/config" } -zksync_contract_verifier_lib = { version = "0.1.0", path = "core/lib/contract_verifier" } -zksync_contracts = { version = "0.1.0", path = "core/lib/contracts" } -zksync_core_leftovers = { version = "0.1.0", path = "core/lib/zksync_core_leftovers" } -zksync_dal = { version = "0.1.0", path = "core/lib/dal" } 
-zksync_db_connection = { version = "0.1.0", path = "core/lib/db_connection" } -zksync_env_config = { version = "0.1.0", path = "core/lib/env_config" } -zksync_eth_client = { version = "0.1.0", path = "core/lib/eth_client" } -zksync_da_client = { version = "0.1.0", path = "core/lib/da_client" } -zksync_eth_signer = { version = "0.1.0", path = "core/lib/eth_signer" } -zksync_health_check = { version = "0.1.0", path = "core/lib/health_check" } -zksync_l1_contract_interface = { version = "0.1.0", path = "core/lib/l1_contract_interface" } -zksync_mempool = { version = "0.1.0", path = "core/lib/mempool" } -zksync_merkle_tree = { version = "0.1.0", path = "core/lib/merkle_tree" } -zksync_bin_metadata = { version = "0.1.0", path = "core/lib/bin_metadata" } -zksync_mini_merkle_tree = { version = "0.1.0", path = "core/lib/mini_merkle_tree" } -zksync_object_store = { version = "0.1.0", path = "core/lib/object_store" } -zksync_protobuf_config = { version = "0.1.0", path = "core/lib/protobuf_config" } -zksync_prover_interface = { version = "0.1.0", path = "core/lib/prover_interface" } -zksync_queued_job_processor = { version = "0.1.0", path = "core/lib/queued_job_processor" } -zksync_snapshots_applier = { version = "0.1.0", path = "core/lib/snapshots_applier" } -zksync_state = { version = "0.1.0", path = "core/lib/state" } -zksync_storage = { version = "0.1.0", path = "core/lib/storage" } -zksync_system_constants = { version = "0.1.0", path = "core/lib/constants" } -zksync_tee_verifier = { version = "0.1.0", path = "core/lib/tee_verifier" } -zksync_test_contracts = { version = "0.1.0", path = "core/lib/test_contracts" } -zksync_types = { version = "0.1.0", path = "core/lib/types" } -zksync_utils = { version = "0.1.0", path = "core/lib/utils" } -zksync_web3_decl = { version = "0.1.0", path = "core/lib/web3_decl" } -zksync_crypto_primitives = { version = "0.1.0", path = "core/lib/crypto_primitives" } -zksync_external_price_api = { version = "0.1.0", path = 
"core/lib/external_price_api" } - -# Framework and components -zksync_node_framework = { version = "0.1.0", path = "core/node/node_framework" } -zksync_node_framework_derive = { version = "0.1.0", path = "core/lib/node_framework_derive" } -zksync_eth_watch = { version = "0.1.0", path = "core/node/eth_watch" } -zksync_shared_metrics = { version = "0.1.0", path = "core/node/shared_metrics" } -zksync_proof_data_handler = { version = "0.1.0", path = "core/node/proof_data_handler" } -zksync_block_reverter = { version = "0.1.0", path = "core/node/block_reverter" } -zksync_commitment_generator = { version = "0.1.0", path = "core/node/commitment_generator" } -zksync_house_keeper = { version = "0.1.0", path = "core/node/house_keeper" } -zksync_node_genesis = { version = "0.1.0", path = "core/node/genesis" } -zksync_da_dispatcher = { version = "0.1.0", path = "core/node/da_dispatcher" } -zksync_da_clients = { version = "0.1.0", path = "core/node/da_clients" } -zksync_eth_sender = { version = "0.1.0", path = "core/node/eth_sender" } -zksync_node_db_pruner = { version = "0.1.0", path = "core/node/db_pruner" } -zksync_node_fee_model = { version = "0.1.0", path = "core/node/fee_model" } -zksync_vm_runner = { version = "0.1.0", path = "core/node/vm_runner" } -zksync_external_proof_integration_api = { version = "0.1.0", path = "core/node/external_proof_integration_api" } -zksync_node_test_utils = { version = "0.1.0", path = "core/node/test_utils" } -zksync_state_keeper = { version = "0.1.0", path = "core/node/state_keeper" } -zksync_reorg_detector = { version = "0.1.0", path = "core/node/reorg_detector" } -zksync_consistency_checker = { version = "0.1.0", path = "core/node/consistency_checker" } -zksync_metadata_calculator = { version = "0.1.0", path = "core/node/metadata_calculator" } -zksync_node_sync = { version = "0.1.0", path = "core/node/node_sync" } -zksync_node_storage_init = { version = "0.1.0", path = "core/node/node_storage_init" } -zksync_node_consensus = { version = 
"0.1.0", path = "core/node/consensus" } -zksync_contract_verification_server = { version = "0.1.0", path = "core/node/contract_verification_server" } -zksync_node_api_server = { version = "0.1.0", path = "core/node/api_server" } -zksync_base_token_adjuster = { version = "0.1.0", path = "core/node/base_token_adjuster" } -zksync_logs_bloom_backfill = { version = "0.1.0", path = "core/node/logs_bloom_backfill" } diff --git a/bin/run_loadtest_from_github_actions b/bin/run_loadtest_from_github_actions index 149988d63d8f..9222673051a4 100755 --- a/bin/run_loadtest_from_github_actions +++ b/bin/run_loadtest_from_github_actions @@ -19,4 +19,4 @@ export CONTRACT_EXECUTION_PARAMS_RECURSIVE_CALLS=${execution_params[5]} export CONTRACT_EXECUTION_PARAMS_DEPLOYS=${execution_params[6]} # Run the test -cargo run --bin loadnext +cd core && cargo run --bin loadnext diff --git a/contracts b/contracts index 46d75088e7dd..3e2b628be66d 160000 --- a/contracts +++ b/contracts @@ -1 +1 @@ -Subproject commit 46d75088e7ddb534101874c3ec15b877da1cb417 +Subproject commit 3e2b628be66d2cb200d542dd71d6f19788ad8037 diff --git a/core/CHANGELOG.md b/core/CHANGELOG.md index 12d1169f84a3..256c83058ee2 100644 --- a/core/CHANGELOG.md +++ b/core/CHANGELOG.md @@ -1,5 +1,58 @@ # Changelog +## [26.1.0](https://github.com/matter-labs/zksync-era/compare/core-v26.0.0...core-v26.1.0) (2025-01-21) + + +### Features + +* update l2 erc20 bridge address in updater as well ([#3500](https://github.com/matter-labs/zksync-era/issues/3500)) ([fe3c7b2](https://github.com/matter-labs/zksync-era/commit/fe3c7b2583bc4f9277e186334e5822ddf95bdcd0)) +* **vm:** Implement call tracing for fast VM ([#2905](https://github.com/matter-labs/zksync-era/issues/2905)) ([731b824](https://github.com/matter-labs/zksync-era/commit/731b8240abd4c0cfa42f2ce89c23f8ebf67e1bf2)) + + +### Bug Fixes + +* copy special case to fast VM call tracer ([#3509](https://github.com/matter-labs/zksync-era/issues/3509)) 
([995e583](https://github.com/matter-labs/zksync-era/commit/995e583aa9b4ef6e0d8697fbb040e4b991a4248d)) +* fix execute encoding for transactions ([#3501](https://github.com/matter-labs/zksync-era/issues/3501)) ([4c381a8](https://github.com/matter-labs/zksync-era/commit/4c381a84346f8ab88d3f01dc2848c7fb5f2b788d)) +* **gateway:** erc20 workaround for gateway upgrade ([#3511](https://github.com/matter-labs/zksync-era/issues/3511)) ([c140ba8](https://github.com/matter-labs/zksync-era/commit/c140ba8f57caabf9c9bdd4bd8c9743a9ccf668be)) + + +### Performance Improvements + +* optimize get_unsealed_l1_batch_inner ([#3491](https://github.com/matter-labs/zksync-era/issues/3491)) ([9b121c9](https://github.com/matter-labs/zksync-era/commit/9b121c96bbb2e53be74aa81e0ca250ce9251f8db)) + +## [26.0.0](https://github.com/matter-labs/zksync-era/compare/core-v25.4.0...core-v26.0.0) (2025-01-17) + + +### âš  BREAKING CHANGES + +* **contracts:** gateway integration ([#1934](https://github.com/matter-labs/zksync-era/issues/1934)) + +### Features + +* Adapt server for new EVM bytecode hash encoding ([#3396](https://github.com/matter-labs/zksync-era/issues/3396)) ([5a1e6d2](https://github.com/matter-labs/zksync-era/commit/5a1e6d2445d4d4310fc1e54ccd44dc4254e5bcbc)) +* Add logging & metrics for mempool ([#3447](https://github.com/matter-labs/zksync-era/issues/3447)) ([64d861d](https://github.com/matter-labs/zksync-era/commit/64d861d1e1d2d46339938ee3174c58cdc3f348c3)) +* **api_server:** report gas price based on open batch ([#2868](https://github.com/matter-labs/zksync-era/issues/2868)) ([f30aca0](https://github.com/matter-labs/zksync-era/commit/f30aca00962aa34c8a7acd6e4116290a2b214dcb)) +* **contracts:** gateway integration ([#1934](https://github.com/matter-labs/zksync-era/issues/1934)) ([f06cb79](https://github.com/matter-labs/zksync-era/commit/f06cb79883bf320f50089099e0abeb95eaace470)) +* da_dispatcher refactoring ([#3409](https://github.com/matter-labs/zksync-era/issues/3409)) 
([591cd86](https://github.com/matter-labs/zksync-era/commit/591cd86a1a1e6e4214d3cec74b4c601356060203)) +* **en:** make documentation more chain agnostic ([#3376](https://github.com/matter-labs/zksync-era/issues/3376)) ([361243f](https://github.com/matter-labs/zksync-era/commit/361243f3f15e01cf1f3e49b73a579cb962cf0124)) +* **eth-sender:** make base fee grow at least as fast as priority fee ([#3386](https://github.com/matter-labs/zksync-era/issues/3386)) ([78af2bf](https://github.com/matter-labs/zksync-era/commit/78af2bf786bb4f7a639fef9fd169594101818b79)) +* **eth-watch:** Change protocol upgrade schema ([#3435](https://github.com/matter-labs/zksync-era/issues/3435)) ([2c778fd](https://github.com/matter-labs/zksync-era/commit/2c778fdd3fcd1e774bcb945f14a640ccf4227a2f)) +* Features for an easier upgrade ([#3422](https://github.com/matter-labs/zksync-era/issues/3422)) ([3037ee6](https://github.com/matter-labs/zksync-era/commit/3037ee6aa976744a09882b5830d6242ad8336717)) +* FFLONK support for compressor ([#3359](https://github.com/matter-labs/zksync-era/issues/3359)) ([1a297be](https://github.com/matter-labs/zksync-era/commit/1a297bedd226c56fc2ba02dc54d79129a271a1eb)) +* pubdata type changes from sync-layer-stable ([#3425](https://github.com/matter-labs/zksync-era/issues/3425)) ([f09087b](https://github.com/matter-labs/zksync-era/commit/f09087bab397778976af42c321cbba93f9706b5a)) + + +### Bug Fixes + +* **api:** Propagate fallback errors in traces ([#3469](https://github.com/matter-labs/zksync-era/issues/3469)) ([84e3e31](https://github.com/matter-labs/zksync-era/commit/84e3e312688e3aaffe81828471d276e24432d496)) +* **en:** make EN use main node's fee input ([#3489](https://github.com/matter-labs/zksync-era/issues/3489)) ([cbf2c31](https://github.com/matter-labs/zksync-era/commit/cbf2c31e353fd7a5167fcca7e2df87026050c21a)) +* eth aggregator restriction ([#3490](https://github.com/matter-labs/zksync-era/issues/3490)) 
([6cc9b9e](https://github.com/matter-labs/zksync-era/commit/6cc9b9e405b03a7e30f3c92735b7452099c165d0)) + + +### Performance Improvements + +* **eth-sender:** optimize sql query ([#3437](https://github.com/matter-labs/zksync-era/issues/3437)) ([0731f60](https://github.com/matter-labs/zksync-era/commit/0731f607a72d18decd1ff74139f190c253d807ef)) + ## [25.4.0](https://github.com/matter-labs/zksync-era/compare/core-v25.3.0...core-v25.4.0) (2024-12-19) diff --git a/Cargo.lock b/core/Cargo.lock similarity index 98% rename from Cargo.lock rename to core/Cargo.lock index 16d0bc9a007c..aa807124513c 100644 --- a/Cargo.lock +++ b/core/Cargo.lock @@ -1363,7 +1363,7 @@ checksum = "8d696c370c750c948ada61c69a0ee2cbbb9c50b1019ddb86d9317157a99c2cae" [[package]] name = "block_reverter" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "clap 4.5.20", @@ -1419,9 +1419,9 @@ dependencies = [ [[package]] name = "boojum" -version = "0.30.12" +version = "0.30.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14bd053feb7173130679a2119e105b5e78af7eb6b0e752de6793e4ee63d8e899" +checksum = "d689807d79092f8f7cfcb72a2313a43da77d56314e41324810566f385875c185" dependencies = [ "arrayvec 0.7.6", "bincode", @@ -1821,9 +1821,9 @@ dependencies = [ [[package]] name = "circuit_definitions" -version = "0.150.19" +version = "0.150.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10ebc81d5c2f6ee8de436c242f6466fb315fe25afcbc81aa1c47dfca39a55403" +checksum = "1f04f9c7c6b39255199aaba49802c5f40f95bcff24f5a456446a912d254f4bb1" dependencies = [ "circuit_encodings", "crossbeam", @@ -1835,26 +1835,26 @@ dependencies = [ [[package]] name = "circuit_encodings" -version = "0.150.19" +version = "0.150.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33375d2448a78c1aed9b8755f7939a6b6f19e2fa80f44f4930a5b4c2bb7cbb44" +checksum = "fc3399f1981164c3c687ea15b1eedd35a16f28069c845a24530de21f996f3fdd" 
dependencies = [ "derivative", "serde", - "zk_evm 0.150.19", + "zk_evm 0.150.20", "zkevm_circuits", ] [[package]] name = "circuit_sequencer_api" -version = "0.150.19" +version = "0.150.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca2fec5c28e5a9f085279e70e13b2eebb63a95ee0bfb99d58095ac01c1c7b256" +checksum = "b5583037ec61607ac481b0c887b7fb4f860e65c92c6f3f7be74f6bab7c40c3ce" dependencies = [ "derivative", "rayon", "serde", - "zk_evm 0.150.19", + "zk_evm 0.150.20", "zksync_bellman", ] @@ -2364,7 +2364,7 @@ dependencies = [ [[package]] name = "custom_genesis_export" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "bincode", @@ -3155,9 +3155,9 @@ dependencies = [ [[package]] name = "fflonk" -version = "0.30.12" +version = "0.30.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e63d70f1cbf9e572ccaf22ca1dfce4b93ff48b9a5e8dd70de50d87edb960d173" +checksum = "b36c5fa909ab71b7eb4b8f7fd092f72ed83b93f2615e42f245ca808d8f308917" dependencies = [ "bincode", "byteorder", @@ -3444,9 +3444,9 @@ dependencies = [ [[package]] name = "franklin-crypto" -version = "0.30.12" +version = "0.30.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82d7b8e5864df7f3747e5e64a5b87b4a57aa2a4a20c55c9e96a3a305a8143c45" +checksum = "8309d8fc22fc389d831390473b0ee9fe94e85f19a8b9229b9aec8aa73f5bcee3" dependencies = [ "arr_macro", "bit-vec", @@ -3667,7 +3667,7 @@ dependencies = [ [[package]] name = "genesis_generator" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "clap 4.5.20", @@ -5202,7 +5202,7 @@ checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" [[package]] name = "loadnext" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -5389,7 +5389,7 @@ checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = 
"merkle_tree_consistency_checker" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "clap 4.5.20", @@ -7308,9 +7308,9 @@ dependencies = [ [[package]] name = "rescue_poseidon" -version = "0.30.12" +version = "0.30.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c250446885c257bee70bc0f2600229ce72f03073b87fb8f5dd278dba16b11f30" +checksum = "5e631fd184b6d2f2c04f9dc75405289d99fd0d6612d8dfbb478c01bfbab648fb" dependencies = [ "addchain", "arrayvec 0.7.6", @@ -8072,7 +8072,7 @@ dependencies = [ [[package]] name = "selector_generator" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "clap 4.5.20", @@ -8758,7 +8758,7 @@ dependencies = [ [[package]] name = "snapshots_creator" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "futures 0.3.31", @@ -8779,9 +8779,9 @@ dependencies = [ [[package]] name = "snark_wrapper" -version = "0.30.12" +version = "0.30.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f361c2c47b71ee43f62954ce69f7730e14acb7fb3b0f2c697da02f97327c569" +checksum = "eddb498315057210abd25e2fbe2ea30ab69a07ca0c166406a3e7c056ec8fbbfd" dependencies = [ "derivative", "rand 0.4.6", @@ -9518,7 +9518,7 @@ dependencies = [ [[package]] name = "system-constants-generator" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "codegen", "once_cell", @@ -10461,7 +10461,7 @@ checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" [[package]] name = "verified_sources_fetcher" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "serde_json", @@ -10518,7 +10518,7 @@ dependencies = [ [[package]] name = "vm-benchmark" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "assert_matches", "criterion", @@ -11169,9 +11169,9 @@ dependencies = [ [[package]] name = "zk_evm" -version = "0.150.19" +version = "0.150.20" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "84ee848aa90ae045457795b1c0afeb388fbd9fa1e57aa0e8791b28f405e7cc2c" +checksum = "f11d0310228af78e804e5e7deccd1ad6797fce1c44c3b8016722ab78dc183c4a" dependencies = [ "anyhow", "lazy_static", @@ -11179,7 +11179,7 @@ dependencies = [ "serde", "serde_json", "static_assertions", - "zk_evm_abstractions 0.150.19", + "zk_evm_abstractions 0.150.20", ] [[package]] @@ -11210,22 +11210,22 @@ dependencies = [ [[package]] name = "zk_evm_abstractions" -version = "0.150.19" +version = "0.150.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f08feaa3e3d99e1e57234fe6ba2aa062609492c6499b2344121c4a699292ab7" +checksum = "d7616edbdeeeb214211e9bdc4346b6a62c6c6118c3d2b83b7db24c01f65f6e25" dependencies = [ "anyhow", "num_enum 0.6.1", "serde", "static_assertions", - "zkevm_opcode_defs 0.150.19", + "zkevm_opcode_defs 0.150.20", ] [[package]] name = "zkevm_circuits" -version = "0.150.19" +version = "0.150.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "760cfbbce18f42bbecd2565de9bf658234cac2431cce9b0c1df08e9df645d467" +checksum = "6f36004572f5086c513715e11f38230e2538c159d4f5d90dc518833c6fc78293" dependencies = [ "arrayvec 0.7.6", "boojum", @@ -11237,7 +11237,7 @@ dependencies = [ "seq-macro", "serde", "smallvec", - "zkevm_opcode_defs 0.150.19", + "zkevm_opcode_defs 0.150.20", "zksync_cs_derive", ] @@ -11285,9 +11285,9 @@ dependencies = [ [[package]] name = "zkevm_opcode_defs" -version = "0.150.19" +version = "0.150.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f2bd8ef52c8f9911dd034b91d29f087ab52f80a80f9d996deb881abbb953793" +checksum = "ce6b4a47c0e7f95b51d29ca336821321cec4bbba0acdd412c3a209270a0d37fe" dependencies = [ "bitflags 2.6.0", "blake2 0.10.6", @@ -11302,7 +11302,7 @@ dependencies = [ [[package]] name = "zksync_base_token_adjuster" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", 
"async-trait", @@ -11324,7 +11324,7 @@ dependencies = [ [[package]] name = "zksync_basic_types" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "bincode", @@ -11346,9 +11346,9 @@ dependencies = [ [[package]] name = "zksync_bellman" -version = "0.30.12" +version = "0.30.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d06d424f7e3862d7a6715179bafffbe7a5dce17129f95ac4124502ab9f1edfb8" +checksum = "78fc3c598daf718b6fc791bfbb01c4634199e479ea9b2c82d06cd108b967d441" dependencies = [ "arrayvec 0.7.6", "bit-vec", @@ -11369,7 +11369,7 @@ dependencies = [ [[package]] name = "zksync_block_reverter" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -11393,7 +11393,7 @@ dependencies = [ [[package]] name = "zksync_circuit_breaker" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -11407,7 +11407,7 @@ dependencies = [ [[package]] name = "zksync_commitment_generator" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "circuit_encodings", @@ -11420,7 +11420,7 @@ dependencies = [ "tokio", "tracing", "vise", - "zk_evm 0.150.19", + "zk_evm 0.150.20", "zksync_contracts", "zksync_dal", "zksync_eth_client", @@ -11436,9 +11436,9 @@ dependencies = [ [[package]] name = "zksync_concurrency" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8312ab73d3caa55775bd531795b507fa8f76bd9dabfaeb0954fe43e8fc1323b" +checksum = "cec98400a9e8ba02bfd029eacfe7d6fb7b85b8ef00de59d6bb119d29cc9f7442" dependencies = [ "anyhow", "once_cell", @@ -11455,7 +11455,7 @@ dependencies = [ [[package]] name = "zksync_config" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "rand 0.8.5", @@ -11472,9 +11472,9 @@ dependencies = [ [[package]] name = "zksync_consensus_bft" -version = "0.7.0" +version = "0.8.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb6b0944322f30f88cd7fb22f7875435b394a135fc1b479719a18c42d9fb724d" +checksum = "0fa086aeb444d3d0122014fca06959e5c1be507d63596022bd28b8cdcc5cc687" dependencies = [ "anyhow", "async-trait", @@ -11494,9 +11494,9 @@ dependencies = [ [[package]] name = "zksync_consensus_crypto" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b539960de98df3c3bd27d2d9b97de862027686bbb3bdfc5aaad5b74bb929a1" +checksum = "c04840825dfbe3b9f708d245c87618d5dcf28f29d7b58922971351068a0b8231" dependencies = [ "anyhow", "blst", @@ -11515,9 +11515,9 @@ dependencies = [ [[package]] name = "zksync_consensus_executor" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a75d86368579d5aa59b1baebbdc1aebca7c9234f3e3cba734db7e9bbc4880b0" +checksum = "d6d369ec72851aecdfb24c99ecb50b7c177f0ce7068bb84a17a294a26fb92fab" dependencies = [ "anyhow", "async-trait", @@ -11537,9 +11537,9 @@ dependencies = [ [[package]] name = "zksync_consensus_network" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30f73993b7d677dfd4e4f2598dd20906e6a5f3a2168c6cab3a599c056dc5e39a" +checksum = "6a74ed5a9a48d403b48c7ed0dea8cf2cd239e407227657aac27d75d00c3e4bcc" dependencies = [ "anyhow", "async-trait", @@ -11574,9 +11574,9 @@ dependencies = [ [[package]] name = "zksync_consensus_roles" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c49949546895a10431b9daec6ec4208ef0917ace006446d304b51f5b234ba462" +checksum = "05498eab1de26869028b5822cfa4490cac625508d427d59668dc73e8162de65f" dependencies = [ "anyhow", "bit-vec", @@ -11596,9 +11596,9 @@ dependencies = [ [[package]] name = "zksync_consensus_storage" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "feb0d6a54e7d8d2adeee4ba38662161e9309180ad497299092e5641db9fb1c1e" +checksum = "b20eb99fdd0e171a370214d2b7c99b5d4e8c11b9828a6b5705423bf653849a70" dependencies = [ "anyhow", "async-trait", @@ -11616,9 +11616,9 @@ dependencies = [ [[package]] name = "zksync_consensus_utils" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "723e2a4b056cc5af192a83163c89a6951ee75c098cc5c4a4cdc435f4232d88bd" +checksum = "f2f9fa69ef68e6a1955a1d7b33077103fb6d106b560fec0d599c6de268f5be03" dependencies = [ "anyhow", "rand 0.8.5", @@ -11628,7 +11628,7 @@ dependencies = [ [[package]] name = "zksync_consistency_checker" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -11653,7 +11653,7 @@ dependencies = [ [[package]] name = "zksync_contract_verification_server" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "axum 0.7.7", @@ -11672,7 +11672,7 @@ dependencies = [ [[package]] name = "zksync_contract_verifier" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "clap 4.5.20", @@ -11689,7 +11689,7 @@ dependencies = [ [[package]] name = "zksync_contract_verifier_lib" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -11721,7 +11721,7 @@ dependencies = [ [[package]] name = "zksync_contracts" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "bincode", "envy", @@ -11735,7 +11735,7 @@ dependencies = [ [[package]] name = "zksync_core_leftovers" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "ctrlc", @@ -11749,7 +11749,7 @@ dependencies = [ [[package]] name = "zksync_crypto_primitives" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "blake2 0.10.6", @@ -11765,9 +11765,9 @@ dependencies = [ [[package]] name = "zksync_cs_derive" -version = "0.30.12" +version 
= "0.30.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23237b019a469bfa59c11108beff84a63a43f52fa3afbf1b461527031fc47644" +checksum = "97ab7469afcd9e1cb220fe17b3c9f2abe031648b94add97da37065c58be08554" dependencies = [ "proc-macro-error", "proc-macro2 1.0.89", @@ -11777,7 +11777,7 @@ dependencies = [ [[package]] name = "zksync_da_client" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -11786,7 +11786,7 @@ dependencies = [ [[package]] name = "zksync_da_clients" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "ark-bn254", @@ -11842,7 +11842,7 @@ dependencies = [ [[package]] name = "zksync_da_dispatcher" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "chrono", @@ -11859,7 +11859,7 @@ dependencies = [ [[package]] name = "zksync_dal" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "bigdecimal", @@ -11895,7 +11895,7 @@ dependencies = [ [[package]] name = "zksync_db_connection" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -11913,7 +11913,7 @@ dependencies = [ [[package]] name = "zksync_env_config" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "envy", @@ -11925,7 +11925,7 @@ dependencies = [ [[package]] name = "zksync_eth_client" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "assert_matches", "async-trait", @@ -11947,13 +11947,12 @@ dependencies = [ [[package]] name = "zksync_eth_sender" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "assert_matches", "async-trait", "chrono", - "once_cell", "serde", "test-casing", "test-log", @@ -11978,7 +11977,7 @@ dependencies = [ [[package]] name = "zksync_eth_signer" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "async-trait", "rlp", @@ -11989,7 +11988,7 
@@ dependencies = [ [[package]] name = "zksync_eth_watch" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "async-recursion", @@ -12003,6 +12002,7 @@ dependencies = [ "tracing", "vise", "zksync_concurrency", + "zksync_config", "zksync_contracts", "zksync_dal", "zksync_eth_client", @@ -12015,7 +12015,7 @@ dependencies = [ [[package]] name = "zksync_external_node" -version = "25.4.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -12069,7 +12069,7 @@ dependencies = [ [[package]] name = "zksync_external_price_api" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -12090,7 +12090,7 @@ dependencies = [ [[package]] name = "zksync_external_proof_integration_api" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -12108,9 +12108,9 @@ dependencies = [ [[package]] name = "zksync_ff" -version = "0.30.12" +version = "0.30.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d5aa518ed0ea7ef737d50de02025f5a593dbb11104b3c1bf5a00f39581b47dc" +checksum = "6583c2db6dc787600879d27ec98d2eb628a757ee41831e54f8be1dae4acc599f" dependencies = [ "byteorder", "hex", @@ -12121,9 +12121,9 @@ dependencies = [ [[package]] name = "zksync_ff_derive" -version = "0.30.12" +version = "0.30.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e33b43100a1278e2f64820368db8751c2441860ea74ab5749074cf8f864647af" +checksum = "8f62e93dde881d8dd44d1864c7682394dde6d18e582fc5af78768221a1766fdf" dependencies = [ "num-bigint 0.4.6", "num-integer", @@ -12136,7 +12136,7 @@ dependencies = [ [[package]] name = "zksync_health_check" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "assert_matches", "async-trait", @@ -12151,7 +12151,7 @@ dependencies = [ [[package]] name = "zksync_house_keeper" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies 
= [ "anyhow", "async-trait", @@ -12166,9 +12166,9 @@ dependencies = [ [[package]] name = "zksync_kzg" -version = "0.150.19" +version = "0.150.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9da880b8282a97d9dfd6ac9f0189d310c0602059a8de20aa66a883979d6adba" +checksum = "174f82592590901cbcf2b298059c89f817b404299ffbd050a3915ea72357f545" dependencies = [ "boojum", "derivative", @@ -12183,7 +12183,7 @@ dependencies = [ [[package]] name = "zksync_l1_contract_interface" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "circuit_definitions", @@ -12204,7 +12204,7 @@ dependencies = [ [[package]] name = "zksync_logs_bloom_backfill" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "tokio", @@ -12216,7 +12216,7 @@ dependencies = [ [[package]] name = "zksync_mempool" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "tracing", "zksync_types", @@ -12224,7 +12224,7 @@ dependencies = [ [[package]] name = "zksync_merkle_tree" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -12253,7 +12253,7 @@ dependencies = [ [[package]] name = "zksync_metadata_calculator" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -12287,7 +12287,7 @@ dependencies = [ [[package]] name = "zksync_mini_merkle_tree" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "criterion", "once_cell", @@ -12297,7 +12297,7 @@ dependencies = [ [[package]] name = "zksync_multivm" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -12316,7 +12316,7 @@ dependencies = [ "zk_evm 0.133.0", "zk_evm 0.140.0", "zk_evm 0.141.0", - "zk_evm 0.150.19", + "zk_evm 0.150.20", "zksync_contracts", "zksync_eth_signer", "zksync_mini_merkle_tree", @@ -12329,7 +12329,7 @@ dependencies = [ [[package]] name = "zksync_node_api_server" 
-version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -12356,7 +12356,7 @@ dependencies = [ "tower-http 0.5.2", "tracing", "vise", - "zk_evm 0.150.19", + "zk_evm 0.150.20", "zksync_config", "zksync_consensus_roles", "zksync_contracts", @@ -12383,7 +12383,7 @@ dependencies = [ [[package]] name = "zksync_node_consensus" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -12427,7 +12427,7 @@ dependencies = [ [[package]] name = "zksync_node_db_pruner" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -12449,7 +12449,7 @@ dependencies = [ [[package]] name = "zksync_node_fee_model" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -12460,13 +12460,15 @@ dependencies = [ "zksync_config", "zksync_dal", "zksync_eth_client", + "zksync_node_genesis", + "zksync_node_test_utils", "zksync_types", "zksync_web3_decl", ] [[package]] name = "zksync_node_framework" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -12529,7 +12531,7 @@ dependencies = [ [[package]] name = "zksync_node_framework_derive" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "proc-macro2 1.0.89", "quote 1.0.37", @@ -12538,7 +12540,7 @@ dependencies = [ [[package]] name = "zksync_node_genesis" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "bincode", @@ -12559,7 +12561,7 @@ dependencies = [ [[package]] name = "zksync_node_storage_init" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -12581,7 +12583,7 @@ dependencies = [ [[package]] name = "zksync_node_sync" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -12616,7 +12618,7 @@ dependencies = [ [[package]] name = 
"zksync_node_test_utils" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "zksync_contracts", "zksync_dal", @@ -12628,7 +12630,7 @@ dependencies = [ [[package]] name = "zksync_object_store" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -12653,9 +12655,9 @@ dependencies = [ [[package]] name = "zksync_pairing" -version = "0.30.12" +version = "0.30.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43f0d96f3e386f3b4c76a614d73b71714d6712e917d462bf8053b8af352da0b3" +checksum = "baafdd03ca7a48dc9b6808be3630f2d8a003aa425d71946e9158d8c0aeb1cc79" dependencies = [ "byteorder", "cfg-if", @@ -12666,7 +12668,7 @@ dependencies = [ [[package]] name = "zksync_proof_data_handler" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "axum 0.7.7", @@ -12690,9 +12692,9 @@ dependencies = [ [[package]] name = "zksync_protobuf" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8986ad796f8e00d8999fee72effba1a21bce40f5f877d681ac9cd89a94834d8" +checksum = "d9032e12528c2466293b206d6edb53b7e900e4a4cc4573e4d075ac2dc00e1b55" dependencies = [ "anyhow", "bit-vec", @@ -12711,9 +12713,9 @@ dependencies = [ [[package]] name = "zksync_protobuf_build" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d870b31995e3acb8e47afeb68ebeeffcf6121e70020e65b3d5d31692115d236" +checksum = "7c644fc8ef3c4d343ea42cebd5551e3562933f15dd9b0e68a52c2657603eb0f5" dependencies = [ "anyhow", "heck 0.5.0", @@ -12728,7 +12730,7 @@ dependencies = [ [[package]] name = "zksync_protobuf_config" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "hex", @@ -12748,7 +12750,7 @@ dependencies = [ [[package]] name = "zksync_prover_interface" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "bincode", 
"chrono", @@ -12760,6 +12762,7 @@ dependencies = [ "serde_with", "strum", "tokio", + "zksync_bellman", "zksync_object_store", "zksync_types", "zksync_vm_interface", @@ -12767,7 +12770,7 @@ dependencies = [ [[package]] name = "zksync_queued_job_processor" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -12779,7 +12782,7 @@ dependencies = [ [[package]] name = "zksync_reorg_detector" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -12800,7 +12803,7 @@ dependencies = [ [[package]] name = "zksync_server" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "clap 4.5.20", @@ -12830,7 +12833,7 @@ dependencies = [ [[package]] name = "zksync_shared_metrics" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "rustc_version 0.4.1", "serde", @@ -12842,7 +12845,7 @@ dependencies = [ [[package]] name = "zksync_snapshots_applier" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -12864,9 +12867,9 @@ dependencies = [ [[package]] name = "zksync_solidity_vk_codegen" -version = "0.30.12" +version = "0.30.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb10f377dcc24fe2268cc5f530c16af1c879a791570d8fe64064b58ba143c7cc" +checksum = "bb05a12f5552d7947427f755e29f548ce94733851f1fa16edaf8b75c28033e73" dependencies = [ "ethereum-types", "franklin-crypto", @@ -12881,7 +12884,7 @@ dependencies = [ [[package]] name = "zksync_state" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -12906,7 +12909,7 @@ dependencies = [ [[package]] name = "zksync_state_keeper" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -12946,7 +12949,7 @@ dependencies = [ [[package]] name = "zksync_storage" -version = "0.1.0" +version = "26.1.0-non-semver-compat" 
dependencies = [ "num_cpus", "once_cell", @@ -12959,7 +12962,7 @@ dependencies = [ [[package]] name = "zksync_system_constants" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "once_cell", "zksync_basic_types", @@ -12967,7 +12970,7 @@ dependencies = [ [[package]] name = "zksync_tee_prover" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -12992,7 +12995,7 @@ dependencies = [ [[package]] name = "zksync_tee_verifier" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "bincode", @@ -13010,7 +13013,7 @@ dependencies = [ [[package]] name = "zksync_test_contracts" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "ethabi", "foundry-compilers", @@ -13026,10 +13029,11 @@ dependencies = [ [[package]] name = "zksync_types" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "assert_matches", + "async-trait", "bigdecimal", "bincode", "blake2 0.10.6", @@ -13060,7 +13064,7 @@ dependencies = [ [[package]] name = "zksync_utils" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -13076,7 +13080,7 @@ dependencies = [ [[package]] name = "zksync_vlog" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "chrono", @@ -13106,8 +13110,8 @@ source = "git+https://github.com/matter-labs/vm2.git?rev=457d8a7eea9093af9440662 dependencies = [ "enum_dispatch", "primitive-types", - "zk_evm_abstractions 0.150.19", - "zkevm_opcode_defs 0.150.19", + "zk_evm_abstractions 0.150.20", + "zkevm_opcode_defs 0.150.20", "zksync_vm2_interface", ] @@ -13121,7 +13125,7 @@ dependencies = [ [[package]] name = "zksync_vm_executor" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -13139,7 +13143,7 @@ dependencies = [ [[package]] name = "zksync_vm_interface" -version = "0.1.0" +version = 
"26.1.0-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -13157,7 +13161,7 @@ dependencies = [ [[package]] name = "zksync_vm_runner" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -13191,7 +13195,7 @@ dependencies = [ [[package]] name = "zksync_web3_decl" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "assert_matches", diff --git a/core/Cargo.toml b/core/Cargo.toml new file mode 100644 index 000000000000..8b7113cb84e3 --- /dev/null +++ b/core/Cargo.toml @@ -0,0 +1,331 @@ +[workspace] +members = [ + # Binaries + "bin/block_reverter", + "bin/contract-verifier", + "bin/custom_genesis_export", + "bin/external_node", + "bin/merkle_tree_consistency_checker", + "bin/snapshots_creator", + "bin/selector_generator", + "bin/system-constants-generator", + "bin/verified_sources_fetcher", + "bin/zksync_server", + "bin/genesis_generator", + "bin/zksync_tee_prover", + # Node services + "node/node_framework", + "node/proof_data_handler", + "node/block_reverter", + "node/commitment_generator", + "node/house_keeper", + "node/genesis", + "node/shared_metrics", + "node/db_pruner", + "node/fee_model", + "node/da_dispatcher", + "node/eth_sender", + "node/vm_runner", + "node/test_utils", + "node/state_keeper", + "node/reorg_detector", + "node/consistency_checker", + "node/metadata_calculator", + "node/node_sync", + "node/node_storage_init", + "node/consensus", + "node/contract_verification_server", + "node/api_server", + "node/base_token_adjuster", + "node/external_proof_integration_api", + "node/logs_bloom_backfill", + "node/da_clients", + # Libraries + "lib/db_connection", + "lib/zksync_core_leftovers", + "lib/basic_types", + "lib/config", + "lib/constants", + "lib/contract_verifier", + "lib/contracts", + "lib/circuit_breaker", + "lib/dal", + "lib/env_config", + "lib/da_client", + "lib/eth_client", + "lib/eth_signer", + "lib/l1_contract_interface", + "lib/mempool", + 
"lib/merkle_tree", + "lib/mini_merkle_tree", + "lib/node_framework_derive", + "lib/object_store", + "lib/prover_interface", + "lib/queued_job_processor", + "lib/state", + "lib/storage", + "lib/tee_verifier", + "lib/types", + "lib/protobuf_config", + "lib/utils", + "lib/vlog", + "lib/multivm", + "lib/vm_interface", + "lib/vm_executor", + "lib/web3_decl", + "lib/snapshots_applier", + "lib/crypto_primitives", + "lib/external_price_api", + "lib/test_contracts", + # Test infrastructure + "tests/loadnext", + "tests/vm-benchmark", +] +resolver = "2" + +exclude = [] + +# for `perf` profiling +[profile.perf] +inherits = "release" +debug = true + +[workspace.package] +version = "26.1.0-non-semver-compat" +edition = "2021" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync-era" +license = "MIT OR Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] + +[workspace.dependencies] +# "External" dependencies +anyhow = "1" +assert_matches = "1.5" +async-trait = "0.1" +async-recursion = "1" +axum = "0.7.5" +backon = "0.4.4" +bigdecimal = "0.4.5" +bincode = "1" +blake2 = "0.10" +bytes = "1" +chrono = "0.4" +clap = "4.2.2" +codegen = "0.2.0" +const-decoder = "0.4.0" +criterion = "0.4.0" +ctrlc = "3.1" +dashmap = "5.5.3" +derive_more = "1.0.0" +envy = "0.4" +ethabi = "18.0.0" +flate2 = "1.0.28" +fraction = "0.15.3" +futures = "0.3" +futures-util = "0.3" +glob = "0.3" +google-cloud-auth = "0.16.0" +google-cloud-storage = "0.20.0" +governor = "0.4.2" +hex = "0.4" +http = "1.1" +http-body-util = "0.1.2" +httpmock = "0.7.0" +hyper = "1.3" +insta = "1.29.0" +itertools = "0.10" +jsonrpsee = { version = "0.23", default-features = false } +leb128 = "0.2.5" +lru = { version = "0.12.1", default-features = false } +mini-moka = "0.10.0" +num = "0.4.0" +num_cpus = "1.13" +num_enum = "0.7.2" +octocrab = "0.41" +once_cell = "1" +opentelemetry = "0.24.0" +opentelemetry_sdk = "0.24.0" +opentelemetry-otlp = 
"0.17.0" +opentelemetry-semantic-conventions = "0.16.0" +opentelemetry-appender-tracing = "0.5" +pin-project-lite = "0.2.13" +pretty_assertions = "1" +prost = "0.12.6" +rand = "0.8" +rayon = "1.3.1" +regex = "1" +reqwest = "0.12" +rlp = "0.5" +rocksdb = "0.21" +rustc_version = "0.4.0" +rustls = "0.23" +secp256k1 = { version = "0.27.0", features = ["recovery", "global-context"] } +secrecy = "0.8.0" +semver = "1" +sentry = "0.31" +serde = "1" +serde_json = "1" +serde_with = "1" +serde_yaml = "0.9" +sha2 = "0.10.8" +sha3 = "0.10.8" +sqlx = "0.8.1" +static_assertions = "1.1" +structopt = "0.3.20" +strum = "0.26" +tempfile = "3.0.2" +test-casing = "0.1.2" +test-log = "0.2.15" +thiserror = "1" +thread_local = "1.1" +tikv-jemallocator = "0.5" +tiny-keccak = "2" +tokio = "1" +tower = "0.4.13" +tower-http = "0.5.2" +tracing = "0.1" +tracing-subscriber = "0.3" +tracing-opentelemetry = "0.25.0" +time = "0.3.36" # Has to be same as used by `tracing-subscriber` +url = "2" +web3 = "0.19.0" +yab = "0.1.0" + +# Proc-macro +syn = "2.0" +quote = "1.0" +proc-macro2 = "1.0" +trybuild = "1.0" + +# "Internal" dependencies +vise = "0.2.0" +vise-exporter = "0.2.0" +foundry-compilers = { version = "0.11.6", git = "https://github.com/Moonsong-Labs/compilers.git", rev = "7c69695e5c75451f158dd2456bf8c94a7492ea0b" } + +# DA clients' dependencies +# Avail +base58 = "0.2.0" +scale-encode = "0.5.0" +blake2b_simd = "1.0.2" +subxt-metadata = "0.34.0" +parity-scale-codec = { version = "3.6.9", default-features = false } +subxt-signer = { version = "0.34", default-features = false } + +# Celestia +celestia-types = "0.6.1" +bech32 = "0.11.0" +ripemd = "0.1.3" +tonic = { version = "0.11.0", default-features = false } +pbjson-types = "0.6.0" + +# Eigen +tokio-stream = "0.1.16" +rust-kzg-bn254 = "0.2.1" +ark-bn254 = "0.5.0" +num-bigint = "0.4.6" +serial_test = { version = "3.1.1", features = ["file_locks"] } + +# Here and below: +# We *always* pin the latest version of protocol to disallow accidental 
changes in the execution logic. +# However, for the historical version of protocol crates, we have lax requirements. Otherwise, +# Bumping a crypto dependency like `boojum` would require us to republish all the historical packages. +circuit_encodings = "=0.150.20" +circuit_sequencer_api = "=0.150.20" +circuit_definitions = "=0.150.20" +crypto_codegen = { package = "zksync_solidity_vk_codegen",version = "=0.30.13" } +kzg = { package = "zksync_kzg", version = "=0.150.20" } +zk_evm = { version = "=0.133.0" } +zk_evm_1_3_1 = { package = "zk_evm", version = "0.131.0-rc.2" } +zk_evm_1_3_3 = { package = "zk_evm", version = "0.133" } +zk_evm_1_4_0 = { package = "zk_evm", version = "0.140" } +zk_evm_1_4_1 = { package = "zk_evm", version = "0.141" } +zk_evm_1_5_0 = { package = "zk_evm", version = "=0.150.20" } +fflonk = "=0.30.13" +bellman = {package = "zksync_bellman", version = "=0.30.13"} + +# New VM; pinned to a specific commit because of instability +zksync_vm2 = { git = "https://github.com/matter-labs/vm2.git", rev = "457d8a7eea9093af9440662e33e598c13ba41633" } + +# Consensus dependencies. 
+zksync_concurrency = "=0.8.0" +zksync_consensus_bft = "=0.8.0" +zksync_consensus_crypto = "=0.8.0" +zksync_consensus_executor = "=0.8.0" +zksync_consensus_network = "=0.8.0" +zksync_consensus_roles = "=0.8.0" +zksync_consensus_storage = "=0.8.0" +zksync_consensus_utils = "=0.8.0" +zksync_protobuf = "=0.8.0" +zksync_protobuf_build = "=0.8.0" + +# "Local" dependencies +zksync_multivm = { version = "=26.1.0-non-semver-compat", path = "lib/multivm" } +zksync_vlog = { version = "=26.1.0-non-semver-compat", path = "lib/vlog" } +zksync_vm_interface = { version = "=26.1.0-non-semver-compat", path = "lib/vm_interface" } +zksync_vm_executor = { version = "=26.1.0-non-semver-compat", path = "lib/vm_executor" } +zksync_basic_types = { version = "=26.1.0-non-semver-compat", path = "lib/basic_types" } +zksync_circuit_breaker = { version = "=26.1.0-non-semver-compat", path = "lib/circuit_breaker" } +zksync_config = { version = "=26.1.0-non-semver-compat", path = "lib/config" } +zksync_contract_verifier_lib = { version = "=26.1.0-non-semver-compat", path = "lib/contract_verifier" } +zksync_contracts = { version = "=26.1.0-non-semver-compat", path = "lib/contracts" } +zksync_core_leftovers = { version = "=26.1.0-non-semver-compat", path = "lib/zksync_core_leftovers" } +zksync_dal = { version = "=26.1.0-non-semver-compat", path = "lib/dal" } +zksync_db_connection = { version = "=26.1.0-non-semver-compat", path = "lib/db_connection" } +zksync_env_config = { version = "=26.1.0-non-semver-compat", path = "lib/env_config" } +zksync_eth_client = { version = "=26.1.0-non-semver-compat", path = "lib/eth_client" } +zksync_da_client = { version = "=26.1.0-non-semver-compat", path = "lib/da_client" } +zksync_eth_signer = { version = "=26.1.0-non-semver-compat", path = "lib/eth_signer" } +zksync_health_check = { version = "=26.1.0-non-semver-compat", path = "lib/health_check" } +zksync_l1_contract_interface = { version = "=26.1.0-non-semver-compat", path = "lib/l1_contract_interface" } 
+zksync_mempool = { version = "=26.1.0-non-semver-compat", path = "lib/mempool" } +zksync_merkle_tree = { version = "=26.1.0-non-semver-compat", path = "lib/merkle_tree" } +zksync_bin_metadata = { version = "=26.1.0-non-semver-compat", path = "lib/bin_metadata" } +zksync_mini_merkle_tree = { version = "=26.1.0-non-semver-compat", path = "lib/mini_merkle_tree" } +zksync_object_store = { version = "=26.1.0-non-semver-compat", path = "lib/object_store" } +zksync_protobuf_config = { version = "=26.1.0-non-semver-compat", path = "lib/protobuf_config" } +zksync_prover_interface = { version = "=26.1.0-non-semver-compat", path = "lib/prover_interface" } +zksync_queued_job_processor = { version = "=26.1.0-non-semver-compat", path = "lib/queued_job_processor" } +zksync_snapshots_applier = { version = "=26.1.0-non-semver-compat", path = "lib/snapshots_applier" } +zksync_state = { version = "=26.1.0-non-semver-compat", path = "lib/state" } +zksync_storage = { version = "=26.1.0-non-semver-compat", path = "lib/storage" } +zksync_system_constants = { version = "=26.1.0-non-semver-compat", path = "lib/constants" } +zksync_tee_verifier = { version = "=26.1.0-non-semver-compat", path = "lib/tee_verifier" } +zksync_test_contracts = { version = "=26.1.0-non-semver-compat", path = "lib/test_contracts" } +zksync_types = { version = "=26.1.0-non-semver-compat", path = "lib/types" } +zksync_utils = { version = "=26.1.0-non-semver-compat", path = "lib/utils" } +zksync_web3_decl = { version = "=26.1.0-non-semver-compat", path = "lib/web3_decl" } +zksync_crypto_primitives = { version = "=26.1.0-non-semver-compat", path = "lib/crypto_primitives" } +zksync_external_price_api = { version = "=26.1.0-non-semver-compat", path = "lib/external_price_api" } + +# Framework and components +zksync_node_framework = { version = "=26.1.0-non-semver-compat", path = "node/node_framework" } +zksync_node_framework_derive = { version = "=26.1.0-non-semver-compat", path = "lib/node_framework_derive" } 
+zksync_eth_watch = { version = "=26.1.0-non-semver-compat", path = "node/eth_watch" } +zksync_shared_metrics = { version = "=26.1.0-non-semver-compat", path = "node/shared_metrics" } +zksync_proof_data_handler = { version = "=26.1.0-non-semver-compat", path = "node/proof_data_handler" } +zksync_block_reverter = { version = "=26.1.0-non-semver-compat", path = "node/block_reverter" } +zksync_commitment_generator = { version = "=26.1.0-non-semver-compat", path = "node/commitment_generator" } +zksync_house_keeper = { version = "=26.1.0-non-semver-compat", path = "node/house_keeper" } +zksync_node_genesis = { version = "=26.1.0-non-semver-compat", path = "node/genesis" } +zksync_da_dispatcher = { version = "=26.1.0-non-semver-compat", path = "node/da_dispatcher" } +zksync_da_clients = { version = "=26.1.0-non-semver-compat", path = "node/da_clients" } +zksync_eth_sender = { version = "=26.1.0-non-semver-compat", path = "node/eth_sender" } +zksync_node_db_pruner = { version = "=26.1.0-non-semver-compat", path = "node/db_pruner" } +zksync_node_fee_model = { version = "=26.1.0-non-semver-compat", path = "node/fee_model" } +zksync_vm_runner = { version = "=26.1.0-non-semver-compat", path = "node/vm_runner" } +zksync_external_proof_integration_api = { version = "=26.1.0-non-semver-compat", path = "node/external_proof_integration_api" } +zksync_node_test_utils = { version = "=26.1.0-non-semver-compat", path = "node/test_utils" } +zksync_state_keeper = { version = "=26.1.0-non-semver-compat", path = "node/state_keeper" } +zksync_reorg_detector = { version = "=26.1.0-non-semver-compat", path = "node/reorg_detector" } +zksync_consistency_checker = { version = "=26.1.0-non-semver-compat", path = "node/consistency_checker" } +zksync_metadata_calculator = { version = "=26.1.0-non-semver-compat", path = "node/metadata_calculator" } +zksync_node_sync = { version = "=26.1.0-non-semver-compat", path = "node/node_sync" } +zksync_node_storage_init = { version = 
"=26.1.0-non-semver-compat", path = "node/node_storage_init" } +zksync_node_consensus = { version = "=26.1.0-non-semver-compat", path = "node/consensus" } +zksync_contract_verification_server = { version = "=26.1.0-non-semver-compat", path = "node/contract_verification_server" } +zksync_node_api_server = { version = "=26.1.0-non-semver-compat", path = "node/api_server" } +zksync_base_token_adjuster = { version = "=26.1.0-non-semver-compat", path = "node/base_token_adjuster" } +zksync_logs_bloom_backfill = { version = "=26.1.0-non-semver-compat", path = "node/logs_bloom_backfill" } diff --git a/core/bin/external_node/Cargo.toml b/core/bin/external_node/Cargo.toml index 799108f30723..cb75a20c3ed7 100644 --- a/core/bin/external_node/Cargo.toml +++ b/core/bin/external_node/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "zksync_external_node" description = "Non-validator ZKsync node" -version = "25.4.0" # x-release-please-version +version.workspace = true edition.workspace = true authors.workspace = true homepage.workspace = true diff --git a/core/bin/external_node/src/config/mod.rs b/core/bin/external_node/src/config/mod.rs index db2c6eac9cf6..235802e1073b 100644 --- a/core/bin/external_node/src/config/mod.rs +++ b/core/bin/external_node/src/config/mod.rs @@ -128,6 +128,7 @@ pub(crate) struct RemoteENConfig { pub l2_weth_bridge_addr: Option
, pub l2_testnet_paymaster_addr: Option
, pub l2_timestamp_asserter_addr: Option
, + pub l1_wrapped_base_token_store: Option
, pub base_token_addr: Address, pub l1_batch_commit_data_generator_mode: L1BatchCommitmentMode, pub dummy_verifier: bool, @@ -195,6 +196,9 @@ impl RemoteENConfig { l1_bytecodes_supplier_addr: ecosystem_contracts .as_ref() .and_then(|a| a.l1_bytecodes_supplier_addr), + l1_wrapped_base_token_store: ecosystem_contracts + .as_ref() + .and_then(|a| a.l1_wrapped_base_token_store), l1_diamond_proxy_addr, l2_testnet_paymaster_addr, l1_erc20_bridge_proxy_addr: bridges.l1_erc20_default_bridge, @@ -235,6 +239,7 @@ impl RemoteENConfig { l2_shared_bridge_addr: Some(Address::repeat_byte(6)), l2_legacy_shared_bridge_addr: Some(Address::repeat_byte(7)), l1_batch_commit_data_generator_mode: L1BatchCommitmentMode::Rollup, + l1_wrapped_base_token_store: None, dummy_verifier: true, l2_timestamp_asserter_addr: None, } @@ -1477,6 +1482,7 @@ impl From<&ExternalNodeConfig> for InternalApiConfig { l2_weth_bridge: config.remote.l2_weth_bridge_addr, }, l1_bytecodes_supplier_addr: config.remote.l1_bytecodes_supplier_addr, + l1_wrapped_base_token_store: config.remote.l1_wrapped_base_token_store, l1_bridgehub_proxy_addr: config.remote.l1_bridgehub_proxy_addr, l1_state_transition_proxy_addr: config.remote.l1_state_transition_proxy_addr, l1_transparent_proxy_admin_addr: config.remote.l1_transparent_proxy_admin_addr, diff --git a/core/bin/merkle_tree_consistency_checker/Cargo.toml b/core/bin/merkle_tree_consistency_checker/Cargo.toml index eb7dcd81a0dc..f915f321f139 100644 --- a/core/bin/merkle_tree_consistency_checker/Cargo.toml +++ b/core/bin/merkle_tree_consistency_checker/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "merkle_tree_consistency_checker" description = "Tool to verify consistency of ZKsync Merkle Tree" -version = "0.1.0" +version.workspace = true edition.workspace = true authors.workspace = true homepage.workspace = true diff --git a/core/bin/selector_generator/Cargo.toml b/core/bin/selector_generator/Cargo.toml index b3425c11b4ec..28b3983605b3 100644 --- 
a/core/bin/selector_generator/Cargo.toml +++ b/core/bin/selector_generator/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "selector_generator" -version = "0.1.0" +version.workspace = true edition.workspace = true authors.workspace = true homepage.workspace = true diff --git a/core/bin/snapshots_creator/Cargo.toml b/core/bin/snapshots_creator/Cargo.toml index 5a36c646e88e..aa2dde097240 100644 --- a/core/bin/snapshots_creator/Cargo.toml +++ b/core/bin/snapshots_creator/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "snapshots_creator" description = "Tool to create ZKsync state snapshots" -version = "0.1.0" +version.workspace = true edition.workspace = true authors.workspace = true homepage.workspace = true diff --git a/core/bin/system-constants-generator/Cargo.toml b/core/bin/system-constants-generator/Cargo.toml index 7177d29ca743..d3b600ba258f 100644 --- a/core/bin/system-constants-generator/Cargo.toml +++ b/core/bin/system-constants-generator/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "system-constants-generator" description = "Tool for generating JSON files with the system constants for L1/L2 contracts" -version = "0.1.0" +version.workspace = true edition.workspace = true authors.workspace = true homepage.workspace = true diff --git a/core/bin/system-constants-generator/src/main.rs b/core/bin/system-constants-generator/src/main.rs index cd795f9f5326..69152545bdfe 100644 --- a/core/bin/system-constants-generator/src/main.rs +++ b/core/bin/system-constants-generator/src/main.rs @@ -212,7 +212,7 @@ fn generate_rust_fee_constants(intrinsic_gas_constants: &IntrinsicSystemGasConst } fn save_file(path_in_repo: &str, content: String) { - let zksync_home = Workspace::locate().core(); + let zksync_home = Workspace::locate().root(); let fee_constants_path = zksync_home.join(path_in_repo); fs::write(fee_constants_path, content) diff --git a/core/bin/verified_sources_fetcher/Cargo.toml b/core/bin/verified_sources_fetcher/Cargo.toml index 5fa90590ed5f..b143cafdbaee 100644 --- 
a/core/bin/verified_sources_fetcher/Cargo.toml +++ b/core/bin/verified_sources_fetcher/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "verified_sources_fetcher" description = "Tool to fetch verified contract sources" -version = "0.1.0" +version.workspace = true edition.workspace = true authors.workspace = true homepage.workspace = true diff --git a/core/bin/zksync_server/Cargo.toml b/core/bin/zksync_server/Cargo.toml index 4cf028be8210..e5eeeb0c79a9 100644 --- a/core/bin/zksync_server/Cargo.toml +++ b/core/bin/zksync_server/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "zksync_server" description = "ZKsync validator/sequencer node" -version = "0.1.0" +version.workspace = true edition.workspace = true authors.workspace = true homepage.workspace = true diff --git a/core/bin/zksync_server/src/node_builder.rs b/core/bin/zksync_server/src/node_builder.rs index cfafbbcd3ed4..7838fc3c63a9 100644 --- a/core/bin/zksync_server/src/node_builder.rs +++ b/core/bin/zksync_server/src/node_builder.rs @@ -72,7 +72,10 @@ use zksync_node_framework::{ service::{ZkStackService, ZkStackServiceBuilder}, }; use zksync_types::{ - pubdata_da::PubdataSendingMode, settlement::SettlementMode, SHARED_BRIDGE_ETHER_TOKEN_ADDRESS, + commitment::{L1BatchCommitmentMode, PubdataType}, + pubdata_da::PubdataSendingMode, + settlement::SettlementMode, + SHARED_BRIDGE_ETHER_TOKEN_ADDRESS, }; use zksync_vlog::prometheus::PrometheusExporterConfig; @@ -118,6 +121,24 @@ impl MainNodeBuilder { self.node.runtime_handle() } + pub fn get_pubdata_type(&self) -> anyhow::Result { + if self.genesis_config.l1_batch_commit_data_generator_mode == L1BatchCommitmentMode::Rollup + { + return Ok(PubdataType::Rollup); + } + + match self.configs.da_client_config.clone() { + None => Err(anyhow::anyhow!("No config for DA client")), + Some(da_client_config) => Ok(match da_client_config { + DAClientConfig::Avail(_) => PubdataType::Avail, + DAClientConfig::Celestia(_) => PubdataType::Celestia, + DAClientConfig::Eigen(_) => 
PubdataType::Eigen, + DAClientConfig::ObjectStore(_) => PubdataType::ObjectStore, + DAClientConfig::NoDA => PubdataType::NoDA, + }), + } + } + fn add_sigint_handler_layer(mut self) -> anyhow::Result { self.node.add_layer(SigintHandlerLayer); Ok(self) @@ -252,7 +273,7 @@ impl MainNodeBuilder { try_load_config!(self.configs.mempool_config), try_load_config!(wallets.state_keeper), self.contracts_config.l2_da_validator_addr, - self.genesis_config.l1_batch_commit_data_generator_mode, + self.get_pubdata_type()?, ); let db_config = try_load_config!(self.configs.db_config); let experimental_vm_config = self @@ -530,11 +551,22 @@ impl MainNodeBuilder { } fn add_da_client_layer(mut self) -> anyhow::Result { + let eth_sender_config = try_load_config!(self.configs.eth); + if let Some(sender_config) = eth_sender_config.sender { + if sender_config.pubdata_sending_mode != PubdataSendingMode::Custom { + tracing::warn!("DA dispatcher is enabled, but the pubdata sending mode is not `Custom`. DA client will not be started."); + return Ok(self); + } + } + let Some(da_client_config) = self.configs.da_client_config.clone() else { - tracing::warn!("No config for DA client, using the NoDA client"); + bail!("No config for DA client"); + }; + + if let DAClientConfig::NoDA = da_client_config { self.node.add_layer(NoDAClientWiringLayer); return Ok(self); - }; + } let secrets = try_load_config!(self.secrets.data_availability); let l1_secrets = try_load_config!(self.secrets.l1); diff --git a/core/bin/zksync_tee_prover/Cargo.toml b/core/bin/zksync_tee_prover/Cargo.toml index b853da348ee0..303ad30cf2ac 100644 --- a/core/bin/zksync_tee_prover/Cargo.toml +++ b/core/bin/zksync_tee_prover/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "zksync_tee_prover" description = "ZKsync TEE prover" -version = "0.1.0" +version.workspace = true edition.workspace = true authors.workspace = true homepage.workspace = true diff --git a/deny.toml b/core/deny.toml similarity index 100% rename from deny.toml rename to 
core/deny.toml diff --git a/core/genesis_export.bin b/core/genesis_export.bin new file mode 100644 index 000000000000..a65382a51ebb Binary files /dev/null and b/core/genesis_export.bin differ diff --git a/core/lib/basic_types/src/commitment.rs b/core/lib/basic_types/src/commitment.rs index 0eed46aad782..c43a55bab4a9 100644 --- a/core/lib/basic_types/src/commitment.rs +++ b/core/lib/basic_types/src/commitment.rs @@ -58,8 +58,35 @@ impl FromStr for L1BatchCommitmentMode { } } +#[derive(Default, Copy, Debug, Clone, PartialEq, Serialize, Deserialize, Display)] +pub enum PubdataType { + #[default] + Rollup, + NoDA, + Avail, + Celestia, + Eigen, + ObjectStore, +} + +impl FromStr for PubdataType { + type Err = &'static str; + + fn from_str(s: &str) -> Result { + match s { + "Rollup" => Ok(Self::Rollup), + "NoDA" => Ok(Self::NoDA), + "Avail" => Ok(Self::Avail), + "Celestia" => Ok(Self::Celestia), + "Eigen" => Ok(Self::Eigen), + "ObjectStore" => Ok(Self::ObjectStore), + _ => Err("Incorrect DA client type; expected one of `Rollup`, `NoDA`, `Avail`, `Celestia`, `Eigen`, `ObjectStore`"), + } + } +} + #[derive(Default, Copy, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct PubdataParams { pub l2_da_validator_address: Address, - pub pubdata_type: L1BatchCommitmentMode, + pub pubdata_type: PubdataType, } diff --git a/core/lib/basic_types/src/protocol_version.rs b/core/lib/basic_types/src/protocol_version.rs index 89251b3a2a40..5d896040f760 100644 --- a/core/lib/basic_types/src/protocol_version.rs +++ b/core/lib/basic_types/src/protocol_version.rs @@ -75,11 +75,11 @@ pub enum ProtocolVersionId { impl ProtocolVersionId { pub const fn latest() -> Self { - Self::Version25 + Self::Version26 } pub const fn next() -> Self { - Self::Version26 + Self::Version27 } pub fn try_from_packed_semver(packed_semver: U256) -> Result { @@ -123,7 +123,7 @@ impl ProtocolVersionId { ProtocolVersionId::Version23 => VmVersion::Vm1_5_0SmallBootloaderMemory, ProtocolVersionId::Version24 => 
VmVersion::Vm1_5_0IncreasedBootloaderMemory, ProtocolVersionId::Version25 => VmVersion::Vm1_5_0IncreasedBootloaderMemory, - ProtocolVersionId::Version26 => VmVersion::Vm1_5_0IncreasedBootloaderMemory, + ProtocolVersionId::Version26 => VmVersion::VmGateway, ProtocolVersionId::Version27 => VmVersion::VmGateway, ProtocolVersionId::Version28 => unreachable!("Version 28 is not yet supported"), } @@ -192,7 +192,7 @@ impl ProtocolVersionId { } pub const fn gateway_upgrade() -> Self { - ProtocolVersionId::Version27 + ProtocolVersionId::Version26 } } @@ -298,7 +298,7 @@ impl From for VmVersion { ProtocolVersionId::Version23 => VmVersion::Vm1_5_0SmallBootloaderMemory, ProtocolVersionId::Version24 => VmVersion::Vm1_5_0IncreasedBootloaderMemory, ProtocolVersionId::Version25 => VmVersion::Vm1_5_0IncreasedBootloaderMemory, - ProtocolVersionId::Version26 => VmVersion::Vm1_5_0IncreasedBootloaderMemory, + ProtocolVersionId::Version26 => VmVersion::VmGateway, ProtocolVersionId::Version27 => VmVersion::VmGateway, ProtocolVersionId::Version28 => unreachable!("Version 28 is not yet supported"), } diff --git a/core/lib/basic_types/src/vm.rs b/core/lib/basic_types/src/vm.rs index f11f98596f18..4469785c7411 100644 --- a/core/lib/basic_types/src/vm.rs +++ b/core/lib/basic_types/src/vm.rs @@ -22,7 +22,7 @@ pub enum VmVersion { impl VmVersion { /// Returns the latest supported VM version. pub const fn latest() -> VmVersion { - Self::Vm1_5_0IncreasedBootloaderMemory + Self::VmGateway } } diff --git a/core/lib/config/src/configs/contracts.rs b/core/lib/config/src/configs/contracts.rs index f6bd02f2dfae..561e51fa5dd5 100644 --- a/core/lib/config/src/configs/contracts.rs +++ b/core/lib/config/src/configs/contracts.rs @@ -9,6 +9,10 @@ pub struct EcosystemContracts { pub state_transition_proxy_addr: Address, pub transparent_proxy_admin_addr: Address, pub l1_bytecodes_supplier_addr: Option
, + // Note that on the contract side of things this contract is called `L2WrappedBaseTokenStore`, + // while on the server side for consistency with the conventions, where the prefix denotes + // the location of the contracts we call it `l1_wrapped_base_token_store` + pub l1_wrapped_base_token_store: Option
, } impl EcosystemContracts { @@ -18,6 +22,7 @@ impl EcosystemContracts { state_transition_proxy_addr: Address::repeat_byte(0x15), transparent_proxy_admin_addr: Address::repeat_byte(0x15), l1_bytecodes_supplier_addr: Some(Address::repeat_byte(0x16)), + l1_wrapped_base_token_store: Some(Address::repeat_byte(0x17)), } } } @@ -50,8 +55,6 @@ pub struct ContractsConfig { pub base_token_addr: Option
, pub l1_base_token_asset_id: Option, - pub l2_predeployed_wrapped_base_token_address: Option
, - pub chain_admin_addr: Option
, pub l2_da_validator_addr: Option
, } @@ -76,7 +79,6 @@ impl ContractsConfig { governance_addr: Address::repeat_byte(0x13), base_token_addr: Some(Address::repeat_byte(0x14)), l1_base_token_asset_id: Some(H256::repeat_byte(0x15)), - l2_predeployed_wrapped_base_token_address: Some(Address::repeat_byte(0x1b)), ecosystem_contracts: Some(EcosystemContracts::for_tests()), chain_admin_addr: Some(Address::repeat_byte(0x18)), l2_da_validator_addr: Some(Address::repeat_byte(0x1a)), diff --git a/core/lib/config/src/configs/da_client/avail.rs b/core/lib/config/src/configs/da_client/avail.rs index 48aaf5b0e61e..7b7740999080 100644 --- a/core/lib/config/src/configs/da_client/avail.rs +++ b/core/lib/config/src/configs/da_client/avail.rs @@ -4,6 +4,9 @@ use zksync_basic_types::secrets::{APIKey, SeedPhrase}; pub const AVAIL_GAS_RELAY_CLIENT_NAME: &str = "GasRelay"; pub const AVAIL_FULL_CLIENT_NAME: &str = "FullClient"; +pub const IN_BLOCK_FINALITY_STATE: &str = "inBlock"; +pub const FINALIZED_FINALITY_STATE: &str = "finalized"; + #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(tag = "avail_client")] pub enum AvailClientConfig { @@ -23,6 +26,7 @@ pub struct AvailConfig { pub struct AvailDefaultConfig { pub api_node_url: String, pub app_id: u32, + pub finality_state: Option, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] @@ -36,3 +40,20 @@ pub struct AvailSecrets { pub seed_phrase: Option, pub gas_relay_api_key: Option, } + +impl AvailDefaultConfig { + pub fn finality_state(&self) -> anyhow::Result { + match self.finality_state.clone() { + Some(finality_state) => match finality_state.as_str() { + IN_BLOCK_FINALITY_STATE | FINALIZED_FINALITY_STATE => Ok(finality_state), + _ => Err(anyhow::anyhow!( + "Invalid finality state: {}. 
Supported values are: {}, {}", + finality_state, + IN_BLOCK_FINALITY_STATE, + FINALIZED_FINALITY_STATE + )), + }, + None => Ok(IN_BLOCK_FINALITY_STATE.to_string()), + } + } +} diff --git a/core/lib/config/src/configs/da_client/mod.rs b/core/lib/config/src/configs/da_client/mod.rs index f82fd134edb5..717239814b9b 100644 --- a/core/lib/config/src/configs/da_client/mod.rs +++ b/core/lib/config/src/configs/da_client/mod.rs @@ -8,6 +8,7 @@ pub const AVAIL_CLIENT_CONFIG_NAME: &str = "Avail"; pub const CELESTIA_CLIENT_CONFIG_NAME: &str = "Celestia"; pub const EIGEN_CLIENT_CONFIG_NAME: &str = "Eigen"; pub const OBJECT_STORE_CLIENT_CONFIG_NAME: &str = "ObjectStore"; +pub const NO_DA_CLIENT_CONFIG_NAME: &str = "NoDA"; #[derive(Debug, Clone, PartialEq)] pub enum DAClientConfig { @@ -15,6 +16,7 @@ pub enum DAClientConfig { Celestia(CelestiaConfig), Eigen(EigenConfig), ObjectStore(ObjectStoreConfig), + NoDA, } impl From for DAClientConfig { diff --git a/core/lib/config/src/configs/da_dispatcher.rs b/core/lib/config/src/configs/da_dispatcher.rs index e9ad6bd3c074..dfbd4d517b9d 100644 --- a/core/lib/config/src/configs/da_dispatcher.rs +++ b/core/lib/config/src/configs/da_dispatcher.rs @@ -2,9 +2,14 @@ use std::time::Duration; use serde::Deserialize; +/// The default interval between the `da_dispatcher's` iterations. pub const DEFAULT_POLLING_INTERVAL_MS: u32 = 5000; -pub const DEFAULT_MAX_ROWS_TO_DISPATCH: u32 = 100; +/// The maximum number of rows to fetch from the database in a single query. The value has to be +/// not too high to avoid the dispatcher iteration taking too much time. +pub const DEFAULT_MAX_ROWS_TO_DISPATCH: u32 = 3; +/// The maximum number of retries for the dispatch of a blob. pub const DEFAULT_MAX_RETRIES: u16 = 5; +/// Use dummy value as inclusion proof instead of getting it from the client. 
pub const DEFAULT_USE_DUMMY_INCLUSION_DATA: bool = false; #[derive(Debug, Clone, PartialEq, Deserialize)] diff --git a/core/lib/config/src/testonly.rs b/core/lib/config/src/testonly.rs index f6ad6bd4dae8..431fa406d109 100644 --- a/core/lib/config/src/testonly.rs +++ b/core/lib/config/src/testonly.rs @@ -268,7 +268,6 @@ impl Distribution for EncodeDist { ecosystem_contracts: self.sample(rng), base_token_addr: self.sample_opt(|| rng.gen()), l1_base_token_asset_id: self.sample_opt(|| rng.gen()), - l2_predeployed_wrapped_base_token_address: self.sample_opt(|| rng.gen()), chain_admin_addr: self.sample_opt(|| rng.gen()), l2_da_validator_addr: self.sample_opt(|| rng.gen()), } @@ -763,6 +762,7 @@ impl Distribution for EncodeDist { state_transition_proxy_addr: rng.gen(), transparent_proxy_admin_addr: rng.gen(), l1_bytecodes_supplier_addr: rng.gen(), + l1_wrapped_base_token_store: rng.gen(), } } } @@ -959,6 +959,7 @@ impl Distribution for EncodeDist { config: AvailClientConfig::FullClient(AvailDefaultConfig { api_node_url: self.sample(rng), app_id: self.sample(rng), + finality_state: None, }), }) } diff --git a/core/lib/contract_verifier/src/resolver/env.rs b/core/lib/contract_verifier/src/resolver/env.rs index 798efde64348..75fdf4e7f472 100644 --- a/core/lib/contract_verifier/src/resolver/env.rs +++ b/core/lib/contract_verifier/src/resolver/env.rs @@ -23,7 +23,7 @@ pub(crate) struct EnvCompilerResolver { impl Default for EnvCompilerResolver { fn default() -> Self { Self { - home_dir: Workspace::locate().core(), + home_dir: Workspace::locate().root(), } } } diff --git a/core/lib/contracts/src/lib.rs b/core/lib/contracts/src/lib.rs index ba22ba8d1b95..e7cf1508f733 100644 --- a/core/lib/contracts/src/lib.rs +++ b/core/lib/contracts/src/lib.rs @@ -37,24 +37,39 @@ const FORGE_PATH_PREFIX: &str = "contracts/l1-contracts/out"; const BRIDGEHUB_CONTRACT_FILE: (&str, &str) = ("bridgehub", "IBridgehub.sol/IBridgehub.json"); const STATE_TRANSITION_CONTRACT_FILE: (&str, &str) = ( 
"state-transition", - "IStateTransitionManager.sol/IStateTransitionManager.json", + "ChainTypeManager.sol/ChainTypeManager.json", ); +const BYTECODE_SUPPLIER_CONTRACT_FILE: (&str, &str) = + ("upgrades", "BytecodesSupplier.sol/BytecodesSupplier.json"); const ZKSYNC_HYPERCHAIN_CONTRACT_FILE: (&str, &str) = ( "state-transition/chain-interfaces", - "IZkSyncHyperchain.sol/IZkSyncHyperchain.json", + "IZKChain.sol/IZKChain.json", ); const DIAMOND_INIT_CONTRACT_FILE: (&str, &str) = ( "state-transition", "chain-interfaces/IDiamondInit.sol/IDiamondInit.json", ); const GOVERNANCE_CONTRACT_FILE: (&str, &str) = ("governance", "IGovernance.sol/IGovernance.json"); -const CHAIN_ADMIN_CONTRACT_FILE: (&str, &str) = ("governance", "IChainAdmin.sol/IChainAdmin.json"); +// TODO(EVM-924): We currently only support the "Ownable" chain admin. +const CHAIN_ADMIN_CONTRACT_FILE: (&str, &str) = ( + "governance", + "IChainAdminOwnable.sol/IChainAdminOwnable.json", +); const GETTERS_FACET_CONTRACT_FILE: (&str, &str) = ( "state-transition/chain-interfaces", "IGetters.sol/IGetters.json", ); const MULTICALL3_CONTRACT_FILE: (&str, &str) = ("dev-contracts", "Multicall3.sol/Multicall3.json"); +const L1_ASSET_ROUTER_FILE: (&str, &str) = ( + "bridge/asset-router", + "L1AssetRouter.sol/L1AssetRouter.json", +); +const L2_WRAPPED_BASE_TOKEN_STORE: (&str, &str) = ( + "bridge", + "L2WrappedBaseTokenStore.sol/L2WrappedBaseTokenStore.json", +); + const VERIFIER_CONTRACT_FILE: (&str, &str) = ("state-transition", "Verifier.sol/Verifier.json"); const DUAL_VERIFIER_CONTRACT_FILE: (&str, &str) = ( "state-transition/verifiers", @@ -67,7 +82,7 @@ const _FAIL_ON_RECEIVE_CONTRACT_FILE: &str = "contracts/l1-contracts/artifacts/contracts/zksync/dev-contracts/FailOnReceive.sol/FailOnReceive.json"; fn home_path() -> PathBuf { - Workspace::locate().core() + Workspace::locate().root() } fn read_file_to_json_value(path: impl AsRef + std::fmt::Debug) -> Option { @@ -158,6 +173,10 @@ pub fn state_transition_manager_contract() 
-> Contract { load_contract_for_both_compilers(STATE_TRANSITION_CONTRACT_FILE) } +pub fn bytecode_supplier_contract() -> Contract { + load_contract_for_both_compilers(BYTECODE_SUPPLIER_CONTRACT_FILE) +} + pub fn hyperchain_contract() -> Contract { load_contract_for_both_compilers(ZKSYNC_HYPERCHAIN_CONTRACT_FILE) } @@ -170,6 +189,14 @@ pub fn multicall_contract() -> Contract { load_contract_for_both_compilers(MULTICALL3_CONTRACT_FILE) } +pub fn l1_asset_router_contract() -> Contract { + load_contract_for_both_compilers(L1_ASSET_ROUTER_FILE) +} + +pub fn wrapped_base_token_store_contract() -> Contract { + load_contract_for_both_compilers(L2_WRAPPED_BASE_TOKEN_STORE) +} + pub fn verifier_contract() -> Contract { let path = format!("{}/{}", FORGE_PATH_PREFIX, DUAL_VERIFIER_CONTRACT_FILE.1); let zksync_home = home_path(); @@ -190,6 +217,14 @@ pub fn l1_messenger_contract() -> Contract { load_sys_contract("L1Messenger") } +pub fn l2_message_root() -> Contract { + load_contract("contracts/l1-contracts/out/MessageRoot.sol/MessageRoot.json") +} + +pub fn l2_rollup_da_validator_bytecode() -> Vec { + read_bytecode("contracts/l2-contracts/zkout/RollupL2DAValidator.sol/RollupL2DAValidator.json") +} + /// Reads bytecode from the path RELATIVE to the Cargo workspace location. 
pub fn read_bytecode(relative_path: impl AsRef + std::fmt::Debug) -> Vec { read_bytecode_from_path(relative_path).expect("Failed to open file") @@ -719,14 +754,14 @@ pub static PRE_BOOJUM_COMMIT_FUNCTION: Lazy = Lazy::new(|| { serde_json::from_str(abi).unwrap() }); -pub static SET_CHAIN_ID_EVENT: Lazy = Lazy::new(|| { +pub static GENESIS_UPGRADE_EVENT: Lazy = Lazy::new(|| { let abi = r#" { "anonymous": false, "inputs": [ { "indexed": true, - "name": "_stateTransitionChain", + "name": "_hyperchain", "type": "address" }, { @@ -804,9 +839,14 @@ pub static SET_CHAIN_ID_EVENT: Lazy = Lazy::new(|| { "indexed": true, "name": "_protocolVersion", "type": "uint256" + }, + { + "indexed": false, + "name": "_factoryDeps", + "type": "bytes[]" } ], - "name": "SetChainIdUpgrade", + "name": "GenesisUpgrade", "type": "event" }"#; serde_json::from_str(abi).unwrap() @@ -1422,28 +1462,3 @@ pub static POST_SHARED_BRIDGE_EXECUTE_FUNCTION: Lazy = Lazy::new(|| { }"#; serde_json::from_str(abi).unwrap() }); - -// Temporary thing, should be removed when new contracts are merged. 
-pub static MESSAGE_ROOT_CONTRACT: Lazy = Lazy::new(|| { - let abi = r#" - [{ - "inputs": [ - { - "internalType": "uint256", - "name": "_chainId", - "type": "uint256" - } - ], - "name": "getChainRoot", - "outputs": [ - { - "internalType": "bytes32", - "name": "", - "type": "bytes32" - } - ], - "stateMutability": "view", - "type": "function" - }]"#; - serde_json::from_str(abi).unwrap() -}); diff --git a/core/lib/dal/.sqlx/query-2234d7728d91cefaee792c900448aafe4b1aa2250fc535bfcdff39172551d42b.json b/core/lib/dal/.sqlx/query-18e351067537a6fc5d837f01778eeb89dcf134f97acb695e7f232cbd3839e81b.json similarity index 75% rename from core/lib/dal/.sqlx/query-2234d7728d91cefaee792c900448aafe4b1aa2250fc535bfcdff39172551d42b.json rename to core/lib/dal/.sqlx/query-18e351067537a6fc5d837f01778eeb89dcf134f97acb695e7f232cbd3839e81b.json index df60f114f5ef..a25294c6bf1f 100644 --- a/core/lib/dal/.sqlx/query-2234d7728d91cefaee792c900448aafe4b1aa2250fc535bfcdff39172551d42b.json +++ b/core/lib/dal/.sqlx/query-18e351067537a6fc5d837f01778eeb89dcf134f97acb695e7f232cbd3839e81b.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n *\n FROM\n eth_txs\n WHERE\n from_addr IS NOT DISTINCT FROM $2 -- can't just use equality as NULL != NULL\n AND is_gateway = $3\n AND id > (\n SELECT\n COALESCE(MAX(eth_tx_id), 0)\n FROM\n eth_txs_history\n JOIN eth_txs ON eth_txs.id = eth_txs_history.eth_tx_id\n WHERE\n eth_txs_history.sent_at_block IS NOT NULL\n AND eth_txs.from_addr IS NOT DISTINCT FROM $2\n AND is_gateway = $3\n )\n ORDER BY\n id\n LIMIT\n $1\n ", + "query": "\n SELECT\n *\n FROM\n eth_txs\n WHERE\n from_addr IS NOT DISTINCT FROM $2 -- can't just use equality as NULL != NULL\n AND is_gateway = $3\n AND id > COALESCE(\n (SELECT\n eth_tx_id\n FROM\n eth_txs_history\n JOIN eth_txs ON eth_txs.id = eth_txs_history.eth_tx_id\n WHERE\n eth_txs_history.sent_at_block IS NOT NULL\n AND eth_txs.from_addr IS NOT DISTINCT FROM $2\n AND is_gateway = $3\n ORDER BY eth_tx_id DESC LIMIT 1),\n 
0\n )\n ORDER BY\n id\n LIMIT\n $1\n ", "describe": { "columns": [ { @@ -110,5 +110,5 @@ true ] }, - "hash": "2234d7728d91cefaee792c900448aafe4b1aa2250fc535bfcdff39172551d42b" + "hash": "18e351067537a6fc5d837f01778eeb89dcf134f97acb695e7f232cbd3839e81b" } diff --git a/core/lib/dal/.sqlx/query-24aca24f8811d87f5ff54757903e235deae9a0c54b9713207d53918eb4973600.json b/core/lib/dal/.sqlx/query-24aca24f8811d87f5ff54757903e235deae9a0c54b9713207d53918eb4973600.json new file mode 100644 index 000000000000..b25eff8b4f55 --- /dev/null +++ b/core/lib/dal/.sqlx/query-24aca24f8811d87f5ff54757903e235deae9a0c54b9713207d53918eb4973600.json @@ -0,0 +1,62 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n number,\n is_sealed,\n timestamp,\n protocol_version,\n fee_address,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price\n FROM\n l1_batches\n ORDER BY\n number DESC\n LIMIT\n 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "is_sealed", + "type_info": "Bool" + }, + { + "ordinal": 2, + "name": "timestamp", + "type_info": "Int8" + }, + { + "ordinal": 3, + "name": "protocol_version", + "type_info": "Int4" + }, + { + "ordinal": 4, + "name": "fee_address", + "type_info": "Bytea" + }, + { + "ordinal": 5, + "name": "l1_gas_price", + "type_info": "Int8" + }, + { + "ordinal": 6, + "name": "l2_fair_gas_price", + "type_info": "Int8" + }, + { + "ordinal": 7, + "name": "fair_pubdata_price", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + false, + false, + true, + false, + false, + false, + false + ] + }, + "hash": "24aca24f8811d87f5ff54757903e235deae9a0c54b9713207d53918eb4973600" +} diff --git a/core/lib/dal/.sqlx/query-33d49ec6028974fa8b46d7bf1f79e41923477ed8dc179ca0e1fe64b4700e6572.json b/core/lib/dal/.sqlx/query-33d49ec6028974fa8b46d7bf1f79e41923477ed8dc179ca0e1fe64b4700e6572.json new file mode 100644 index 000000000000..703a57ae0597 --- /dev/null +++ 
b/core/lib/dal/.sqlx/query-33d49ec6028974fa8b46d7bf1f79e41923477ed8dc179ca0e1fe64b4700e6572.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n COUNT(*)\n FROM\n eth_txs\n WHERE\n confirmed_eth_tx_history_id IS NULL\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + null + ] + }, + "hash": "33d49ec6028974fa8b46d7bf1f79e41923477ed8dc179ca0e1fe64b4700e6572" +} diff --git a/core/lib/dal/.sqlx/query-7235e50f9ce4b5c4f6f8325117eaccc7108538405743fe1ad71451d0f1842561.json b/core/lib/dal/.sqlx/query-34cb5e326f02cca0dac3483a64d21e30a2a643f7909b7b6803a9708357f8ecbe.json similarity index 52% rename from core/lib/dal/.sqlx/query-7235e50f9ce4b5c4f6f8325117eaccc7108538405743fe1ad71451d0f1842561.json rename to core/lib/dal/.sqlx/query-34cb5e326f02cca0dac3483a64d21e30a2a643f7909b7b6803a9708357f8ecbe.json index f46674b08bc6..f8e9e650d10a 100644 --- a/core/lib/dal/.sqlx/query-7235e50f9ce4b5c4f6f8325117eaccc7108538405743fe1ad71451d0f1842561.json +++ b/core/lib/dal/.sqlx/query-34cb5e326f02cca0dac3483a64d21e30a2a643f7909b7b6803a9708357f8ecbe.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n transactions.hash AS tx_hash,\n transactions.index_in_block AS tx_index_in_block,\n call_trace\n FROM\n call_traces\n INNER JOIN transactions ON tx_hash = transactions.hash\n WHERE\n transactions.miniblock_number = $1\n ORDER BY\n transactions.index_in_block\n ", + "query": "\n SELECT\n transactions.hash AS tx_hash,\n transactions.index_in_block AS tx_index_in_block,\n call_trace,\n transactions.error AS tx_error\n FROM\n call_traces\n INNER JOIN transactions ON tx_hash = transactions.hash\n WHERE\n transactions.miniblock_number = $1\n ORDER BY\n transactions.index_in_block\n ", "describe": { "columns": [ { @@ -17,6 +17,11 @@ "ordinal": 2, "name": "call_trace", "type_info": "Bytea" + }, + { + "ordinal": 3, + "name": "tx_error", + "type_info": 
"Varchar" } ], "parameters": { @@ -27,8 +32,9 @@ "nullable": [ false, true, - false + false, + true ] }, - "hash": "7235e50f9ce4b5c4f6f8325117eaccc7108538405743fe1ad71451d0f1842561" + "hash": "34cb5e326f02cca0dac3483a64d21e30a2a643f7909b7b6803a9708357f8ecbe" } diff --git a/core/lib/dal/.sqlx/query-4bd1a4e612d10f2ca26068c140442f38816f163a3e3fba4fdbb81076b969e970.json b/core/lib/dal/.sqlx/query-442d1b4604c7a4202811d250a531d30a258bdf36ed98b1422084ef682b29c8cc.json similarity index 82% rename from core/lib/dal/.sqlx/query-4bd1a4e612d10f2ca26068c140442f38816f163a3e3fba4fdbb81076b969e970.json rename to core/lib/dal/.sqlx/query-442d1b4604c7a4202811d250a531d30a258bdf36ed98b1422084ef682b29c8cc.json index 66d3e18075bf..39de8d246376 100644 --- a/core/lib/dal/.sqlx/query-4bd1a4e612d10f2ca26068c140442f38816f163a3e3fba4fdbb81076b969e970.json +++ b/core/lib/dal/.sqlx/query-442d1b4604c7a4202811d250a531d30a258bdf36ed98b1422084ef682b29c8cc.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n number BETWEEN $1 AND $2\n ORDER BY\n number\n LIMIT\n $3\n ", + "query": "\n SELECT\n number,\n timestamp,\n 
l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n number BETWEEN $1 AND $2\n ORDER BY\n number\n LIMIT\n $3\n ", "describe": { "columns": [ { @@ -162,6 +162,21 @@ "ordinal": 31, "name": "inclusion_data", "type_info": "Bytea" + }, + { + "ordinal": 32, + "name": "l1_gas_price", + "type_info": "Int8" + }, + { + "ordinal": 33, + "name": "l2_fair_gas_price", + "type_info": "Int8" + }, + { + "ordinal": 34, + "name": "fair_pubdata_price", + "type_info": "Int8" } ], "parameters": { @@ -203,8 +218,11 @@ true, true, true, - true + true, + false, + false, + false ] }, - "hash": "4bd1a4e612d10f2ca26068c140442f38816f163a3e3fba4fdbb81076b969e970" + "hash": "442d1b4604c7a4202811d250a531d30a258bdf36ed98b1422084ef682b29c8cc" } diff --git a/core/lib/dal/.sqlx/query-4e994d519b9c75e64a74423f8c19fbde6eb6634d7a63005081ffc1eb6c28e9ec.json b/core/lib/dal/.sqlx/query-54f41fccbe8c100015ccf4d87e0e33d22c94aa47b7a8863da2afa5ab1a8502bf.json similarity index 77% rename from core/lib/dal/.sqlx/query-4e994d519b9c75e64a74423f8c19fbde6eb6634d7a63005081ffc1eb6c28e9ec.json rename to 
core/lib/dal/.sqlx/query-54f41fccbe8c100015ccf4d87e0e33d22c94aa47b7a8863da2afa5ab1a8502bf.json index 804318120fcc..2cdfad08c830 100644 --- a/core/lib/dal/.sqlx/query-4e994d519b9c75e64a74423f8c19fbde6eb6634d7a63005081ffc1eb6c28e9ec.json +++ b/core/lib/dal/.sqlx/query-54f41fccbe8c100015ccf4d87e0e33d22c94aa47b7a8863da2afa5ab1a8502bf.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n l1_tx_count,\n l2_tx_count,\n timestamp,\n l2_to_l1_messages,\n bloom,\n priority_ops_onchain_data,\n used_contract_hashes,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n protocol_version,\n system_logs,\n pubdata_input,\n fee_address\n FROM\n l1_batches\n WHERE\n is_sealed\n AND number = $1\n ", + "query": "\n SELECT\n number,\n l1_tx_count,\n l2_tx_count,\n timestamp,\n l2_to_l1_messages,\n bloom,\n priority_ops_onchain_data,\n used_contract_hashes,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n protocol_version,\n system_logs,\n pubdata_input,\n fee_address,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price\n FROM\n l1_batches\n WHERE\n is_sealed\n AND number = $1\n ", "describe": { "columns": [ { @@ -77,6 +77,21 @@ "ordinal": 14, "name": "fee_address", "type_info": "Bytea" + }, + { + "ordinal": 15, + "name": "l1_gas_price", + "type_info": "Int8" + }, + { + "ordinal": 16, + "name": "l2_fair_gas_price", + "type_info": "Int8" + }, + { + "ordinal": 17, + "name": "fair_pubdata_price", + "type_info": "Int8" } ], "parameters": { @@ -99,8 +114,11 @@ true, false, true, + false, + false, + false, false ] }, - "hash": "4e994d519b9c75e64a74423f8c19fbde6eb6634d7a63005081ffc1eb6c28e9ec" + "hash": "54f41fccbe8c100015ccf4d87e0e33d22c94aa47b7a8863da2afa5ab1a8502bf" } diff --git a/core/lib/dal/.sqlx/query-a42121cd85daeb95ee268ba5cff1806fcc54d73216a7dc54be6ba210ef02d789.json b/core/lib/dal/.sqlx/query-7203b56390ec3768b7f8ed221756eaa79e05b443995c819db0befdcb1449691f.json similarity index 77% rename from 
core/lib/dal/.sqlx/query-a42121cd85daeb95ee268ba5cff1806fcc54d73216a7dc54be6ba210ef02d789.json rename to core/lib/dal/.sqlx/query-7203b56390ec3768b7f8ed221756eaa79e05b443995c819db0befdcb1449691f.json index 9a93ba45978e..8a29dbe2158e 100644 --- a/core/lib/dal/.sqlx/query-a42121cd85daeb95ee268ba5cff1806fcc54d73216a7dc54be6ba210ef02d789.json +++ b/core/lib/dal/.sqlx/query-7203b56390ec3768b7f8ed221756eaa79e05b443995c819db0befdcb1449691f.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n system_logs,\n compressed_state_diffs,\n protocol_version,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data\n FROM\n (\n SELECT\n l1_batches.*,\n ROW_NUMBER() OVER (\n ORDER BY\n number ASC\n ) AS row_number\n FROM\n l1_batches\n WHERE\n eth_commit_tx_id IS NOT NULL\n AND l1_batches.skip_proof = TRUE\n AND l1_batches.number > $1\n ORDER BY\n number\n LIMIT\n $2\n ) inn\n LEFT JOIN commitments ON commitments.l1_batch_number = inn.number\n LEFT JOIN data_availability ON data_availability.l1_batch_number = inn.number\n WHERE\n number - row_number = $1\n ", + "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n 
evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n system_logs,\n compressed_state_diffs,\n protocol_version,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price\n FROM\n (\n SELECT\n l1_batches.*,\n ROW_NUMBER() OVER (\n ORDER BY\n number ASC\n ) AS row_number\n FROM\n l1_batches\n WHERE\n eth_commit_tx_id IS NOT NULL\n AND l1_batches.skip_proof = TRUE\n AND l1_batches.number > $1\n ORDER BY\n number\n LIMIT\n $2\n ) inn\n LEFT JOIN commitments ON commitments.l1_batch_number = inn.number\n LEFT JOIN data_availability ON data_availability.l1_batch_number = inn.number\n WHERE\n number - row_number = $1\n ", "describe": { "columns": [ { @@ -162,6 +162,21 @@ "ordinal": 31, "name": "inclusion_data", "type_info": "Bytea" + }, + { + "ordinal": 32, + "name": "l1_gas_price", + "type_info": "Int8" + }, + { + "ordinal": 33, + "name": "l2_fair_gas_price", + "type_info": "Int8" + }, + { + "ordinal": 34, + "name": "fair_pubdata_price", + "type_info": "Int8" } ], "parameters": { @@ -202,8 +217,11 @@ true, true, true, - true + true, + false, + false, + false ] }, - "hash": "a42121cd85daeb95ee268ba5cff1806fcc54d73216a7dc54be6ba210ef02d789" + "hash": "7203b56390ec3768b7f8ed221756eaa79e05b443995c819db0befdcb1449691f" } diff --git a/core/lib/dal/.sqlx/query-77864e5eb5eada8edf8f4457aa153369701d7cd5f75ca031bf77ca27d0437cb9.json b/core/lib/dal/.sqlx/query-7cf174da6228113a27dacbcc0eac9b01a237f4d0f74c2d387099a3766bf7364b.json similarity index 84% rename from core/lib/dal/.sqlx/query-77864e5eb5eada8edf8f4457aa153369701d7cd5f75ca031bf77ca27d0437cb9.json rename to core/lib/dal/.sqlx/query-7cf174da6228113a27dacbcc0eac9b01a237f4d0f74c2d387099a3766bf7364b.json index f4e08abe31c5..c8548e37a5f6 100644 --- 
a/core/lib/dal/.sqlx/query-77864e5eb5eada8edf8f4457aa153369701d7cd5f75ca031bf77ca27d0437cb9.json +++ b/core/lib/dal/.sqlx/query-7cf174da6228113a27dacbcc0eac9b01a237f4d0f74c2d387099a3766bf7364b.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n system_logs,\n compressed_state_diffs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n is_sealed\n AND number = $1\n ", + "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n system_logs,\n compressed_state_diffs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price\n FROM\n l1_batches\n LEFT JOIN commitments ON 
commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n is_sealed\n AND number = $1\n ", "describe": { "columns": [ { @@ -162,6 +162,21 @@ "ordinal": 31, "name": "inclusion_data", "type_info": "Bytea" + }, + { + "ordinal": 32, + "name": "l1_gas_price", + "type_info": "Int8" + }, + { + "ordinal": 33, + "name": "l2_fair_gas_price", + "type_info": "Int8" + }, + { + "ordinal": 34, + "name": "fair_pubdata_price", + "type_info": "Int8" } ], "parameters": { @@ -201,8 +216,11 @@ true, true, true, - true + true, + false, + false, + false ] }, - "hash": "77864e5eb5eada8edf8f4457aa153369701d7cd5f75ca031bf77ca27d0437cb9" + "hash": "7cf174da6228113a27dacbcc0eac9b01a237f4d0f74c2d387099a3766bf7364b" } diff --git a/core/lib/dal/.sqlx/query-87f27295de500591f01ed76731df2aed7049c3f44a6d25556967ea867e0caf25.json b/core/lib/dal/.sqlx/query-87f27295de500591f01ed76731df2aed7049c3f44a6d25556967ea867e0caf25.json deleted file mode 100644 index dbeaede9ecd2..000000000000 --- a/core/lib/dal/.sqlx/query-87f27295de500591f01ed76731df2aed7049c3f44a6d25556967ea867e0caf25.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n call_trace\n FROM\n call_traces\n WHERE\n tx_hash = $1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "call_trace", - "type_info": "Bytea" - } - ], - "parameters": { - "Left": [ - "Bytea" - ] - }, - "nullable": [ - false - ] - }, - "hash": "87f27295de500591f01ed76731df2aed7049c3f44a6d25556967ea867e0caf25" -} diff --git a/core/lib/dal/.sqlx/query-45154c2efc8d07c4f83ae3e229f9892118f5732374e62f35e27800422afb5746.json b/core/lib/dal/.sqlx/query-881ea0a611d35d4a6cc6893067a791f9887cc7dcf456c79bb22edb22cf27682f.json similarity index 75% rename from core/lib/dal/.sqlx/query-45154c2efc8d07c4f83ae3e229f9892118f5732374e62f35e27800422afb5746.json rename to 
core/lib/dal/.sqlx/query-881ea0a611d35d4a6cc6893067a791f9887cc7dcf456c79bb22edb22cf27682f.json index 11bff1102932..7eb653119357 100644 --- a/core/lib/dal/.sqlx/query-45154c2efc8d07c4f83ae3e229f9892118f5732374e62f35e27800422afb5746.json +++ b/core/lib/dal/.sqlx/query-881ea0a611d35d4a6cc6893067a791f9887cc7dcf456c79bb22edb22cf27682f.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n l1_batches.timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n l1_batches.bootloader_code_hash,\n l1_batches.default_aa_code_hash,\n l1_batches.evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n JOIN protocol_versions ON protocol_versions.id = l1_batches.protocol_version\n WHERE\n eth_commit_tx_id IS NULL\n AND number != 0\n AND protocol_versions.bootloader_code_hash = $1\n AND protocol_versions.default_account_code_hash = $2\n AND commitment IS NOT NULL\n AND (\n protocol_versions.id = $3\n OR protocol_versions.upgrade_tx_hash IS NULL\n )\n AND events_queue_commitment IS NOT NULL\n AND bootloader_initial_content_commitment IS NOT NULL\n AND (\n data_availability.inclusion_data IS NOT NULL\n OR $4 IS FALSE\n )\n ORDER BY\n number\n LIMIT\n $5\n ", + "query": "\n SELECT\n number,\n l1_batches.timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n 
priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n l1_batches.bootloader_code_hash,\n l1_batches.default_aa_code_hash,\n l1_batches.evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n JOIN protocol_versions ON protocol_versions.id = l1_batches.protocol_version\n WHERE\n eth_commit_tx_id IS NULL\n AND number != 0\n AND protocol_versions.bootloader_code_hash = $1\n AND protocol_versions.default_account_code_hash = $2\n AND commitment IS NOT NULL\n AND (\n protocol_versions.id = $3\n OR protocol_versions.upgrade_tx_hash IS NULL\n )\n AND events_queue_commitment IS NOT NULL\n AND bootloader_initial_content_commitment IS NOT NULL\n AND (\n data_availability.inclusion_data IS NOT NULL\n OR $4 IS FALSE\n )\n ORDER BY\n number\n LIMIT\n $5\n ", "describe": { "columns": [ { @@ -162,6 +162,21 @@ "ordinal": 31, "name": "inclusion_data", "type_info": "Bytea" + }, + { + "ordinal": 32, + "name": "l1_gas_price", + "type_info": "Int8" + }, + { + "ordinal": 33, + "name": "l2_fair_gas_price", + "type_info": "Int8" + }, + { + "ordinal": 34, + "name": "fair_pubdata_price", + "type_info": "Int8" } ], "parameters": { @@ -205,8 +220,11 @@ true, true, true, - true + true, + false, + false, + false ] }, - "hash": "45154c2efc8d07c4f83ae3e229f9892118f5732374e62f35e27800422afb5746" + "hash": 
"881ea0a611d35d4a6cc6893067a791f9887cc7dcf456c79bb22edb22cf27682f" } diff --git a/core/lib/dal/.sqlx/query-8435ed4ee2a9b962116ecfa522f4ba52c9a0e64d1badc39cc2fef29b1468621a.json b/core/lib/dal/.sqlx/query-970c457cc4513615d9bb6ecd6f1a69128b9f77d7ce3b898e44c497dc56a40149.json similarity index 67% rename from core/lib/dal/.sqlx/query-8435ed4ee2a9b962116ecfa522f4ba52c9a0e64d1badc39cc2fef29b1468621a.json rename to core/lib/dal/.sqlx/query-970c457cc4513615d9bb6ecd6f1a69128b9f77d7ce3b898e44c497dc56a40149.json index df856b977026..d0b576bfa688 100644 --- a/core/lib/dal/.sqlx/query-8435ed4ee2a9b962116ecfa522f4ba52c9a0e64d1badc39cc2fef29b1468621a.json +++ b/core/lib/dal/.sqlx/query-970c457cc4513615d9bb6ecd6f1a69128b9f77d7ce3b898e44c497dc56a40149.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n timestamp,\n protocol_version,\n fee_address,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price\n FROM\n l1_batches\n WHERE\n NOT is_sealed\n ", + "query": "\n SELECT\n number,\n timestamp,\n protocol_version,\n fee_address,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price\n FROM (\n SELECT\n number,\n timestamp,\n protocol_version,\n fee_address,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price,\n is_sealed\n FROM l1_batches\n ORDER BY number DESC\n LIMIT 1\n ) AS u\n WHERE NOT is_sealed\n ", "describe": { "columns": [ { @@ -52,5 +52,5 @@ false ] }, - "hash": "8435ed4ee2a9b962116ecfa522f4ba52c9a0e64d1badc39cc2fef29b1468621a" + "hash": "970c457cc4513615d9bb6ecd6f1a69128b9f77d7ce3b898e44c497dc56a40149" } diff --git a/core/lib/dal/.sqlx/query-868bfdc5d8ee5eab395fa690891751dfd285628a75a35b152bccb3c73e9cc057.json b/core/lib/dal/.sqlx/query-a8cc4da6200478a72a1b490d302b8a0949212bf66c602024aaa9843b4c66f965.json similarity index 77% rename from core/lib/dal/.sqlx/query-868bfdc5d8ee5eab395fa690891751dfd285628a75a35b152bccb3c73e9cc057.json rename to 
core/lib/dal/.sqlx/query-a8cc4da6200478a72a1b490d302b8a0949212bf66c602024aaa9843b4c66f965.json index e47911f3d776..82b173a6a651 100644 --- a/core/lib/dal/.sqlx/query-868bfdc5d8ee5eab395fa690891751dfd285628a75a35b152bccb3c73e9cc057.json +++ b/core/lib/dal/.sqlx/query-a8cc4da6200478a72a1b490d302b8a0949212bf66c602024aaa9843b4c66f965.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n *\n FROM\n eth_txs\n WHERE\n from_addr IS NOT DISTINCT FROM $1 -- can't just use equality as NULL != NULL\n AND confirmed_eth_tx_history_id IS NULL\n AND is_gateway = $2\n AND id <= (\n SELECT\n COALESCE(MAX(eth_tx_id), 0)\n FROM\n eth_txs_history\n JOIN eth_txs ON eth_txs.id = eth_txs_history.eth_tx_id\n WHERE\n eth_txs_history.sent_at_block IS NOT NULL\n AND eth_txs.from_addr IS NOT DISTINCT FROM $1\n AND is_gateway = $2\n )\n ORDER BY\n id\n ", + "query": "\n SELECT\n *\n FROM\n eth_txs\n WHERE\n from_addr IS NOT DISTINCT FROM $1 -- can't just use equality as NULL != NULL\n AND confirmed_eth_tx_history_id IS NULL\n AND is_gateway = $2\n AND id <= COALESCE(\n (SELECT\n eth_tx_id\n FROM\n eth_txs_history\n JOIN eth_txs ON eth_txs.id = eth_txs_history.eth_tx_id\n WHERE\n eth_txs_history.sent_at_block IS NOT NULL\n AND eth_txs.from_addr IS NOT DISTINCT FROM $1\n AND is_gateway = $2\n ORDER BY eth_tx_id DESC LIMIT 1),\n 0\n )\n ORDER BY\n id\n ", "describe": { "columns": [ { @@ -109,5 +109,5 @@ true ] }, - "hash": "868bfdc5d8ee5eab395fa690891751dfd285628a75a35b152bccb3c73e9cc057" + "hash": "a8cc4da6200478a72a1b490d302b8a0949212bf66c602024aaa9843b4c66f965" } diff --git a/core/lib/dal/.sqlx/query-c5aedd2b1871d8f6276a31482caa673e4b5bba059ebe07bbbb64578881db030b.json b/core/lib/dal/.sqlx/query-c70a2e9d09f93b510cb726d6d420635d692a748021ed20b657b6d8f0dfcad5bc.json similarity index 90% rename from core/lib/dal/.sqlx/query-c5aedd2b1871d8f6276a31482caa673e4b5bba059ebe07bbbb64578881db030b.json rename to 
core/lib/dal/.sqlx/query-c70a2e9d09f93b510cb726d6d420635d692a748021ed20b657b6d8f0dfcad5bc.json index f97ea8a6ccd5..1f6a3dfec6ea 100644 --- a/core/lib/dal/.sqlx/query-c5aedd2b1871d8f6276a31482caa673e4b5bba059ebe07bbbb64578881db030b.json +++ b/core/lib/dal/.sqlx/query-c70a2e9d09f93b510cb726d6d420635d692a748021ed20b657b6d8f0dfcad5bc.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n eth_prove_tx_id IS NOT NULL\n AND eth_execute_tx_id IS NULL\n ORDER BY\n number\n LIMIT\n $1\n ", + "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n 
aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n eth_prove_tx_id IS NOT NULL\n AND eth_execute_tx_id IS NULL\n ORDER BY\n number\n LIMIT\n $1\n ", "describe": { "columns": [ { @@ -162,6 +162,21 @@ "ordinal": 31, "name": "inclusion_data", "type_info": "Bytea" + }, + { + "ordinal": 32, + "name": "l1_gas_price", + "type_info": "Int8" + }, + { + "ordinal": 33, + "name": "l2_fair_gas_price", + "type_info": "Int8" + }, + { + "ordinal": 34, + "name": "fair_pubdata_price", + "type_info": "Int8" } ], "parameters": { @@ -201,8 +216,11 @@ true, true, true, - true + true, + false, + false, + false ] }, - "hash": "c5aedd2b1871d8f6276a31482caa673e4b5bba059ebe07bbbb64578881db030b" + "hash": "c70a2e9d09f93b510cb726d6d420635d692a748021ed20b657b6d8f0dfcad5bc" } diff --git a/core/lib/dal/.sqlx/query-62e8330881b73917394384adbf73911add046315e5f8877bc57a34e3dadf9e37.json b/core/lib/dal/.sqlx/query-cb1a078ba867415f27a50bdccfbfb654bdb1c4139c453889b5fe43902300c361.json similarity index 82% rename from core/lib/dal/.sqlx/query-62e8330881b73917394384adbf73911add046315e5f8877bc57a34e3dadf9e37.json rename to core/lib/dal/.sqlx/query-cb1a078ba867415f27a50bdccfbfb654bdb1c4139c453889b5fe43902300c361.json index dfdb4b6c82e7..ab9a270abc7b 100644 --- a/core/lib/dal/.sqlx/query-62e8330881b73917394384adbf73911add046315e5f8877bc57a34e3dadf9e37.json +++ b/core/lib/dal/.sqlx/query-cb1a078ba867415f27a50bdccfbfb654bdb1c4139c453889b5fe43902300c361.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n 
compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n eth_commit_tx_id IS NOT NULL\n AND eth_prove_tx_id IS NULL\n ORDER BY\n number\n LIMIT\n $1\n ", + "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n eth_commit_tx_id IS NOT NULL\n AND eth_prove_tx_id IS NULL\n ORDER BY\n number\n LIMIT\n $1\n ", "describe": { "columns": [ { @@ -162,6 +162,21 @@ "ordinal": 31, "name": "inclusion_data", "type_info": "Bytea" + }, + { + "ordinal": 32, + "name": "l1_gas_price", 
+ "type_info": "Int8" + }, + { + "ordinal": 33, + "name": "l2_fair_gas_price", + "type_info": "Int8" + }, + { + "ordinal": 34, + "name": "fair_pubdata_price", + "type_info": "Int8" } ], "parameters": { @@ -201,8 +216,11 @@ true, true, true, - true + true, + false, + false, + false ] }, - "hash": "62e8330881b73917394384adbf73911add046315e5f8877bc57a34e3dadf9e37" + "hash": "cb1a078ba867415f27a50bdccfbfb654bdb1c4139c453889b5fe43902300c361" } diff --git a/core/lib/dal/.sqlx/query-1cb61327bed4d65a3fc81aa2229e01396dacefc0cea8cbcf5807185eb00fc0f7.json b/core/lib/dal/.sqlx/query-d62f8d10c7b469067e20e56d2e58d94bb65d35ad4b0c819fa7b4afe07d9769e9.json similarity index 82% rename from core/lib/dal/.sqlx/query-1cb61327bed4d65a3fc81aa2229e01396dacefc0cea8cbcf5807185eb00fc0f7.json rename to core/lib/dal/.sqlx/query-d62f8d10c7b469067e20e56d2e58d94bb65d35ad4b0c819fa7b4afe07d9769e9.json index 48adcd412676..5d40770fcf1d 100644 --- a/core/lib/dal/.sqlx/query-1cb61327bed4d65a3fc81aa2229e01396dacefc0cea8cbcf5807185eb00fc0f7.json +++ b/core/lib/dal/.sqlx/query-d62f8d10c7b469067e20e56d2e58d94bb65d35ad4b0c819fa7b4afe07d9769e9.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON 
data_availability.l1_batch_number = l1_batches.number\n WHERE\n number = 0\n OR eth_commit_tx_id IS NOT NULL\n AND commitment IS NOT NULL\n ORDER BY\n number DESC\n LIMIT\n 1\n ", + "query": "\n SELECT\n number,\n timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n bootloader_code_hash,\n default_aa_code_hash,\n evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n number = 0\n OR eth_commit_tx_id IS NOT NULL\n AND commitment IS NOT NULL\n ORDER BY\n number DESC\n LIMIT\n 1\n ", "describe": { "columns": [ { @@ -162,6 +162,21 @@ "ordinal": 31, "name": "inclusion_data", "type_info": "Bytea" + }, + { + "ordinal": 32, + "name": "l1_gas_price", + "type_info": "Int8" + }, + { + "ordinal": 33, + "name": "l2_fair_gas_price", + "type_info": "Int8" + }, + { + "ordinal": 34, + "name": "fair_pubdata_price", + "type_info": "Int8" } ], "parameters": { @@ -199,8 +214,11 @@ true, true, true, - true + true, + false, + false, + false ] }, - "hash": "1cb61327bed4d65a3fc81aa2229e01396dacefc0cea8cbcf5807185eb00fc0f7" + "hash": "d62f8d10c7b469067e20e56d2e58d94bb65d35ad4b0c819fa7b4afe07d9769e9" } diff --git a/core/lib/dal/.sqlx/query-b7d448837439a3e3dfe73070d3c20e9c138d0a6d35e9ce7fc396c5e76fbc25dd.json 
b/core/lib/dal/.sqlx/query-e90a364528fab11f4b0d0e5d069395ab975d3e06a6004e01b86b8d4e14788b95.json similarity index 78% rename from core/lib/dal/.sqlx/query-b7d448837439a3e3dfe73070d3c20e9c138d0a6d35e9ce7fc396c5e76fbc25dd.json rename to core/lib/dal/.sqlx/query-e90a364528fab11f4b0d0e5d069395ab975d3e06a6004e01b86b8d4e14788b95.json index 8a68b1a9b9bd..69ac542a8a9a 100644 --- a/core/lib/dal/.sqlx/query-b7d448837439a3e3dfe73070d3c20e9c138d0a6d35e9ce7fc396c5e76fbc25dd.json +++ b/core/lib/dal/.sqlx/query-e90a364528fab11f4b0d0e5d069395ab975d3e06a6004e01b86b8d4e14788b95.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n l1_batches.timestamp,\n l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n l1_batches.bootloader_code_hash,\n l1_batches.default_aa_code_hash,\n l1_batches.evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n JOIN protocol_versions ON protocol_versions.id = l1_batches.protocol_version\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n eth_commit_tx_id IS NULL\n AND number != 0\n AND protocol_versions.bootloader_code_hash = $1\n AND protocol_versions.default_account_code_hash = $2\n AND commitment IS NOT NULL\n AND (\n protocol_versions.id = $3\n OR protocol_versions.upgrade_tx_hash IS NULL\n )\n ORDER BY\n number\n LIMIT\n $4\n ", + "query": "\n SELECT\n number,\n l1_batches.timestamp,\n 
l1_tx_count,\n l2_tx_count,\n bloom,\n priority_ops_onchain_data,\n hash,\n commitment,\n l2_to_l1_messages,\n used_contract_hashes,\n compressed_initial_writes,\n compressed_repeated_writes,\n l2_l1_merkle_root,\n rollup_last_leaf_index,\n zkporter_is_available,\n l1_batches.bootloader_code_hash,\n l1_batches.default_aa_code_hash,\n l1_batches.evm_emulator_code_hash,\n aux_data_hash,\n pass_through_data_hash,\n meta_parameters_hash,\n protocol_version,\n compressed_state_diffs,\n system_logs,\n events_queue_commitment,\n bootloader_initial_content_commitment,\n pubdata_input,\n fee_address,\n aggregation_root,\n local_root,\n state_diff_hash,\n data_availability.inclusion_data,\n l1_gas_price,\n l2_fair_gas_price,\n fair_pubdata_price\n FROM\n l1_batches\n LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number\n JOIN protocol_versions ON protocol_versions.id = l1_batches.protocol_version\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n eth_commit_tx_id IS NULL\n AND number != 0\n AND protocol_versions.bootloader_code_hash = $1\n AND protocol_versions.default_account_code_hash = $2\n AND commitment IS NOT NULL\n AND (\n protocol_versions.id = $3\n OR protocol_versions.upgrade_tx_hash IS NULL\n )\n ORDER BY\n number\n LIMIT\n $4\n ", "describe": { "columns": [ { @@ -162,6 +162,21 @@ "ordinal": 31, "name": "inclusion_data", "type_info": "Bytea" + }, + { + "ordinal": 32, + "name": "l1_gas_price", + "type_info": "Int8" + }, + { + "ordinal": 33, + "name": "l2_fair_gas_price", + "type_info": "Int8" + }, + { + "ordinal": 34, + "name": "fair_pubdata_price", + "type_info": "Int8" } ], "parameters": { @@ -204,8 +219,11 @@ true, true, true, - true + true, + false, + false, + false ] }, - "hash": "b7d448837439a3e3dfe73070d3c20e9c138d0a6d35e9ce7fc396c5e76fbc25dd" + "hash": "e90a364528fab11f4b0d0e5d069395ab975d3e06a6004e01b86b8d4e14788b95" } diff --git 
a/core/lib/dal/.sqlx/query-fd68afd6eb8890f01fe646339951d1184afcb08d2bdf310a3fd3fb5b47d4d947.json b/core/lib/dal/.sqlx/query-fd68afd6eb8890f01fe646339951d1184afcb08d2bdf310a3fd3fb5b47d4d947.json new file mode 100644 index 000000000000..abdc0a1cfa87 --- /dev/null +++ b/core/lib/dal/.sqlx/query-fd68afd6eb8890f01fe646339951d1184afcb08d2bdf310a3fd3fb5b47d4d947.json @@ -0,0 +1,28 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n call_trace,\n transactions.error AS tx_error\n FROM\n call_traces\n INNER JOIN transactions ON tx_hash = transactions.hash\n WHERE\n tx_hash = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "call_trace", + "type_info": "Bytea" + }, + { + "ordinal": 1, + "name": "tx_error", + "type_info": "Varchar" + } + ], + "parameters": { + "Left": [ + "Bytea" + ] + }, + "nullable": [ + false, + true + ] + }, + "hash": "fd68afd6eb8890f01fe646339951d1184afcb08d2bdf310a3fd3fb5b47d4d947" +} diff --git a/core/lib/dal/migrations/20241226161705_add_ratio_timestamp_index.down.sql b/core/lib/dal/migrations/20241226161705_add_ratio_timestamp_index.down.sql new file mode 100644 index 000000000000..c7c00d281ac3 --- /dev/null +++ b/core/lib/dal/migrations/20241226161705_add_ratio_timestamp_index.down.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS base_token_ratios_ratio_timestamp_idx; diff --git a/core/lib/dal/migrations/20241226161705_add_ratio_timestamp_index.up.sql b/core/lib/dal/migrations/20241226161705_add_ratio_timestamp_index.up.sql new file mode 100644 index 000000000000..7f1074f86f84 --- /dev/null +++ b/core/lib/dal/migrations/20241226161705_add_ratio_timestamp_index.up.sql @@ -0,0 +1 @@ +CREATE INDEX IF NOT EXISTS base_token_ratios_ratio_timestamp_idx ON base_token_ratios (ratio_timestamp); diff --git a/core/lib/dal/src/blocks_dal.rs b/core/lib/dal/src/blocks_dal.rs index 9404f4d14332..7b2f2e33fe2a 100644 --- a/core/lib/dal/src/blocks_dal.rs +++ b/core/lib/dal/src/blocks_dal.rs @@ -16,11 +16,10 @@ use zksync_db_connection::{ use 
zksync_types::{ aggregated_operations::AggregatedActionType, block::{ - L1BatchHeader, L1BatchStatistics, L1BatchTreeData, L2BlockHeader, StorageOracleInfo, - UnsealedL1BatchHeader, + CommonL1BatchHeader, L1BatchHeader, L1BatchStatistics, L1BatchTreeData, L2BlockHeader, + StorageOracleInfo, UnsealedL1BatchHeader, }, commitment::{L1BatchCommitmentArtifacts, L1BatchWithMetadata}, - fee_model::BatchFeeInput, l2_to_l1_log::{BatchAndChainMerklePath, UserL2ToL1Log}, writes::TreeWrite, Address, Bloom, L1BatchNumber, L2BlockNumber, ProtocolVersionId, SLChainId, H256, U256, @@ -32,7 +31,8 @@ use crate::{ models::{ parse_protocol_version, storage_block::{ - StorageL1Batch, StorageL1BatchHeader, StorageL2BlockHeader, UnsealedStorageL1Batch, + CommonStorageL1BatchHeader, StorageL1Batch, StorageL1BatchHeader, StorageL2BlockHeader, + UnsealedStorageL1Batch, }, storage_event::StorageL2ToL1Log, storage_oracle_info::DbStorageOracleInfo, @@ -103,6 +103,7 @@ impl BlocksDal<'_, '_> { Ok(count == 0) } + /// Returns the number of the last sealed L1 batch present in the DB, or `None` if there are no L1 batches. pub async fn get_sealed_l1_batch_number(&mut self) -> DalResult> { let row = sqlx::query!( r#" @@ -122,6 +123,39 @@ impl BlocksDal<'_, '_> { Ok(row.number.map(|num| L1BatchNumber(num as u32))) } + /// Returns latest L1 batch's header (could be unsealed). The header contains fields that are + /// common for both unsealed and sealed batches. Returns `None` if there are no L1 batches. + pub async fn get_latest_l1_batch_header(&mut self) -> DalResult> { + let Some(header) = sqlx::query_as!( + CommonStorageL1BatchHeader, + r#" + SELECT + number, + is_sealed, + timestamp, + protocol_version, + fee_address, + l1_gas_price, + l2_fair_gas_price, + fair_pubdata_price + FROM + l1_batches + ORDER BY + number DESC + LIMIT + 1 + "#, + ) + .instrument("get_latest_l1_batch_header") + .fetch_optional(self.storage) + .await? 
+ else { + return Ok(None); + }; + + Ok(Some(header.into())) + } + pub async fn get_sealed_l2_block_number(&mut self) -> DalResult> { let row = sqlx::query!( r#" @@ -348,7 +382,10 @@ impl BlocksDal<'_, '_> { aggregation_root, local_root, state_diff_hash, - data_availability.inclusion_data + data_availability.inclusion_data, + l1_gas_price, + l2_fair_gas_price, + fair_pubdata_price FROM l1_batches LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number @@ -389,7 +426,10 @@ impl BlocksDal<'_, '_> { protocol_version, system_logs, pubdata_input, - fee_address + fee_address, + l1_gas_price, + l2_fair_gas_price, + fair_pubdata_price FROM l1_batches WHERE @@ -795,10 +835,21 @@ impl BlocksDal<'_, '_> { l1_gas_price, l2_fair_gas_price, fair_pubdata_price - FROM - l1_batches - WHERE - NOT is_sealed + FROM ( + SELECT + number, + timestamp, + protocol_version, + fee_address, + l1_gas_price, + l2_fair_gas_price, + fair_pubdata_price, + is_sealed + FROM l1_batches + ORDER BY number DESC + LIMIT 1 + ) AS u + WHERE NOT is_sealed "#, ) .instrument("get_unsealed_l1_batch") @@ -1213,7 +1264,10 @@ impl BlocksDal<'_, '_> { aggregation_root, local_root, state_diff_hash, - data_availability.inclusion_data + data_availability.inclusion_data, + l1_gas_price, + l2_fair_gas_price, + fair_pubdata_price FROM l1_batches LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number @@ -1408,7 +1462,10 @@ impl BlocksDal<'_, '_> { aggregation_root, local_root, state_diff_hash, - data_availability.inclusion_data + data_availability.inclusion_data, + l1_gas_price, + l2_fair_gas_price, + fair_pubdata_price FROM l1_batches LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number @@ -1497,7 +1554,10 @@ impl BlocksDal<'_, '_> { aggregation_root, local_root, state_diff_hash, - data_availability.inclusion_data + data_availability.inclusion_data, + l1_gas_price, + l2_fair_gas_price, + fair_pubdata_price FROM ( SELECT @@ -1577,7 +1637,10 @@ impl BlocksDal<'_, '_> 
{ aggregation_root, local_root, state_diff_hash, - data_availability.inclusion_data + data_availability.inclusion_data, + l1_gas_price, + l2_fair_gas_price, + fair_pubdata_price FROM l1_batches LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number @@ -1737,7 +1800,10 @@ impl BlocksDal<'_, '_> { aggregation_root, local_root, state_diff_hash, - data_availability.inclusion_data + data_availability.inclusion_data, + l1_gas_price, + l2_fair_gas_price, + fair_pubdata_price FROM l1_batches LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number @@ -1810,7 +1876,10 @@ impl BlocksDal<'_, '_> { aggregation_root, local_root, state_diff_hash, - data_availability.inclusion_data + data_availability.inclusion_data, + l1_gas_price, + l2_fair_gas_price, + fair_pubdata_price FROM l1_batches LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number @@ -1897,7 +1966,10 @@ impl BlocksDal<'_, '_> { aggregation_root, local_root, state_diff_hash, - data_availability.inclusion_data + data_availability.inclusion_data, + l1_gas_price, + l2_fair_gas_price, + fair_pubdata_price FROM l1_batches LEFT JOIN commitments ON commitments.l1_batch_number = l1_batches.number @@ -2910,10 +2982,7 @@ impl BlocksDal<'_, '_> { } pub async fn insert_mock_l1_batch(&mut self, header: &L1BatchHeader) -> anyhow::Result<()> { - self.insert_l1_batch( - header.to_unsealed_header(BatchFeeInput::pubdata_independent(100, 100, 100)), - ) - .await?; + self.insert_l1_batch(header.to_unsealed_header()).await?; self.mark_l1_batch_as_sealed(header, &[], &[], &[], Default::default()) .await } diff --git a/core/lib/dal/src/blocks_web3_dal.rs b/core/lib/dal/src/blocks_web3_dal.rs index 229f49da6e37..96513da54264 100644 --- a/core/lib/dal/src/blocks_web3_dal.rs +++ b/core/lib/dal/src/blocks_web3_dal.rs @@ -567,7 +567,8 @@ impl BlocksWeb3Dal<'_, '_> { SELECT transactions.hash AS tx_hash, transactions.index_in_block AS tx_index_in_block, - call_trace + call_trace, + 
transactions.error AS tx_error FROM call_traces INNER JOIN transactions ON tx_hash = transactions.hash @@ -583,7 +584,7 @@ impl BlocksWeb3Dal<'_, '_> { .fetch_all(self.storage) .await? .into_iter() - .map(|call_trace| { + .map(|mut call_trace| { let tx_hash = H256::from_slice(&call_trace.tx_hash); let index = call_trace.tx_index_in_block.unwrap_or_default() as usize; let meta = CallTraceMeta { @@ -591,6 +592,7 @@ impl BlocksWeb3Dal<'_, '_> { tx_hash, block_number: block_number.0, block_hash, + internal_error: call_trace.tx_error.take(), }; (call_trace.into_call(protocol_version), meta) }) diff --git a/core/lib/dal/src/consensus/conv.rs b/core/lib/dal/src/consensus/conv.rs index 3153343d6014..140c3f6a84c3 100644 --- a/core/lib/dal/src/consensus/conv.rs +++ b/core/lib/dal/src/consensus/conv.rs @@ -5,7 +5,7 @@ use zksync_consensus_roles::{attester, node}; use zksync_protobuf::{read_optional_repr, read_required, required, ProtoFmt, ProtoRepr}; use zksync_types::{ abi, - commitment::{L1BatchCommitmentMode, PubdataParams}, + commitment::{PubdataParams, PubdataType}, ethabi, fee::Fee, h256_to_u256, @@ -112,8 +112,8 @@ impl ProtoRepr for proto::PubdataParams { l2_da_validator_address: required(&self.l2_da_validator_address) .and_then(|a| parse_h160(a)) .context("l2_da_validator_address")?, - pubdata_type: required(&self.pubdata_type) - .and_then(|x| Ok(proto::L1BatchCommitDataGeneratorMode::try_from(*x)?)) + pubdata_type: required(&self.pubdata_info) + .and_then(|x| Ok(proto::PubdataType::try_from(*x)?)) .context("pubdata_type")? 
.parse(), }) @@ -122,9 +122,7 @@ impl ProtoRepr for proto::PubdataParams { fn build(this: &Self::Type) -> Self { Self { l2_da_validator_address: Some(this.l2_da_validator_address.as_bytes().into()), - pubdata_type: Some( - proto::L1BatchCommitDataGeneratorMode::new(&this.pubdata_type) as i32, - ), + pubdata_info: Some(this.pubdata_type as i32), } } } @@ -572,18 +570,15 @@ impl ProtoRepr for proto::AttesterCommittee { } } -impl proto::L1BatchCommitDataGeneratorMode { - pub(crate) fn new(n: &L1BatchCommitmentMode) -> Self { - match n { - L1BatchCommitmentMode::Rollup => Self::Rollup, - L1BatchCommitmentMode::Validium => Self::Validium, - } - } - - pub(crate) fn parse(&self) -> L1BatchCommitmentMode { +impl proto::PubdataType { + pub(crate) fn parse(&self) -> PubdataType { match self { - Self::Rollup => L1BatchCommitmentMode::Rollup, - Self::Validium => L1BatchCommitmentMode::Validium, + Self::Rollup => PubdataType::Rollup, + Self::NoDa => PubdataType::NoDA, + Self::Avail => PubdataType::Avail, + Self::Celestia => PubdataType::Celestia, + Self::Eigen => PubdataType::Eigen, + Self::ObjectStore => PubdataType::ObjectStore, } } } diff --git a/core/lib/dal/src/consensus/proto/mod.proto b/core/lib/dal/src/consensus/proto/mod.proto index 49a69e8a36ec..6bae78ca5fe9 100644 --- a/core/lib/dal/src/consensus/proto/mod.proto +++ b/core/lib/dal/src/consensus/proto/mod.proto @@ -31,7 +31,8 @@ message Payload { message PubdataParams { optional bytes l2_da_validator_address = 1; // required; H160 - optional L1BatchCommitDataGeneratorMode pubdata_type = 2; // required + optional PubdataType pubdata_info = 3; // required + reserved 2; reserved "pubdata_type"; } message L1Transaction { @@ -149,7 +150,11 @@ message AttestationStatus { optional uint64 next_batch_to_attest = 2; // required } -enum L1BatchCommitDataGeneratorMode { +enum PubdataType { Rollup = 0; - Validium = 1; + NoDA = 1; + Avail = 2; + Celestia = 3; + Eigen = 4; + ObjectStore = 5; } diff --git 
a/core/lib/dal/src/consensus/tests.rs b/core/lib/dal/src/consensus/tests.rs index 465148dc7b5c..125db0de89f5 100644 --- a/core/lib/dal/src/consensus/tests.rs +++ b/core/lib/dal/src/consensus/tests.rs @@ -9,7 +9,7 @@ use zksync_protobuf::{ }; use zksync_test_contracts::Account; use zksync_types::{ - commitment::{L1BatchCommitmentMode, PubdataParams}, + commitment::{PubdataParams, PubdataType}, web3::Bytes, Execute, ExecuteTransactionCommon, L1BatchNumber, ProtocolVersionId, Transaction, }; @@ -58,8 +58,12 @@ fn payload(rng: &mut impl Rng, protocol_version: ProtocolVersionId) -> Payload { } else { PubdataParams { pubdata_type: match rng.gen_range(0..2) { - 0 => L1BatchCommitmentMode::Rollup, - _ => L1BatchCommitmentMode::Validium, + 0 => PubdataType::Rollup, + 1 => PubdataType::NoDA, + 2 => PubdataType::Avail, + 3 => PubdataType::Celestia, + 4 => PubdataType::Eigen, + _ => PubdataType::ObjectStore, }, l2_da_validator_address: rng.gen(), } diff --git a/core/lib/dal/src/consensus_dal/mod.rs b/core/lib/dal/src/consensus_dal/mod.rs index 7f3bcd1166ad..e6058e86b47d 100644 --- a/core/lib/dal/src/consensus_dal/mod.rs +++ b/core/lib/dal/src/consensus_dal/mod.rs @@ -234,7 +234,6 @@ impl ConsensusDal<'_, '_> { protocol_version: old.genesis.protocol_version, validators: old.genesis.validators.clone(), - attesters: old.genesis.attesters.clone(), leader_selection: old.genesis.leader_selection.clone(), } .with_hash(), diff --git a/core/lib/dal/src/consensus_dal/tests.rs b/core/lib/dal/src/consensus_dal/tests.rs index 694abc8508b6..4b4c5f7b3821 100644 --- a/core/lib/dal/src/consensus_dal/tests.rs +++ b/core/lib/dal/src/consensus_dal/tests.rs @@ -1,17 +1,9 @@ use rand::Rng as _; -use zksync_consensus_roles::{attester, validator}; +use zksync_consensus_roles::validator; use zksync_consensus_storage::ReplicaState; -use zksync_types::{ - block::L1BatchTreeData, - commitment::{L1BatchCommitmentArtifacts, L1BatchCommitmentHash}, - ProtocolVersion, -}; use super::*; -use crate::{ - 
tests::{create_l1_batch_header, create_l2_block_header}, - ConnectionPool, Core, CoreDal, -}; +use crate::{ConnectionPool, Core, CoreDal}; #[tokio::test] async fn replica_state_read_write() { @@ -52,138 +44,141 @@ async fn replica_state_read_write() { } } -#[tokio::test] -async fn test_batch_certificate() { - let rng = &mut rand::thread_rng(); - let setup = validator::testonly::Setup::new(rng, 3); - let pool = ConnectionPool::::test_pool().await; - let mut conn = pool.connection().await.unwrap(); - let cfg = GlobalConfig { - genesis: setup.genesis.clone(), - registry_address: Some(rng.gen()), - seed_peers: [].into(), - }; - conn.consensus_dal() - .try_update_global_config(&cfg) - .await - .unwrap(); +// NOTE: This test is disabled since we are going to remove L1 batches. Most likely +// we will remove all the attester related code as well, but keeping this until +// we are sure. +// #[tokio::test] +// async fn test_batch_certificate() { +// let rng = &mut rand::thread_rng(); +// let setup = validator::testonly::Setup::new(rng, 3); +// let pool = ConnectionPool::::test_pool().await; +// let mut conn = pool.connection().await.unwrap(); +// let cfg = GlobalConfig { +// genesis: setup.genesis.clone(), +// registry_address: Some(rng.gen()), +// seed_peers: [].into(), +// }; +// conn.consensus_dal() +// .try_update_global_config(&cfg) +// .await +// .unwrap(); - let make_cert = |number: attester::BatchNumber, hash: attester::BatchHash| { - let m = attester::Batch { - genesis: setup.genesis.hash(), - hash, - number, - }; - let mut sigs = attester::MultiSig::default(); - for k in &setup.attester_keys { - sigs.add(k.public(), k.sign_msg(m.clone()).sig); - } - attester::BatchQC { - message: m, - signatures: sigs, - } - }; +// let make_cert = |number: attester::BatchNumber, hash: attester::BatchHash| { +// let m = attester::Batch { +// genesis: setup.genesis.hash(), +// hash, +// number, +// }; +// let mut sigs = attester::MultiSig::default(); +// for k in 
&setup.attester_keys { +// sigs.add(k.public(), k.sign_msg(m.clone()).sig); +// } +// attester::BatchQC { +// message: m, +// signatures: sigs, +// } +// }; - // Required for inserting l2 blocks - conn.protocol_versions_dal() - .save_protocol_version_with_tx(&ProtocolVersion::default()) - .await - .unwrap(); +// // Required for inserting l2 blocks +// conn.protocol_versions_dal() +// .save_protocol_version_with_tx(&ProtocolVersion::default()) +// .await +// .unwrap(); - // Insert some mock L2 blocks and L1 batches - let mut block_number = 0; - let mut batch_number = 0; - for _ in 0..3 { - for _ in 0..3 { - block_number += 1; - let l2_block = create_l2_block_header(block_number); - conn.blocks_dal().insert_l2_block(&l2_block).await.unwrap(); - } - batch_number += 1; - let l1_batch = create_l1_batch_header(batch_number); - conn.blocks_dal() - .insert_mock_l1_batch(&l1_batch) - .await - .unwrap(); - conn.blocks_dal() - .save_l1_batch_tree_data( - l1_batch.number, - &L1BatchTreeData { - hash: rng.gen(), - rollup_last_leaf_index: rng.gen(), - }, - ) - .await - .unwrap(); - conn.blocks_dal() - .save_l1_batch_commitment_artifacts( - l1_batch.number, - &L1BatchCommitmentArtifacts { - commitment_hash: L1BatchCommitmentHash { - pass_through_data: rng.gen(), - aux_output: rng.gen(), - meta_parameters: rng.gen(), - commitment: rng.gen(), - }, - l2_l1_merkle_root: rng.gen(), - compressed_state_diffs: None, - compressed_initial_writes: None, - compressed_repeated_writes: None, - zkporter_is_available: false, - aux_commitments: None, - aggregation_root: rng.gen(), - local_root: rng.gen(), - state_diff_hash: rng.gen(), - }, - ) - .await - .unwrap(); - conn.blocks_dal() - .mark_l2_blocks_as_executed_in_l1_batch(l1_batch.number) - .await - .unwrap(); - } +// // Insert some mock L2 blocks and L1 batches +// let mut block_number = 0; +// let mut batch_number = 0; +// for _ in 0..3 { +// for _ in 0..3 { +// block_number += 1; +// let l2_block = create_l2_block_header(block_number); +// 
conn.blocks_dal().insert_l2_block(&l2_block).await.unwrap(); +// } +// batch_number += 1; +// let l1_batch = create_l1_batch_header(batch_number); +// conn.blocks_dal() +// .insert_mock_l1_batch(&l1_batch) +// .await +// .unwrap(); +// conn.blocks_dal() +// .save_l1_batch_tree_data( +// l1_batch.number, +// &L1BatchTreeData { +// hash: rng.gen(), +// rollup_last_leaf_index: rng.gen(), +// }, +// ) +// .await +// .unwrap(); +// conn.blocks_dal() +// .save_l1_batch_commitment_artifacts( +// l1_batch.number, +// &L1BatchCommitmentArtifacts { +// commitment_hash: L1BatchCommitmentHash { +// pass_through_data: rng.gen(), +// aux_output: rng.gen(), +// meta_parameters: rng.gen(), +// commitment: rng.gen(), +// }, +// l2_l1_merkle_root: rng.gen(), +// compressed_state_diffs: None, +// compressed_initial_writes: None, +// compressed_repeated_writes: None, +// zkporter_is_available: false, +// aux_commitments: None, +// aggregation_root: rng.gen(), +// local_root: rng.gen(), +// state_diff_hash: rng.gen(), +// }, +// ) +// .await +// .unwrap(); +// conn.blocks_dal() +// .mark_l2_blocks_as_executed_in_l1_batch(l1_batch.number) +// .await +// .unwrap(); +// } - let n = attester::BatchNumber(batch_number.into()); +// let n = attester::BatchNumber(batch_number.into()); - // Insert a batch certificate for the last L1 batch. - let hash = batch_hash(&conn.consensus_dal().batch_info(n).await.unwrap().unwrap()); - let want = make_cert(n, hash); - conn.consensus_dal() - .upsert_attester_committee(n, setup.genesis.attesters.as_ref().unwrap()) - .await - .unwrap(); - conn.consensus_dal() - .insert_batch_certificate(&want) - .await - .unwrap(); +// // Insert a batch certificate for the last L1 batch. 
+// let hash = batch_hash(&conn.consensus_dal().batch_info(n).await.unwrap().unwrap()); +// let want = make_cert(n, hash); +// conn.consensus_dal() +// .upsert_attester_committee(n, setup.genesis.attesters.as_ref().unwrap()) +// .await +// .unwrap(); +// conn.consensus_dal() +// .insert_batch_certificate(&want) +// .await +// .unwrap(); - // Reinserting a cert should fail. - assert!(conn - .consensus_dal() - .insert_batch_certificate(&make_cert(n, hash)) - .await - .is_err()); +// // Reinserting a cert should fail. +// assert!(conn +// .consensus_dal() +// .insert_batch_certificate(&make_cert(n, hash)) +// .await +// .is_err()); - // Retrieve the latest certificate. - let got_n = conn - .consensus_dal() - .last_batch_certificate_number() - .await - .unwrap() - .unwrap(); - let got = conn - .consensus_dal() - .batch_certificate(got_n) - .await - .unwrap() - .unwrap(); - assert_eq!(got, want); +// // Retrieve the latest certificate. +// let got_n = conn +// .consensus_dal() +// .last_batch_certificate_number() +// .await +// .unwrap() +// .unwrap(); +// let got = conn +// .consensus_dal() +// .batch_certificate(got_n) +// .await +// .unwrap() +// .unwrap(); +// assert_eq!(got, want); - // Try insert batch certificate for non-existing batch - assert!(conn - .consensus_dal() - .insert_batch_certificate(&make_cert(n.next(), rng.gen())) - .await - .is_err()); -} +// // Try insert batch certificate for non-existing batch +// assert!(conn +// .consensus_dal() +// .insert_batch_certificate(&make_cert(n.next(), rng.gen())) +// .await +// .is_err()); +// } diff --git a/core/lib/dal/src/eth_sender_dal.rs b/core/lib/dal/src/eth_sender_dal.rs index 10f77718ba38..eecd102f395e 100644 --- a/core/lib/dal/src/eth_sender_dal.rs +++ b/core/lib/dal/src/eth_sender_dal.rs @@ -41,9 +41,9 @@ impl EthSenderDal<'_, '_> { from_addr IS NOT DISTINCT FROM $1 -- can't just use equality as NULL != NULL AND confirmed_eth_tx_history_id IS NULL AND is_gateway = $2 - AND id <= ( - SELECT - 
COALESCE(MAX(eth_tx_id), 0) + AND id <= COALESCE( + (SELECT + eth_tx_id FROM eth_txs_history JOIN eth_txs ON eth_txs.id = eth_txs_history.eth_tx_id @@ -51,6 +51,8 @@ impl EthSenderDal<'_, '_> { eth_txs_history.sent_at_block IS NOT NULL AND eth_txs.from_addr IS NOT DISTINCT FROM $1 AND is_gateway = $2 + ORDER BY eth_tx_id DESC LIMIT 1), + 0 ) ORDER BY id @@ -84,6 +86,25 @@ impl EthSenderDal<'_, '_> { Ok(count.try_into().unwrap()) } + pub async fn get_unconfirmed_txs_count(&mut self) -> DalResult { + let count = sqlx::query!( + r#" + SELECT + COUNT(*) + FROM + eth_txs + WHERE + confirmed_eth_tx_history_id IS NULL + "# + ) + .instrument("get_unconfirmed_txs_count") + .fetch_one(self.storage) + .await? + .count + .unwrap(); + Ok(count.try_into().unwrap()) + } + pub async fn get_eth_l1_batches(&mut self) -> sqlx::Result { struct EthTxRow { number: i64, @@ -172,9 +193,9 @@ impl EthSenderDal<'_, '_> { WHERE from_addr IS NOT DISTINCT FROM $2 -- can't just use equality as NULL != NULL AND is_gateway = $3 - AND id > ( - SELECT - COALESCE(MAX(eth_tx_id), 0) + AND id > COALESCE( + (SELECT + eth_tx_id FROM eth_txs_history JOIN eth_txs ON eth_txs.id = eth_txs_history.eth_tx_id @@ -182,6 +203,8 @@ impl EthSenderDal<'_, '_> { eth_txs_history.sent_at_block IS NOT NULL AND eth_txs.from_addr IS NOT DISTINCT FROM $2 AND is_gateway = $3 + ORDER BY eth_tx_id DESC LIMIT 1), + 0 ) ORDER BY id diff --git a/core/lib/dal/src/factory_deps_dal.rs b/core/lib/dal/src/factory_deps_dal.rs index 424d708da241..64cbe78fbd1a 100644 --- a/core/lib/dal/src/factory_deps_dal.rs +++ b/core/lib/dal/src/factory_deps_dal.rs @@ -89,39 +89,36 @@ impl FactoryDepsDal<'_, '_> { .map(|row| row.bytecode)) } - pub async fn get_base_system_contracts( + pub async fn get_base_system_contracts_from_factory_deps( &mut self, bootloader_hash: H256, default_aa_hash: H256, evm_emulator_hash: Option, - ) -> anyhow::Result { + ) -> anyhow::Result> { let bootloader_bytecode = self .get_sealed_factory_dep(bootloader_hash) .await 
- .context("failed loading bootloader code")? - .with_context(|| format!("bootloader code with hash {bootloader_hash:?} should be present in the database"))?; - let bootloader_code = SystemContractCode { - code: bootloader_bytecode, - hash: bootloader_hash, - }; + .context("failed loading bootloader code")?; let default_aa_bytecode = self .get_sealed_factory_dep(default_aa_hash) .await - .context("failed loading default account code")? - .with_context(|| format!("default account code with hash {default_aa_hash:?} should be present in the database"))?; + .context("failed loading default account code")?; - let default_aa_code = SystemContractCode { - code: default_aa_bytecode, - hash: default_aa_hash, + let (Some(bootloader_bytecode), Some(default_aa_bytecode)) = + (bootloader_bytecode, default_aa_bytecode) + else { + return Ok(None); }; let evm_emulator_code = if let Some(evm_emulator_hash) = evm_emulator_hash { let evm_emulator_bytecode = self .get_sealed_factory_dep(evm_emulator_hash) .await - .context("failed loading EVM emulator code")? - .with_context(|| format!("EVM emulator code with hash {evm_emulator_hash:?} should be present in the database"))?; + .context("failed loading EVM emulator code")?; + let Some(evm_emulator_bytecode) = evm_emulator_bytecode else { + return Ok(None); + }; Some(SystemContractCode { code: evm_emulator_bytecode, @@ -131,11 +128,20 @@ impl FactoryDepsDal<'_, '_> { None }; - Ok(BaseSystemContracts { + let bootloader_code = SystemContractCode { + code: bootloader_bytecode, + hash: bootloader_hash, + }; + + let default_aa_code = SystemContractCode { + code: default_aa_bytecode, + hash: default_aa_hash, + }; + Ok(Some(BaseSystemContracts { bootloader: bootloader_code, default_aa: default_aa_code, evm_emulator: evm_emulator_code, - }) + })) } /// Returns bytecodes for factory deps with the specified `hashes`. 
diff --git a/core/lib/dal/src/models/storage_block.rs b/core/lib/dal/src/models/storage_block.rs index 54635932a1af..cb61c29190eb 100644 --- a/core/lib/dal/src/models/storage_block.rs +++ b/core/lib/dal/src/models/storage_block.rs @@ -6,9 +6,9 @@ use thiserror::Error; use zksync_contracts::BaseSystemContractsHashes; use zksync_types::{ api, - block::{L1BatchHeader, L2BlockHeader, UnsealedL1BatchHeader}, - commitment::{L1BatchCommitmentMode, L1BatchMetaParameters, L1BatchMetadata, PubdataParams}, - fee_model::{BatchFeeInput, L1PeggedBatchFeeModelInput, PubdataIndependentBatchFeeModelInput}, + block::{CommonL1BatchHeader, L1BatchHeader, L2BlockHeader, UnsealedL1BatchHeader}, + commitment::{L1BatchMetaParameters, L1BatchMetadata, PubdataParams, PubdataType}, + fee_model::BatchFeeInput, l2_to_l1_log::{L2ToL1Log, SystemL2ToL1Log, UserL2ToL1Log}, Address, Bloom, L1BatchNumber, L2BlockNumber, ProtocolVersionId, SLChainId, H256, }; @@ -54,6 +54,10 @@ pub(crate) struct StorageL1BatchHeader { pub system_logs: Vec>, pub pubdata_input: Option>, pub fee_address: Vec, + + pub l1_gas_price: i64, + pub l2_fair_gas_price: i64, + pub fair_pubdata_price: Option, } impl StorageL1BatchHeader { @@ -69,6 +73,14 @@ impl StorageL1BatchHeader { let system_logs = convert_l2_to_l1_logs(self.system_logs); + let batch_fee_input = BatchFeeInput::from_protocol_version( + self.protocol_version + .map(|v| (v as u16).try_into().unwrap()), + self.l1_gas_price as u64, + self.l2_fair_gas_price as u64, + self.fair_pubdata_price.map(|p| p as u64), + ); + L1BatchHeader { number: L1BatchNumber(self.number as u32), timestamp: self.timestamp as u64, @@ -92,6 +104,7 @@ impl StorageL1BatchHeader { .map(|v| (v as u16).try_into().unwrap()), pubdata_input: self.pubdata_input, fee_address: Address::from_slice(&self.fee_address), + batch_fee_input, } } } @@ -159,6 +172,10 @@ pub(crate) struct StorageL1Batch { pub local_root: Option>, pub state_diff_hash: Option>, pub inclusion_data: Option>, + + pub l1_gas_price: 
i64, + pub l2_fair_gas_price: i64, + pub fair_pubdata_price: Option, } impl StorageL1Batch { @@ -174,6 +191,14 @@ impl StorageL1Batch { let system_logs = convert_l2_to_l1_logs(self.system_logs); + let batch_fee_input = BatchFeeInput::from_protocol_version( + self.protocol_version + .map(|v| (v as u16).try_into().unwrap()), + self.l1_gas_price as u64, + self.l2_fair_gas_price as u64, + self.fair_pubdata_price.map(|p| p as u64), + ); + L1BatchHeader { number: L1BatchNumber(self.number as u32), timestamp: self.timestamp as u64, @@ -197,6 +222,7 @@ impl StorageL1Batch { .map(|v| (v as u16).try_into().unwrap()), pubdata_input: self.pubdata_input, fee_address: Address::from_slice(&self.fee_address), + batch_fee_input, } } } @@ -307,6 +333,39 @@ impl From for UnsealedL1BatchHeader { } } +/// Partial projection of the columns common to both [`L1BatchHeader`] and [`UnsealedL1BatchHeader`]. +pub(crate) struct CommonStorageL1BatchHeader { + pub number: i64, + pub is_sealed: bool, + pub timestamp: i64, + pub protocol_version: Option, + pub fee_address: Vec, + pub l1_gas_price: i64, + pub l2_fair_gas_price: i64, + pub fair_pubdata_price: Option, +} + +impl From for CommonL1BatchHeader { + fn from(batch: CommonStorageL1BatchHeader) -> Self { + let protocol_version: Option = batch + .protocol_version + .map(|v| (v as u16).try_into().unwrap()); + Self { + number: L1BatchNumber(batch.number as u32), + is_sealed: batch.is_sealed, + timestamp: batch.timestamp as u64, + protocol_version, + fee_address: Address::from_slice(&batch.fee_address), + fee_input: BatchFeeInput::for_protocol_version( + protocol_version.unwrap_or_else(ProtocolVersionId::last_potentially_undefined), + batch.l2_fair_gas_price as u64, + batch.fair_pubdata_price.map(|p| p as u64), + batch.l1_gas_price as u64, + ), + } + } +} + #[derive(Debug, Clone, sqlx::FromRow)] pub(crate) struct StorageBlockDetails { pub number: i64, @@ -512,25 +571,12 @@ pub(crate) struct StorageL2BlockHeader { impl From for L2BlockHeader { fn 
from(row: StorageL2BlockHeader) -> Self { let protocol_version = row.protocol_version.map(|v| (v as u16).try_into().unwrap()); - - let fee_input = protocol_version - .filter(|version: &ProtocolVersionId| version.is_post_1_4_1()) - .map(|_| { - BatchFeeInput::PubdataIndependent(PubdataIndependentBatchFeeModelInput { - fair_pubdata_price: row - .fair_pubdata_price - .expect("No fair pubdata price for 1.4.1 miniblock") - as u64, - fair_l2_gas_price: row.l2_fair_gas_price as u64, - l1_gas_price: row.l1_gas_price as u64, - }) - }) - .unwrap_or_else(|| { - BatchFeeInput::L1Pegged(L1PeggedBatchFeeModelInput { - fair_l2_gas_price: row.l2_fair_gas_price as u64, - l1_gas_price: row.l1_gas_price as u64, - }) - }); + let batch_fee_input = BatchFeeInput::from_protocol_version( + protocol_version, + row.l1_gas_price as u64, + row.l2_fair_gas_price as u64, + row.fair_pubdata_price.map(|p| p as u64), + ); L2BlockHeader { number: L2BlockNumber(row.number as u32), @@ -540,7 +586,7 @@ impl From for L2BlockHeader { l2_tx_count: row.l2_tx_count as u16, fee_account_address: Address::from_slice(&row.fee_account_address), base_fee_per_gas: row.base_fee_per_gas.to_u64().unwrap(), - batch_fee_input: fee_input, + batch_fee_input, base_system_contracts_hashes: convert_base_system_contracts_hashes( row.bootloader_code_hash, row.default_aa_code_hash, @@ -556,7 +602,7 @@ impl From for L2BlockHeader { .unwrap_or_default(), pubdata_params: PubdataParams { l2_da_validator_address: Address::from_slice(&row.l2_da_validator_address), - pubdata_type: L1BatchCommitmentMode::from_str(&row.pubdata_type).unwrap(), + pubdata_type: PubdataType::from_str(&row.pubdata_type).unwrap(), }, } } diff --git a/core/lib/dal/src/models/storage_sync.rs b/core/lib/dal/src/models/storage_sync.rs index 3f80f52c56eb..bac51c9e14d9 100644 --- a/core/lib/dal/src/models/storage_sync.rs +++ b/core/lib/dal/src/models/storage_sync.rs @@ -4,7 +4,7 @@ use zksync_contracts::BaseSystemContractsHashes; use 
zksync_db_connection::error::SqlxContext; use zksync_types::{ api::en, - commitment::{L1BatchCommitmentMode, PubdataParams}, + commitment::{PubdataParams, PubdataType}, parse_h160, parse_h256, parse_h256_opt, Address, L1BatchNumber, L2BlockNumber, ProtocolVersionId, Transaction, H256, }; @@ -97,7 +97,7 @@ impl TryFrom for SyncBlock { hash: parse_h256(&block.hash).decode_column("hash")?, protocol_version: parse_protocol_version(block.protocol_version)?, pubdata_params: PubdataParams { - pubdata_type: L1BatchCommitmentMode::from_str(&block.pubdata_type) + pubdata_type: PubdataType::from_str(&block.pubdata_type) .decode_column("Invalid pubdata type")?, l2_da_validator_address: parse_h160(&block.l2_da_validator_address) .decode_column("l2_da_validator_address")?, diff --git a/core/lib/dal/src/models/storage_transaction.rs b/core/lib/dal/src/models/storage_transaction.rs index cceebc85cf2b..27442e41d7be 100644 --- a/core/lib/dal/src/models/storage_transaction.rs +++ b/core/lib/dal/src/models/storage_transaction.rs @@ -591,6 +591,7 @@ pub(crate) struct CallTrace { pub call_trace: Vec, pub tx_hash: Vec, pub tx_index_in_block: Option, + pub tx_error: Option, } impl CallTrace { diff --git a/core/lib/dal/src/protocol_versions_dal.rs b/core/lib/dal/src/protocol_versions_dal.rs index 11f6a93efdc4..18dd7516f4de 100644 --- a/core/lib/dal/src/protocol_versions_dal.rs +++ b/core/lib/dal/src/protocol_versions_dal.rs @@ -1,7 +1,7 @@ use std::convert::TryInto; use anyhow::Context as _; -use zksync_contracts::{BaseSystemContracts, BaseSystemContractsHashes}; +use zksync_contracts::BaseSystemContractsHashes; use zksync_db_connection::{ connection::Connection, error::DalResult, @@ -200,12 +200,10 @@ impl ProtocolVersionsDal<'_, '_> { ProtocolVersionId::try_from(row.id as u16).map_err(|err| sqlx::Error::Decode(err.into())) } - /// Returns base system contracts' hashes. 
Prefer `load_base_system_contracts_by_version_id` if - /// you also want to load the contracts themselves AND expect the contracts to be in the DB - /// already. + /// Returns base system contracts' hashes. pub async fn get_base_system_contract_hashes_by_version_id( &mut self, - version_id: u16, + version_id: ProtocolVersionId, ) -> anyhow::Result> { let row = sqlx::query!( r#" @@ -218,10 +216,10 @@ impl ProtocolVersionsDal<'_, '_> { WHERE id = $1 "#, - i32::from(version_id) + i32::from(version_id as u16) ) .instrument("get_base_system_contract_hashes_by_version_id") - .with_arg("version_id", &version_id) + .with_arg("version_id", &(version_id as u16)) .fetch_optional(self.storage) .await .context("cannot fetch system contract hashes")?; @@ -237,45 +235,6 @@ impl ProtocolVersionsDal<'_, '_> { }) } - pub async fn load_base_system_contracts_by_version_id( - &mut self, - version_id: u16, - ) -> anyhow::Result> { - let row = sqlx::query!( - r#" - SELECT - bootloader_code_hash, - default_account_code_hash, - evm_emulator_code_hash - FROM - protocol_versions - WHERE - id = $1 - "#, - i32::from(version_id) - ) - .instrument("load_base_system_contracts_by_version_id") - .with_arg("version_id", &version_id) - .fetch_optional(self.storage) - .await - .context("cannot fetch system contract hashes")?; - - Ok(if let Some(row) = row { - let contracts = self - .storage - .factory_deps_dal() - .get_base_system_contracts( - H256::from_slice(&row.bootloader_code_hash), - H256::from_slice(&row.default_account_code_hash), - row.evm_emulator_code_hash.as_deref().map(H256::from_slice), - ) - .await?; - Some(contracts) - } else { - None - }) - } - pub async fn get_protocol_version_with_latest_patch( &mut self, version_id: ProtocolVersionId, diff --git a/core/lib/dal/src/transactions_dal.rs b/core/lib/dal/src/transactions_dal.rs index 20ecf8736d7c..6b35c507b919 100644 --- a/core/lib/dal/src/transactions_dal.rs +++ b/core/lib/dal/src/transactions_dal.rs @@ -1776,7 +1776,7 @@ impl 
TransactionsDal<'_, '_> { limit: usize, ) -> DalResult> { let stashed_addresses: Vec<_> = stashed_accounts.iter().map(Address::as_bytes).collect(); - sqlx::query!( + let result = sqlx::query!( r#" UPDATE transactions SET @@ -1794,8 +1794,15 @@ impl TransactionsDal<'_, '_> { .execute(self.storage) .await?; + tracing::debug!( + "Updated {} transactions for stashed accounts, stashed accounts amount: {}, stashed_accounts: {:?}", + result.rows_affected(), + stashed_addresses.len(), + stashed_accounts.iter().map(|a|format!("{:x}", a)).collect::>() + ); + let purged_addresses: Vec<_> = purged_accounts.iter().map(Address::as_bytes).collect(); - sqlx::query!( + let result = sqlx::query!( r#" DELETE FROM transactions WHERE @@ -1809,6 +1816,12 @@ impl TransactionsDal<'_, '_> { .execute(self.storage) .await?; + tracing::debug!( + "Updated {} transactions for purged accounts, purged accounts amount: {}", + result.rows_affected(), + purged_addresses.len() + ); + // Note, that transactions are updated in order of their hashes to avoid deadlocks with other UPDATE queries. let transactions = sqlx::query_as!( StorageTransaction, @@ -2220,9 +2233,11 @@ impl TransactionsDal<'_, '_> { Ok(sqlx::query!( r#" SELECT - call_trace + call_trace, + transactions.error AS tx_error FROM call_traces + INNER JOIN transactions ON tx_hash = transactions.hash WHERE tx_hash = $1 "#, @@ -2232,7 +2247,7 @@ impl TransactionsDal<'_, '_> { .with_arg("tx_hash", &tx_hash) .fetch_optional(self.storage) .await? 
- .map(|call_trace| { + .map(|mut call_trace| { ( parse_call_trace(&call_trace.call_trace, protocol_version), CallTraceMeta { @@ -2240,6 +2255,7 @@ impl TransactionsDal<'_, '_> { tx_hash, block_number: row.miniblock_number as u32, block_hash: H256::from_slice(&row.miniblocks_hash), + internal_error: call_trace.tx_error.take(), }, ) })) diff --git a/core/lib/env_config/src/contracts.rs b/core/lib/env_config/src/contracts.rs index 457a946d9831..4cd0b021ff20 100644 --- a/core/lib/env_config/src/contracts.rs +++ b/core/lib/env_config/src/contracts.rs @@ -10,8 +10,14 @@ impl FromEnv for EcosystemContracts { .parse()?, transparent_proxy_admin_addr: std::env::var("CONTRACTS_TRANSPARENT_PROXY_ADMIN_ADDR")? .parse()?, - // Not supported yet - l1_bytecodes_supplier_addr: None, + l1_bytecodes_supplier_addr: std::env::var("CONTRACTS_L1_BYTECODE_SUPPLIER_ADDR")? + .parse() + .ok(), + l1_wrapped_base_token_store: std::env::var( + "CONTRACTS_L1_WRAPPED_BASE_TOKEN_STORE_ADDR", + )? + .parse() + .ok(), }) } } @@ -44,6 +50,9 @@ impl FromEnv for ContractsConfig { #[cfg(test)] mod tests { + use std::str::FromStr; + + use zksync_basic_types::H256; use zksync_config::configs::EcosystemContracts; use zksync_system_constants::SHARED_BRIDGE_ETHER_TOKEN_ADDRESS; @@ -72,11 +81,20 @@ mod tests { bridgehub_proxy_addr: addr("0x35ea7f92f4c5f433efe15284e99c040110cf6297"), state_transition_proxy_addr: addr("0xd90f1c081c6117241624e97cb6147257c3cb2097"), transparent_proxy_admin_addr: addr("0xdd6fa5c14e7550b4caf2aa2818d24c69cbc347e5"), - l1_bytecodes_supplier_addr: None, + l1_bytecodes_supplier_addr: Some(addr( + "0x36ea7f92f4c5f433efe15284e99c040110cf6297", + )), + l1_wrapped_base_token_store: Some(addr( + "0x36ea7f92f4c5f433efe15284e99c040110cf6298", + )), }), base_token_addr: Some(SHARED_BRIDGE_ETHER_TOKEN_ADDRESS), - l1_base_token_asset_id: None, - l2_predeployed_wrapped_base_token_address: None, + l1_base_token_asset_id: Some( + H256::from_str( + 
"0x0000000000000000000000000000000000000001000000000000000000000000", + ) + .unwrap(), + ), chain_admin_addr: Some(addr("0xdd6fa5c14e7550b4caf2aa2818d24c69cbc347ff")), l2_da_validator_addr: Some(addr("0xed6fa5c14e7550b4caf2aa2818d24c69cbc347ff")), l2_timestamp_asserter_addr: Some(addr("0x0000000000000000000000000000000000000002")), @@ -101,11 +119,16 @@ CONTRACTS_L2_CONSENSUS_REGISTRY_ADDR="D64e136566a9E04eb05B30184fF577F52682D182" CONTRACTS_L1_MULTICALL3_ADDR="0xcA11bde05977b3631167028862bE2a173976CA11" CONTRACTS_L1_SHARED_BRIDGE_PROXY_ADDR="0x8656770FA78c830456B00B4fFCeE6b1De0e1b888" CONTRACTS_L2_SHARED_BRIDGE_ADDR="0x8656770FA78c830456B00B4fFCeE6b1De0e1b888" +CONTRACTS_L1_BYTECODE_SUPPLIER_ADDR="0x36ea7f92f4c5f433efe15284e99c040110cf6297" CONTRACTS_L2_LEGACY_SHARED_BRIDGE_ADDR="0x8656770FA78c830456B00B4fFCeE6b1De0e1b888" CONTRACTS_BRIDGEHUB_PROXY_ADDR="0x35ea7f92f4c5f433efe15284e99c040110cf6297" CONTRACTS_STATE_TRANSITION_PROXY_ADDR="0xd90f1c081c6117241624e97cb6147257c3cb2097" CONTRACTS_TRANSPARENT_PROXY_ADMIN_ADDR="0xdd6fa5c14e7550b4caf2aa2818d24c69cbc347e5" CONTRACTS_BASE_TOKEN_ADDR="0x0000000000000000000000000000000000000001" +CONTRACTS_L1_BASE_TOKEN_ASSET_ID="0x0000000000000000000000000000000000000001000000000000000000000000" +CONTRACTS_L1_WRAPPED_BASE_TOKEN_STORE_ADDR="0x36ea7f92f4c5f433efe15284e99c040110cf6298" +CONTRACTS_L2_NATIVE_TOKEN_VAULT_PROXY_ADDR="0xfc073319977e314f251eae6ae6be76b0b3baeecf" +CONTRACTS_PREDEPLOYED_L2_WRAPPED_BASE_TOKEN_ADDRESS="0x35ea7f92f4c5f433efe15284e99c040110cf6299" CONTRACTS_CHAIN_ADMIN_ADDR="0xdd6fa5c14e7550b4caf2aa2818d24c69cbc347ff" CONTRACTS_L2_DA_VALIDATOR_ADDR="0xed6fa5c14e7550b4caf2aa2818d24c69cbc347ff" CONTRACTS_L2_TIMESTAMP_ASSERTER_ADDR="0x0000000000000000000000000000000000000002" diff --git a/core/lib/env_config/src/da_client.rs b/core/lib/env_config/src/da_client.rs index 4469dd1afe4f..0a0a55c0eb92 100644 --- a/core/lib/env_config/src/da_client.rs +++ b/core/lib/env_config/src/da_client.rs @@ -149,6 +149,7 @@ mod 
tests { config: AvailClientConfig::FullClient(AvailDefaultConfig { api_node_url: api_node_url.to_string(), app_id, + finality_state: None, }), }) } diff --git a/core/lib/env_config/src/eth_sender.rs b/core/lib/env_config/src/eth_sender.rs index b15a153c30c3..3030d4206812 100644 --- a/core/lib/env_config/src/eth_sender.rs +++ b/core/lib/env_config/src/eth_sender.rs @@ -61,7 +61,6 @@ mod tests { aggregated_block_execute_deadline: 4_000, max_aggregated_tx_gas: 4_000_000, max_eth_tx_data_size: 120_000, - timestamp_criteria_max_allowed_lag: 30, max_aggregated_blocks_to_commit: 3, max_aggregated_blocks_to_execute: 4, diff --git a/core/lib/env_config/src/genesis.rs b/core/lib/env_config/src/genesis.rs index 486d354e6cb5..60c5a48993d7 100644 --- a/core/lib/env_config/src/genesis.rs +++ b/core/lib/env_config/src/genesis.rs @@ -84,7 +84,6 @@ impl FromEnv for GenesisConfig { bootloader_hash: state_keeper.bootloader_hash, default_aa_hash: state_keeper.default_aa_hash, evm_emulator_hash: state_keeper.evm_emulator_hash, - // TODO(EVM-676): for now, the settlement layer is always the same as the L1 network l1_chain_id: L1ChainId(network_config.network.chain_id().0), l2_chain_id: network_config.zksync_network_id, snark_wrapper_vk_hash: contracts_config.snark_wrapper_vk_hash, diff --git a/core/lib/env_config/src/wallets.rs b/core/lib/env_config/src/wallets.rs index 3518d56f7b45..e9574be4456f 100644 --- a/core/lib/env_config/src/wallets.rs +++ b/core/lib/env_config/src/wallets.rs @@ -33,6 +33,7 @@ impl FromEnv for Wallets { } else { None }; + Some(EthSender { operator, blob_operator, diff --git a/core/lib/l1_contract_interface/src/i_executor/methods/commit_batches.rs b/core/lib/l1_contract_interface/src/i_executor/methods/commit_batches.rs index 01e362fb7d65..3487ad81a840 100644 --- a/core/lib/l1_contract_interface/src/i_executor/methods/commit_batches.rs +++ b/core/lib/l1_contract_interface/src/i_executor/methods/commit_batches.rs @@ -18,7 +18,7 @@ pub struct CommitBatches<'a> { 
pub mode: L1BatchCommitmentMode, } -impl Tokenize for CommitBatches<'_> { +impl Tokenize for &CommitBatches<'_> { fn into_tokens(self) -> Vec { let protocol_version = self.l1_batches[0].header.protocol_version.unwrap(); let stored_batch_info = StoredBatchInfo::from(self.last_committed_l1_batch).into_token(); @@ -27,6 +27,7 @@ impl Tokenize for CommitBatches<'_> { .iter() .map(|batch| CommitBatchInfo::new(self.mode, batch, self.pubdata_da).into_token()) .collect(); + if protocol_version.is_pre_gateway() { vec![stored_batch_info, Token::Array(l1_batches_to_commit)] } else { diff --git a/core/lib/l1_contract_interface/src/i_executor/methods/execute_batches.rs b/core/lib/l1_contract_interface/src/i_executor/methods/execute_batches.rs index 649a7ca2b419..5d5494ca62a8 100644 --- a/core/lib/l1_contract_interface/src/i_executor/methods/execute_batches.rs +++ b/core/lib/l1_contract_interface/src/i_executor/methods/execute_batches.rs @@ -1,11 +1,12 @@ use zksync_types::{ commitment::{L1BatchWithMetadata, PriorityOpsMerkleProof}, ethabi::{encode, Token}, + ProtocolVersionId, }; use crate::{ i_executor::structures::{StoredBatchInfo, SUPPORTED_ENCODING_VERSION}, - Tokenizable, Tokenize, + Tokenizable, }; /// Input required to encode `executeBatches` call. @@ -15,11 +16,15 @@ pub struct ExecuteBatches { pub priority_ops_proofs: Vec, } -impl Tokenize for &ExecuteBatches { - fn into_tokens(self) -> Vec { - let protocol_version = self.l1_batches[0].header.protocol_version.unwrap(); +impl ExecuteBatches { + // The encodings of `ExecuteBatches` operations are different depending on the protocol version + // of the underlying chain. + // However, we can send batches with older protocol versions just by changing the encoding. + // This makes the migration simpler. 
+ pub fn encode_for_eth_tx(&self, chain_protocol_version: ProtocolVersionId) -> Vec { + let internal_protocol_version = self.l1_batches[0].header.protocol_version.unwrap(); - if protocol_version.is_pre_gateway() { + if internal_protocol_version.is_pre_gateway() && chain_protocol_version.is_pre_gateway() { vec![Token::Array( self.l1_batches .iter() diff --git a/core/lib/l1_contract_interface/src/i_executor/methods/prove_batches.rs b/core/lib/l1_contract_interface/src/i_executor/methods/prove_batches.rs index 2d02bd5a1764..817448cc1b62 100644 --- a/core/lib/l1_contract_interface/src/i_executor/methods/prove_batches.rs +++ b/core/lib/l1_contract_interface/src/i_executor/methods/prove_batches.rs @@ -58,8 +58,10 @@ impl ProveBatches { } }; + let should_use_fflonk = !is_verifier_pre_fflonk || !protocol_version.is_pre_fflonk(); + if protocol_version.is_pre_gateway() { - let proof_input = if !is_verifier_pre_fflonk || !protocol_version.is_pre_fflonk() { + let proof_input = if should_use_fflonk { Token::Tuple(vec![ Token::Array(vec![verifier_type.into_token()]), Token::Array(proof.into_iter().map(Token::Uint).collect()), @@ -73,7 +75,17 @@ impl ProveBatches { vec![prev_l1_batch_info, batches_arg, proof_input] } else { - let proof_input = Token::Array(proof.into_iter().map(Token::Uint).collect()); + let proof_input = if should_use_fflonk { + Token::Array( + vec![verifier_type] + .into_iter() + .chain(proof) + .map(Token::Uint) + .collect(), + ) + } else { + Token::Array(proof.into_iter().map(Token::Uint).collect()) + }; let encoded_data = encode(&[prev_l1_batch_info, batches_arg, proof_input]); let prove_data = [[SUPPORTED_ENCODING_VERSION].to_vec(), encoded_data] diff --git a/core/lib/l1_contract_interface/src/i_executor/structures/mod.rs b/core/lib/l1_contract_interface/src/i_executor/structures/mod.rs index 9583e0204f75..5035abf6af60 100644 --- a/core/lib/l1_contract_interface/src/i_executor/structures/mod.rs +++ 
b/core/lib/l1_contract_interface/src/i_executor/structures/mod.rs @@ -3,6 +3,8 @@ mod commit_batch_info; mod stored_batch_info; +pub const SUPPORTED_ENCODING_VERSION: u8 = 0; + #[cfg(test)] mod tests; @@ -13,5 +15,3 @@ pub use self::{ }, stored_batch_info::StoredBatchInfo, }; - -pub const SUPPORTED_ENCODING_VERSION: u8 = 0; diff --git a/core/lib/mempool/src/mempool_store.rs b/core/lib/mempool/src/mempool_store.rs index 70176b456dd1..c6758574df7a 100644 --- a/core/lib/mempool/src/mempool_store.rs +++ b/core/lib/mempool/src/mempool_store.rs @@ -172,6 +172,8 @@ impl MempoolStore { .rfind(|el| el.matches_filter(filter))? .clone(); + let initial_length = self.stashed_accounts.len(); + // Stash all observed transactions that don't meet criteria for stashed_pointer in self .l2_priority_queue @@ -187,6 +189,13 @@ impl MempoolStore { self.stashed_accounts.push(stashed_pointer.account); } + + tracing::debug!( + "Stashed {} accounts by filter: {:?}", + self.stashed_accounts.len() - initial_length, + filter + ); + // insert pointer to the next transaction if it exists let (transaction, constraint, score) = self .l2_transactions_per_account diff --git a/core/lib/multivm/Cargo.toml b/core/lib/multivm/Cargo.toml index 107a168e305a..325a6f3a99e3 100644 --- a/core/lib/multivm/Cargo.toml +++ b/core/lib/multivm/Cargo.toml @@ -40,5 +40,5 @@ assert_matches.workspace = true pretty_assertions.workspace = true rand.workspace = true test-casing.workspace = true -zksync_test_contracts.workspace = true zksync_eth_signer.workspace = true +zksync_test_contracts.workspace = true diff --git a/core/lib/multivm/src/pubdata_builders/rollup.rs b/core/lib/multivm/src/pubdata_builders/full_builder.rs similarity index 65% rename from core/lib/multivm/src/pubdata_builders/rollup.rs rename to core/lib/multivm/src/pubdata_builders/full_builder.rs index 4a818dfe2314..40a260314a0a 100644 --- a/core/lib/multivm/src/pubdata_builders/rollup.rs +++ b/core/lib/multivm/src/pubdata_builders/full_builder.rs @@ 
-2,28 +2,27 @@ use zksync_types::{ ethabi, ethabi::{ParamType, Token}, l2_to_l1_log::l2_to_l1_logs_tree_size, - writes::compress_state_diffs, Address, ProtocolVersionId, }; use super::utils::{ build_chained_bytecode_hash, build_chained_log_hash, build_chained_message_hash, - build_logs_root, encode_user_logs, + build_logs_root, extend_from_pubdata_input, }; use crate::interface::pubdata::{PubdataBuilder, PubdataInput}; #[derive(Debug, Clone, Copy)] -pub struct RollupPubdataBuilder { +pub struct FullPubdataBuilder { pub l2_da_validator: Address, } -impl RollupPubdataBuilder { +impl FullPubdataBuilder { pub fn new(l2_da_validator: Address) -> Self { Self { l2_da_validator } } } -impl PubdataBuilder for RollupPubdataBuilder { +impl PubdataBuilder for FullPubdataBuilder { fn l2_da_validator(&self) -> Address { self.l2_da_validator } @@ -95,34 +94,3 @@ impl PubdataBuilder for RollupPubdataBuilder { pubdata } } - -fn extend_from_pubdata_input(buffer: &mut Vec, pubdata_input: &PubdataInput) { - let PubdataInput { - user_logs, - l2_to_l1_messages, - published_bytecodes, - state_diffs, - } = pubdata_input; - - // Adding user L2->L1 logs. - buffer.extend(encode_user_logs(user_logs)); - - // Encoding L2->L1 messages - // Format: `[(numberOfMessages as u32) || (messages[1].len() as u32) || messages[1] || ... || (messages[n].len() as u32) || messages[n]]` - buffer.extend((l2_to_l1_messages.len() as u32).to_be_bytes()); - for message in l2_to_l1_messages { - buffer.extend((message.len() as u32).to_be_bytes()); - buffer.extend(message); - } - // Encoding bytecodes - // Format: `[(numberOfBytecodes as u32) || (bytecodes[1].len() as u32) || bytecodes[1] || ... 
|| (bytecodes[n].len() as u32) || bytecodes[n]]` - buffer.extend((published_bytecodes.len() as u32).to_be_bytes()); - for bytecode in published_bytecodes { - buffer.extend((bytecode.len() as u32).to_be_bytes()); - buffer.extend(bytecode); - } - // Encoding state diffs - // Format: `[size of compressed state diffs u32 || compressed state diffs || (# state diffs: intial + repeated) as u32 || sorted state diffs by ]` - let state_diffs_compressed = compress_state_diffs(state_diffs.clone()); - buffer.extend(state_diffs_compressed); -} diff --git a/core/lib/multivm/src/pubdata_builders/validium.rs b/core/lib/multivm/src/pubdata_builders/hashed_builder.rs similarity index 90% rename from core/lib/multivm/src/pubdata_builders/validium.rs rename to core/lib/multivm/src/pubdata_builders/hashed_builder.rs index a9156e970aad..d779e0172d5a 100644 --- a/core/lib/multivm/src/pubdata_builders/validium.rs +++ b/core/lib/multivm/src/pubdata_builders/hashed_builder.rs @@ -13,17 +13,17 @@ use super::utils::{ use crate::interface::pubdata::{PubdataBuilder, PubdataInput}; #[derive(Debug, Clone, Copy)] -pub struct ValidiumPubdataBuilder { +pub struct HashedPubdataBuilder { pub l2_da_validator: Address, } -impl ValidiumPubdataBuilder { +impl HashedPubdataBuilder { pub fn new(l2_da_validator: Address) -> Self { Self { l2_da_validator } } } -impl PubdataBuilder for ValidiumPubdataBuilder { +impl PubdataBuilder for HashedPubdataBuilder { fn l2_da_validator(&self) -> Address { self.l2_da_validator } @@ -35,7 +35,7 @@ impl PubdataBuilder for ValidiumPubdataBuilder { ) -> Vec { assert!( !protocol_version.is_pre_gateway(), - "ValidiumPubdataBuilder must not be called for pre gateway" + "HashedPubdataBuilder must not be called for pre gateway" ); let mut pubdata = vec![]; @@ -79,7 +79,7 @@ impl PubdataBuilder for ValidiumPubdataBuilder { ) -> Vec { assert!( !protocol_version.is_pre_gateway(), - "ValidiumPubdataBuilder must not be called for pre gateway" + "HashedPubdataBuilder must not be called 
for pre gateway" ); let state_diffs_packed = input diff --git a/core/lib/multivm/src/pubdata_builders/mod.rs b/core/lib/multivm/src/pubdata_builders/mod.rs index c52c4c70c86a..875a093370f4 100644 --- a/core/lib/multivm/src/pubdata_builders/mod.rs +++ b/core/lib/multivm/src/pubdata_builders/mod.rs @@ -1,24 +1,26 @@ use std::rc::Rc; -pub use rollup::RollupPubdataBuilder; -pub use validium::ValidiumPubdataBuilder; -use zksync_types::commitment::{L1BatchCommitmentMode, PubdataParams}; +pub use full_builder::FullPubdataBuilder; +pub use hashed_builder::HashedPubdataBuilder; +use zksync_types::commitment::{PubdataParams, PubdataType}; use crate::interface::pubdata::PubdataBuilder; -mod rollup; +mod full_builder; +mod hashed_builder; #[cfg(test)] mod tests; mod utils; -mod validium; pub fn pubdata_params_to_builder(params: PubdataParams) -> Rc { match params.pubdata_type { - L1BatchCommitmentMode::Rollup => { - Rc::new(RollupPubdataBuilder::new(params.l2_da_validator_address)) - } - L1BatchCommitmentMode::Validium => { - Rc::new(ValidiumPubdataBuilder::new(params.l2_da_validator_address)) + PubdataType::NoDA => Rc::new(HashedPubdataBuilder::new(params.l2_da_validator_address)), + PubdataType::Rollup + | PubdataType::Avail + | PubdataType::Celestia + | PubdataType::Eigen + | PubdataType::ObjectStore => { + Rc::new(FullPubdataBuilder::new(params.l2_da_validator_address)) } } } diff --git a/core/lib/multivm/src/pubdata_builders/tests.rs b/core/lib/multivm/src/pubdata_builders/tests.rs index b06cb9405aa7..a3894110c8a1 100644 --- a/core/lib/multivm/src/pubdata_builders/tests.rs +++ b/core/lib/multivm/src/pubdata_builders/tests.rs @@ -3,7 +3,7 @@ use zksync_types::{ ACCOUNT_CODE_STORAGE_ADDRESS, BOOTLOADER_ADDRESS, }; -use super::{rollup::RollupPubdataBuilder, validium::ValidiumPubdataBuilder}; +use super::{full_builder::FullPubdataBuilder, hashed_builder::HashedPubdataBuilder}; use crate::interface::pubdata::{L1MessengerL2ToL1Log, PubdataBuilder, PubdataInput}; fn mock_input() 
-> PubdataInput { @@ -54,13 +54,13 @@ fn mock_input() -> PubdataInput { } #[test] -fn test_rollup_pubdata_building() { +fn test_full_pubdata_building() { let input = mock_input(); - let rollup_pubdata_builder = RollupPubdataBuilder::new(Address::zero()); + let full_pubdata_builder = FullPubdataBuilder::new(Address::zero()); let actual = - rollup_pubdata_builder.l1_messenger_operator_input(&input, ProtocolVersionId::Version24); + full_pubdata_builder.l1_messenger_operator_input(&input, ProtocolVersionId::Version24); let expected = "00000001000000000000000000000000000000000000000000008001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000800000000100000004deadbeef0000000100000060bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb0100002a040001000000000000000000000000000000000000000000000000000000000000007e090e0000000c0901000000020000000000000000000000000000000000008002000000000000000000000000000000000000000000000000000000000000009b000000000000000000000000000000000000000000000000000000000000007d000000000000000c000000000000000000000000000000000000000000000000000000000000000b000000000000000000000000000000000000000000000000000000000000000c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008002000000000000000000000000000000000000000000000000000000000000009c000000000000000000000000000000000000000000000000000000000000007e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"; assert_eq!( &hex::encode(actual), @@ -69,7 +69,7 @@ fn test_rollup_pubdata_building() { ); let actual = - rollup_pubdata_builder.settlement_layer_pubdata(&input, ProtocolVersionId::Version24); + full_pubdata_builder.settlement_layer_pubdata(&input, ProtocolVersionId::Version24); let expected = "00000001000000000000000000000000000000000000000000008001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000800000000100000004deadbeef0000000100000060bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb0100002a040001000000000000000000000000000000000000000000000000000000000000007e090e0000000c0901"; assert_eq!( &hex::encode(actual), @@ -78,7 +78,7 @@ fn test_rollup_pubdata_building() { ); let actual = - rollup_pubdata_builder.l1_messenger_operator_input(&input, ProtocolVersionId::Version27); + full_pubdata_builder.l1_messenger_operator_input(&input, ProtocolVersionId::Version27); let expected = 
"89f9a07233e608561d90f7c4e7bcea24d718e425a6bd6c8eefb48a334366143694c75fae278944d856d68e33bbd32937cb3a1ea35cbf7d6eeeb1150f500dd0d64d0efe420d6dafe5897eab2fc27b2e47af303397ed285ace146d836d042717b0a3dc4b28a603a33b28ce1d5c52c593a46a15a99f1afa1c1d92715284288958fd54a93de700000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000032300000001000000000000000000000000000000000000000000008001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000800000000100000004deadbeef0000000100000060bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb0100002a040001000000000000000000000000000000000000000000000000000000000000007e090e0000000c0901000000020000000000000000000000000000000000008002000000000000000000000000000000000000000000000000000000000000009b000000000000000000000000000000000000000000000000000000000000007d000000000000000c000000000000000000000000000000000000000000000000000000000000000b000000000000000000000000000000000000000000000000000000000000000c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008002000000000000000000000000000000000000000000000000000000000000009c000000000000000000000000000000000000000000000000000000000000007e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000"; assert_eq!( &hex::encode(actual), @@ -87,7 +87,7 @@ fn test_rollup_pubdata_building() { ); let actual = - rollup_pubdata_builder.settlement_layer_pubdata(&input, ProtocolVersionId::Version27); + full_pubdata_builder.settlement_layer_pubdata(&input, ProtocolVersionId::Version27); let expected = "00000001000000000000000000000000000000000000000000008001000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000800000000100000004deadbeef0000000100000060bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb0100002a040001000000000000000000000000000000000000000000000000000000000000007e090e0000000c0901"; assert_eq!( &hex::encode(actual), @@ -97,13 +97,13 @@ fn test_rollup_pubdata_building() { } #[test] -fn test_validium_pubdata_building() { +fn test_hashed_pubdata_building() { let input = mock_input(); - let validium_pubdata_builder = ValidiumPubdataBuilder::new(Address::zero()); + let hashed_pubdata_builder = HashedPubdataBuilder::new(Address::zero()); let actual = - validium_pubdata_builder.l1_messenger_operator_input(&input, ProtocolVersionId::Version27); + hashed_pubdata_builder.l1_messenger_operator_input(&input, ProtocolVersionId::Version27); let expected = 
"89f9a07233e608561d90f7c4e7bcea24d718e425a6bd6c8eefb48a334366143694c75fae278944d856d68e33bbd32937cb3a1ea35cbf7d6eeeb1150f500dd0d64d0efe420d6dafe5897eab2fc27b2e47af303397ed285ace146d836d042717b0a3dc4b28a603a33b28ce1d5c52c593a46a15a99f1afa1c1d92715284288958fd54a93de700000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000005c000000010000000000000000000000000000000000000000000080010000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000008000000000"; assert_eq!( &hex::encode(actual), @@ -112,7 +112,7 @@ fn test_validium_pubdata_building() { ); let actual = - validium_pubdata_builder.settlement_layer_pubdata(&input, ProtocolVersionId::Version27); + hashed_pubdata_builder.settlement_layer_pubdata(&input, ProtocolVersionId::Version27); let expected = "fa96e2436e6fb4d668f5a06681a7c53fcb199b2747ee624ee52a13e85aac5f1e"; assert_eq!( &hex::encode(actual), diff --git a/core/lib/multivm/src/pubdata_builders/utils.rs b/core/lib/multivm/src/pubdata_builders/utils.rs index 83c9b9317640..3b30832b79dd 100644 --- a/core/lib/multivm/src/pubdata_builders/utils.rs +++ b/core/lib/multivm/src/pubdata_builders/utils.rs @@ -1,7 +1,7 @@ use zksync_mini_merkle_tree::MiniMerkleTree; -use zksync_types::{bytecode::BytecodeHash, web3::keccak256}; +use zksync_types::{bytecode::BytecodeHash, web3::keccak256, writes::compress_state_diffs}; -use crate::interface::pubdata::L1MessengerL2ToL1Log; +use crate::interface::pubdata::{L1MessengerL2ToL1Log, PubdataInput}; pub(crate) fn build_chained_log_hash(user_logs: &[L1MessengerL2ToL1Log]) -> Vec { let mut chained_log_hash = vec![0u8; 32]; @@ -68,3 +68,34 @@ pub(crate) fn encode_user_logs(user_logs: &[L1MessengerL2ToL1Log]) -> Vec { } result } + +pub(crate) fn extend_from_pubdata_input(buffer: &mut Vec, pubdata_input: &PubdataInput) { + let PubdataInput { + user_logs, + l2_to_l1_messages, + published_bytecodes, + 
state_diffs, + } = pubdata_input; + + // Adding user L2->L1 logs. + buffer.extend(encode_user_logs(user_logs)); + + // Encoding L2->L1 messages + // Format: `[(numberOfMessages as u32) || (messages[1].len() as u32) || messages[1] || ... || (messages[n].len() as u32) || messages[n]]` + buffer.extend((l2_to_l1_messages.len() as u32).to_be_bytes()); + for message in l2_to_l1_messages { + buffer.extend((message.len() as u32).to_be_bytes()); + buffer.extend(message); + } + // Encoding bytecodes + // Format: `[(numberOfBytecodes as u32) || (bytecodes[1].len() as u32) || bytecodes[1] || ... || (bytecodes[n].len() as u32) || bytecodes[n]]` + buffer.extend((published_bytecodes.len() as u32).to_be_bytes()); + for bytecode in published_bytecodes { + buffer.extend((bytecode.len() as u32).to_be_bytes()); + buffer.extend(bytecode); + } + // Encoding state diffs + // Format: `[size of compressed state diffs u32 || compressed state diffs || (# state diffs: intial + repeated) as u32 || sorted state diffs by ]` + let state_diffs_compressed = compress_state_diffs(state_diffs.clone()); + buffer.extend(state_diffs_compressed); +} diff --git a/core/lib/multivm/src/tracers/call_tracer/mod.rs b/core/lib/multivm/src/tracers/call_tracer/mod.rs index 44f274876032..20beb10642d8 100644 --- a/core/lib/multivm/src/tracers/call_tracer/mod.rs +++ b/core/lib/multivm/src/tracers/call_tracer/mod.rs @@ -17,6 +17,8 @@ pub mod vm_virtual_blocks; #[derive(Debug, Clone)] pub struct CallTracer { stack: Vec, + finished_calls: Vec, + result: Arc>>, max_stack_depth: usize, @@ -41,6 +43,7 @@ impl CallTracer { pub fn new(result: Arc>>) -> Self { Self { stack: vec![], + finished_calls: vec![], result, max_stack_depth: 0, max_near_calls: 0, diff --git a/core/lib/multivm/src/tracers/call_tracer/vm_latest/mod.rs b/core/lib/multivm/src/tracers/call_tracer/vm_latest/mod.rs index ed18a3eca47d..afa02d24f200 100644 --- a/core/lib/multivm/src/tracers/call_tracer/vm_latest/mod.rs +++ 
b/core/lib/multivm/src/tracers/call_tracer/vm_latest/mod.rs @@ -67,7 +67,9 @@ impl VmTracer for CallTracer { _bootloader_state: &BootloaderState, _stop_reason: VmExecutionStopReason, ) { - self.store_result() + let result = std::mem::take(&mut self.finished_calls); + let cell = self.result.as_ref(); + cell.set(result).unwrap(); } } @@ -191,7 +193,6 @@ impl CallTracer { .farcall .parent_gas .saturating_sub(state.vm_local_state.callstack.current.ergs_remaining as u64); - self.save_output_latest(state, memory, ret_opcode, &mut current_call.farcall); // If there is a parent call, push the current call to it @@ -199,7 +200,7 @@ impl CallTracer { if let Some(parent_call) = self.stack.last_mut() { parent_call.farcall.calls.push(current_call.farcall); } else { - self.push_call_and_update_stats(current_call.farcall, current_call.near_calls_after); + self.finished_calls.push(current_call.farcall); } } } diff --git a/core/lib/multivm/src/tracers/validator/vm_latest/mod.rs b/core/lib/multivm/src/tracers/validator/vm_latest/mod.rs index 5588dd144e95..5ad0ca025937 100644 --- a/core/lib/multivm/src/tracers/validator/vm_latest/mod.rs +++ b/core/lib/multivm/src/tracers/validator/vm_latest/mod.rs @@ -22,8 +22,8 @@ use crate::{ }, }, vm_latest::{ - tracers::utils::{computational_gas_price, get_calldata_page_via_abi, VmHook}, - BootloaderState, SimpleMemory, VmTracer, ZkSyncVmState, + tracers::utils::{computational_gas_price, get_calldata_page_via_abi}, + BootloaderState, SimpleMemory, VmHook, VmTracer, ZkSyncVmState, }, HistoryMode, }; @@ -205,25 +205,25 @@ impl DynTracer> let hook = VmHook::from_opcode_memory(&state, &data, self.vm_version.try_into().unwrap()); let current_mode = self.validation_mode; match (current_mode, hook) { - (ValidationTracerMode::NoValidation, VmHook::AccountValidationEntered) => { + (ValidationTracerMode::NoValidation, Some(VmHook::AccountValidationEntered)) => { // Account validation can be entered when there is no prior validation (i.e. 
"nested" validations are not allowed) self.validation_mode = ValidationTracerMode::UserTxValidation; } - (ValidationTracerMode::NoValidation, VmHook::PaymasterValidationEntered) => { + (ValidationTracerMode::NoValidation, Some(VmHook::PaymasterValidationEntered)) => { // Paymaster validation can be entered when there is no prior validation (i.e. "nested" validations are not allowed) self.validation_mode = ValidationTracerMode::PaymasterTxValidation; } - (_, VmHook::AccountValidationEntered | VmHook::PaymasterValidationEntered) => { + (_, Some(VmHook::AccountValidationEntered | VmHook::PaymasterValidationEntered)) => { panic!( "Unallowed transition inside the validation tracer. Mode: {:#?}, hook: {:#?}", self.validation_mode, hook ); } - (_, VmHook::NoValidationEntered) => { + (_, Some(VmHook::ValidationExited)) => { // Validation can be always turned off self.validation_mode = ValidationTracerMode::NoValidation; } - (_, VmHook::ValidationStepEndeded) => { + (_, Some(VmHook::ValidationStepEnded)) => { // The validation step has ended. self.should_stop_execution = true; } diff --git a/core/lib/multivm/src/utils/mod.rs b/core/lib/multivm/src/utils/mod.rs index 7bb907efc9b8..825874747682 100644 --- a/core/lib/multivm/src/utils/mod.rs +++ b/core/lib/multivm/src/utils/mod.rs @@ -113,13 +113,13 @@ pub fn adjust_pubdata_price_for_tx( ) -> BatchFeeInput { // If no max base fee was provided, we just use the maximal one for convenience. 
let max_base_fee = max_base_fee.unwrap_or(U256::MAX); - let desired_gas_per_pubdata = + let bounded_tx_gas_per_pubdata_limit = tx_gas_per_pubdata_limit.min(get_max_gas_per_pubdata_byte(vm_version).into()); let (current_base_fee, current_gas_per_pubdata) = derive_base_fee_and_gas_per_pubdata(batch_fee_input, vm_version); - if U256::from(current_gas_per_pubdata) <= desired_gas_per_pubdata + if U256::from(current_gas_per_pubdata) <= bounded_tx_gas_per_pubdata_limit && U256::from(current_base_fee) <= max_base_fee { // gas per pubdata is already smaller than or equal to `tx_gas_per_pubdata_limit`. @@ -138,8 +138,9 @@ pub fn adjust_pubdata_price_for_tx( // `gasPerPubdata = ceil(17 * l1gasprice / fair_l2_gas_price)` // `gasPerPubdata <= 17 * l1gasprice / fair_l2_gas_price + 1` // `fair_l2_gas_price(gasPerPubdata - 1) / 17 <= l1gasprice` - let new_l1_gas_price = - fair_l2_gas_price * (desired_gas_per_pubdata - U256::from(1u32)) / U256::from(17); + let new_l1_gas_price = fair_l2_gas_price + * bounded_tx_gas_per_pubdata_limit.saturating_sub(U256::from(1u32)) + / U256::from(17); BatchFeeInput::L1Pegged(L1PeggedBatchFeeModelInput { l1_gas_price: new_l1_gas_price.as_u64(), @@ -154,11 +155,14 @@ pub fn adjust_pubdata_price_for_tx( current_l2_fair_gas_price }; + // We want to adjust gas per pubdata to be min(bounded_tx_gas_per_pubdata_limit, current_gas_per_pubdata). 
+ let desired_gas_per_pubdata = + bounded_tx_gas_per_pubdata_limit.min(U256::from(current_gas_per_pubdata)); // `gasPerPubdata = ceil(fair_pubdata_price / fair_l2_gas_price)` // `gasPerPubdata <= fair_pubdata_price / fair_l2_gas_price + 1` // `fair_l2_gas_price(gasPerPubdata - 1) <= fair_pubdata_price` let new_fair_pubdata_price = - fair_l2_gas_price * (desired_gas_per_pubdata - U256::from(1u32)); + fair_l2_gas_price * desired_gas_per_pubdata.saturating_sub(U256::from(1u32)); BatchFeeInput::PubdataIndependent(PubdataIndependentBatchFeeModelInput { fair_pubdata_price: new_fair_pubdata_price.as_u64(), diff --git a/core/lib/multivm/src/versions/shadow/mod.rs b/core/lib/multivm/src/versions/shadow/mod.rs index 1ad5bdba5a7b..ee1eb02b0dd5 100644 --- a/core/lib/multivm/src/versions/shadow/mod.rs +++ b/core/lib/multivm/src/versions/shadow/mod.rs @@ -25,7 +25,7 @@ use crate::{ mod tests; type ReferenceVm = vm_latest::Vm, HistoryEnabled>; -type ShadowedFastVm = crate::vm_instance::ShadowedFastVm; +type ShadowedFastVm = crate::vm_instance::ShadowedFastVm; fn hash_block(block_env: L2BlockEnv, tx_hashes: &[H256]) -> H256 { let mut hasher = L2BlockHasher::new( diff --git a/core/lib/multivm/src/versions/shadow/tests.rs b/core/lib/multivm/src/versions/shadow/tests.rs index 354459853f11..dc7417ad1259 100644 --- a/core/lib/multivm/src/versions/shadow/tests.rs +++ b/core/lib/multivm/src/versions/shadow/tests.rs @@ -1,20 +1,30 @@ //! Unit tests from the `testonly` test suite. 
-use std::{collections::HashSet, rc::Rc}; +use std::{collections::HashSet, fmt, rc::Rc}; use zksync_types::{writes::StateDiffRecord, StorageKey, Transaction, H256, U256}; +use zksync_vm2::interface::Tracer; +use zksync_vm_interface::{ + utils::{CheckDivergence, DivergenceErrors}, + Call, +}; use super::ShadowedFastVm; use crate::{ interface::{ pubdata::{PubdataBuilder, PubdataInput}, + storage::InMemoryStorage, utils::{ShadowMut, ShadowRef}, CurrentExecutionState, L2BlockEnv, VmExecutionResultAndLogs, }, - versions::testonly::TestedVm, + versions::testonly::{TestedVm, TestedVmWithCallTracer}, + vm_fast, }; -impl TestedVm for ShadowedFastVm { +impl TestedVm for ShadowedFastVm +where + Tr: Tracer + Default + fmt::Debug + 'static, +{ type StateDump = (); fn dump_state(&self) -> Self::StateDump { @@ -135,6 +145,44 @@ impl TestedVm for ShadowedFastVm { } } +#[derive(Debug)] +struct ExecutionResultAndTraces { + result: VmExecutionResultAndLogs, + traces: Vec, +} + +impl From<(VmExecutionResultAndLogs, Vec)> for ExecutionResultAndTraces { + fn from((result, traces): (VmExecutionResultAndLogs, Vec)) -> Self { + Self { result, traces } + } +} + +impl From for (VmExecutionResultAndLogs, Vec) { + fn from(value: ExecutionResultAndTraces) -> Self { + (value.result, value.traces) + } +} + +impl CheckDivergence for ExecutionResultAndTraces { + fn check_divergence(&self, other: &Self) -> DivergenceErrors { + let mut errors = self.result.check_divergence(&other.result); + errors.extend(self.traces.check_divergence(&other.traces)); + errors + } +} + +impl TestedVmWithCallTracer for ShadowedFastVm { + fn inspect_with_call_tracer(&mut self) -> (VmExecutionResultAndLogs, Vec) { + self.get_custom_mut("inspect_with_call_tracer", |r| { + ExecutionResultAndTraces::from(match r { + ShadowMut::Main(vm) => vm.inspect_with_call_tracer(), + ShadowMut::Shadow(vm) => vm.inspect_with_call_tracer(), + }) + }) + .into() + } +} + mod block_tip { use crate::versions::testonly::block_tip::*; @@ -167,6 
+215,40 @@ mod bytecode_publishing { } } +mod call_tracer { + use crate::versions::testonly::call_tracer::*; + + #[test] + fn basic_behavior() { + test_basic_behavior::>(); + } + + #[test] + fn transfer() { + test_transfer::>(); + } + + #[test] + fn reverted_tx() { + test_reverted_tx::>(); + } + + #[test] + fn reverted_deployment() { + test_reverted_deployment_tx::>(); + } + + #[test] + fn out_of_gas() { + test_out_of_gas::>(); + } + + #[test] + fn recursive_tx() { + test_recursive_tx::>(); + } +} + mod circuits { use crate::versions::testonly::circuits::*; @@ -308,7 +390,6 @@ mod l1_messenger { use crate::versions::testonly::l1_messenger::*; #[test] - #[ignore] // Requires post-gateway system contracts fn rollup_da_output_hash_match() { test_rollup_da_output_hash_match::(); } diff --git a/core/lib/multivm/src/versions/testonly/call_tracer.rs b/core/lib/multivm/src/versions/testonly/call_tracer.rs new file mode 100644 index 000000000000..0e86cebec46e --- /dev/null +++ b/core/lib/multivm/src/versions/testonly/call_tracer.rs @@ -0,0 +1,292 @@ +//! Call tracer tests. These tests are special in the sense that it's too unreliable to keep fixtures +//! (since they can be invalidated by unrelated changes in system contracts, e.g. by changing consumed gas costs). 
+ +use assert_matches::assert_matches; +use ethabi::Token; +use zksync_system_constants::MSG_VALUE_SIMULATOR_ADDRESS; +use zksync_test_contracts::{Account, LoadnextContractExecutionParams, TestContract, TxType}; +use zksync_types::{ + fee::Fee, utils::deployed_address_create, zk_evm_types::FarCallOpcode, Address, Execute, +}; + +use super::{ContractToDeploy, TestedVmWithCallTracer, VmTester, VmTesterBuilder}; +use crate::{ + interface::{Call, CallType, ExecutionResult, TxExecutionMode}, + vm_latest::constants::BATCH_COMPUTATIONAL_GAS_LIMIT, +}; + +fn check_call(call: &Call) { + assert!(call.gas_used < call.gas); + assert!(call.gas_used > call.calls.iter().map(|call| call.gas_used).sum::()); + + for subcall in &call.calls { + if subcall.r#type != CallType::Call(FarCallOpcode::Mimic) { + pretty_assertions::assert_eq!(call.to, subcall.from); + } + check_call(subcall); + } +} + +fn extract_single_call(calls: &[Call], filter: impl Fn(&Call) -> bool) -> &Call { + fn walk<'a>( + matching_call: &mut Option<&'a Call>, + calls: &'a [Call], + filter: &impl Fn(&Call) -> bool, + ) { + for call in calls { + if filter(call) { + if let Some(prev_call) = matching_call { + panic!("Multiple call match filter: {prev_call:?}, {call:?}"); + } + *matching_call = Some(call); + } + walk(matching_call, &call.calls, filter); + } + } + + let mut matching_call = None; + walk(&mut matching_call, calls, &filter); + matching_call.expect("no calls match the filter") +} + +pub(crate) fn test_basic_behavior() { + let bytecode = TestContract::counter().bytecode.to_vec(); + let address = Address::repeat_byte(0xA5); + let mut vm: VmTester = VmTesterBuilder::new() + .with_empty_in_memory_storage() + .with_rich_accounts(1) + .with_bootloader_gas_limit(BATCH_COMPUTATIONAL_GAS_LIMIT) + .with_execution_mode(TxExecutionMode::VerifyExecute) + .with_custom_contracts(vec![ContractToDeploy::new(bytecode, address)]) + .build(); + + let calldata = 
"7cf5dab00000000000000000000000000000000000000000000000000000000000000006"; + let calldata = hex::decode(calldata).unwrap(); + + let account = &mut vm.rich_accounts[0]; + let tx = account.get_l2_tx_for_execute( + Execute { + contract_address: Some(address), + calldata: calldata.clone(), + value: 0.into(), + factory_deps: vec![], + }, + None, + ); + + vm.vm.push_transaction(tx); + let (res, call_traces) = vm.vm.inspect_with_call_tracer(); + assert!(!res.result.is_failed(), "{:#?}", res.result); + + for call in &call_traces { + check_call(call); + assert_eq!(call.error, None); + assert_eq!(call.revert_reason, None); + } + + let call_to_contract = extract_single_call(&call_traces, |call| call.to == address); + assert_eq!(call_to_contract.from, account.address); + assert_eq!(call_to_contract.input, calldata); +} + +pub(crate) fn test_transfer() { + let mut vm: VmTester = VmTesterBuilder::new() + .with_empty_in_memory_storage() + .with_rich_accounts(1) + .with_bootloader_gas_limit(BATCH_COMPUTATIONAL_GAS_LIMIT) + .with_execution_mode(TxExecutionMode::VerifyExecute) + .build(); + + let recipient = Address::repeat_byte(0x23); + let value = 1_000_000_000.into(); + let account = &mut vm.rich_accounts[0]; + let transfer = account.get_l2_tx_for_execute( + Execute { + contract_address: Some(recipient), + calldata: vec![], + value, + factory_deps: vec![], + }, + None, + ); + + vm.vm.push_transaction(transfer); + let (res, call_traces) = vm.vm.inspect_with_call_tracer(); + assert!(!res.result.is_failed(), "{:#?}", res.result); + + for call in &call_traces { + check_call(call); + assert_eq!(call.error, None); + assert_eq!(call.revert_reason, None); + } + + let transfer_call = extract_single_call(&call_traces, |call| call.to == recipient); + assert_eq!(transfer_call.from, account.address); + assert_eq!(transfer_call.value, value); +} + +pub(crate) fn test_reverted_tx() { + let counter_address = Address::repeat_byte(0x23); + let mut vm: VmTester = VmTesterBuilder::new() + 
.with_empty_in_memory_storage() + .with_rich_accounts(1) + .with_bootloader_gas_limit(BATCH_COMPUTATIONAL_GAS_LIMIT) + .with_execution_mode(TxExecutionMode::VerifyExecute) + .with_custom_contracts(vec![ContractToDeploy::new( + TestContract::counter().bytecode.to_vec(), + counter_address, + )]) + .build(); + + let account = &mut vm.rich_accounts[0]; + let calldata = TestContract::counter() + .function("incrementWithRevert") + .encode_input(&[Token::Uint(1.into()), Token::Bool(true)]) + .unwrap(); + let reverted_tx = account.get_l2_tx_for_execute( + Execute { + contract_address: Some(counter_address), + calldata, + value: 0.into(), + factory_deps: vec![], + }, + None, + ); + + vm.vm.push_transaction(reverted_tx); + let (res, call_traces) = vm.vm.inspect_with_call_tracer(); + assert_matches!(&res.result, ExecutionResult::Revert { .. }); + + let call_to_contract = extract_single_call(&call_traces, |call| call.to == counter_address); + assert_eq!( + call_to_contract.revert_reason.as_ref().unwrap(), + "This method always reverts" + ); +} + +pub(crate) fn test_out_of_gas() { + let contract_address = Address::repeat_byte(0x23); + let mut vm: VmTester = VmTesterBuilder::new() + .with_empty_in_memory_storage() + .with_rich_accounts(1) + .with_execution_mode(TxExecutionMode::VerifyExecute) + .with_custom_contracts(vec![ContractToDeploy::new( + TestContract::expensive().bytecode.to_vec(), + contract_address, + )]) + .build(); + + let account = &mut vm.rich_accounts[0]; + let execute = Execute { + contract_address: Some(contract_address), + calldata: TestContract::expensive() + .function("expensive") + .encode_input(&[Token::Uint(1_000.into())]) + .unwrap(), + value: 0.into(), + factory_deps: vec![], + }; + let out_of_gas_tx = account.get_l2_tx_for_execute( + execute, + Some(Fee { + gas_limit: 500_000.into(), // insufficient gas + ..Account::default_fee() + }), + ); + + vm.vm.push_transaction(out_of_gas_tx); + let (res, call_traces) = vm.vm.inspect_with_call_tracer(); + 
assert_matches!(&res.result, ExecutionResult::Revert { .. }); + + let out_of_gas_call = extract_single_call(&call_traces, |call| { + call.from == account.address && call.to == contract_address + }); + assert_eq!(out_of_gas_call.error.as_ref().unwrap(), "Panic"); + assert_eq!(out_of_gas_call.gas_used, out_of_gas_call.gas); + + let parent_call = + extract_single_call(&call_traces, |call| call.calls.contains(out_of_gas_call)); + assert_eq!( + parent_call.revert_reason.as_ref().unwrap(), + "Unknown revert reason" + ); +} + +pub(crate) fn test_reverted_deployment_tx() { + let mut vm: VmTester = VmTesterBuilder::new() + .with_empty_in_memory_storage() + .with_rich_accounts(1) + .with_execution_mode(TxExecutionMode::VerifyExecute) + .build(); + + let account = &mut vm.rich_accounts[0]; + let deploy_tx = account.get_deploy_tx(TestContract::failed_call().bytecode, None, TxType::L2); + + vm.vm.push_transaction(deploy_tx.tx); + let (res, call_traces) = vm.vm.inspect_with_call_tracer(); + assert_matches!(&res.result, ExecutionResult::Success { .. 
}); + + let constructor_call = extract_single_call(&call_traces, |call| { + call.r#type == CallType::Create && call.from == account.address + }); + assert_eq!(constructor_call.input, [] as [u8; 0]); + assert_eq!(constructor_call.error, None); + assert_eq!(constructor_call.revert_reason, None); + let deploy_address = deployed_address_create(account.address, 0.into()); + assert_eq!(constructor_call.to, deploy_address); + + assert_eq!(constructor_call.calls.len(), 1, "{constructor_call:#?}"); + let inner_call = &constructor_call.calls[0]; + assert_eq!(inner_call.from, deploy_address); + assert_eq!(inner_call.to, MSG_VALUE_SIMULATOR_ADDRESS); + inner_call.revert_reason.as_ref().unwrap(); +} + +pub(crate) fn test_recursive_tx() { + let contract_address = Address::repeat_byte(0x42); + let mut vm: VmTester = VmTesterBuilder::new() + .with_empty_in_memory_storage() + .with_rich_accounts(1) + .with_bootloader_gas_limit(BATCH_COMPUTATIONAL_GAS_LIMIT) + .with_execution_mode(TxExecutionMode::VerifyExecute) + .with_custom_contracts(vec![ContractToDeploy::new( + TestContract::load_test().bytecode.to_vec(), + contract_address, + )]) + .build(); + + let account = &mut vm.rich_accounts[0]; + let calldata = LoadnextContractExecutionParams { + recursive_calls: 20, + ..LoadnextContractExecutionParams::empty() + } + .to_bytes(); + let recursive_tx = account.get_l2_tx_for_execute( + Execute { + contract_address: Some(contract_address), + calldata: calldata.clone(), + value: 0.into(), + factory_deps: vec![], + }, + None, + ); + + vm.vm.push_transaction(recursive_tx); + let (res, call_traces) = vm.vm.inspect_with_call_tracer(); + assert!(!res.result.is_failed(), "{:#?}", res.result); + + let mut call_to_contract = extract_single_call(&call_traces, |call| { + call.to == contract_address && call.input == calldata + }); + let mut depth = 0; + while let Some(child_call) = call_to_contract.calls.first() { + assert_eq!(call_to_contract.calls.len(), 1, "{call_to_contract:#?}"); + 
assert_eq!(child_call.from, contract_address); + assert_eq!(child_call.to, contract_address); + assert_ne!(child_call.input, call_to_contract.input); + + depth += 1; + call_to_contract = child_call; + } + assert_eq!(depth, 20); +} diff --git a/core/lib/multivm/src/versions/testonly/circuits.rs b/core/lib/multivm/src/versions/testonly/circuits.rs index de987a8912db..c379372bc970 100644 --- a/core/lib/multivm/src/versions/testonly/circuits.rs +++ b/core/lib/multivm/src/versions/testonly/circuits.rs @@ -34,8 +34,19 @@ pub(crate) fn test_circuits() { let s = res.statistics.circuit_statistic; // Check `circuit_statistic`. const EXPECTED: [f32; 13] = [ - 1.34935, 0.15026, 1.66666, 0.00315, 1.0594, 0.00058, 0.00348, 0.00076, 0.11945, 0.14285, - 0.0, 0.0, 0.0, + 1.258627, + 0.13982475, + 1.6666666, + 0.003154238, + 0.9247803, + 0.00058723404, + 0.0034893616, + 0.00076709175, + 0.11945392, + 0.14285715, + 0.0, + 0.0, + 0.0, ]; let actual = [ (s.main_vm, "main_vm"), diff --git a/core/lib/multivm/src/versions/testonly/l1_messenger.rs b/core/lib/multivm/src/versions/testonly/l1_messenger.rs index 5d602b1e7d66..c8b7b6bd8ed0 100644 --- a/core/lib/multivm/src/versions/testonly/l1_messenger.rs +++ b/core/lib/multivm/src/versions/testonly/l1_messenger.rs @@ -1,32 +1,26 @@ use std::rc::Rc; use ethabi::Token; -use zksync_contracts::l1_messenger_contract; +use zksync_contracts::{l1_messenger_contract, l2_rollup_da_validator_bytecode}; use zksync_test_contracts::{TestContract, TxType}; use zksync_types::{ address_to_h256, u256_to_h256, web3::keccak256, Address, Execute, ProtocolVersionId, L1_MESSENGER_ADDRESS, U256, }; -use zksync_vm_interface::SystemEnv; -use super::{default_system_env, ContractToDeploy, TestedVm, VmTesterBuilder}; +use super::{ContractToDeploy, TestedVm, VmTesterBuilder}; use crate::{ interface::{ pubdata::{PubdataBuilder, PubdataInput}, InspectExecutionMode, TxExecutionMode, VmInterfaceExt, }, - pubdata_builders::RollupPubdataBuilder, + 
pubdata_builders::FullPubdataBuilder, vm_latest::constants::ZK_SYNC_BYTES_PER_BLOB, }; const L2_DA_VALIDATOR_OUTPUT_HASH_KEY: usize = 5; const USED_L2_DA_VALIDATOR_ADDRESS_KEY: usize = 6; -// Bytecode is temporary hardcoded, should be removed after contracts are merged. -fn l2_rollup_da_validator_bytecode() -> Vec { - hex::decode("0012000000000002000a000000000002000000000301001900000060043002700000012703400197000100000031035500020000003103550003000000310355000400000031035500050000003103550006000000310355000700000031035500080000003103550009000000310355000a000000310355000b000000310355000c000000310355000d000000310355000e000000310355000f00000031035500100000003103550011000000010355000001270040019d0000008004000039000000400040043f00000001002001900000005d0000c13d000000040030008c000000fe0000413d000000000201043b00000129022001970000012a0020009c000000fe0000c13d000000a40030008c000000fe0000413d0000000002000416000000000002004b000000fe0000c13d0000008402100370000000000202043b000300000002001d0000012b0020009c000000fe0000213d00000003020000290000002302200039000000000032004b000000fe0000813d00000003020000290000000402200039000000000421034f000000000604043b0000012b0060009c000000fe0000213d0000000304000029000700240040003d0000000704600029000000000034004b000000fe0000213d0000004403100370000000000303043b000400000003001d0000006403100370000000000303043b000200000003001d000000040060008c000000fe0000413d0000002002200039000000000221034f000000000202043b000000e00220027000000058022000c90000000804200039000000000064004b000000fe0000213d00000003022000290000002802200039000000000121034f000000000101043b000500e00010027a000600000006001d000000650000c13d00000000090000190000000403000029000000000039004b000000f10000c13d0000014e0040009c000000fb0000a13d0000014001000041000000000010043f0000001101000039000000040010043f00000138010000410000049a000104300000000001000416000000000001004b000000fe0000c13d0000002001000039000001000010044300000120000004430000012801000041000004990001042e000000000800001900000000090000190000014f0040009c000
000570000813d0000000403400039000000000063004b000000fe0000213d00000007024000290000001101000367000000000221034f000000000502043b000000e004500270000000000034001a000000570000413d0000000007340019000000000067004b000000fe0000213d00000000020004140000012c0050009c0000007b0000813d0000000003000031000000840000013d000000070330002900000127053001970001000000510355000000000034001a000000570000413d0000000003340019000000000330007b000000570000413d000000000151034f000a00000009001d000800000008001d000900000007001d000001270330019700010000003103e50000012d0020009c000003c20000813d00000000013103df000000c0022002100000012e022001970000012c022001c700010000002103b500000000012103af0000801002000039049804930000040f0000000003010019000000600330027000000127033001970000000100200190000002450000613d0000001f0230003900000130052001970000003f025000390000013104200197000000400200043d0000000004420019000000000024004b000000000600003900000001060040390000012b0040009c0000023f0000213d00000001006001900000023f0000c13d000000400040043f0000000004320436000000000005004b000000b10000613d0000000005540019000000000600003100000011066003670000000007040019000000006806043c0000000007870436000000000057004b000000ad0000c13d0000012f063001980000000005640019000000ba0000613d000000000701034f0000000008040019000000007907043c0000000008980436000000000058004b000000b60000c13d0000001f03300190000000c70000613d000000000161034f0000000303300210000000000605043300000000063601cf000000000636022f000000000101043b0000010003300089000000000131022f00000000013101cf000000000161019f00000000001504350000000001020433000000200010008c0000000a05000029000004210000c13d0000000002040433000000400100043d000000400310003900000000002304350000002002100039000000000052043500000040030000390000000000310435000001320010009c0000023f0000213d0000006003100039000000400030043f000001270020009c000001270200804100000040022002100000000001010433000001270010009c00000127010080410000006001100210000000000121019f0000000002000414000001270020009c0000012702008041000000c002200210000000000112019f00000133011001c7000
08010020000390498048e0000040f0000000100200190000000060600002900000009040000290000000808000029000000fe0000613d000000000901043b0000000108800039000000050080006c000000670000413d000000520000013d000000400100043d000000440210003900000000009204350000002402100039000000000032043500000134020000410000000000210435000000040210003900000000000204350000042d0000013d0000000403400039000000000063004b000001000000a13d00000000010000190000049a0001043000000007014000290000001101100367000000000101043b000400e00010027a0000025d0000c13d000000000900001900000000050300190000000003090019000000020090006c000002f20000c13d000000060050006c000002fd0000813d00000007015000290000001102000367000000000112034f000000000101043b000000f801100270000000010010008c000003030000c13d00000000060500190000014e0060009c0000000604000029000000570000213d0000000403600039000000000043004b000000fe0000213d00000003016000290000002501100039000000000112034f000000000101043b000000000043004b000002fd0000813d000000e8011002700000000703300029000000000432034f0000000503500039000000000404043b000000000031001a0000000607000029000000570000413d000a00000031001d0000000a0070006b000000fe0000213d000000050600008a0000000a0060006b000000570000213d0000000a050000290000000405500039000000000075004b000000fe0000213d0000000a08000029000300070080002d0000000306200360000000000606043b000400000006001d000000e006600272000500000006001d00090110006000cd0000013f0000613d000000090800002900000005068000fa000001100060008c000000570000c13d000000090050002a000000570000413d000200090050002d000000020070006c000000fe0000413d000000f804400270000000400a00043d0000004406a00039000000800700003900000000007604350000002406a000390000000000460435000001410400004100000000004a043500000007055000290000008404a00039000000090900002900000000009404350000000404a0003900000005060000290000000000640435000000000752034f0000001f0890018f00080000000a001d000000a405a0003900000142099001980000000006950019000001610000613d000000000a07034f000000000b05001900000000ac0a043c000000000bcb043600000000006b004b0000015d0000c13d0000000703300029000
000000008004b0000016f0000613d000000000797034f0000000308800210000000000906043300000000098901cf000000000989022f000000000707043b0000010008800089000000000787022f00000000078701cf000000000797019f00000000007604350000000907000029000000000675001900000000000604350000001f06700039000001430660019700000000066500190000000004460049000000080500002900000064055000390000000000450435000000000432034f0000001f0510018f000000000216043600000144061001980000000003620019000001850000613d000000000704034f0000000008020019000000007907043c0000000008980436000000000038004b000001810000c13d000000000005004b000001920000613d000000000464034f0000000305500210000000000603043300000000065601cf000000000656022f000000000404043b0000010005500089000000000454022f00000000045401cf000000000464019f0000000000430435000000000312001900000000000304350000001f011000390000014501100197000000080300002900000000013100490000000001210019000001270010009c00000127010080410000006001100210000001270030009c000001270200004100000000020340190000004002200210000000000121019f0000000002000414000001270020009c0000012702008041000000c002200210000000000112019f0000800e02000039049804890000040f000000000301001900000060033002700000012703300197000000200030008c000000200400003900000000040340190000001f0640018f00000020074001900000000805700029000001b80000613d000000000801034f0000000809000029000000008a08043c0000000009a90436000000000059004b000001b40000c13d000000000006004b000001c50000613d000000000771034f0000000306600210000000000805043300000000086801cf000000000868022f000000000707043b0000010006600089000000000767022f00000000066701cf000000000686019f00000000006504350000000100200190000003480000613d0000001f01400039000000600110018f0000000802100029000000000012004b00000000010000390000000101004039000100000002001d0000012b0020009c0000023f0000213d00000001001001900000023f0000c13d0000000101000029000000400010043f000000200030008c0000000604000029000000fe0000413d00000008010000290000000001010433000800000001001d00000004010000290000012c0010009c000001e10000413d000000090200002900000005012000fa000
001100010008c000000570000c13d0000000103000029000000440130003900000024023000390000000403300039000000020440006c000003660000c13d000001460400004100000001050000290000000000450435000000200400003900000000004304350000000a04000029000000000042043500000150034001980000001f0440018f000000000231001900000007050000290000001105500367000001fa0000613d000000000605034f0000000007010019000000006806043c0000000007870436000000000027004b000001f60000c13d000000000004004b000002070000613d000000000335034f0000000304400210000000000502043300000000054501cf000000000545022f000000000303043b0000010004400089000000000343022f00000000034301cf000000000353019f00000000003204350000000a030000290000001f023000390000015002200197000000000131001900000000000104350000004401200039000001270010009c000001270100804100000060011002100000000102000029000001270020009c00000127020080410000004002200210000000000112019f0000000002000414000001270020009c0000012702008041000000c002200210000000000112019f00008011020000390498048e0000040f000000000301001900000060033002700000001f0430018f0000012f0530019700000127033001970000000100200190000003720000613d0000000102500029000000000005004b0000022c0000613d000000000601034f0000000107000029000000006806043c0000000007870436000000000027004b000002280000c13d000000000004004b000002390000613d000000000151034f0000000304400210000000000502043300000000054501cf000000000545022f000000000101043b0000010004400089000000000141022f00000000014101cf000000000151019f00000000001204350000001f0130003900000130011001970000000101100029000900000001001d0000012b0010009c0000038a0000a13d0000014001000041000000000010043f0000004101000039000000040010043f00000138010000410000049a000104300000001f0430018f0000012f023001980000024e0000613d000000000501034f0000000006000019000000005705043c0000000006760436000000000026004b0000024a0000c13d000000000004004b0000025b0000613d000000000121034f0000000304400210000000000502043300000000054501cf000000000545022f000000000101043b0000010004400089000000000141022f00000000014101cf000000000151019f00000000001204350000006001300210000
0049a00010430000000000800001900000000090000190000014e0030009c000000570000213d0000000402300039000000000062004b000000fe0000213d00000007033000290000001101000367000000000331034f000000000303043b000000e00a30027000000000002a001a000000570000413d00000000072a0019000000000067004b000000fe0000213d0000013600300198000003130000c13d000001390030009c000003190000813d0000013a003001980000031f0000613d000000070420002900000127034001970000000002000414000100000031035500000000004a001a000000570000413d00000000044a0019000000000440007b000000570000413d00090000000a001d000a00000009001d000500000008001d000800000007001d000000000131034f000001270340019700010000003103e5000001270020009c000003c20000213d00000000013103df000000c0022002100000012e022001970000012c022001c700010000002103b500000000012103af0000000202000039049804930000040f00000000030100190000006003300270000001270330019700000001002001900000032a0000613d0000001f0230003900000130052001970000003f025000390000013104200197000000400200043d0000000004420019000000000024004b000000000600003900000001060040390000012b0040009c0000023f0000213d00000001006001900000023f0000c13d000000400040043f0000000004320436000000000005004b000000090a000029000002ad0000613d0000000005540019000000000600003100000011066003670000000007040019000000006806043c0000000007870436000000000057004b000002a90000c13d0000012f063001980000000005640019000002b60000613d000000000701034f0000000008040019000000007907043c0000000008980436000000000058004b000002b20000c13d0000001f03300190000002c30000613d000000000161034f0000000303300210000000000605043300000000063601cf000000000636022f000000000101043b0000010003300089000000000131022f00000000013101cf000000000161019f0000000000150435000000400100043d0000000002020433000000200020008c0000000a05000029000003420000c13d00000000020404330000013d02200197000000db03a002100000013e03300197000000000223019f0000013f022001c7000000400310003900000000002304350000002002100039000000000052043500000040030000390000000000310435000001320010009c0000023f0000213d0000006003100039000000400030043f000001270020009c000
001270200804100000040022002100000000001010433000001270010009c00000127010080410000006001100210000000000121019f0000000002000414000001270020009c0000012702008041000000c002200210000000000112019f00000133011001c700008010020000390498048e0000040f0000000100200190000000060600002900000008030000290000000508000029000000fe0000613d000000000901043b0000000108800039000000040080006c0000025f0000413d000001060000013d000000400100043d0000004402100039000000000032043500000024021000390000000203000029000000000032043500000134020000410000000000210435000000040210003900000001030000390000042c0000013d0000014001000041000000000010043f0000003201000039000000040010043f00000138010000410000049a00010430000000400200043d0000004403200039000000000013043500000024012000390000000103000039000000000031043500000134010000410000000000120435000000040120003900000002030000390000000000310435000001270020009c0000012702008041000000400120021000000135011001c70000049a00010430000000400100043d0000013702000041000000000021043500000004021000390000000203000039000003240000013d000000400100043d0000013702000041000000000021043500000004021000390000000103000039000003240000013d000000400100043d00000137020000410000000000210435000000040210003900000003030000390000000000320435000001270010009c0000012701008041000000400110021000000138011001c70000049a000104300000001f0430018f0000012f02300198000003330000613d000000000501034f0000000006000019000000005705043c0000000006760436000000000026004b0000032f0000c13d000000000004004b000003400000613d000000000121034f0000000304400210000000000502043300000000054501cf000000000545022f000000000101043b0000010004400089000000000141022f00000000014101cf000000000151019f000000000012043500000060013002100000049a0001043000000044021000390000013b03000041000000000032043500000024021000390000001903000039000004270000013d0000001f0530018f0000012f06300198000000400200043d0000000004620019000003530000613d000000000701034f0000000008020019000000007907043c0000000008980436000000000048004b0000034f0000c13d000000000005004b000003600000613d000000000161034f000
0000305500210000000000604043300000000065601cf000000000656022f000000000101043b0000010005500089000000000151022f00000000015101cf000000000161019f00000000001404350000006001300210000001270020009c00000127020080410000004002200210000000000112019f0000049a000104300000013405000041000000010600002900000000005604350000000305000039000000000053043500000000000204350000000000410435000001270060009c0000012706008041000000400160021000000135011001c70000049a00010430000000400200043d0000000006520019000000000005004b0000037c0000613d000000000701034f0000000008020019000000007907043c0000000008980436000000000068004b000003780000c13d000000000004004b000003600000613d000000000151034f0000000304400210000000000506043300000000054501cf000000000545022f000000000101043b0000010004400089000000000141022f00000000014101cf000000000151019f0000000000160435000003600000013d0000000901000029000000400010043f000000200030008c000000fe0000413d000000010100002900000000010104330000012b0010009c000000fe0000213d000000010230002900000001011000290000001f03100039000000000023004b000000fe0000813d00000000140104340000012b0040009c0000023f0000213d00000005034002100000003f05300039000001470550019700000009055000290000012b0050009c0000023f0000213d000000400050043f000000090500002900000000004504350000000003130019000000000023004b000000fe0000213d000000000004004b000003ae0000613d0000000902000029000000200220003900000000140104340000000000420435000000000031004b000003a90000413d000000000100041400000011020003670000000a0000006b000003b40000c13d0000000003000031000003be0000013d00000007030000290000012704300197000100000042035500000003050000290000000a0050006c000000570000413d0000000305000029000000000350007b000000570000413d000000000242034f000001270330019700010000003203e5000001270010009c000003c90000a13d000000400100043d00000044021000390000014d03000041000000000032043500000024021000390000000803000039000004270000013d00000000023203df000000c0011002100000012e011001970000012c011001c700010000001203b500000000011203af0000801002000039049804930000040f00000000030100190000006003300270000
00127033001970000000100200190000004320000613d0000001f0230003900000130052001970000003f025000390000013104200197000000400200043d0000000004420019000000000024004b000000000600003900000001060040390000012b0040009c0000023f0000213d00000001006001900000023f0000c13d000000400040043f0000000004320436000000000005004b000003ef0000613d0000000005540019000000000600003100000011066003670000000007040019000000006806043c0000000007870436000000000057004b000003eb0000c13d0000001f0530018f0000012f063001980000000003640019000003f90000613d000000000701034f0000000008040019000000007907043c0000000008980436000000000038004b000003f50000c13d000000000005004b000004060000613d000000000161034f0000000305500210000000000603043300000000065601cf000000000656022f000000000101043b0000010005500089000000000151022f00000000015101cf000000000161019f00000000001304350000000001020433000000200010008c000004210000c13d000000400100043d00000009020000290000000002020433000001000020008c0000044a0000413d00000064021000390000014a03000041000000000032043500000044021000390000014b0300004100000000003204350000002402100039000000250300003900000000003204350000013c020000410000000000210435000000040210003900000020030000390000000000320435000001270010009c000001270100804100000040011002100000014c011001c70000049a00010430000000400100043d00000044021000390000014803000041000000000032043500000024021000390000001f0300003900000000003204350000013c020000410000000000210435000000040210003900000020030000390000000000320435000001270010009c0000012701008041000000400110021000000135011001c70000049a000104300000001f0430018f0000012f023001980000043b0000613d000000000501034f0000000006000019000000005705043c0000000006760436000000000026004b000004370000c13d000000000004004b000004480000613d000000000121034f0000000304400210000000000502043300000000054501cf000000000545022f000000000101043b0000010004400089000000000141022f00000000014101cf000000000151019f000000000012043500000060013002100000049a000104300000000003040433000000f8022002100000006004100039000000000024043500000040021000390000000000320435000
000200210003900000008030000290000000000320435000000610310003900000009040000290000000004040433000000000004004b000004610000613d000000000500001900000009060000290000002006600039000900000006001d000000000606043300000000036304360000000105500039000000000045004b000004590000413d0000000003130049000000200430008a00000000004104350000001f0330003900000150043001970000000003140019000000000043004b000000000400003900000001040040390000012b0030009c0000023f0000213d00000001004001900000023f0000c13d000000400030043f000001270020009c000001270200804100000040022002100000000001010433000001270010009c00000127010080410000006001100210000000000121019f0000000002000414000001270020009c0000012702008041000000c002200210000000000112019f00000133011001c700008010020000390498048e0000040f0000000100200190000000fe0000613d000000000101043b000000400200043d0000000000120435000001270020009c0000012702008041000000400120021000000149011001c7000004990001042e0000048c002104210000000102000039000000000001042d0000000002000019000000000001042d00000491002104230000000102000039000000000001042d0000000002000019000000000001042d00000496002104230000000102000039000000000001042d0000000002000019000000000001042d0000049800000432000004990001042e0000049a00010430000000000000000000000000000000000000000000000000000000000000000000000000ffffffff0000000200000000000000000000000000000040000001000000000000000000ffffffff0000000000000000000000000000000000000000000000000000000089f9a07200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ffffffffffffffff0000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000ffffffff00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ffffffe000000000000000000000000000000000000000000000000000000001ffffffe000000000000000000000000000000000000000000000000000000003ffffffe0000000000000000000000000000000000000000000000000ffffffffffffff9f0200000000000000000
0000000000000000000000000000000000000000000007f7b0cf70000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000640000000000000000000000000000001f0000000000000000000000000000000000000000000000000000000043e266b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000024000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000007368612072657475726e656420696e76616c696420646174610000000000000008c379a00000000000000000000000000000000000000000000000000000000000000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffff06ffffff0000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000004e487b71000000000000000000000000000000000000000000000000000000006006d8b500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001ffffffffe0000000000000000000000000000000000000000000000000000003ffffffffe00000000000000000000000000000000000000000000000000000000000ffffe00000000000000000000000000000000000000000000000000000000001ffffe018876a04000000000000000000000000000000000000000000000000000000007fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe06b656363616b3235362072657475726e656420696e76616c69642064617461000000000000000000000000000000000000000020000000000000000000000000206269747300000000000000000000000000000000000000000000000000000053616665436173743a2076616c756520646f65736e27742066697420696e203800000000000000000000000000000000000000840000000000000000000000004f766572666c6f77000000000000000000000000000000000000000000000000fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffbfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffcffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe00000000000000000000000000000000000000000000000000000000000000000e901f5bd8811df26e614332e2110b9bc002
e2cbadd82065c67e102f858079d5a").unwrap() -} - fn encoded_uncompressed_state_diffs(input: &PubdataInput) -> Vec { let mut result = vec![]; for state_diff in input.state_diffs.iter() { @@ -48,7 +42,7 @@ fn compose_header_for_l1_commit_rollup(input: PubdataInput) -> Vec { let uncompressed_state_diffs_hash = keccak256(&uncompressed_state_diffs); full_header.extend(uncompressed_state_diffs_hash); - let pubdata_builder = RollupPubdataBuilder::new(Address::zero()); + let pubdata_builder = FullPubdataBuilder::new(Address::zero()); let mut full_pubdata = pubdata_builder.settlement_layer_pubdata(&input, ProtocolVersionId::latest()); let full_pubdata_hash = keccak256(&full_pubdata); @@ -78,15 +72,10 @@ pub(crate) fn test_rollup_da_output_hash_match() { // In this test, we check whether the L2 DA output hash is as expected. let l2_da_validator_address = Address::repeat_byte(0x12); - let system_env = SystemEnv { - version: ProtocolVersionId::Version27, - ..default_system_env() - }; let mut vm = VmTesterBuilder::new() .with_empty_in_memory_storage() .with_execution_mode(TxExecutionMode::VerifyExecute) .with_rich_accounts(1) - .with_system_env(system_env) .with_custom_contracts(vec![ContractToDeploy { bytecode: l2_rollup_da_validator_bytecode(), address: l2_da_validator_address, @@ -127,7 +116,7 @@ pub(crate) fn test_rollup_da_output_hash_match() { let result = vm.vm.execute(InspectExecutionMode::OneTx); assert!(!result.result.is_failed(), "Transaction wasn't successful"); - let pubdata_builder = RollupPubdataBuilder::new(l2_da_validator_address); + let pubdata_builder = FullPubdataBuilder::new(l2_da_validator_address); let batch_result = vm.vm.finish_batch(Rc::new(pubdata_builder)); assert!( !batch_result.block_tip_execution_result.result.is_failed(), diff --git a/core/lib/multivm/src/versions/testonly/l2_blocks.rs b/core/lib/multivm/src/versions/testonly/l2_blocks.rs index f8813231c9e1..ad14aeb60670 100644 --- a/core/lib/multivm/src/versions/testonly/l2_blocks.rs +++ 
b/core/lib/multivm/src/versions/testonly/l2_blocks.rs @@ -4,6 +4,7 @@ //! use assert_matches::assert_matches; +use ethabi::{ParamType, Token}; use zksync_system_constants::REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_BYTE; use zksync_types::{ block::{pack_block_info, L2BlockHasher}, @@ -13,6 +14,7 @@ use zksync_types::{ SYSTEM_CONTEXT_CURRENT_L2_BLOCK_INFO_POSITION, SYSTEM_CONTEXT_CURRENT_TX_ROLLING_HASH_POSITION, U256, }; +use zksync_vm_interface::VmRevertReason; use super::{default_l1_batch, get_empty_storage, tester::VmTesterBuilder, TestedVm}; use crate::{ @@ -27,6 +29,29 @@ use crate::{ }, }; +/// Encodes a Solidity function call with parameters into a Vec. +fn encode_function_call( + name: &str, + types: &[ParamType], + params: &[Token], +) -> Result { + let short_sig = ethabi::short_signature(name, types); + + // Check if the provided number of parameters matches the function's expected inputs + if types.len() != params.len() { + return Err(ethabi::Error::InvalidData); + } + + // Encode the function call with the provided parameters + let encoded_data = ethabi::encode(params); + + Ok(VmRevertReason::Unknown { + function_selector: short_sig.to_vec(), + data: [short_sig.to_vec(), encoded_data].concat(), + } + .to_string()) +} + fn get_l1_noop() -> Transaction { Transaction { common_data: ExecuteTransactionCommon::L1(L1TxCommonData { @@ -72,7 +97,7 @@ pub(crate) fn test_l2_block_initialization_timestamp() { assert_matches!( res.result, ExecutionResult::Halt { reason: Halt::FailedToSetL2Block(msg) } - if msg.contains("timestamp") + if msg.contains("0x5e9ad9b0") ); } @@ -107,7 +132,7 @@ pub(crate) fn test_l2_block_initialization_number_non_zero() { res.result, ExecutionResult::Halt { reason: Halt::FailedToSetL2Block( - "L2 block number is never expected to be zero".to_string() + encode_function_call("L2BlockNumberZero", &[], &[]).unwrap() ) } ); @@ -163,7 +188,15 @@ pub(crate) fn test_l2_block_same_l2_block() { // Case 1: Incorrect timestamp test_same_l2_block::( 
Some(Halt::FailedToSetL2Block( - "The timestamp of the same L2 block must be same".to_string(), + encode_function_call( + "IncorrectSameL2BlockTimestamp", + &[ParamType::Uint(128), ParamType::Uint(128)], + &[ + Token::Uint(U256::zero()), + Token::Uint(U256::from(1_700_000_001)), + ], + ) + .unwrap(), )), Some(0), None, @@ -172,7 +205,20 @@ pub(crate) fn test_l2_block_same_l2_block() { // Case 2: Incorrect previous block hash test_same_l2_block::( Some(Halt::FailedToSetL2Block( - "The previous hash of the same L2 block must be same".to_string(), + encode_function_call( + "IncorrectSameL2BlockPrevBlockHash", + &[ParamType::FixedBytes(32), ParamType::FixedBytes(32)], + &[ + Token::FixedBytes(H256::zero().0.to_vec()), + Token::FixedBytes( + hex::decode( + "e8e77626586f73b955364c7b4bbf0bb7f7685ebd40e852b164633a4acbd3244c", + ) + .unwrap(), + ), + ], + ) + .unwrap(), )), None, Some(H256::zero()), @@ -249,7 +295,12 @@ pub(crate) fn test_l2_block_new_l2_block() { None, None, Some(Halt::FailedToSetL2Block( - "Invalid new L2 block number".to_string(), + encode_function_call( + "InvalidNewL2BlockNumber", + &[ParamType::Uint(256)], + &[Token::Uint(U256::from(3u32))], + ) + .unwrap(), )), ); @@ -259,7 +310,14 @@ pub(crate) fn test_l2_block_new_l2_block() { None, Some(1), None, - Some(Halt::FailedToSetL2Block("The timestamp of the new L2 block must be greater than the timestamp of the previous L2 block".to_string())), + Some(Halt::FailedToSetL2Block( + encode_function_call( + "NonMonotonicL2BlockTimestamp", + &[ParamType::Uint(128), ParamType::Uint(128)], + &[Token::Uint(U256::from(1)), Token::Uint(U256::from(1))], + ) + .unwrap(), + )), ); // Case 3: Incorrect previous block hash @@ -269,7 +327,20 @@ pub(crate) fn test_l2_block_new_l2_block() { None, Some(H256::zero()), Some(Halt::FailedToSetL2Block( - "The current L2 block hash is incorrect".to_string(), + encode_function_call( + "IncorrectL2BlockHash", + &[ParamType::FixedBytes(32), ParamType::FixedBytes(32)], + &[ + 
Token::FixedBytes(H256::zero().0.to_vec()), + Token::FixedBytes( + hex::decode( + "de4c551714ad02a0a4f51252f966ef90c13376ea4c8a463eedfb242b97551c43", + ) + .unwrap(), + ), + ], + ) + .unwrap(), )), ); @@ -395,7 +466,14 @@ pub(crate) fn test_l2_block_first_in_batch() { prev_block_hash, max_virtual_blocks_to_create: 1, }, - Some(Halt::FailedToSetL2Block("The timestamp of the L2 block must be greater than or equal to the timestamp of the current batch".to_string())), + Some(Halt::FailedToSetL2Block( + encode_function_call( + "L2BlockAndBatchTimestampMismatch", + &[ParamType::Uint(128), ParamType::Uint(128)], + &[Token::Uint(U256::from(9)), Token::Uint(U256::from(12))], + ) + .unwrap(), + )), ); } diff --git a/core/lib/multivm/src/versions/testonly/mod.rs b/core/lib/multivm/src/versions/testonly/mod.rs index 5ab13df87337..c1a603bfeefc 100644 --- a/core/lib/multivm/src/versions/testonly/mod.rs +++ b/core/lib/multivm/src/versions/testonly/mod.rs @@ -20,15 +20,17 @@ use zksync_types::{ get_is_account_key, h256_to_u256, u256_to_h256, utils::storage_key_for_eth_balance, Address, L1BatchNumber, L2BlockNumber, L2ChainId, ProtocolVersionId, U256, }; -use zksync_vm_interface::{ - pubdata::PubdataBuilder, L1BatchEnv, L2BlockEnv, SystemEnv, TxExecutionMode, -}; pub(super) use self::tester::{ - validation_params, TestedVm, TestedVmForValidation, VmTester, VmTesterBuilder, + validation_params, TestedVm, TestedVmForValidation, TestedVmWithCallTracer, VmTester, + VmTesterBuilder, }; use crate::{ - interface::storage::InMemoryStorage, pubdata_builders::RollupPubdataBuilder, + interface::{ + pubdata::PubdataBuilder, storage::InMemoryStorage, L1BatchEnv, L2BlockEnv, SystemEnv, + TxExecutionMode, + }, + pubdata_builders::FullPubdataBuilder, vm_latest::constants::BATCH_COMPUTATIONAL_GAS_LIMIT, }; @@ -36,6 +38,7 @@ pub(super) mod account_validation_rules; pub(super) mod block_tip; pub(super) mod bootloader; pub(super) mod bytecode_publishing; +pub(super) mod call_tracer; pub(super) mod 
circuits; pub(super) mod code_oracle; pub(super) mod default_aa; @@ -123,7 +126,7 @@ pub(super) fn default_l1_batch(number: L1BatchNumber) -> L1BatchEnv { } pub(super) fn default_pubdata_builder() -> Rc { - Rc::new(RollupPubdataBuilder::new(Address::zero())) + Rc::new(FullPubdataBuilder::new(Address::zero())) } pub(super) fn make_address_rich(storage: &mut InMemoryStorage, address: Address) { diff --git a/core/lib/multivm/src/versions/testonly/refunds.rs b/core/lib/multivm/src/versions/testonly/refunds.rs index 384a3edb7dbd..4d549f5a9be0 100644 --- a/core/lib/multivm/src/versions/testonly/refunds.rs +++ b/core/lib/multivm/src/versions/testonly/refunds.rs @@ -140,7 +140,7 @@ pub(crate) fn test_predetermined_refunded_gas() { current_state_without_predefined_refunds.user_l2_to_l1_logs ); - assert_ne!( + assert_eq!( current_state_with_changed_predefined_refunds.system_logs, current_state_without_predefined_refunds.system_logs ); diff --git a/core/lib/multivm/src/versions/testonly/tester/mod.rs b/core/lib/multivm/src/versions/testonly/tester/mod.rs index c29f2dbbf8f3..a25909525f11 100644 --- a/core/lib/multivm/src/versions/testonly/tester/mod.rs +++ b/core/lib/multivm/src/versions/testonly/tester/mod.rs @@ -8,6 +8,7 @@ use zksync_types::{ writes::StateDiffRecord, Address, L1BatchNumber, StorageKey, Transaction, H256, U256, }; +use zksync_vm_interface::Call; pub(crate) use self::transaction_test_info::{ExpectedError, TransactionTestInfo, TxModifier}; use super::get_empty_storage; @@ -252,3 +253,7 @@ pub(crate) fn validation_params(tx: &L2Tx, system: &SystemEnv) -> ValidationPara timestamp_asserter_params: None, } } + +pub(crate) trait TestedVmWithCallTracer: TestedVm { + fn inspect_with_call_tracer(&mut self) -> (VmExecutionResultAndLogs, Vec); +} diff --git a/core/lib/multivm/src/versions/vm_fast/bootloader_state/l2_block.rs b/core/lib/multivm/src/versions/vm_fast/bootloader_state/l2_block.rs deleted file mode 100644 index 4f05ef30a46d..000000000000 --- 
a/core/lib/multivm/src/versions/vm_fast/bootloader_state/l2_block.rs +++ /dev/null @@ -1,78 +0,0 @@ -use std::cmp::Ordering; - -use zksync_types::{web3::keccak256_concat, L2BlockNumber, H256}; - -use super::{snapshot::L2BlockSnapshot, tx::BootloaderTx}; -use crate::{ - interface::{L2Block, L2BlockEnv}, - vm_latest::utils::l2_blocks::l2_block_hash, -}; - -const EMPTY_TXS_ROLLING_HASH: H256 = H256::zero(); - -#[derive(Debug)] -pub(crate) struct BootloaderL2Block { - pub(crate) number: u32, - pub(crate) timestamp: u64, - pub(crate) txs_rolling_hash: H256, // The rolling hash of all the transactions in the miniblock - pub(crate) prev_block_hash: H256, - // Number of the first L2 block tx in L1 batch - pub(crate) first_tx_index: usize, - pub(crate) max_virtual_blocks_to_create: u32, - pub(crate) txs: Vec, -} - -impl BootloaderL2Block { - pub(crate) fn new(l2_block: L2BlockEnv, first_tx_place: usize) -> Self { - Self { - number: l2_block.number, - timestamp: l2_block.timestamp, - txs_rolling_hash: EMPTY_TXS_ROLLING_HASH, - prev_block_hash: l2_block.prev_block_hash, - first_tx_index: first_tx_place, - max_virtual_blocks_to_create: l2_block.max_virtual_blocks_to_create, - txs: vec![], - } - } - - pub(super) fn push_tx(&mut self, tx: BootloaderTx) { - self.update_rolling_hash(tx.hash); - self.txs.push(tx) - } - - pub(crate) fn get_hash(&self) -> H256 { - l2_block_hash( - L2BlockNumber(self.number), - self.timestamp, - self.prev_block_hash, - self.txs_rolling_hash, - ) - } - - fn update_rolling_hash(&mut self, tx_hash: H256) { - self.txs_rolling_hash = keccak256_concat(self.txs_rolling_hash, tx_hash) - } - - pub(crate) fn make_snapshot(&self) -> L2BlockSnapshot { - L2BlockSnapshot { - txs_rolling_hash: self.txs_rolling_hash, - txs_len: self.txs.len(), - } - } - - pub(crate) fn apply_snapshot(&mut self, snapshot: L2BlockSnapshot) { - self.txs_rolling_hash = snapshot.txs_rolling_hash; - match self.txs.len().cmp(&snapshot.txs_len) { - Ordering::Greater => 
self.txs.truncate(snapshot.txs_len), - Ordering::Less => panic!("Applying snapshot from future is not supported"), - Ordering::Equal => {} - } - } - pub(crate) fn l2_block(&self) -> L2Block { - L2Block { - number: self.number, - timestamp: self.timestamp, - hash: self.get_hash(), - } - } -} diff --git a/core/lib/multivm/src/versions/vm_fast/bootloader_state/mod.rs b/core/lib/multivm/src/versions/vm_fast/bootloader_state/mod.rs deleted file mode 100644 index 73830de2759b..000000000000 --- a/core/lib/multivm/src/versions/vm_fast/bootloader_state/mod.rs +++ /dev/null @@ -1,8 +0,0 @@ -mod l2_block; -mod snapshot; -mod state; -mod tx; - -pub(crate) mod utils; -pub(crate) use snapshot::BootloaderStateSnapshot; -pub use state::BootloaderState; diff --git a/core/lib/multivm/src/versions/vm_fast/bootloader_state/state.rs b/core/lib/multivm/src/versions/vm_fast/bootloader_state/state.rs deleted file mode 100644 index be4f2881297d..000000000000 --- a/core/lib/multivm/src/versions/vm_fast/bootloader_state/state.rs +++ /dev/null @@ -1,323 +0,0 @@ -use std::cmp::Ordering; - -use once_cell::sync::OnceCell; -use zksync_types::{vm::VmVersion, L2ChainId, ProtocolVersionId, U256}; - -use super::{ - l2_block::BootloaderL2Block, - tx::BootloaderTx, - utils::{apply_l2_block, apply_pubdata_to_memory, apply_tx_to_memory}, - BootloaderStateSnapshot, -}; -use crate::{ - interface::{ - pubdata::{PubdataBuilder, PubdataInput}, - BootloaderMemory, CompressedBytecodeInfo, L2BlockEnv, TxExecutionMode, - }, - versions::vm_fast::transaction_data::TransactionData, - vm_latest::{ - constants::get_tx_description_offset, utils::l2_blocks::assert_next_block, - MultiVmSubversion, - }, -}; - -/// Intermediate bootloader-related VM state. -/// -/// Required to process transactions one by one (since we intercept the VM execution to execute -/// transactions and add new ones to the memory on the fly). -/// Keeps tracking everything related to the bootloader memory and can restore the whole memory. 
-/// -/// -/// Serves two purposes: -/// - Tracks where next tx should be pushed to in the bootloader memory. -/// - Tracks which transaction should be executed next. -#[derive(Debug)] -pub struct BootloaderState { - /// ID of the next transaction to be executed. - /// See the structure doc-comment for a better explanation of purpose. - tx_to_execute: usize, - /// Stored txs in bootloader memory - l2_blocks: Vec, - /// The number of 32-byte words spent on the already included compressed bytecodes. - compressed_bytecodes_encoding: usize, - /// Initial memory of bootloader - initial_memory: BootloaderMemory, - /// Mode of txs for execution, it can be changed once per vm lunch - execution_mode: TxExecutionMode, - /// Current offset of the free space in the bootloader memory. - free_tx_offset: usize, - /// Information about the pubdata that will be needed to supply to the L1Messenger - pubdata_information: OnceCell, - /// Protocol version. - protocol_version: ProtocolVersionId, - /// Protocol subversion - subversion: MultiVmSubversion, -} - -impl BootloaderState { - pub(crate) fn new( - execution_mode: TxExecutionMode, - initial_memory: BootloaderMemory, - first_l2_block: L2BlockEnv, - protocol_version: ProtocolVersionId, - ) -> Self { - let l2_block = BootloaderL2Block::new(first_l2_block, 0); - Self { - tx_to_execute: 0, - compressed_bytecodes_encoding: 0, - l2_blocks: vec![l2_block], - initial_memory, - execution_mode, - free_tx_offset: 0, - pubdata_information: Default::default(), - protocol_version, - subversion: MultiVmSubversion::try_from(VmVersion::from(protocol_version)).unwrap(), - } - } - - pub(crate) fn set_refund_for_current_tx(&mut self, refund: u64) { - let current_tx = self.current_tx(); - // We can't set the refund for the latest tx or using the latest l2_block for fining tx - // Because we can fill the whole batch first and then execute txs one by one - let tx = self.find_tx_mut(current_tx); - tx.refund = refund; - } - - pub(crate) fn 
set_pubdata_input(&mut self, info: PubdataInput) { - self.pubdata_information - .set(info) - .expect("Pubdata information is already set"); - } - - pub(crate) fn start_new_l2_block(&mut self, l2_block: L2BlockEnv) { - let last_block = self.last_l2_block(); - assert!( - !last_block.txs.is_empty(), - "Can not create new miniblocks on top of empty ones" - ); - assert_next_block(&last_block.l2_block(), &l2_block); - self.push_l2_block(l2_block); - } - - /// This method bypass sanity checks and should be used carefully. - pub(crate) fn push_l2_block(&mut self, l2_block: L2BlockEnv) { - self.l2_blocks - .push(BootloaderL2Block::new(l2_block, self.free_tx_index())) - } - - pub(crate) fn push_tx( - &mut self, - tx: TransactionData, - predefined_overhead: u32, - predefined_refund: u64, - compressed_bytecodes: Vec, - trusted_ergs_limit: U256, - chain_id: L2ChainId, - ) -> BootloaderMemory { - let tx_offset = self.free_tx_offset(); - let bootloader_tx = BootloaderTx::new( - tx, - predefined_refund, - predefined_overhead, - trusted_ergs_limit, - compressed_bytecodes, - tx_offset, - chain_id, - ); - - let mut memory = vec![]; - let compressed_bytecode_size = apply_tx_to_memory( - &mut memory, - &bootloader_tx, - self.last_l2_block(), - self.free_tx_index(), - self.free_tx_offset(), - self.compressed_bytecodes_encoding, - self.execution_mode, - self.last_l2_block().txs.is_empty(), - self.subversion, - ); - self.compressed_bytecodes_encoding += compressed_bytecode_size; - self.free_tx_offset = tx_offset + bootloader_tx.encoded_len(); - self.last_mut_l2_block().push_tx(bootloader_tx); - memory - } - - pub(crate) fn last_l2_block(&self) -> &BootloaderL2Block { - self.l2_blocks.last().unwrap() - } - - pub(crate) fn get_pubdata_information(&self) -> &PubdataInput { - self.pubdata_information - .get() - .expect("Pubdata information is not set") - } - - pub(crate) fn settlement_layer_pubdata(&self, pubdata_builder: &dyn PubdataBuilder) -> Vec { - let pubdata_information = self - 
.pubdata_information - .get() - .expect("Pubdata information is not set"); - pubdata_builder.settlement_layer_pubdata(pubdata_information, self.protocol_version) - } - - fn last_mut_l2_block(&mut self) -> &mut BootloaderL2Block { - self.l2_blocks.last_mut().unwrap() - } - - /// Apply all bootloader transaction to the initial memory - pub(crate) fn bootloader_memory( - &self, - pubdata_builder: &dyn PubdataBuilder, - ) -> BootloaderMemory { - let mut initial_memory = self.initial_memory.clone(); - let mut offset = 0; - let mut compressed_bytecodes_offset = 0; - let mut tx_index = 0; - for l2_block in &self.l2_blocks { - for (num, tx) in l2_block.txs.iter().enumerate() { - let compressed_bytecodes_size = apply_tx_to_memory( - &mut initial_memory, - tx, - l2_block, - tx_index, - offset, - compressed_bytecodes_offset, - self.execution_mode, - num == 0, - self.subversion, - ); - offset += tx.encoded_len(); - compressed_bytecodes_offset += compressed_bytecodes_size; - tx_index += 1; - } - if l2_block.txs.is_empty() { - apply_l2_block(&mut initial_memory, l2_block, tx_index, self.subversion) - } - } - - let pubdata_information = self - .pubdata_information - .get() - .expect("Empty pubdata information"); - - apply_pubdata_to_memory( - &mut initial_memory, - pubdata_builder, - pubdata_information, - self.protocol_version, - self.subversion, - ); - initial_memory - } - - fn free_tx_offset(&self) -> usize { - self.free_tx_offset - } - - pub(crate) fn free_tx_index(&self) -> usize { - let l2_block = self.last_l2_block(); - l2_block.first_tx_index + l2_block.txs.len() - } - - pub(crate) fn get_last_tx_compressed_bytecodes(&self) -> &[CompressedBytecodeInfo] { - if let Some(tx) = self.last_l2_block().txs.last() { - &tx.compressed_bytecodes - } else { - &[] - } - } - - /// Returns the id of current tx - pub(crate) fn current_tx(&self) -> usize { - self.tx_to_execute - .checked_sub(1) - .expect("There are no current tx to execute") - } - - /// Returns the ID of the next 
transaction to be executed and increments the local transaction counter. - pub(crate) fn move_tx_to_execute_pointer(&mut self) -> usize { - assert!( - self.tx_to_execute < self.free_tx_index(), - "Attempt to execute tx that was not pushed to memory. Tx ID: {}, txs in bootloader: {}", - self.tx_to_execute, - self.free_tx_index() - ); - - let old = self.tx_to_execute; - self.tx_to_execute += 1; - old - } - - /// Get offset of tx description - pub(crate) fn get_tx_description_offset(&self, tx_index: usize) -> usize { - get_tx_description_offset(self.subversion) + self.find_tx(tx_index).offset - } - - pub(crate) fn insert_fictive_l2_block(&mut self) -> &BootloaderL2Block { - let block = self.last_l2_block(); - if !block.txs.is_empty() { - self.start_new_l2_block(L2BlockEnv { - timestamp: block.timestamp + 1, - number: block.number + 1, - prev_block_hash: block.get_hash(), - max_virtual_blocks_to_create: 1, - }); - } - self.last_l2_block() - } - - fn find_tx(&self, tx_index: usize) -> &BootloaderTx { - for block in self.l2_blocks.iter().rev() { - if tx_index >= block.first_tx_index { - return &block.txs[tx_index - block.first_tx_index]; - } - } - panic!("The tx with index {} must exist", tx_index) - } - - fn find_tx_mut(&mut self, tx_index: usize) -> &mut BootloaderTx { - for block in self.l2_blocks.iter_mut().rev() { - if tx_index >= block.first_tx_index { - return &mut block.txs[tx_index - block.first_tx_index]; - } - } - panic!("The tx with index {} must exist", tx_index) - } - - pub(crate) fn get_snapshot(&self) -> BootloaderStateSnapshot { - BootloaderStateSnapshot { - tx_to_execute: self.tx_to_execute, - l2_blocks_len: self.l2_blocks.len(), - last_l2_block: self.last_l2_block().make_snapshot(), - compressed_bytecodes_encoding: self.compressed_bytecodes_encoding, - free_tx_offset: self.free_tx_offset, - is_pubdata_information_provided: self.pubdata_information.get().is_some(), - } - } - - pub(crate) fn apply_snapshot(&mut self, snapshot: BootloaderStateSnapshot) { 
- self.tx_to_execute = snapshot.tx_to_execute; - self.compressed_bytecodes_encoding = snapshot.compressed_bytecodes_encoding; - self.free_tx_offset = snapshot.free_tx_offset; - match self.l2_blocks.len().cmp(&snapshot.l2_blocks_len) { - Ordering::Greater => self.l2_blocks.truncate(snapshot.l2_blocks_len), - Ordering::Less => panic!("Applying snapshot from future is not supported"), - Ordering::Equal => {} - } - self.last_mut_l2_block() - .apply_snapshot(snapshot.last_l2_block); - - if !snapshot.is_pubdata_information_provided { - self.pubdata_information = Default::default(); - } else { - // Under the correct usage of the snapshots of the bootloader state, - // this assertion should never fail, i.e. since the pubdata information - // can be set only once. However, we have this assertion just in case. - assert!( - self.pubdata_information.get().is_some(), - "Snapshot with no pubdata can not rollback to snapshot with one" - ); - } - } -} diff --git a/core/lib/multivm/src/versions/vm_fast/bootloader_state/tx.rs b/core/lib/multivm/src/versions/vm_fast/bootloader_state/tx.rs deleted file mode 100644 index dc0706561d5e..000000000000 --- a/core/lib/multivm/src/versions/vm_fast/bootloader_state/tx.rs +++ /dev/null @@ -1,50 +0,0 @@ -use zksync_types::{L2ChainId, H256, U256}; - -use crate::{ - interface::CompressedBytecodeInfo, versions::vm_fast::transaction_data::TransactionData, -}; - -/// Information about tx necessary for execution in bootloader. -#[derive(Debug, Clone)] -pub(crate) struct BootloaderTx { - pub(crate) hash: H256, - /// Encoded transaction - pub(crate) encoded: Vec, - /// Compressed bytecodes, which has been published during this transaction - pub(crate) compressed_bytecodes: Vec, - /// Refunds for this transaction - pub(crate) refund: u64, - /// Gas overhead - pub(crate) gas_overhead: u32, - /// Gas Limit for this transaction. 
It can be different from the gas limit inside the transaction - pub(crate) trusted_gas_limit: U256, - /// Offset of the tx in bootloader memory - pub(crate) offset: usize, -} - -impl BootloaderTx { - pub(super) fn new( - tx: TransactionData, - predefined_refund: u64, - predefined_overhead: u32, - trusted_gas_limit: U256, - compressed_bytecodes: Vec, - offset: usize, - chain_id: L2ChainId, - ) -> Self { - let hash = tx.tx_hash(chain_id); - Self { - hash, - encoded: tx.into_tokens(), - compressed_bytecodes, - refund: predefined_refund, - gas_overhead: predefined_overhead, - trusted_gas_limit, - offset, - } - } - - pub(super) fn encoded_len(&self) -> usize { - self.encoded.len() - } -} diff --git a/core/lib/multivm/src/versions/vm_fast/bootloader_state/utils.rs b/core/lib/multivm/src/versions/vm_fast/bootloader_state/utils.rs deleted file mode 100644 index 8883fd33904b..000000000000 --- a/core/lib/multivm/src/versions/vm_fast/bootloader_state/utils.rs +++ /dev/null @@ -1,236 +0,0 @@ -use zksync_types::{ethabi, h256_to_u256, ProtocolVersionId, U256}; - -use super::{l2_block::BootloaderL2Block, tx::BootloaderTx}; -use crate::{ - interface::{ - pubdata::{PubdataBuilder, PubdataInput}, - BootloaderMemory, CompressedBytecodeInfo, TxExecutionMode, - }, - utils::bytecode, - vm_latest::{ - constants::{ - get_bootloader_tx_description_offset, get_compressed_bytecodes_offset, - get_operator_provided_l1_messenger_pubdata_offset, get_operator_refunds_offset, - get_tx_description_offset, get_tx_operator_l2_block_info_offset, - get_tx_overhead_offset, get_tx_trusted_gas_limit_offset, - BOOTLOADER_TX_DESCRIPTION_SIZE, OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_SLOTS, - TX_OPERATOR_SLOTS_PER_L2_BLOCK_INFO, - }, - MultiVmSubversion, - }, -}; - -pub(super) fn get_memory_for_compressed_bytecodes( - compressed_bytecodes: &[CompressedBytecodeInfo], -) -> Vec { - let memory_addition: Vec<_> = compressed_bytecodes - .iter() - .flat_map(bytecode::encode_call) - .collect(); - 
bytecode::bytes_to_be_words(&memory_addition) -} - -#[allow(clippy::too_many_arguments)] -pub(super) fn apply_tx_to_memory( - memory: &mut BootloaderMemory, - bootloader_tx: &BootloaderTx, - bootloader_l2_block: &BootloaderL2Block, - tx_index: usize, - tx_offset: usize, - compressed_bytecodes_size: usize, - execution_mode: TxExecutionMode, - start_new_l2_block: bool, - subversion: MultiVmSubversion, -) -> usize { - let bootloader_description_offset = get_bootloader_tx_description_offset(subversion) - + BOOTLOADER_TX_DESCRIPTION_SIZE * tx_index; - let tx_description_offset = get_tx_description_offset(subversion) + tx_offset; - - memory.push(( - bootloader_description_offset, - assemble_tx_meta(execution_mode, true), - )); - - memory.push(( - bootloader_description_offset + 1, - U256::from_big_endian(&(32 * tx_description_offset).to_be_bytes()), - )); - - let refund_offset = get_operator_refunds_offset(subversion) + tx_index; - memory.push((refund_offset, bootloader_tx.refund.into())); - - let overhead_offset = get_tx_overhead_offset(subversion) + tx_index; - memory.push((overhead_offset, bootloader_tx.gas_overhead.into())); - - let trusted_gas_limit_offset = get_tx_trusted_gas_limit_offset(subversion) + tx_index; - memory.push((trusted_gas_limit_offset, bootloader_tx.trusted_gas_limit)); - - memory.extend( - (tx_description_offset..tx_description_offset + bootloader_tx.encoded_len()) - .zip(bootloader_tx.encoded.clone()), - ); - apply_l2_block_inner( - memory, - bootloader_l2_block, - tx_index, - start_new_l2_block, - subversion, - ); - - // Note, +1 is moving for pointer - let compressed_bytecodes_offset = - get_compressed_bytecodes_offset(subversion) + 1 + compressed_bytecodes_size; - - let encoded_compressed_bytecodes = - get_memory_for_compressed_bytecodes(&bootloader_tx.compressed_bytecodes); - let compressed_bytecodes_encoding = encoded_compressed_bytecodes.len(); - - memory.extend( - (compressed_bytecodes_offset - ..compressed_bytecodes_offset + 
encoded_compressed_bytecodes.len()) - .zip(encoded_compressed_bytecodes), - ); - compressed_bytecodes_encoding -} - -pub(crate) fn apply_l2_block( - memory: &mut BootloaderMemory, - bootloader_l2_block: &BootloaderL2Block, - txs_index: usize, - subversion: MultiVmSubversion, -) { - apply_l2_block_inner(memory, bootloader_l2_block, txs_index, true, subversion) -} - -fn apply_l2_block_inner( - memory: &mut BootloaderMemory, - bootloader_l2_block: &BootloaderL2Block, - txs_index: usize, - start_new_l2_block: bool, - subversion: MultiVmSubversion, -) { - // Since L2 block information start from the `TX_OPERATOR_L2_BLOCK_INFO_OFFSET` and each - // L2 block info takes `TX_OPERATOR_SLOTS_PER_L2_BLOCK_INFO` slots, the position where the L2 block info - // for this transaction needs to be written is: - - let block_position = get_tx_operator_l2_block_info_offset(subversion) - + txs_index * TX_OPERATOR_SLOTS_PER_L2_BLOCK_INFO; - - memory.extend(vec![ - (block_position, bootloader_l2_block.number.into()), - (block_position + 1, bootloader_l2_block.timestamp.into()), - ( - block_position + 2, - h256_to_u256(bootloader_l2_block.prev_block_hash), - ), - ( - block_position + 3, - if start_new_l2_block { - bootloader_l2_block.max_virtual_blocks_to_create.into() - } else { - U256::zero() - }, - ), - ]) -} - -fn bootloader_memory_input( - pubdata_builder: &dyn PubdataBuilder, - input: &PubdataInput, - protocol_version: ProtocolVersionId, -) -> Vec { - let l2_da_validator_address = pubdata_builder.l2_da_validator(); - let operator_input = pubdata_builder.l1_messenger_operator_input(input, protocol_version); - ethabi::encode(&[ - ethabi::Token::Address(l2_da_validator_address), - ethabi::Token::Bytes(operator_input), - ]) -} - -pub(crate) fn apply_pubdata_to_memory( - memory: &mut BootloaderMemory, - pubdata_builder: &dyn PubdataBuilder, - pubdata_information: &PubdataInput, - protocol_version: ProtocolVersionId, - subversion: MultiVmSubversion, -) { - let 
(l1_messenger_pubdata_start_slot, pubdata) = match subversion { - MultiVmSubversion::SmallBootloaderMemory | MultiVmSubversion::IncreasedBootloaderMemory => { - // Skipping two slots as they will be filled by the bootloader itself: - // - One slot is for the selector of the call to the L1Messenger. - // - The other slot is for the 0x20 offset for the calldata. - let l1_messenger_pubdata_start_slot = - get_operator_provided_l1_messenger_pubdata_offset(subversion) + 2; - - // Need to skip first word as it represents array offset - // while bootloader expects only [len || data] - let pubdata = ethabi::encode(&[ethabi::Token::Bytes( - pubdata_builder.l1_messenger_operator_input(pubdata_information, protocol_version), - )])[32..] - .to_vec(); - - assert!( - pubdata.len() / 32 <= OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_SLOTS - 2, - "The encoded pubdata is too big" - ); - - (l1_messenger_pubdata_start_slot, pubdata) - } - MultiVmSubversion::Gateway => { - // Skipping the first slot as it will be filled by the bootloader itself: - // It is for the selector of the call to the L1Messenger. - let l1_messenger_pubdata_start_slot = - get_operator_provided_l1_messenger_pubdata_offset(subversion) + 1; - - let pubdata = - bootloader_memory_input(pubdata_builder, pubdata_information, protocol_version); - - assert!( - // Note that unlike the previous version, the difference is `1`, since now it also includes the offset - pubdata.len() / 32 < OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_SLOTS, - "The encoded pubdata is too big" - ); - - (l1_messenger_pubdata_start_slot, pubdata) - } - }; - - pubdata - .chunks(32) - .enumerate() - .for_each(|(slot_offset, value)| { - memory.push(( - l1_messenger_pubdata_start_slot + slot_offset, - U256::from(value), - )) - }); -} - -/// Forms a word that contains meta information for the transaction execution. 
-/// -/// # Current layout -/// -/// - 0 byte (MSB): server-side tx execution mode -/// In the server, we may want to execute different parts of the transaction in the different context -/// For example, when checking validity, we don't want to actually execute transaction and have side effects. -/// -/// Possible values: -/// - 0x00: validate & execute (normal mode) -/// - 0x02: execute but DO NOT validate -/// -/// - 31 byte (LSB): whether to execute transaction or not (at all). -pub(super) fn assemble_tx_meta(execution_mode: TxExecutionMode, execute_tx: bool) -> U256 { - let mut output = [0u8; 32]; - - // Set 0 byte (execution mode) - output[0] = match execution_mode { - TxExecutionMode::VerifyExecute => 0x00, - TxExecutionMode::EstimateFee => 0x00, - TxExecutionMode::EthCall => 0x02, - }; - - // Set 31 byte (marker for tx execution) - output[31] = u8::from(execute_tx); - - U256::from_big_endian(&output) -} diff --git a/core/lib/multivm/src/versions/vm_fast/hook.rs b/core/lib/multivm/src/versions/vm_fast/hook.rs deleted file mode 100644 index b138c6d496d9..000000000000 --- a/core/lib/multivm/src/versions/vm_fast/hook.rs +++ /dev/null @@ -1,39 +0,0 @@ -#[derive(Debug, Copy, Clone)] -pub(crate) enum Hook { - AccountValidationEntered, - PaymasterValidationEntered, - ValidationExited, - ValidationStepEnded, - TxHasEnded, - DebugLog, - DebugReturnData, - NearCallCatch, - AskOperatorForRefund, - NotifyAboutRefund, - PostResult, - FinalBatchInfo, - PubdataRequested, -} - -impl Hook { - /// # Panics - /// Panics if the number does not correspond to any hook. 
- pub fn from_u32(hook: u32) -> Self { - match hook { - 0 => Hook::AccountValidationEntered, - 1 => Hook::PaymasterValidationEntered, - 2 => Hook::ValidationExited, - 3 => Hook::ValidationStepEnded, - 4 => Hook::TxHasEnded, - 5 => Hook::DebugLog, - 6 => Hook::DebugReturnData, - 7 => Hook::NearCallCatch, - 8 => Hook::AskOperatorForRefund, - 9 => Hook::NotifyAboutRefund, - 10 => Hook::PostResult, - 11 => Hook::FinalBatchInfo, - 12 => Hook::PubdataRequested, - _ => panic!("Unknown hook {}", hook), - } - } -} diff --git a/core/lib/multivm/src/versions/vm_fast/initial_bootloader_memory.rs b/core/lib/multivm/src/versions/vm_fast/initial_bootloader_memory.rs deleted file mode 100644 index 89b22d328ac5..000000000000 --- a/core/lib/multivm/src/versions/vm_fast/initial_bootloader_memory.rs +++ /dev/null @@ -1,43 +0,0 @@ -use zksync_types::{address_to_u256, h256_to_u256, U256}; - -use crate::{interface::L1BatchEnv, vm_latest::utils::fee::get_batch_base_fee}; - -const OPERATOR_ADDRESS_SLOT: usize = 0; -const PREV_BLOCK_HASH_SLOT: usize = 1; -const NEW_BLOCK_TIMESTAMP_SLOT: usize = 2; -const NEW_BLOCK_NUMBER_SLOT: usize = 3; -const FAIR_PUBDATA_PRICE_SLOT: usize = 4; -const FAIR_L2_GAS_PRICE_SLOT: usize = 5; -const EXPECTED_BASE_FEE_SLOT: usize = 6; -const SHOULD_SET_NEW_BLOCK_SLOT: usize = 7; - -/// Returns the initial memory for the bootloader based on the current batch environment. 
-pub(crate) fn bootloader_initial_memory(l1_batch: &L1BatchEnv) -> Vec<(usize, U256)> { - let (prev_block_hash, should_set_new_block) = l1_batch - .previous_batch_hash - .map(|prev_block_hash| (h256_to_u256(prev_block_hash), U256::one())) - .unwrap_or_default(); - - vec![ - ( - OPERATOR_ADDRESS_SLOT, - address_to_u256(&l1_batch.fee_account), - ), - (PREV_BLOCK_HASH_SLOT, prev_block_hash), - (NEW_BLOCK_TIMESTAMP_SLOT, U256::from(l1_batch.timestamp)), - (NEW_BLOCK_NUMBER_SLOT, U256::from(l1_batch.number.0)), - ( - FAIR_PUBDATA_PRICE_SLOT, - U256::from(l1_batch.fee_input.fair_pubdata_price()), - ), - ( - FAIR_L2_GAS_PRICE_SLOT, - U256::from(l1_batch.fee_input.fair_l2_gas_price()), - ), - ( - EXPECTED_BASE_FEE_SLOT, - U256::from(get_batch_base_fee(l1_batch)), - ), - (SHOULD_SET_NEW_BLOCK_SLOT, should_set_new_block), - ] -} diff --git a/core/lib/multivm/src/versions/vm_fast/mod.rs b/core/lib/multivm/src/versions/vm_fast/mod.rs index dca575138553..291961d3312a 100644 --- a/core/lib/multivm/src/versions/vm_fast/mod.rs +++ b/core/lib/multivm/src/versions/vm_fast/mod.rs @@ -2,21 +2,16 @@ pub use zksync_vm2::interface; pub(crate) use self::version::FastVmVersion; pub use self::{ - tracers::{FullValidationTracer, ValidationTracer}, + tracers::{CallTracer, FullValidationTracer, ValidationTracer}, vm::Vm, }; -mod bootloader_state; mod bytecode; mod events; mod glue; -mod hook; -mod initial_bootloader_memory; -mod refund; #[cfg(test)] mod tests; mod tracers; -mod transaction_data; mod utils; mod version; mod vm; diff --git a/core/lib/multivm/src/versions/vm_fast/tests/call_tracer.rs b/core/lib/multivm/src/versions/vm_fast/tests/call_tracer.rs new file mode 100644 index 000000000000..9a990bb8d200 --- /dev/null +++ b/core/lib/multivm/src/versions/vm_fast/tests/call_tracer.rs @@ -0,0 +1,31 @@ +use crate::versions::{testonly::call_tracer, vm_fast::Vm}; + +#[test] +fn basic_behavior() { + call_tracer::test_basic_behavior::>(); +} + +#[test] +fn transfer() { + 
call_tracer::test_transfer::>(); +} + +#[test] +fn reverted_tx() { + call_tracer::test_reverted_tx::>(); +} + +#[test] +fn reverted_deployment() { + call_tracer::test_reverted_deployment_tx::>(); +} + +#[test] +fn out_of_gas() { + call_tracer::test_out_of_gas::>(); +} + +#[test] +fn recursive_tx() { + call_tracer::test_recursive_tx::>(); +} diff --git a/core/lib/multivm/src/versions/vm_fast/tests/l1_messenger.rs b/core/lib/multivm/src/versions/vm_fast/tests/l1_messenger.rs index c7d4594d7692..0bd01c7de134 100644 --- a/core/lib/multivm/src/versions/vm_fast/tests/l1_messenger.rs +++ b/core/lib/multivm/src/versions/vm_fast/tests/l1_messenger.rs @@ -1,7 +1,6 @@ use crate::{versions::testonly::l1_messenger::test_rollup_da_output_hash_match, vm_fast::Vm}; #[test] -#[ignore] // Requires post-gateway system contracts fn rollup_da_output_hash_match() { test_rollup_da_output_hash_match::>(); } diff --git a/core/lib/multivm/src/versions/vm_fast/tests/mod.rs b/core/lib/multivm/src/versions/vm_fast/tests/mod.rs index e148444922ba..89edd85b86f1 100644 --- a/core/lib/multivm/src/versions/vm_fast/tests/mod.rs +++ b/core/lib/multivm/src/versions/vm_fast/tests/mod.rs @@ -8,21 +8,24 @@ use zksync_vm_interface::{ pubdata::{PubdataBuilder, PubdataInput}, storage::ReadStorage, tracer::ViolatedValidationRule, - CurrentExecutionState, InspectExecutionMode, L2BlockEnv, VmExecutionMode, + Call, CurrentExecutionState, InspectExecutionMode, L2BlockEnv, VmExecutionMode, VmExecutionResultAndLogs, VmInterface, }; use super::{FullValidationTracer, ValidationTracer, Vm}; use crate::{ interface::storage::{ImmutableStorageView, InMemoryStorage}, - versions::testonly::{validation_params, TestedVm, TestedVmForValidation}, - vm_fast::tracers::WithBuiltinTracers, + versions::testonly::{ + validation_params, TestedVm, TestedVmForValidation, TestedVmWithCallTracer, + }, + vm_fast::{tracers::WithBuiltinTracers, CallTracer}, }; mod account_validation_rules; mod block_tip; mod bootloader; mod 
bytecode_publishing; +mod call_tracer; mod circuits; mod code_oracle; mod default_aa; @@ -180,7 +183,7 @@ where } } -impl TestedVmForValidation for Vm, (), FullValidationTracer> { +impl TestedVmForValidation for TestedFastVm<(), FullValidationTracer> { fn run_validation(&mut self, tx: L2Tx, timestamp: u64) -> Option { let validation_params = validation_params(&tx, &self.system_env); self.push_transaction(tx.into()); @@ -189,3 +192,11 @@ impl TestedVmForValidation for Vm, (), Ful tracer.1.validation_error() } } + +impl TestedVmWithCallTracer for TestedFastVm { + fn inspect_with_call_tracer(&mut self) -> (VmExecutionResultAndLogs, Vec) { + let mut tracer = (CallTracer::default(), ()); + let result = self.inspect(&mut tracer, InspectExecutionMode::OneTx); + (result, tracer.0.into_result()) + } +} diff --git a/core/lib/multivm/src/versions/vm_fast/tracers/calls.rs b/core/lib/multivm/src/versions/vm_fast/tracers/calls.rs new file mode 100644 index 000000000000..2413e5916a25 --- /dev/null +++ b/core/lib/multivm/src/versions/vm_fast/tracers/calls.rs @@ -0,0 +1,159 @@ +use zksync_system_constants::CONTRACT_DEPLOYER_ADDRESS; +use zksync_types::{zk_evm_types::FarCallOpcode, U256}; +use zksync_vm2::{ + interface::{ + CallframeInterface, CallingMode, Opcode, OpcodeType, ReturnType, ShouldStop, + StateInterface, Tracer, + }, + FatPointer, +}; + +use crate::{ + interface::{Call, CallType, VmRevertReason}, + vm_fast::utils::read_raw_fat_pointer, +}; + +/// Call tracer for the fast VM. +#[derive(Debug, Clone, Default)] +pub struct CallTracer { + stack: Vec, + finished_calls: Vec, + current_stack_depth: usize, + // TODO: report as metrics + max_stack_depth: usize, + max_near_calls: usize, +} + +#[derive(Debug, Clone)] +struct FarcallAndNearCallCount { + farcall: Call, + near_calls_after: usize, +} + +impl CallTracer { + /// Converts this tracer into the captured calls. 
+ pub fn into_result(self) -> Vec { + self.finished_calls + } +} + +impl Tracer for CallTracer { + fn after_instruction( + &mut self, + state: &mut S, + ) -> ShouldStop { + match OP::VALUE { + Opcode::FarCall(ty) => { + self.current_stack_depth += 1; + self.max_stack_depth = self.max_stack_depth.max(self.current_stack_depth); + + let current_gas = state.current_frame().gas() as u64; + let from = state.current_frame().caller(); + let to = state.current_frame().address(); + let input = if current_gas == 0 { + vec![] + } else { + read_raw_fat_pointer(state, state.read_register(1).0) + }; + let value = U256::from(state.current_frame().context_u128()); + let ty = match ty { + CallingMode::Normal => CallType::Call(FarCallOpcode::Normal), + CallingMode::Delegate => CallType::Call(FarCallOpcode::Delegate), + CallingMode::Mimic => { + let prev_this_address = state.callframe(1).address(); + if prev_this_address == CONTRACT_DEPLOYER_ADDRESS { + // EraVM contract creation is encoded as a mimic call from `ContractDeployer` to the created contract. 
+ CallType::Create + } else { + CallType::Call(FarCallOpcode::Mimic) + } + } + }; + + self.stack.push(FarcallAndNearCallCount { + farcall: Call { + r#type: ty, + from, + to, + // The previous frame always exists directly after a far call + parent_gas: current_gas + state.callframe(1).gas() as u64, + gas: current_gas, + input, + value, + ..Default::default() + }, + near_calls_after: 0, + }); + } + Opcode::NearCall => { + self.current_stack_depth += 1; + self.max_stack_depth = self.max_stack_depth.max(self.current_stack_depth); + + if let Some(frame) = self.stack.last_mut() { + frame.near_calls_after += 1; + self.max_near_calls = self.max_near_calls.max(frame.near_calls_after); + } + } + Opcode::Ret(variant) => { + self.current_stack_depth -= 1; + + let Some(mut current_call) = self.stack.pop() else { + return ShouldStop::Continue; + }; + + if current_call.near_calls_after == 0 { + // Might overflow due to stipend + current_call.farcall.gas_used = current_call + .farcall + .parent_gas + .saturating_sub(state.current_frame().gas() as u64); + + let (maybe_output_ptr, is_pointer) = state.read_register(1); + let output = if is_pointer { + let output_ptr = FatPointer::from(maybe_output_ptr); + if output_ptr.length == 0 && output_ptr.offset == 0 { + // Trivial pointer, which is formally cannot be dereferenced. This only matters + // when extracting the revert reason; the legacy VM treats the trivial pointer specially. 
+ None + } else { + Some(read_raw_fat_pointer(state, maybe_output_ptr)) + } + } else { + None + }; + + match variant { + ReturnType::Normal => { + current_call.farcall.output = output.unwrap_or_default(); + } + ReturnType::Revert => { + current_call.farcall.revert_reason = + Some(if let Some(output) = &output { + VmRevertReason::from(output.as_slice()).to_string() + } else { + "Unknown revert reason".to_owned() + }); + } + ReturnType::Panic => { + current_call.farcall.error = Some("Panic".to_string()); + } + } + + // If there is a parent call, push the current call to it + // Otherwise, put the current call back on the stack, because it's the top level call + if let Some(parent_call) = self.stack.last_mut() { + parent_call.farcall.calls.push(current_call.farcall); + } else { + self.finished_calls.push(current_call.farcall); + } + } else { + current_call.near_calls_after -= 1; + self.stack.push(current_call); + } + } + _ => {} + } + + ShouldStop::Continue + } +} diff --git a/core/lib/multivm/src/versions/vm_fast/tracers/evm_deploy.rs b/core/lib/multivm/src/versions/vm_fast/tracers/evm_deploy.rs index 1202b5b94dd2..deb06366eb36 100644 --- a/core/lib/multivm/src/versions/vm_fast/tracers/evm_deploy.rs +++ b/core/lib/multivm/src/versions/vm_fast/tracers/evm_deploy.rs @@ -8,7 +8,7 @@ use zksync_vm2::interface::{ CallframeInterface, CallingMode, GlobalStateInterface, Opcode, OpcodeType, ShouldStop, Tracer, }; -use crate::vm_fast::utils::read_fat_pointer; +use crate::vm_fast::utils::read_raw_fat_pointer; /// Container for dynamic bytecodes added by [`EvmDeployTracer`]. 
#[derive(Debug, Clone, Default)] @@ -54,7 +54,7 @@ impl EvmDeployTracer { return; } - let data = read_fat_pointer(state, state.read_register(1).0); + let data = read_raw_fat_pointer(state, state.read_register(1).0); if data.len() < 4 { return; } diff --git a/core/lib/multivm/src/versions/vm_fast/tracers/mod.rs b/core/lib/multivm/src/versions/vm_fast/tracers/mod.rs index 3d9602536743..db527bdbaceb 100644 --- a/core/lib/multivm/src/versions/vm_fast/tracers/mod.rs +++ b/core/lib/multivm/src/versions/vm_fast/tracers/mod.rs @@ -3,10 +3,14 @@ use zksync_vm2::interface::{CycleStats, GlobalStateInterface, OpcodeType, ShouldStop, Tracer}; pub(super) use self::evm_deploy::DynamicBytecodes; -pub use self::validation::{FullValidationTracer, ValidationTracer}; +pub use self::{ + calls::CallTracer, + validation::{FullValidationTracer, ValidationTracer}, +}; use self::{circuits::CircuitsTracer, evm_deploy::EvmDeployTracer}; use crate::interface::CircuitStatistic; +mod calls; mod circuits; mod evm_deploy; mod validation; diff --git a/core/lib/multivm/src/versions/vm_fast/tracers/validation.rs b/core/lib/multivm/src/versions/vm_fast/tracers/validation.rs index 52b0a4747b7d..6833051c5407 100644 --- a/core/lib/multivm/src/versions/vm_fast/tracers/validation.rs +++ b/core/lib/multivm/src/versions/vm_fast/tracers/validation.rs @@ -14,7 +14,7 @@ use zksync_vm_interface::tracer::{ TimestampAsserterParams, ValidationParams, ValidationTraces, ViolatedValidationRule, }; -use crate::{tracers::TIMESTAMP_ASSERTER_FUNCTION_SELECTOR, vm_fast::utils::read_fat_pointer}; +use crate::{tracers::TIMESTAMP_ASSERTER_FUNCTION_SELECTOR, vm_fast::utils::read_raw_fat_pointer}; /// [`Tracer`] used for account validation per [EIP-4337] and [EIP-7562]. 
/// @@ -157,7 +157,7 @@ impl Tracer for FullValidationTracer { // Intercept calls to keccak, whitelist storage slots corresponding to the hash let code_address = state.current_frame().code_address(); if code_address == KECCAK256_PRECOMPILE_ADDRESS { - let calldata = read_fat_pointer(state, state.read_register(1).0); + let calldata = read_raw_fat_pointer(state, state.read_register(1).0); if calldata.len() != 64 { return ShouldStop::Continue; } @@ -185,7 +185,7 @@ impl Tracer for FullValidationTracer { if let Some(ref params) = self.timestamp_asserter_params { if code_address == params.address { - let calldata = read_fat_pointer(state, state.read_register(1).0); + let calldata = read_raw_fat_pointer(state, state.read_register(1).0); if calldata.len() == 68 && calldata[..4] == TIMESTAMP_ASSERTER_FUNCTION_SELECTOR { @@ -215,7 +215,7 @@ impl Tracer for FullValidationTracer { } Ret(kind) => { if self.add_return_value_to_allowed_slots && kind == Normal { - let return_value = read_fat_pointer(state, state.read_register(1).0); + let return_value = read_raw_fat_pointer(state, state.read_register(1).0); self.slots_obtained_via_keccak .insert(return_value.as_slice().into()); } diff --git a/core/lib/multivm/src/versions/vm_fast/transaction_data.rs b/core/lib/multivm/src/versions/vm_fast/transaction_data.rs deleted file mode 100644 index 02697beee341..000000000000 --- a/core/lib/multivm/src/versions/vm_fast/transaction_data.rs +++ /dev/null @@ -1,340 +0,0 @@ -use std::convert::TryInto; - -use zksync_types::{ - address_to_h256, - bytecode::BytecodeHash, - ethabi::{encode, Address, Token}, - fee::{encoding_len, Fee}, - h256_to_u256, - l1::is_l1_tx_type, - l2::{L2Tx, TransactionType}, - transaction_request::{PaymasterParams, TransactionRequest}, - web3::Bytes, - Execute, ExecuteTransactionCommon, L2ChainId, L2TxCommonData, Nonce, Transaction, H256, U256, -}; - -use crate::{ - utils::bytecode::bytes_to_be_words, - vm_latest::{ - constants::{MAX_GAS_PER_PUBDATA_BYTE, 
TX_MAX_COMPUTE_GAS_LIMIT}, - utils::overhead::derive_overhead, - }, -}; - -/// This structure represents the data that is used by -/// the Bootloader to describe the transaction. -#[derive(Debug, Default, Clone)] -pub(crate) struct TransactionData { - pub(crate) tx_type: u8, - pub(crate) from: Address, - pub(crate) to: Option
, - pub(crate) gas_limit: U256, - pub(crate) pubdata_price_limit: U256, - pub(crate) max_fee_per_gas: U256, - pub(crate) max_priority_fee_per_gas: U256, - pub(crate) paymaster: Address, - pub(crate) nonce: U256, - pub(crate) value: U256, - // The reserved fields that are unique for different types of transactions. - // E.g. nonce is currently used in all transaction, but it should not be mandatory - // in the long run. - pub(crate) reserved: [U256; 4], - pub(crate) data: Vec, - pub(crate) signature: Vec, - // The factory deps provided with the transaction. - // Note that *only hashes* of these bytecodes are signed by the user - // and they are used in the ABI encoding of the struct. - // TODO: include this into the tx signature as part of SMA-1010 - pub(crate) factory_deps: Vec>, - pub(crate) paymaster_input: Vec, - pub(crate) reserved_dynamic: Vec, - pub(crate) raw_bytes: Option>, -} - -impl From for TransactionData { - fn from(execute_tx: Transaction) -> Self { - match execute_tx.common_data { - ExecuteTransactionCommon::L2(common_data) => { - let nonce = U256::from_big_endian(&common_data.nonce.to_be_bytes()); - - let should_check_chain_id = if matches!( - common_data.transaction_type, - TransactionType::LegacyTransaction - ) && common_data.extract_chain_id().is_some() - { - U256([1, 0, 0, 0]) - } else { - U256::zero() - }; - - // Ethereum transactions do not sign gas per pubdata limit, and so for them we need to use - // some default value. We use the maximum possible value that is allowed by the bootloader - // (i.e. we can not use u64::MAX, because the bootloader requires gas per pubdata for such - // transactions to be higher than `MAX_GAS_PER_PUBDATA_BYTE`). 
- let gas_per_pubdata_limit = if common_data.transaction_type.is_ethereum_type() { - MAX_GAS_PER_PUBDATA_BYTE.into() - } else { - common_data.fee.gas_per_pubdata_limit - }; - - TransactionData { - tx_type: (common_data.transaction_type as u32) as u8, - from: common_data.initiator_address, - to: execute_tx.execute.contract_address, - gas_limit: common_data.fee.gas_limit, - pubdata_price_limit: gas_per_pubdata_limit, - max_fee_per_gas: common_data.fee.max_fee_per_gas, - max_priority_fee_per_gas: common_data.fee.max_priority_fee_per_gas, - paymaster: common_data.paymaster_params.paymaster, - nonce, - value: execute_tx.execute.value, - reserved: [ - should_check_chain_id, - U256::zero(), - U256::zero(), - U256::zero(), - ], - data: execute_tx.execute.calldata, - signature: common_data.signature, - factory_deps: execute_tx.execute.factory_deps, - paymaster_input: common_data.paymaster_params.paymaster_input, - reserved_dynamic: vec![], - raw_bytes: execute_tx.raw_bytes.map(|a| a.0), - } - } - ExecuteTransactionCommon::L1(common_data) => { - let refund_recipient = h256_to_u256(address_to_h256(&common_data.refund_recipient)); - TransactionData { - tx_type: common_data.tx_format() as u8, - from: common_data.sender, - to: execute_tx.execute.contract_address, - gas_limit: common_data.gas_limit, - pubdata_price_limit: common_data.gas_per_pubdata_limit, - // It doesn't matter what we put here, since - // the bootloader does not charge anything - max_fee_per_gas: common_data.max_fee_per_gas, - max_priority_fee_per_gas: U256::zero(), - paymaster: Address::default(), - nonce: U256::from(common_data.serial_id.0), // priority op ID - value: execute_tx.execute.value, - reserved: [ - common_data.to_mint, - refund_recipient, - U256::zero(), - U256::zero(), - ], - data: execute_tx.execute.calldata, - // The signature isn't checked for L1 transactions so we don't care - signature: vec![], - factory_deps: execute_tx.execute.factory_deps, - paymaster_input: vec![], - reserved_dynamic: 
vec![], - raw_bytes: None, - } - } - ExecuteTransactionCommon::ProtocolUpgrade(common_data) => { - let refund_recipient = h256_to_u256(address_to_h256(&common_data.refund_recipient)); - TransactionData { - tx_type: common_data.tx_format() as u8, - from: common_data.sender, - to: execute_tx.execute.contract_address, - gas_limit: common_data.gas_limit, - pubdata_price_limit: common_data.gas_per_pubdata_limit, - // It doesn't matter what we put here, since - // the bootloader does not charge anything - max_fee_per_gas: common_data.max_fee_per_gas, - max_priority_fee_per_gas: U256::zero(), - paymaster: Address::default(), - nonce: U256::from(common_data.upgrade_id as u16), - value: execute_tx.execute.value, - reserved: [ - common_data.to_mint, - refund_recipient, - U256::zero(), - U256::zero(), - ], - data: execute_tx.execute.calldata, - // The signature isn't checked for L1 transactions so we don't care - signature: vec![], - factory_deps: execute_tx.execute.factory_deps, - paymaster_input: vec![], - reserved_dynamic: vec![], - raw_bytes: None, - } - } - } - } -} - -impl TransactionData { - pub(crate) fn abi_encode_with_custom_factory_deps( - self, - factory_deps_hashes: Vec, - ) -> Vec { - encode(&[Token::Tuple(vec![ - Token::Uint(U256::from_big_endian(&self.tx_type.to_be_bytes())), - Token::Address(self.from), - Token::Address(self.to.unwrap_or_default()), - Token::Uint(self.gas_limit), - Token::Uint(self.pubdata_price_limit), - Token::Uint(self.max_fee_per_gas), - Token::Uint(self.max_priority_fee_per_gas), - Token::Address(self.paymaster), - Token::Uint(self.nonce), - Token::Uint(self.value), - Token::FixedArray(self.reserved.iter().copied().map(Token::Uint).collect()), - Token::Bytes(self.data), - Token::Bytes(self.signature), - Token::Array(factory_deps_hashes.into_iter().map(Token::Uint).collect()), - Token::Bytes(self.paymaster_input), - Token::Bytes(self.reserved_dynamic), - ])]) - } - - pub(crate) fn abi_encode(self) -> Vec { - let factory_deps_hashes = self 
- .factory_deps - .iter() - .map(|dep| BytecodeHash::for_bytecode(dep).value_u256()) - .collect(); - self.abi_encode_with_custom_factory_deps(factory_deps_hashes) - } - - pub(crate) fn into_tokens(self) -> Vec { - bytes_to_be_words(&self.abi_encode()) - } - - pub(crate) fn overhead_gas(&self) -> u32 { - let encoded_len = encoding_len( - self.data.len() as u64, - self.signature.len() as u64, - self.factory_deps.len() as u64, - self.paymaster_input.len() as u64, - self.reserved_dynamic.len() as u64, - ); - - derive_overhead(encoded_len) - } - - pub(crate) fn trusted_ergs_limit(&self) -> U256 { - // No transaction is allowed to spend more than `TX_MAX_COMPUTE_GAS_LIMIT` gas on compute. - U256::from(TX_MAX_COMPUTE_GAS_LIMIT).min(self.gas_limit) - } - - pub(crate) fn tx_hash(&self, chain_id: L2ChainId) -> H256 { - if is_l1_tx_type(self.tx_type) { - return self.canonical_l1_tx_hash().unwrap(); - } - - let l2_tx: L2Tx = self.clone().try_into().unwrap(); - let mut transaction_request: TransactionRequest = l2_tx.into(); - transaction_request.chain_id = Some(chain_id.as_u64()); - - // It is assumed that the `TransactionData` always has all the necessary components to recover the hash. 
- transaction_request - .get_tx_hash() - .expect("Could not recover L2 transaction hash") - } - - fn canonical_l1_tx_hash(&self) -> Result { - use zksync_types::web3::keccak256; - - if !is_l1_tx_type(self.tx_type) { - return Err(TxHashCalculationError::CannotCalculateL1HashForL2Tx); - } - - let encoded_bytes = self.clone().abi_encode(); - - Ok(H256(keccak256(&encoded_bytes))) - } -} - -#[derive(Debug, Clone, Copy)] -pub(crate) enum TxHashCalculationError { - CannotCalculateL1HashForL2Tx, - CannotCalculateL2HashForL1Tx, -} - -impl TryInto for TransactionData { - type Error = TxHashCalculationError; - - fn try_into(self) -> Result { - if is_l1_tx_type(self.tx_type) { - return Err(TxHashCalculationError::CannotCalculateL2HashForL1Tx); - } - - let common_data = L2TxCommonData { - transaction_type: (self.tx_type as u32).try_into().unwrap(), - nonce: Nonce(self.nonce.as_u32()), - fee: Fee { - max_fee_per_gas: self.max_fee_per_gas, - max_priority_fee_per_gas: self.max_priority_fee_per_gas, - gas_limit: self.gas_limit, - gas_per_pubdata_limit: self.pubdata_price_limit, - }, - signature: self.signature, - input: None, - initiator_address: self.from, - paymaster_params: PaymasterParams { - paymaster: self.paymaster, - paymaster_input: self.paymaster_input, - }, - }; - let execute = Execute { - contract_address: self.to, - value: self.value, - calldata: self.data, - factory_deps: self.factory_deps, - }; - - Ok(L2Tx { - execute, - common_data, - received_timestamp_ms: 0, - raw_bytes: self.raw_bytes.map(Bytes::from), - }) - } -} - -#[cfg(test)] -mod tests { - use zksync_types::fee::encoding_len; - - use super::*; - - #[test] - fn test_consistency_with_encoding_length() { - let transaction = TransactionData { - tx_type: 113, - from: Address::random(), - to: Some(Address::random()), - gas_limit: U256::from(1u32), - pubdata_price_limit: U256::from(1u32), - max_fee_per_gas: U256::from(1u32), - max_priority_fee_per_gas: U256::from(1u32), - paymaster: Address::random(), - nonce: 
U256::zero(), - value: U256::zero(), - // The reserved fields that are unique for different types of transactions. - // E.g. nonce is currently used in all transaction, but it should not be mandatory - // in the long run. - reserved: [U256::zero(); 4], - data: vec![0u8; 65], - signature: vec![0u8; 75], - // The factory deps provided with the transaction. - // Note that *only hashes* of these bytecodes are signed by the user - // and they are used in the ABI encoding of the struct. - // TODO: include this into the tx signature as part of SMA-1010 - factory_deps: vec![vec![0u8; 32], vec![1u8; 32]], - paymaster_input: vec![0u8; 85], - reserved_dynamic: vec![0u8; 32], - raw_bytes: None, - }; - - let assumed_encoded_len = encoding_len(65, 75, 2, 85, 32); - - let true_encoding_len = transaction.into_tokens().len(); - - assert_eq!(assumed_encoded_len, true_encoding_len); - } -} diff --git a/core/lib/multivm/src/versions/vm_fast/utils.rs b/core/lib/multivm/src/versions/vm_fast/utils.rs index 20a6545d3385..8b3f57be988d 100644 --- a/core/lib/multivm/src/versions/vm_fast/utils.rs +++ b/core/lib/multivm/src/versions/vm_fast/utils.rs @@ -1,8 +1,11 @@ use zksync_types::U256; use zksync_vm2::{interface::StateInterface, FatPointer}; -pub(super) fn read_fat_pointer(state: &S, raw: U256) -> Vec { - let pointer = FatPointer::from(raw); +pub(super) fn read_raw_fat_pointer(state: &S, raw: U256) -> Vec { + read_fat_pointer(state, FatPointer::from(raw)) +} + +pub(super) fn read_fat_pointer(state: &S, pointer: FatPointer) -> Vec { let length = pointer.length - pointer.offset; let start = pointer.start + pointer.offset; let mut result = vec![0; length as usize]; diff --git a/core/lib/multivm/src/versions/vm_fast/vm.rs b/core/lib/multivm/src/versions/vm_fast/vm.rs index 6b14409a2e08..5065b8a7c67d 100644 --- a/core/lib/multivm/src/versions/vm_fast/vm.rs +++ b/core/lib/multivm/src/versions/vm_fast/vm.rs @@ -25,12 +25,8 @@ use zksync_vm2::{ }; use super::{ - bootloader_state::{BootloaderState, 
BootloaderStateSnapshot}, bytecode::compress_bytecodes, - hook::Hook, - initial_bootloader_memory::bootloader_initial_memory, tracers::{DynamicBytecodes, ValidationTracer, WithBuiltinTracers}, - transaction_data::TransactionData, }; use crate::{ glue::GlueInto, @@ -44,16 +40,19 @@ use crate::{ VmInterfaceHistoryEnabled, VmRevertReason, VmTrackingContracts, }, utils::events::extract_l2tol1logs_from_l1_messenger, - vm_fast::{ - bootloader_state::utils::{apply_l2_block, apply_pubdata_to_memory}, - events::merge_events, - refund::compute_refund, - version::FastVmVersion, - }, - vm_latest::constants::{ - get_operator_refunds_offset, get_result_success_first_slot, - get_vm_hook_params_start_position, get_vm_hook_position, TX_GAS_LIMIT_OFFSET, - VM_HOOK_PARAMS_COUNT, + vm_fast::{events::merge_events, version::FastVmVersion}, + vm_latest::{ + bootloader::{ + utils::{apply_l2_block, apply_pubdata_to_memory}, + BootloaderState, BootloaderStateSnapshot, + }, + constants::{ + get_operator_refunds_offset, get_result_success_first_slot, + get_vm_hook_params_start_position, get_vm_hook_position, TX_GAS_LIMIT_OFFSET, + VM_HOOK_PARAMS_COUNT, + }, + utils::refund::compute_refund, + TransactionData, VmHook, }, VmVersion, }; @@ -136,7 +135,7 @@ impl Vm { &system_env.base_system_smart_contracts.bootloader, true, ); - let bootloader_memory = bootloader_initial_memory(&batch_env); + let bootloader_memory = BootloaderState::initial_memory(&batch_env); let mut inner = VirtualMachine::new( BOOTLOADER_ADDRESS, @@ -258,11 +257,11 @@ impl Vm { pub(crate) fn push_transaction_inner( &mut self, - tx: zksync_types::Transaction, + tx: Transaction, refund: u64, with_compression: bool, ) { - let tx: TransactionData = tx.into(); + let tx = TransactionData::new(tx, false); let overhead = tx.overhead_gas(); self.insert_bytecodes(tx.factory_deps.iter().map(|dep| &dep[..])); @@ -432,9 +431,9 @@ where } }; - let hook = Hook::from_u32(hook); + let hook = VmHook::new(hook); match hook { - 
Hook::AccountValidationEntered => { + VmHook::AccountValidationEntered => { assert!( account_validation_gas_split.is_none(), "Account validation can't be nested" @@ -453,7 +452,7 @@ where self.inner.current_frame().set_gas(gas_given); } - Hook::ValidationExited => { + VmHook::ValidationExited => { tracer.validation.validation_exited(); if let Some(AccountValidationGasSplit { @@ -467,13 +466,13 @@ where } } - Hook::ValidationStepEnded => { + VmHook::ValidationStepEnded => { if Val::STOP_AFTER_VALIDATION { break (ExecutionResult::Success { output: vec![] }, true); } } - Hook::TxHasEnded => { + VmHook::TxHasEnded => { if let VmExecutionMode::OneTx = execution_mode { // The bootloader may invoke `TxHasEnded` hook without posting a tx result previously. One case when this can happen // is estimating gas for L1 transactions, if a transaction runs out of gas during execution. @@ -493,7 +492,7 @@ where break (tx_result, false); } } - Hook::AskOperatorForRefund => { + VmHook::AskOperatorForRefund => { if track_refunds { let [bootloader_refund, gas_spent_on_pubdata, gas_per_pubdata_byte] = self.get_hook_params(); @@ -535,12 +534,12 @@ where .set_refund_for_current_tx(refund_value); } } - Hook::NotifyAboutRefund => { + VmHook::NotifyAboutRefund => { if track_refunds { refunds.gas_refunded = self.get_hook_params()[0].low_u64() } } - Hook::PostResult => { + VmHook::PostResult => { let result = self.get_hook_params()[0]; let value = self.get_hook_params()[1]; let fp = FatPointer::from(value); @@ -556,7 +555,7 @@ where } }); } - Hook::FinalBatchInfo => { + VmHook::FinalBatchInfo => { // set fictive l2 block let txs_index = self.bootloader_state.free_tx_index(); let l2_block = self.bootloader_state.insert_fictive_l2_block(); @@ -564,7 +563,7 @@ where apply_l2_block(&mut memory, l2_block, txs_index, self.vm_version.into()); self.write_to_bootloader_heap(memory); } - Hook::PubdataRequested => { + VmHook::PubdataRequested => { if !matches!(execution_mode, VmExecutionMode::Batch) { 
unreachable!("We do not provide the pubdata when executing the block tip or a single transaction"); } @@ -614,14 +613,14 @@ where self.write_to_bootloader_heap(memory_to_apply); } - Hook::PaymasterValidationEntered => { /* unused */ } - Hook::DebugLog => { + VmHook::PaymasterValidationEntered => { /* unused */ } + VmHook::DebugLog => { let (log, log_arg) = self.get_debug_log(); let last_tx = self.bootloader_state.last_l2_block().txs.last(); let tx_hash = last_tx.map(|tx| tx.hash); tracing::trace!(tx = ?tx_hash, "{log}: {log_arg}"); } - Hook::DebugReturnData | Hook::NearCallCatch => { + VmHook::DebugReturnData | VmHook::NearCallCatch => { // These hooks are for debug purposes only } } diff --git a/core/lib/multivm/src/versions/vm_latest/bootloader/init.rs b/core/lib/multivm/src/versions/vm_latest/bootloader/init.rs new file mode 100644 index 000000000000..7897ada6ad23 --- /dev/null +++ b/core/lib/multivm/src/versions/vm_latest/bootloader/init.rs @@ -0,0 +1,46 @@ +use zksync_types::{address_to_u256, h256_to_u256, U256}; + +use super::BootloaderState; +use crate::{interface::L1BatchEnv, vm_latest::utils::fee::get_batch_base_fee}; + +const OPERATOR_ADDRESS_SLOT: usize = 0; +const PREV_BLOCK_HASH_SLOT: usize = 1; +const NEW_BLOCK_TIMESTAMP_SLOT: usize = 2; +const NEW_BLOCK_NUMBER_SLOT: usize = 3; +const FAIR_PUBDATA_PRICE_SLOT: usize = 4; +const FAIR_L2_GAS_PRICE_SLOT: usize = 5; +const EXPECTED_BASE_FEE_SLOT: usize = 6; +const SHOULD_SET_NEW_BLOCK_SLOT: usize = 7; + +impl BootloaderState { + /// Returns the initial memory for the bootloader based on the current batch environment. 
+ pub(crate) fn initial_memory(l1_batch: &L1BatchEnv) -> Vec<(usize, U256)> { + let (prev_block_hash, should_set_new_block) = l1_batch + .previous_batch_hash + .map(|prev_block_hash| (h256_to_u256(prev_block_hash), U256::one())) + .unwrap_or_default(); + + vec![ + ( + OPERATOR_ADDRESS_SLOT, + address_to_u256(&l1_batch.fee_account), + ), + (PREV_BLOCK_HASH_SLOT, prev_block_hash), + (NEW_BLOCK_TIMESTAMP_SLOT, U256::from(l1_batch.timestamp)), + (NEW_BLOCK_NUMBER_SLOT, U256::from(l1_batch.number.0)), + ( + FAIR_PUBDATA_PRICE_SLOT, + U256::from(l1_batch.fee_input.fair_pubdata_price()), + ), + ( + FAIR_L2_GAS_PRICE_SLOT, + U256::from(l1_batch.fee_input.fair_l2_gas_price()), + ), + ( + EXPECTED_BASE_FEE_SLOT, + U256::from(get_batch_base_fee(l1_batch)), + ), + (SHOULD_SET_NEW_BLOCK_SLOT, should_set_new_block), + ] + } +} diff --git a/core/lib/multivm/src/versions/vm_latest/bootloader_state/l2_block.rs b/core/lib/multivm/src/versions/vm_latest/bootloader/l2_block.rs similarity index 97% rename from core/lib/multivm/src/versions/vm_latest/bootloader_state/l2_block.rs rename to core/lib/multivm/src/versions/vm_latest/bootloader/l2_block.rs index 95502b8dc60c..922f5384ab3f 100644 --- a/core/lib/multivm/src/versions/vm_latest/bootloader_state/l2_block.rs +++ b/core/lib/multivm/src/versions/vm_latest/bootloader/l2_block.rs @@ -5,7 +5,7 @@ use zksync_types::{web3::keccak256_concat, L2BlockNumber, H256}; use crate::{ interface::{L2Block, L2BlockEnv}, vm_latest::{ - bootloader_state::{snapshot::L2BlockSnapshot, tx::BootloaderTx}, + bootloader::{snapshot::L2BlockSnapshot, tx::BootloaderTx}, utils::l2_blocks::l2_block_hash, }, }; diff --git a/core/lib/multivm/src/versions/vm_latest/bootloader/mod.rs b/core/lib/multivm/src/versions/vm_latest/bootloader/mod.rs new file mode 100644 index 000000000000..02f67f322f0c --- /dev/null +++ b/core/lib/multivm/src/versions/vm_latest/bootloader/mod.rs @@ -0,0 +1,9 @@ +pub(crate) use self::snapshot::BootloaderStateSnapshot; +pub use 
self::state::BootloaderState; + +mod init; +mod l2_block; +mod snapshot; +mod state; +mod tx; +pub(crate) mod utils; diff --git a/core/lib/multivm/src/versions/vm_fast/bootloader_state/snapshot.rs b/core/lib/multivm/src/versions/vm_latest/bootloader/snapshot.rs similarity index 100% rename from core/lib/multivm/src/versions/vm_fast/bootloader_state/snapshot.rs rename to core/lib/multivm/src/versions/vm_latest/bootloader/snapshot.rs diff --git a/core/lib/multivm/src/versions/vm_latest/bootloader_state/state.rs b/core/lib/multivm/src/versions/vm_latest/bootloader/state.rs similarity index 99% rename from core/lib/multivm/src/versions/vm_latest/bootloader_state/state.rs rename to core/lib/multivm/src/versions/vm_latest/bootloader/state.rs index 8897ed2dc6e0..0b65d961d3ac 100644 --- a/core/lib/multivm/src/versions/vm_latest/bootloader_state/state.rs +++ b/core/lib/multivm/src/versions/vm_latest/bootloader/state.rs @@ -11,13 +11,13 @@ use crate::{ TxExecutionMode, }, vm_latest::{ - bootloader_state::{ + bootloader::{ l2_block::BootloaderL2Block, snapshot::BootloaderStateSnapshot, utils::{apply_l2_block, apply_tx_to_memory}, }, constants::get_tx_description_offset, - types::internals::TransactionData, + types::TransactionData, utils::l2_blocks::assert_next_block, MultiVmSubversion, }, diff --git a/core/lib/multivm/src/versions/vm_latest/bootloader_state/tx.rs b/core/lib/multivm/src/versions/vm_latest/bootloader/tx.rs similarity index 98% rename from core/lib/multivm/src/versions/vm_latest/bootloader_state/tx.rs rename to core/lib/multivm/src/versions/vm_latest/bootloader/tx.rs index 2c63db7e4354..00c7ae43c58f 100644 --- a/core/lib/multivm/src/versions/vm_latest/bootloader_state/tx.rs +++ b/core/lib/multivm/src/versions/vm_latest/bootloader/tx.rs @@ -1,6 +1,6 @@ use zksync_types::{L2ChainId, H256, U256}; -use crate::{interface::CompressedBytecodeInfo, vm_latest::types::internals::TransactionData}; +use crate::{interface::CompressedBytecodeInfo, 
vm_latest::types::TransactionData}; /// Information about tx necessary for execution in bootloader. #[derive(Debug, Clone)] diff --git a/core/lib/multivm/src/versions/vm_latest/bootloader_state/utils.rs b/core/lib/multivm/src/versions/vm_latest/bootloader/utils.rs similarity index 99% rename from core/lib/multivm/src/versions/vm_latest/bootloader_state/utils.rs rename to core/lib/multivm/src/versions/vm_latest/bootloader/utils.rs index 77a8ef3a1a71..e309c6c45acb 100644 --- a/core/lib/multivm/src/versions/vm_latest/bootloader_state/utils.rs +++ b/core/lib/multivm/src/versions/vm_latest/bootloader/utils.rs @@ -8,7 +8,7 @@ use crate::{ }, utils::bytecode, vm_latest::{ - bootloader_state::l2_block::BootloaderL2Block, + bootloader::l2_block::BootloaderL2Block, constants::{ get_bootloader_tx_description_offset, get_compressed_bytecodes_offset, get_operator_provided_l1_messenger_pubdata_offset, get_operator_refunds_offset, diff --git a/core/lib/multivm/src/versions/vm_latest/bootloader_state/mod.rs b/core/lib/multivm/src/versions/vm_latest/bootloader_state/mod.rs deleted file mode 100644 index 73830de2759b..000000000000 --- a/core/lib/multivm/src/versions/vm_latest/bootloader_state/mod.rs +++ /dev/null @@ -1,8 +0,0 @@ -mod l2_block; -mod snapshot; -mod state; -mod tx; - -pub(crate) mod utils; -pub(crate) use snapshot::BootloaderStateSnapshot; -pub use state::BootloaderState; diff --git a/core/lib/multivm/src/versions/vm_latest/bootloader_state/snapshot.rs b/core/lib/multivm/src/versions/vm_latest/bootloader_state/snapshot.rs deleted file mode 100644 index 8f1cec3cb7f1..000000000000 --- a/core/lib/multivm/src/versions/vm_latest/bootloader_state/snapshot.rs +++ /dev/null @@ -1,25 +0,0 @@ -use zksync_types::H256; - -#[derive(Debug, Clone)] -pub(crate) struct BootloaderStateSnapshot { - /// ID of the next transaction to be executed. 
- pub(crate) tx_to_execute: usize, - /// Stored L2 blocks in bootloader memory - pub(crate) l2_blocks_len: usize, - /// Snapshot of the last L2 block. Only this block could be changed during the rollback - pub(crate) last_l2_block: L2BlockSnapshot, - /// The number of 32-byte words spent on the already included compressed bytecodes. - pub(crate) compressed_bytecodes_encoding: usize, - /// Current offset of the free space in the bootloader memory. - pub(crate) free_tx_offset: usize, - /// Whether the pubdata information has been provided already - pub(crate) is_pubdata_information_provided: bool, -} - -#[derive(Debug, Clone)] -pub(crate) struct L2BlockSnapshot { - /// The rolling hash of all the transactions in the miniblock - pub(crate) txs_rolling_hash: H256, - /// The number of transactions in the last L2 block - pub(crate) txs_len: usize, -} diff --git a/core/lib/multivm/src/versions/vm_latest/implementation/snapshots.rs b/core/lib/multivm/src/versions/vm_latest/implementation/snapshots.rs index 377c4f548b06..82ea5829342b 100644 --- a/core/lib/multivm/src/versions/vm_latest/implementation/snapshots.rs +++ b/core/lib/multivm/src/versions/vm_latest/implementation/snapshots.rs @@ -7,7 +7,7 @@ use crate::{ interface::storage::WriteStorage, vm_latest::{ old_vm::{history_recorder::HistoryEnabled, oracles::OracleWithHistory}, - types::internals::VmSnapshot, + types::VmSnapshot, vm::Vm, }, }; diff --git a/core/lib/multivm/src/versions/vm_latest/implementation/tx.rs b/core/lib/multivm/src/versions/vm_latest/implementation/tx.rs index 6dd73866adf2..02adcc3cdad8 100644 --- a/core/lib/multivm/src/versions/vm_latest/implementation/tx.rs +++ b/core/lib/multivm/src/versions/vm_latest/implementation/tx.rs @@ -6,7 +6,7 @@ use crate::{ vm_latest::{ constants::BOOTLOADER_HEAP_PAGE, implementation::bytecode::{bytecode_to_factory_dep, compress_bytecodes}, - types::internals::TransactionData, + types::TransactionData, vm::Vm, }, HistoryMode, diff --git 
a/core/lib/multivm/src/versions/vm_latest/mod.rs b/core/lib/multivm/src/versions/vm_latest/mod.rs index 46f8db789ddc..4e739efe9516 100644 --- a/core/lib/multivm/src/versions/vm_latest/mod.rs +++ b/core/lib/multivm/src/versions/vm_latest/mod.rs @@ -1,6 +1,5 @@ -pub(crate) use self::vm::MultiVmSubversion; pub use self::{ - bootloader_state::BootloaderState, + bootloader::BootloaderState, old_vm::{ history_recorder::{ AppDataFrameManagerWithHistory, HistoryDisabled, HistoryEnabled, HistoryMode, @@ -12,12 +11,16 @@ pub use self::{ dispatcher::TracerDispatcher, traits::{ToTracerPointer, TracerPointer, VmTracer}, }, - types::internals::ZkSyncVmState, + types::ZkSyncVmState, utils::transaction_encoding::TransactionVmExt, vm::Vm, }; +pub(crate) use self::{ + types::{TransactionData, VmHook}, + vm::MultiVmSubversion, +}; -mod bootloader_state; +pub(crate) mod bootloader; pub mod constants; mod implementation; mod old_vm; diff --git a/core/lib/multivm/src/versions/vm_latest/old_vm/utils.rs b/core/lib/multivm/src/versions/vm_latest/old_vm/utils.rs index c020d1db000a..aa5155fc003b 100644 --- a/core/lib/multivm/src/versions/vm_latest/old_vm/utils.rs +++ b/core/lib/multivm/src/versions/vm_latest/old_vm/utils.rs @@ -10,7 +10,7 @@ use zksync_types::{Address, U256}; use crate::{ interface::storage::WriteStorage, - vm_latest::{old_vm::memory::SimpleMemory, types::internals::ZkSyncVmState, HistoryMode}, + vm_latest::{old_vm::memory::SimpleMemory, types::ZkSyncVmState, HistoryMode}, }; #[derive(Debug, Clone)] diff --git a/core/lib/multivm/src/versions/vm_latest/tests/call_tracer.rs b/core/lib/multivm/src/versions/vm_latest/tests/call_tracer.rs index c8f623478569..451647545671 100644 --- a/core/lib/multivm/src/versions/vm_latest/tests/call_tracer.rs +++ b/core/lib/multivm/src/versions/vm_latest/tests/call_tracer.rs @@ -1,15 +1,14 @@ use std::sync::Arc; use once_cell::sync::OnceCell; -use zksync_test_contracts::TestContract; use zksync_types::{Address, Execute}; use 
super::TestedLatestVm; use crate::{ interface::{InspectExecutionMode, TxExecutionMode, VmInterface}, tracers::CallTracer, - versions::testonly::{read_max_depth_contract, ContractToDeploy, VmTesterBuilder}, - vm_latest::{constants::BATCH_COMPUTATIONAL_GAS_LIMIT, ToTracerPointer}, + versions::testonly::{call_tracer, read_max_depth_contract, ContractToDeploy, VmTesterBuilder}, + vm_latest::{constants::BATCH_COMPUTATIONAL_GAS_LIMIT, ToTracerPointer, Vm}, }; // This test is ultra slow, so it's ignored by default. @@ -48,43 +47,31 @@ fn test_max_depth() { } #[test] -fn test_basic_behavior() { - let contract = TestContract::counter().bytecode.to_vec(); - let address = Address::repeat_byte(1); - let mut vm = VmTesterBuilder::new() - .with_empty_in_memory_storage() - .with_rich_accounts(1) - .with_bootloader_gas_limit(BATCH_COMPUTATIONAL_GAS_LIMIT) - .with_execution_mode(TxExecutionMode::VerifyExecute) - .with_custom_contracts(vec![ContractToDeploy::account(contract, address)]) - .build::(); +fn basic_behavior() { + call_tracer::test_basic_behavior::>(); +} - let increment_by_6_calldata = - "7cf5dab00000000000000000000000000000000000000000000000000000000000000006"; +#[test] +fn transfer() { + call_tracer::test_transfer::>(); +} - let account = &mut vm.rich_accounts[0]; - let tx = account.get_l2_tx_for_execute( - Execute { - contract_address: Some(address), - calldata: hex::decode(increment_by_6_calldata).unwrap(), - value: Default::default(), - factory_deps: vec![], - }, - None, - ); +#[test] +fn reverted_tx() { + call_tracer::test_reverted_tx::>(); +} - let result = Arc::new(OnceCell::new()); - let call_tracer = CallTracer::new(result.clone()).into_tracer_pointer(); - vm.vm.push_transaction(tx); - let res = vm - .vm - .inspect(&mut call_tracer.into(), InspectExecutionMode::OneTx); +#[test] +fn reverted_deployment() { + call_tracer::test_reverted_deployment_tx::>(); +} - let call_tracer_result = result.get().unwrap(); +#[test] +fn out_of_gas() { + 
call_tracer::test_out_of_gas::>(); +} - assert_eq!(call_tracer_result.len(), 1); - // Expect that there are a plenty of subcalls underneath. - let subcall = &call_tracer_result[0].calls; - assert!(subcall.len() > 10); - assert!(!res.result.is_failed()); +#[test] +fn recursive_tx() { + call_tracer::test_recursive_tx::>(); } diff --git a/core/lib/multivm/src/versions/vm_latest/tests/l1_messenger.rs b/core/lib/multivm/src/versions/vm_latest/tests/l1_messenger.rs index 7d301f33a131..f1dade9dd8e6 100644 --- a/core/lib/multivm/src/versions/vm_latest/tests/l1_messenger.rs +++ b/core/lib/multivm/src/versions/vm_latest/tests/l1_messenger.rs @@ -4,7 +4,6 @@ use crate::{ }; #[test] -#[ignore] // Requires post-gateway system contracts fn rollup_da_output_hash_match() { test_rollup_da_output_hash_match::>(); } diff --git a/core/lib/multivm/src/versions/vm_latest/tests/mod.rs b/core/lib/multivm/src/versions/vm_latest/tests/mod.rs index 0a89ddb0bf50..260b1a4eeef3 100644 --- a/core/lib/multivm/src/versions/vm_latest/tests/mod.rs +++ b/core/lib/multivm/src/versions/vm_latest/tests/mod.rs @@ -4,6 +4,7 @@ use std::{ sync::Arc, }; +use once_cell::sync::OnceCell; use zk_evm_1_5_0::{ aux_structures::{MemoryPage, Timestamp}, vm_state::VmLocalState, @@ -13,7 +14,7 @@ use zksync_types::{ bytecode::BytecodeHash, l2::L2Tx, vm::VmVersion, writes::StateDiffRecord, StorageKey, StorageValue, Transaction, H256, U256, }; -use zksync_vm_interface::VmInterface; +use zksync_vm_interface::{Call, InspectExecutionMode, VmInterface}; use super::{HistoryEnabled, ToTracerPointer, Vm}; use crate::{ @@ -23,16 +24,17 @@ use crate::{ tracer::ViolatedValidationRule, CurrentExecutionState, L2BlockEnv, VmExecutionMode, VmExecutionResultAndLogs, }, - tracers::ValidationTracer, + tracers::{CallTracer, ValidationTracer}, utils::bytecode::bytes_to_be_words, versions::testonly::{ filter_out_base_system_contracts, validation_params, TestedVm, TestedVmForValidation, + TestedVmWithCallTracer, }, vm_latest::{ 
constants::BOOTLOADER_HEAP_PAGE, old_vm::{event_sink::InMemoryEventSink, history_recorder::HistoryRecorder}, tracers::PubdataTracer, - types::internals::TransactionData, + types::TransactionData, utils::logs::StorageLogQuery, AppDataFrameManagerWithHistory, HistoryMode, SimpleMemory, TracerDispatcher, }, @@ -338,3 +340,13 @@ impl Vm (VmExecutionResultAndLogs, Vec) { + let result = Arc::new(OnceCell::new()); + let call_tracer = CallTracer::new(result.clone()).into_tracer_pointer(); + let res = self.inspect(&mut call_tracer.into(), InspectExecutionMode::OneTx); + let traces = result.get().unwrap().clone(); + (res, traces) + } +} diff --git a/core/lib/multivm/src/versions/vm_latest/tests/rollbacks.rs b/core/lib/multivm/src/versions/vm_latest/tests/rollbacks.rs index f126a7f8fbdd..5b99fc4558ed 100644 --- a/core/lib/multivm/src/versions/vm_latest/tests/rollbacks.rs +++ b/core/lib/multivm/src/versions/vm_latest/tests/rollbacks.rs @@ -18,7 +18,7 @@ use crate::{ VmTesterBuilder, }, vm_latest::{ - types::internals::ZkSyncVmState, BootloaderState, HistoryEnabled, HistoryMode, + bootloader::BootloaderState, types::ZkSyncVmState, HistoryEnabled, HistoryMode, SimpleMemory, ToTracerPointer, Vm, VmTracer, }, }; diff --git a/core/lib/multivm/src/versions/vm_latest/tracers/circuits_tracer.rs b/core/lib/multivm/src/versions/vm_latest/tracers/circuits_tracer.rs index 6a47f3ae2fbe..059c9e67eeb3 100644 --- a/core/lib/multivm/src/versions/vm_latest/tracers/circuits_tracer.rs +++ b/core/lib/multivm/src/versions/vm_latest/tracers/circuits_tracer.rs @@ -15,10 +15,10 @@ use crate::{ tracers::dynamic::vm_1_5_0::DynTracer, utils::CircuitCycleStatistic, vm_latest::{ - bootloader_state::BootloaderState, + bootloader::BootloaderState, old_vm::{history_recorder::HistoryMode, memory::SimpleMemory}, tracers::traits::VmTracer, - types::internals::ZkSyncVmState, + types::ZkSyncVmState, }, }; diff --git a/core/lib/multivm/src/versions/vm_latest/tracers/default_tracers.rs 
b/core/lib/multivm/src/versions/vm_latest/tracers/default_tracers.rs index 08ff79524b87..05afbbdb398c 100755 --- a/core/lib/multivm/src/versions/vm_latest/tracers/default_tracers.rs +++ b/core/lib/multivm/src/versions/vm_latest/tracers/default_tracers.rs @@ -23,17 +23,17 @@ use crate::{ }, tracers::dynamic::vm_1_5_0::DynTracer, vm_latest::{ - bootloader_state::{utils::apply_l2_block, BootloaderState}, + bootloader::{utils::apply_l2_block, BootloaderState}, constants::BOOTLOADER_HEAP_PAGE, old_vm::{history_recorder::HistoryMode, memory::SimpleMemory}, tracers::{ dispatcher::TracerDispatcher, - utils::{computational_gas_price, print_debug_if_needed, VmHook}, + utils::{computational_gas_price, print_debug_log, print_debug_returndata}, CircuitsTracer, RefundsTracer, ResultTracer, }, - types::internals::ZkSyncVmState, + types::ZkSyncVmState, vm::MultiVmSubversion, - VmTracer, + VmHook, VmTracer, }, }; @@ -220,22 +220,18 @@ impl Tracer for DefaultExecutionTracer { } let hook = VmHook::from_opcode_memory(&state, &data, self.subversion); - print_debug_if_needed( - &hook, - &state, - memory, - self.result_tracer.get_latest_result_ptr(), - self.subversion, - ); - match hook { - VmHook::TxHasEnded if matches!(self.execution_mode, VmExecutionMode::OneTx) => { + Some(VmHook::TxHasEnded) if matches!(self.execution_mode, VmExecutionMode::OneTx) => { self.result_tracer.tx_finished_in_one_tx_mode = true; self.tx_has_been_processed = true; } - VmHook::NoValidationEntered => self.in_account_validation = false, - VmHook::AccountValidationEntered => self.in_account_validation = true, - VmHook::FinalBatchInfo => self.final_batch_info_requested = true, + Some(VmHook::ValidationExited) => self.in_account_validation = false, + Some(VmHook::AccountValidationEntered) => self.in_account_validation = true, + Some(VmHook::FinalBatchInfo) => self.final_batch_info_requested = true, + Some(VmHook::DebugLog) => print_debug_log(&state, memory, self.subversion), + Some(VmHook::DebugReturnData) => { + 
print_debug_returndata(memory, self.result_tracer.get_latest_result_ptr()) + } _ => {} } diff --git a/core/lib/multivm/src/versions/vm_latest/tracers/dispatcher.rs b/core/lib/multivm/src/versions/vm_latest/tracers/dispatcher.rs index 3c3ef1173f53..41ff237551f4 100644 --- a/core/lib/multivm/src/versions/vm_latest/tracers/dispatcher.rs +++ b/core/lib/multivm/src/versions/vm_latest/tracers/dispatcher.rs @@ -9,7 +9,8 @@ use crate::{ }, tracers::dynamic::vm_1_5_0::DynTracer, vm_latest::{ - BootloaderState, HistoryMode, SimpleMemory, TracerPointer, VmTracer, ZkSyncVmState, + bootloader::BootloaderState, HistoryMode, SimpleMemory, TracerPointer, VmTracer, + ZkSyncVmState, }, }; diff --git a/core/lib/multivm/src/versions/vm_latest/tracers/evm_deploy_tracer.rs b/core/lib/multivm/src/versions/vm_latest/tracers/evm_deploy_tracer.rs index dd03a9427efa..70e055343e61 100644 --- a/core/lib/multivm/src/versions/vm_latest/tracers/evm_deploy_tracer.rs +++ b/core/lib/multivm/src/versions/vm_latest/tracers/evm_deploy_tracer.rs @@ -19,7 +19,7 @@ use crate::{ }, tracers::dynamic::vm_1_5_0::DynTracer, utils::bytecode::bytes_to_be_words, - vm_latest::{BootloaderState, HistoryMode, SimpleMemory, ZkSyncVmState}, + vm_latest::{bootloader::BootloaderState, HistoryMode, SimpleMemory, ZkSyncVmState}, }; /// Tracer responsible for collecting information about EVM deploys and providing those diff --git a/core/lib/multivm/src/versions/vm_latest/tracers/pubdata_tracer.rs b/core/lib/multivm/src/versions/vm_latest/tracers/pubdata_tracer.rs index 31309e6ff062..f1f41a2ef3d3 100644 --- a/core/lib/multivm/src/versions/vm_latest/tracers/pubdata_tracer.rs +++ b/core/lib/multivm/src/versions/vm_latest/tracers/pubdata_tracer.rs @@ -27,14 +27,14 @@ use crate::{ }, }, vm_latest::{ - bootloader_state::{utils::apply_pubdata_to_memory, BootloaderState}, + bootloader::{utils::apply_pubdata_to_memory, BootloaderState}, constants::BOOTLOADER_HEAP_PAGE, old_vm::{history_recorder::HistoryMode, memory::SimpleMemory}, - 
tracers::{traits::VmTracer, utils::VmHook}, - types::internals::ZkSyncVmState, + tracers::traits::VmTracer, + types::ZkSyncVmState, utils::logs::collect_events_and_l1_system_logs_after_timestamp, vm::MultiVmSubversion, - StorageOracle, + StorageOracle, VmHook, }, }; @@ -207,7 +207,7 @@ impl DynTracer> for PubdataTracer { _storage: StoragePtr, ) { let hook = VmHook::from_opcode_memory(&state, &data, self.subversion); - if let VmHook::PubdataRequested = hook { + if matches!(hook, Some(VmHook::PubdataRequested)) { self.pubdata_info_requested = true; } } diff --git a/core/lib/multivm/src/versions/vm_latest/tracers/refunds.rs b/core/lib/multivm/src/versions/vm_latest/tracers/refunds.rs index 2bd08e094327..071c0ac37718 100644 --- a/core/lib/multivm/src/versions/vm_latest/tracers/refunds.rs +++ b/core/lib/multivm/src/versions/vm_latest/tracers/refunds.rs @@ -5,7 +5,7 @@ use zk_evm_1_5_0::{ aux_structures::Timestamp, tracing::{BeforeExecutionData, VmLocalStateData}, }; -use zksync_types::{ceil_div_u256, H256, U256}; +use zksync_types::H256; use crate::{ interface::{ @@ -15,16 +15,14 @@ use crate::{ }, tracers::dynamic::vm_1_5_0::DynTracer, vm_latest::{ - bootloader_state::BootloaderState, + bootloader::BootloaderState, constants::{get_operator_refunds_offset, BOOTLOADER_HEAP_PAGE, TX_GAS_LIMIT_OFFSET}, old_vm::{history_recorder::HistoryMode, memory::SimpleMemory}, - tracers::{ - traits::VmTracer, - utils::{get_vm_hook_params, VmHook}, - }, - types::internals::ZkSyncVmState, - utils::fee::get_batch_base_fee, + tracers::{traits::VmTracer, utils::get_vm_hook_params}, + types::ZkSyncVmState, + utils::refund::compute_refund, vm::MultiVmSubversion, + VmHook, }, }; @@ -101,58 +99,15 @@ impl RefundsTracer { pubdata_published: u32, tx_hash: H256, ) -> u64 { - let total_gas_spent = tx_gas_limit - bootloader_refund; - - let gas_spent_on_computation = total_gas_spent - .checked_sub(gas_spent_on_pubdata) - .unwrap_or_else(|| { - tracing::error!( - "Gas spent on pubdata is greater than 
total gas spent. On pubdata: {}, total: {}", - gas_spent_on_pubdata, - total_gas_spent - ); - 0 - }); - - // For now, bootloader charges only for base fee. - let effective_gas_price = get_batch_base_fee(&self.l1_batch); - - let bootloader_eth_price_per_pubdata_byte = - U256::from(effective_gas_price) * U256::from(current_ergs_per_pubdata_byte); - - let fair_eth_price_per_pubdata_byte = - U256::from(self.l1_batch.fee_input.fair_pubdata_price()); - - // For now, L1 originated transactions are allowed to pay less than fair fee per pubdata, - // so we should take it into account. - let eth_price_per_pubdata_byte_for_calculation = std::cmp::min( - bootloader_eth_price_per_pubdata_byte, - fair_eth_price_per_pubdata_byte, - ); - - let fair_fee_eth = U256::from(gas_spent_on_computation) - * U256::from(self.l1_batch.fee_input.fair_l2_gas_price()) - + U256::from(pubdata_published) * eth_price_per_pubdata_byte_for_calculation; - let pre_paid_eth = U256::from(tx_gas_limit) * U256::from(effective_gas_price); - let refund_eth = pre_paid_eth.checked_sub(fair_fee_eth).unwrap_or_else(|| { - tracing::error!( - "Fair fee is greater than pre paid. 
Fair fee: {} wei, pre paid: {} wei", - fair_fee_eth, - pre_paid_eth - ); - U256::zero() - }); - - tracing::trace!( - "Fee benchmark for transaction with hash {}", - hex::encode(tx_hash.as_bytes()) - ); - tracing::trace!("Gas Limit: {}", tx_gas_limit); - tracing::trace!("Gas spent on computation: {}", gas_spent_on_computation); - tracing::trace!("Gas spent on pubdata: {}", gas_spent_on_pubdata); - tracing::trace!("Pubdata published: {}", pubdata_published); - - ceil_div_u256(refund_eth, effective_gas_price.into()).as_u64() + compute_refund( + &self.l1_batch, + bootloader_refund, + gas_spent_on_pubdata, + tx_gas_limit, + current_ergs_per_pubdata_byte, + pubdata_published, + tx_hash, + ) } pub(crate) fn pubdata_published(&self) -> u32 { @@ -171,16 +126,16 @@ impl DynTracer> for RefundsTracer { self.timestamp_before_cycle = Timestamp(state.vm_local_state.timestamp); let hook = VmHook::from_opcode_memory(&state, &data, self.subversion); match hook { - VmHook::NotifyAboutRefund => { - self.refund_gas = get_vm_hook_params(memory, self.subversion)[0].as_u64() + Some(VmHook::NotifyAboutRefund) => { + self.refund_gas = get_vm_hook_params(memory, self.subversion)[0].as_u64(); } - VmHook::AskOperatorForRefund => { + Some(VmHook::AskOperatorForRefund) => { self.pending_refund_request = Some(RefundRequest { refund: get_vm_hook_params(memory, self.subversion)[0].as_u64(), gas_spent_on_pubdata: get_vm_hook_params(memory, self.subversion)[1].as_u64(), used_gas_per_pubdata_byte: get_vm_hook_params(memory, self.subversion)[2] .as_u32(), - }) + }); } _ => {} } diff --git a/core/lib/multivm/src/versions/vm_latest/tracers/result_tracer.rs b/core/lib/multivm/src/versions/vm_latest/tracers/result_tracer.rs index 80a3147f65d2..a90c7c8a018a 100644 --- a/core/lib/multivm/src/versions/vm_latest/tracers/result_tracer.rs +++ b/core/lib/multivm/src/versions/vm_latest/tracers/result_tracer.rs @@ -16,15 +16,16 @@ use crate::{ }, tracers::dynamic::vm_1_5_0::DynTracer, vm_latest::{ + 
bootloader::BootloaderState, constants::{get_result_success_first_slot, BOOTLOADER_HEAP_PAGE}, old_vm::utils::{vm_may_have_ended_inner, VmExecutionResult}, tracers::{ traits::VmTracer, - utils::{get_vm_hook_params, read_pointer, VmHook}, + utils::{get_vm_hook_params, read_pointer}, }, - types::internals::ZkSyncVmState, + types::ZkSyncVmState, vm::MultiVmSubversion, - BootloaderState, HistoryMode, SimpleMemory, + HistoryMode, SimpleMemory, VmHook, }, }; @@ -155,7 +156,7 @@ impl DynTracer> for ResultTracer { _storage: StoragePtr, ) { let hook = VmHook::from_opcode_memory(&state, &data, self.subversion); - if let VmHook::ExecutionResult = hook { + if matches!(hook, Some(VmHook::PostResult)) { let vm_hook_params = get_vm_hook_params(memory, self.subversion); let success = vm_hook_params[0]; let returndata = self diff --git a/core/lib/multivm/src/versions/vm_latest/tracers/traits.rs b/core/lib/multivm/src/versions/vm_latest/tracers/traits.rs index 76dab3dd70a1..b85f8e3ca4e0 100644 --- a/core/lib/multivm/src/versions/vm_latest/tracers/traits.rs +++ b/core/lib/multivm/src/versions/vm_latest/tracers/traits.rs @@ -5,9 +5,9 @@ use crate::{ }, tracers::dynamic::vm_1_5_0::DynTracer, vm_latest::{ - bootloader_state::BootloaderState, + bootloader::BootloaderState, old_vm::{history_recorder::HistoryMode, memory::SimpleMemory}, - types::internals::ZkSyncVmState, + types::ZkSyncVmState, }, }; diff --git a/core/lib/multivm/src/versions/vm_latest/tracers/utils.rs b/core/lib/multivm/src/versions/vm_latest/tracers/utils.rs index 6f81a3ac8de5..ced3193ca490 100644 --- a/core/lib/multivm/src/versions/vm_latest/tracers/utils.rs +++ b/core/lib/multivm/src/versions/vm_latest/tracers/utils.rs @@ -22,33 +22,15 @@ use crate::vm_latest::{ utils::{aux_heap_page_from_base, heap_page_from_base}, }, vm::MultiVmSubversion, + VmHook, }; -#[derive(Clone, Debug, Copy)] -pub(crate) enum VmHook { - AccountValidationEntered, - PaymasterValidationEntered, - NoValidationEntered, - ValidationStepEndeded, - 
TxHasEnded, - DebugLog, - DebugReturnData, - NoHook, - NearCallCatch, - AskOperatorForRefund, - NotifyAboutRefund, - ExecutionResult, - FinalBatchInfo, - // Hook used to signal that the final pubdata for a batch is requested. - PubdataRequested, -} - impl VmHook { pub(crate) fn from_opcode_memory( state: &VmLocalStateData<'_>, data: &BeforeExecutionData, subversion: MultiVmSubversion, - ) -> Self { + ) -> Option { let opcode_variant = data.opcode.variant; let heap_page = heap_page_from_base(state.vm_local_state.callstack.current.base_memory_page).0; @@ -64,33 +46,18 @@ impl VmHook { || heap_page != BOOTLOADER_HEAP_PAGE || fat_ptr.offset != get_vm_hook_position(subversion) * 32 { - return Self::NoHook; + return None; } - match value.as_u32() { - 0 => Self::AccountValidationEntered, - 1 => Self::PaymasterValidationEntered, - 2 => Self::NoValidationEntered, - 3 => Self::ValidationStepEndeded, - 4 => Self::TxHasEnded, - 5 => Self::DebugLog, - 6 => Self::DebugReturnData, - 7 => Self::NearCallCatch, - 8 => Self::AskOperatorForRefund, - 9 => Self::NotifyAboutRefund, - 10 => Self::ExecutionResult, - 11 => Self::FinalBatchInfo, - 12 => Self::PubdataRequested, - _ => panic!("Unknown hook: {}", value.as_u32()), - } + Some(Self::new(value.as_u32())) } } -pub(crate) fn get_debug_log( +pub(crate) fn print_debug_log( state: &VmLocalStateData<'_>, memory: &SimpleMemory, subversion: MultiVmSubversion, -) -> String { +) { let vm_hook_params: Vec<_> = get_vm_hook_params(memory, subversion) .into_iter() .map(u256_to_h256) @@ -113,7 +80,7 @@ pub(crate) fn get_debug_log( data.to_string() }; let tx_id = state.vm_local_state.tx_number_in_block; - format!("Bootloader transaction {tx_id}: {msg}: {data_str}") + tracing::trace!("Bootloader transaction {tx_id}: {msg}: {data_str}"); } /// Reads the memory slice represented by the fat pointer. @@ -142,33 +109,17 @@ pub(crate) fn read_pointer( /// Outputs the returndata for the latest call. /// This is usually used to output the revert reason. 
-pub(crate) fn get_debug_returndata( +pub(crate) fn print_debug_returndata( memory: &SimpleMemory, latest_returndata_ptr: Option, -) -> String { +) { let returndata = if let Some(ptr) = latest_returndata_ptr { read_pointer(memory, ptr) } else { vec![] }; - format!("0x{}", hex::encode(returndata)) -} - -/// Accepts a vm hook and, if it requires to output some debug log, outputs it. -pub(crate) fn print_debug_if_needed( - hook: &VmHook, - state: &VmLocalStateData<'_>, - memory: &SimpleMemory, - latest_returndata_ptr: Option, - subversion: MultiVmSubversion, -) { - let log = match hook { - VmHook::DebugLog => get_debug_log(state, memory, subversion), - VmHook::DebugReturnData => get_debug_returndata(memory, latest_returndata_ptr), - _ => return, - }; - tracing::trace!("{log}"); + tracing::trace!("0x{}", hex::encode(returndata)); } pub(crate) fn computational_gas_price( diff --git a/core/lib/multivm/src/versions/vm_latest/types/hook.rs b/core/lib/multivm/src/versions/vm_latest/types/hook.rs new file mode 100644 index 000000000000..1fbd0c826e5e --- /dev/null +++ b/core/lib/multivm/src/versions/vm_latest/types/hook.rs @@ -0,0 +1,39 @@ +#[derive(Debug, Copy, Clone)] +pub(crate) enum VmHook { + AccountValidationEntered, + PaymasterValidationEntered, + ValidationExited, + ValidationStepEnded, + TxHasEnded, + DebugLog, + DebugReturnData, + NearCallCatch, + AskOperatorForRefund, + NotifyAboutRefund, + PostResult, + FinalBatchInfo, + PubdataRequested, +} + +impl VmHook { + /// # Panics + /// Panics if the number does not correspond to any hook. 
+ pub fn new(raw: u32) -> Self { + match raw { + 0 => Self::AccountValidationEntered, + 1 => Self::PaymasterValidationEntered, + 2 => Self::ValidationExited, + 3 => Self::ValidationStepEnded, + 4 => Self::TxHasEnded, + 5 => Self::DebugLog, + 6 => Self::DebugReturnData, + 7 => Self::NearCallCatch, + 8 => Self::AskOperatorForRefund, + 9 => Self::NotifyAboutRefund, + 10 => Self::PostResult, + 11 => Self::FinalBatchInfo, + 12 => Self::PubdataRequested, + _ => panic!("Unknown hook: {raw}"), + } + } +} diff --git a/core/lib/multivm/src/versions/vm_latest/types/internals/mod.rs b/core/lib/multivm/src/versions/vm_latest/types/internals/mod.rs deleted file mode 100644 index 601b7b8bd014..000000000000 --- a/core/lib/multivm/src/versions/vm_latest/types/internals/mod.rs +++ /dev/null @@ -1,7 +0,0 @@ -pub(crate) use snapshot::VmSnapshot; -pub(crate) use transaction_data::TransactionData; -pub(crate) use vm_state::new_vm_state; -pub use vm_state::ZkSyncVmState; -mod snapshot; -mod transaction_data; -mod vm_state; diff --git a/core/lib/multivm/src/versions/vm_latest/types/l1_batch.rs b/core/lib/multivm/src/versions/vm_latest/types/l1_batch.rs deleted file mode 100644 index 89b22d328ac5..000000000000 --- a/core/lib/multivm/src/versions/vm_latest/types/l1_batch.rs +++ /dev/null @@ -1,43 +0,0 @@ -use zksync_types::{address_to_u256, h256_to_u256, U256}; - -use crate::{interface::L1BatchEnv, vm_latest::utils::fee::get_batch_base_fee}; - -const OPERATOR_ADDRESS_SLOT: usize = 0; -const PREV_BLOCK_HASH_SLOT: usize = 1; -const NEW_BLOCK_TIMESTAMP_SLOT: usize = 2; -const NEW_BLOCK_NUMBER_SLOT: usize = 3; -const FAIR_PUBDATA_PRICE_SLOT: usize = 4; -const FAIR_L2_GAS_PRICE_SLOT: usize = 5; -const EXPECTED_BASE_FEE_SLOT: usize = 6; -const SHOULD_SET_NEW_BLOCK_SLOT: usize = 7; - -/// Returns the initial memory for the bootloader based on the current batch environment. 
-pub(crate) fn bootloader_initial_memory(l1_batch: &L1BatchEnv) -> Vec<(usize, U256)> { - let (prev_block_hash, should_set_new_block) = l1_batch - .previous_batch_hash - .map(|prev_block_hash| (h256_to_u256(prev_block_hash), U256::one())) - .unwrap_or_default(); - - vec![ - ( - OPERATOR_ADDRESS_SLOT, - address_to_u256(&l1_batch.fee_account), - ), - (PREV_BLOCK_HASH_SLOT, prev_block_hash), - (NEW_BLOCK_TIMESTAMP_SLOT, U256::from(l1_batch.timestamp)), - (NEW_BLOCK_NUMBER_SLOT, U256::from(l1_batch.number.0)), - ( - FAIR_PUBDATA_PRICE_SLOT, - U256::from(l1_batch.fee_input.fair_pubdata_price()), - ), - ( - FAIR_L2_GAS_PRICE_SLOT, - U256::from(l1_batch.fee_input.fair_l2_gas_price()), - ), - ( - EXPECTED_BASE_FEE_SLOT, - U256::from(get_batch_base_fee(l1_batch)), - ), - (SHOULD_SET_NEW_BLOCK_SLOT, should_set_new_block), - ] -} diff --git a/core/lib/multivm/src/versions/vm_latest/types/mod.rs b/core/lib/multivm/src/versions/vm_latest/types/mod.rs index a12005734abb..0963b7c458c0 100644 --- a/core/lib/multivm/src/versions/vm_latest/types/mod.rs +++ b/core/lib/multivm/src/versions/vm_latest/types/mod.rs @@ -1,2 +1,9 @@ -pub(crate) mod internals; -mod l1_batch; +mod hook; +mod snapshot; +mod transaction_data; +mod vm_state; + +pub use self::vm_state::ZkSyncVmState; +pub(crate) use self::{ + hook::VmHook, snapshot::VmSnapshot, transaction_data::TransactionData, vm_state::new_vm_state, +}; diff --git a/core/lib/multivm/src/versions/vm_latest/types/internals/snapshot.rs b/core/lib/multivm/src/versions/vm_latest/types/snapshot.rs similarity index 82% rename from core/lib/multivm/src/versions/vm_latest/types/internals/snapshot.rs rename to core/lib/multivm/src/versions/vm_latest/types/snapshot.rs index 76b466fd605b..b43392b5111a 100644 --- a/core/lib/multivm/src/versions/vm_latest/types/internals/snapshot.rs +++ b/core/lib/multivm/src/versions/vm_latest/types/snapshot.rs @@ -1,6 +1,6 @@ use zk_evm_1_5_0::vm_state::VmLocalState; -use 
crate::vm_latest::bootloader_state::BootloaderStateSnapshot; +use crate::vm_latest::bootloader::BootloaderStateSnapshot; /// A snapshot of the VM that holds enough information to /// rollback the VM to some historical state. diff --git a/core/lib/multivm/src/versions/vm_latest/types/internals/transaction_data.rs b/core/lib/multivm/src/versions/vm_latest/types/transaction_data.rs similarity index 100% rename from core/lib/multivm/src/versions/vm_latest/types/internals/transaction_data.rs rename to core/lib/multivm/src/versions/vm_latest/types/transaction_data.rs diff --git a/core/lib/multivm/src/versions/vm_latest/types/internals/vm_state.rs b/core/lib/multivm/src/versions/vm_latest/types/vm_state.rs similarity index 97% rename from core/lib/multivm/src/versions/vm_latest/types/internals/vm_state.rs rename to core/lib/multivm/src/versions/vm_latest/types/vm_state.rs index 63f06f4fd846..55b504a85f7c 100644 --- a/core/lib/multivm/src/versions/vm_latest/types/internals/vm_state.rs +++ b/core/lib/multivm/src/versions/vm_latest/types/vm_state.rs @@ -20,7 +20,7 @@ use crate::{ }, utils::bytecode::bytes_to_be_words, vm_latest::{ - bootloader_state::BootloaderState, + bootloader::BootloaderState, constants::BOOTLOADER_HEAP_PAGE, old_vm::{ event_sink::InMemoryEventSink, @@ -31,7 +31,6 @@ use crate::{ }, }, oracles::storage::StorageOracle, - types::l1_batch::bootloader_initial_memory, utils::l2_blocks::{assert_next_block, load_last_l2_block}, }, }; @@ -106,7 +105,7 @@ pub(crate) fn new_vm_state( Timestamp(0), ); - let bootloader_initial_memory = bootloader_initial_memory(l1_batch_env); + let bootloader_initial_memory = BootloaderState::initial_memory(l1_batch_env); memory.populate_page( BOOTLOADER_HEAP_PAGE as usize, bootloader_initial_memory.clone(), diff --git a/core/lib/multivm/src/versions/vm_latest/utils/logs.rs b/core/lib/multivm/src/versions/vm_latest/utils/logs.rs index dfa23685dcda..9e4c61ab059c 100644 --- a/core/lib/multivm/src/versions/vm_latest/utils/logs.rs +++ 
b/core/lib/multivm/src/versions/vm_latest/utils/logs.rs @@ -6,7 +6,7 @@ use crate::{ interface::{storage::WriteStorage, L1BatchEnv, VmEvent}, vm_latest::{ old_vm::{events::merge_events, history_recorder::HistoryMode}, - types::internals::ZkSyncVmState, + types::ZkSyncVmState, }, }; diff --git a/core/lib/multivm/src/versions/vm_latest/utils/mod.rs b/core/lib/multivm/src/versions/vm_latest/utils/mod.rs index 97483633bc54..04821d411f84 100644 --- a/core/lib/multivm/src/versions/vm_latest/utils/mod.rs +++ b/core/lib/multivm/src/versions/vm_latest/utils/mod.rs @@ -6,6 +6,7 @@ pub mod fee; pub mod l2_blocks; pub(crate) mod logs; pub mod overhead; +pub(crate) mod refund; pub mod transaction_encoding; pub const fn heap_page_from_base(base: MemoryPage) -> MemoryPage { diff --git a/core/lib/multivm/src/versions/vm_fast/refund.rs b/core/lib/multivm/src/versions/vm_latest/utils/refund.rs similarity index 100% rename from core/lib/multivm/src/versions/vm_fast/refund.rs rename to core/lib/multivm/src/versions/vm_latest/utils/refund.rs diff --git a/core/lib/multivm/src/versions/vm_latest/utils/transaction_encoding.rs b/core/lib/multivm/src/versions/vm_latest/utils/transaction_encoding.rs index ed532f89dbc6..515f436df89c 100644 --- a/core/lib/multivm/src/versions/vm_latest/utils/transaction_encoding.rs +++ b/core/lib/multivm/src/versions/vm_latest/utils/transaction_encoding.rs @@ -1,6 +1,6 @@ use zksync_types::Transaction; -use crate::vm_latest::types::internals::TransactionData; +use crate::vm_latest::types::TransactionData; /// Extension for transactions, specific for VM. 
Required for bypassing the orphan rule pub trait TransactionVmExt { diff --git a/core/lib/multivm/src/versions/vm_latest/vm.rs b/core/lib/multivm/src/versions/vm_latest/vm.rs index 1db369d4ae20..9eb342a6c8d6 100644 --- a/core/lib/multivm/src/versions/vm_latest/vm.rs +++ b/core/lib/multivm/src/versions/vm_latest/vm.rs @@ -21,10 +21,10 @@ use crate::{ }, utils::{bytecode::be_words_to_bytes, events::extract_l2tol1logs_from_l1_messenger}, vm_latest::{ - bootloader_state::BootloaderState, + bootloader::BootloaderState, old_vm::{events::merge_events, history_recorder::HistoryEnabled}, tracers::{dispatcher::TracerDispatcher, PubdataTracer}, - types::internals::{new_vm_state, VmSnapshot, ZkSyncVmState}, + types::{new_vm_state, VmSnapshot, ZkSyncVmState}, }, HistoryMode, }; @@ -47,7 +47,7 @@ pub(crate) enum MultiVmSubversion { impl MultiVmSubversion { #[cfg(test)] pub(crate) fn latest() -> Self { - Self::IncreasedBootloaderMemory + Self::Gateway } } diff --git a/core/lib/protobuf_config/src/contracts.rs b/core/lib/protobuf_config/src/contracts.rs index 12cbf996697b..1c9711ef62d2 100644 --- a/core/lib/protobuf_config/src/contracts.rs +++ b/core/lib/protobuf_config/src/contracts.rs @@ -34,6 +34,10 @@ impl ProtoRepr for proto::Contracts { .l1_bytecodes_supplier_addr .as_ref() .map(|x| parse_h160(x).expect("Invalid address")), + l1_wrapped_base_token_store: ecosystem_contracts + .l1_wrapped_base_token_store + .as_ref() + .map(|x| parse_h160(x).expect("Invalid address")), }) } else { None @@ -123,12 +127,6 @@ impl ProtoRepr for proto::Contracts { .map(|x| parse_h256(x)) .transpose() .context("base_token_asset_id")?, - l2_predeployed_wrapped_base_token_address: l2 - .predeployed_wrapped_base_token_address - .as_ref() - .map(|x| parse_h160(x)) - .transpose() - .context("l2 predeployed_wrapped_base_token_address")?, chain_admin_addr: l1 .chain_admin_addr .as_ref() @@ -164,6 +162,9 @@ impl ProtoRepr for proto::Contracts { l1_bytecodes_supplier_addr: ecosystem_contracts 
.l1_bytecodes_supplier_addr .map(|x| format!("{:?}", x)), + l1_wrapped_base_token_store: ecosystem_contracts + .l1_wrapped_base_token_store + .map(|x| format!("{:?}", x)), }); Self { ecosystem_contracts, @@ -184,9 +185,6 @@ impl ProtoRepr for proto::Contracts { legacy_shared_bridge_addr: this .l2_legacy_shared_bridge_addr .map(|a| format!("{:?}", a)), - predeployed_wrapped_base_token_address: this - .l2_predeployed_wrapped_base_token_address - .map(|x| format!("{:?}", x)), timestamp_asserter_addr: this .l2_timestamp_asserter_addr .map(|a| format!("{:?}", a)), diff --git a/core/lib/protobuf_config/src/da_client.rs b/core/lib/protobuf_config/src/da_client.rs index 9ba38336da52..ca206e8c5443 100644 --- a/core/lib/protobuf_config/src/da_client.rs +++ b/core/lib/protobuf_config/src/da_client.rs @@ -7,7 +7,7 @@ use zksync_config::configs::{ avail::{AvailClientConfig, AvailConfig, AvailDefaultConfig, AvailGasRelayConfig}, celestia::CelestiaConfig, eigen::EigenConfig, - DAClientConfig::{Avail, Celestia, Eigen, ObjectStore}, + DAClientConfig::{Avail, Celestia, Eigen, NoDA, ObjectStore}, }, }; use zksync_protobuf::{required, ProtoRepr}; @@ -25,8 +25,7 @@ impl ProtoRepr for proto::DataAvailabilityClient { type Type = configs::DAClientConfig; fn read(&self) -> anyhow::Result { - let config = required(&self.config).context("config")?; - + let config = required(&self.config).context("da_client config")?; let client = match config { proto::data_availability_client::Config::Avail(conf) => Avail(AvailConfig { bridge_api_url: required(&conf.bridge_api_url) @@ -40,6 +39,7 @@ impl ProtoRepr for proto::DataAvailabilityClient { .context("api_node_url")? 
.clone(), app_id: *required(&full_client_conf.app_id).context("app_id")?, + finality_state: full_client_conf.finality_state.clone(), }) } Some(proto::avail_config::Config::GasRelay(gas_relay_conf)) => { @@ -85,6 +85,7 @@ impl ProtoRepr for proto::DataAvailabilityClient { proto::data_availability_client::Config::ObjectStore(conf) => { ObjectStore(object_store_proto::ObjectStore::read(conf)?) } + proto::data_availability_client::Config::NoDa(_) => NoDA, }; Ok(client) @@ -100,6 +101,7 @@ impl ProtoRepr for proto::DataAvailabilityClient { proto::avail_config::Config::FullClient(proto::AvailClientConfig { api_node_url: Some(conf.api_node_url.clone()), app_id: Some(conf.app_id), + finality_state: conf.finality_state.clone(), }), ), AvailClientConfig::GasRelay(conf) => Some( @@ -140,6 +142,7 @@ impl ProtoRepr for proto::DataAvailabilityClient { ObjectStore(config) => proto::data_availability_client::Config::ObjectStore( object_store_proto::ObjectStore::build(config), ), + NoDA => proto::data_availability_client::Config::NoDa(proto::NoDaConfig {}), }; Self { diff --git a/core/lib/protobuf_config/src/proto/config/contracts.proto b/core/lib/protobuf_config/src/proto/config/contracts.proto index febbc981478b..538f415ff408 100644 --- a/core/lib/protobuf_config/src/proto/config/contracts.proto +++ b/core/lib/protobuf_config/src/proto/config/contracts.proto @@ -7,6 +7,7 @@ message EcosystemContracts { optional string state_transition_proxy_addr = 2; // optional; h160 optional string transparent_proxy_admin_addr = 3; // optional; h160 optional string l1_bytecodes_supplier_addr = 4; // optional; h160 + optional string l1_wrapped_base_token_store = 5; // optional; h160 } message L1 { @@ -26,7 +27,7 @@ message L2 { optional string da_validator_addr = 2; // optional; H160 optional string legacy_shared_bridge_addr = 3; // optional; H160 optional string timestamp_asserter_addr = 4; // optional; H160 - optional string predeployed_wrapped_base_token_address = 5; // optional; H160 + 
reserved 5; reserved "predeployed_wrapped_base_token_address"; } message Bridge { diff --git a/core/lib/protobuf_config/src/proto/config/da_client.proto b/core/lib/protobuf_config/src/proto/config/da_client.proto index 21ac2fb5e116..4bbcc7e0b033 100644 --- a/core/lib/protobuf_config/src/proto/config/da_client.proto +++ b/core/lib/protobuf_config/src/proto/config/da_client.proto @@ -22,6 +22,7 @@ message AvailConfig { message AvailClientConfig { optional string api_node_url = 1; optional uint32 app_id = 2; + optional string finality_state = 3; } message AvailGasRelayConfig { @@ -50,6 +51,8 @@ message EigenConfig { reserved "rpc_node_url","inclusion_polling_interval_ms"; } +message NoDAConfig {} + message DataAvailabilityClient { // oneof in protobuf allows for None oneof config { @@ -57,5 +60,6 @@ message DataAvailabilityClient { object_store.ObjectStore object_store = 2; CelestiaConfig celestia = 3; EigenConfig eigen = 4; + NoDAConfig no_da = 5; } } diff --git a/core/lib/prover_interface/Cargo.toml b/core/lib/prover_interface/Cargo.toml index 533e3bc1296a..dfa860c5aa6b 100644 --- a/core/lib/prover_interface/Cargo.toml +++ b/core/lib/prover_interface/Cargo.toml @@ -18,6 +18,7 @@ zksync_types.workspace = true # We can use the newest api to send proofs to L1. 
circuit_definitions.workspace = true fflonk.workspace = true +bellman.workspace = true circuit_sequencer_api.workspace = true serde.workspace = true diff --git a/core/lib/prover_interface/src/outputs.rs b/core/lib/prover_interface/src/outputs.rs index b536c39778a5..3fc037d0c040 100644 --- a/core/lib/prover_interface/src/outputs.rs +++ b/core/lib/prover_interface/src/outputs.rs @@ -1,10 +1,12 @@ use core::fmt; +use bellman::plonk::better_better_cs::proof::Proof as PlonkProof; use circuit_definitions::{ boojum::pairing::bn256::Bn256, - circuit_definitions::aux_layer::ZkSyncSnarkWrapperCircuitNoLookupCustomGate, + circuit_definitions::aux_layer::{ + ZkSyncSnarkWrapperCircuit, ZkSyncSnarkWrapperCircuitNoLookupCustomGate, + }, }; -use circuit_sequencer_api::proof::FinalProof; use fflonk::FflonkProof; use serde::{Deserialize, Serialize}; use serde_with::{hex::Hex, serde_as}; @@ -13,6 +15,7 @@ use zksync_types::{protocol_version::ProtocolSemanticVersion, tee_types::TeeType /// A "final" ZK proof that can be sent to the L1 contract. 
#[derive(Clone, Serialize, Deserialize)] +#[serde(untagged)] #[allow(clippy::large_enum_variant)] pub enum L1BatchProofForL1 { Fflonk(FflonkL1BatchProofForL1), @@ -42,10 +45,18 @@ pub struct FflonkL1BatchProofForL1 { pub protocol_version: ProtocolSemanticVersion, } +// Implementation created to allow conversion from FflonkL1BatchProofForL1(which is old L1BatchProofForL1) +// to L1BatchProofForL1 to avoid compatibility problems with serialization/deserialization +impl From for L1BatchProofForL1 { + fn from(proof: FflonkL1BatchProofForL1) -> Self { + L1BatchProofForL1::Fflonk(proof) + } +} + #[derive(Clone, Serialize, Deserialize)] pub struct PlonkL1BatchProofForL1 { pub aggregation_result_coords: [[u8; 32]; 4], - pub scheduler_proof: FinalProof, + pub scheduler_proof: PlonkProof, pub protocol_version: ProtocolSemanticVersion, } @@ -124,11 +135,12 @@ impl StoredObject for L1BatchProofForL1 { } fn deserialize(bytes: Vec) -> Result { - zksync_object_store::bincode::deserialize::(&bytes).or_else(|_| { - zksync_object_store::bincode::deserialize::(&bytes) + match zksync_object_store::bincode::deserialize::(&bytes) { + Ok(proof) => Ok(proof.into()), + Err(_) => zksync_object_store::bincode::deserialize::(&bytes) .map(Into::into) - .map_err(Into::into) - }) + .map_err(Into::into), + } } } diff --git a/core/lib/prover_interface/tests/job_serialization.rs b/core/lib/prover_interface/tests/job_serialization.rs index cc78a9acf262..f28a8d94bf0c 100644 --- a/core/lib/prover_interface/tests/job_serialization.rs +++ b/core/lib/prover_interface/tests/job_serialization.rs @@ -102,8 +102,7 @@ fn test_proof_request_serialization() { let encoded_obj = serde_json::to_string(&proof).unwrap(); let encoded_json = r#"{ "Proof": { - "Plonk": { - "aggregation_result_coords": [ + "aggregation_result_coords": [ [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ], @@ -159,7 +158,6 @@ fn test_proof_request_serialization() { }, "protocol_version": 
"0.25.10" } - } }"#; let decoded_obj: SubmitProofRequest = serde_json::from_str(&encoded_obj).unwrap(); let decoded_json: SubmitProofRequest = serde_json::from_str(encoded_json).unwrap(); diff --git a/core/lib/snapshots_applier/README.md b/core/lib/snapshots_applier/README.md index 60f17344f5b1..9ec934cbce35 100644 --- a/core/lib/snapshots_applier/README.md +++ b/core/lib/snapshots_applier/README.md @@ -4,7 +4,7 @@ Library responsible for recovering Postgres from a protocol-level snapshot. ## Recovery workflow -_(See [node docs](../../../docs/guides/external-node/07_snapshots_recovery.md) for a high-level snapshot recovery +_(See [node docs](../../../docs/src/guides/external-node/07_snapshots_recovery.md) for a high-level snapshot recovery overview and [snapshot creator docs](../../bin/snapshots_creator/README.md) for the snapshot format details)_ 1. Recovery is started by querying the main node and determining the snapshot parameters. By default, recovery is diff --git a/core/lib/types/Cargo.toml b/core/lib/types/Cargo.toml index 325fe22209a7..6af0e39d14f0 100644 --- a/core/lib/types/Cargo.toml +++ b/core/lib/types/Cargo.toml @@ -19,6 +19,7 @@ zksync_mini_merkle_tree.workspace = true zksync_protobuf.workspace = true zksync_crypto_primitives.workspace = true +async-trait.workspace = true anyhow.workspace = true chrono = { workspace = true, features = ["serde"] } derive_more = { workspace = true, features = ["debug"] } diff --git a/core/lib/types/src/abi.rs b/core/lib/types/src/abi.rs index 92d4cb4c8612..da51c6297d4d 100644 --- a/core/lib/types/src/abi.rs +++ b/core/lib/types/src/abi.rs @@ -1,4 +1,5 @@ use anyhow::Context as _; +use zksync_basic_types::protocol_version::ProtocolSemanticVersion; use crate::{ bytecode::BytecodeHash, @@ -185,7 +186,7 @@ impl NewPriorityRequest { } /// `VerifierParams` from `l1-contracts/contracts/state-transition/chain-interfaces/IVerifier.sol`. 
-#[derive(Default, PartialEq)] +#[derive(Debug, Default, PartialEq)] pub struct VerifierParams { pub recursion_node_level_vk_hash: [u8; 32], pub recursion_leaf_level_vk_hash: [u8; 32], @@ -193,9 +194,11 @@ pub struct VerifierParams { } /// `ProposedUpgrade` from, `l1-contracts/contracts/upgrades/BazeZkSyncUpgrade.sol`. +#[derive(Debug)] pub struct ProposedUpgrade { pub l2_protocol_upgrade_tx: Box, - pub factory_deps: Vec>, + // Factory deps are set only pre-gateway upgrades. + pub factory_deps: Option>>, pub bootloader_hash: [u8; 32], pub default_account_hash: [u8; 32], pub verifier: Address, @@ -250,8 +253,8 @@ impl VerifierParams { } impl ProposedUpgrade { - /// RLP schema of the `ProposedUpgrade`. - pub fn schema() -> ParamType { + /// Pre-gateway RLP schema of the `ProposedUpgrade`. + pub fn schema_pre_gateway() -> ParamType { ParamType::Tuple(vec![ L2CanonicalTransaction::schema(), // transaction data ParamType::Array(ParamType::Bytes.into()), // factory deps @@ -266,16 +269,39 @@ impl ProposedUpgrade { ]) } + /// Post-gateway RLP schema of the `ProposedUpgrade`. + pub fn schema_post_gateway() -> ParamType { + ParamType::Tuple(vec![ + L2CanonicalTransaction::schema(), // transaction data + ParamType::FixedBytes(32), // bootloader code hash + ParamType::FixedBytes(32), // default account code hash + ParamType::Address, // verifier address + VerifierParams::schema(), // verifier params + ParamType::Bytes, // l1 custom data + ParamType::Bytes, // l1 post-upgrade custom data + ParamType::Uint(256), // timestamp + ParamType::Uint(256), // version id + ]) + } + /// Encodes `ProposedUpgrade` to a RLP token. 
pub fn encode(&self) -> Token { - Token::Tuple(vec![ - self.l2_protocol_upgrade_tx.encode(), - Token::Array( + let mut tokens = vec![self.l2_protocol_upgrade_tx.encode()]; + + let protocol_version = ProtocolSemanticVersion::try_from_packed(self.new_protocol_version) + .expect("Version is not supported") + .minor; + if protocol_version.is_pre_gateway() { + tokens.push(Token::Array( self.factory_deps - .iter() - .map(|b| Token::Bytes(b.clone())) + .clone() + .expect("Factory deps should be present in pre-gateway upgrade data") + .into_iter() + .map(Token::Bytes) .collect(), - ), + )); + } + tokens.extend([ Token::FixedBytes(self.bootloader_hash.into()), Token::FixedBytes(self.default_account_hash.into()), Token::Address(self.verifier), @@ -284,32 +310,52 @@ impl ProposedUpgrade { Token::Bytes(self.post_upgrade_calldata.clone()), Token::Uint(self.upgrade_timestamp), Token::Uint(self.new_protocol_version), - ]) + ]); + + Token::Tuple(tokens) } /// Decodes `ProposedUpgrade` from a RLP token. /// Returns an error if token doesn't match the `schema()`. pub fn decode(token: Token) -> anyhow::Result { let tokens = token.into_tuple().context("not a tuple")?; - anyhow::ensure!(tokens.len() == 10); + let tokens_len = tokens.len(); + anyhow::ensure!(tokens_len >= 9); let mut t = tokens.into_iter(); let mut next = || t.next().unwrap(); - Ok(Self { - l2_protocol_upgrade_tx: L2CanonicalTransaction::decode(next()) - .context("l2_protocol_upgrade_tx")? - .into(), - factory_deps: next() - .into_array() - .context("factory_deps")? - .into_iter() - .enumerate() - .map(|(i, b)| b.into_bytes().context(i)) - .collect::>() - .context("factory_deps")?, - bootloader_hash: next() - .into_fixed_bytes() - .and_then(|b| b.try_into().ok()) - .context("bootloader_hash")?, + + let l2_protocol_upgrade_tx = L2CanonicalTransaction::decode(next()) + .context("l2_protocol_upgrade_tx")? 
+ .into(); + let next_token = next(); + let (factory_deps, bootloader_hash) = match next_token { + Token::Array(tokens) => { + anyhow::ensure!(tokens_len == 10); + ( + Some( + tokens + .into_iter() + .enumerate() + .map(|(i, b)| b.into_bytes().context(i)) + .collect::>() + .context("factory_deps")?, + ), + next().into_fixed_bytes(), + ) + } + Token::FixedBytes(bytes) => { + anyhow::ensure!(tokens_len == 9); + (None, Some(bytes)) + } + _ => anyhow::bail!("Unexpected type of the second token"), + }; + let bootloader_hash = bootloader_hash + .and_then(|b| b.try_into().ok()) + .context("bootloader_hash")?; + let upgrade = Self { + l2_protocol_upgrade_tx, + factory_deps, + bootloader_hash, default_account_hash: next() .into_fixed_bytes() .and_then(|b| b.try_into().ok()) @@ -322,7 +368,16 @@ impl ProposedUpgrade { post_upgrade_calldata: next().into_bytes().context("post_upgrade_calldata")?, upgrade_timestamp: next().into_uint().context("upgrade_timestamp")?, new_protocol_version: next().into_uint().context("new_protocol_version")?, - }) + }; + + let protocol_version = + ProtocolSemanticVersion::try_from_packed(upgrade.new_protocol_version) + .map_err(|err| anyhow::anyhow!(err)) + .context("Version is not supported")? + .minor; + anyhow::ensure!(protocol_version.is_pre_gateway() == upgrade.factory_deps.is_some()); + + Ok(upgrade) } } @@ -365,3 +420,166 @@ impl Transaction { }) } } + +pub struct ForceDeployment { + pub bytecode_hash: H256, + pub new_address: Address, + pub call_constructor: bool, + pub value: U256, + pub input: Vec, +} + +impl ForceDeployment { + /// ABI schema of the `ForceDeployment`. + pub fn schema() -> ParamType { + ParamType::Tuple(vec![ + ParamType::FixedBytes(32), + ParamType::Address, + ParamType::Bool, + ParamType::Uint(256), + ParamType::Bytes, + ]) + } + + /// Encodes `ForceDeployment` to a RLP token. 
+ pub fn encode(&self) -> Token { + Token::Tuple(vec![ + Token::FixedBytes(self.bytecode_hash.0.to_vec()), + Token::Address(self.new_address), + Token::Bool(self.call_constructor), + Token::Uint(self.value), + Token::Bytes(self.input.clone()), + ]) + } + + /// Decodes `ForceDeployment` from a RLP token. + /// Returns an error if token doesn't match the `schema()`. + pub fn decode(token: Token) -> anyhow::Result { + let tokens = token.into_tuple().context("not a tuple")?; + anyhow::ensure!(tokens.len() == 5); + let mut t = tokens.into_iter(); + let mut next = || t.next().unwrap(); + Ok(Self { + bytecode_hash: next() + .into_fixed_bytes() + .and_then(|b| Some(H256(b.try_into().ok()?))) + .context("bytecode_hash")?, + new_address: next().into_address().context("new_address")?, + call_constructor: next().into_bool().context("call_constructor")?, + value: next().into_uint().context("value")?, + input: next().into_bytes().context("input")?, + }) + } +} + +pub struct GatewayUpgradeEncodedInput { + pub force_deployments: Vec, + pub l2_gateway_upgrade_position: usize, + pub fixed_force_deployments_data: Vec, + pub ctm_deployer: Address, + pub old_validator_timelock: Address, + pub new_validator_timelock: Address, + pub wrapped_base_token_store: Address, +} + +impl GatewayUpgradeEncodedInput { + /// ABI schema of the `GatewayUpgradeEncodedInput`. + pub fn schema() -> ParamType { + ParamType::Tuple(vec![ + ParamType::Array(Box::new(ForceDeployment::schema())), + ParamType::Uint(256), + ParamType::Bytes, + ParamType::Address, + ParamType::Address, + ParamType::Address, + ParamType::Address, + ]) + } + + /// Decodes `GatewayUpgradeEncodedInput` from a RLP token. + /// Returns an error if token doesn't match the `schema()`. 
+ pub fn decode(token: Token) -> anyhow::Result { + let tokens = token.into_tuple().context("not a tuple")?; + anyhow::ensure!(tokens.len() == 7); + let mut t = tokens.into_iter(); + let mut next = || t.next().unwrap(); + + let force_deployments_array = next().into_array().context("force_deployments_array")?; + let mut force_deployments = vec![]; + for token in force_deployments_array { + force_deployments.push(ForceDeployment::decode(token)?); + } + + Ok(Self { + force_deployments, + l2_gateway_upgrade_position: next() + .into_uint() + .context("l2_gateway_upgrade_position")? + .as_usize(), + fixed_force_deployments_data: next() + .into_bytes() + .context("fixed_force_deployments_data")?, + ctm_deployer: next().into_address().context("ctm_deployer")?, + old_validator_timelock: next().into_address().context("old_validator_timelock")?, + new_validator_timelock: next().into_address().context("new_validator_timelock")?, + wrapped_base_token_store: next().into_address().context("wrapped_base_token_store")?, + }) + } +} + +#[derive(Debug, Clone)] +pub struct ZkChainSpecificUpgradeData { + pub base_token_asset_id: H256, + pub l2_legacy_shared_bridge: Address, + pub l2_predeployed_wrapped_base_token: Address, + pub base_token_l1_address: Address, + pub base_token_name: String, + pub base_token_symbol: String, +} + +impl ZkChainSpecificUpgradeData { + pub fn from_partial_components( + base_token_asset_id: Option, + l2_legacy_shared_bridge: Option
, + predeployed_l2_weth_address: Option
, + base_token_l1_address: Option
, + base_token_name: Option, + base_token_symbol: Option, + ) -> Option { + Some(Self { + base_token_asset_id: base_token_asset_id?, + l2_legacy_shared_bridge: l2_legacy_shared_bridge?, + // Note, that some chains may not contain previous deployment of L2 wrapped base + // token. For those, zero address is used. + l2_predeployed_wrapped_base_token: predeployed_l2_weth_address.unwrap_or_default(), + base_token_l1_address: base_token_l1_address?, + base_token_name: base_token_name?, + base_token_symbol: base_token_symbol?, + }) + } + + /// ABI schema of the `ZkChainSpecificUpgradeData`. + pub fn schema() -> ParamType { + ParamType::Tuple(vec![ + ParamType::FixedBytes(32), + ParamType::Address, + ParamType::Address, + ]) + } + + /// Encodes `ZkChainSpecificUpgradeData` to a RLP token. + pub fn encode(&self) -> Token { + Token::Tuple(vec![ + Token::FixedBytes(self.base_token_asset_id.0.to_vec()), + Token::Address(self.l2_legacy_shared_bridge), + Token::Address(self.l2_predeployed_wrapped_base_token), + Token::Address(self.base_token_l1_address), + Token::String(self.base_token_name.clone()), + Token::String(self.base_token_symbol.clone()), + ]) + } + + pub fn encode_bytes(&self) -> Vec { + ethabi::encode(&[self.encode()]) + } +} diff --git a/core/lib/types/src/block.rs b/core/lib/types/src/block.rs index c4fd3306f2d5..4ae665656ee1 100644 --- a/core/lib/types/src/block.rs +++ b/core/lib/types/src/block.rs @@ -63,20 +63,22 @@ pub struct L1BatchHeader { pub protocol_version: Option, pub pubdata_input: Option>, pub fee_address: Address, + pub batch_fee_input: BatchFeeInput, } impl L1BatchHeader { - pub fn to_unsealed_header(&self, fee_input: BatchFeeInput) -> UnsealedL1BatchHeader { + pub fn to_unsealed_header(&self) -> UnsealedL1BatchHeader { UnsealedL1BatchHeader { number: self.number, timestamp: self.timestamp, protocol_version: self.protocol_version, fee_address: self.fee_address, - fee_input, + fee_input: self.batch_fee_input, } } } +/// Holder for the metadata that 
is relevant for unsealed batches. #[derive(Debug, Clone, PartialEq)] pub struct UnsealedL1BatchHeader { pub number: L1BatchNumber, @@ -86,6 +88,16 @@ pub struct UnsealedL1BatchHeader { pub fee_input: BatchFeeInput, } +/// Holder for the metadata that is relevant for both sealed and unsealed batches. +pub struct CommonL1BatchHeader { + pub number: L1BatchNumber, + pub is_sealed: bool, + pub timestamp: u64, + pub protocol_version: Option, + pub fee_address: Address, + pub fee_input: BatchFeeInput, +} + /// Holder for the L2 block metadata that is not available from transactions themselves. #[derive(Debug, Clone, PartialEq)] pub struct L2BlockHeader { @@ -153,6 +165,7 @@ impl L1BatchHeader { protocol_version: Some(protocol_version), pubdata_input: Some(vec![]), fee_address: Default::default(), + batch_fee_input: BatchFeeInput::pubdata_independent(0, 0, 0), } } diff --git a/core/lib/types/src/commitment/mod.rs b/core/lib/types/src/commitment/mod.rs index 786ce03e671d..9aef6b14a0f2 100644 --- a/core/lib/types/src/commitment/mod.rs +++ b/core/lib/types/src/commitment/mod.rs @@ -9,7 +9,7 @@ use std::{collections::HashMap, convert::TryFrom}; use serde::{Deserialize, Serialize}; -pub use zksync_basic_types::commitment::{L1BatchCommitmentMode, PubdataParams}; +pub use zksync_basic_types::commitment::{L1BatchCommitmentMode, PubdataParams, PubdataType}; use zksync_contracts::BaseSystemContractsHashes; use zksync_crypto_primitives::hasher::{keccak::KeccakHasher, Hasher}; use zksync_mini_merkle_tree::MiniMerkleTree; @@ -112,6 +112,7 @@ pub struct L1BatchMetadata { pub aux_data_hash: H256, pub meta_parameters_hash: H256, pub pass_through_data_hash: H256, + /// The commitment to the final events queue state after the batch is committed. /// Practically, it is a commitment to all events that happened on L2 during the batch execution. 
pub events_queue_commitment: Option, diff --git a/core/lib/types/src/debug_flat_call.rs b/core/lib/types/src/debug_flat_call.rs index 3488b0e5b42c..2f130019446a 100644 --- a/core/lib/types/src/debug_flat_call.rs +++ b/core/lib/types/src/debug_flat_call.rs @@ -49,4 +49,11 @@ pub struct CallTraceMeta { pub tx_hash: H256, pub block_number: u32, pub block_hash: H256, + /// Error message associated with the transaction in the sequencer database. + /// Can be used to identify a failed transaction if error information is not + /// recorded otherwise (e.g. out-of-gas errors in early protocol versions). + /// + /// Should be seen as a fallback value (e.g. if the trace doesn't contain the error + /// or revert reason). + pub internal_error: Option, } diff --git a/core/lib/types/src/fee_model.rs b/core/lib/types/src/fee_model.rs index 79515e6f63a9..414295071746 100644 --- a/core/lib/types/src/fee_model.rs +++ b/core/lib/types/src/fee_model.rs @@ -45,6 +45,30 @@ impl BatchFeeInput { fair_pubdata_price, }) } + + pub fn from_protocol_version( + protocol_version: Option, + l1_gas_price: u64, + fair_l2_gas_price: u64, + fair_pubdata_price: Option, + ) -> Self { + protocol_version + .filter(|version: &ProtocolVersionId| version.is_post_1_4_1()) + .map(|_| { + Self::PubdataIndependent(PubdataIndependentBatchFeeModelInput { + fair_pubdata_price: fair_pubdata_price + .expect("No fair pubdata price for 1.4.1"), + fair_l2_gas_price, + l1_gas_price, + }) + }) + .unwrap_or_else(|| { + Self::L1Pegged(L1PeggedBatchFeeModelInput { + fair_l2_gas_price, + l1_gas_price, + }) + }) + } } impl Default for BatchFeeInput { diff --git a/core/lib/types/src/protocol_upgrade.rs b/core/lib/types/src/protocol_upgrade.rs index 4ea007239103..324650c97e21 100644 --- a/core/lib/types/src/protocol_upgrade.rs +++ b/core/lib/types/src/protocol_upgrade.rs @@ -2,20 +2,20 @@ use std::convert::{TryFrom, TryInto}; use anyhow::Context as _; use serde::{Deserialize, Serialize}; -use zksync_basic_types::{ - ethabi, - 
protocol_version::{ - L1VerifierConfig, ProtocolSemanticVersion, ProtocolVersionId, VerifierParams, - }, -}; -use zksync_contracts::{ - BaseSystemContractsHashes, ADMIN_EXECUTE_UPGRADE_FUNCTION, - ADMIN_UPGRADE_CHAIN_FROM_VERSION_FUNCTION, DIAMOND_CUT, +use zksync_basic_types::protocol_version::{ + L1VerifierConfig, ProtocolSemanticVersion, ProtocolVersionId, VerifierParams, }; +use zksync_contracts::{BaseSystemContractsHashes, DIAMOND_CUT}; use crate::{ - abi, ethabi::ParamType, h256_to_u256, web3::Log, Address, Execute, ExecuteTransactionCommon, - Transaction, TransactionType, H256, U256, + abi::{ + self, ForceDeployment, GatewayUpgradeEncodedInput, ProposedUpgrade, + ZkChainSpecificUpgradeData, + }, + ethabi::{self, decode, encode, ParamType, Token}, + h256_to_u256, u256_to_h256, + web3::Log, + Address, Execute, ExecuteTransactionCommon, Transaction, TransactionType, H256, U256, }; /// Represents a call to be made during governance operation. @@ -93,29 +93,155 @@ impl From for VerifierParams { } } +/// Protocol upgrade transactions do not contain preimages within them. +/// Instead, they are expected to be known and need to be fetched, typically from L1. +#[async_trait::async_trait] +pub trait ProtocolUpgradePreimageOracle: Send + Sync { + async fn get_protocol_upgrade_preimages( + &self, + hashes: Vec, + ) -> anyhow::Result>>; +} + +/// Some upgrades have chain-dependent calldata that has to be prepared properly. 
+async fn prepare_upgrade_call( + proposed_upgrade: &ProposedUpgrade, + chain_specific: Option, +) -> anyhow::Result> { + // No upgrade + if proposed_upgrade.l2_protocol_upgrade_tx.tx_type == U256::zero() { + return Ok(vec![]); + } + + let minor_version = proposed_upgrade.l2_protocol_upgrade_tx.nonce; + if ProtocolVersionId::try_from(minor_version.as_u32() as u16).unwrap() + != ProtocolVersionId::gateway_upgrade() + { + // We'll just keep it the same for non-Gateway upgrades + return Ok(proposed_upgrade.l2_protocol_upgrade_tx.data.clone()); + } + + // For gateway upgrade, things are bit more complex. + // The source of truth for the code below is the one that is present in + // `GatewayUpgrade.sol`. + let mut encoded_input = GatewayUpgradeEncodedInput::decode( + decode( + &[GatewayUpgradeEncodedInput::schema()], + &proposed_upgrade.post_upgrade_calldata, + )?[0] + .clone(), + )?; + + let gateway_upgrade_calldata = encode(&[ + Token::Address(encoded_input.ctm_deployer), + Token::Bytes(encoded_input.fixed_force_deployments_data), + Token::Bytes(chain_specific.context("chain_specific")?.encode_bytes()), + ]); + + // May not be very idiomatic, but we do it in the same way as it was done in Solidity + // for easier review + encoded_input.force_deployments[encoded_input.l2_gateway_upgrade_position].input = + gateway_upgrade_calldata; + + let force_deployments_as_tokens: Vec<_> = encoded_input + .force_deployments + .iter() + .map(ForceDeployment::encode) + .collect(); + + let full_data = zksync_contracts::deployer_contract() + .function("forceDeployOnAddresses") + .unwrap() + .encode_input(&[Token::Array(force_deployments_as_tokens)]) + .unwrap(); + + Ok(full_data) +} + impl ProtocolUpgrade { - pub fn try_from_diamond_cut(diamond_cut_data: &[u8]) -> anyhow::Result { + pub async fn try_from_diamond_cut( + diamond_cut_data: &[u8], + preimage_oracle: impl ProtocolUpgradePreimageOracle, + chain_specific: Option, + ) -> anyhow::Result { // Unwraps are safe because we have 
validated the input against the function signature. let diamond_cut_tokens = DIAMOND_CUT.decode_input(diamond_cut_data)?[0] .clone() .into_tuple() .unwrap(); - Self::try_from_init_calldata(&diamond_cut_tokens[2].clone().into_bytes().unwrap()) + Self::try_from_init_calldata( + &diamond_cut_tokens[2].clone().into_bytes().unwrap(), + preimage_oracle, + chain_specific, + ) + .await } /// `l1-contracts/contracts/state-transition/libraries/diamond.sol:DiamondCutData.initCalldata` - fn try_from_init_calldata(init_calldata: &[u8]) -> anyhow::Result { - let upgrade = ethabi::decode( - &[abi::ProposedUpgrade::schema()], + async fn try_from_init_calldata( + init_calldata: &[u8], + preimage_oracle: impl ProtocolUpgradePreimageOracle, + chain_specific: Option, + ) -> anyhow::Result { + let upgrade = if let Ok(upgrade) = ethabi::decode( + &[abi::ProposedUpgrade::schema_pre_gateway()], init_calldata.get(4..).context("need >= 4 bytes")?, - ) - .context("ethabi::decode()")?; - let upgrade = abi::ProposedUpgrade::decode(upgrade.into_iter().next().unwrap()).unwrap(); + ) { + upgrade + } else { + ethabi::decode( + &[abi::ProposedUpgrade::schema_post_gateway()], + init_calldata.get(4..).context("need >= 4 bytes")?, + ) + .context("ethabi::decode()")? 
+ }; + + let mut upgrade = abi::ProposedUpgrade::decode(upgrade.into_iter().next().unwrap()) + .context("ProposedUpgrade::decode()")?; + let bootloader_hash = H256::from_slice(&upgrade.bootloader_hash); let default_account_hash = H256::from_slice(&upgrade.default_account_hash); + + let version = ProtocolSemanticVersion::try_from_packed(upgrade.new_protocol_version) + .map_err(|err| anyhow::format_err!("Version is not supported: {err}"))?; + let tx = if upgrade.l2_protocol_upgrade_tx.tx_type != U256::zero() { + let factory_deps = if version.minor.is_pre_gateway() { + upgrade.factory_deps.clone().unwrap() + } else { + preimage_oracle + .get_protocol_upgrade_preimages( + upgrade + .l2_protocol_upgrade_tx + .factory_deps + .iter() + .map(|&x| u256_to_h256(x)) + .collect(), + ) + .await? + }; + + upgrade.l2_protocol_upgrade_tx.data = + prepare_upgrade_call(&upgrade, chain_specific).await?; + + Some( + Transaction::from_abi( + abi::Transaction::L1 { + tx: upgrade.l2_protocol_upgrade_tx, + factory_deps, + eth_block: 0, + }, + false, + ) + .context("Transaction::try_from()")? 
+ .try_into() + .map_err(|err| anyhow::format_err!("try_into::(): {err}"))?, + ) + } else { + None + }; + Ok(Self { - version: ProtocolSemanticVersion::try_from_packed(upgrade.new_protocol_version) - .map_err(|err| anyhow::format_err!("Version is not supported: {err}"))?, + version, bootloader_code_hash: (bootloader_hash != H256::zero()).then_some(bootloader_hash), default_account_code_hash: (default_account_hash != H256::zero()) .then_some(default_account_hash), @@ -124,31 +250,35 @@ impl ProtocolUpgrade { .then_some(upgrade.verifier_params.into()), verifier_address: (upgrade.verifier != Address::zero()).then_some(upgrade.verifier), timestamp: upgrade.upgrade_timestamp.try_into().unwrap(), - tx: (upgrade.l2_protocol_upgrade_tx.tx_type != U256::zero()) - .then(|| { - Transaction::from_abi( - abi::Transaction::L1 { - tx: upgrade.l2_protocol_upgrade_tx, - factory_deps: upgrade.factory_deps, - eth_block: 0, - }, - true, - ) - .context("Transaction::try_from()")? - .try_into() - .map_err(|err| anyhow::format_err!("try_into::(): {err}")) - }) - .transpose()?, + tx, }) } } -pub fn decode_set_chain_id_event( +pub fn decode_genesis_upgrade_event( event: Log, ) -> Result<(ProtocolVersionId, ProtocolUpgradeTx), ethabi::Error> { - let tx = ethabi::decode(&[abi::L2CanonicalTransaction::schema()], &event.data.0)?; - let tx = abi::L2CanonicalTransaction::decode(tx.into_iter().next().unwrap()).unwrap(); - + let tokens = ethabi::decode( + &[ + abi::L2CanonicalTransaction::schema(), + ParamType::Array(Box::new(ParamType::Bytes)), + ], + &event.data.0, + )?; + let mut t: std::vec::IntoIter = tokens.into_iter(); + let mut next = || t.next().unwrap(); + + let tx = abi::L2CanonicalTransaction::decode(next()).unwrap(); + let factory_deps = next() + .into_array() + .context("factory_deps") + .map_err(|_| ethabi::Error::InvalidData)? 
+ .into_iter() + .enumerate() + .map(|(i, t)| t.into_bytes().context(i)) + .collect::>, _>>() + .context("factory_deps") + .map_err(|_| ethabi::Error::InvalidData)?; let full_version_id = h256_to_u256(event.topics[2]); let protocol_version = ProtocolVersionId::try_from_packed_semver(full_version_id) .unwrap_or_else(|_| panic!("Version is not supported, packed version: {full_version_id}")); @@ -157,8 +287,11 @@ pub fn decode_set_chain_id_event( Transaction::from_abi( abi::Transaction::L1 { tx: tx.into(), - eth_block: 0, - factory_deps: vec![], + eth_block: event + .block_number + .expect("Event block number is missing") + .as_u64(), + factory_deps, }, true, ) @@ -168,50 +301,6 @@ pub fn decode_set_chain_id_event( )) } -impl TryFrom for ProtocolUpgrade { - type Error = anyhow::Error; - - fn try_from(call: Call) -> Result { - anyhow::ensure!(call.data.len() >= 4); - let (signature, data) = call.data.split_at(4); - - let diamond_cut_tokens = - if signature.to_vec() == ADMIN_EXECUTE_UPGRADE_FUNCTION.short_signature().to_vec() { - // Unwraps are safe, because we validate the input against the function signature. - ADMIN_EXECUTE_UPGRADE_FUNCTION - .decode_input(data)? - .pop() - .unwrap() - .into_tuple() - .unwrap() - } else if signature.to_vec() - == ADMIN_UPGRADE_CHAIN_FROM_VERSION_FUNCTION - .short_signature() - .to_vec() - { - let mut data = ADMIN_UPGRADE_CHAIN_FROM_VERSION_FUNCTION.decode_input(data)?; - - assert_eq!( - data.len(), - 2, - "The second method is expected to accept exactly 2 arguments" - ); - - // The second item must be a tuple of diamond cut data - // Unwraps are safe, because we validate the input against the function signature. - data.pop().unwrap().into_tuple().unwrap() - } else { - anyhow::bail!("unknown function"); - }; - - ProtocolUpgrade::try_from_init_calldata( - // Unwrap is safe because we have validated the input against the function signature. 
- &diamond_cut_tokens[2].clone().into_bytes().unwrap(), - ) - .context("ProtocolUpgrade::try_from_init_calldata()") - } -} - impl TryFrom for GovernanceOperation { type Error = crate::ethabi::Error; diff --git a/core/lib/types/src/system_contracts.rs b/core/lib/types/src/system_contracts.rs index 4d1ff9b554ea..28d8def59277 100644 --- a/core/lib/types/src/system_contracts.rs +++ b/core/lib/types/src/system_contracts.rs @@ -4,8 +4,10 @@ use zksync_basic_types::{AccountTreeId, Address, U256}; use zksync_contracts::{read_sys_contract_bytecode, ContractLanguage, SystemContractsRepo}; use zksync_system_constants::{ BOOTLOADER_UTILITIES_ADDRESS, CODE_ORACLE_ADDRESS, COMPRESSOR_ADDRESS, CREATE2_FACTORY_ADDRESS, - EVENT_WRITER_ADDRESS, EVM_GAS_MANAGER_ADDRESS, P256VERIFY_PRECOMPILE_ADDRESS, - PUBDATA_CHUNK_PUBLISHER_ADDRESS, + EVENT_WRITER_ADDRESS, EVM_GAS_MANAGER_ADDRESS, L2_ASSET_ROUTER_ADDRESS, L2_BRIDGEHUB_ADDRESS, + L2_GENESIS_UPGRADE_ADDRESS, L2_MESSAGE_ROOT_ADDRESS, L2_NATIVE_TOKEN_VAULT_ADDRESS, + L2_WRAPPED_BASE_TOKEN_IMPL, P256VERIFY_PRECOMPILE_ADDRESS, PUBDATA_CHUNK_PUBLISHER_ADDRESS, + SLOAD_CONTRACT_ADDRESS, }; use crate::{ @@ -25,7 +27,7 @@ use crate::{ pub const TX_NONCE_INCREMENT: U256 = U256([1, 0, 0, 0]); // 1 pub const DEPLOYMENT_NONCE_INCREMENT: U256 = U256([0, 0, 1, 0]); // 2^128 -static SYSTEM_CONTRACT_LIST: [(&str, &str, Address, ContractLanguage); 26] = [ +static SYSTEM_CONTRACT_LIST: [(&str, &str, Address, ContractLanguage); 33] = [ ( "", "AccountCodeStorage", @@ -174,6 +176,48 @@ static SYSTEM_CONTRACT_LIST: [(&str, &str, Address, ContractLanguage); 26] = [ CREATE2_FACTORY_ADDRESS, ContractLanguage::Sol, ), + ( + "", + "L2GenesisUpgrade", + L2_GENESIS_UPGRADE_ADDRESS, + ContractLanguage::Sol, + ), + ( + "../../l1-contracts/zkout/", + "Bridgehub", + L2_BRIDGEHUB_ADDRESS, + ContractLanguage::Sol, + ), + ( + "../../l1-contracts/zkout/", + "MessageRoot", + L2_MESSAGE_ROOT_ADDRESS, + ContractLanguage::Sol, + ), + ( + "../../l1-contracts/zkout/", + 
"L2AssetRouter", + L2_ASSET_ROUTER_ADDRESS, + ContractLanguage::Sol, + ), + ( + "../../l1-contracts/zkout/", + "L2NativeTokenVault", + L2_NATIVE_TOKEN_VAULT_ADDRESS, + ContractLanguage::Sol, + ), + ( + "", + "SloadContract", + SLOAD_CONTRACT_ADDRESS, + ContractLanguage::Sol, + ), + ( + "../../l1-contracts/zkout/", + "L2WrappedBaseToken", + L2_WRAPPED_BASE_TOKEN_IMPL, + ContractLanguage::Sol, + ), ]; /// Gets default set of system contracts, based on Cargo workspace location. diff --git a/core/lib/utils/src/env.rs b/core/lib/utils/src/env.rs index 8f4aa1da9400..0da0ddbc9f70 100644 --- a/core/lib/utils/src/env.rs +++ b/core/lib/utils/src/env.rs @@ -11,11 +11,9 @@ static WORKSPACE: OnceCell> = OnceCell::new(); /// Represents Cargo workspaces available in the repository. #[derive(Debug, Clone, Copy)] pub enum Workspace<'a> { - /// Workspace was not found. - /// Assumes that the code is running in a binary. - /// Will use the current directory as a fallback. - None, /// Root folder. + Root, + /// `core` folder. Core(&'a Path), /// `prover` folder. Prover(&'a Path), @@ -42,21 +40,30 @@ impl Workspace<'static> { result.ok() }) .as_deref(); - path.map_or(Self::None, Self::from) + path.map_or(Self::Root, Self::from) } } impl<'a> Workspace<'a> { + const CORE_DIRECTORY_NAME: &'static str = "core"; const PROVER_DIRECTORY_NAME: &'static str = "prover"; const ZKSTACK_CLI_DIRECTORY_NAME: &'static str = "zkstack_cli"; - /// Returns the path of the core workspace. - /// For `Workspace::None`, considers the current directory to represent core workspace. + /// Returns the path of the repository root. + pub fn root(self) -> PathBuf { + match self { + Self::Root => PathBuf::from("."), + Self::Core(path) | Self::Prover(path) | Self::ZkStackCli(path) => { + path.parent().unwrap().into() + } + } + } + + /// Returns the path of the `core` workspace. 
pub fn core(self) -> PathBuf { match self { - Self::None => PathBuf::from("."), Self::Core(path) => path.into(), - Self::Prover(path) | Self::ZkStackCli(path) => path.parent().unwrap().into(), + _ => self.root().join(Self::CORE_DIRECTORY_NAME), } } @@ -64,7 +71,7 @@ impl<'a> Workspace<'a> { pub fn prover(self) -> PathBuf { match self { Self::Prover(path) => path.into(), - _ => self.core().join(Self::PROVER_DIRECTORY_NAME), + _ => self.root().join(Self::PROVER_DIRECTORY_NAME), } } @@ -72,7 +79,7 @@ impl<'a> Workspace<'a> { pub fn zkstack_cli(self) -> PathBuf { match self { Self::ZkStackCli(path) => path.into(), - _ => self.core().join(Self::ZKSTACK_CLI_DIRECTORY_NAME), + _ => self.root().join(Self::ZKSTACK_CLI_DIRECTORY_NAME), } } } @@ -83,8 +90,10 @@ impl<'a> From<&'a Path> for Workspace<'a> { Self::Prover(path) } else if path.ends_with(Self::ZKSTACK_CLI_DIRECTORY_NAME) { Self::ZkStackCli(path) - } else { + } else if path.ends_with(Self::CORE_DIRECTORY_NAME) { Self::Core(path) + } else { + Self::Root } } } @@ -150,7 +159,6 @@ mod tests { let _pwd_protector = PwdProtector::new(); // Core. 
- let workspace = Workspace::locate(); assert_matches!(workspace, Workspace::Core(_)); let core_path = workspace.core(); diff --git a/core/lib/vm_executor/src/batch/factory.rs b/core/lib/vm_executor/src/batch/factory.rs index 9797e1681032..5e375d1f3062 100644 --- a/core/lib/vm_executor/src/batch/factory.rs +++ b/core/lib/vm_executor/src/batch/factory.rs @@ -8,8 +8,8 @@ use zksync_multivm::{ executor::{BatchExecutor, BatchExecutorFactory}, pubdata::PubdataBuilder, storage::{ReadStorage, StoragePtr, StorageView, StorageViewStats}, - utils::DivergenceHandler, - BatchTransactionExecutionResult, BytecodeCompressionError, CompressedBytecodeInfo, + utils::{DivergenceHandler, ShadowMut}, + BatchTransactionExecutionResult, BytecodeCompressionError, Call, CompressedBytecodeInfo, ExecutionResult, FinishedL1Batch, Halt, L1BatchEnv, L2BlockEnv, SystemEnv, VmFactory, VmInterface, VmInterfaceHistoryEnabled, }, @@ -28,6 +28,23 @@ use super::{ }; use crate::shared::{InteractionType, Sealed, STORAGE_METRICS}; +#[doc(hidden)] +pub trait CallTracingTracer: vm_fast::interface::Tracer + Default { + fn into_traces(self) -> Vec; +} + +impl CallTracingTracer for () { + fn into_traces(self) -> Vec { + vec![] + } +} + +impl CallTracingTracer for vm_fast::CallTracer { + fn into_traces(self) -> Vec { + self.into_result() + } +} + /// Encapsulates a tracer used during batch processing. Currently supported tracers are `()` (no-op) and [`TraceCalls`]. /// /// All members of this trait are implementation details. @@ -37,7 +54,7 @@ pub trait BatchTracer: fmt::Debug + 'static + Send + Sealed { const TRACE_CALLS: bool; /// Tracer for the fast VM. 
#[doc(hidden)] - type Fast: vm_fast::interface::Tracer + Default; + type Fast: CallTracingTracer; } impl Sealed for () {} @@ -56,7 +73,7 @@ impl Sealed for TraceCalls {} impl BatchTracer for TraceCalls { const TRACE_CALLS: bool = true; - type Fast = (); // TODO: change once call tracing is implemented in fast VM + type Fast = vm_fast::CallTracer; } /// The default implementation of [`BatchExecutorFactory`]. @@ -213,13 +230,14 @@ impl BatchVm { tx: Transaction, with_compression: bool, ) -> BatchTransactionExecutionResult { - let call_tracer_result = Arc::new(OnceCell::default()); + let legacy_tracer_result = Arc::new(OnceCell::default()); let legacy_tracer = if Tr::TRACE_CALLS { - vec![CallTracer::new(call_tracer_result.clone()).into_tracer_pointer()] + vec![CallTracer::new(legacy_tracer_result.clone()).into_tracer_pointer()] } else { vec![] }; let mut legacy_tracer = legacy_tracer.into(); + let mut fast_traces = vec![]; let (compression_result, tx_result) = match self { Self::Legacy(vm) => vm.inspect_transaction_with_bytecode_compression( @@ -228,16 +246,35 @@ impl BatchVm { with_compression, ), Self::Fast(vm) => { - let mut tracer = (legacy_tracer.into(), Default::default()); - vm.inspect_transaction_with_bytecode_compression(&mut tracer, tx, with_compression) + let mut tracer = (legacy_tracer.into(), (Tr::Fast::default(), ())); + let res = vm.inspect_transaction_with_bytecode_compression( + &mut tracer, + tx, + with_compression, + ); + let (_, (call_tracer, _)) = tracer; + fast_traces = call_tracer.into_traces(); + res } }; let compressed_bytecodes = compression_result.map(Cow::into_owned); - let call_traces = Arc::try_unwrap(call_tracer_result) + let legacy_traces = Arc::try_unwrap(legacy_tracer_result) .expect("failed extracting call traces") .take() .unwrap_or_default(); + let call_traces = match self { + Self::Legacy(_) => legacy_traces, + Self::Fast(FastVmInstance::Fast(_)) => fast_traces, + Self::Fast(FastVmInstance::Shadowed(vm)) => { + 
vm.get_custom_mut("call_traces", |r| match r { + ShadowMut::Main(_) => legacy_traces.as_slice(), + ShadowMut::Shadow(_) => fast_traces.as_slice(), + }); + fast_traces + } + }; + BatchTransactionExecutionResult { tx_result: Box::new(tx_result), compressed_bytecodes, diff --git a/core/lib/vm_executor/src/oneshot/contracts.rs b/core/lib/vm_executor/src/oneshot/contracts.rs index 257ede5a7c7c..d67d1dfbc662 100644 --- a/core/lib/vm_executor/src/oneshot/contracts.rs +++ b/core/lib/vm_executor/src/oneshot/contracts.rs @@ -106,10 +106,8 @@ impl MultiVmBaseSystemContracts { ProtocolVersionId::Version21 | ProtocolVersionId::Version22 => &self.post_1_4_2, ProtocolVersionId::Version23 => &self.vm_1_5_0_small_memory, ProtocolVersionId::Version24 => &self.vm_1_5_0_increased_memory, - ProtocolVersionId::Version25 | ProtocolVersionId::Version26 => { - &self.vm_protocol_defense - } - ProtocolVersionId::Version27 => &self.gateway, + ProtocolVersionId::Version25 => &self.vm_protocol_defense, + ProtocolVersionId::Version26 | ProtocolVersionId::Version27 => &self.gateway, ProtocolVersionId::Version28 => unreachable!("Version 28 is not supported yet"), }; let base = base.clone(); diff --git a/core/lib/vm_executor/src/storage.rs b/core/lib/vm_executor/src/storage.rs index e5a2d404233b..96b0d60ed122 100644 --- a/core/lib/vm_executor/src/storage.rs +++ b/core/lib/vm_executor/src/storage.rs @@ -3,13 +3,13 @@ use std::time::{Duration, Instant}; use anyhow::Context; -use zksync_contracts::BaseSystemContracts; +use zksync_contracts::{BaseSystemContracts, SystemContractCode}; use zksync_dal::{Connection, Core, CoreDal, DalError}; use zksync_multivm::interface::{L1BatchEnv, L2BlockEnv, SystemEnv, TxExecutionMode}; use zksync_types::{ - block::L2BlockHeader, commitment::PubdataParams, fee_model::BatchFeeInput, - snapshots::SnapshotRecoveryStatus, Address, L1BatchNumber, L2BlockNumber, L2ChainId, - ProtocolVersionId, H256, ZKPORTER_IS_AVAILABLE, + block::L2BlockHeader, bytecode::BytecodeHash, 
commitment::PubdataParams, + fee_model::BatchFeeInput, snapshots::SnapshotRecoveryStatus, Address, L1BatchNumber, + L2BlockNumber, L2ChainId, ProtocolVersionId, H256, ZKPORTER_IS_AVAILABLE, }; const BATCH_COMPUTATIONAL_GAS_LIMIT: u32 = u32::MAX; @@ -308,15 +308,15 @@ impl L1BatchParamsProvider { ); let contract_hashes = first_l2_block_in_batch.header.base_system_contracts_hashes; - let base_system_contracts = storage - .factory_deps_dal() - .get_base_system_contracts( - contract_hashes.bootloader, - contract_hashes.default_aa, - contract_hashes.evm_emulator, - ) - .await - .context("failed getting base system contracts")?; + let base_system_contracts = get_base_system_contracts( + storage, + first_l2_block_in_batch.header.protocol_version, + contract_hashes.bootloader, + contract_hashes.default_aa, + contract_hashes.evm_emulator, + ) + .await + .context("failed getting base system contracts")?; let (system_env, l1_batch_env) = l1_batch_params( first_l2_block_in_batch.l1_batch_number, @@ -373,3 +373,96 @@ impl L1BatchParamsProvider { .map(Some) } } + +async fn get_base_system_contracts( + storage: &mut Connection<'_, Core>, + protocol_version: Option, + bootloader_hash: H256, + default_aa_hash: H256, + evm_simulator_hash: Option, +) -> anyhow::Result { + // There are two potential sources of base contracts bytecode: + // - Factory deps table in case the upgrade transaction has been executed before. + // - Factory deps of the upgrade transaction. + + // Firstly trying from factory deps + if let Some(deps) = storage + .factory_deps_dal() + .get_base_system_contracts_from_factory_deps( + bootloader_hash, + default_aa_hash, + evm_simulator_hash, + ) + .await? + { + return Ok(deps); + } + + let protocol_version = protocol_version.context("Protocol version not provided")?; + + let upgrade_tx = storage + .protocol_versions_dal() + .get_protocol_upgrade_tx(protocol_version) + .await? 
+ .with_context(|| { + format!("Could not find base contracts for version {protocol_version:?}: bootloader {bootloader_hash:?} or {default_aa_hash:?}") + })?; + + anyhow::ensure!( + upgrade_tx.execute.factory_deps.len() >= 2, + "Upgrade transaction does not have enough factory deps" + ); + + let bootloader_preimage = upgrade_tx.execute.factory_deps[0].clone(); + let default_aa_preimage = upgrade_tx.execute.factory_deps[1].clone(); + + anyhow::ensure!( + BytecodeHash::for_bytecode(&bootloader_preimage).value() == bootloader_hash, + "Bootloader hash mismatch" + ); + anyhow::ensure!( + BytecodeHash::for_bytecode(&default_aa_preimage).value() == default_aa_hash, + "Default account hash mismatch" + ); + + if evm_simulator_hash.is_some() { + // TODO(EVM-933): support EVM emulator. + panic!("EVM simulator not supported as part of gateway upgrade"); + } + + Ok(BaseSystemContracts { + bootloader: SystemContractCode { + code: bootloader_preimage, + hash: bootloader_hash, + }, + default_aa: SystemContractCode { + code: default_aa_preimage, + hash: default_aa_hash, + }, + evm_emulator: None, + }) +} + +pub async fn get_base_system_contracts_by_version_id( + storage: &mut Connection<'_, Core>, + version_id: ProtocolVersionId, +) -> anyhow::Result> { + let hashes = storage + .protocol_versions_dal() + .get_base_system_contract_hashes_by_version_id(version_id) + .await?; + let Some(hashes) = hashes else { + return Ok(None); + }; + + Ok(Some( + get_base_system_contracts( + storage, + Some(version_id), + hashes.bootloader, + hashes.default_aa, + hashes.evm_emulator, + ) + .await?, + )) +} diff --git a/core/lib/vm_interface/src/utils/shadow.rs b/core/lib/vm_interface/src/utils/shadow.rs index d6a6d16c77a0..e4a7aa51f78c 100644 --- a/core/lib/vm_interface/src/utils/shadow.rs +++ b/core/lib/vm_interface/src/utils/shadow.rs @@ -7,16 +7,19 @@ use std::{ sync::Arc, }; -use zksync_types::{StorageKey, StorageLog, StorageLogWithPreviousValue, Transaction}; +use zksync_types::{ + Address, 
StorageKey, StorageLog, StorageLogWithPreviousValue, Transaction, U256, +}; use super::dump::{DumpingVm, VmDump}; use crate::{ pubdata::PubdataBuilder, storage::{ReadStorage, StoragePtr, StorageView}, tracer::{ValidationError, ValidationTraces}, - BytecodeCompressionResult, CurrentExecutionState, FinishedL1Batch, InspectExecutionMode, - L1BatchEnv, L2BlockEnv, PushTransactionResult, SystemEnv, VmExecutionResultAndLogs, VmFactory, - VmInterface, VmInterfaceHistoryEnabled, VmTrackingContracts, + BytecodeCompressionResult, Call, CallType, CurrentExecutionState, FinishedL1Batch, + InspectExecutionMode, L1BatchEnv, L2BlockEnv, PushTransactionResult, SystemEnv, + VmExecutionResultAndLogs, VmFactory, VmInterface, VmInterfaceHistoryEnabled, + VmTrackingContracts, }; /// Handler for VM divergences. @@ -233,6 +236,64 @@ impl CheckDivergence for Result { } } +/// `PartialEq` for `Call` doesn't compare gas-related fields. Here, we do compare them. +#[derive(Debug, PartialEq)] +struct StrictCall<'a> { + r#type: CallType, + from: Address, + to: Address, + // `gas` / `parent_gas` differ between fast VM and legacy VM during validation + gas_used: u64, + value: U256, + input: &'a [u8], + output: &'a [u8], + error: Option<&'a str>, + revert_reason: Option<&'a str>, +} + +impl<'a> StrictCall<'a> { + fn flatten(calls: &'a [Call]) -> Vec { + let mut flattened = Vec::new(); + Self::flatten_inner(&mut flattened, calls); + flattened + } + + fn flatten_inner(flattened: &mut Vec, calls: &'a [Call]) { + // Depth-first, parents-before-children traversal. 
+ for call in calls { + flattened.push(Self { + r#type: call.r#type, + from: call.from, + to: call.to, + gas_used: call.gas_used, + value: call.value, + input: &call.input, + output: &call.output, + error: call.error.as_deref(), + revert_reason: call.revert_reason.as_deref(), + }); + Self::flatten_inner(flattened, &call.calls); + } + } +} + +impl CheckDivergence for [Call] { + fn check_divergence(&self, other: &Self) -> DivergenceErrors { + let this = StrictCall::flatten(self); + let other = StrictCall::flatten(other); + let mut errors = DivergenceErrors::new(); + + errors.check_match("call_traces", &this, &other); + errors + } +} + +impl CheckDivergence for &T { + fn check_divergence(&self, other: &Self) -> DivergenceErrors { + (**self).check_divergence(*other) + } +} + /// Shadowed VM that executes 2 VMs for each operation and compares their outputs. /// /// If a divergence is detected, the VM state is dumped using [a pluggable handler](Self::set_dump_handler()), @@ -526,7 +587,8 @@ impl DivergenceErrors { } } - fn extend(&mut self, from: Self) { + /// Extends this instance from another set of errors. 
+ pub fn extend(&mut self, from: Self) { self.divergences.extend(from.divergences); } diff --git a/core/lib/web3_decl/src/namespaces/unstable.rs b/core/lib/web3_decl/src/namespaces/unstable.rs index f666f02f2811..da18806d126c 100644 --- a/core/lib/web3_decl/src/namespaces/unstable.rs +++ b/core/lib/web3_decl/src/namespaces/unstable.rs @@ -38,4 +38,7 @@ pub trait UnstableNamespace { l1_batch_number: L1BatchNumber, chain_id: L2ChainId, ) -> RpcResult>; + + #[method(name = "unconfirmedTxsCount")] + async fn get_unconfirmed_txs_count(&self) -> RpcResult; } diff --git a/core/lib/zksync_core_leftovers/src/lib.rs b/core/lib/zksync_core_leftovers/src/lib.rs index 87fb7ea28f71..2bdc8094d142 100644 --- a/core/lib/zksync_core_leftovers/src/lib.rs +++ b/core/lib/zksync_core_leftovers/src/lib.rs @@ -3,7 +3,6 @@ use std::str::FromStr; use tokio::sync::oneshot; - pub mod temp_config_store; /// Sets up an interrupt handler and returns a future that resolves once an interrupt signal diff --git a/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/unstable.rs b/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/unstable.rs index cfa8c84b05b0..214e34241cf9 100644 --- a/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/unstable.rs +++ b/core/node/api_server/src/web3/backend_jsonrpsee/namespaces/unstable.rs @@ -40,4 +40,10 @@ impl UnstableNamespaceServer for UnstableNamespace { .await .map_err(|err| self.current_method().map_err(err)) } + + async fn get_unconfirmed_txs_count(&self) -> RpcResult { + self.get_unconfirmed_txs_count_impl() + .await + .map_err(|err| self.current_method().map_err(err)) + } } diff --git a/core/node/api_server/src/web3/namespaces/debug.rs b/core/node/api_server/src/web3/namespaces/debug.rs index 8e72f5b45991..180de6b273e5 100644 --- a/core/node/api_server/src/web3/namespaces/debug.rs +++ b/core/node/api_server/src/web3/namespaces/debug.rs @@ -31,13 +31,14 @@ impl DebugNamespace { pub(crate) fn map_call( call: Call, - meta: 
CallTraceMeta, + mut meta: CallTraceMeta, tracer_option: TracerConfig, ) -> CallTracerResult { match tracer_option.tracer { SupportedTracers::CallTracer => CallTracerResult::CallTrace(Self::map_default_call( call, tracer_option.tracer_config.only_top_call, + meta.internal_error, )), SupportedTracers::FlatCallTracer => { let mut calls = vec![]; @@ -47,19 +48,25 @@ impl DebugNamespace { &mut calls, &mut traces, tracer_option.tracer_config.only_top_call, - &meta, + &mut meta, ); CallTracerResult::FlatCallTrace(calls) } } } - pub(crate) fn map_default_call(call: Call, only_top_call: bool) -> DebugCall { + + pub(crate) fn map_default_call( + call: Call, + only_top_call: bool, + internal_error: Option, + ) -> DebugCall { let calls = if only_top_call { vec![] } else { + // We don't need to propagate the internal error to the nested calls. call.calls .into_iter() - .map(|call| Self::map_default_call(call, false)) + .map(|call| Self::map_default_call(call, false, None)) .collect() }; let debug_type = match call.r#type { @@ -76,7 +83,7 @@ impl DebugNamespace { value: call.value, output: web3::Bytes::from(call.output), input: web3::Bytes::from(call.input), - error: call.error, + error: call.error.or(internal_error), revert_reason: call.revert_reason, calls, } @@ -87,7 +94,7 @@ impl DebugNamespace { calls: &mut Vec, trace_address: &mut Vec, only_top_call: bool, - meta: &CallTraceMeta, + meta: &mut CallTraceMeta, ) { let subtraces = call.calls.len(); let debug_type = match call.r#type { @@ -96,16 +103,24 @@ impl DebugNamespace { CallType::NearCall => unreachable!("We have to filter our near calls before"), }; - let (result, error) = match (call.revert_reason, call.error) { - (Some(revert_reason), _) => { + // We only want to set the internal error for topmost call, so we take it. 
+ let internal_error = meta.internal_error.take(); + + let (result, error) = match (call.revert_reason, call.error, internal_error) { + (Some(revert_reason), _, _) => { // If revert_reason exists, it takes priority over VM error (None, Some(revert_reason)) } - (None, Some(vm_error)) => { + (None, Some(vm_error), _) => { // If no revert_reason but VM error exists (None, Some(vm_error)) } - (None, None) => ( + (None, None, Some(internal_error)) => { + // No VM error, but there is an error in the sequencer DB. + // Only to be set as a topmost error. + (None, Some(internal_error)) + } + (None, None, None) => ( Some(CallResult { output: web3::Bytes::from(call.output), gas_used: U256::from(call.gas_used), @@ -175,15 +190,19 @@ impl DebugNamespace { SupportedTracers::CallTracer => CallTracerBlockResult::CallTrace( call_traces .into_iter() - .map(|(call, _)| ResultDebugCall { - result: Self::map_default_call(call, options.tracer_config.only_top_call), + .map(|(call, meta)| ResultDebugCall { + result: Self::map_default_call( + call, + options.tracer_config.only_top_call, + meta.internal_error, + ), }) .collect(), ), SupportedTracers::FlatCallTracer => { let res = call_traces .into_iter() - .map(|(call, meta)| { + .map(|(call, mut meta)| { let mut traces = vec![meta.index_in_block]; let mut flat_calls = vec![]; Self::flatten_call( @@ -191,7 +210,7 @@ impl DebugNamespace { &mut flat_calls, &mut traces, options.tracer_config.only_top_call, - &meta, + &mut meta, ); ResultDebugCallFlat { tx_hash: meta.tx_hash, diff --git a/core/node/api_server/src/web3/namespaces/en.rs b/core/node/api_server/src/web3/namespaces/en.rs index b2baa8497c98..9ccecf9001b5 100644 --- a/core/node/api_server/src/web3/namespaces/en.rs +++ b/core/node/api_server/src/web3/namespaces/en.rs @@ -165,6 +165,7 @@ impl EnNamespace { .l1_transparent_proxy_admin_addr .unwrap(), l1_bytecodes_supplier_addr: self.state.api_config.l1_bytecodes_supplier_addr, + l1_wrapped_base_token_store: 
self.state.api_config.l1_wrapped_base_token_store, }) .context("Shared bridge doesn't supported")?) } diff --git a/core/node/api_server/src/web3/namespaces/unstable/mod.rs b/core/node/api_server/src/web3/namespaces/unstable/mod.rs index 47e43f10282b..c70cb6f6e0fe 100644 --- a/core/node/api_server/src/web3/namespaces/unstable/mod.rs +++ b/core/node/api_server/src/web3/namespaces/unstable/mod.rs @@ -139,4 +139,16 @@ impl UnstableNamespace { chain_id_leaf_proof_mask: chain_id_leaf_proof_mask as u64, })) } + + pub async fn get_unconfirmed_txs_count_impl(&self) -> Result { + let mut connection = self.state.acquire_connection().await?; + + let result = connection + .eth_sender_dal() + .get_unconfirmed_txs_count() + .await + .map_err(DalError::generalize)?; + + Ok(result) + } } diff --git a/core/node/api_server/src/web3/namespaces/zks.rs b/core/node/api_server/src/web3/namespaces/zks.rs index b272f7c443e9..9f7c5662a631 100644 --- a/core/node/api_server/src/web3/namespaces/zks.rs +++ b/core/node/api_server/src/web3/namespaces/zks.rs @@ -677,9 +677,7 @@ impl ZksNamespace { Ok(self .state .tx_sender - .0 - .batch_fee_input_provider - .get_batch_fee_input() + .scaled_batch_fee_input() .await? .into_pubdata_independent()) } diff --git a/core/node/api_server/src/web3/state.rs b/core/node/api_server/src/web3/state.rs index bdefd79b6dd6..1319c866968c 100644 --- a/core/node/api_server/src/web3/state.rs +++ b/core/node/api_server/src/web3/state.rs @@ -108,6 +108,7 @@ pub struct InternalApiConfig { pub estimate_gas_optimize_search: bool, pub bridge_addresses: api::BridgeAddresses, pub l1_bytecodes_supplier_addr: Option
, + pub l1_wrapped_base_token_store: Option
, pub l1_bridgehub_proxy_addr: Option
, pub l1_state_transition_proxy_addr: Option
, pub l1_transparent_proxy_admin_addr: Option
, @@ -169,6 +170,10 @@ impl InternalApiConfig { .ecosystem_contracts .as_ref() .and_then(|a| a.l1_bytecodes_supplier_addr), + l1_wrapped_base_token_store: contracts_config + .ecosystem_contracts + .as_ref() + .and_then(|a| a.l1_wrapped_base_token_store), l1_diamond_proxy_addr: contracts_config.diamond_proxy_addr, l2_testnet_paymaster_addr: contracts_config.l2_testnet_paymaster_addr, req_entities_limit: web3_config.req_entities_limit(), @@ -232,6 +237,15 @@ impl BridgeAddressesHandle { *self.0.write().await = bridge_addresses; } + pub async fn update_l1_shared_bridge(&self, l1_shared_bridge: Address) { + self.0.write().await.l1_shared_default_bridge = Some(l1_shared_bridge); + } + + pub async fn update_l2_bridges(&self, l2_shared_bridge: Address) { + self.0.write().await.l2_shared_default_bridge = Some(l2_shared_bridge); + self.0.write().await.l2_erc20_default_bridge = Some(l2_shared_bridge); + } + pub async fn read(&self) -> api::BridgeAddresses { self.0.read().await.clone() } diff --git a/core/node/api_server/src/web3/tests/debug.rs b/core/node/api_server/src/web3/tests/debug.rs index 28a22511fa98..b2aae53eaa32 100644 --- a/core/node/api_server/src/web3/tests/debug.rs +++ b/core/node/api_server/src/web3/tests/debug.rs @@ -73,7 +73,7 @@ impl HttpTest for TraceBlockTest { let expected_calls: Vec<_> = tx_result .call_traces .iter() - .map(|call| DebugNamespace::map_default_call(call.clone(), false)) + .map(|call| DebugNamespace::map_default_call(call.clone(), false, None)) .collect(); assert_eq!(result.calls, expected_calls); } @@ -216,7 +216,7 @@ impl HttpTest for TraceTransactionTest { let expected_calls: Vec<_> = tx_results[0] .call_traces .iter() - .map(|call| DebugNamespace::map_default_call(call.clone(), false)) + .map(|call| DebugNamespace::map_default_call(call.clone(), false, None)) .collect(); let result = client diff --git a/core/node/api_server/src/web3/tests/mod.rs b/core/node/api_server/src/web3/tests/mod.rs index 9c5730a23386..25405b50c508 100644 --- 
a/core/node/api_server/src/web3/tests/mod.rs +++ b/core/node/api_server/src/web3/tests/mod.rs @@ -32,7 +32,7 @@ use zksync_system_constants::{ }; use zksync_types::{ api, - block::{pack_block_info, L2BlockHasher, L2BlockHeader}, + block::{pack_block_info, L2BlockHasher, L2BlockHeader, UnsealedL1BatchHeader}, bytecode::{ testonly::{PADDED_EVM_BYTECODE, PROCESSED_EVM_BYTECODE}, BytecodeHash, @@ -397,6 +397,18 @@ async fn store_custom_l2_block( Ok(()) } +async fn open_l1_batch( + storage: &mut Connection<'_, Core>, + number: L1BatchNumber, + batch_fee_input: BatchFeeInput, +) -> anyhow::Result { + let mut header = create_l1_batch(number.0); + header.batch_fee_input = batch_fee_input; + let header = header.to_unsealed_header(); + storage.blocks_dal().insert_l1_batch(header.clone()).await?; + Ok(header) +} + async fn seal_l1_batch( storage: &mut Connection<'_, Core>, number: L1BatchNumber, diff --git a/core/node/api_server/src/web3/tests/vm.rs b/core/node/api_server/src/web3/tests/vm.rs index a82ca3b9e347..76c49f7ad6db 100644 --- a/core/node/api_server/src/web3/tests/vm.rs +++ b/core/node/api_server/src/web3/tests/vm.rs @@ -212,20 +212,44 @@ impl HttpTest for CallTest { panic!("Unexpected error: {error:?}"); } - // Check that the method handler fetches fee inputs for recent blocks. To do that, we create a new block - // with a large fee input; it should be loaded by `ApiFeeInputProvider` and override the input provided by the wrapped mock provider. - let mut block_header = create_l2_block(2); - block_header.batch_fee_input = scaled_sensible_fee_input(2.5); - store_custom_l2_block(&mut connection, &block_header, &[]).await?; + // Check that the method handler fetches fee input from the open batch. To do that, we open a new batch + // with a large fee input; it should be loaded by `ApiFeeInputProvider` and used instead of the input + // provided by the wrapped mock provider. 
+ let batch_header = open_l1_batch( + &mut connection, + L1BatchNumber(1), + scaled_sensible_fee_input(3.0), + ) + .await?; // Fee input is not scaled further as per `ApiFeeInputProvider` implementation - self.fee_input.expect_custom(block_header.batch_fee_input); - let call_request = Self::call_request(b"block=3"); - let call_result = client.call(call_request, None, None).await?; + self.fee_input.expect_custom(batch_header.fee_input); + let call_request = Self::call_request(b"block=2"); + let call_result = client.call(call_request.clone(), None, None).await?; + assert_eq!(call_result.0, b"output"); + let call_result = client + .call( + call_request, + Some(api::BlockIdVariant::BlockNumber(api::BlockNumber::Pending)), + None, + ) + .await?; + assert_eq!(call_result.0, b"output"); + + // Logic here is arguable, but we consider "latest" requests to be interested in the newly + // open batch's fee input even if the latest block was sealed in the previous batch. + let call_request = Self::call_request(b"block=1"); + let call_result = client + .call( + call_request.clone(), + Some(api::BlockIdVariant::BlockNumber(api::BlockNumber::Latest)), + None, + ) + .await?; assert_eq!(call_result.0, b"output"); let call_request_without_target = CallRequest { to: None, - ..Self::call_request(b"block=3") + ..Self::call_request(b"block=2") }; let err = client .call(call_request_without_target, None, None) @@ -728,8 +752,11 @@ impl HttpTest for TraceCallTest { pool: &ConnectionPool, ) -> anyhow::Result<()> { // Store an additional L2 block because L2 block #0 has some special processing making it work incorrectly. + // First half of the test asserts API server's behavior when there is no open batch. In other words, + // when `ApiFeeInputProvider` is forced to fetch fee params from the main fee provider. 
let mut connection = pool.connection().await?; store_l2_block(&mut connection, L2BlockNumber(1), &[]).await?; + seal_l1_batch(&mut connection, L1BatchNumber(1)).await?; self.fee_input.expect_default(Self::FEE_SCALE); let call_request = CallTest::call_request(b"pending"); @@ -775,20 +802,44 @@ impl HttpTest for TraceCallTest { panic!("Unexpected error: {error:?}"); } - // Check that the method handler fetches fee inputs for recent blocks. To do that, we create a new block - // with a large fee input; it should be loaded by `ApiFeeInputProvider` and override the input provided by the wrapped mock provider. - let mut block_header = create_l2_block(2); - block_header.batch_fee_input = scaled_sensible_fee_input(3.0); - store_custom_l2_block(&mut connection, &block_header, &[]).await?; + // Check that the method handler fetches fee input from the open batch. To do that, we open a new batch + // with a large fee input; it should be loaded by `ApiFeeInputProvider` and used instead of the input + // provided by the wrapped mock provider. 
+ let batch_header = open_l1_batch( + &mut connection, + L1BatchNumber(2), + scaled_sensible_fee_input(3.0), + ) + .await?; // Fee input is not scaled further as per `ApiFeeInputProvider` implementation - self.fee_input.expect_custom(block_header.batch_fee_input); - let call_request = CallTest::call_request(b"block=3"); + self.fee_input.expect_custom(batch_header.fee_input); + let call_request = CallTest::call_request(b"block=2"); let call_result = client.trace_call(call_request.clone(), None, None).await?; Self::assert_debug_call(&call_request, &call_result.unwrap_default()); + let call_result = client + .trace_call( + call_request.clone(), + Some(api::BlockId::Number(api::BlockNumber::Pending)), + None, + ) + .await?; + Self::assert_debug_call(&call_request, &call_result.unwrap_default()); + + // Logic here is arguable, but we consider "latest" requests to be interested in the newly + // open batch's fee input even if the latest block was sealed in the previous batch. + let call_request = CallTest::call_request(b"block=1"); + let call_result = client + .trace_call( + call_request.clone(), + Some(api::BlockId::Number(api::BlockNumber::Latest)), + None, + ) + .await?; + Self::assert_debug_call(&call_request, &call_result.unwrap_default()); let call_request_without_target = CallRequest { to: None, - ..CallTest::call_request(b"block=3") + ..CallTest::call_request(b"block=2") }; let err = client .call(call_request_without_target, None, None) @@ -897,12 +948,15 @@ impl HttpTest for TraceCallTestWithEvmEmulator { pool: &ConnectionPool, ) -> anyhow::Result<()> { // Store an additional L2 block because L2 block #0 has some special processing making it work incorrectly. + // And make sure there is no open batch so that `ApiFeeInputProvider` is forced to fetch fee params from + // the main fee provider. 
let mut connection = pool.connection().await?; let block_header = L2BlockHeader { base_system_contracts_hashes: genesis_contract_hashes(&mut connection).await?, ..create_l2_block(1) }; store_custom_l2_block(&mut connection, &block_header, &[]).await?; + seal_l1_batch(&mut connection, L1BatchNumber(1)).await?; client .trace_call(CallTest::call_request(&[]), None, None) diff --git a/core/node/base_token_adjuster/src/base_token_l1_behaviour.rs b/core/node/base_token_adjuster/src/base_token_l1_behaviour.rs index 599aba36f3e9..580083cd1682 100644 --- a/core/node/base_token_adjuster/src/base_token_l1_behaviour.rs +++ b/core/node/base_token_adjuster/src/base_token_l1_behaviour.rs @@ -151,6 +151,7 @@ impl BaseTokenL1Behaviour { }; } + // TODO(EVM-924): this logic supports only `ChainAdminOwnable`. async fn do_update_l1( &self, l1_params: &UpdateOnL1Params, diff --git a/core/node/block_reverter/src/tests.rs b/core/node/block_reverter/src/tests.rs index b2c4ee6465f6..21265e7fb0c6 100644 --- a/core/node/block_reverter/src/tests.rs +++ b/core/node/block_reverter/src/tests.rs @@ -12,6 +12,7 @@ use zksync_object_store::{Bucket, MockObjectStore}; use zksync_state::interface::ReadStorage; use zksync_types::{ block::{L1BatchHeader, L2BlockHeader}, + fee_model::BatchFeeInput, snapshots::SnapshotVersion, AccountTreeId, L2BlockNumber, ProtocolVersion, ProtocolVersionId, StorageKey, StorageLog, }; @@ -60,7 +61,7 @@ async fn setup_storage(storage: &mut Connection<'_, Core>, storage_logs: &[Stora l2_tx_count: 0, fee_account_address: Address::default(), base_fee_per_gas: 0, - batch_fee_input: Default::default(), + batch_fee_input: BatchFeeInput::pubdata_independent(0, 0, 0), gas_per_pubdata_limit: 0, base_system_contracts_hashes: Default::default(), protocol_version: Some(ProtocolVersionId::latest()), @@ -89,6 +90,7 @@ async fn setup_storage(storage: &mut Connection<'_, Core>, storage_logs: &[Stora protocol_version: Some(ProtocolVersionId::latest()), pubdata_input: None, fee_address: 
Default::default(), + batch_fee_input: BatchFeeInput::pubdata_independent(0, 0, 0), }; storage .blocks_dal() diff --git a/core/node/commitment_generator/src/lib.rs b/core/node/commitment_generator/src/lib.rs index 71b019e230a7..3c7dadb5fd30 100644 --- a/core/node/commitment_generator/src/lib.rs +++ b/core/node/commitment_generator/src/lib.rs @@ -6,6 +6,7 @@ use tokio::{sync::watch, task::JoinHandle}; use zksync_dal::{ConnectionPool, Core, CoreDal}; use zksync_health_check::{Health, HealthStatus, HealthUpdater, ReactiveHealthCheck}; use zksync_l1_contract_interface::i_executor::commit::kzg::pubdata_to_blob_commitments; +use zksync_multivm::zk_evm_latest::ethereum_types::U256; use zksync_types::{ blob::num_blobs_required, commitment::{ @@ -14,7 +15,7 @@ use zksync_types::{ }, h256_to_u256, writes::{InitialStorageWrite, RepeatedStorageWrite, StateDiffRecord}, - L1BatchNumber, ProtocolVersionId, StorageKey, H256, U256, + L1BatchNumber, ProtocolVersionId, StorageKey, H256, }; use crate::{ diff --git a/core/node/consensus/src/config.rs b/core/node/consensus/src/config.rs index 2cb6045151bd..733500998a30 100644 --- a/core/node/consensus/src/config.rs +++ b/core/node/consensus/src/config.rs @@ -42,7 +42,6 @@ pub(super) struct GenesisSpec { pub(super) chain_id: validator::ChainId, pub(super) protocol_version: validator::ProtocolVersion, pub(super) validators: validator::Committee, - pub(super) attesters: Option, pub(super) leader_selection: validator::LeaderSelectionMode, pub(super) registry_address: Option, pub(super) seed_peers: BTreeMap, @@ -54,7 +53,6 @@ impl GenesisSpec { chain_id: cfg.genesis.chain_id, protocol_version: cfg.genesis.protocol_version, validators: cfg.genesis.validators.clone(), - attesters: cfg.genesis.attesters.clone(), leader_selection: cfg.genesis.leader_selection.clone(), registry_address: cfg.registry_address, seed_peers: cfg.seed_peers.clone(), @@ -75,19 +73,6 @@ impl GenesisSpec { .collect::>() .context("validators")?; - let attesters: Vec<_> = x 
- .attesters - .iter() - .enumerate() - .map(|(i, v)| { - Ok(attester::WeightedAttester { - key: Text::new(&v.key.0).decode().context("key").context(i)?, - weight: v.weight, - }) - }) - .collect::>() - .context("attesters")?; - Ok(Self { chain_id: validator::ChainId(x.chain_id.as_u64()), protocol_version: validator::ProtocolVersion(x.protocol_version.0), @@ -95,11 +80,6 @@ impl GenesisSpec { Text::new(&x.leader.0).decode().context("leader")?, ), validators: validator::Committee::new(validators).context("validators")?, - attesters: if attesters.is_empty() { - None - } else { - Some(attester::Committee::new(attesters).context("attesters")?) - }, registry_address: x.registry_address, seed_peers: x .seed_peers diff --git a/core/node/consensus/src/en.rs b/core/node/consensus/src/en.rs index e417b68cf2cb..ffb580ce1284 100644 --- a/core/node/consensus/src/en.rs +++ b/core/node/consensus/src/en.rs @@ -178,7 +178,7 @@ impl EN { tracing::warn!("\ WARNING: this node is using ZKsync API synchronization, which will be deprecated soon. \ Please follow this instruction to switch to p2p synchronization: \ - https://github.com/matter-labs/zksync-era/blob/main/docs/guides/external-node/10_decentralization.md"); + https://github.com/matter-labs/zksync-era/blob/main/docs/src/guides/external-node/10_decentralization.md"); let res: ctx::Result<()> = scope::run!(ctx, |ctx, s| async { // Update sync state in the background. 
s.spawn_bg(self.fetch_state_loop(ctx)); @@ -208,7 +208,7 @@ impl EN { attestation: Arc, ) -> ctx::Result<()> { const POLL_INTERVAL: time::Duration = time::Duration::seconds(5); - let registry = registry::Registry::new(cfg.genesis.clone(), self.pool.clone()).await; + let registry = registry::Registry::new(self.pool.clone()).await; let mut next = attester::BatchNumber(0); loop { let status = loop { diff --git a/core/node/consensus/src/mn.rs b/core/node/consensus/src/mn.rs index a392acfbe5f0..028a635398c6 100644 --- a/core/node/consensus/src/mn.rs +++ b/core/node/consensus/src/mn.rs @@ -118,7 +118,7 @@ async fn run_attestation_controller( attestation: Arc, ) -> ctx::Result<()> { const POLL_INTERVAL: time::Duration = time::Duration::seconds(5); - let registry = registry::Registry::new(cfg.genesis, pool.clone()).await; + let registry = registry::Registry::new(pool.clone()).await; let registry_addr = cfg.registry_address.map(registry::Address::new); let mut next = attester::BatchNumber(0); loop { diff --git a/core/node/consensus/src/registry/mod.rs b/core/node/consensus/src/registry/mod.rs index 74da41309573..235389acda0e 100644 --- a/core/node/consensus/src/registry/mod.rs +++ b/core/node/consensus/src/registry/mod.rs @@ -1,7 +1,7 @@ use anyhow::Context as _; use zksync_concurrency::{ctx, error::Wrap as _}; use zksync_consensus_crypto::ByteFmt; -use zksync_consensus_roles::{attester, validator}; +use zksync_consensus_roles::attester; use crate::{storage::ConnectionPool, vm::VM}; @@ -30,22 +30,20 @@ pub type Address = crate::abi::Address; #[derive(Debug)] pub(crate) struct Registry { contract: abi::ConsensusRegistry, - genesis: validator::Genesis, vm: VM, } impl Registry { - pub async fn new(genesis: validator::Genesis, pool: ConnectionPool) -> Self { + pub async fn new(pool: ConnectionPool) -> Self { Self { contract: abi::ConsensusRegistry::load(), - genesis, vm: VM::new(pool).await, } } /// Attester committee for the given batch. 
/// It reads committee from the contract. - /// Falls back to committee specified in the genesis. + /// Falls back to empty committee. pub async fn attester_committee_for( &self, ctx: &ctx::Ctx, @@ -57,7 +55,7 @@ impl Registry { return Ok(None); }; let Some(address) = address else { - return Ok(self.genesis.attesters.clone()); + return Ok(None); }; let raw = self .vm diff --git a/core/node/consensus/src/registry/tests.rs b/core/node/consensus/src/registry/tests.rs index 15329077a651..736b6c9b3ac7 100644 --- a/core/node/consensus/src/registry/tests.rs +++ b/core/node/consensus/src/registry/tests.rs @@ -1,6 +1,6 @@ use rand::Rng as _; use zksync_concurrency::{ctx, scope, time}; -use zksync_consensus_roles::{attester, validator::testonly::Setup}; +use zksync_consensus_roles::attester; use zksync_test_contracts::Account; use zksync_types::ProtocolVersionId; @@ -26,21 +26,20 @@ async fn test_attester_committee() { zksync_concurrency::testonly::abort_on_panic(); let ctx = &ctx::test_root(&ctx::RealClock); let rng = &mut ctx.rng(); - let setup = Setup::new(rng, 10); let account = &mut Account::random(); let to_fund = &[account.address]; scope::run!(ctx, |ctx, s| async { let pool = ConnectionPool::test(false, ProtocolVersionId::latest()).await; - let registry = Registry::new(setup.genesis.clone(), pool.clone()).await; + let registry = Registry::new(pool.clone()).await; // If the registry contract address is not specified, - // then the committee from genesis should be returned. + // then an empty committee should be returned. 
let got = registry .attester_committee_for(ctx, None, attester::BatchNumber(10)) .await .unwrap(); - assert_eq!(setup.genesis.attesters, got); + assert!(got.is_none()); let (mut node, runner) = crate::testonly::StateKeeper::new(ctx, pool.clone()).await?; s.spawn_bg(runner.run_real(ctx, to_fund)); diff --git a/core/node/consensus/src/storage/connection.rs b/core/node/consensus/src/storage/connection.rs index 6ec5794e968d..08fa7996bdc1 100644 --- a/core/node/consensus/src/storage/connection.rs +++ b/core/node/consensus/src/storage/connection.rs @@ -277,7 +277,6 @@ impl<'a> Connection<'a> { first_block: txn.next_block(ctx).await.context("next_block()")?, protocol_version: spec.protocol_version, validators: spec.validators.clone(), - attesters: spec.attesters.clone(), leader_selection: spec.leader_selection.clone(), } .with_hash(), diff --git a/core/node/consensus/src/storage/testonly.rs b/core/node/consensus/src/storage/testonly.rs index 295ae4fc1790..858bca542c33 100644 --- a/core/node/consensus/src/storage/testonly.rs +++ b/core/node/consensus/src/storage/testonly.rs @@ -16,6 +16,7 @@ use crate::registry; impl Connection<'_> { /// Wrapper for `consensus_dal().batch_of_block()`. + #[allow(dead_code)] pub async fn batch_of_block( &mut self, ctx: &ctx::Ctx, @@ -27,6 +28,7 @@ impl Connection<'_> { } /// Wrapper for `consensus_dal().last_batch_certificate_number()`. + #[allow(dead_code)] pub async fn last_batch_certificate_number( &mut self, ctx: &ctx::Ctx, @@ -37,6 +39,7 @@ impl Connection<'_> { } /// Wrapper for `consensus_dal().batch_certificate()`. + #[allow(dead_code)] pub async fn batch_certificate( &mut self, ctx: &ctx::Ctx, @@ -187,6 +190,7 @@ impl ConnectionPool { Ok(blocks) } + #[allow(dead_code)] pub async fn wait_for_batch_certificates_and_verify( &self, ctx: &ctx::Ctx, @@ -217,7 +221,7 @@ impl ConnectionPool { .await .wrap("batch_of_block()")? 
.context("batch of first_block is missing")?; - let registry = registry::Registry::new(cfg.genesis.clone(), self.clone()).await; + let registry = registry::Registry::new(self.clone()).await; for i in first.0..want_last.0 { let i = attester::BatchNumber(i); let cert = conn diff --git a/core/node/consensus/src/tests/attestation.rs b/core/node/consensus/src/tests/attestation.rs index 6f24fbe65b4c..8e20474f453c 100644 --- a/core/node/consensus/src/tests/attestation.rs +++ b/core/node/consensus/src/tests/attestation.rs @@ -151,7 +151,7 @@ async fn test_multiple_attesters(version: ProtocolVersionId) { tracing::info!("deploy registry with 1 attester"); let attesters: Vec<_> = setup.genesis.attesters.as_ref().unwrap().iter().collect(); - let registry = Registry::new(setup.genesis.clone(), validator_pool.clone()).await; + let registry = Registry::new(validator_pool.clone()).await; let (registry_addr, tx) = registry.deploy(account); cfgs[0] .config diff --git a/core/node/consensus/src/tests/mod.rs b/core/node/consensus/src/tests/mod.rs index c7697ba8480e..c685fdd3223d 100644 --- a/core/node/consensus/src/tests/mod.rs +++ b/core/node/consensus/src/tests/mod.rs @@ -22,7 +22,10 @@ use crate::{ testonly, }; -mod attestation; +// NOTE: These tests are disabled since we are going to remove L1 batches. Most likely +// we will remove all the attester related code as well, but keeping this until +// we are sure. 
+//mod attestation; const VERSIONS: [ProtocolVersionId; 2] = [ProtocolVersionId::latest(), ProtocolVersionId::next()]; const FROM_SNAPSHOT: [bool; 2] = [true, false]; diff --git a/core/node/consistency_checker/src/lib.rs b/core/node/consistency_checker/src/lib.rs index a73adc44b83e..c1735a54fd7f 100644 --- a/core/node/consistency_checker/src/lib.rs +++ b/core/node/consistency_checker/src/lib.rs @@ -14,12 +14,9 @@ use zksync_eth_client::{ }; use zksync_health_check::{Health, HealthStatus, HealthUpdater, ReactiveHealthCheck}; use zksync_l1_contract_interface::{ - i_executor::{ - commit::kzg::ZK_SYNC_BYTES_PER_BLOB, - structures::{ - CommitBatchInfo, StoredBatchInfo, PUBDATA_SOURCE_BLOBS, PUBDATA_SOURCE_CALLDATA, - PUBDATA_SOURCE_CUSTOM_PRE_GATEWAY, SUPPORTED_ENCODING_VERSION, - }, + i_executor::structures::{ + CommitBatchInfo, StoredBatchInfo, PUBDATA_SOURCE_BLOBS, PUBDATA_SOURCE_CALLDATA, + PUBDATA_SOURCE_CUSTOM_PRE_GATEWAY, SUPPORTED_ENCODING_VERSION, }, Tokenizable, }; @@ -231,31 +228,19 @@ impl LocalL1BatchCommitData { } /// All returned errors are validation errors. - fn verify_commitment(&self, reference: ðabi::Token) -> anyhow::Result<()> { + fn verify_commitment(&self, reference: ðabi::Token, is_gateway: bool) -> anyhow::Result<()> { let protocol_version = self .l1_batch .header .protocol_version .unwrap_or_else(ProtocolVersionId::last_potentially_undefined); - let da = detect_da(protocol_version, reference, self.commitment_mode) - .context("cannot detect DA source from reference commitment token")?; - - // For rollups with `PubdataSendingMode::Calldata`, it's required that the pubdata fits into a single blob. 
- if matches!(self.commitment_mode, L1BatchCommitmentMode::Rollup) - && matches!(da, PubdataSendingMode::Calldata) - { - let pubdata_len = self - .l1_batch - .header - .pubdata_input - .as_ref() - .map_or_else(|| self.l1_batch.construct_pubdata().len(), Vec::len); - anyhow::ensure!( - pubdata_len <= ZK_SYNC_BYTES_PER_BLOB, - "pubdata size is too large when using calldata DA source: expected <={ZK_SYNC_BYTES_PER_BLOB} bytes, \ - got {pubdata_len} bytes" - ); - } + let da = detect_da( + protocol_version, + reference, + self.commitment_mode, + is_gateway, + ) + .context("cannot detect DA source from reference commitment token")?; let local_token = CommitBatchInfo::new(self.commitment_mode, &self.l1_batch, da).into_token(); @@ -278,6 +263,7 @@ pub fn detect_da( protocol_version: ProtocolVersionId, reference: &Token, commitment_mode: L1BatchCommitmentMode, + is_gateway: bool, ) -> Result { fn parse_error(message: impl Into>) -> ethabi::Error { ethabi::Error::Other(message.into()) @@ -351,7 +337,11 @@ pub fn detect_da( })? 
as usize; match last_reference_token.get(65 + 32 * number_of_blobs) { - Some(&byte) if byte == PUBDATA_SOURCE_CALLDATA => Ok(PubdataSendingMode::Calldata), + Some(&byte) if byte == PUBDATA_SOURCE_CALLDATA => if is_gateway { + Ok(PubdataSendingMode::RelayedL2Calldata) + } else { + Ok(PubdataSendingMode::Calldata) + }, Some(&byte) if byte == PUBDATA_SOURCE_BLOBS => Ok(PubdataSendingMode::Blobs), Some(&byte) => Err(parse_error(format!( "unexpected first byte of the last reference token for rollup; expected one of [{PUBDATA_SOURCE_CALLDATA}, {PUBDATA_SOURCE_BLOBS}], \ @@ -406,15 +396,9 @@ impl ConsistencyChecker { }; let gateway_chain_data = if let Some(client) = gateway_client { - let contract = bridgehub_contract(); - let function_name = if contract.function("getZKChain").is_ok() { - "getZKChain" - } else { - "getHyperchain" - }; let gateway_diamond_proxy = - CallFunctionArgs::new(function_name, Token::Uint(l2_chain_id.as_u64().into())) - .for_contract(L2_BRIDGEHUB_ADDRESS, &contract) + CallFunctionArgs::new("getZKChain", Token::Uint(l2_chain_id.as_u64().into())) + .for_contract(L2_BRIDGEHUB_ADDRESS, &bridgehub_contract()) .call(&client) .await?; let chain_id = client.fetch_chain_id().await?; @@ -565,8 +549,10 @@ impl ConsistencyChecker { format!("failed extracting commit data for transaction {commit_tx_hash:?}") }) .map_err(CheckError::Validation)?; + + let is_gateway = chain_data.chain_id != self.l1_chain_data.chain_id; local - .verify_commitment(&commitment) + .verify_commitment(&commitment, is_gateway) .map_err(CheckError::Validation) } diff --git a/core/node/consistency_checker/src/tests/mod.rs b/core/node/consistency_checker/src/tests/mod.rs index 1635bddffb83..57511fbb69c7 100644 --- a/core/node/consistency_checker/src/tests/mod.rs +++ b/core/node/consistency_checker/src/tests/mod.rs @@ -134,13 +134,8 @@ fn create_mock_sl(chain_id: u64, with_get_zk_chain: bool) -> MockSettlementLayer } Some(addr) if with_get_zk_chain && addr == L2_BRIDGEHUB_ADDRESS => { let 
contract = zksync_contracts::bridgehub_contract(); - let function_name = if contract.function("getZKChain").is_ok() { - "getZKChain" - } else { - "getHyperchain" - }; let expected_input = contract - .function(function_name) + .function("getZKChain") .unwrap() .encode_input(&[Token::Uint(ERA_CHAIN_ID.into())]) .unwrap(); diff --git a/core/node/da_clients/Cargo.toml b/core/node/da_clients/Cargo.toml index 5a7930e433e8..9d455cdee056 100644 --- a/core/node/da_clients/Cargo.toml +++ b/core/node/da_clients/Cargo.toml @@ -40,6 +40,7 @@ jsonrpsee = { workspace = true, features = ["ws-client"] } reqwest = { workspace = true } bytes = { workspace = true } backon.workspace = true +url.workspace = true # Celestia dependencies http.workspace = true @@ -64,7 +65,6 @@ ark-bn254.workspace = true num-bigint.workspace = true zksync_web3_decl.workspace = true zksync_eth_client.workspace = true -url.workspace = true thiserror.workspace = true tempfile.workspace = true diff --git a/core/node/da_clients/src/avail/client.rs b/core/node/da_clients/src/avail/client.rs index c0ead429d91a..411a0354d632 100644 --- a/core/node/da_clients/src/avail/client.rs +++ b/core/node/da_clients/src/avail/client.rs @@ -2,9 +2,11 @@ use std::{fmt::Debug, sync::Arc, time::Duration}; use anyhow::anyhow; use async_trait::async_trait; +use http::StatusCode; use jsonrpsee::ws_client::WsClientBuilder; use serde::{Deserialize, Serialize}; use subxt_signer::ExposeSecret; +use url::Url; use zksync_config::configs::da_client::avail::{AvailClientConfig, AvailConfig, AvailSecrets}; use zksync_da_client::{ types::{DAError, DispatchResponse, InclusionData}, @@ -40,10 +42,10 @@ pub struct AvailClient { pub struct BridgeAPIResponse { blob_root: Option, bridge_root: Option, - data_root_index: Option, + data_root_index: Option, data_root_proof: Option>, leaf: Option, - leaf_index: Option, + leaf_index: Option, leaf_proof: Option>, range_hash: Option, error: Option, @@ -121,8 +123,12 @@ impl AvailClient { .seed_phrase 
.ok_or_else(|| anyhow::anyhow!("Seed phrase is missing"))?; // these unwraps are safe because we validate in protobuf config - let sdk_client = - RawAvailClient::new(conf.app_id, seed_phrase.0.expose_secret()).await?; + let sdk_client = RawAvailClient::new( + conf.app_id, + seed_phrase.0.expose_secret(), + conf.finality_state()?, + ) + .await?; Ok(Self { config, @@ -187,19 +193,30 @@ impl DataAvailabilityClient for AvailClient { error: anyhow!("Invalid blob ID format"), is_retriable: false, })?; - let url = format!( - "{}/eth/proof/{}?index={}", - self.config.bridge_api_url, block_hash, tx_idx - ); + let url = Url::parse(&self.config.bridge_api_url) + .map_err(|_| DAError { + error: anyhow!("Invalid URL"), + is_retriable: false, + })? + .join(format!("/eth/proof/{}?index={}", block_hash, tx_idx).as_str()) + .map_err(|_| DAError { + error: anyhow!("Unable to join to URL"), + is_retriable: false, + })?; let response = self .api_client - .get(&url) + .get(url) .timeout(Duration::from_millis(self.config.timeout_ms as u64)) .send() .await .map_err(to_retriable_da_error)?; + // 404 means that the blob is not included in the bridge yet + if response.status() == StatusCode::NOT_FOUND { + return Ok(None); + } + let bridge_api_data = response .json::() .await @@ -209,12 +226,13 @@ impl DataAvailabilityClient for AvailClient { data_root_proof: bridge_api_data.data_root_proof.unwrap(), leaf_proof: bridge_api_data.leaf_proof.unwrap(), range_hash: bridge_api_data.range_hash.unwrap(), - data_root_index: bridge_api_data.data_root_index.unwrap(), + data_root_index: bridge_api_data.data_root_index.unwrap().into(), blob_root: bridge_api_data.blob_root.unwrap(), bridge_root: bridge_api_data.bridge_root.unwrap(), leaf: bridge_api_data.leaf.unwrap(), - leaf_index: bridge_api_data.leaf_index.unwrap(), + leaf_index: bridge_api_data.leaf_index.unwrap().into(), }; + Ok(Some(InclusionData { data: ethabi::encode(&attestation_data.into_tokens()), })) diff --git 
a/core/node/da_clients/src/avail/sdk.rs b/core/node/da_clients/src/avail/sdk.rs index 19309dc3cbf3..8f28e797dc9a 100644 --- a/core/node/da_clients/src/avail/sdk.rs +++ b/core/node/da_clients/src/avail/sdk.rs @@ -27,6 +27,7 @@ const PROTOCOL_VERSION: u8 = 4; pub(crate) struct RawAvailClient { app_id: u32, keypair: Keypair, + finality_state: String, } /// Utility type needed for encoding the call data @@ -44,11 +45,19 @@ struct BoundedVec<_0>(pub Vec<_0>); impl RawAvailClient { pub(crate) const MAX_BLOB_SIZE: usize = 512 * 1024; // 512kb - pub(crate) async fn new(app_id: u32, seed: &str) -> anyhow::Result { + pub(crate) async fn new( + app_id: u32, + seed: &str, + finality_state: String, + ) -> anyhow::Result { let mnemonic = Mnemonic::parse(seed)?; let keypair = Keypair::from_phrase(&mnemonic, None)?; - Ok(Self { app_id, keypair }) + Ok(Self { + app_id, + keypair, + finality_state, + }) } /// Returns a hex-encoded extrinsic @@ -291,7 +300,7 @@ impl RawAvailClient { let status = sub.next().await.transpose()?; if status.is_some() && status.as_ref().unwrap().is_object() { - if let Some(block_hash) = status.unwrap().get("finalized") { + if let Some(block_hash) = status.unwrap().get(self.finality_state.as_str()) { break block_hash .as_str() .ok_or_else(|| anyhow::anyhow!("Invalid block hash"))? 
diff --git a/core/node/da_dispatcher/src/da_dispatcher.rs b/core/node/da_dispatcher/src/da_dispatcher.rs index 2cdde9951be9..f59a30b362ee 100644 --- a/core/node/da_dispatcher/src/da_dispatcher.rs +++ b/core/node/da_dispatcher/src/da_dispatcher.rs @@ -1,4 +1,4 @@ -use std::{future::Future, time::Duration}; +use std::{future::Future, sync::Arc, time::Duration}; use anyhow::Context; use chrono::Utc; @@ -14,7 +14,7 @@ use zksync_types::L1BatchNumber; use crate::metrics::METRICS; -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct DataAvailabilityDispatcher { client: Box, pool: ConnectionPool, @@ -35,37 +35,59 @@ impl DataAvailabilityDispatcher { } pub async fn run(self, mut stop_receiver: Receiver) -> anyhow::Result<()> { - loop { - if *stop_receiver.borrow() { - break; - } + let self_arc = Arc::new(self.clone()); - let subtasks = futures::future::join( - async { - if let Err(err) = self.dispatch().await { - tracing::error!("dispatch error {err:?}"); - } - }, - async { - if let Err(err) = self.poll_for_inclusion().await { - tracing::error!("poll_for_inclusion error {err:?}"); - } - }, - ); + let mut stop_receiver_dispatch = stop_receiver.clone(); + let mut stop_receiver_poll_for_inclusion = stop_receiver.clone(); + + let dispatch_task = tokio::spawn(async move { + loop { + if *stop_receiver_dispatch.borrow() { + break; + } - tokio::select! 
{ - _ = subtasks => {}, - _ = stop_receiver.changed() => { + if let Err(err) = self_arc.dispatch().await { + tracing::error!("dispatch error {err:?}"); + } + + if tokio::time::timeout( + self_arc.config.polling_interval(), + stop_receiver_dispatch.changed(), + ) + .await + .is_ok() + { break; } } + }); - if tokio::time::timeout(self.config.polling_interval(), stop_receiver.changed()) + let inclusion_task = tokio::spawn(async move { + loop { + if *stop_receiver_poll_for_inclusion.borrow() { + break; + } + + if let Err(err) = self.poll_for_inclusion().await { + tracing::error!("poll_for_inclusion error {err:?}"); + } + + if tokio::time::timeout( + self.config.polling_interval(), + stop_receiver_poll_for_inclusion.changed(), + ) .await .is_ok() - { - break; + { + break; + } } + }); + + tokio::select! { + _ = dispatch_task => {}, + _ = inclusion_task => {}, + _ = stop_receiver.changed() => {}, } tracing::info!("Stop signal received, da_dispatcher is shutting down"); diff --git a/core/node/db_pruner/README.md b/core/node/db_pruner/README.md index ee1317d01e46..b1528bd0ade0 100644 --- a/core/node/db_pruner/README.md +++ b/core/node/db_pruner/README.md @@ -10,7 +10,7 @@ There are two types of objects that are not fully cleaned: ## Pruning workflow -_(See [node docs](../../../docs/guides/external-node/08_pruning.md) for a high-level pruning overview)_ +_(See [node docs](../../../docs/src/guides/external-node/08_pruning.md) for a high-level pruning overview)_ There are two phases of pruning an L1 batch, soft pruning and hard pruning. Every batch that would have its records removed if first _soft-pruned_. Soft-pruned batches cannot safely be used. 
One minute (this is configurable) after soft diff --git a/core/node/eth_sender/Cargo.toml b/core/node/eth_sender/Cargo.toml index f578743dcea9..2f95bf54e176 100644 --- a/core/node/eth_sender/Cargo.toml +++ b/core/node/eth_sender/Cargo.toml @@ -25,7 +25,6 @@ zksync_prover_interface.workspace = true zksync_shared_metrics.workspace = true zksync_node_fee_model.workspace = true zksync_mini_merkle_tree.workspace = true -once_cell.workspace = true tokio = { workspace = true, features = ["time"] } anyhow.workspace = true diff --git a/core/node/eth_sender/src/aggregated_operations.rs b/core/node/eth_sender/src/aggregated_operations.rs index 5271d42d3b75..35cb1648116d 100644 --- a/core/node/eth_sender/src/aggregated_operations.rs +++ b/core/node/eth_sender/src/aggregated_operations.rs @@ -62,4 +62,8 @@ impl AggregatedOperation { self.get_action_type() == AggregatedActionType::PublishProofOnchain || self.get_action_type() == AggregatedActionType::Execute } + + pub fn is_execute(&self) -> bool { + self.get_action_type() == AggregatedActionType::Execute + } } diff --git a/core/node/eth_sender/src/aggregator.rs b/core/node/eth_sender/src/aggregator.rs index 3a318f44bcea..69e0e45a9b0d 100644 --- a/core/node/eth_sender/src/aggregator.rs +++ b/core/node/eth_sender/src/aggregator.rs @@ -51,6 +51,60 @@ pub struct Aggregator { priority_tree_start_index: Option, } +/// Denotes whether there are any restrictions on sending either +/// commit, prove or execute operations. If there is one, the reason for it +/// is stored to be logged. 
+#[derive(Debug, Default)] +pub(crate) struct OperationSkippingRestrictions { + pub(crate) commit_restriction: Option<&'static str>, + pub(crate) prove_restriction: Option<&'static str>, + pub(crate) execute_restriction: Option<&'static str>, +} + +impl OperationSkippingRestrictions { + fn check_for_continuation( + &self, + agg_op: &AggregatedOperation, + reason: Option<&'static str>, + ) -> bool { + if let Some(reason) = reason { + tracing::info!( + "Skipping sending operation of type {} for batches {}-{} since {}", + agg_op.get_action_type(), + agg_op.l1_batch_range().start(), + agg_op.l1_batch_range().end(), + reason + ); + false + } else { + true + } + } + + // Unlike other functions `filter_commit_op` accepts an already prepared `AggregatedOperation` for + // easier compatibility with other interfaces in the file. + fn filter_commit_op( + &self, + commit_op: Option, + ) -> Option { + let commit_op = commit_op?; + self.check_for_continuation(&commit_op, self.commit_restriction) + .then_some(commit_op) + } + + fn filter_prove_op(&self, prove_op: Option) -> Option { + let op = AggregatedOperation::PublishProofOnchain(prove_op?); + self.check_for_continuation(&op, self.prove_restriction) + .then_some(op) + } + + fn filter_execute_op(&self, execute_op: Option) -> Option { + let op = AggregatedOperation::Execute(execute_op?); + self.check_for_continuation(&op, self.execute_restriction) + .then_some(op) + } +} + impl Aggregator { pub async fn new( config: SenderConfig, @@ -153,12 +207,13 @@ impl Aggregator { }) } - pub async fn get_next_ready_operation( + pub(crate) async fn get_next_ready_operation( &mut self, storage: &mut Connection<'_, Core>, base_system_contracts_hashes: BaseSystemContractsHashes, protocol_version_id: ProtocolVersionId, l1_verifier_config: L1VerifierConfig, + restrictions: OperationSkippingRestrictions, ) -> Result, EthSenderError> { let Some(last_sealed_l1_batch_number) = storage .blocks_dal() @@ -169,30 +224,31 @@ impl Aggregator { return
Ok(None); // No L1 batches in Postgres; no operations are ready yet }; - if let Some(op) = self - .get_execute_operations( + if let Some(op) = restrictions.filter_execute_op( + self.get_execute_operations( storage, self.config.max_aggregated_blocks_to_execute as usize, last_sealed_l1_batch_number, ) - .await? - { - Ok(Some(AggregatedOperation::Execute(op))) - } else if let Some(op) = self - .get_proof_operation(storage, last_sealed_l1_batch_number, l1_verifier_config) - .await - { - Ok(Some(AggregatedOperation::PublishProofOnchain(op))) + .await?, + ) { + Ok(Some(op)) + } else if let Some(op) = restrictions.filter_prove_op( + self.get_proof_operation(storage, last_sealed_l1_batch_number, l1_verifier_config) + .await, + ) { + Ok(Some(op)) } else { - Ok(self - .get_commit_operation( + Ok(restrictions.filter_commit_op( + self.get_commit_operation( storage, self.config.max_aggregated_blocks_to_commit as usize, last_sealed_l1_batch_number, base_system_contracts_hashes, protocol_version_id, ) - .await) + .await, + )) } } diff --git a/core/node/eth_sender/src/eth_tx_aggregator.rs b/core/node/eth_sender/src/eth_tx_aggregator.rs index 0b176d6cc7f3..bc9d3266ec78 100644 --- a/core/node/eth_sender/src/eth_tx_aggregator.rs +++ b/core/node/eth_sender/src/eth_tx_aggregator.rs @@ -28,6 +28,7 @@ use zksync_types::{ use super::aggregated_operations::AggregatedOperation; use crate::{ + aggregator::OperationSkippingRestrictions, health::{EthTxAggregatorHealthDetails, EthTxDetails}, metrics::{PubdataKind, METRICS}, publish_criterion::L1GasCriterion, @@ -41,7 +42,13 @@ use crate::{ pub struct MulticallData { pub base_system_contracts_hashes: BaseSystemContractsHashes, pub verifier_address: Address, - pub protocol_version_id: ProtocolVersionId, + pub chain_protocol_version_id: ProtocolVersionId, + /// The latest validator timelock that is stored on the StateTransitionManager (ChainTypeManager). 
+ /// For a smoother upgrade process, if the `stm_protocol_version_id` is the same as `chain_protocol_version_id`, + /// we will use the validator timelock from the CTM. This removes the need to immediately set the correct + /// validator timelock in the config. However, it is expected that it will be done eventually. + pub stm_validator_timelock_address: Address, + pub stm_protocol_version_id: ProtocolVersionId, } /// The component is responsible for aggregating l1 batches into eth_txs: @@ -52,9 +59,15 @@ pub struct EthTxAggregator { aggregator: Aggregator, eth_client: Box, config: SenderConfig, - timelock_contract_address: Address, + // The validator timelock address provided in the config. + // If the contracts have the same protocol version as the state transition manager, the validator timelock + // from the state transition manager will be used. + // The address provided from the config is only used when there is a discrepancy between the two. + // TODO(EVM-932): always fetch the validator timelock from L1, but it requires a protocol change. 
+ config_timelock_contract_address: Address, l1_multicall3_address: Address, pub(super) state_transition_chain_contract: Address, + state_transition_manager_address: Address, functions: ZkSyncFunctions, base_nonce: u64, base_nonce_custom_commit_sender: Option, @@ -84,7 +97,8 @@ impl EthTxAggregator { config: SenderConfig, aggregator: Aggregator, eth_client: Box, - timelock_contract_address: Address, + config_timelock_contract_address: Address, + state_transition_manager_address: Address, l1_multicall3_address: Address, state_transition_chain_contract: Address, rollup_chain_id: L2ChainId, @@ -113,7 +127,8 @@ impl EthTxAggregator { config, aggregator, eth_client, - timelock_contract_address, + config_timelock_contract_address, + state_transition_manager_address, l1_multicall3_address, state_transition_chain_contract, functions, @@ -229,12 +244,40 @@ impl EthTxAggregator { calldata: get_protocol_version_input, }; + let get_stm_protocol_version_input = self + .functions + .state_transition_manager_contract + .function("protocolVersion") + .unwrap() + .encode_input(&[]) + .unwrap(); + let get_stm_protocol_version_call = Multicall3Call { + target: self.state_transition_manager_address, + allow_failure: ALLOW_FAILURE, + calldata: get_stm_protocol_version_input, + }; + + let get_stm_validator_timelock_input = self + .functions + .state_transition_manager_contract + .function("validatorTimelock") + .unwrap() + .encode_input(&[]) + .unwrap(); + let get_stm_validator_timelock_call = Multicall3Call { + target: self.state_transition_manager_address, + allow_failure: ALLOW_FAILURE, + calldata: get_stm_validator_timelock_input, + }; + let mut token_vec = vec![ get_bootloader_hash_call.into_token(), get_default_aa_hash_call.into_token(), get_verifier_params_call.into_token(), get_verifier_call.into_token(), get_protocol_version_call.into_token(), + get_stm_protocol_version_call.into_token(), + get_stm_validator_timelock_call.into_token(), ]; let mut evm_emulator_hash_requested = 
false; @@ -270,8 +313,8 @@ impl EthTxAggregator { }; if let Token::Array(call_results) = token { - let number_of_calls = if evm_emulator_hash_requested { 6 } else { 5 }; - // 5 or 6 calls are aggregated in multicall + let number_of_calls = if evm_emulator_hash_requested { 8 } else { 7 }; + // 7 or 8 calls are aggregated in multicall if call_results.len() != number_of_calls { return parse_error(&call_results); } @@ -327,47 +370,86 @@ impl EthTxAggregator { call_results_iterator.next().unwrap(); // FIXME: why is this value requested? - let multicall3_verifier_address = - Multicall3Result::from_token(call_results_iterator.next().unwrap())?.return_data; - if multicall3_verifier_address.len() != 32 { - return Err(EthSenderError::Parse(Web3ContractError::InvalidOutputType( - format!( - "multicall3 verifier address data is not of the len of 32: {:?}", - multicall3_verifier_address - ), - ))); - } - let verifier_address = Address::from_slice(&multicall3_verifier_address[12..]); - - let multicall3_protocol_version = - Multicall3Result::from_token(call_results_iterator.next().unwrap())?.return_data; - if multicall3_protocol_version.len() != 32 { - return Err(EthSenderError::Parse(Web3ContractError::InvalidOutputType( - format!( - "multicall3 protocol version data is not of the len of 32: {:?}", - multicall3_protocol_version - ), - ))); - } - - let protocol_version = U256::from_big_endian(&multicall3_protocol_version); - // In case the protocol version is smaller than `PACKED_SEMVER_MINOR_MASK`, it will mean that it is - // equal to the `protocol_version_id` value, since it the interface from before the semver was supported. 
- let protocol_version_id = if protocol_version < U256::from(PACKED_SEMVER_MINOR_MASK) { - ProtocolVersionId::try_from(protocol_version.as_u32() as u16).unwrap() - } else { - ProtocolVersionId::try_from_packed_semver(protocol_version).unwrap() - }; + let verifier_address = + Self::parse_address(call_results_iterator.next().unwrap(), "verifier address")?; + + let chain_protocol_version_id = Self::parse_protocol_version( + call_results_iterator.next().unwrap(), + "contract protocol version", + )?; + let stm_protocol_version_id = Self::parse_protocol_version( + call_results_iterator.next().unwrap(), + "STM protocol version", + )?; + let stm_validator_timelock_address = Self::parse_address( + call_results_iterator.next().unwrap(), + "STM validator timelock address", + )?; return Ok(MulticallData { base_system_contracts_hashes, verifier_address, - protocol_version_id, + chain_protocol_version_id, + stm_protocol_version_id, + stm_validator_timelock_address, }); } parse_error(&[token]) } + fn parse_protocol_version( + data: Token, + name: &'static str, + ) -> Result { + let multicall_data = Multicall3Result::from_token(data)?.return_data; + if multicall_data.len() != 32 { + return Err(EthSenderError::Parse(Web3ContractError::InvalidOutputType( + format!( + "multicall3 {name} data is not of the len of 32: {:?}", + multicall_data + ), + ))); + } + + let protocol_version = U256::from_big_endian(&multicall_data); + // In case the protocol version is smaller than `PACKED_SEMVER_MINOR_MASK`, it will mean that it is + // equal to the `protocol_version_id` value, since it the interface from before the semver was supported. 
+ let protocol_version_id = if protocol_version < U256::from(PACKED_SEMVER_MINOR_MASK) { + ProtocolVersionId::try_from(protocol_version.as_u32() as u16).unwrap() + } else { + ProtocolVersionId::try_from_packed_semver(protocol_version).unwrap() + }; + + Ok(protocol_version_id) + } + + fn parse_address(data: Token, name: &'static str) -> Result { + let multicall_data = Multicall3Result::from_token(data)?.return_data; + if multicall_data.len() != 32 { + return Err(EthSenderError::Parse(Web3ContractError::InvalidOutputType( + format!( + "multicall3 {name} data is not of the len of 32: {:?}", + multicall_data + ), + ))); + } + + Ok(Address::from_slice(&multicall_data[12..])) + } + + fn timelock_contract_address( + &self, + chain_protocol_version_id: ProtocolVersionId, + stm_protocol_version_id: ProtocolVersionId, + stm_validator_timelock_address: Address, + ) -> Address { + if chain_protocol_version_id == stm_protocol_version_id { + stm_validator_timelock_address + } else { + self.config_timelock_contract_address + } + } + /// Loads current verifier config on L1 async fn get_snark_wrapper_vk_hash( &mut self, @@ -382,6 +464,32 @@ impl EthTxAggregator { Ok(vk_hash) } + /// Returns whether there is a pending gateway upgrade. + /// During gateway upgrade, the signature of the `executeBatches` function on `ValidatorTimelock` will change. + /// This means that transactions that were created before the upgrade but were sent right after it + /// will fail, which we want to avoid. + async fn is_pending_gateway_upgrade( + storage: &mut Connection<'_, Core>, + chain_protocol_version: ProtocolVersionId, + ) -> bool { + // If the gateway protocol version is present in the DB, and its timestamp is larger than `now`, it means that + // the upgrade process on the server has begun. + // However, if the protocol version on the contract is lower than the `gateway_upgrade`, it means that the upgrade has + // not yet completed. 
+ + if storage + .blocks_dal() + .pending_protocol_version() + .await + .unwrap() + < ProtocolVersionId::gateway_upgrade() + { + return false; + } + + chain_protocol_version < ProtocolVersionId::gateway_upgrade() + } + async fn get_fflonk_snark_wrapper_vk_hash( &mut self, verifier_address: Address, @@ -417,7 +525,9 @@ impl EthTxAggregator { let MulticallData { base_system_contracts_hashes, verifier_address, - protocol_version_id, + chain_protocol_version_id, + stm_protocol_version_id, + stm_validator_timelock_address, } = self.get_multicall_data().await.map_err(|err| { tracing::error!("Failed to get multicall data {err:?}"); err @@ -442,37 +552,52 @@ impl EthTxAggregator { snark_wrapper_vk_hash, fflonk_snark_wrapper_vk_hash, }; + + let mut op_restrictions = OperationSkippingRestrictions { + commit_restriction: self + .config + .tx_aggregation_only_prove_and_execute + .then_some("tx_aggregation_only_prove_and_execute=true"), + prove_restriction: None, + execute_restriction: Self::is_pending_gateway_upgrade( + storage, + chain_protocol_version_id, + ) + .await + .then_some("there is a pending gateway upgrade"), + }; + if self.config.tx_aggregation_paused { + let reason = Some("tx aggregation is paused"); + op_restrictions.commit_restriction = reason; + op_restrictions.prove_restriction = reason; + op_restrictions.execute_restriction = reason; + } + if let Some(agg_op) = self .aggregator .get_next_ready_operation( storage, base_system_contracts_hashes, - protocol_version_id, + chain_protocol_version_id, l1_verifier_config, + op_restrictions, ) .await? 
{ - if self.config.tx_aggregation_paused { - tracing::info!( - "Skipping sending operation of type {} for batches {}-{} \ - as tx_aggregation_paused=true", - agg_op.get_action_type(), - agg_op.l1_batch_range().start(), - agg_op.l1_batch_range().end() - ); - return Ok(()); - } - if self.config.tx_aggregation_only_prove_and_execute && !agg_op.is_prove_or_execute() { - tracing::info!( - "Skipping sending commit operation for batches {}-{} \ - as tx_aggregation_only_prove_and_execute=true", - agg_op.l1_batch_range().start(), - agg_op.l1_batch_range().end() - ); - return Ok(()); - } let is_gateway = self.settlement_mode.is_gateway(); - let tx = self.save_eth_tx(storage, &agg_op, is_gateway).await?; + let tx = self + .save_eth_tx( + storage, + &agg_op, + self.timelock_contract_address( + chain_protocol_version_id, + stm_protocol_version_id, + stm_validator_timelock_address, + ), + chain_protocol_version_id, + is_gateway, + ) + .await?; Self::report_eth_tx_saving(storage, &agg_op, &tx).await; self.health_updater.update( @@ -518,7 +643,11 @@ impl EthTxAggregator { .await; } - fn encode_aggregated_op(&self, op: &AggregatedOperation) -> TxData { + fn encode_aggregated_op( + &self, + op: &AggregatedOperation, + chain_protocol_version_id: ProtocolVersionId, + ) -> TxData { let mut args = vec![Token::Uint(self.rollup_chain_id.as_u64().into())]; let is_op_pre_gateway = op.protocol_version().is_pre_gateway(); @@ -562,8 +691,9 @@ impl EthTxAggregator { (calldata, None) } AggregatedOperation::Execute(op) => { - args.extend(op.into_tokens()); - let encoding_fn = if is_op_pre_gateway { + args.extend(op.encode_for_eth_tx(chain_protocol_version_id)); + let encoding_fn = if is_op_pre_gateway && chain_protocol_version_id.is_pre_gateway() + { &self.functions.post_shared_bridge_execute } else { &self.functions.post_gateway_execute @@ -618,6 +748,8 @@ impl EthTxAggregator { &self, storage: &mut Connection<'_, Core>, aggregated_op: &AggregatedOperation, + timelock_contract_address: Address, 
+ chain_protocol_version_id: ProtocolVersionId, is_gateway: bool, ) -> Result { let mut transaction = storage.start_transaction().await.unwrap(); @@ -630,7 +762,8 @@ impl EthTxAggregator { (_, _) => None, }; let nonce = self.get_next_nonce(&mut transaction, sender_addr).await?; - let encoded_aggregated_op = self.encode_aggregated_op(aggregated_op); + let encoded_aggregated_op = + self.encode_aggregated_op(aggregated_op, chain_protocol_version_id); let l1_batch_number_range = aggregated_op.l1_batch_range(); let eth_tx_predicted_gas = match (op_type, is_gateway, self.aggregator.mode()) { @@ -653,7 +786,7 @@ impl EthTxAggregator { nonce, encoded_aggregated_op.calldata, op_type, - self.timelock_contract_address, + timelock_contract_address, eth_tx_predicted_gas, sender_addr, encoded_aggregated_op.sidecar, diff --git a/core/node/eth_sender/src/tester.rs b/core/node/eth_sender/src/tester.rs index e7d9f2ac87e7..022e2bc87222 100644 --- a/core/node/eth_sender/src/tester.rs +++ b/core/node/eth_sender/src/tester.rs @@ -13,7 +13,7 @@ use zksync_object_store::MockObjectStore; use zksync_types::{ aggregated_operations::AggregatedActionType, block::L1BatchHeader, commitment::L1BatchCommitmentMode, eth_sender::EthTx, pubdata_da::PubdataSendingMode, - settlement::SettlementMode, Address, L1BatchNumber, ProtocolVersion, H256, + settlement::SettlementMode, Address, L1BatchNumber, ProtocolVersion, ProtocolVersionId, H256, }; use crate::{ @@ -24,6 +24,7 @@ use crate::{ }; pub(super) const STATE_TRANSITION_CONTRACT_ADDRESS: Address = Address::repeat_byte(0xa0); +pub(super) const STATE_TRANSITION_MANAGER_CONTRACT_ADDRESS: Address = Address::repeat_byte(0xb0); // Alias to conveniently call static methods of `ETHSender`. 
type MockEthTxManager = EthTxManager; @@ -268,6 +269,7 @@ impl EthSenderTester { gateway.clone(), // ZKsync contract address Address::random(), + STATE_TRANSITION_MANAGER_CONTRACT_ADDRESS, contracts_config.l1_multicall3_addr, STATE_TRANSITION_CONTRACT_ADDRESS, Default::default(), @@ -522,6 +524,8 @@ impl EthSenderTester { .save_eth_tx( &mut self.conn.connection().await.unwrap(), &aggregated_operation, + Address::random(), + ProtocolVersionId::latest(), self.is_l2, ) .await diff --git a/core/node/eth_sender/src/tests.rs b/core/node/eth_sender/src/tests.rs index aab6d2e43d76..8841f297cadc 100644 --- a/core/node/eth_sender/src/tests.rs +++ b/core/node/eth_sender/src/tests.rs @@ -11,18 +11,19 @@ use zksync_types::{ commitment::{ L1BatchCommitmentMode, L1BatchMetaParameters, L1BatchMetadata, L1BatchWithMetadata, }, - ethabi, - ethabi::Token, + ethabi::{self, Token}, helpers::unix_timestamp_ms, - web3, - web3::contract::Error, + web3::{self, contract::Error}, Address, ProtocolVersionId, H256, }; use crate::{ abstract_l1_interface::OperatorType, aggregated_operations::AggregatedOperation, - tester::{EthSenderTester, TestL1Batch, STATE_TRANSITION_CONTRACT_ADDRESS}, + tester::{ + EthSenderTester, TestL1Batch, STATE_TRANSITION_CONTRACT_ADDRESS, + STATE_TRANSITION_MANAGER_CONTRACT_ADDRESS, + }, zksync_functions::ZkSyncFunctions, EthSenderError, }; @@ -66,27 +67,53 @@ pub(crate) fn mock_multicall_response(call: &web3::CallRequest) -> Token { panic!("Unexpected input: {tokens:?}"); }; + let validator_timelock_short_selector = functions + .state_transition_manager_contract + .function("validatorTimelock") + .unwrap() + .short_signature(); + let prototol_version_short_selector = functions + .state_transition_manager_contract + .function("protocolVersion") + .unwrap() + .short_signature(); + let calls = tokens.into_iter().map(Multicall3Call::from_token); let response = calls.map(|call| { let call = call.unwrap(); - assert_eq!(call.target, STATE_TRANSITION_CONTRACT_ADDRESS); let 
output = match &call.calldata[..4] { selector if selector == bootloader_signature => { + assert!(call.target == STATE_TRANSITION_CONTRACT_ADDRESS); vec![1u8; 32] } selector if selector == default_aa_signature => { + assert!(call.target == STATE_TRANSITION_CONTRACT_ADDRESS); vec![2u8; 32] } selector if Some(selector) == evm_emulator_getter_signature => { + assert!(call.target == STATE_TRANSITION_CONTRACT_ADDRESS); vec![3u8; 32] } selector if selector == functions.get_verifier_params.short_signature() => { + assert!(call.target == STATE_TRANSITION_CONTRACT_ADDRESS); vec![4u8; 96] } selector if selector == functions.get_verifier.short_signature() => { + assert!(call.target == STATE_TRANSITION_CONTRACT_ADDRESS); vec![5u8; 32] } selector if selector == functions.get_protocol_version.short_signature() => { + assert!(call.target == STATE_TRANSITION_CONTRACT_ADDRESS); + H256::from_low_u64_be(ProtocolVersionId::default() as u64) + .0 + .to_vec() + } + selector if selector == validator_timelock_short_selector => { + assert!(call.target == STATE_TRANSITION_MANAGER_CONTRACT_ADDRESS); + vec![6u8; 32] + } + selector if selector == prototol_version_short_selector => { + assert!(call.target == STATE_TRANSITION_MANAGER_CONTRACT_ADDRESS); H256::from_low_u64_be(ProtocolVersionId::default() as u64) .0 .to_vec() @@ -208,6 +235,8 @@ async fn resend_each_block(commitment_mode: L1BatchCommitmentMode) -> anyhow::Re .save_eth_tx( &mut tester.conn.connection().await.unwrap(), &get_dummy_operation(0), + Address::random(), + ProtocolVersionId::latest(), false, ) .await?; @@ -729,6 +758,15 @@ async fn parsing_multicall_data(with_evm_emulator: bool) { .to_vec(), ), ]), + Token::Tuple(vec![ + Token::Bool(true), + Token::Bytes( + H256::from_low_u64_be(ProtocolVersionId::latest() as u64) + .0 + .to_vec(), + ), + ]), + Token::Tuple(vec![Token::Bool(true), Token::Bytes(vec![6u8; 32])]), ]; if with_evm_emulator { mock_response.insert( @@ -756,7 +794,15 @@ async fn 
parsing_multicall_data(with_evm_emulator: bool) { expected_evm_emulator_hash ); assert_eq!(parsed.verifier_address, Address::repeat_byte(5)); - assert_eq!(parsed.protocol_version_id, ProtocolVersionId::latest()); + assert_eq!( + parsed.chain_protocol_version_id, + ProtocolVersionId::latest() + ); + assert_eq!( + parsed.stm_validator_timelock_address, + Address::repeat_byte(6) + ); + assert_eq!(parsed.stm_protocol_version_id, ProtocolVersionId::latest()); } #[test_log::test(tokio::test)] @@ -848,5 +894,5 @@ async fn get_multicall_data(commitment_mode: L1BatchCommitmentMode) { ); assert_eq!(data.base_system_contracts_hashes.evm_emulator, None); assert_eq!(data.verifier_address, Address::repeat_byte(5)); - assert_eq!(data.protocol_version_id, ProtocolVersionId::latest()); + assert_eq!(data.chain_protocol_version_id, ProtocolVersionId::latest()); } diff --git a/core/node/eth_sender/src/zksync_functions.rs b/core/node/eth_sender/src/zksync_functions.rs index f3e4998ef37c..5c2088f7cec7 100644 --- a/core/node/eth_sender/src/zksync_functions.rs +++ b/core/node/eth_sender/src/zksync_functions.rs @@ -1,6 +1,7 @@ use zksync_contracts::{ - hyperchain_contract, multicall_contract, verifier_contract, POST_SHARED_BRIDGE_COMMIT_FUNCTION, - POST_SHARED_BRIDGE_EXECUTE_FUNCTION, POST_SHARED_BRIDGE_PROVE_FUNCTION, + hyperchain_contract, multicall_contract, state_transition_manager_contract, verifier_contract, + POST_SHARED_BRIDGE_COMMIT_FUNCTION, POST_SHARED_BRIDGE_EXECUTE_FUNCTION, + POST_SHARED_BRIDGE_PROVE_FUNCTION, }; use zksync_types::ethabi::{Contract, Function}; @@ -26,6 +27,8 @@ pub(super) struct ZkSyncFunctions { pub(super) multicall_contract: Contract, pub(super) aggregate3: Function, + + pub(super) state_transition_manager_contract: Contract, } fn get_function(contract: &Contract, name: &str) -> Function { @@ -51,6 +54,7 @@ impl Default for ZkSyncFunctions { let zksync_contract = hyperchain_contract(); let verifier_contract = verifier_contract(); let multicall_contract = 
multicall_contract(); + let state_transition_manager_contract = state_transition_manager_contract(); let post_shared_bridge_commit = POST_SHARED_BRIDGE_COMMIT_FUNCTION.clone(); let post_shared_bridge_prove = POST_SHARED_BRIDGE_PROVE_FUNCTION.clone(); @@ -89,6 +93,7 @@ impl Default for ZkSyncFunctions { verification_key_hash, multicall_contract, aggregate3, + state_transition_manager_contract, } } } diff --git a/core/node/eth_watch/Cargo.toml b/core/node/eth_watch/Cargo.toml index 2a2374cef70e..38e8cf944efc 100644 --- a/core/node/eth_watch/Cargo.toml +++ b/core/node/eth_watch/Cargo.toml @@ -19,6 +19,7 @@ zksync_system_constants.workspace = true zksync_eth_client.workspace = true zksync_shared_metrics.workspace = true zksync_mini_merkle_tree.workspace = true +zksync_config.workspace = true zksync_web3_decl.workspace = true tokio = { workspace = true, features = ["time"] } diff --git a/core/node/eth_watch/src/client.rs b/core/node/eth_watch/src/client.rs index 0197748376ae..1a48898c131d 100644 --- a/core/node/eth_watch/src/client.rs +++ b/core/node/eth_watch/src/client.rs @@ -1,9 +1,9 @@ -use std::{fmt, sync::Arc}; +use std::{collections::HashMap, fmt, sync::Arc}; use anyhow::Context; use zksync_contracts::{ - getters_facet_contract, state_transition_manager_contract, verifier_contract, - MESSAGE_ROOT_CONTRACT, + bytecode_supplier_contract, getters_facet_contract, l1_asset_router_contract, l2_message_root, + state_transition_manager_contract, verifier_contract, wrapped_base_token_store_contract, }; use zksync_eth_client::{ clients::{DynClient, L1}, @@ -12,10 +12,12 @@ use zksync_eth_client::{ }; use zksync_system_constants::L2_MESSAGE_ROOT_ADDRESS; use zksync_types::{ + abi::ZkChainSpecificUpgradeData, api::{ChainAggProof, Log}, - ethabi::Contract, - web3::{BlockId, BlockNumber, Filter, FilterBuilder}, - Address, L1BatchNumber, L2ChainId, SLChainId, H256, U256, U64, + ethabi::{self, decode, encode, Contract, ParamType}, + web3::{keccak256, BlockId, BlockNumber, 
Filter, FilterBuilder}, + Address, L1BatchNumber, L2ChainId, SLChainId, H256, L2_NATIVE_TOKEN_VAULT_ADDRESS, + SHARED_BRIDGE_ETHER_TOKEN_ADDRESS, U256, U64, }; use zksync_web3_decl::{ client::{Network, L2}, @@ -57,6 +59,15 @@ pub trait EthClient: 'static + fmt::Debug + Send + Sync { packed_version: H256, ) -> EnrichedClientResult>>; + async fn get_published_preimages( + &self, + hashes: Vec, + ) -> EnrichedClientResult>>>; + + async fn get_chain_gateway_upgrade_info( + &self, + ) -> Result, ContractCallError>; + /// Returns ID of the chain. async fn chain_id(&self) -> EnrichedClientResult; @@ -70,6 +81,8 @@ pub trait EthClient: 'static + fmt::Debug + Send + Sync { ) -> Result; } +// This constant is used for reading auxiliary events +const LOOK_BACK_BLOCK_RANGE: u64 = 1_000_000; pub const RETRY_LIMIT: usize = 5; const TOO_MANY_RESULTS_INFURA: &str = "query returned more than"; const TOO_MANY_RESULTS_ALCHEMY: &str = "response size exceeded"; @@ -84,26 +97,38 @@ pub struct EthHttpQueryClient { diamond_proxy_addr: Address, governance_address: Address, new_upgrade_cut_data_signature: H256, + bytecode_published_signature: H256, + bytecode_supplier_addr: Option
, + wrapped_base_token_store: Option
, + l1_shared_bridge_addr: Option
, // Only present for post-shared bridge chains. state_transition_manager_address: Option
, chain_admin_address: Option
, verifier_contract_abi: Contract, getters_facet_contract_abi: Contract, message_root_abi: Contract, + l1_asset_router_abi: Contract, + wrapped_base_token_store_abi: Contract, confirmations_for_eth_event: Option, + l2_chain_id: L2ChainId, } impl EthHttpQueryClient where Box>: GetLogsClient, { + #[allow(clippy::too_many_arguments)] pub fn new( client: Box>, diamond_proxy_addr: Address, + bytecode_supplier_addr: Option
, + wrapped_base_token_store: Option
, + l1_shared_bridge_addr: Option
, state_transition_manager_address: Option
, chain_admin_address: Option
, governance_address: Address, confirmations_for_eth_event: Option, + l2_chain_id: L2ChainId, ) -> Self { tracing::debug!( "New eth client, ZKsync addr: {:x}, governance addr: {:?}", @@ -116,15 +141,26 @@ where state_transition_manager_address, chain_admin_address, governance_address, + bytecode_supplier_addr, new_upgrade_cut_data_signature: state_transition_manager_contract() .event("NewUpgradeCutData") .context("NewUpgradeCutData event is missing in ABI") .unwrap() .signature(), + bytecode_published_signature: bytecode_supplier_contract() + .event("BytecodePublished") + .context("BytecodePublished event is missing in ABI") + .unwrap() + .signature(), verifier_contract_abi: verifier_contract(), getters_facet_contract_abi: getters_facet_contract(), - message_root_abi: MESSAGE_ROOT_CONTRACT.clone(), + message_root_abi: l2_message_root(), + l1_asset_router_abi: l1_asset_router_contract(), + wrapped_base_token_store_abi: wrapped_base_token_store_contract(), confirmations_for_eth_event, + wrapped_base_token_store, + l1_shared_bridge_addr, + l2_chain_id, } } @@ -264,6 +300,43 @@ where .await } + async fn get_published_preimages( + &self, + hashes: Vec, + ) -> EnrichedClientResult>>> { + let Some(bytecode_supplier_addr) = self.bytecode_supplier_addr else { + return Ok(vec![None; hashes.len()]); + }; + + let to_block = self.client.block_number().await?; + let from_block = to_block.saturating_sub((LOOK_BACK_BLOCK_RANGE - 1).into()); + + let logs = self + .get_events_inner( + from_block.into(), + to_block.into(), + Some(vec![self.bytecode_published_signature]), + Some(hashes.clone()), + Some(vec![bytecode_supplier_addr]), + RETRY_LIMIT, + ) + .await?; + + let mut preimages = HashMap::new(); + for log in logs { + let hash = log.topics[1]; + let preimage = decode(&[ParamType::Bytes], &log.data.0).expect("Invalid encoding"); + assert_eq!(preimage.len(), 1); + let preimage = preimage[0].clone().into_bytes().unwrap(); + preimages.insert(hash, preimage); + } + + Ok(hashes + 
.into_iter() + .map(|hash| preimages.get(&hash).cloned()) + .collect()) + } + async fn get_events( &self, from: BlockNumber, @@ -346,8 +419,6 @@ where &self, packed_version: H256, ) -> EnrichedClientResult>> { - const LOOK_BACK_BLOCK_RANGE: u64 = 1_000_000; - let Some(state_transition_manager_address) = self.state_transition_manager_address else { return Ok(None); }; @@ -384,6 +455,83 @@ where .call(&self.client) .await } + + async fn get_chain_gateway_upgrade_info( + &self, + ) -> Result, ContractCallError> { + let Some(l1_shared_bridge_addr) = self.l1_shared_bridge_addr else { + tracing::warn!("l1 shared bridge is not provided!"); + return Ok(None); + }; + + let Some(l1_wrapped_base_token_store) = self.wrapped_base_token_store else { + tracing::warn!("l1 wrapped base token store is not provided!"); + return Ok(None); + }; + + let l2_chain_id = U256::from(self.l2_chain_id.as_u64()); + + // It does not matter whether the l1 shared bridge is an L1AssetRouter or L1Nullifier, + // either way it supports the "l2BridgeAddress" method. + let l2_legacy_shared_bridge: Address = + CallFunctionArgs::new("l2BridgeAddress", l2_chain_id) + .for_contract(l1_shared_bridge_addr, &self.l1_asset_router_abi) + .call(&self.client) + .await?; + + if l2_legacy_shared_bridge == Address::zero() { + // This state is not completely impossible, but somewhat undesirable. + // Contracts will still allow the upgrade to go through without + // the shared bridge, so we will allow it here as well. + tracing::error!("L2 shared bridge from L1 is empty"); + } + + let l2_predeployed_wrapped_base_token: Address = + CallFunctionArgs::new("l2WBaseTokenAddress", l2_chain_id) + .for_contract( + l1_wrapped_base_token_store, + &self.wrapped_base_token_store_abi, + ) + .call(&self.client) + .await?; + + if l2_predeployed_wrapped_base_token == Address::zero() { + // This state is not completely impossible, but somewhat undesirable. 
+ // Contracts will still allow the upgrade to go through without + // the l2 predeployed wrapped base token, so we will allow it here as well. + tracing::error!("L2 predeployed wrapped base token is empty"); + } + + let base_token_l1_address: Address = CallFunctionArgs::new("getBaseToken", ()) + .for_contract(self.diamond_proxy_addr, &self.getters_facet_contract_abi) + .call(&self.client) + .await?; + + let (base_token_name, base_token_symbol) = + if base_token_l1_address == SHARED_BRIDGE_ETHER_TOKEN_ADDRESS { + (String::from("Ether"), String::from("ETH")) + } else { + // Due to an issue in the upgrade process, the automatically + // deployed wrapped base tokens will contain generic names + (String::from("Base Token"), String::from("BT")) + }; + + let base_token_asset_id = encode_ntv_asset_id( + // Note, that this is correct only for tokens that are being upgraded to the gateway protocol version. + // The chains that were deployed after it may have tokens with non-L1 base tokens. + U256::from(self.chain_id().await?.0), + base_token_l1_address, + ); + + Ok(Some(ZkChainSpecificUpgradeData { + base_token_asset_id, + l2_legacy_shared_bridge, + l2_predeployed_wrapped_base_token, + base_token_l1_address, + base_token_name, + base_token_symbol, + })) + } } /// Encapsulates `eth_getLogs` calls. 
@@ -530,4 +678,27 @@ impl EthClient for L2EthClientW { ) -> Result { self.0.get_chain_root(block_number, l2_chain_id).await } + + async fn get_chain_gateway_upgrade_info( + &self, + ) -> Result, ContractCallError> { + self.0.get_chain_gateway_upgrade_info().await + } + + async fn get_published_preimages( + &self, + hashes: Vec, + ) -> EnrichedClientResult>>> { + self.0.get_published_preimages(hashes).await + } +} + +pub(crate) fn encode_ntv_asset_id(l1_chain_id: U256, addr: Address) -> H256 { + let encoded_data = encode(&[ + ethabi::Token::Uint(l1_chain_id), + ethabi::Token::Address(L2_NATIVE_TOKEN_VAULT_ADDRESS), + ethabi::Token::Address(addr), + ]); + + H256(keccak256(&encoded_data)) } diff --git a/core/node/eth_watch/src/event_processors/appended_chain_batch_root.rs b/core/node/eth_watch/src/event_processors/appended_chain_batch_root.rs index 68f731120c65..158d800d1ec3 100644 --- a/core/node/eth_watch/src/event_processors/appended_chain_batch_root.rs +++ b/core/node/eth_watch/src/event_processors/appended_chain_batch_root.rs @@ -226,6 +226,9 @@ impl BatchRootProcessor { metadata[0] = LOG_PROOF_SUPPORTED_METADATA_VERSION; metadata[1] = chain_agg_proof.chain_id_leaf_proof.len() as u8; + // Chain proofs are always final nodes in the proofs. 
+ metadata[3] = 1; + let mut chain_proof_vector = vec![ u256_to_h256(sl_encoded_data), H256::from_low_u64_be(sl_chain_id.0), diff --git a/core/node/eth_watch/src/event_processors/decentralized_upgrades.rs b/core/node/eth_watch/src/event_processors/decentralized_upgrades.rs index 6e55b9ea0f89..2892d6ca718f 100644 --- a/core/node/eth_watch/src/event_processors/decentralized_upgrades.rs +++ b/core/node/eth_watch/src/event_processors/decentralized_upgrades.rs @@ -3,8 +3,8 @@ use std::sync::Arc; use anyhow::Context as _; use zksync_dal::{eth_watcher_dal::EventType, Connection, Core, CoreDal, DalError}; use zksync_types::{ - api::Log, ethabi::Contract, protocol_version::ProtocolSemanticVersion, ProtocolUpgrade, H256, - U256, + api::Log, ethabi::Contract, protocol_upgrade::ProtocolUpgradePreimageOracle, + protocol_version::ProtocolSemanticVersion, ProtocolUpgrade, H256, U256, }; use crate::{ @@ -20,6 +20,7 @@ pub struct DecentralizedUpgradesEventProcessor { last_seen_protocol_version: ProtocolSemanticVersion, update_upgrade_timestamp_signature: H256, sl_client: Arc, + l1_client: Arc, } impl DecentralizedUpgradesEventProcessor { @@ -27,6 +28,7 @@ impl DecentralizedUpgradesEventProcessor { last_seen_protocol_version: ProtocolSemanticVersion, chain_admin_contract: &Contract, sl_client: Arc, + l1_client: Arc, ) -> Self { Self { last_seen_protocol_version, @@ -36,10 +38,34 @@ impl DecentralizedUpgradesEventProcessor { .unwrap() .signature(), sl_client, + l1_client, } } } +#[async_trait::async_trait] +impl ProtocolUpgradePreimageOracle for &dyn EthClient { + async fn get_protocol_upgrade_preimages( + &self, + hashes: Vec, + ) -> anyhow::Result>> { + let preimages = self.get_published_preimages(hashes.clone()).await?; + + let mut result = vec![]; + for (i, preimage) in preimages.into_iter().enumerate() { + let preimage = preimage.with_context(|| { + format!( + "Protocol upgrade preimage under id {i} for {:#?} is missing", + hashes[i] + ) + })?; + result.push(preimage); + } + + 
Ok(result) + } +} + #[async_trait::async_trait] impl EventProcessor for DecentralizedUpgradesEventProcessor { async fn process_events( @@ -63,8 +89,14 @@ impl EventProcessor for DecentralizedUpgradesEventProcessor { let upgrade = ProtocolUpgrade { timestamp, - ..ProtocolUpgrade::try_from_diamond_cut(&diamond_cut)? + ..ProtocolUpgrade::try_from_diamond_cut( + &diamond_cut, + self.l1_client.as_ref(), + self.l1_client.get_chain_gateway_upgrade_info().await?, + ) + .await? }; + // Scheduler VK is not present in proposal event. It is hard coded in verifier contract. let scheduler_vk_hash = if let Some(address) = upgrade.verifier_address { Some(self.sl_client.scheduler_vk_hash(address).await?) diff --git a/core/node/eth_watch/src/lib.rs b/core/node/eth_watch/src/lib.rs index 908ff4da37f1..f866c8e627c5 100644 --- a/core/node/eth_watch/src/lib.rs +++ b/core/node/eth_watch/src/lib.rs @@ -77,6 +77,7 @@ impl EthWatch { state.last_seen_protocol_version, chain_admin_contract, sl_client.clone(), + l1_client.clone(), ); let mut event_processors: Vec> = vec![ Box::new(priority_ops_processor), diff --git a/core/node/eth_watch/src/tests/client.rs b/core/node/eth_watch/src/tests/client.rs index e94a32096d96..cec297435225 100644 --- a/core/node/eth_watch/src/tests/client.rs +++ b/core/node/eth_watch/src/tests/client.rs @@ -6,18 +6,19 @@ use zksync_contracts::{ }; use zksync_eth_client::{ContractCallError, EnrichedClientResult}; use zksync_types::{ - abi, - abi::ProposedUpgrade, + abi::{self, ProposedUpgrade, ZkChainSpecificUpgradeData}, api::{ChainAggProof, Log}, - ethabi, - ethabi::Token, + bytecode::BytecodeHash, + ethabi::{self, Token}, l1::L1Tx, + protocol_upgrade::ProtocolUpgradeTx, u256_to_h256, web3::{contract::Tokenizable, BlockNumber}, - Address, L1BatchNumber, L2ChainId, ProtocolUpgrade, SLChainId, Transaction, H256, U256, U64, + Address, L1BatchNumber, L2ChainId, ProtocolUpgrade, SLChainId, Transaction, H256, + SHARED_BRIDGE_ETHER_TOKEN_ADDRESS, U256, U64, }; -use 
crate::client::{EthClient, L2EthClient, RETRY_LIMIT}; +use crate::client::{encode_ntv_asset_id, EthClient, L2EthClient, RETRY_LIMIT}; #[derive(Debug)] pub struct FakeEthClientData { @@ -30,6 +31,7 @@ pub struct FakeEthClientData { chain_log_proofs: HashMap, batch_roots: HashMap>, chain_roots: HashMap, + bytecode_preimages: HashMap>, } impl FakeEthClientData { @@ -44,6 +46,7 @@ impl FakeEthClientData { chain_log_proofs: Default::default(), batch_roots: Default::default(), chain_roots: Default::default(), + bytecode_preimages: Default::default(), } } @@ -68,6 +71,7 @@ impl FakeEthClientData { .entry(*eth_block) .or_default() .push(diamond_upgrade_log(upgrade.clone(), *eth_block)); + self.add_bytecode_preimages(&upgrade.tx); } } @@ -99,6 +103,22 @@ impl FakeEthClientData { self.chain_log_proofs.insert(batch, proof); } } + + fn get_bytecode_preimage(&self, hash: H256) -> Option> { + self.bytecode_preimages.get(&hash).cloned() + } + + fn add_bytecode_preimages(&mut self, upgrade_tx: &Option) { + let Some(tx) = upgrade_tx.as_ref() else { + // Nothing to add + return; + }; + + for dep in tx.execute.factory_deps.iter() { + self.bytecode_preimages + .insert(BytecodeHash::for_bytecode(dep).value(), dep.clone()); + } + } } #[derive(Debug, Clone)] @@ -273,6 +293,35 @@ impl EthClient for MockEthClient { unimplemented!() } + async fn get_published_preimages( + &self, + hashes: Vec, + ) -> EnrichedClientResult>>> { + let mut result = vec![]; + + for hash in hashes { + result.push(self.inner.read().await.get_bytecode_preimage(hash)); + } + + Ok(result) + } + + async fn get_chain_gateway_upgrade_info( + &self, + ) -> Result, ContractCallError> { + Ok(Some(ZkChainSpecificUpgradeData { + base_token_asset_id: encode_ntv_asset_id( + self.chain_id().await?.0.into(), + SHARED_BRIDGE_ETHER_TOKEN_ADDRESS, + ), + l2_legacy_shared_bridge: Address::repeat_byte(0x01), + l2_predeployed_wrapped_base_token: Address::repeat_byte(0x02), + base_token_l1_address: SHARED_BRIDGE_ETHER_TOKEN_ADDRESS, + 
base_token_name: String::from("Ether"), + base_token_symbol: String::from("ETH"), + })) + } + async fn fflonk_scheduler_vk_hash( &self, _verifier_address: Address, @@ -435,9 +484,7 @@ fn upgrade_timestamp_log(eth_block: u64) -> Log { } fn upgrade_into_diamond_cut(upgrade: ProtocolUpgrade) -> Token { - let abi::Transaction::L1 { - tx, factory_deps, .. - } = upgrade + let abi::Transaction::L1 { tx, .. } = upgrade .tx .map(|tx| Transaction::from(tx).try_into().unwrap()) .unwrap_or(abi::Transaction::L1 { @@ -448,6 +495,7 @@ fn upgrade_into_diamond_cut(upgrade: ProtocolUpgrade) -> Token { else { unreachable!() }; + let factory_deps = upgrade.version.minor.is_pre_gateway().then(Vec::new); ProposedUpgrade { l2_protocol_upgrade_tx: tx, factory_deps, diff --git a/core/node/eth_watch/src/tests/mod.rs b/core/node/eth_watch/src/tests/mod.rs index 0b34a34ab63f..36833eb0f2dc 100644 --- a/core/node/eth_watch/src/tests/mod.rs +++ b/core/node/eth_watch/src/tests/mod.rs @@ -56,18 +56,23 @@ fn build_l1_tx(serial_id: u64, eth_block: u64) -> L1Tx { tx.try_into().unwrap() } -fn build_upgrade_tx(id: ProtocolVersionId, eth_block: u64) -> ProtocolUpgradeTx { +fn dummy_bytecode() -> Vec { + vec![0u8; 32] +} + +fn build_upgrade_tx(id: ProtocolVersionId) -> ProtocolUpgradeTx { let tx = ProtocolUpgradeTx { execute: Execute { contract_address: Some(Address::repeat_byte(0x11)), calldata: vec![1, 2, 3], - factory_deps: vec![], + factory_deps: vec![dummy_bytecode(), dummy_bytecode()], value: U256::zero(), }, common_data: ProtocolUpgradeTxCommonData { upgrade_id: id, sender: [1u8; 20].into(), - eth_block, + // Note, that the field is deprecated + eth_block: 0, gas_limit: Default::default(), max_fee_per_gas: Default::default(), gas_per_pubdata_limit: 1u32.into(), @@ -215,6 +220,8 @@ async fn test_normal_operation_upgrade_timestamp() { .await .unwrap(); + let expected_upgrade_tx = build_upgrade_tx(ProtocolVersionId::next()); + let mut storage = connection_pool.connection().await.unwrap(); client 
.add_upgrade_timestamp(&[ @@ -231,7 +238,7 @@ async fn test_normal_operation_upgrade_timestamp() { minor: ProtocolVersionId::next(), patch: 0.into(), }, - tx: Some(build_upgrade_tx(ProtocolVersionId::next(), 18)), + tx: Some(expected_upgrade_tx.clone()), ..Default::default() }, 18, @@ -278,7 +285,20 @@ async fn test_normal_operation_upgrade_timestamp() { .await .unwrap() .expect("no protocol upgrade transaction"); - assert_eq!(tx.common_data.upgrade_id, ProtocolVersionId::next()); + + let ProtocolUpgradeTx { + execute: expected_execute, + common_data: expected_common_data, + .. + } = expected_upgrade_tx; + + let ProtocolUpgradeTx { + execute, + common_data, + .. + } = tx; + assert_eq!(expected_execute, execute); + assert_eq!(expected_common_data, common_data); } #[test_log::test(tokio::test)] @@ -481,7 +501,7 @@ async fn test_batch_root_processor_from_genesis() { .unwrap() .unwrap(); let proof1 = hex::encode(bincode::serialize(&proof1).unwrap()); - assert_eq!(proof1, "000000000600000000000000420000000000000030783030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303042000000000000003078303030303030303030303030303030303030303030303030303030303030303530303030303030303030303030303030303030303030303030303030303030334200000000000000307830303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030316639420000000000000030783031303230303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303042000000000000003078303932343932386331333737613663663234633339633264343666386562396466323365383131623236646333353237653534383339366664346531373362314200000000000000307833373561356266393039636230323134336533363935636136353865303634316537333961613539306630303034646261393335373263343463646239643264"); + assert_eq!(proof1, 
"000000000600000000000000420000000000000030783030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303042000000000000003078303030303030303030303030303030303030303030303030303030303030303530303030303030303030303030303030303030303030303030303030303030334200000000000000307830303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030316639420000000000000030783031303230303031303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303042000000000000003078303932343932386331333737613663663234633339633264343666386562396466323365383131623236646333353237653534383339366664346531373362314200000000000000307833373561356266393039636230323134336533363935636136353865303634316537333961613539306630303034646261393335373263343463646239643264"); sl_client.set_last_finalized_block_number(11).await; watcher.loop_iteration(&mut connection).await.unwrap(); @@ -493,7 +513,7 @@ async fn test_batch_root_processor_from_genesis() { .unwrap() .unwrap(); let proof2 = hex::encode(bincode::serialize(&proof2).unwrap()); - assert_eq!(proof2, 
"0100000007000000000000004200000000000000307830303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303031420000000000000030783130613265663736653730396433313862343539626534396631653864376630326437313230663262353031626330616664646439333566316138313363363742000000000000003078303030303030303030303030303030303030303030303030303030303030303930303030303030303030303030303030303030303030303030303030303030334200000000000000307830303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030316639420000000000000030783031303230303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303042000000000000003078303932343932386331333737613663663234633339633264343666386562396466323365383131623236646333353237653534383339366664346531373362314200000000000000307861333738613230636132376237616533303731643162643763326164613030343639616263353765343239646436663438613833303932646237303539613138"); + assert_eq!(proof2, 
"0100000007000000000000004200000000000000307830303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303031420000000000000030783130613265663736653730396433313862343539626534396631653864376630326437313230663262353031626330616664646439333566316138313363363742000000000000003078303030303030303030303030303030303030303030303030303030303030303930303030303030303030303030303030303030303030303030303030303030334200000000000000307830303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030316639420000000000000030783031303230303031303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303042000000000000003078303932343932386331333737613663663234633339633264343666386562396466323365383131623236646333353237653534383339366664346531373362314200000000000000307861333738613230636132376237616533303731643162643763326164613030343639616263353765343239646436663438613833303932646237303539613138"); let proof3 = connection .blocks_dal() @@ -502,7 +522,7 @@ async fn test_batch_root_processor_from_genesis() { .unwrap() .unwrap(); let proof3 = hex::encode(bincode::serialize(&proof3).unwrap()); - assert_eq!(proof3, 
"02000000080000000000000042000000000000003078303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030324200000000000000307834363730306234643430616335633335616632633232646461323738376139316562353637623036633932346138666238616539613035623230633038633231420000000000000030786530633333333066363734623662326435373866393538613164626436366631363464303638623062623561396662303737656361303133393736666461366642000000000000003078303030303030303030303030303030303030303030303030303030303030306230303030303030303030303030303030303030303030303030303030303030334200000000000000307830303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030316639420000000000000030783031303230303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303042000000000000003078303932343932386331333737613663663234633339633264343666386562396466323365383131623236646333353237653534383339366664346531373362314200000000000000307833373561356266393039636230323134336533363935636136353865303634316537333961613539306630303034646261393335373263343463646239643264"); + assert_eq!(proof3, 
"02000000080000000000000042000000000000003078303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030324200000000000000307834363730306234643430616335633335616632633232646461323738376139316562353637623036633932346138666238616539613035623230633038633231420000000000000030786530633333333066363734623662326435373866393538613164626436366631363464303638623062623561396662303737656361303133393736666461366642000000000000003078303030303030303030303030303030303030303030303030303030303030306230303030303030303030303030303030303030303030303030303030303030334200000000000000307830303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030316639420000000000000030783031303230303031303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303042000000000000003078303932343932386331333737613663663234633339633264343666386562396466323365383131623236646333353237653534383339366664346531373362314200000000000000307833373561356266393039636230323134336533363935636136353865303634316537333961613539306630303034646261393335373263343463646239643264"); } #[test_log::test(tokio::test)] @@ -568,7 +588,7 @@ async fn test_batch_root_processor_restart() { .unwrap() .unwrap(); let proof = hex::encode(bincode::serialize(&proof).unwrap()); - assert_eq!(proof, 
"02000000080000000000000042000000000000003078303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030324200000000000000307834363730306234643430616335633335616632633232646461323738376139316562353637623036633932346138666238616539613035623230633038633231420000000000000030786530633333333066363734623662326435373866393538613164626436366631363464303638623062623561396662303737656361303133393736666461366642000000000000003078303030303030303030303030303030303030303030303030303030303030306230303030303030303030303030303030303030303030303030303030303030334200000000000000307830303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030316639420000000000000030783031303230303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303042000000000000003078303932343932386331333737613663663234633339633264343666386562396466323365383131623236646333353237653534383339366664346531373362314200000000000000307833373561356266393039636230323134336533363935636136353865303634316537333961613539306630303034646261393335373263343463646239643264"); + assert_eq!(proof, 
"02000000080000000000000042000000000000003078303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030324200000000000000307834363730306234643430616335633335616632633232646461323738376139316562353637623036633932346138666238616539613035623230633038633231420000000000000030786530633333333066363734623662326435373866393538613164626436366631363464303638623062623561396662303737656361303133393736666461366642000000000000003078303030303030303030303030303030303030303030303030303030303030306230303030303030303030303030303030303030303030303030303030303030334200000000000000307830303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030316639420000000000000030783031303230303031303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303042000000000000003078303932343932386331333737613663663234633339633264343666386562396466323365383131623236646333353237653534383339366664346531373362314200000000000000307833373561356266393039636230323134336533363935636136353865303634316537333961613539306630303034646261393335373263343463646239643264"); let proof = connection .blocks_dal() @@ -577,7 +597,7 @@ async fn test_batch_root_processor_restart() { .unwrap() .unwrap(); let proof = hex::encode(bincode::serialize(&proof).unwrap()); - assert_eq!(proof, 
"02000000080000000000000042000000000000003078303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030334200000000000000307837623765373735373139343639366666393634616233353837393131373362636337663735356132656161393334653935373061636533393139383435313265420000000000000030786530633333333066363734623662326435373866393538613164626436366631363464303638623062623561396662303737656361303133393736666461366642000000000000003078303030303030303030303030303030303030303030303030303030303030306430303030303030303030303030303030303030303030303030303030303030334200000000000000307830303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030316639420000000000000030783031303230303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303042000000000000003078303932343932386331333737613663663234633339633264343666386562396466323365383131623236646333353237653534383339366664346531373362314200000000000000307835353063313735316338653764626166633839303939326634353532333636663064643565623665343362653535353936386264616338633732656466316261"); + assert_eq!(proof, 
"02000000080000000000000042000000000000003078303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030334200000000000000307837623765373735373139343639366666393634616233353837393131373362636337663735356132656161393334653935373061636533393139383435313265420000000000000030786530633333333066363734623662326435373866393538613164626436366631363464303638623062623561396662303737656361303133393736666461366642000000000000003078303030303030303030303030303030303030303030303030303030303030306430303030303030303030303030303030303030303030303030303030303030334200000000000000307830303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030316639420000000000000030783031303230303031303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303042000000000000003078303932343932386331333737613663663234633339633264343666386562396466323365383131623236646333353237653534383339366664346531373362314200000000000000307835353063313735316338653764626166633839303939326634353532333636663064643565623665343362653535353936386264616338633732656466316261"); let proof = connection .blocks_dal() @@ -586,7 +606,7 @@ async fn test_batch_root_processor_restart() { .unwrap() .unwrap(); let proof = hex::encode(bincode::serialize(&proof).unwrap()); - assert_eq!(proof, 
"030000000900000000000000420000000000000030783030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303442000000000000003078303235663065363031353230366661626364326263613930316432633438396536336263356564346231356266356330633963363066396531363735383564614200000000000000307863633463343165646230633230333133343862323932623736386539626163316565386339326330396566386133323737633265636534303963313264383661420000000000000030783533656463316635616437396335393939626435373864666331333566396335316562643766616661343538356236346637316431356232646365316237323842000000000000003078303030303030303030303030303030303030303030303030303030303030306530303030303030303030303030303030303030303030303030303030303030334200000000000000307830303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030316639420000000000000030783031303230303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303042000000000000003078303932343932386331333737613663663234633339633264343666386562396466323365383131623236646333353237653534383339366664346531373362314200000000000000307833373561356266393039636230323134336533363935636136353865303634316537333961613539306630303034646261393335373263343463646239643264"); + assert_eq!(proof, 
"030000000900000000000000420000000000000030783030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303442000000000000003078303235663065363031353230366661626364326263613930316432633438396536336263356564346231356266356330633963363066396531363735383564614200000000000000307863633463343165646230633230333133343862323932623736386539626163316565386339326330396566386133323737633265636534303963313264383661420000000000000030783533656463316635616437396335393939626435373864666331333566396335316562643766616661343538356236346637316431356232646365316237323842000000000000003078303030303030303030303030303030303030303030303030303030303030306530303030303030303030303030303030303030303030303030303030303030334200000000000000307830303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030316639420000000000000030783031303230303031303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303042000000000000003078303932343932386331333737613663663234633339633264343666386562396466323365383131623236646333353237653534383339366664346531373362314200000000000000307833373561356266393039636230323134336533363935636136353865303634316537333961613539306630303034646261393335373263343463646239643264"); let proof = connection .blocks_dal() @@ -595,7 +615,7 @@ async fn test_batch_root_processor_restart() { .unwrap() .unwrap(); let proof = hex::encode(bincode::serialize(&proof).unwrap()); - assert_eq!(proof, 
"030000000900000000000000420000000000000030783030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303542000000000000003078323465653435363834376535373364313635613832333634306632303834383139636331613865333433316562633635633865363064333435343266313637324200000000000000307863633463343165646230633230333133343862323932623736386539626163316565386339326330396566386133323737633265636534303963313264383661420000000000000030783533656463316635616437396335393939626435373864666331333566396335316562643766616661343538356236346637316431356232646365316237323842000000000000003078303030303030303030303030303030303030303030303030303030303030306530303030303030303030303030303030303030303030303030303030303030334200000000000000307830303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030316639420000000000000030783031303230303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303042000000000000003078303932343932386331333737613663663234633339633264343666386562396466323365383131623236646333353237653534383339366664346531373362314200000000000000307833373561356266393039636230323134336533363935636136353865303634316537333961613539306630303034646261393335373263343463646239643264"); + assert_eq!(proof, 
"030000000900000000000000420000000000000030783030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303542000000000000003078323465653435363834376535373364313635613832333634306632303834383139636331613865333433316562633635633865363064333435343266313637324200000000000000307863633463343165646230633230333133343862323932623736386539626163316565386339326330396566386133323737633265636534303963313264383661420000000000000030783533656463316635616437396335393939626435373864666331333566396335316562643766616661343538356236346637316431356232646365316237323842000000000000003078303030303030303030303030303030303030303030303030303030303030306530303030303030303030303030303030303030303030303030303030303030334200000000000000307830303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030316639420000000000000030783031303230303031303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303042000000000000003078303932343932386331333737613663663234633339633264343666386562396466323365383131623236646333353237653534383339366664346531373362314200000000000000307833373561356266393039636230323134336533363935636136353865303634316537333961613539306630303034646261393335373263343463646239643264"); } async fn get_all_db_txs(storage: &mut Connection<'_, Core>) -> Vec { diff --git a/core/node/fee_model/Cargo.toml b/core/node/fee_model/Cargo.toml index a84a7c5c2173..5b40164a029f 100644 --- a/core/node/fee_model/Cargo.toml +++ b/core/node/fee_model/Cargo.toml @@ -25,3 +25,5 @@ tracing.workspace = true [dev-dependencies] test-casing.workspace = true +zksync_node_test_utils.workspace = true +zksync_node_genesis.workspace = true diff --git a/core/node/fee_model/src/l1_gas_price/main_node_fetcher.rs b/core/node/fee_model/src/l1_gas_price/main_node_fetcher.rs index 259a5e3e3fed..587142ae499b 100644 --- 
a/core/node/fee_model/src/l1_gas_price/main_node_fetcher.rs +++ b/core/node/fee_model/src/l1_gas_price/main_node_fetcher.rs @@ -3,8 +3,9 @@ use std::{ time::Duration, }; +use async_trait::async_trait; use tokio::sync::watch::Receiver; -use zksync_types::fee_model::FeeParams; +use zksync_types::fee_model::{BatchFeeInput, FeeParams}; use zksync_web3_decl::{ client::{DynClient, L2}, error::ClientRpcContext, @@ -15,8 +16,9 @@ use crate::BatchFeeModelInputProvider; const SLEEP_INTERVAL: Duration = Duration::from_secs(5); -/// This structure maintains the known L1 gas price by periodically querying +/// This structure maintains the known fee params/input by periodically querying /// the main node. +/// /// It is required since the main node doesn't only observe the current L1 gas price, /// but also applies adjustments to it in order to smooth out the spikes. /// The same algorithm cannot be consistently replicated on the external node side, @@ -24,28 +26,40 @@ const SLEEP_INTERVAL: Duration = Duration::from_secs(5); #[derive(Debug)] pub struct MainNodeFeeParamsFetcher { client: Box>, - main_node_fee_params: RwLock, + main_node_fee_state: RwLock<(FeeParams, BatchFeeInput)>, } impl MainNodeFeeParamsFetcher { pub fn new(client: Box>) -> Self { + let fee_params = FeeParams::sensible_v1_default(); + let fee_input = fee_params.scale(1.0, 1.0); Self { client: client.for_component("fee_params_fetcher"), - main_node_fee_params: RwLock::new(FeeParams::sensible_v1_default()), + main_node_fee_state: RwLock::new((fee_params, fee_input)), } } pub async fn run(self: Arc, mut stop_receiver: Receiver) -> anyhow::Result<()> { while !*stop_receiver.borrow_and_update() { - let fetch_result = self - .client - .get_fee_params() - .rpc_context("get_fee_params") - .await; - let main_node_fee_params = match fetch_result { - Ok(price) => price, + // We query fee params and fee input together to minimize the potential for them to be + // out of sync. 
They can still be fetched out of sync in rare circumstances but nothing + // in the system *directly* relies on `BatchFeeModelInputProvider::get_fee_model_params` + // except for `zks_getFeeParams`. Which is likely fine because EN is essentially + // mimicking how it observed the call to main node. + let (params_result, input_result) = tokio::join!( + self.client.get_fee_params().rpc_context("get_fee_params"), + self.client + .get_batch_fee_input() + .rpc_context("get_batch_fee_input") + ); + let fee_state_result = + params_result.and_then(|params| input_result.map(|input| (params, input))); + let main_node_fee_state = match fee_state_result { + Ok((fee_params, fee_input)) => { + (fee_params, BatchFeeInput::PubdataIndependent(fee_input)) + } Err(err) => { - tracing::warn!("Unable to get the gas price: {}", err); + tracing::warn!("Unable to get main node's fee params/input: {}", err); // A delay to avoid spamming the main node with requests. if tokio::time::timeout(SLEEP_INTERVAL, stop_receiver.changed()) .await @@ -56,7 +70,7 @@ impl MainNodeFeeParamsFetcher { continue; } }; - *self.main_node_fee_params.write().unwrap() = main_node_fee_params; + *self.main_node_fee_state.write().unwrap() = main_node_fee_state; if tokio::time::timeout(SLEEP_INTERVAL, stop_receiver.changed()) .await @@ -71,8 +85,18 @@ impl MainNodeFeeParamsFetcher { } } +#[async_trait] impl BatchFeeModelInputProvider for MainNodeFeeParamsFetcher { + async fn get_batch_fee_input_scaled( + &self, + // EN's scale factors are ignored as we have already fetched scaled fee input from main node + _l1_gas_price_scale_factor: f64, + _l1_pubdata_price_scale_factor: f64, + ) -> anyhow::Result { + Ok(self.main_node_fee_state.read().unwrap().1) + } + fn get_fee_model_params(&self) -> FeeParams { - *self.main_node_fee_params.read().unwrap() + self.main_node_fee_state.read().unwrap().0 } } diff --git a/core/node/fee_model/src/lib.rs b/core/node/fee_model/src/lib.rs index 380a279cccc1..a66d05f7cb2e 100644 --- 
a/core/node/fee_model/src/lib.rs +++ b/core/node/fee_model/src/lib.rs @@ -1,6 +1,6 @@ use std::{fmt, fmt::Debug, sync::Arc}; -use anyhow::Context as _; +use anyhow::Context; use async_trait::async_trait; use zksync_dal::{ConnectionPool, Core, CoreDal}; use zksync_types::fee_model::{ @@ -112,22 +112,34 @@ impl BatchFeeModelInputProvider for ApiFeeInputProvider { l1_gas_price_scale_factor: f64, l1_pubdata_price_scale_factor: f64, ) -> anyhow::Result { + let mut conn = self + .connection_pool + .connection_tagged("api_fee_input_provider") + .await?; + let latest_batch_header = conn + .blocks_dal() + .get_latest_l1_batch_header() + .await? + .context("no batches were found in the DB")?; + + if !latest_batch_header.is_sealed { + tracing::trace!( + latest_batch_number = %latest_batch_header.number, + "Found an open batch; reporting its fee input" + ); + return Ok(latest_batch_header.fee_input); + } + + tracing::trace!( + latest_batch_number = %latest_batch_header.number, + "No open batch found; fetching from base provider" + ); let inner_input = self .inner .get_batch_fee_input_scaled(l1_gas_price_scale_factor, l1_pubdata_price_scale_factor) .await .context("cannot get batch fee input from base provider")?; - let last_l2_block_params = self - .connection_pool - .connection_tagged("api_fee_input_provider") - .await? - .blocks_dal() - .get_last_sealed_l2_block_header() - .await?; - - Ok(last_l2_block_params - .map(|header| inner_input.stricter(header.batch_fee_input)) - .unwrap_or(inner_input)) + Ok(inner_input) } /// Returns the fee model parameters. 
@@ -161,6 +173,8 @@ mod tests { use l1_gas_price::GasAdjusterClient; use zksync_config::GasAdjusterConfig; use zksync_eth_client::{clients::MockSettlementLayer, BaseFees}; + use zksync_node_genesis::{insert_genesis_batch, GenesisParams}; + use zksync_node_test_utils::create_l1_batch; use zksync_types::{ commitment::L1BatchCommitmentMode, fee_model::{BaseTokenConversionRatio, FeeModelConfigV2}, @@ -378,4 +392,33 @@ mod tests { .await .expect("Failed to create GasAdjuster") } + + #[tokio::test] + async fn test_take_fee_input_from_unsealed_batch() { + let sealed_batch_fee_input = BatchFeeInput::pubdata_independent(1, 2, 3); + let unsealed_batch_fee_input = BatchFeeInput::pubdata_independent(101, 102, 103); + + let pool = ConnectionPool::::test_pool().await; + let mut conn = pool.connection().await.unwrap(); + insert_genesis_batch(&mut conn, &GenesisParams::mock()) + .await + .unwrap(); + + let mut l1_batch_header = create_l1_batch(1); + l1_batch_header.batch_fee_input = sealed_batch_fee_input; + conn.blocks_dal() + .insert_mock_l1_batch(&l1_batch_header) + .await + .unwrap(); + let mut l1_batch_header = create_l1_batch(2); + l1_batch_header.batch_fee_input = unsealed_batch_fee_input; + conn.blocks_dal() + .insert_l1_batch(l1_batch_header.to_unsealed_header()) + .await + .unwrap(); + let provider: &dyn BatchFeeModelInputProvider = + &ApiFeeInputProvider::new(Arc::new(MockBatchFeeParamsProvider::default()), pool); + let fee_input = provider.get_batch_fee_input().await.unwrap(); + assert_eq!(fee_input, unsealed_batch_fee_input); + } } diff --git a/core/node/genesis/src/lib.rs b/core/node/genesis/src/lib.rs index 87457f3b1c01..0425a475c5bc 100644 --- a/core/node/genesis/src/lib.rs +++ b/core/node/genesis/src/lib.rs @@ -8,7 +8,7 @@ use anyhow::Context as _; use zksync_config::GenesisConfig; use zksync_contracts::{ hyperchain_contract, verifier_contract, BaseSystemContracts, BaseSystemContractsHashes, - SET_CHAIN_ID_EVENT, + GENESIS_UPGRADE_EVENT, }; use 
zksync_dal::{custom_genesis_export_dal::GenesisState, Connection, Core, CoreDal, DalError}; use zksync_eth_client::{CallFunctionArgs, EthInterface}; @@ -20,7 +20,7 @@ use zksync_types::{ bytecode::BytecodeHash, commitment::{CommitmentInput, L1BatchCommitment}, fee_model::BatchFeeInput, - protocol_upgrade::decode_set_chain_id_event, + protocol_upgrade::decode_genesis_upgrade_event, protocol_version::{L1VerifierConfig, ProtocolSemanticVersion}, system_contracts::get_system_smart_contracts, u256_to_h256, @@ -517,7 +517,7 @@ pub(crate) async fn create_genesis_l1_batch_from_storage_logs_and_factory_deps( .await?; transaction .blocks_dal() - .insert_l1_batch(genesis_l1_batch_header.to_unsealed_header(batch_fee_input)) + .insert_l1_batch(genesis_l1_batch_header.to_unsealed_header()) .await?; transaction .blocks_dal() @@ -589,14 +589,14 @@ pub async fn save_set_chain_id_tx( storage: &mut Connection<'_, Core>, query_client: &dyn EthInterface, diamond_proxy_address: Address, - state_transition_manager_address: Address, ) -> anyhow::Result<()> { let to = query_client.block_number().await?.as_u64(); let from = to.saturating_sub(PRIORITY_EXPIRATION); + let filter = FilterBuilder::default() - .address(vec![state_transition_manager_address]) + .address(vec![diamond_proxy_address]) .topics( - Some(vec![SET_CHAIN_ID_EVENT.signature()]), + Some(vec![GENESIS_UPGRADE_EVENT.signature()]), Some(vec![diamond_proxy_address.into()]), None, None, @@ -612,7 +612,7 @@ pub async fn save_set_chain_id_tx( logs ); let (version_id, upgrade_tx) = - decode_set_chain_id_event(logs.remove(0)).context("Chain id event is incorrect")?; + decode_genesis_upgrade_event(logs.remove(0)).context("Chain id event is incorrect")?; tracing::info!("New version id {:?}", version_id); storage diff --git a/core/node/metadata_calculator/src/api_server/tests.rs b/core/node/metadata_calculator/src/api_server/tests.rs index 9bb994cb4163..483236c3bc9b 100644 --- a/core/node/metadata_calculator/src/api_server/tests.rs +++ 
b/core/node/metadata_calculator/src/api_server/tests.rs @@ -131,11 +131,13 @@ fn assert_raw_nodes_response(response: &serde_json::Value) { assert_matches!(key, b'0'..=b'9' | b'a'..=b'f'); } - let node = response["0:0"].as_object().expect("not an object"); - assert!( - node.len() == 2 && node.contains_key("internal") && node.contains_key("raw"), - "{node:#?}" - ); + if let Some(value) = response.get("0:0") { + let node = value.as_object().expect("not an object"); + assert!( + node.len() == 2 && node.contains_key("internal") && node.contains_key("raw"), + "{node:#?}" + ); + } } fn assert_raw_stale_keys_response(response: &serde_json::Value) { diff --git a/core/node/node_framework/src/implementations/layers/eth_sender/aggregator.rs b/core/node/node_framework/src/implementations/layers/eth_sender/aggregator.rs index b412c376f68d..1642db9be367 100644 --- a/core/node/node_framework/src/implementations/layers/eth_sender/aggregator.rs +++ b/core/node/node_framework/src/implementations/layers/eth_sender/aggregator.rs @@ -111,24 +111,33 @@ impl WiringLayer for EthTxAggregatorLayer { tracing::info!("Gateway contracts: {:?}", self.gateway_chain_config); // Get resources. 
- let (validator_timelock_addr, multicall3_addr, diamond_proxy_addr) = - if self.settlement_mode.is_gateway() { - let gateway_chain_config = self - .gateway_chain_config - .as_ref() - .context("gateway_chain_config")?; - ( - gateway_chain_config.validator_timelock_addr, - gateway_chain_config.multicall3_addr, - gateway_chain_config.diamond_proxy_addr, - ) - } else { - ( - self.contracts_config.validator_timelock_addr, - self.contracts_config.l1_multicall3_addr, - self.contracts_config.diamond_proxy_addr, - ) - }; + let ( + validator_timelock_addr, + multicall3_addr, + diamond_proxy_addr, + state_transition_manager_address, + ) = if self.settlement_mode.is_gateway() { + let gateway_chain_config = self + .gateway_chain_config + .as_ref() + .context("gateway_chain_config")?; + ( + gateway_chain_config.validator_timelock_addr, + gateway_chain_config.multicall3_addr, + gateway_chain_config.diamond_proxy_addr, + gateway_chain_config.state_transition_proxy_addr, + ) + } else { + ( + self.contracts_config.validator_timelock_addr, + self.contracts_config.l1_multicall3_addr, + self.contracts_config.diamond_proxy_addr, + self.contracts_config + .ecosystem_contracts + .context("Missing ecosystem contracts")? 
+ .state_transition_proxy_addr, + ) + }; let eth_client = if self.settlement_mode.is_gateway() { input @@ -167,6 +176,7 @@ impl WiringLayer for EthTxAggregatorLayer { aggregator, eth_client, validator_timelock_addr, + state_transition_manager_address, multicall3_addr, diamond_proxy_addr, self.zksync_network_id, diff --git a/core/node/node_framework/src/implementations/layers/eth_sender/manager.rs b/core/node/node_framework/src/implementations/layers/eth_sender/manager.rs index e9ce4cc19e1a..b8951c2a91ca 100644 --- a/core/node/node_framework/src/implementations/layers/eth_sender/manager.rs +++ b/core/node/node_framework/src/implementations/layers/eth_sender/manager.rs @@ -6,7 +6,10 @@ use zksync_eth_sender::EthTxManager; use crate::{ implementations::resources::{ circuit_breakers::CircuitBreakersResource, - eth_interface::{BoundEthInterfaceForBlobsResource, BoundEthInterfaceResource}, + eth_interface::{ + BoundEthInterfaceForBlobsResource, BoundEthInterfaceForL2Resource, + BoundEthInterfaceResource, + }, gas_adjuster::GasAdjusterResource, healthcheck::AppHealthCheckResource, pools::{MasterPool, PoolResource, ReplicaPool}, @@ -46,6 +49,7 @@ pub struct Input { pub replica_pool: PoolResource, pub eth_client: BoundEthInterfaceResource, pub eth_client_blobs: Option, + pub eth_client_gateway: Option, pub gas_adjuster: GasAdjusterResource, #[context(default)] pub circuit_breakers: CircuitBreakersResource, @@ -80,10 +84,9 @@ impl WiringLayer for EthTxManagerLayer { let master_pool = input.master_pool.get().await.unwrap(); let replica_pool = input.replica_pool.get().await.unwrap(); - let settlement_mode = self.eth_sender_config.gas_adjuster.unwrap().settlement_mode; let eth_client = input.eth_client.0.clone(); let eth_client_blobs = input.eth_client_blobs.map(|c| c.0); - let l2_client = input.eth_client.0; + let l2_client = input.eth_client_gateway.map(|c| c.0); let config = self.eth_sender_config.sender.context("sender")?; @@ -93,21 +96,9 @@ impl WiringLayer for 
EthTxManagerLayer { master_pool, config, gas_adjuster, - if !settlement_mode.is_gateway() { - Some(eth_client) - } else { - None - }, - if !settlement_mode.is_gateway() { - eth_client_blobs - } else { - None - }, - if settlement_mode.is_gateway() { - Some(l2_client) - } else { - None - }, + Some(eth_client), + eth_client_blobs, + l2_client, ); // Insert circuit breaker. diff --git a/core/node/node_framework/src/implementations/layers/eth_watch.rs b/core/node/node_framework/src/implementations/layers/eth_watch.rs index eeffae4ae6d9..92356e770c40 100644 --- a/core/node/node_framework/src/implementations/layers/eth_watch.rs +++ b/core/node/node_framework/src/implementations/layers/eth_watch.rs @@ -94,6 +94,15 @@ impl WiringLayer for EthWatchLayer { let l1_client = EthHttpQueryClient::new( client, self.contracts_config.diamond_proxy_addr, + self.contracts_config + .ecosystem_contracts + .as_ref() + .and_then(|a| a.l1_bytecodes_supplier_addr), + self.contracts_config + .ecosystem_contracts + .as_ref() + .and_then(|a| a.l1_wrapped_base_token_store), + self.contracts_config.l1_shared_bridge_proxy_addr, self.contracts_config .ecosystem_contracts .as_ref() @@ -101,6 +110,7 @@ impl WiringLayer for EthWatchLayer { self.contracts_config.chain_admin_addr, self.contracts_config.governance_addr, self.eth_watch_config.confirmations_for_eth_event, + self.chain_id, ); let sl_l2_client: Option> = @@ -109,10 +119,17 @@ impl WiringLayer for EthWatchLayer { Some(Box::new(EthHttpQueryClient::new( gateway_client.0, contracts_config.diamond_proxy_addr, + // Only present on L1. + None, + // Only present on L1. + None, + // Only present on L1. 
+ None, Some(contracts_config.state_transition_proxy_addr), contracts_config.chain_admin_addr, contracts_config.governance_addr, self.eth_watch_config.confirmations_for_eth_event, + self.chain_id, ))) } else { None diff --git a/core/node/node_framework/src/implementations/layers/state_keeper/mempool_io.rs b/core/node/node_framework/src/implementations/layers/state_keeper/mempool_io.rs index 77992f34c7f5..79eb233041a6 100644 --- a/core/node/node_framework/src/implementations/layers/state_keeper/mempool_io.rs +++ b/core/node/node_framework/src/implementations/layers/state_keeper/mempool_io.rs @@ -4,7 +4,7 @@ use zksync_config::configs::{ wallets, }; use zksync_state_keeper::{MempoolFetcher, MempoolGuard, MempoolIO, SequencerSealer}; -use zksync_types::{commitment::L1BatchCommitmentMode, Address, L2ChainId}; +use zksync_types::{commitment::PubdataType, Address, L2ChainId}; use crate::{ implementations::resources::{ @@ -40,7 +40,7 @@ pub struct MempoolIOLayer { mempool_config: MempoolConfig, wallets: wallets::StateKeeper, l2_da_validator_addr: Option
, - l1_batch_commit_data_generator_mode: L1BatchCommitmentMode, + pubdata_type: PubdataType, } #[derive(Debug, FromContext)] @@ -66,7 +66,7 @@ impl MempoolIOLayer { mempool_config: MempoolConfig, wallets: wallets::StateKeeper, l2_da_validator_addr: Option
, - l1_batch_commit_data_generator_mode: L1BatchCommitmentMode, + pubdata_type: PubdataType, ) -> Self { Self { zksync_network_id, @@ -74,7 +74,7 @@ impl MempoolIOLayer { mempool_config, wallets, l2_da_validator_addr, - l1_batch_commit_data_generator_mode, + pubdata_type, } } @@ -136,7 +136,7 @@ impl WiringLayer for MempoolIOLayer { self.mempool_config.delay_interval(), self.zksync_network_id, self.l2_da_validator_addr, - self.l1_batch_commit_data_generator_mode, + self.pubdata_type, )?; // Create sealer. diff --git a/core/node/node_framework/src/implementations/layers/web3_api/server/bridge_addresses.rs b/core/node/node_framework/src/implementations/layers/web3_api/server/bridge_addresses.rs index 4ba8098c8399..b85d74699857 100644 --- a/core/node/node_framework/src/implementations/layers/web3_api/server/bridge_addresses.rs +++ b/core/node/node_framework/src/implementations/layers/web3_api/server/bridge_addresses.rs @@ -1,20 +1,119 @@ use std::time::Duration; +use zksync_eth_client::{CallFunctionArgs, ContractCallError}; use zksync_node_api_server::web3::state::BridgeAddressesHandle; +use zksync_types::{ethabi::Contract, Address, L2_ASSET_ROUTER_ADDRESS}; use zksync_web3_decl::{ - client::{DynClient, L2}, + client::{DynClient, L1, L2}, namespaces::ZksNamespaceClient, }; use crate::{StopReceiver, Task, TaskId}; #[derive(Debug)] -pub struct BridgeAddressesUpdaterTask { +pub struct MainNodeUpdaterInner { pub bridge_address_updater: BridgeAddressesHandle, pub main_node_client: Box>, pub update_interval: Option, } +impl MainNodeUpdaterInner { + async fn loop_iteration(&self) { + match self.main_node_client.get_bridge_contracts().await { + Ok(bridge_addresses) => { + self.bridge_address_updater.update(bridge_addresses).await; + } + Err(err) => { + tracing::error!("Failed to query `get_bridge_contracts`, error: {:?}", err); + } + } + } +} + +#[derive(Debug)] +pub struct L1UpdaterInner { + pub bridge_address_updater: BridgeAddressesHandle, + pub l1_eth_client: Box>, + pub 
bridgehub_addr: Address, + pub update_interval: Option, + pub bridgehub_abi: Contract, + pub l1_asset_router_abi: Contract, +} + +struct L1SharedBridgeInfo { + l1_shared_bridge_addr: Address, + should_use_l2_asset_router: bool, +} + +impl L1UpdaterInner { + async fn get_shared_bridge_info(&self) -> Result { + let l1_shared_bridge_addr: Address = CallFunctionArgs::new("sharedBridge", ()) + .for_contract(self.bridgehub_addr, &self.bridgehub_abi) + .call(&self.l1_eth_client) + .await?; + + let l1_nullifier_addr: Result = + CallFunctionArgs::new("L1_NULLIFIER", ()) + .for_contract(l1_shared_bridge_addr, &self.l1_asset_router_abi) + .call(&self.l1_eth_client) + .await; + + // In case we can successfully retrieve the l1 nullifier, this is definitely the new l1 asset router. + // The contrary is not necessarily true: the query can fail either due to network issues or + // due to the contract being outdated. To be conservative, we just always treat such cases as `false`. + let should_use_l2_asset_router = l1_nullifier_addr.is_ok(); + + Ok(L1SharedBridgeInfo { + l1_shared_bridge_addr, + should_use_l2_asset_router, + }) + } + + async fn loop_iteration(&self) { + match self.get_shared_bridge_info().await { + Ok(info) => { + self.bridge_address_updater + .update_l1_shared_bridge(info.l1_shared_bridge_addr) + .await; + // We only update one way: + // - Once the L2 asset router should be used, there is never a need to go back + // - To not undo the previous change in case of a network error + if info.should_use_l2_asset_router { + self.bridge_address_updater + .update_l2_bridges(L2_ASSET_ROUTER_ADDRESS) + .await; + } + } + Err(err) => { + tracing::error!("Failed to query shared bridge address, error: {err:?}"); + } + } + } +} + +// Define the enum to hold either updater +#[derive(Debug)] +pub enum BridgeAddressesUpdaterTask { + L1Updater(L1UpdaterInner), + MainNodeUpdater(MainNodeUpdaterInner), +} + +impl BridgeAddressesUpdaterTask { + async fn loop_iteration(&self) { + match 
self { + BridgeAddressesUpdaterTask::L1Updater(updater) => updater.loop_iteration().await, + BridgeAddressesUpdaterTask::MainNodeUpdater(updater) => updater.loop_iteration().await, + } + } + + fn update_interval(&self) -> Option { + match self { + BridgeAddressesUpdaterTask::L1Updater(updater) => updater.update_interval, + BridgeAddressesUpdaterTask::MainNodeUpdater(updater) => updater.update_interval, + } + } +} + #[async_trait::async_trait] impl Task for BridgeAddressesUpdaterTask { fn id(&self) -> TaskId { @@ -24,16 +123,9 @@ impl Task for BridgeAddressesUpdaterTask { async fn run(self: Box, mut stop_receiver: StopReceiver) -> anyhow::Result<()> { const DEFAULT_INTERVAL: Duration = Duration::from_secs(30); - let update_interval = self.update_interval.unwrap_or(DEFAULT_INTERVAL); + let update_interval = self.update_interval().unwrap_or(DEFAULT_INTERVAL); while !*stop_receiver.0.borrow_and_update() { - match self.main_node_client.get_bridge_contracts().await { - Ok(bridge_addresses) => { - self.bridge_address_updater.update(bridge_addresses).await; - } - Err(err) => { - tracing::error!("Failed to query `get_bridge_contracts`, error: {err:?}"); - } - } + self.loop_iteration().await; if tokio::time::timeout(update_interval, stop_receiver.0.changed()) .await diff --git a/core/node/node_framework/src/implementations/layers/web3_api/server/mod.rs b/core/node/node_framework/src/implementations/layers/web3_api/server/mod.rs index 390d321647cf..b1d9ca79979e 100644 --- a/core/node/node_framework/src/implementations/layers/web3_api/server/mod.rs +++ b/core/node/node_framework/src/implementations/layers/web3_api/server/mod.rs @@ -1,8 +1,11 @@ use std::{num::NonZeroU32, time::Duration}; +use anyhow::Context; +use bridge_addresses::{L1UpdaterInner, MainNodeUpdaterInner}; use tokio::{sync::oneshot, task::JoinHandle}; use zksync_circuit_breaker::replication_lag::ReplicationLagChecker; use zksync_config::configs::api::MaxResponseSize; +use zksync_contracts::{bridgehub_contract, 
l1_asset_router_contract}; use zksync_node_api_server::web3::{ state::{BridgeAddressesHandle, InternalApiConfig, SealedL2BlockNumber}, ApiBuilder, ApiServer, Namespace, @@ -15,6 +18,7 @@ use crate::{ }, resources::{ circuit_breakers::CircuitBreakersResource, + eth_interface::EthInterfaceResource, healthcheck::AppHealthCheckResource, main_node_client::MainNodeClientResource, pools::{PoolResource, ReplicaPool}, @@ -128,6 +132,7 @@ pub struct Input { #[context(default)] pub app_health: AppHealthCheckResource, pub main_node_client: Option, + pub l1_eth_client: EthInterfaceResource, } #[derive(Debug, IntoContext)] @@ -140,7 +145,7 @@ pub struct Output { #[context(task)] pub sealed_l2_block_updater_task: SealedL2BlockUpdaterTask, #[context(task)] - pub bridge_addresses_updater_task: Option, + pub bridge_addresses_updater_task: BridgeAddressesUpdaterTask, } impl Web3ServerLayer { @@ -201,15 +206,29 @@ impl WiringLayer for Web3ServerLayer { number_updater: sealed_l2_block_handle.clone(), pool: updaters_pool, }; - // Bridge addresses updater task must be started for ENs and only for ENs. + + // In case it is an EN, the bridge addresses should be updated by fetching values from the main node. + // It is the main node, the bridge addresses need to be updated by querying the L1. 
let bridge_addresses_updater_task = - input - .main_node_client - .map(|main_node_client| BridgeAddressesUpdaterTask { + if let Some(main_node_client) = input.main_node_client.clone() { + BridgeAddressesUpdaterTask::MainNodeUpdater(MainNodeUpdaterInner { bridge_address_updater: bridge_addresses_handle.clone(), main_node_client: main_node_client.0, update_interval: self.optional_config.bridge_addresses_refresh_interval, - }); + }) + } else { + BridgeAddressesUpdaterTask::L1Updater(L1UpdaterInner { + bridge_address_updater: bridge_addresses_handle.clone(), + l1_eth_client: input.l1_eth_client.0, + bridgehub_addr: self + .internal_api_config + .l1_bridgehub_proxy_addr + .context("Lacking l1 bridgehub proxy address")?, + update_interval: self.optional_config.bridge_addresses_refresh_interval, + bridgehub_abi: bridgehub_contract(), + l1_asset_router_abi: l1_asset_router_contract(), + }) + }; // Build server. let mut api_builder = @@ -233,6 +252,9 @@ impl WiringLayer for Web3ServerLayer { if let Some(sync_state) = sync_state { api_builder = api_builder.with_sync_state(sync_state); } + if let Some(main_node_client) = input.main_node_client { + api_builder = api_builder.with_l2_l1_log_proof_handler(main_node_client.0) + } let replication_lag_limit = self.optional_config.replication_lag_limit; api_builder = self.optional_config.apply(api_builder); diff --git a/core/node/node_storage_init/README.md b/core/node/node_storage_init/README.md index e1b6768878ec..39f7d6fc1205 100644 --- a/core/node/node_storage_init/README.md +++ b/core/node/node_storage_init/README.md @@ -1,5 +1,5 @@ # `zksync_node_storage_init` -A set of actions to ensure that any ZKsync node has initialized storage and can start running. +A set of actions to ensure that any Node has initialized storage and can start running. This includes genesis, but not limited to it, and may involve other steps. 
diff --git a/core/node/node_storage_init/src/external_node/revert.rs b/core/node/node_storage_init/src/external_node/revert.rs index 86d137c6b660..db06a4492bb7 100644 --- a/core/node/node_storage_init/src/external_node/revert.rs +++ b/core/node/node_storage_init/src/external_node/revert.rs @@ -34,6 +34,12 @@ impl RevertStorage for ExternalNodeReverter { Ok(()) } + async fn is_reorg_needed(&self, stop_receiver: watch::Receiver) -> anyhow::Result { + ReorgDetector::new(self.client.clone(), self.pool.clone()) + .check_reorg_presence(stop_receiver) + .await + } + async fn last_correct_batch_for_reorg( &self, stop_receiver: watch::Receiver, diff --git a/core/node/node_storage_init/src/lib.rs b/core/node/node_storage_init/src/lib.rs index 10b0131908ca..a8b72b769a18 100644 --- a/core/node/node_storage_init/src/lib.rs +++ b/core/node/node_storage_init/src/lib.rs @@ -182,10 +182,7 @@ impl NodeStorageInitializer { ) -> anyhow::Result { // May be `true` if stop signal is received, but the node will shut down without launching any tasks anyway. let initialized = if let Some(reverter) = &self.strategy.block_reverter { - reverter - .last_correct_batch_for_reorg(stop_receiver) - .await? - .is_none() + !reverter.is_reorg_needed(stop_receiver).await? 
} else { true }; diff --git a/core/node/node_storage_init/src/main_node/genesis.rs b/core/node/node_storage_init/src/main_node/genesis.rs index a5d6c0e628ac..cef25e87ba7c 100644 --- a/core/node/node_storage_init/src/main_node/genesis.rs +++ b/core/node/node_storage_init/src/main_node/genesis.rs @@ -55,16 +55,13 @@ impl InitializeStorage for MainNodeGenesis { ) .await?; - if let Some(ecosystem_contracts) = &self.contracts.ecosystem_contracts { - zksync_node_genesis::save_set_chain_id_tx( - &mut storage, - &self.l1_client, - self.contracts.diamond_proxy_addr, - ecosystem_contracts.state_transition_proxy_addr, - ) - .await - .context("Failed to save SetChainId upgrade transaction")?; - } + zksync_node_genesis::save_set_chain_id_tx( + &mut storage, + &self.l1_client, + self.contracts.diamond_proxy_addr, + ) + .await + .context("Failed to save SetChainId upgrade transaction")?; Ok(()) } diff --git a/core/node/node_storage_init/src/traits.rs b/core/node/node_storage_init/src/traits.rs index 3b6467764d97..d28b0226d845 100644 --- a/core/node/node_storage_init/src/traits.rs +++ b/core/node/node_storage_init/src/traits.rs @@ -18,6 +18,9 @@ pub trait InitializeStorage: fmt::Debug + Send + Sync + 'static { /// This trait assumes that for any invalid state there exists a batch number to which the storage can be rolled back. #[async_trait::async_trait] pub trait RevertStorage: fmt::Debug + Send + Sync + 'static { + /// Checks whether a reorg is needed for the storage. + async fn is_reorg_needed(&self, stop_receiver: watch::Receiver) -> anyhow::Result; + /// Checks if the storage is invalid state and has to be rolled back. async fn last_correct_batch_for_reorg( &self, diff --git a/core/node/node_sync/src/external_io.rs b/core/node/node_sync/src/external_io.rs index d3d908cfc169..eb79965fa28b 100644 --- a/core/node/node_sync/src/external_io.rs +++ b/core/node/node_sync/src/external_io.rs @@ -113,7 +113,7 @@ impl ExternalIO { .connection_tagged("sync_layer") .await? 
.protocol_versions_dal() - .get_base_system_contract_hashes_by_version_id(protocol_version as u16) + .get_base_system_contract_hashes_by_version_id(protocol_version) .await?; if base_system_contract_hashes.is_some() { return Ok(()); @@ -410,7 +410,7 @@ impl StateKeeperIO for ExternalIO { .connection_tagged("sync_layer") .await? .protocol_versions_dal() - .get_base_system_contract_hashes_by_version_id(protocol_version as u16) + .get_base_system_contract_hashes_by_version_id(protocol_version) .await? .with_context(|| { format!("Cannot load base system contracts' hashes for {protocol_version:?}. They should already be present") diff --git a/core/node/node_sync/src/tree_data_fetcher/provider/mod.rs b/core/node/node_sync/src/tree_data_fetcher/provider/mod.rs index 432808422632..c627006f70e7 100644 --- a/core/node/node_sync/src/tree_data_fetcher/provider/mod.rs +++ b/core/node/node_sync/src/tree_data_fetcher/provider/mod.rs @@ -144,17 +144,11 @@ impl L1DataProvider { diamond_proxy_addr: l1_diamond_proxy_addr, }; let gateway_chain_data = if let Some(client) = gateway_client { - let contract = bridgehub_contract(); - let function_name = if contract.function("getZKChain").is_ok() { - "getZKChain" - } else { - "getHyperchain" - }; let gateway_diamond_proxy = CallFunctionArgs::new( - function_name, + "getZKChain", zksync_types::ethabi::Token::Uint(l2_chain_id.as_u64().into()), ) - .for_contract(L2_BRIDGEHUB_ADDRESS, &contract) + .for_contract(L2_BRIDGEHUB_ADDRESS, &bridgehub_contract()) .call(&client) .await?; let chain_id = client.fetch_chain_id().await?; diff --git a/core/node/node_sync/src/tree_data_fetcher/provider/tests.rs b/core/node/node_sync/src/tree_data_fetcher/provider/tests.rs index e8c855359390..14ab34bab10d 100644 --- a/core/node/node_sync/src/tree_data_fetcher/provider/tests.rs +++ b/core/node/node_sync/src/tree_data_fetcher/provider/tests.rs @@ -256,13 +256,8 @@ fn mock_l1_client(block_number: U64, logs: Vec, chain_id: SLChainId) .method("eth_chainId", move || 
Ok(U64::from(chain_id.0))) .method("eth_call", move |req: CallRequest, _block_id: BlockId| { let contract = bridgehub_contract(); - let function_name = if contract.function("getZKChain").is_ok() { - "getZKChain" - } else { - "getHyperchain" - }; let expected_input = contract - .function(function_name) + .function("getZKChain") .unwrap() .encode_input(&[ethabi::Token::Uint(ERA_CHAIN_ID.into())]) .unwrap(); diff --git a/core/node/reorg_detector/src/lib.rs b/core/node/reorg_detector/src/lib.rs index d1954ca4b74b..ec5b505d7803 100644 --- a/core/node/reorg_detector/src/lib.rs +++ b/core/node/reorg_detector/src/lib.rs @@ -266,26 +266,32 @@ impl ReorgDetector { &self.health_check } - async fn check_consistency(&mut self) -> Result<(), Error> { + async fn find_last_diverged_batch(&mut self) -> Result, HashMatchError> { let mut storage = self.pool.connection().await?; - let Some(local_l1_batch) = storage + // Create a readonly transaction to get a consistent view of the storage. + let mut storage_tx = storage + .transaction_builder()? + .set_readonly() + .build() + .await?; + let Some(local_l1_batch) = storage_tx .blocks_dal() .get_last_l1_batch_number_with_tree_data() .await? else { - return Ok(()); + return Ok(None); }; - let Some(local_l2_block) = storage.blocks_dal().get_sealed_l2_block_number().await? else { - return Ok(()); + let Some(local_l2_block) = storage_tx.blocks_dal().get_sealed_l2_block_number().await? 
+ else { + return Ok(None); }; + drop(storage_tx); drop(storage); let remote_l1_batch = self.client.sealed_l1_batch_number().await?; let remote_l2_block = self.client.sealed_l2_block_number().await?; - let checked_l1_batch = local_l1_batch.min(remote_l1_batch); let checked_l2_block = local_l2_block.min(remote_l2_block); - let root_hashes_match = self.root_hashes_match(checked_l1_batch).await?; let l2_block_hashes_match = self.l2_block_hashes_match(checked_l2_block).await?; @@ -295,13 +301,21 @@ impl ReorgDetector { // In other cases either there is only a height mismatch which means that one of // the nodes needs to do catching up; however, it is not certain that there is actually // a re-org taking place. - if root_hashes_match && l2_block_hashes_match { + Ok(if root_hashes_match && l2_block_hashes_match { self.event_handler .update_correct_block(checked_l2_block, checked_l1_batch); + None + } else { + let diverged_l1_batch = checked_l1_batch + (root_hashes_match as u32); + self.event_handler.report_divergence(diverged_l1_batch); + Some(diverged_l1_batch) + }) + } + + async fn check_consistency(&mut self) -> Result<(), Error> { + let Some(diverged_l1_batch) = self.find_last_diverged_batch().await? else { return Ok(()); - } - let diverged_l1_batch = checked_l1_batch + (root_hashes_match as u32); - self.event_handler.report_divergence(diverged_l1_batch); + }; // Check that the first L1 batch matches, to make sure that // we are actually tracking the same chain as the main node. 
@@ -455,15 +469,7 @@ impl ReorgDetector { ) -> Result<(), Error> { while !*stop_receiver.borrow_and_update() { let sleep_interval = match self.check_consistency().await { - Err(Error::HashMatch(HashMatchError::MissingData(MissingData::RootHash))) => { - tracing::debug!("Last L1 batch on the main node doesn't have a state root hash; waiting until it is computed"); - self.sleep_interval / 10 - } - Err(err) if err.is_retriable() => { - tracing::warn!("Following transient error occurred: {err}"); - tracing::info!("Trying again after a delay"); - self.sleep_interval - } + Err(Error::HashMatch(err)) => self.handle_hash_err(err)?, Err(err) => return Err(err), Ok(()) if stop_after_success => return Ok(()), Ok(()) => self.sleep_interval, @@ -480,6 +486,46 @@ impl ReorgDetector { } Ok(()) } + + /// Returns the sleep interval if the error is transient. + fn handle_hash_err(&self, err: HashMatchError) -> Result { + match err { + HashMatchError::MissingData(MissingData::RootHash) => { + tracing::debug!("Last L1 batch on the main node doesn't have a state root hash; waiting until it is computed"); + Ok(self.sleep_interval / 10) + } + err if err.is_retriable() => { + tracing::warn!("Following transient error occurred: {err}"); + tracing::info!("Trying again after a delay"); + Ok(self.sleep_interval) + } + err => Err(err), + } + } + + /// Checks whether a reorg is present. Unlike [`Self::run_once()`], this method doesn't pinpoint the first diverged L1 batch; + /// it just checks whether diverged batches / blocks exist in general. + /// + /// Internally retries transient errors. Returns `Ok(false)` if a stop signal is received. 
+ pub async fn check_reorg_presence( + &mut self, + mut stop_receiver: watch::Receiver, + ) -> anyhow::Result { + while !*stop_receiver.borrow_and_update() { + let sleep_interval = match self.find_last_diverged_batch().await { + Err(err) => self.handle_hash_err(err)?, + Ok(maybe_diverged_batch) => return Ok(maybe_diverged_batch.is_some()), + }; + + if tokio::time::timeout(sleep_interval, stop_receiver.changed()) + .await + .is_ok() + { + break; + } + } + Ok(false) + } } /// Fallible and async predicate for binary search. diff --git a/core/node/reorg_detector/src/tests.rs b/core/node/reorg_detector/src/tests.rs index 5465cf8662d6..64e9c224d224 100644 --- a/core/node/reorg_detector/src/tests.rs +++ b/core/node/reorg_detector/src/tests.rs @@ -312,12 +312,19 @@ async fn reorg_is_detected_on_batch_hash_mismatch() { store_l2_block(&mut storage, 2, l2_block_hash).await; detector.check_consistency().await.unwrap(); + let (_stop_sender, stop_receiver) = watch::channel(false); + assert!(!detector + .check_reorg_presence(stop_receiver.clone()) + .await + .unwrap()); + seal_l1_batch(&mut storage, 2, H256::repeat_byte(0xff)).await; // ^ Hash of L1 batch #2 differs from that on the main node. 
assert_matches!( detector.check_consistency().await, Err(Error::ReorgDetected(L1BatchNumber(1))) ); + assert!(detector.check_reorg_presence(stop_receiver).await.unwrap()); } #[tokio::test] @@ -621,6 +628,9 @@ async fn reorg_is_detected_based_on_l2_block_hashes(last_correct_l1_batch: u32) detector.check_consistency().await, Err(Error::ReorgDetected(L1BatchNumber(num))) if num == last_correct_l1_batch ); + + let (_stop_sender, stop_receiver) = watch::channel(false); + assert!(detector.check_reorg_presence(stop_receiver).await.unwrap()); } #[derive(Debug)] diff --git a/core/node/state_keeper/src/executor/tests/mod.rs b/core/node/state_keeper/src/executor/tests/mod.rs index eade0233d0e0..8d735e9ed920 100644 --- a/core/node/state_keeper/src/executor/tests/mod.rs +++ b/core/node/state_keeper/src/executor/tests/mod.rs @@ -1,16 +1,20 @@ -// FIXME: move storage-agnostic tests to VM executor crate - use assert_matches::assert_matches; use rand::{thread_rng, Rng}; use test_casing::{test_casing, Product}; +use zksync_contracts::l2_message_root; use zksync_dal::{ConnectionPool, Core}; -use zksync_multivm::interface::{BatchTransactionExecutionResult, ExecutionResult, Halt}; +use zksync_multivm::interface::{ + BatchTransactionExecutionResult, Call, CallType, ExecutionResult, Halt, +}; use zksync_test_contracts::{Account, TestContract}; use zksync_types::{ - get_nonce_key, utils::storage_key_for_eth_balance, vm::FastVmMode, web3, PriorityOpId, H256, + get_nonce_key, + utils::{deployed_address_create, storage_key_for_eth_balance}, + vm::FastVmMode, + web3, Execute, PriorityOpId, H256, L2_MESSAGE_ROOT_ADDRESS, U256, }; -use self::tester::{AccountExt, StorageSnapshot, TestConfig, Tester}; +use self::tester::{AccountExt, StorageSnapshot, TestConfig, Tester, TRANSFER_VALUE}; mod read_storage_factory; mod tester; @@ -63,7 +67,30 @@ async fn execute_l2_tx(storage_type: StorageType, vm_mode: FastVmMode) { let mut tester = Tester::new(connection_pool, vm_mode); tester.genesis().await; 
tester.fund(&[alice.address()]).await; - let mut executor = tester.create_batch_executor(storage_type).await; + + let l2_message_root = l2_message_root(); + let encoded_data = l2_message_root + .function("initialize") + .unwrap() + .encode_input(&[]) + .unwrap(); + + let message_root_init_txn = alice.get_l2_tx_for_execute( + Execute { + contract_address: Some(L2_MESSAGE_ROOT_ADDRESS), + calldata: encoded_data, + value: U256::zero(), + factory_deps: vec![], + }, + None, + ); + + let mut executor = tester + .create_batch_executor_with_init_transactions( + storage_type, + &[message_root_init_txn.clone()], + ) + .await; let res = executor.execute_tx(alice.execute()).await.unwrap(); assert_executed(&res); @@ -106,7 +133,25 @@ async fn execute_l2_tx_after_snapshot_recovery( let mut alice = Account::random(); let connection_pool = ConnectionPool::::constrained_test_pool(1).await; - let mut storage_snapshot = StorageSnapshot::new(&connection_pool, &mut alice, 10).await; + let l2_message_root = l2_message_root(); + let encoded_data = l2_message_root + .function("initialize") + .unwrap() + .encode_input(&[]) + .unwrap(); + + let message_root_init_txn = alice.get_l2_tx_for_execute( + Execute { + contract_address: Some(L2_MESSAGE_ROOT_ADDRESS), + calldata: encoded_data, + value: U256::zero(), + factory_deps: vec![], + }, + None, + ); + + let mut storage_snapshot = + StorageSnapshot::new(&connection_pool, &mut alice, 10, &[message_root_init_txn]).await; assert!(storage_snapshot.storage_logs.len() > 10); // sanity check assert!(!storage_snapshot.factory_deps.is_empty()); if let Some(mutation) = mutation { @@ -138,8 +183,29 @@ async fn execute_l1_tx(vm_mode: FastVmMode) { tester.genesis().await; tester.fund(&[alice.address()]).await; + + let l2_message_root = l2_message_root(); + let encoded_data = l2_message_root + .function("initialize") + .unwrap() + .encode_input(&[]) + .unwrap(); + + let message_root_init_txn = alice.get_l2_tx_for_execute( + Execute { + contract_address: 
Some(L2_MESSAGE_ROOT_ADDRESS), + calldata: encoded_data, + value: U256::zero(), + factory_deps: vec![], + }, + None, + ); + let mut executor = tester - .create_batch_executor(StorageType::AsyncRocksdbCache) + .create_batch_executor_with_init_transactions( + StorageType::AsyncRocksdbCache, + &[message_root_init_txn.clone()], + ) .await; let res = executor @@ -160,8 +226,29 @@ async fn execute_l2_and_l1_txs(vm_mode: FastVmMode) { let mut tester = Tester::new(connection_pool, vm_mode); tester.genesis().await; tester.fund(&[alice.address()]).await; + + let l2_message_root = l2_message_root(); + let encoded_data = l2_message_root + .function("initialize") + .unwrap() + .encode_input(&[]) + .unwrap(); + + let message_root_init_txn = alice.get_l2_tx_for_execute( + Execute { + contract_address: Some(L2_MESSAGE_ROOT_ADDRESS), + calldata: encoded_data, + value: U256::zero(), + factory_deps: vec![], + }, + None, + ); + let mut executor = tester - .create_batch_executor(StorageType::AsyncRocksdbCache) + .create_batch_executor_with_init_transactions( + StorageType::AsyncRocksdbCache, + &[message_root_init_txn.clone()], + ) .await; let res = executor.execute_tx(alice.execute()).await.unwrap(); @@ -243,8 +330,29 @@ async fn rollback(vm_mode: FastVmMode) { tester.genesis().await; tester.fund(&[alice.address()]).await; + + let l2_message_root = l2_message_root(); + let encoded_data = l2_message_root + .function("initialize") + .unwrap() + .encode_input(&[]) + .unwrap(); + + let message_root_init_txn = alice.get_l2_tx_for_execute( + Execute { + contract_address: Some(L2_MESSAGE_ROOT_ADDRESS), + calldata: encoded_data, + value: U256::zero(), + factory_deps: vec![], + }, + None, + ); + let mut executor = tester - .create_batch_executor(StorageType::AsyncRocksdbCache) + .create_batch_executor_with_init_transactions( + StorageType::AsyncRocksdbCache, + &[message_root_init_txn.clone()], + ) .await; let tx = alice.execute(); @@ -297,8 +405,29 @@ async fn too_big_gas_limit(vm_mode: 
FastVmMode) { let mut tester = Tester::new(connection_pool, vm_mode); tester.genesis().await; tester.fund(&[alice.address()]).await; + + let l2_message_root = l2_message_root(); + let encoded_data = l2_message_root + .function("initialize") + .unwrap() + .encode_input(&[]) + .unwrap(); + + let message_root_init_txn = alice.get_l2_tx_for_execute( + Execute { + contract_address: Some(L2_MESSAGE_ROOT_ADDRESS), + calldata: encoded_data, + value: U256::zero(), + factory_deps: vec![], + }, + None, + ); + let mut executor = tester - .create_batch_executor(StorageType::AsyncRocksdbCache) + .create_batch_executor_with_init_transactions( + StorageType::AsyncRocksdbCache, + &[message_root_init_txn.clone()], + ) .await; let big_gas_limit_tx = alice.execute_with_gas_limit(u32::MAX); @@ -341,8 +470,29 @@ async fn deploy_and_call_loadtest(vm_mode: FastVmMode) { let mut tester = Tester::new(connection_pool, vm_mode); tester.genesis().await; tester.fund(&[alice.address()]).await; + + let l2_message_root = l2_message_root(); + let encoded_data = l2_message_root + .function("initialize") + .unwrap() + .encode_input(&[]) + .unwrap(); + + let message_root_init_txn = alice.get_l2_tx_for_execute( + Execute { + contract_address: Some(L2_MESSAGE_ROOT_ADDRESS), + calldata: encoded_data, + value: U256::zero(), + factory_deps: vec![], + }, + None, + ); + let mut executor = tester - .create_batch_executor(StorageType::AsyncRocksdbCache) + .create_batch_executor_with_init_transactions( + StorageType::AsyncRocksdbCache, + &[message_root_init_txn.clone()], + ) .await; let tx = alice.deploy_loadnext_tx(); @@ -389,13 +539,35 @@ async fn deploy_failedcall(vm_mode: FastVmMode) { async fn execute_reverted_tx(vm_mode: FastVmMode) { let connection_pool = ConnectionPool::::constrained_test_pool(1).await; let mut alice = Account::random(); + let mut bob = Account::random(); let mut tester = Tester::new(connection_pool, vm_mode); tester.genesis().await; - tester.fund(&[alice.address()]).await; + 
tester.fund(&[alice.address(), bob.address()]).await; + + let l2_message_root = l2_message_root(); + let encoded_data = l2_message_root + .function("initialize") + .unwrap() + .encode_input(&[]) + .unwrap(); + + let message_root_init_txn = bob.get_l2_tx_for_execute( + Execute { + contract_address: Some(L2_MESSAGE_ROOT_ADDRESS), + calldata: encoded_data, + value: U256::zero(), + factory_deps: vec![], + }, + None, + ); + let mut executor = tester - .create_batch_executor(StorageType::AsyncRocksdbCache) + .create_batch_executor_with_init_transactions( + StorageType::AsyncRocksdbCache, + &[message_root_init_txn.clone()], + ) .await; let tx = alice.deploy_loadnext_tx(); @@ -427,8 +599,29 @@ async fn execute_realistic_scenario(vm_mode: FastVmMode) { tester.genesis().await; tester.fund(&[alice.address()]).await; tester.fund(&[bob.address()]).await; + + let l2_message_root = l2_message_root(); + let encoded_data = l2_message_root + .function("initialize") + .unwrap() + .encode_input(&[]) + .unwrap(); + + let message_root_init_txn = alice.get_l2_tx_for_execute( + Execute { + contract_address: Some(L2_MESSAGE_ROOT_ADDRESS), + calldata: encoded_data, + value: U256::zero(), + factory_deps: vec![], + }, + None, + ); + let mut executor = tester - .create_batch_executor(StorageType::AsyncRocksdbCache) + .create_batch_executor_with_init_transactions( + StorageType::AsyncRocksdbCache, + &[message_root_init_txn.clone()], + ) .await; // A good tx should be executed successfully. 
@@ -567,8 +760,30 @@ async fn catchup_rocksdb_cache() { tester.genesis().await; tester.fund(&[alice.address(), bob.address()]).await; + let l2_message_root = l2_message_root(); + let encoded_data = l2_message_root + .function("initialize") + .unwrap() + .encode_input(&[]) + .unwrap(); + + let message_root_init_txn = alice.get_l2_tx_for_execute( + Execute { + contract_address: Some(L2_MESSAGE_ROOT_ADDRESS), + calldata: encoded_data, + value: U256::zero(), + factory_deps: vec![], + }, + None, + ); + // Execute a bunch of transactions to populate Postgres-based storage (note that RocksDB stays empty) - let mut executor = tester.create_batch_executor(StorageType::Postgres).await; + let mut executor = tester + .create_batch_executor_with_init_transactions( + StorageType::Postgres, + &[message_root_init_txn.clone()], + ) + .await; for _ in 0..10 { let res = executor.execute_tx(alice.execute()).await.unwrap(); assert_executed(&res); @@ -582,7 +797,10 @@ async fn catchup_rocksdb_cache() { // Async RocksDB cache should be aware of the tx and should reject it let mut executor = tester - .create_batch_executor(StorageType::AsyncRocksdbCache) + .create_batch_executor_with_init_transactions( + StorageType::AsyncRocksdbCache, + &[message_root_init_txn.clone()], + ) .await; let res = executor.execute_tx(tx.clone()).await.unwrap(); assert_rejected(&res); @@ -595,7 +813,12 @@ async fn catchup_rocksdb_cache() { tester.wait_for_tasks().await; // Sync RocksDB storage should be aware of the tx and should reject it - let mut executor = tester.create_batch_executor(StorageType::Rocksdb).await; + let mut executor = tester + .create_batch_executor_with_init_transactions( + StorageType::Rocksdb, + &[message_root_init_txn.clone()], + ) + .await; let res = executor.execute_tx(tx).await.unwrap(); assert_rejected(&res); } @@ -633,9 +856,9 @@ async fn execute_tx_with_large_packable_bytecode(vm_mode: FastVmMode) { executor.finish_batch().await.unwrap(); } -#[test_casing(2, [FastVmMode::Old, 
FastVmMode::Shadow])] // new VM doesn't support call tracing yet +#[test_casing(3, FAST_VM_MODES)] #[tokio::test] -async fn execute_tx_with_call_traces(vm_mode: FastVmMode) { +async fn execute_txs_with_call_traces(vm_mode: FastVmMode) { let connection_pool = ConnectionPool::::constrained_test_pool(1).await; let mut alice = Account::random(); let mut tester = Tester::with_config( @@ -655,4 +878,35 @@ async fn execute_tx_with_call_traces(vm_mode: FastVmMode) { assert_matches!(res.tx_result.result, ExecutionResult::Success { .. }); assert!(!res.call_traces.is_empty()); + + find_first_call(&res.call_traces, &|call| { + call.from == alice.address && call.value == TRANSFER_VALUE.into() + }) + .expect("no transfer call"); + + let deploy_tx = alice.deploy_loadnext_tx().tx; + let res = executor.execute_tx(deploy_tx).await.unwrap(); + assert_matches!(res.tx_result.result, ExecutionResult::Success { .. }); + assert!(!res.call_traces.is_empty()); + + let create_call = find_first_call(&res.call_traces, &|call| { + call.from == alice.address && call.r#type == CallType::Create + }) + .expect("no create call"); + + let expected_address = deployed_address_create(alice.address, 0.into()); + assert_eq!(create_call.to, expected_address); + assert!(!create_call.input.is_empty()); +} + +fn find_first_call<'a>(calls: &'a [Call], predicate: &impl Fn(&Call) -> bool) -> Option<&'a Call> { + for call in calls { + if predicate(call) { + return Some(call); + } + if let Some(call) = find_first_call(&call.calls, predicate) { + return Some(call); + } + } + None } diff --git a/core/node/state_keeper/src/executor/tests/tester.rs b/core/node/state_keeper/src/executor/tests/tester.rs index 3727d9c16bfb..6c5015fbca46 100644 --- a/core/node/state_keeper/src/executor/tests/tester.rs +++ b/core/node/state_keeper/src/executor/tests/tester.rs @@ -3,14 +3,16 @@ use std::{collections::HashMap, fmt::Debug, sync::Arc}; +use assert_matches::assert_matches; use tempfile::TempDir; use tokio::{sync::watch, 
task::JoinHandle}; use zksync_config::configs::chain::StateKeeperConfig; -use zksync_dal::{ConnectionPool, Core, CoreDal}; +use zksync_contracts::l2_rollup_da_validator_bytecode; +use zksync_dal::{Connection, ConnectionPool, Core, CoreDal}; use zksync_multivm::{ interface::{ executor::{BatchExecutor, BatchExecutorFactory}, - L1BatchEnv, L2BlockEnv, SystemEnv, + ExecutionResult, L1BatchEnv, L2BlockEnv, SystemEnv, }, utils::StorageWritesDeduplicator, vm_latest::constants::INITIAL_STORAGE_WRITE_PUBDATA_BYTES, @@ -23,8 +25,10 @@ use zksync_test_contracts::{ }; use zksync_types::{ block::L2BlockHasher, + bytecode::BytecodeHash, commitment::PubdataParams, ethabi::Token, + get_code_key, get_known_code_key, protocol_version::ProtocolSemanticVersion, snapshots::{SnapshotRecoveryStatus, SnapshotStorageLog}, system_contracts::get_system_smart_contracts, @@ -38,12 +42,13 @@ use zksync_vm_executor::batch::{MainBatchExecutorFactory, TraceCalls}; use super::{read_storage_factory::RocksdbStorageFactory, StorageType}; use crate::{ - testonly, - testonly::BASE_SYSTEM_CONTRACTS, + testonly::{self, apply_genesis_logs, BASE_SYSTEM_CONTRACTS}, tests::{default_l1_batch_env, default_system_env}, AsyncRocksdbCache, }; +pub(super) const TRANSFER_VALUE: u64 = 123_456_789; + /// Representation of configuration parameters used by the state keeper. /// Has sensible defaults for most tests, each of which can be overridden. #[derive(Debug)] @@ -97,6 +102,22 @@ impl Tester { self.config = config; } + /// Extension of `create_batch_executor` that allows us to run some initial transactions to bootstrap the state. + pub(super) async fn create_batch_executor_with_init_transactions( + &mut self, + storage_type: StorageType, + transactions: &[Transaction], + ) -> Box> { + let mut executor = self.create_batch_executor(storage_type).await; + + for txn in transactions { + let res = executor.execute_tx(txn.clone()).await.unwrap(); + assert_matches!(res.tx_result.result, ExecutionResult::Success { .. 
}); + } + + executor + } + /// Creates a batch executor instance with the specified storage type. /// This function intentionally uses sensible defaults to not introduce boilerplate. pub(super) async fn create_batch_executor( @@ -270,6 +291,9 @@ impl Tester { ) .await .unwrap(); + + // Also setting up the DA for tests + Self::setup_da(&mut storage).await; } } @@ -308,6 +332,33 @@ impl Tester { } } + async fn setup_contract(conn: &mut Connection<'_, Core>, address: Address, code: Vec) { + let hash: H256 = BytecodeHash::for_bytecode(&code).value(); + let known_code_key = get_known_code_key(&hash); + let code_key = get_code_key(&address); + + let logs = [ + StorageLog::new_write_log(known_code_key, H256::from_low_u64_be(1)), + StorageLog::new_write_log(code_key, hash), + ]; + apply_genesis_logs(conn, &logs).await; + + let factory_deps = HashMap::from([(hash, code)]); + conn.factory_deps_dal() + .insert_factory_deps(L2BlockNumber(0), &factory_deps) + .await + .unwrap(); + } + + async fn setup_da(conn: &mut Connection<'_, Core>) { + Self::setup_contract( + conn, + Address::repeat_byte(0x23), + l2_rollup_da_validator_bytecode(), + ) + .await; + } + pub(super) async fn wait_for_tasks(&mut self) { for task in self.tasks.drain(..) { task.await.expect("Failed to join a task"); @@ -380,6 +431,7 @@ impl AccountExt for Account { TxType::L2, ) } + fn l1_execute(&mut self, serial_id: PriorityOpId) -> Transaction { self.get_l1_tx(Execute::transfer(Address::random(), 0.into()), serial_id.0) } @@ -443,7 +495,7 @@ impl AccountExt for Account { /// Automatically increments nonce of the account. 
fn execute_with_gas_limit(&mut self, gas_limit: u32) -> Transaction { self.get_l2_tx_for_execute( - Execute::transfer(Address::random(), 0.into()), + Execute::transfer(Address::random(), TRANSFER_VALUE.into()), Some(testonly::fee(gas_limit)), ) } @@ -557,6 +609,7 @@ impl StorageSnapshot { connection_pool: &ConnectionPool, alice: &mut Account, transaction_count: u32, + transactions: &[Transaction], ) -> Self { let mut tester = Tester::new(connection_pool.clone(), FastVmMode::Old); tester.genesis().await; @@ -594,6 +647,30 @@ impl StorageSnapshot { }; let mut storage_writes_deduplicator = StorageWritesDeduplicator::new(); + for transaction in transactions { + let tx_hash = transaction.hash(); // probably incorrect + let res = executor.execute_tx(transaction.clone()).await.unwrap(); + if !res.tx_result.result.is_failed() { + let storage_logs = &res.tx_result.logs.storage_logs; + storage_writes_deduplicator + .apply(storage_logs.iter().filter(|log| log.log.is_write())); + } else { + panic!("Unexpected tx execution result: {res:?}"); + }; + + let mut hasher = L2BlockHasher::new( + L2BlockNumber(l2_block_env.number), + l2_block_env.timestamp, + l2_block_env.prev_block_hash, + ); + hasher.push_tx_hash(tx_hash); + + l2_block_env.number += 1; + l2_block_env.timestamp += 1; + l2_block_env.prev_block_hash = hasher.finalize(ProtocolVersionId::latest()); + executor.start_next_l2_block(l2_block_env).await.unwrap(); + } + for _ in 0..transaction_count { let tx = alice.execute(); let tx_hash = tx.hash(); // probably incorrect diff --git a/core/node/state_keeper/src/io/mempool.rs b/core/node/state_keeper/src/io/mempool.rs index cf354891236b..f553fcb57a08 100644 --- a/core/node/state_keeper/src/io/mempool.rs +++ b/core/node/state_keeper/src/io/mempool.rs @@ -15,12 +15,12 @@ use zksync_multivm::{interface::Halt, utils::derive_base_fee_and_gas_per_pubdata use zksync_node_fee_model::BatchFeeModelInputProvider; use zksync_types::{ block::UnsealedL1BatchHeader, - 
commitment::{L1BatchCommitmentMode, PubdataParams}, + commitment::{PubdataParams, PubdataType}, protocol_upgrade::ProtocolUpgradeTx, utils::display_timestamp, Address, L1BatchNumber, L2BlockNumber, L2ChainId, ProtocolVersionId, Transaction, H256, U256, }; -use zksync_vm_executor::storage::L1BatchParamsProvider; +use zksync_vm_executor::storage::{get_base_system_contracts_by_version_id, L1BatchParamsProvider}; use crate::{ io::{ @@ -58,7 +58,7 @@ pub struct MempoolIO { batch_fee_input_provider: Arc, chain_id: L2ChainId, l2_da_validator_address: Option
, - pubdata_type: L1BatchCommitmentMode, + pubdata_type: PubdataType, } impl IoSealCriteria for MempoolIO { @@ -382,18 +382,15 @@ impl StateKeeperIO for MempoolIO { protocol_version: ProtocolVersionId, _cursor: &IoCursor, ) -> anyhow::Result { - self.pool - .connection_tagged("state_keeper") - .await? - .protocol_versions_dal() - .load_base_system_contracts_by_version_id(protocol_version as u16) - .await - .context("failed loading base system contracts")? - .with_context(|| { - format!( - "no base system contracts persisted for protocol version {protocol_version:?}" - ) - }) + get_base_system_contracts_by_version_id( + &mut self.pool.connection_tagged("state_keeper").await?, + protocol_version, + ) + .await + .context("failed loading base system contracts")? + .with_context(|| { + format!("no base system contracts persisted for protocol version {protocol_version:?}") + }) } async fn load_batch_version_id( @@ -497,7 +494,7 @@ impl MempoolIO { delay_interval: Duration, chain_id: L2ChainId, l2_da_validator_address: Option
, - pubdata_type: L1BatchCommitmentMode, + pubdata_type: PubdataType, ) -> anyhow::Result { Ok(Self { mempool, diff --git a/core/node/state_keeper/src/io/seal_logic/mod.rs b/core/node/state_keeper/src/io/seal_logic/mod.rs index 655bf182ba8f..475be3319efa 100644 --- a/core/node/state_keeper/src/io/seal_logic/mod.rs +++ b/core/node/state_keeper/src/io/seal_logic/mod.rs @@ -133,6 +133,7 @@ impl UpdatesManager { system_logs: finished_batch.final_execution_state.system_logs.clone(), pubdata_input: finished_batch.pubdata_input.clone(), fee_address: self.fee_account_address, + batch_fee_input: self.batch_fee_input, }; let final_bootloader_memory = finished_batch diff --git a/core/node/state_keeper/src/keeper.rs b/core/node/state_keeper/src/keeper.rs index c892fd8534ec..401150a3fccd 100644 --- a/core/node/state_keeper/src/keeper.rs +++ b/core/node/state_keeper/src/keeper.rs @@ -259,7 +259,7 @@ impl ZkSyncStateKeeper { } /// This function is meant to be called only once during the state-keeper initialization. - /// It will check if we should load a protocol upgrade or a `setChainId` transaction, + /// It will check if we should load a protocol upgrade or a `GenesisUpgrade` transaction, /// perform some checks and return it. pub(super) async fn load_protocol_upgrade_tx( &mut self, @@ -268,9 +268,9 @@ impl ZkSyncStateKeeper { l1_batch_number: L1BatchNumber, ) -> Result, Error> { // After the Shared Bridge is integrated, - // there has to be a setChainId upgrade transaction after the chain genesis. + // there has to be a GenesisUpgrade upgrade transaction after the chain genesis. // It has to be the first transaction of the first batch. - // The setChainId upgrade does not bump the protocol version, but attaches an upgrade + // The GenesisUpgrade upgrade does not bump the protocol version, but attaches an upgrade // transaction to the genesis protocol version. 
let first_batch_in_shared_bridge = l1_batch_number == L1BatchNumber(1) && !protocol_version.is_pre_shared_bridge(); diff --git a/core/node/state_keeper/src/mempool_actor.rs b/core/node/state_keeper/src/mempool_actor.rs index fea1fcf89291..322f159bf53d 100644 --- a/core/node/state_keeper/src/mempool_actor.rs +++ b/core/node/state_keeper/src/mempool_actor.rs @@ -83,6 +83,14 @@ impl MempoolFetcher { let latency = KEEPER_METRICS.mempool_sync.start(); let mut storage = self.pool.connection_tagged("state_keeper").await?; let mempool_info = self.mempool.get_mempool_info(); + + KEEPER_METRICS + .mempool_stashed_accounts + .set(mempool_info.stashed_accounts.len()); + KEEPER_METRICS + .mempool_purged_accounts + .set(mempool_info.purged_accounts.len()); + let protocol_version = storage .blocks_dal() .pending_protocol_version() diff --git a/core/node/state_keeper/src/metrics.rs b/core/node/state_keeper/src/metrics.rs index 7da5babd2199..14d578f683f4 100644 --- a/core/node/state_keeper/src/metrics.rs +++ b/core/node/state_keeper/src/metrics.rs @@ -64,6 +64,10 @@ pub struct StateKeeperMetrics { /// Latency to synchronize the mempool with Postgres. #[metrics(buckets = Buckets::LATENCIES)] pub mempool_sync: Histogram, + /// Number of stashed accounts in mempool + pub mempool_stashed_accounts: Gauge, + /// Number of purged accounts in mempool + pub mempool_purged_accounts: Gauge, /// Latency of the state keeper waiting for a transaction. #[metrics(buckets = Buckets::LATENCIES)] pub waiting_for_tx: Histogram, diff --git a/core/node/state_keeper/src/testonly/mod.rs b/core/node/state_keeper/src/testonly/mod.rs index 3da666628b1b..0484fe3198fd 100644 --- a/core/node/state_keeper/src/testonly/mod.rs +++ b/core/node/state_keeper/src/testonly/mod.rs @@ -1,10 +1,12 @@ //! Test utilities that can be used for testing sequencer that may //! be useful outside of this crate. 
+use std::collections::HashSet; + use async_trait::async_trait; use once_cell::sync::Lazy; use zksync_contracts::BaseSystemContracts; -use zksync_dal::{ConnectionPool, Core, CoreDal as _}; +use zksync_dal::{Connection, ConnectionPool, Core, CoreDal as _}; use zksync_multivm::interface::{ executor::{BatchExecutor, BatchExecutorFactory}, storage::{InMemoryStorage, StorageView}, @@ -74,40 +76,48 @@ impl BatchExecutor for MockBatchExecutor { } } +pub(crate) async fn apply_genesis_logs(storage: &mut Connection<'_, Core>, logs: &[StorageLog]) { + storage + .storage_logs_dal() + .append_storage_logs(L2BlockNumber(0), logs) + .await + .unwrap(); + + let all_hashed_keys: Vec<_> = logs.iter().map(|log| log.key.hashed_key()).collect(); + let repeated_writes = storage + .storage_logs_dedup_dal() + .filter_written_slots(&all_hashed_keys) + .await + .unwrap(); + let initial_writes: Vec<_> = HashSet::from_iter(all_hashed_keys) + .difference(&repeated_writes) + .copied() + .collect(); + storage + .storage_logs_dedup_dal() + .insert_initial_writes(L1BatchNumber(0), &initial_writes) + .await + .unwrap(); +} + /// Adds funds for specified account list. /// Expects genesis to be performed (i.e. `setup_storage` called beforehand). 
pub async fn fund(pool: &ConnectionPool, addresses: &[Address]) { let mut storage = pool.connection().await.unwrap(); let eth_amount = U256::from(10u32).pow(U256::from(32)); //10^32 wei - - for address in addresses { - let key = storage_key_for_standard_token_balance( - AccountTreeId::new(L2_BASE_TOKEN_ADDRESS), - address, - ); - let value = u256_to_h256(eth_amount); - let storage_log = StorageLog::new_write_log(key, value); - - storage - .storage_logs_dal() - .append_storage_logs(L2BlockNumber(0), &[storage_log]) - .await - .unwrap(); - if storage - .storage_logs_dedup_dal() - .filter_written_slots(&[storage_log.key.hashed_key()]) - .await - .unwrap() - .is_empty() - { - storage - .storage_logs_dedup_dal() - .insert_initial_writes(L1BatchNumber(0), &[storage_log.key.hashed_key()]) - .await - .unwrap(); - } - } + let storage_logs: Vec<_> = addresses + .iter() + .map(|address| { + let key = storage_key_for_standard_token_balance( + AccountTreeId::new(L2_BASE_TOKEN_ADDRESS), + address, + ); + StorageLog::new_write_log(key, u256_to_h256(eth_amount)) + }) + .collect(); + + apply_genesis_logs(&mut storage, &storage_logs).await; } pub(crate) const DEFAULT_GAS_PER_PUBDATA: u32 = 10000; diff --git a/core/node/state_keeper/src/tests/mod.rs b/core/node/state_keeper/src/tests/mod.rs index b73741998a03..e235cddf8423 100644 --- a/core/node/state_keeper/src/tests/mod.rs +++ b/core/node/state_keeper/src/tests/mod.rs @@ -365,7 +365,7 @@ async fn load_upgrade_tx() { // TODO: add one more test case for the shared bridge after it's integrated. // If we are processing the 1st batch while using the shared bridge, - // we should load the upgrade transaction -- that's the `SetChainIdUpgrade`. + // we should load the upgrade transaction -- that's the `GenesisUpgrade`. } /// Unconditionally seal the batch without triggering specific criteria. 
diff --git a/core/node/state_keeper/src/updates/mod.rs b/core/node/state_keeper/src/updates/mod.rs index 06ac4bcd5de0..b4f548527652 100644 --- a/core/node/state_keeper/src/updates/mod.rs +++ b/core/node/state_keeper/src/updates/mod.rs @@ -30,7 +30,7 @@ pub mod l2_block_updates; pub struct UpdatesManager { batch_timestamp: u64, pub fee_account_address: Address, - batch_fee_input: BatchFeeInput, + pub batch_fee_input: BatchFeeInput, base_fee_per_gas: u64, base_system_contract_hashes: BaseSystemContractsHashes, protocol_version: ProtocolVersionId, diff --git a/core/tests/loadnext/Cargo.toml b/core/tests/loadnext/Cargo.toml index 91f987035acf..32957f0372ac 100644 --- a/core/tests/loadnext/Cargo.toml +++ b/core/tests/loadnext/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "loadnext" -version = "0.1.0" +version.workspace = true edition.workspace = true authors.workspace = true homepage.workspace = true diff --git a/core/tests/loadnext/src/executor.rs b/core/tests/loadnext/src/executor.rs index 43a1be164b64..d0fbb696b607 100644 --- a/core/tests/loadnext/src/executor.rs +++ b/core/tests/loadnext/src/executor.rs @@ -400,6 +400,7 @@ impl Executor { ) .await .unwrap(); + eth_nonce += U256::one(); eth_txs.push(res); } @@ -428,6 +429,19 @@ impl Executor { } } + let balance = self + .pool + .master_wallet + .get_balance(BlockNumber::Latest, self.l2_main_token) + .await?; + let necessary_balance = + U256::from(self.erc20_transfer_amount() * self.config.accounts_amount as u128); + + tracing::info!( + "Master account token balance on l2: {balance:?}, necessary balance \ + for initial transfers {necessary_balance:?}" + ); + // And then we will prepare an L2 transaction to send ERC20 token (for transfers and fees). 
let mut builder = master_wallet .start_transfer() @@ -441,10 +455,8 @@ impl Executor { self.l2_main_token, MIN_ALLOWANCE_FOR_PAYMASTER_ESTIMATE.into(), ); - let fee = builder.estimate_fee(Some(paymaster_params)).await?; builder = builder.fee(fee.clone()); - let paymaster_params = get_approval_based_paymaster_input( paymaster_address, self.l2_main_token, diff --git a/core/tests/loadnext/src/fs_utils.rs b/core/tests/loadnext/src/fs_utils.rs index 0e5107f40861..9f44b1ff4946 100644 --- a/core/tests/loadnext/src/fs_utils.rs +++ b/core/tests/loadnext/src/fs_utils.rs @@ -17,7 +17,7 @@ pub struct Token { } pub fn read_tokens(network: Network) -> anyhow::Result> { - let home = Workspace::locate().core(); + let home = Workspace::locate().root(); let path = home.join(format!("etc/tokens/{network}.json")); let file = File::open(path)?; let reader = BufReader::new(file); diff --git a/core/tests/loadnext/src/sdk/abi/update-abi.sh b/core/tests/loadnext/src/sdk/abi/update-abi.sh index 3fdcd4d58028..34b7e759c6cf 100755 --- a/core/tests/loadnext/src/sdk/abi/update-abi.sh +++ b/core/tests/loadnext/src/sdk/abi/update-abi.sh @@ -7,7 +7,7 @@ cat $ZKSYNC_HOME/contracts/l1-contracts/artifacts/contracts/bridgehub/IBridgehub cat $ZKSYNC_HOME/contracts/l1-contracts/artifacts/contracts/state-transition/IStateTransitionManager.sol/IStateTransitionManager.json | jq '{ abi: .abi}' > IStateTransitionManager.json cat $ZKSYNC_HOME/contracts/l1-contracts/artifacts/contracts/state-transition/chain-interfaces/IZkSyncHyperchain.sol/IZkSyncHyperchain.json | jq '{ abi: .abi}' > IZkSyncHyperchain.json # Default L1 bridge -cat $ZKSYNC_HOME/contracts/l1-contracts/artifacts/contracts/bridge/interfaces/IL1SharedBridge.sol/IL1SharedBridge.json | jq '{ abi: .abi}' > IL1SharedBridge.json +cat $ZKSYNC_HOME/contracts/l1-contracts/artifacts/contracts/bridge/interfaces/IL1AssetRouter.sol/IL1AssetRouter.json | jq '{ abi: .abi}' > IL1AssetRouter.json cat 
$ZKSYNC_HOME/contracts/l1-contracts/artifacts/contracts/bridge/interfaces/IL1ERC20Bridge.sol/IL1ERC20Bridge.json | jq '{ abi: .abi}' > IL1ERC20Bridge.json # Paymaster interface cat $ZKSYNC_HOME/contracts/l2-contracts/artifacts-zk/contracts/interfaces/IPaymasterFlow.sol/IPaymasterFlow.json | jq '{ abi: .abi}' > IPaymasterFlow.json diff --git a/core/tests/loadnext/src/sdk/ethereum/mod.rs b/core/tests/loadnext/src/sdk/ethereum/mod.rs index 4557c2c43200..bbb3514e2a0d 100644 --- a/core/tests/loadnext/src/sdk/ethereum/mod.rs +++ b/core/tests/loadnext/src/sdk/ethereum/mod.rs @@ -475,7 +475,7 @@ impl EthereumProvider { .as_u64() .ok_or(ClientError::Other)? } else { - 600000u64 + 800000u64 } }; diff --git a/core/tests/recovery-test/package.json b/core/tests/recovery-test/package.json index 8b2ea7f054c0..28c469fc725d 100644 --- a/core/tests/recovery-test/package.json +++ b/core/tests/recovery-test/package.json @@ -23,7 +23,7 @@ "@types/node": "^18.19.15", "@types/node-fetch": "^2.5.7", "chai": "^4.3.4", - "ethers": "^6.7.1", + "ethers": "^6.13.5", "mocha": "^9.0.2", "mocha-steps": "^1.3.0", "node-fetch": "^2.6.1", diff --git a/core/tests/revert-test/package.json b/core/tests/revert-test/package.json index c3be63dff631..7dc2566eb0ef 100644 --- a/core/tests/revert-test/package.json +++ b/core/tests/revert-test/package.json @@ -24,7 +24,7 @@ "@types/node-fetch": "^2.5.7", "chai": "^4.3.4", "ethereumjs-abi": "^0.6.8", - "ethers": "^6.7.1", + "ethers": "^6.13.5", "mocha": "^9.0.2", "mocha-steps": "^1.3.0", "node-fetch": "^2.6.1", diff --git a/core/tests/revert-test/tests/revert-and-restart-en.test.ts b/core/tests/revert-test/tests/revert-and-restart-en.test.ts index ca7f1735b356..a27e3836461a 100644 --- a/core/tests/revert-test/tests/revert-and-restart-en.test.ts +++ b/core/tests/revert-test/tests/revert-and-restart-en.test.ts @@ -54,7 +54,18 @@ function compileBinaries() { console.log('compiling binaries'); run( 'cargo', - ['build', '--release', '--bin', 'zksync_external_node', 
'--bin', 'zksync_server', '--bin', 'block_reverter'], + [ + 'build', + '--manifest-path', + './core/Cargo.toml', + '--release', + '--bin', + 'zksync_external_node', + '--bin', + 'zksync_server', + '--bin', + 'block_reverter' + ], { cwd: process.env.ZKSYNC_HOME } ); } diff --git a/core/tests/revert-test/tests/utils.ts b/core/tests/revert-test/tests/utils.ts index dac19f228ffc..d2cf9df837ad 100644 --- a/core/tests/revert-test/tests/utils.ts +++ b/core/tests/revert-test/tests/utils.ts @@ -144,10 +144,11 @@ async function runBlockReverter( --secrets-path=${configPaths['secrets.yaml']} --wallets-path=${configPaths['wallets.yaml']} --genesis-path=${configPaths['genesis.yaml']} + --gateway-chain-path=${configPaths['gateway_chain.yaml']} `; } - const cmd = `cd ${pathToHome} && RUST_LOG=off cargo run --bin block_reverter --release -- ${args.join( + const cmd = `cd ${pathToHome} && RUST_LOG=off cargo run --manifest-path ./core/Cargo.toml --bin block_reverter --release -- ${args.join( ' ' )} ${fileConfigFlags}`; @@ -312,14 +313,14 @@ export class NodeSpawner { public async spawnMainNode(enableExecute: boolean): Promise> { const env = this.env ?? process.env; - env.ETH_SENDER_SENDER_AGGREGATED_BLOCK_EXECUTE_DEADLINE = enableExecute ? '1' : '10000'; + env.ETH_SENDER_SENDER_L1_BATCH_MIN_AGE_BEFORE_EXECUTE_SECONDS = enableExecute ? '0' : '10000'; // Set full mode for the Merkle tree as it is required to get blocks committed. env.DATABASE_MERKLE_TREE_MODE = 'full'; const { fileConfig, pathToHome, options, logs } = this; if (fileConfig.loadFromFile) { - replaceL1BatchMinAgeBeforeExecuteSeconds(pathToHome, fileConfig, enableExecute ? 1 : 10000); + replaceL1BatchMinAgeBeforeExecuteSeconds(pathToHome, fileConfig, enableExecute ? 
0 : 10000); } let components = 'api,tree,eth,state_keeper,commitment_generator,da_dispatcher,vm_runner_protective_reads'; diff --git a/core/tests/ts-integration/package.json b/core/tests/ts-integration/package.json index ee0fa9c99848..11d9c2d2e4ed 100644 --- a/core/tests/ts-integration/package.json +++ b/core/tests/ts-integration/package.json @@ -4,7 +4,7 @@ "license": "MIT", "private": true, "scripts": { - "test": "zk f jest --forceExit --verbose --testTimeout 120000", + "test": "zk f jest --forceExit --verbose --testTimeout 150000", "long-running-test": "zk f jest", "fee-test": "RUN_FEE_TEST=1 zk f jest -- fees.test.ts", "api-test": "zk f jest -- api/web3.test.ts api/debug.test.ts", @@ -22,8 +22,9 @@ "@types/node": "^18.19.15", "@types/node-fetch": "^2.5.7", "chalk": "^4.0.0", + "elliptic": "^6.5.5", "ethereumjs-abi": "^0.6.8", - "ethers": "^6.7.1", + "ethers": "^6.13.5", "hardhat": "=2.22.2", "jest": "^29.0.3", "jest-environment-node": "^29.0.3", @@ -32,8 +33,7 @@ "ts-jest": "^29.0.1", "ts-node": "^10.1.0", "typescript": "^4.3.5", - "zksync-ethers": "^6.9.0", - "elliptic": "^6.5.5", - "yaml": "^2.4.2" + "yaml": "^2.4.2", + "zksync-ethers": "https://github.com/zksync-sdk/zksync-ethers#sb-use-new-encoding-in-sdk" } } diff --git a/core/tests/ts-integration/src/context-owner.ts b/core/tests/ts-integration/src/context-owner.ts index 6b9c4d0541b2..72d977cda6db 100644 --- a/core/tests/ts-integration/src/context-owner.ts +++ b/core/tests/ts-integration/src/context-owner.ts @@ -604,7 +604,7 @@ export class TestContextOwner { // Reset the reporter context. this.reporter = new Reporter(); try { - if (this.env.nodeMode == NodeMode.Main && isLocalHost(this.env.network.toLowerCase())) { + if (this.env.nodeMode == NodeMode.Main && isLocalHost(this.env.network)) { // Check that the VM execution hasn't diverged using the VM playground. 
The component and thus the main node // will crash on divergence, so we just need to make sure that the test doesn't exit before the VM playground // processes all batches on the node. @@ -612,9 +612,25 @@ export class TestContextOwner { await this.waitForVmPlayground(); this.reporter.finishAction(); } - this.reporter.startAction(`Tearing down the context`); + this.reporter.startAction(`Collecting funds`); await this.collectFunds(); this.reporter.finishAction(); + this.reporter.startAction(`Destroying providers`); + // Destroy providers so that they drop potentially active connections to the node. Not doing so might cause + // unexpected network errors to propagate during node termination. + try { + this.l1Provider.destroy(); + } catch (err: any) { + // Catch any request cancellation errors that propagate here after destroying L1 provider + console.log(`Caught error while destroying L1 provider: ${err}`); + } + try { + this.l2Provider.destroy(); + } catch (err: any) { + // Catch any request cancellation errors that propagate here after destroying L2 provider + console.log(`Caught error while destroying L2 provider: ${err}`); + } + this.reporter.finishAction(); } catch (error: any) { // Report the issue to the console and mark the last action as failed. 
this.reporter.error(`An error occurred: ${error.message || error}`); diff --git a/core/tests/ts-integration/src/env.ts b/core/tests/ts-integration/src/env.ts index 58dc5b08a8d9..1c0725acc13a 100644 --- a/core/tests/ts-integration/src/env.ts +++ b/core/tests/ts-integration/src/env.ts @@ -168,6 +168,7 @@ async function loadTestEnvironmentFromFile(fileConfig: FileConfig): Promise { l2Address: baseTokenAddressL2 }, timestampAsserterAddress, - timestampAsserterMinTimeTillEndSec + timestampAsserterMinTimeTillEndSec, + l2WETHAddress: undefined }; } diff --git a/core/tests/ts-integration/src/helpers.ts b/core/tests/ts-integration/src/helpers.ts index 40d18f1bad63..58b698681388 100644 --- a/core/tests/ts-integration/src/helpers.ts +++ b/core/tests/ts-integration/src/helpers.ts @@ -29,6 +29,10 @@ export function getContractSource(relativePath: string): string { return source; } +export function readContract(path: string, fileName: string) { + return JSON.parse(fs.readFileSync(`${path}/${fileName}.sol/${fileName}.json`, { encoding: 'utf-8' })); +} + /** * Performs a contract deployment * @@ -71,13 +75,87 @@ export async function anyTransaction(wallet: zksync.Wallet): Promise { - // Send a dummy transaction and wait until the new L1 batch is created. - const oldReceipt = await anyTransaction(wallet); + const MAX_ATTEMPTS = 3; + + let txResponse: ethers.TransactionResponse | null = null; + let txReceipt: ethers.TransactionReceipt | null = null; + let nonce = Number(await wallet.getNonce()); + for (let i = 0; i < MAX_ATTEMPTS; i++) { + // Send a dummy transaction and wait for it to execute. We override `maxFeePerGas` as the default ethers behavior + // is to fetch `maxFeePerGas` from the latest sealed block and double it which is not enough for scenarios with + // extreme gas price fluctuations. 
+ let gasPrice = await wallet.provider.getGasPrice(); + if (!txResponse || !txResponse.maxFeePerGas || txResponse.maxFeePerGas < gasPrice) { + txResponse = await wallet + .transfer({ + to: wallet.address, + amount: 0, + overrides: { maxFeePerGas: gasPrice, nonce: nonce, maxPriorityFeePerGas: 0, type: 2 } + }) + .catch((e) => { + // Unlike `waitForTransaction` below, these errors are not wrapped as `EthersError` for some reason + if (e.message.match(/Not enough gas/)) { + console.log( + `Transaction did not have enough gas, likely gas price went up (attempt ${i + 1}/${MAX_ATTEMPTS})` + ); + return null; + } else if (e.message.match(/max fee per gas less than block base fee/)) { + console.log( + `Transaction's max fee per gas was lower than block base fee, likely gas price went up (attempt ${i + 1}/${MAX_ATTEMPTS})` + ); + return null; + } else if (e.message.match(/nonce too low/)) { + if (!txResponse) { + // Our transaction was never accepted to the mempool with this nonce so it must have been used by another transaction. 
+ return wallet.getNonce().then((newNonce) => { + console.log( + `Transaction's nonce is too low, updating from ${nonce} to ${newNonce} (attempt ${i + 1}/${MAX_ATTEMPTS})` + ); + nonce = newNonce; + return null; + }); + } else { + console.log( + `Transaction's nonce is too low, likely previous attempt succeeded, waiting longer (attempt ${i + 1}/${MAX_ATTEMPTS})` + ); + return txResponse; + } + } else { + return Promise.reject(e); + } + }); + if (!txResponse) { + continue; + } + } else { + console.log('Gas price has not gone up, waiting longer'); + } + txReceipt = await wallet.provider.waitForTransaction(txResponse.hash, 1, 3000).catch((e) => { + if (ethers.isError(e, 'TIMEOUT')) { + console.log(`Transaction timed out, potentially gas price went up (attempt ${i + 1}/${MAX_ATTEMPTS})`); + return null; + } else if (ethers.isError(e, 'UNKNOWN_ERROR') && e.message.match(/Not enough gas/)) { + console.log( + `Transaction did not have enough gas, likely gas price went up (attempt ${i + 1}/${MAX_ATTEMPTS})` + ); + return null; + } else { + return Promise.reject(e); + } + }); + if (txReceipt) { + // Transaction got executed, so we can safely assume it will be sealed in the next batch + break; + } + } + if (!txReceipt) { + throw new Error('Failed to force an L1 batch to seal'); + } // Invariant: even with 1 transaction, l1 batch must be eventually sealed, so this loop must exit. 
- while (!(await wallet.provider.getTransactionReceipt(oldReceipt.hash))?.l1BatchNumber) { + while (!(await wallet.provider.getTransactionReceipt(txReceipt.hash))?.l1BatchNumber) { await zksync.utils.sleep(wallet.provider.pollingInterval); } - return (await wallet.provider.getTransactionReceipt(oldReceipt.hash))!; + return (await wallet.provider.getTransactionReceipt(txReceipt.hash))!; } /** @@ -97,6 +175,16 @@ export async function waitUntilBlockFinalized(wallet: zksync.Wallet, blockNumber } } +export async function waitForL2ToL1LogProof(wallet: zksync.Wallet, blockNumber: number, txHash: string) { + // First, we wait for block to be finalized. + await waitUntilBlockFinalized(wallet, blockNumber); + + // Second, we wait for the log proof. + while ((await wallet.provider.getLogProof(txHash)) == null) { + await zksync.utils.sleep(wallet.provider.pollingInterval); + } +} + /** * Returns an increased gas price to decrease chances of L1 transactions being stuck * diff --git a/core/tests/ts-integration/src/modifiers/balance-checker.ts b/core/tests/ts-integration/src/modifiers/balance-checker.ts index 0935e8996691..12cc21b56658 100644 --- a/core/tests/ts-integration/src/modifiers/balance-checker.ts +++ b/core/tests/ts-integration/src/modifiers/balance-checker.ts @@ -284,7 +284,6 @@ function extractRefundForL1ToL2(receipt: zksync.types.TransactionReceipt, refund * @param token Address of the token * @param ignoreUndeployedToken Whether allow token to be not deployed. * If it's set to `true` and token is not deployed, then function returns 0. 
- * @returns Token balance */ async function getBalance( diff --git a/core/tests/ts-integration/src/retry-provider.ts b/core/tests/ts-integration/src/retry-provider.ts index 4c89e0407b9e..9cf63da0638c 100644 --- a/core/tests/ts-integration/src/retry-provider.ts +++ b/core/tests/ts-integration/src/retry-provider.ts @@ -4,6 +4,22 @@ import { Reporter } from './reporter'; import { AugmentedTransactionResponse } from './transaction-response'; import { L1Provider, RetryableL1Wallet } from './l1-provider'; +// Error markers observed on stage so far. +const IGNORED_ERRORS = [ + 'timeout', + 'etimedout', + 'econnrefused', + 'econnreset', + 'bad gateway', + 'service temporarily unavailable', + 'nonetwork' +]; + +function isIgnored(err: any): boolean { + const errString: string = err.toString().toLowerCase(); + return IGNORED_ERRORS.some((sampleErr) => errString.indexOf(sampleErr) !== -1); +} + /** * RetryProvider retries every RPC request if it detects a timeout-related issue on the server side. 
*/ @@ -11,17 +27,39 @@ export class RetryProvider extends zksync.Provider { private readonly reporter: Reporter; private readonly knownTransactionHashes: Set = new Set(); - constructor(_url?: string | { url: string; timeout: number }, network?: ethers.Networkish, reporter?: Reporter) { - let url; + constructor(_url: string | { url: string; timeout: number }, network?: ethers.Networkish, reporter?: Reporter) { + let fetchRequest: ethers.FetchRequest; if (typeof _url === 'object') { - const fetchRequest: ethers.FetchRequest = new ethers.FetchRequest(_url.url); + fetchRequest = new ethers.FetchRequest(_url.url); fetchRequest.timeout = _url.timeout; - url = fetchRequest; } else { - url = _url; + fetchRequest = new ethers.FetchRequest(_url); } + let defaultGetUrlFunc = ethers.FetchRequest.createGetUrlFunc(); + fetchRequest.getUrlFunc = async (req: ethers.FetchRequest, signal?: ethers.FetchCancelSignal) => { + // Retry network requests that failed because of temporary issues (such as timeout, econnreset). + for (let retry = 0; retry < 50; retry++) { + try { + const result = await defaultGetUrlFunc(req, signal); + // If we obtained result not from the first attempt, print a warning. + if (retry != 0) { + this.reporter?.debug(`RPC request ${req} took ${retry} retries to succeed`); + } + return result; + } catch (err: any) { + if (isIgnored(err)) { + // Error is related to timeouts. Sleep a bit and try again. + await zksync.utils.sleep(this.pollingInterval); + continue; + } + // Re-throw any non-timeout-related error. + throw err; + } + } + return Promise.reject(new Error(`Retried too many times, giving up on request=${req}`)); + }; - super(url, network); + super(fetchRequest, network); this.reporter = reporter ?? new Reporter(); } @@ -35,19 +73,7 @@ export class RetryProvider extends zksync.Provider { } return result; } catch (err: any) { - // Error markers observed on stage so far. 
- const ignoredErrors = [ - 'timeout', - 'etimedout', - 'econnrefused', - 'econnreset', - 'bad gateway', - 'service temporarily unavailable', - 'nonetwork' - ]; - const errString: string = err.toString().toLowerCase(); - const found = ignoredErrors.some((sampleErr) => errString.indexOf(sampleErr) !== -1); - if (found) { + if (isIgnored(err)) { // Error is related to timeouts. Sleep a bit and try again. await zksync.utils.sleep(this.pollingInterval); continue; diff --git a/core/tests/ts-integration/src/types.ts b/core/tests/ts-integration/src/types.ts index 014031a3dd7e..f3496c3da376 100644 --- a/core/tests/ts-integration/src/types.ts +++ b/core/tests/ts-integration/src/types.ts @@ -96,6 +96,7 @@ export interface TestEnvironment { healthcheckPort: string; timestampAsserterAddress: string; timestampAsserterMinTimeTillEndSec: number; + l2WETHAddress: string | undefined; } /** diff --git a/core/tests/ts-integration/tests/api/web3.test.ts b/core/tests/ts-integration/tests/api/web3.test.ts index 16e712bb9255..da934c2e459f 100644 --- a/core/tests/ts-integration/tests/api/web3.test.ts +++ b/core/tests/ts-integration/tests/api/web3.test.ts @@ -207,7 +207,6 @@ describe('web3 API compatibility tests', () => { return; } - const EIP1559_TX_TYPE = 2; const amount = 1; const erc20ABI = ['function transfer(address to, uint256 amount)']; const erc20contract = new ethers.Contract(l2Token, erc20ABI, alice); @@ -230,8 +229,9 @@ describe('web3 API compatibility tests', () => { expect(tx1.l1BatchNumber).toEqual(expect.anything()); // Can be anything except `null` or `undefined`. expect(tx1.l1BatchTxIndex).toEqual(expect.anything()); // Can be anything except `null` or `undefined`. expect(tx1.chainId).toEqual(chainId); - expect(tx1.type).toEqual(EIP1559_TX_TYPE); + expect(tx1.type).toEqual(EIP712_TX_TYPE); + const EIP1559_TX_TYPE = 2; expect(receipt!.l1BatchNumber).toEqual(expect.anything()); // Can be anything except `null` or `undefined`. 
expect(receipt!.l1BatchTxIndex).toEqual(expect.anything()); // Can be anything except `null` or `undefined`. expect(receipt!.logs[0].l1BatchNumber).toEqual(receipt!.l1BatchNumber); @@ -240,6 +240,7 @@ describe('web3 API compatibility tests', () => { expect(block.l1BatchTimestamp).toEqual(expect.anything()); expect(blockWithTransactions.l1BatchNumber).toEqual(receipt!.l1BatchNumber); expect(blockWithTransactions.l1BatchTimestamp).toEqual(expect.anything()); + for (const tx of blockWithTransactions.prefetchedTransactions) { expect(tx.l1BatchNumber).toEqual(expect.anything()); // Can be anything except `null` or `undefined`. expect(tx.l1BatchTxIndex).toEqual(expect.anything()); // Can be anything except `null` or `undefined`. diff --git a/core/tests/ts-integration/tests/base-token.test.ts b/core/tests/ts-integration/tests/base-token.test.ts index 432ce70ae17f..a22014751035 100644 --- a/core/tests/ts-integration/tests/base-token.test.ts +++ b/core/tests/ts-integration/tests/base-token.test.ts @@ -7,7 +7,7 @@ import { Token } from '../src/types'; import * as zksync from 'zksync-ethers'; import * as ethers from 'ethers'; -import { scaledGasPrice } from '../src/helpers'; +import { scaledGasPrice, waitForL2ToL1LogProof } from '../src/helpers'; const SECONDS = 2000; jest.setTimeout(100 * SECONDS); @@ -78,7 +78,7 @@ describe('base ERC20 contract checks', () => { // TODO: should all the following tests use strict equality? const finalEthBalance = await alice.getBalanceL1(); - expect(initialEthBalance).toBeGreaterThan(finalEthBalance + fee); // Fee should be taken from the ETH balance on L1. + expect(initialEthBalance).toBeGreaterThanOrEqual(finalEthBalance + fee); // Fee should be taken from the ETH balance on L1. 
const finalL1Balance = await alice.getBalanceL1(baseTokenDetails.l1Address); expect(initialL1Balance).toBeGreaterThanOrEqual(finalL1Balance + amount); @@ -167,7 +167,8 @@ describe('base ERC20 contract checks', () => { const withdrawalPromise = alice.withdraw({ token: baseTokenDetails.l2Address, amount }); await expect(withdrawalPromise).toBeAccepted([]); const withdrawalTx = await withdrawalPromise; - await withdrawalTx.waitFinalize(); + const l2Receipt = await withdrawalTx.wait(); + await waitForL2ToL1LogProof(alice, l2Receipt!.blockNumber, withdrawalTx.hash); await expect(alice.finalizeWithdrawal(withdrawalTx.hash)).toBeAccepted([]); const receipt = await alice._providerL2().getTransactionReceipt(withdrawalTx.hash); diff --git a/core/tests/ts-integration/tests/contracts.test.ts b/core/tests/ts-integration/tests/contracts.test.ts index de1c632ab9cc..aa9dbe6e1a89 100644 --- a/core/tests/ts-integration/tests/contracts.test.ts +++ b/core/tests/ts-integration/tests/contracts.test.ts @@ -7,7 +7,7 @@ */ import { TestMaster } from '../src'; -import { deployContract, getTestContract, waitForNewL1Batch } from '../src/helpers'; +import { deployContract, getTestContract, scaledGasPrice, waitForNewL1Batch } from '../src/helpers'; import { shouldOnlyTakeFee } from '../src/modifiers/balance-checker'; import * as ethers from 'ethers'; @@ -99,22 +99,24 @@ describe('Smart contract behavior checks', () => { return; } + const gasPrice = await scaledGasPrice(alice); const infiniteLoop = await deployContract(alice, contracts.infinite, []); // Test eth_call first // TODO: provide a proper error for transactions that consume too much gas. 
// await expect(infiniteLoop.callStatic.infiniteLoop()).toBeRejected('cannot estimate transaction: out of gas'); // ...and then an actual transaction - await expect(infiniteLoop.infiniteLoop({ gasLimit: 1_000_000 })).toBeReverted([]); + await expect(infiniteLoop.infiniteLoop({ gasLimit: 1_000_000, gasPrice })).toBeReverted([]); }); test('Should test reverting storage logs', async () => { // In this test we check that if transaction reverts, it rolls back the storage slots. const prevValue = await counterContract.get(); + const gasPrice = await scaledGasPrice(alice); - // We manually provide a constant, since otherwise the exception would be thrown - // while estimating gas - await expect(counterContract.incrementWithRevert(5, true, { gasLimit: 5000000 })).toBeReverted([]); + // We manually provide a gas limit and gas price, since otherwise the exception would be thrown + // while querying zks_estimateFee. + await expect(counterContract.incrementWithRevert(5, true, { gasLimit: 5000000, gasPrice })).toBeReverted(); // The tx has been reverted, so the value Should not have been changed: const newValue = await counterContract.get(); diff --git a/core/tests/ts-integration/tests/erc20.test.ts b/core/tests/ts-integration/tests/erc20.test.ts index 9173989ea98b..a0345fb71ab1 100644 --- a/core/tests/ts-integration/tests/erc20.test.ts +++ b/core/tests/ts-integration/tests/erc20.test.ts @@ -8,10 +8,10 @@ import { shouldChangeTokenBalances, shouldOnlyTakeFee } from '../src/modifiers/b import * as zksync from 'zksync-ethers'; import * as ethers from 'ethers'; -import { scaledGasPrice, waitUntilBlockFinalized } from '../src/helpers'; +import { scaledGasPrice, waitForL2ToL1LogProof } from '../src/helpers'; import { L2_DEFAULT_ETH_PER_ACCOUNT } from '../src/context-owner'; -describe('ERC20 contract checks', () => { +describe('L1 ERC20 contract checks', () => { let testMaster: TestMaster; let alice: zksync.Wallet; let bob: zksync.Wallet; @@ -96,6 +96,7 @@ describe('ERC20 contract 
checks', () => { test('Incorrect transfer should revert', async () => { const value = ethers.parseEther('1000000.0'); + const gasPrice = await scaledGasPrice(alice); // Since gas estimation is expected to fail, we request gas limit for similar non-failing tx. const gasLimit = await aliceErc20.transfer.estimateGas(bob.address, 1); @@ -109,12 +110,16 @@ describe('ERC20 contract checks', () => { const feeTaken = await shouldOnlyTakeFee(alice); // Send transfer, it should revert due to lack of balance. - await expect(aliceErc20.transfer(bob.address, value, { gasLimit })).toBeReverted([noBalanceChange, feeTaken]); + await expect(aliceErc20.transfer(bob.address, value, { gasLimit, gasPrice })).toBeReverted([ + noBalanceChange, + feeTaken + ]); }); test('Transfer to zero address should revert', async () => { const zeroAddress = ethers.ZeroAddress; const value = 200n; + const gasPrice = await scaledGasPrice(alice); // Since gas estimation is expected to fail, we request gas limit for similar non-failing tx. const gasLimit = await aliceErc20.transfer.estimateGas(bob.address, 1); @@ -127,7 +132,10 @@ describe('ERC20 contract checks', () => { const feeTaken = await shouldOnlyTakeFee(alice); // Send transfer, it should revert because transfers to zero address are not allowed. 
- await expect(aliceErc20.transfer(zeroAddress, value, { gasLimit })).toBeReverted([noBalanceChange, feeTaken]); + await expect(aliceErc20.transfer(zeroAddress, value, { gasLimit, gasPrice })).toBeReverted([ + noBalanceChange, + feeTaken + ]); }); test('Approve and transferFrom should work', async () => { @@ -166,7 +174,8 @@ describe('ERC20 contract checks', () => { }); await expect(withdrawalPromise).toBeAccepted([l2BalanceChange, feeCheck]); const withdrawalTx = await withdrawalPromise; - await withdrawalTx.waitFinalize(); + const l2TxReceipt = await alice.provider.getTransactionReceipt(withdrawalTx.hash); + await waitForL2ToL1LogProof(alice, l2TxReceipt!.blockNumber, withdrawalTx.hash); // Note: For L1 we should use L1 token address. const l1BalanceChange = await shouldChangeTokenBalances( @@ -176,6 +185,7 @@ describe('ERC20 contract checks', () => { l1: true } ); + await expect(alice.finalizeWithdrawal(withdrawalTx.hash)).toBeAccepted([l1BalanceChange]); }); @@ -206,7 +216,7 @@ describe('ERC20 contract checks', () => { // It throws once it gets status == 0 in the receipt and doesn't wait for the finalization. const l2Hash = zksync.utils.getL2HashFromPriorityOp(l1Receipt, await alice.provider.getMainContractAddress()); const l2TxReceipt = await alice.provider.getTransactionReceipt(l2Hash); - await waitUntilBlockFinalized(alice, l2TxReceipt!.blockNumber); + await waitForL2ToL1LogProof(alice, l2TxReceipt!.blockNumber, l2Hash); // Claim failed deposit. 
await expect(alice.claimFailedDeposit(l2Hash)).toBeAccepted(); await expect(alice.getBalanceL1(tokenDetails.l1Address)).resolves.toEqual(initialBalance); diff --git a/core/tests/ts-integration/tests/ether.test.ts b/core/tests/ts-integration/tests/ether.test.ts index b3f4b6ee14a9..099cf2de8c68 100644 --- a/core/tests/ts-integration/tests/ether.test.ts +++ b/core/tests/ts-integration/tests/ether.test.ts @@ -11,7 +11,7 @@ import { import { checkReceipt } from '../src/modifiers/receipt-check'; import * as zksync from 'zksync-ethers'; -import { scaledGasPrice } from '../src/helpers'; +import { scaledGasPrice, waitForL2ToL1LogProof } from '../src/helpers'; import { ethers } from 'ethers'; describe('ETH token checks', () => { @@ -59,10 +59,9 @@ describe('ETH token checks', () => { const gasPerPubdataByte = zksync.utils.REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_LIMIT; - const l2GasLimit = await zksync.utils.estimateDefaultBridgeDepositL2Gas( + const l2GasLimit = await alice.provider.estimateDefaultBridgeDepositL2Gas( alice.providerL1!, - alice.provider, - zksync.utils.ETH_ADDRESS, + zksync.utils.ETH_ADDRESS_IN_CONTRACTS, amount, alice.address, alice.address, @@ -203,7 +202,10 @@ describe('ETH token checks', () => { const EIP_1559_TX_TYPE = 0x02; const value = 200n; - await expect(alice.sendTransaction({ type: EIP_2930_TX_TYPE, to: bob.address, value })).toBeRejected( + // SDK sets maxFeePerGas to the type 1 transactions, causing issues on the SDK level + const gasPrice = await scaledGasPrice(alice); + + await expect(alice.sendTransaction({ type: EIP_2930_TX_TYPE, to: bob.address, value, gasPrice })).toBeRejected( 'access lists are not supported' ); @@ -258,7 +260,8 @@ describe('ETH token checks', () => { }); await expect(withdrawalPromise).toBeAccepted([l2ethBalanceChange]); const withdrawalTx = await withdrawalPromise; - await withdrawalTx.waitFinalize(); + const l2TxReceipt = await alice.provider.getTransactionReceipt(withdrawalTx.hash); + await waitForL2ToL1LogProof(alice, 
l2TxReceipt!.blockNumber, withdrawalTx.hash); // TODO (SMA-1374): Enable L1 ETH checks as soon as they're supported. await expect(alice.finalizeWithdrawal(withdrawalTx.hash)).toBeAccepted(); diff --git a/core/tests/ts-integration/tests/fees.test.ts b/core/tests/ts-integration/tests/fees.test.ts index 765dc8b73a81..f598cf49ce02 100644 --- a/core/tests/ts-integration/tests/fees.test.ts +++ b/core/tests/ts-integration/tests/fees.test.ts @@ -190,14 +190,16 @@ testFees('Test fees', function () { await ( await alice.sendTransaction({ to: receiver, - value: BigInt(1) + value: BigInt(1), + type: 2 }) ).wait(); await ( await alice.sendTransaction({ data: aliceErc20.interface.encodeFunctionData('transfer', [receiver, 1n]), - to: tokenDetails.l2Address + to: tokenDetails.l2Address, + type: 2 }) ).wait(); @@ -221,22 +223,26 @@ testFees('Test fees', function () { [ { to: ethers.Wallet.createRandom().address, - value: 1n + value: 1n, + type: 2 }, { to: receiver, - value: 1n + value: 1n, + type: 2 }, { data: aliceErc20.interface.encodeFunctionData('transfer', [ ethers.Wallet.createRandom().address, 1n ]), - to: tokenDetails.l2Address + to: tokenDetails.l2Address, + type: 2 }, { data: aliceErc20.interface.encodeFunctionData('transfer', [receiver, 1n]), - to: tokenDetails.l2Address + to: tokenDetails.l2Address, + type: 2 } ], gasPrice, @@ -362,6 +368,9 @@ testFees('Test fees', function () { newPubdataPrice: requiredPubdataPrice }); + // Wait for current batch to close so gas price is updated with the new config set above + await waitForNewL1Batch(alice); + const l1Messenger = new ethers.Contract(zksync.utils.L1_MESSENGER_ADDRESS, zksync.utils.L1_MESSENGER, alice); // Firstly, let's test a successful transaction. @@ -401,9 +410,13 @@ testFees('Test fees', function () { }); afterAll(async () => { - await mainNodeSpawner.killAndSpawnMainNode(); // Returning the pubdata price to the default one // Spawning with no options restores defaults. 
+ await mainNodeSpawner.killAndSpawnMainNode(); + + // Wait for current batch to close so gas price returns to normal. + await waitForNewL1Batch(alice); + await testMaster.deinitialize(); __ZKSYNC_TEST_CONTEXT_OWNER__.setL2NodePid(mainNodeSpawner.mainNode!.proc.pid!); }); @@ -441,8 +454,9 @@ async function updateReport( oldReport: string ): Promise { const expectedL1Price = +ethers.formatEther(l1Receipt.gasUsed * newL1GasPrice); - - const estimatedL2GasPrice = await sender.provider.getGasPrice(); + // This is flaky without multiplying by 3. + const estimatedL2GasPrice = ethers.getBigInt(await sender.provider.send('eth_gasPrice', [])) * 3n; + transactionRequest.maxFeePerGas = estimatedL2GasPrice; const estimatedL2GasLimit = await sender.estimateGas(transactionRequest); const estimatedPrice = estimatedL2GasPrice * estimatedL2GasLimit; diff --git a/core/tests/ts-integration/tests/l1.test.ts b/core/tests/ts-integration/tests/l1.test.ts index 2d9b9fd78d69..2e3cddb29f9e 100644 --- a/core/tests/ts-integration/tests/l1.test.ts +++ b/core/tests/ts-integration/tests/l1.test.ts @@ -8,7 +8,14 @@ import { TestMaster } from '../src'; import * as zksync from 'zksync-ethers'; import * as ethers from 'ethers'; -import { bigIntMax, deployContract, getTestContract, scaledGasPrice, waitForNewL1Batch } from '../src/helpers'; +import { + bigIntMax, + deployContract, + getTestContract, + scaledGasPrice, + waitForL2ToL1LogProof, + waitForNewL1Batch +} from '../src/helpers'; import { L1_MESSENGER, L1_MESSENGER_ADDRESS, REQUIRED_L1_TO_L2_GAS_PER_PUBDATA_LIMIT } from 'zksync-ethers/build/utils'; const contracts = { @@ -135,6 +142,7 @@ describe('Tests for L1 behavior', () => { const l2ToL1LogIndex = receipt.l2ToL1Logs.findIndex( (log: zksync.types.L2ToL1Log) => log.sender == L1_MESSENGER_ADDRESS ); + await waitForL2ToL1LogProof(alice, receipt.blockNumber, tx.hash); const msgProof = await alice.provider.getLogProof(tx.hash, l2ToL1LogIndex); expect(msgProof).toBeTruthy(); diff --git 
a/core/tests/ts-integration/tests/l2-erc20.test.ts b/core/tests/ts-integration/tests/l2-erc20.test.ts new file mode 100644 index 000000000000..16b55b648993 --- /dev/null +++ b/core/tests/ts-integration/tests/l2-erc20.test.ts @@ -0,0 +1,262 @@ +/** + * This suite contains tests checking default ERC-20 contract behavior. + */ + +import { TestMaster } from '../src'; +import { Token } from '../src/types'; +import { shouldChangeTokenBalances, shouldOnlyTakeFee } from '../src/modifiers/balance-checker'; + +import * as zksync from 'zksync-ethers'; +import * as ethers from 'ethers'; +import { Provider, Wallet } from 'ethers'; +import { scaledGasPrice, deployContract, readContract, waitForL2ToL1LogProof } from '../src/helpers'; +import { encodeNTVAssetId } from 'zksync-ethers/build/utils'; + +describe('L2 native ERC20 contract checks', () => { + let testMaster: TestMaster; + let alice: zksync.Wallet; + let isETHBasedChain: boolean; + let baseTokenAddress: string; + let zkTokenAssetId: string; + let tokenDetails: Token; + let aliceErc20: zksync.Contract; + let l1NativeTokenVault: ethers.Contract; + let l1Wallet: Wallet; + let l2Wallet: Wallet; + let l1Provider: Provider; + let l2Provider: Provider; + let l2NativeTokenVault: zksync.Contract; + + beforeAll(async () => { + testMaster = TestMaster.getInstance(__filename); + alice = testMaster.mainAccount(); + const bridgeContracts = await alice.getL1BridgeContracts(); + const assetRouter = bridgeContracts.shared; + l2Provider = alice._providerL2(); + l1Provider = alice._providerL1(); + l2Wallet = new Wallet(alice.privateKey, l2Provider); + l1Wallet = new Wallet(alice.privateKey, l1Provider); + const L2_NATIVE_TOKEN_VAULT_ADDRESS = '0x0000000000000000000000000000000000010004'; + const ARTIFACTS_PATH = '../../../contracts/l1-contracts/out'; + const l2NtvInterface = readContract(`${ARTIFACTS_PATH}`, 'L2NativeTokenVault').abi; + l2NativeTokenVault = new zksync.Contract(L2_NATIVE_TOKEN_VAULT_ADDRESS, l2NtvInterface, l2Wallet); + 
const l1AssetRouterInterface = readContract(`${ARTIFACTS_PATH}`, 'L1AssetRouter').abi; + const l1NativeTokenVaultInterface = readContract(`${ARTIFACTS_PATH}`, 'L1NativeTokenVault').abi; + const l1AssetRouter = new ethers.Contract(await assetRouter.getAddress(), l1AssetRouterInterface, l1Wallet); + l1NativeTokenVault = new ethers.Contract( + await l1AssetRouter.nativeTokenVault(), + l1NativeTokenVaultInterface, + l1Wallet + ); + + // Get the information about base token address directly from the L2. + baseTokenAddress = await alice._providerL2().getBaseTokenContractAddress(); + isETHBasedChain = baseTokenAddress == zksync.utils.ETH_ADDRESS_IN_CONTRACTS; + + const ZkSyncERC20 = await readContract('../../../contracts/l1-contracts/zkout', 'TestnetERC20Token'); + + aliceErc20 = await deployContract(alice, ZkSyncERC20, ['ZKsync', 'ZK', 18]); + const l2TokenAddress = await aliceErc20.getAddress(); + tokenDetails = { + name: 'ZKsync', + symbol: 'ZK', + decimals: 18n, + l1Address: ethers.ZeroAddress, + l2Address: l2TokenAddress + }; + const mintTx = await aliceErc20.mint(alice.address, 1000n); + await mintTx.wait(); + + // We will test that the token can be withdrawn and work with without explicit registration + const l2ChainId = (await l2Provider.getNetwork()).chainId; + zkTokenAssetId = encodeNTVAssetId(l2ChainId, l2TokenAddress); + + const tokenApprovalTx = await aliceErc20.approve(L2_NATIVE_TOKEN_VAULT_ADDRESS, 100n); + await tokenApprovalTx.wait(); + }); + + test('check weth', async () => { + const weth = testMaster.environment().l2WETHAddress; + if (!weth) { + console.log('skip weth'); + return; + } + const wethabi = await readContract('../../../contracts/l2-contracts/zkout', 'L2WETH').abi; + const wethContract = new zksync.Contract(weth, wethabi, alice); + + const name = await wethContract.name(); + expect(name).toEqual('Wrapped ETH'); + + const addressFromNTV = await l2NativeTokenVault.WETH_TOKEN(); + expect(addressFromNTV.toLowerCase()).toEqual(weth.toLowerCase()); 
+ + const wrapTx = await wethContract.deposit({ value: 1 }); + await expect(wrapTx).toBeAccepted(); + + const balance = await wethContract.balanceOf(alice.address); + expect(balance).toEqual(1n); + + const withdrawTx = alice.withdraw({ + token: weth, + amount: 1 + }); + let thrown = false; + try { + await withdrawTx; + } catch (err: any) { + thrown = true; + // TokenNotSupported(weth) + expect(err.toString()).toContain(ethers.concat(['0x06439c6b', ethers.zeroPadBytes('0x', 12), weth])); + } + expect(thrown).toBeTruthy(); + }); + + test('Token properties are correct', async () => { + await expect(aliceErc20.name()).resolves.toBe(tokenDetails.name); + await expect(aliceErc20.decimals()).resolves.toBe(tokenDetails.decimals); + await expect(aliceErc20.symbol()).resolves.toBe(tokenDetails.symbol); + await expect(aliceErc20.balanceOf(alice.address)).resolves.toBeGreaterThan(0n); // 'Alice should have non-zero balance' + }); + + test('Can perform a withdrawal', async () => { + if (testMaster.isFastMode()) { + return; + } + const amount = 10n; + + const l2BalanceChange = await shouldChangeTokenBalances(tokenDetails.l2Address, [ + { wallet: alice, change: -amount } + ]); + const feeCheck = await shouldOnlyTakeFee(alice); + const withdrawalPromise = alice.withdraw({ + token: tokenDetails.l2Address, + amount + }); + await expect(withdrawalPromise).toBeAccepted([l2BalanceChange, feeCheck]); + const withdrawalTx = await withdrawalPromise; + const l2TxReceipt = await alice.provider.getTransactionReceipt(withdrawalTx.hash); + await withdrawalTx.waitFinalize(); + await waitForL2ToL1LogProof(alice, l2TxReceipt!.blockNumber, withdrawalTx.hash); + + await expect(alice.finalizeWithdrawal(withdrawalTx.hash)).toBeAccepted(); + + tokenDetails.l1Address = await l1NativeTokenVault.tokenAddress(zkTokenAssetId); + const balanceAfterBridging = await alice.getBalanceL1(tokenDetails.l1Address); + expect(balanceAfterBridging).toEqual(10n); + }); + + test('Can perform a deposit', async () => { + 
const amount = 1n; // 1 wei is enough. + const gasPrice = await scaledGasPrice(alice); + + // Note: for L1 we should use L1 token address. + const l1BalanceChange = await shouldChangeTokenBalances( + tokenDetails.l1Address, + [{ wallet: alice, change: -amount }], + { + l1: true + } + ); + const l2BalanceChange = await shouldChangeTokenBalances(tokenDetails.l2Address, [ + { wallet: alice, change: amount } + ]); + const feeCheck = await shouldOnlyTakeFee(alice, true); + + await expect( + alice.deposit({ + token: tokenDetails.l1Address, + amount, + approveERC20: true, + approveBaseERC20: true, + approveOverrides: { + gasPrice + }, + overrides: { + gasPrice + } + }) + ).toBeAccepted([l1BalanceChange, l2BalanceChange, feeCheck]); + }); + + test('Should claim failed deposit', async () => { + if (testMaster.isFastMode()) { + return; + } + + const amount = 1n; + const initialBalance = await alice.getBalanceL1(tokenDetails.l1Address); + // Deposit to the zero address is forbidden and should fail with the current implementation. + const depositHandle = await alice.deposit({ + token: tokenDetails.l1Address, + to: ethers.ZeroAddress, + amount, + approveERC20: true, + approveBaseERC20: true, + l2GasLimit: 5_000_000 // Setting the limit manually to avoid estimation for L1->L2 transaction + }); + const l1Receipt = await depositHandle.waitL1Commit(); + + // L1 balance should change, but tx should fail in L2. + await expect(alice.getBalanceL1(tokenDetails.l1Address)).resolves.toEqual(initialBalance - amount); + await expect(depositHandle).toBeReverted(); + + // Wait for tx to be finalized. + // `waitFinalize` is not used because it doesn't work as expected for failed transactions. + // It throws once it gets status == 0 in the receipt and doesn't wait for the finalization. 
+ const l2Hash = zksync.utils.getL2HashFromPriorityOp(l1Receipt, await alice.provider.getMainContractAddress()); + const l2TxReceipt = await alice.provider.getTransactionReceipt(l2Hash); + await waitForL2ToL1LogProof(alice, l2TxReceipt!.blockNumber, l2Hash); + + // Claim failed deposit. + await expect(alice.claimFailedDeposit(l2Hash)).toBeAccepted(); + await expect(alice.getBalanceL1(tokenDetails.l1Address)).resolves.toEqual(initialBalance); + }); + + test('Can perform a deposit with precalculated max value', async () => { + if (!isETHBasedChain) { + // approving whole base token balance + const baseTokenDetails = testMaster.environment().baseToken; + const baseTokenMaxAmount = await alice.getBalanceL1(baseTokenDetails.l1Address); + await (await alice.approveERC20(baseTokenDetails.l1Address, baseTokenMaxAmount)).wait(); + } + + // depositing the max amount: the whole balance of the token + const tokenDepositAmount = await alice.getBalanceL1(tokenDetails.l1Address); + + // approving the needed allowance for the deposit + await (await alice.approveERC20(tokenDetails.l1Address, tokenDepositAmount)).wait(); + + // fee of the deposit in ether + const depositFee = await alice.getFullRequiredDepositFee({ + token: tokenDetails.l1Address + }); + + // checking if alice has enough funds to pay the fee + const l1Fee = depositFee.l1GasLimit * (depositFee.maxFeePerGas! 
|| depositFee.gasPrice!); + const l2Fee = depositFee.baseCost; + const aliceBalance = await alice.getBalanceL1(); + if (aliceBalance < l1Fee + l2Fee) { + throw new Error('Not enough balance to pay the fee'); + } + + // deposit handle with the precalculated max amount + const depositHandle = await alice.deposit({ + token: tokenDetails.l1Address, + amount: tokenDepositAmount, + l2GasLimit: depositFee.l2GasLimit, + approveBaseERC20: true, + approveERC20: true, + overrides: depositFee + }); + + // checking the l2 balance change + const l2TokenBalanceChange = await shouldChangeTokenBalances(tokenDetails.l2Address, [ + { wallet: alice, change: tokenDepositAmount } + ]); + await expect(depositHandle).toBeAccepted([l2TokenBalanceChange]); + }); + + afterAll(async () => { + await testMaster.deinitialize(); + }); +}); diff --git a/core/tests/ts-integration/tests/system.test.ts b/core/tests/ts-integration/tests/system.test.ts index 38b21c5839ae..fd833578e86c 100644 --- a/core/tests/ts-integration/tests/system.test.ts +++ b/core/tests/ts-integration/tests/system.test.ts @@ -11,7 +11,7 @@ import { L2_DEFAULT_ETH_PER_ACCOUNT } from '../src/context-owner'; import * as zksync from 'zksync-ethers'; import * as ethers from 'ethers'; -import { SYSTEM_CONTEXT_ADDRESS, getTestContract } from '../src/helpers'; +import { SYSTEM_CONTEXT_ADDRESS, getTestContract, waitForL2ToL1LogProof } from '../src/helpers'; import { DataAvailabityMode } from '../src/types'; import { BigNumberish } from 'ethers'; @@ -251,6 +251,9 @@ describe('System behavior checks', () => { testMaster.reporter.debug( `Obtained withdrawal receipt for Bob: blockNumber=${bobReceipt.blockNumber}, l1BatchNumber=${bobReceipt.l1BatchNumber}, status=${bobReceipt.status}` ); + + await waitForL2ToL1LogProof(alice, aliceReceipt.blockNumber, aliceReceipt.hash); + await waitForL2ToL1LogProof(bob, bobReceipt.blockNumber, bobReceipt.hash); await expect(alice.finalizeWithdrawal(aliceReceipt.hash)).toBeAccepted([aliceChange]); 
testMaster.reporter.debug('Finalized withdrawal for Alice'); await expect(alice.finalizeWithdrawal(bobReceipt.hash)).toBeAccepted([bobChange]); @@ -295,6 +298,9 @@ describe('System behavior checks', () => { testMaster.reporter.debug( `Obtained withdrawal receipt #2: blockNumber=${receipt2.blockNumber}, l1BatchNumber=${receipt2.l1BatchNumber}, status=${receipt2.status}` ); + + await waitForL2ToL1LogProof(alice, receipt1.blockNumber, receipt1.hash); + await waitForL2ToL1LogProof(alice, receipt2.blockNumber, receipt2.hash); await expect(alice.finalizeWithdrawal(receipt1.hash)).toBeAccepted([change1]); testMaster.reporter.debug('Finalized withdrawal #1'); await expect(alice.finalizeWithdrawal(receipt2.hash)).toBeAccepted([change2]); diff --git a/core/tests/upgrade-test/package.json b/core/tests/upgrade-test/package.json index 5bb23c36d3b8..57aefb7b7caa 100644 --- a/core/tests/upgrade-test/package.json +++ b/core/tests/upgrade-test/package.json @@ -23,7 +23,7 @@ "@types/node-fetch": "^2.5.7", "chai": "^4.3.4", "chai-as-promised": "^7.1.1", - "ethers": "^6.7.1", + "ethers": "^6.13.5", "mocha": "^9.0.2", "mocha-steps": "^1.3.0", "node-fetch": "^2.6.1", diff --git a/core/tests/upgrade-test/tests/upgrade.test.ts b/core/tests/upgrade-test/tests/upgrade.test.ts index b4b950028e1e..b593e5ad6677 100644 --- a/core/tests/upgrade-test/tests/upgrade.test.ts +++ b/core/tests/upgrade-test/tests/upgrade.test.ts @@ -24,13 +24,14 @@ async function logsPath(name: string): Promise { return await logsTestPath(fileConfig.chain, 'logs/upgrade/', name); } +const L2_BRIDGEHUB_ADDRESS = '0x0000000000000000000000000000000000010002'; const pathToHome = path.join(__dirname, '../../../..'); const fileConfig = shouldLoadConfigFromFile(); const contracts: Contracts = initContracts(pathToHome, fileConfig.loadFromFile); const ZK_CHAIN_INTERFACE = JSON.parse( - readFileSync(pathToHome + '/contracts/l1-contracts/out/IZkSyncHyperchain.sol/IZkSyncHyperchain.json').toString() + readFileSync(pathToHome + 
'/contracts/l1-contracts/out/IZKChain.sol/IZKChain.json').toString() ).abi; const depositAmount = ethers.parseEther('0.001'); @@ -70,6 +71,8 @@ describe('Upgrade test', function () { let slMainContract: ethers.Contract; let bootloaderHash: string; + let defaultAccountHash: string; + let bytecodeSupplier: string; let executeOperation: string; let forceDeployAddress: string; let forceDeployBytecode: string; @@ -118,6 +121,7 @@ describe('Upgrade test', function () { ethProviderAddress = secretsConfig.l1.l1_rpc_url; web3JsonRpc = generalConfig.api.web3_json_rpc.http_url; contractsL2DefaultUpgradeAddr = contractsConfig.l2.default_l2_upgrader; + bytecodeSupplier = contractsConfig.ecosystem_contracts.l1_bytecodes_supplier_addr; contractsPriorityTxMaxGasLimit = '72000000'; gatewayInfo = getGatewayInfo(pathToHome, fileConfig.chain); @@ -176,7 +180,7 @@ describe('Upgrade test', function () { const l1CtmContract = new ethers.Contract( contractsConfig.ecosystem_contracts.state_transition_proxy_addr, - contracts.stateTransitionManager, + contracts.chainTypeManager, tester.syncWallet.providerL1 ); ecosystemGovernance = await l1CtmContract.owner(); @@ -262,10 +266,12 @@ describe('Upgrade test', function () { ); bootloaderHash = ethers.hexlify(zksync.utils.hashBytecode(bootloaderCode)); + defaultAccountHash = ethers.hexlify(zksync.utils.hashBytecode(defaultAACode)); - await publishBytecode(tester.syncWallet, bootloaderCode); - await publishBytecode(tester.syncWallet, defaultAACode); - await publishBytecode(tester.syncWallet, forceDeployBytecode); + let nonce = await tester.ethWallet.getNonce(); + nonce += await publishBytecode(tester.ethWallet, bytecodeSupplier, bootloaderCode, nonce); + nonce += await publishBytecode(tester.ethWallet, bytecodeSupplier, defaultAACode, nonce); + await publishBytecode(tester.ethWallet, bytecodeSupplier, forceDeployBytecode, nonce); }); step('Schedule governance call', async () => { @@ -303,11 +309,14 @@ describe('Upgrade test', function () { 
reserved: [0, 0, 0, 0], data, signature: '0x', - factoryDeps: [ethers.hexlify(zksync.utils.hashBytecode(forceDeployBytecode))], + factoryDeps: [ + bootloaderHash, + defaultAccountHash, + ethers.hexlify(zksync.utils.hashBytecode(forceDeployBytecode)) + ], paymasterInput: '0x', reservedDynamic: '0x' }, - factoryDeps: [forceDeployBytecode], bootloaderHash, upgradeTimestamp: 0 }, @@ -315,6 +324,21 @@ describe('Upgrade test', function () { ); executeOperation = chainUpgradeCalldata; + const pauseMigrationCalldata = await pauseMigrationsCalldata( + alice._providerL1(), + alice._providerL2(), + gatewayInfo + ); + console.log('Scheduling pause migration'); + await sendGovernanceOperation(pauseMigrationCalldata.scheduleTransparentOperation, 0, null); + + console.log('Sending pause migration'); + await sendGovernanceOperation( + pauseMigrationCalldata.executeOperation, + pauseMigrationCalldata.executeOperationValue, + gatewayInfo ? gatewayInfo.gatewayProvider : null + ); + console.log('Sending scheduleTransparentOperation'); await sendGovernanceOperation(stmUpgradeData.scheduleTransparentOperation, 0, null); @@ -326,12 +350,18 @@ describe('Upgrade test', function () { ); console.log('Sending chain admin operation'); - await ( - await slAdminGovWallet.sendTransaction({ - to: await slChainAdminContract.getAddress(), - data: setTimestampCalldata - }) - ).wait(); + // Different chain admin impls are used depending on whether gateway is used. + if (gatewayInfo) { + // ChainAdmin.sol: `setUpgradeTimestamp` has onlySelf so we do multicall. + await sendChainAdminOperation({ + target: await slChainAdminContract.getAddress(), + data: setTimestampCalldata, + value: 0 + }); + } else { + // ChainAdminOwnable.sol: `setUpgradeTimestamp` has onlyOwner so we call it directly. + await chainAdminSetTimestamp(setTimestampCalldata); + } // Wait for server to process L1 event. 
await utils.sleep(2); @@ -436,6 +466,17 @@ describe('Upgrade test', function () { console.log('Transaction complete!'); } + async function chainAdminSetTimestamp(data: string) { + const transaction = await slAdminGovWallet.sendTransaction({ + to: await slChainAdminContract.getAddress(), + data, + type: 0 + }); + console.log(`Sent chain admin operation, tx_hash=${transaction.hash}, nonce=${transaction.nonce}`); + await transaction.wait(); + console.log(`Chain admin operation succeeded, tx_hash=${transaction.hash}`); + } + async function sendChainAdminOperation(call: Call) { const executeMulticallData = slChainAdminContract.interface.encodeFunctionData('multicall', [[call], true]); @@ -483,18 +524,26 @@ function readCode(newPath: string, legacyPath: string): string { } } -async function publishBytecode(wallet: zksync.Wallet, bytecode: string) { - const txHandle = await wallet.requestExecute({ - contractAddress: ethers.ZeroAddress, - calldata: '0x', - l2GasLimit: 20000000, - factoryDeps: [bytecode], - overrides: { - gasLimit: 3000000 - } - }); - await txHandle.wait(); - await waitForNewL1Batch(wallet); +async function publishBytecode( + wallet: ethers.Wallet, + bytecodeSupplierAddr: string, + bytecode: string, + nonce: number +): Promise { + const hash = zksync.utils.hashBytecode(bytecode); + const abi = [ + 'function publishBytecode(bytes calldata _bytecode) public', + 'function publishingBlock(bytes32 _hash) public view returns (uint256)' + ]; + + const contract = new ethers.Contract(bytecodeSupplierAddr, abi, wallet); + const block = await contract.publishingBlock(hash); + if (block == BigInt(0)) { + const tx = await contract.publishBytecode(bytecode, { nonce }); + await tx.wait(); + return 1; + } + return 0; } async function checkedRandomTransfer(sender: zksync.Wallet, amount: bigint): Promise { @@ -578,7 +627,6 @@ async function prepareUpgradeCalldata( paymasterInput: BytesLike; reservedDynamic: BytesLike; }; - factoryDeps: BytesLike[]; bootloaderHash?: 
BytesLike; defaultAAHash?: BytesLike; verifier?: string; @@ -604,7 +652,7 @@ async function prepareUpgradeCalldata( const zksyncAddress = await l2Provider.getMainContractAddress(); settlementLayerDiamondProxy = new ethers.Contract(zksyncAddress, ZK_CHAIN_INTERFACE, l1Provider); } - const settlementLayerCTMAddress = await settlementLayerDiamondProxy.getStateTransitionManager(); + const settlementLayerCTMAddress = await settlementLayerDiamondProxy.getChainTypeManager(); const oldProtocolVersion = Number(await settlementLayerDiamondProxy.getProtocolVersion()); const newProtocolVersion = addToProtocolVersion(oldProtocolVersion, 1, 1); @@ -613,7 +661,6 @@ async function prepareUpgradeCalldata( const upgradeInitData = contracts.l1DefaultUpgradeAbi.encodeFunctionData('upgrade', [ [ params.l2ProtocolUpgradeTx, - params.factoryDeps, params.bootloaderHash ?? ethers.ZeroHash, params.defaultAAHash ?? ethers.ZeroHash, params.verifier ?? ethers.ZeroAddress, @@ -633,7 +680,7 @@ async function prepareUpgradeCalldata( }; // Prepare calldata for upgrading STM - const stmUpgradeCalldata = contracts.stateTransitionManager.encodeFunctionData('setNewVersionUpgrade', [ + const stmUpgradeCalldata = contracts.chainTypeManager.encodeFunctionData('setNewVersionUpgrade', [ upgradeParam, oldProtocolVersion, // The protocol version will not have any deadline in this upgrade @@ -670,6 +717,25 @@ async function prepareUpgradeCalldata( }; } +async function pauseMigrationsCalldata( + l1Provider: ethers.Provider, + l2Provider: zksync.Provider, + gatewayInfo: GatewayInfo | null +) { + const l1BridgehubAddr = await l2Provider.getBridgehubContractAddress(); + const to = gatewayInfo ? 
L2_BRIDGEHUB_ADDRESS : l1BridgehubAddr; + + const iface = new ethers.Interface(['function pauseMigration() external']); + + return prepareGovernanceCalldata( + to, + iface.encodeFunctionData('pauseMigration', []), + l1BridgehubAddr, + l1Provider, + gatewayInfo + ); +} + interface UpgradeCalldata { scheduleTransparentOperation: string; executeOperation: string; diff --git a/core/tests/upgrade-test/tests/utils.ts b/core/tests/upgrade-test/tests/utils.ts index 7ea7efb88cb8..9d29bcda4045 100644 --- a/core/tests/upgrade-test/tests/utils.ts +++ b/core/tests/upgrade-test/tests/utils.ts @@ -40,7 +40,7 @@ export interface Contracts { l2ForceDeployUpgraderAbi: any; complexUpgraderAbi: any; counterBytecode: any; - stateTransitionManager: any; + chainTypeManager: any; } export function initContracts(pathToHome: string, zkStack: boolean): Contracts { @@ -68,10 +68,8 @@ export function initContracts(pathToHome: string, zkStack: boolean): Contracts { counterBytecode: require( `${pathToHome}/core/tests/ts-integration/artifacts-zk/contracts/counter/counter.sol/Counter.json` ).deployedBytecode, - stateTransitionManager: new ethers.Interface( - require( - `${CONTRACTS_FOLDER}/l1-contracts/out/StateTransitionManager.sol/StateTransitionManager.json` - ).abi + chainTypeManager: new ethers.Interface( + require(`${CONTRACTS_FOLDER}/l1-contracts/out/ChainTypeManager.sol/ChainTypeManager.json`).abi ) }; } else { @@ -99,10 +97,8 @@ export function initContracts(pathToHome: string, zkStack: boolean): Contracts { ), counterBytecode: require(`${pathToHome}/core/tests/ts-integration/zkout/counter.sol/Counter.json`) .deployedBytecode, - stateTransitionManager: new ethers.Interface( - require( - `${L1_CONTRACTS_FOLDER}/state-transition/StateTransitionManager.sol/StateTransitionManager.json` - ).abi + chainTypeManager: new ethers.Interface( + require(`${L1_CONTRACTS_FOLDER}/state-transition/ChainTypeManager.sol/ChainTypeManager.json`).abi ) }; } diff --git a/core/tests/vm-benchmark/Cargo.toml 
b/core/tests/vm-benchmark/Cargo.toml index eb4a5a239252..f7c1bf880bad 100644 --- a/core/tests/vm-benchmark/Cargo.toml +++ b/core/tests/vm-benchmark/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "vm-benchmark" -version = "0.1.0" +version.workspace = true edition.workspace = true license.workspace = true publish = false diff --git a/docker-compose-gpu-runner-cuda-12-0.yml b/docker-compose-gpu-runner-cuda-12-0.yml index bd91a5a5b0e4..dd2311db7bd8 100644 --- a/docker-compose-gpu-runner-cuda-12-0.yml +++ b/docker-compose-gpu-runner-cuda-12-0.yml @@ -16,7 +16,7 @@ services: command: node --dev --datadir /rethdata --http --http.addr 0.0.0.0 --http.port 8545 --http.corsdomain "*" --dev.block-time 300ms --chain /chaindata/reth_config zk: - image: ghcr.io/matter-labs/zk-environment:cuda-12_0-latest + image: ghcr.io/matter-labs/zk-environment:cuda-12-latest depends_on: - reth - postgres @@ -59,7 +59,9 @@ services: resources: reservations: devices: - - capabilities: [ gpu ] + - driver: nvidia + count: all + capabilities: [gpu] postgres: image: "postgres:14" @@ -72,4 +74,4 @@ services: volumes: postgres-data: - reth-data: \ No newline at end of file + reth-data: diff --git a/docker-compose-gpu-runner.yml b/docker-compose-gpu-runner.yml index 32665eb7010a..807e567de647 100644 --- a/docker-compose-gpu-runner.yml +++ b/docker-compose-gpu-runner.yml @@ -49,7 +49,9 @@ services: resources: reservations: devices: - - capabilities: [ gpu ] + - driver: nvidia + count: all + capabilities: [gpu] postgres: image: "postgres:14" command: postgres -c 'max_connections=200' diff --git a/docker/contract-verifier/Dockerfile b/docker/contract-verifier/Dockerfile index d5f3c53db99f..14cae24c0642 100644 --- a/docker/contract-verifier/Dockerfile +++ b/docker/contract-verifier/Dockerfile @@ -17,7 +17,7 @@ ENV RUSTC_WRAPPER=${RUSTC_WRAPPER} WORKDIR /usr/src/zksync COPY . . 
-RUN cargo build --release --bin zksync_contract_verifier +RUN cargo build --manifest-path ./core/Cargo.toml --release --bin zksync_contract_verifier FROM ghcr.io/matter-labs/zksync-runtime-base:latest @@ -96,7 +96,7 @@ RUN mkdir -p /etc/vyper-bin/0.4.0 \ && mv vyper0.4.0 /etc/vyper-bin/0.4.0/vyper \ && chmod +x /etc/vyper-bin/0.4.0/vyper -COPY --from=builder /usr/src/zksync/target/release/zksync_contract_verifier /usr/bin/ +COPY --from=builder /usr/src/zksync/core/target/release/zksync_contract_verifier /usr/bin/ COPY contracts/system-contracts/zkout/ /contracts/system-contracts/zkout/ # CMD tail -f /dev/null diff --git a/docker/external-node/Dockerfile b/docker/external-node/Dockerfile index 2effe1051b4a..31fab51719a5 100644 --- a/docker/external-node/Dockerfile +++ b/docker/external-node/Dockerfile @@ -15,16 +15,17 @@ ENV RUSTC_WRAPPER=${RUSTC_WRAPPER} WORKDIR /usr/src/zksync COPY . . -RUN cargo build --release --bin zksync_external_node --bin block_reverter +RUN cargo build --manifest-path ./core/Cargo.toml --release --bin zksync_external_node --bin block_reverter FROM ghcr.io/matter-labs/zksync-runtime-base:latest -COPY --from=builder /usr/src/zksync/target/release/zksync_external_node /usr/bin -COPY --from=builder /usr/src/zksync/target/release/block_reverter /usr/bin +COPY --from=builder /usr/src/zksync/core/target/release/zksync_external_node /usr/bin +COPY --from=builder /usr/src/zksync/core/target/release/block_reverter /usr/bin COPY --from=builder /usr/local/cargo/bin/sqlx /usr/bin COPY --from=builder /usr/src/zksync/docker/external-node/entrypoint.sh /usr/bin COPY contracts/system-contracts/zkout/ /contracts/system-contracts/zkout/ COPY contracts/l1-contracts/out/ /contracts/l1-contracts/out/ +COPY contracts/l1-contracts/zkout/ /contracts/l1-contracts/zkout/ COPY contracts/l2-contracts/zkout/ /contracts/l2-contracts/zkout/ COPY etc/tokens/ /etc/tokens/ COPY etc/ERC20/ /etc/ERC20/ diff --git a/docker/proof-fri-gpu-compressor-gar/Dockerfile 
b/docker/proof-fri-gpu-compressor-gar/Dockerfile new file mode 100644 index 000000000000..d74440bd009b --- /dev/null +++ b/docker/proof-fri-gpu-compressor-gar/Dockerfile @@ -0,0 +1,18 @@ +ARG PROOF_COMPRESSOR_IMAGE +FROM us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/proof-fri-gpu-compressor:2.0-$PROOF_COMPRESSOR_IMAGE as proof_fri_gpu +FROM nvidia/cuda:12.4.0-runtime-ubuntu22.04 as app + +# HACK copying to root is the only way to make Docker layer caching work for these files for some reason +COPY *.bin / +COPY ./setup_compact.key /setup_compact.key + +RUN apt-get update && apt-get install -y curl libpq5 ca-certificates && rm -rf /var/lib/apt/lists/* + +# copy finalization hints required for assembly generation +COPY --from=proof_fri_gpu /prover/data/keys/ /prover/data/keys/ +COPY --from=proof_fri_gpu /usr/bin/zksync_proof_fri_compressor /usr/bin/ + +ENV COMPACT_CRS_FILE=/setup_compact.key + + +ENTRYPOINT ["zksync_proof_fri_compressor"] diff --git a/docker/server-v2/Dockerfile b/docker/server-v2/Dockerfile index 9557156fa7c4..869f6bdb463f 100644 --- a/docker/server-v2/Dockerfile +++ b/docker/server-v2/Dockerfile @@ -17,7 +17,7 @@ WORKDIR /usr/src/zksync COPY . . 
-RUN cargo build --release --features=rocksdb/io-uring --bin zksync_server --bin block_reverter --bin merkle_tree_consistency_checker +RUN cargo build --manifest-path ./core/Cargo.toml --release --features=rocksdb/io-uring --bin zksync_server --bin block_reverter --bin merkle_tree_consistency_checker FROM ghcr.io/matter-labs/zksync-runtime-base:latest @@ -28,11 +28,12 @@ EXPOSE 3000 EXPOSE 3031 EXPOSE 3030 -COPY --from=builder /usr/src/zksync/target/release/zksync_server /usr/bin -COPY --from=builder /usr/src/zksync/target/release/block_reverter /usr/bin -COPY --from=builder /usr/src/zksync/target/release/merkle_tree_consistency_checker /usr/bin +COPY --from=builder /usr/src/zksync/core/target/release/zksync_server /usr/bin +COPY --from=builder /usr/src/zksync/core/target/release/block_reverter /usr/bin +COPY --from=builder /usr/src/zksync/core/target/release/merkle_tree_consistency_checker /usr/bin COPY contracts/system-contracts/zkout/ /contracts/system-contracts/zkout/ COPY contracts/l1-contracts/out/ /contracts/l1-contracts/out/ +COPY contracts/l1-contracts/zkout/ /contracts/l1-contracts/zkout/ COPY contracts/l2-contracts/zkout/ /contracts/l2-contracts/zkout/ COPY etc/tokens/ /etc/tokens/ COPY etc/ERC20/ /etc/ERC20/ diff --git a/docker/snapshots-creator/Dockerfile b/docker/snapshots-creator/Dockerfile index 2d3c83064981..753020094df7 100644 --- a/docker/snapshots-creator/Dockerfile +++ b/docker/snapshots-creator/Dockerfile @@ -15,13 +15,13 @@ ENV RUSTC_WRAPPER=${RUSTC_WRAPPER} WORKDIR /usr/src/zksync COPY . . 
-RUN cargo build --release --bin snapshots_creator +RUN cargo build --manifest-path ./core/Cargo.toml --release --bin snapshots_creator FROM ghcr.io/matter-labs/zksync-runtime-base:latest RUN apt-get update && apt-get install -y liburing-dev && \ rm -rf /var/lib/apt/lists/* -COPY --from=builder /usr/src/zksync/target/release/snapshots_creator /usr/bin +COPY --from=builder /usr/src/zksync/core/target/release/snapshots_creator /usr/bin ENTRYPOINT ["snapshots_creator"] diff --git a/docker/verified-sources-fetcher/Dockerfile b/docker/verified-sources-fetcher/Dockerfile index 87475f3187f3..ebc6619582fd 100644 --- a/docker/verified-sources-fetcher/Dockerfile +++ b/docker/verified-sources-fetcher/Dockerfile @@ -16,7 +16,7 @@ ENV RUSTC_WRAPPER=${RUSTC_WRAPPER} WORKDIR /usr/src/zksync COPY . . -RUN cargo build --release --bin verified_sources_fetcher +RUN cargo build --manifest-path ./core/Cargo.toml --release --bin verified_sources_fetcher FROM ghcr.io/matter-labs/zksync-runtime-base:latest @@ -26,6 +26,6 @@ RUN curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | gpg --dearmor - RUN echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] https://packages.cloud.google.com/apt cloud-sdk main" | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list RUN apt-get update && apt-get install -y google-cloud-cli && rm -rf /var/lib/apt/lists/* -COPY --from=builder /usr/src/zksync/target/release/verified_sources_fetcher /usr/bin/ +COPY --from=builder /usr/src/zksync/core/target/release/verified_sources_fetcher /usr/bin/ ENTRYPOINT ["verified_sources_fetcher"] diff --git a/docker/zk-environment/22.04_amd64_cuda_11_8.Dockerfile b/docker/zk-environment/22.04_amd64_cuda_11_8.Dockerfile index fe44d55acbbc..37505a098575 100644 --- a/docker/zk-environment/22.04_amd64_cuda_11_8.Dockerfile +++ b/docker/zk-environment/22.04_amd64_cuda_11_8.Dockerfile @@ -84,7 +84,7 @@ RUN cargo install --version=0.8.0 sqlx-cli RUN cargo install cargo-nextest RUN git clone 
https://github.com/matter-labs/foundry-zksync -RUN cd foundry-zksync && cargo build --release --bins +RUN cd foundry-zksync && git reset --hard 27360d4c8d12beddbb730dae07ad33a206b38f4b && cargo build --release --bins RUN mv ./foundry-zksync/target/release/forge /usr/local/cargo/bin/ RUN mv ./foundry-zksync/target/release/cast /usr/local/cargo/bin/ @@ -123,8 +123,12 @@ ENV NVIDIA_REQUIRE_CUDA "cuda>=11.8 brand=tesla,driver>=450,driver<451 brand=tes ENV NV_CUDA_CUDART_VERSION 11.8.89-1 ENV NV_CUDA_COMPAT_PACKAGE cuda-compat-11-8 -RUN wget -c -O - https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/${NVARCH}/3bf863cc.pub | apt-key add - && \ - echo "deb https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/${NVARCH} /" > /etc/apt/sources.list.d/cuda.list +# curl purging is removed, it's required in next steps +RUN apt-get update && apt-get install -y --no-install-recommends \ + gnupg2 curl ca-certificates && \ + curl -fsSLO https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2204/${NVARCH}/cuda-keyring_1.0-1_all.deb && \ + dpkg -i cuda-keyring_1.0-1_all.deb && \ + rm -rf /var/lib/apt/lists/* ENV CUDA_VERSION 11.8.0 diff --git a/docker/zk-environment/22.04_amd64_cuda_12_0.Dockerfile b/docker/zk-environment/22.04_amd64_cuda_12.Dockerfile similarity index 70% rename from docker/zk-environment/22.04_amd64_cuda_12_0.Dockerfile rename to docker/zk-environment/22.04_amd64_cuda_12.Dockerfile index da041b121816..b3230edb8c07 100644 --- a/docker/zk-environment/22.04_amd64_cuda_12_0.Dockerfile +++ b/docker/zk-environment/22.04_amd64_cuda_12.Dockerfile @@ -82,7 +82,7 @@ RUN cargo install --version=0.8.0 sqlx-cli RUN cargo install cargo-nextest RUN git clone https://github.com/matter-labs/foundry-zksync -RUN cd foundry-zksync && cargo build --release --bins +RUN cd foundry-zksync && git reset --hard 27360d4c8d12beddbb730dae07ad33a206b38f4b && cargo build --release --bins RUN mv ./foundry-zksync/target/release/forge /usr/local/cargo/bin/ RUN 
mv ./foundry-zksync/target/release/cast /usr/local/cargo/bin/ @@ -115,27 +115,27 @@ ENV ZKSYNC_HOME=/usr/src/zksync ENV PATH="${ZKSYNC_HOME}/bin:${PATH}" ENV CI=1 ENV RUSTC_WRAPPER=/usr/local/cargo/bin/sccache -ENV DEBIAN_FRONTEND noninteractive +ENV DEBIAN_FRONTEND=noninteractive # Setup nvidia-cuda env ENV NVARCH x86_64 -ENV NVIDIA_REQUIRE_CUDA "cuda>=12.0 brand=tesla,driver>=450,driver<451 brand=tesla,driver>=470,driver<471 brand=unknown,driver>=470,driver<471 brand=nvidia,driver>=470,driver<471 brand=nvidiartx,driver>=470,driver<471 brand=geforce,driver>=470,driver<471 brand=geforcertx,driver>=470,driver<471 brand=quadro,driver>=470,driver<471 brand=quadrortx,driver>=470,driver<471 brand=titan,driver>=470,driver<471 brand=titanrtx,driver>=470,driver<471" -ENV NV_CUDA_CUDART_VERSION 12.0.107-1 -ENV NV_CUDA_COMPAT_PACKAGE cuda-compat-12-0 +ENV NVIDIA_REQUIRE_CUDA "cuda>=12.2 brand=tesla,driver>=450,driver<451 brand=tesla,driver>=470,driver<471 brand=unknown,driver>=470,driver<471 brand=nvidia,driver>=470,driver<471 brand=nvidiartx,driver>=470,driver<471 brand=geforce,driver>=470,driver<471 brand=geforcertx,driver>=470,driver<471 brand=quadro,driver>=470,driver<471 brand=quadrortx,driver>=470,driver<471 brand=titan,driver>=470,driver<471 brand=titanrtx,driver>=470,driver<471 brand=tesla,driver>=525,driver<526 brand=unknown,driver>=525,driver<526 brand=nvidia,driver>=525,driver<526 brand=nvidiartx,driver>=525,driver<526 brand=geforce,driver>=525,driver<526 brand=geforcertx,driver>=525,driver<526 brand=quadro,driver>=525,driver<526 brand=quadrortx,driver>=525,driver<526 brand=titan,driver>=525,driver<526 brand=titanrtx,driver>=525,driver<526" +ENV NV_CUDA_CUDART_VERSION 12.2.140-1 +ENV NV_CUDA_COMPAT_PACKAGE cuda-compat-12-2 # curl purging is removed, it's required in next steps RUN apt-get update && apt-get install -y --no-install-recommends \ gnupg2 curl ca-certificates && \ - wget -c -O - 
https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/${NVARCH}/3bf863cc.pub | apt-key add - && \ - echo "deb https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/${NVARCH} /" > /etc/apt/sources.list.d/cuda.list && \ + curl -fsSLO https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2204/${NVARCH}/cuda-keyring_1.0-1_all.deb && \ + dpkg -i cuda-keyring_1.0-1_all.deb && \ rm -rf /var/lib/apt/lists/* -ENV CUDA_VERSION 12.0.0 +ENV CUDA_VERSION 12.2.2 # For libraries in the cuda-compat-* package: https://docs.nvidia.com/cuda/eula/index.html#attachment-a RUN apt-get update && apt-get install -y --no-install-recommends \ - cuda-cudart-12-0=${NV_CUDA_CUDART_VERSION} \ + cuda-cudart-12-2=${NV_CUDA_CUDART_VERSION} \ ${NV_CUDA_COMPAT_PACKAGE} \ && rm -rf /var/lib/apt/lists/* @@ -150,41 +150,27 @@ ENV LD_LIBRARY_PATH /usr/local/nvidia/lib:/usr/local/nvidia/lib64 ENV NVIDIA_VISIBLE_DEVICES all ENV NVIDIA_DRIVER_CAPABILITIES compute,utility -ENV NV_CUDA_LIB_VERSION 12.0.0-1 - -ENV NV_NVTX_VERSION 12.0.76-1 -ENV NV_LIBNPP_VERSION 12.0.0.30-1 -ENV NV_LIBNPP_PACKAGE libnpp-12-0=${NV_LIBNPP_VERSION} -ENV NV_LIBCUSPARSE_VERSION 12.0.0.76-1 - -ENV NV_LIBCUBLAS_PACKAGE_NAME libcublas-12-0 -ENV NV_LIBCUBLAS_VERSION 12.0.1.189-1 -ENV NV_LIBCUBLAS_PACKAGE ${NV_LIBCUBLAS_PACKAGE_NAME}=${NV_LIBCUBLAS_VERSION} - -ENV NV_LIBNCCL_PACKAGE_NAME libnccl2 -ENV NV_LIBNCCL_PACKAGE_VERSION 2.17.1-1 -ENV NCCL_VERSION 2.17.1-1 -ENV NV_LIBNCCL_PACKAGE ${NV_LIBNCCL_PACKAGE_NAME}=${NV_LIBNCCL_PACKAGE_VERSION}+cuda12.0 +ENV NV_CUDA_LIB_VERSION 12.2.2-1 -ENV NV_NVTX_VERSION 12.0.76-1 -ENV NV_LIBNPP_VERSION 12.0.0.30-1 -ENV NV_LIBNPP_PACKAGE libnpp-12-0=${NV_LIBNPP_VERSION} -ENV NV_LIBCUSPARSE_VERSION 12.0.0.76-1 +ENV NV_NVTX_VERSION 12.2.140-1 +ENV NV_LIBNPP_VERSION 12.2.1.4-1 +ENV NV_LIBNPP_PACKAGE libnpp-12-2=${NV_LIBNPP_VERSION} +ENV NV_LIBCUSPARSE_VERSION 12.1.2.141-1 -ENV NV_LIBCUBLAS_PACKAGE_NAME libcublas-12-0 -ENV NV_LIBCUBLAS_VERSION 12.0.1.189-1 +ENV 
NV_LIBCUBLAS_PACKAGE_NAME libcublas-12-2 +ENV NV_LIBCUBLAS_VERSION 12.2.5.6-1 ENV NV_LIBCUBLAS_PACKAGE ${NV_LIBCUBLAS_PACKAGE_NAME}=${NV_LIBCUBLAS_VERSION} ENV NV_LIBNCCL_PACKAGE_NAME libnccl2 -ENV NV_LIBNCCL_PACKAGE_VERSION 2.17.1-1 -ENV NCCL_VERSION 2.17.1-1 -ENV NV_LIBNCCL_PACKAGE ${NV_LIBNCCL_PACKAGE_NAME}=${NV_LIBNCCL_PACKAGE_VERSION}+cuda12.0 +ENV NV_LIBNCCL_PACKAGE_VERSION 2.19.3-1 +ENV NCCL_VERSION 2.19.3-1 +ENV NV_LIBNCCL_PACKAGE ${NV_LIBNCCL_PACKAGE_NAME}=${NV_LIBNCCL_PACKAGE_VERSION}+cuda12.2 RUN apt-get update && apt-get install -y --no-install-recommends \ - cuda-libraries-12-0=${NV_CUDA_LIB_VERSION} \ + cuda-libraries-12-2=${NV_CUDA_LIB_VERSION} \ ${NV_LIBNPP_PACKAGE} \ - cuda-nvtx-12-0=${NV_NVTX_VERSION} \ - libcusparse-12-0=${NV_LIBCUSPARSE_VERSION} \ + cuda-nvtx-12-2=${NV_NVTX_VERSION} \ + libcusparse-12-2=${NV_LIBCUSPARSE_VERSION} \ ${NV_LIBCUBLAS_PACKAGE} \ ${NV_LIBNCCL_PACKAGE} \ && rm -rf /var/lib/apt/lists/* @@ -194,57 +180,57 @@ RUN apt-mark hold ${NV_LIBCUBLAS_PACKAGE_NAME} ${NV_LIBNCCL_PACKAGE_NAME} #### devel -ENV NV_CUDA_LIB_VERSION "12.0.0-1" +ENV NV_CUDA_LIB_VERSION "12.2.2-1" -ENV NV_CUDA_CUDART_DEV_VERSION 12.0.107-1 -ENV NV_NVML_DEV_VERSION 12.0.76-1 -ENV NV_LIBCUSPARSE_DEV_VERSION 12.0.0.76-1 -ENV NV_LIBNPP_DEV_VERSION 12.0.0.30-1 -ENV NV_LIBNPP_DEV_PACKAGE libnpp-dev-12-0=${NV_LIBNPP_DEV_VERSION} +ENV NV_CUDA_CUDART_DEV_VERSION 12.2.140-1 +ENV NV_NVML_DEV_VERSION 12.2.140-1 +ENV NV_LIBCUSPARSE_DEV_VERSION 12.1.2.141-1 +ENV NV_LIBNPP_DEV_VERSION 12.2.1.4-1 +ENV NV_LIBNPP_DEV_PACKAGE libnpp-dev-12-2=${NV_LIBNPP_DEV_VERSION} -ENV NV_LIBCUBLAS_DEV_VERSION 12.0.1.189-1 -ENV NV_LIBCUBLAS_DEV_PACKAGE_NAME libcublas-dev-12-0 +ENV NV_LIBCUBLAS_DEV_VERSION 12.2.5.6-1 +ENV NV_LIBCUBLAS_DEV_PACKAGE_NAME libcublas-dev-12-2 ENV NV_LIBCUBLAS_DEV_PACKAGE ${NV_LIBCUBLAS_DEV_PACKAGE_NAME}=${NV_LIBCUBLAS_DEV_VERSION} -ENV NV_CUDA_NSIGHT_COMPUTE_VERSION 12.0.0-1 -ENV NV_CUDA_NSIGHT_COMPUTE_DEV_PACKAGE 
cuda-nsight-compute-12-0=${NV_CUDA_NSIGHT_COMPUTE_VERSION} +ENV NV_CUDA_NSIGHT_COMPUTE_VERSION 12.2.2-1 +ENV NV_CUDA_NSIGHT_COMPUTE_DEV_PACKAGE cuda-nsight-compute-12-2=${NV_CUDA_NSIGHT_COMPUTE_VERSION} -ENV NV_NVPROF_VERSION 12.0.90-1 -ENV NV_NVPROF_DEV_PACKAGE cuda-nvprof-12-0=${NV_NVPROF_VERSION} +ENV NV_NVPROF_VERSION 12.2.142-1 +ENV NV_NVPROF_DEV_PACKAGE cuda-nvprof-12-2=${NV_NVPROF_VERSION} ENV NV_LIBNCCL_DEV_PACKAGE_NAME libnccl-dev -ENV NV_LIBNCCL_DEV_PACKAGE_VERSION 2.17.1-1 -ENV NCCL_VERSION 2.17.1-1 -ENV NV_LIBNCCL_DEV_PACKAGE ${NV_LIBNCCL_DEV_PACKAGE_NAME}=${NV_LIBNCCL_DEV_PACKAGE_VERSION}+cuda12.0 - -ENV NV_CUDA_CUDART_DEV_VERSION 12.0.107-1 -ENV NV_NVML_DEV_VERSION 12.0.76-1 -ENV NV_LIBCUSPARSE_DEV_VERSION 12.0.0.76-1 -ENV NV_LIBNPP_DEV_VERSION 12.0.0.30-1 -ENV NV_LIBNPP_DEV_PACKAGE libnpp-dev-12-0=${NV_LIBNPP_DEV_VERSION} - -ENV NV_LIBCUBLAS_DEV_PACKAGE_NAME libcublas-dev-12-0 -ENV NV_LIBCUBLAS_DEV_VERSION 12.0.1.189-1 +ENV NV_LIBNCCL_DEV_PACKAGE_VERSION 2.19.3-1 +ENV NCCL_VERSION 2.19.3-1 +ENV NV_LIBNCCL_DEV_PACKAGE ${NV_LIBNCCL_DEV_PACKAGE_NAME}=${NV_LIBNCCL_DEV_PACKAGE_VERSION}+cuda12.2 + +ENV NV_CUDA_CUDART_DEV_VERSION 12.2.140-1 +ENV NV_NVML_DEV_VERSION 12.2.140-1 +ENV NV_LIBCUSPARSE_DEV_VERSION 12.1.2.141-1 +ENV NV_LIBNPP_DEV_VERSION 12.2.1.4-1 +ENV NV_LIBNPP_DEV_PACKAGE libnpp-dev-12-2=${NV_LIBNPP_DEV_VERSION} + +ENV NV_LIBCUBLAS_DEV_PACKAGE_NAME libcublas-dev-12-2 +ENV NV_LIBCUBLAS_DEV_VERSION 12.2.5.6-1 ENV NV_LIBCUBLAS_DEV_PACKAGE ${NV_LIBCUBLAS_DEV_PACKAGE_NAME}=${NV_LIBCUBLAS_DEV_VERSION} -ENV NV_CUDA_NSIGHT_COMPUTE_VERSION 12.0.0-1 -ENV NV_CUDA_NSIGHT_COMPUTE_DEV_PACKAGE cuda-nsight-compute-12-0=${NV_CUDA_NSIGHT_COMPUTE_VERSION} +ENV NV_CUDA_NSIGHT_COMPUTE_VERSION 12.2.2-1 +ENV NV_CUDA_NSIGHT_COMPUTE_DEV_PACKAGE cuda-nsight-compute-12-2=${NV_CUDA_NSIGHT_COMPUTE_VERSION} ENV NV_LIBNCCL_DEV_PACKAGE_NAME libnccl-dev -ENV NV_LIBNCCL_DEV_PACKAGE_VERSION 2.17.1-1 -ENV NCCL_VERSION 2.17.1-1 -ENV NV_LIBNCCL_DEV_PACKAGE 
${NV_LIBNCCL_DEV_PACKAGE_NAME}=${NV_LIBNCCL_DEV_PACKAGE_VERSION}+cuda12.0 +ENV NV_LIBNCCL_DEV_PACKAGE_VERSION 2.19.3-1 +ENV NCCL_VERSION 2.19.3-1 +ENV NV_LIBNCCL_DEV_PACKAGE ${NV_LIBNCCL_DEV_PACKAGE_NAME}=${NV_LIBNCCL_DEV_PACKAGE_VERSION}+cuda12.2 RUN apt-get update && apt-get install -y --no-install-recommends \ libtinfo5 libncursesw5 \ - cuda-cudart-dev-12-0=${NV_CUDA_CUDART_DEV_VERSION} \ - cuda-command-line-tools-12-0=${NV_CUDA_LIB_VERSION} \ - cuda-minimal-build-12-0=${NV_CUDA_LIB_VERSION} \ - cuda-libraries-dev-12-0=${NV_CUDA_LIB_VERSION} \ - cuda-nvml-dev-12-0=${NV_NVML_DEV_VERSION} \ + cuda-cudart-dev-12-2=${NV_CUDA_CUDART_DEV_VERSION} \ + cuda-command-line-tools-12-2=${NV_CUDA_LIB_VERSION} \ + cuda-minimal-build-12-2=${NV_CUDA_LIB_VERSION} \ + cuda-libraries-dev-12-2=${NV_CUDA_LIB_VERSION} \ + cuda-nvml-dev-12-2=${NV_NVML_DEV_VERSION} \ ${NV_NVPROF_DEV_PACKAGE} \ ${NV_LIBNPP_DEV_PACKAGE} \ - libcusparse-dev-12-0=${NV_LIBCUSPARSE_DEV_VERSION} \ + libcusparse-dev-12-2=${NV_LIBCUSPARSE_DEV_VERSION} \ ${NV_LIBCUBLAS_DEV_PACKAGE} \ ${NV_LIBNCCL_DEV_PACKAGE} \ ${NV_CUDA_NSIGHT_COMPUTE_DEV_PACKAGE} \ diff --git a/docker/zk-environment/Dockerfile b/docker/zk-environment/Dockerfile index c04e5720e4d7..f17347b2ef39 100644 --- a/docker/zk-environment/Dockerfile +++ b/docker/zk-environment/Dockerfile @@ -47,7 +47,7 @@ RUN cargo install cargo-spellcheck RUN cargo install sccache RUN git clone https://github.com/matter-labs/foundry-zksync -RUN cd foundry-zksync && cargo build --release --bins +RUN cd foundry-zksync && git reset --hard 27360d4c8d12beddbb730dae07ad33a206b38f4b && cargo build --release --bins RUN mv ./foundry-zksync/target/release/forge /usr/local/cargo/bin/ RUN mv ./foundry-zksync/target/release/cast /usr/local/cargo/bin/ diff --git a/docs/src/announcements/attester_commitee.md b/docs/src/announcements/attester_commitee.md index 148e51a4f976..d4205ee52aa9 100644 --- a/docs/src/announcements/attester_commitee.md +++ 
b/docs/src/announcements/attester_commitee.md @@ -2,8 +2,8 @@ ## Overview -The Attester committee is a subset of ZKSync nodes. After each l1 batch execution, participating nodes sign its -execution result and send back to the network. +The Attester committee is a subset of Nodes. After each l1 batch execution, participating nodes sign its execution +result and send back to the network. The ultimate goal is to make L1 commit operation contingent on such signatures. This will improve the security and finality guarantees: having these signatures on L1 shows that additional actors executed the corresponding blocks - and @@ -36,7 +36,7 @@ Participants can leave the committee at any time. The only action that is required to participate is to share your attester public key with the Main Node operator (by opening an issue in this repo or using any other communication channel). You can find it in the comment in the `consensus_secrets.yaml` file (that was - in most cases - generated by the tool described -[here](https://github.com/matter-labs/zksync-era/blob/main/docs/guides/external-node/10_decentralization.md#generating-secrets)) +[here](https://github.com/matter-labs/zksync-era/blob/main/docs/src/guides/external-node/10_decentralization.md#generating-secrets)) > [!WARNING] > diff --git a/docs/src/guides/advanced/15_prover_keys.md b/docs/src/guides/advanced/15_prover_keys.md index 5a3a264e8ddd..bf127b7a635f 100644 --- a/docs/src/guides/advanced/15_prover_keys.md +++ b/docs/src/guides/advanced/15_prover_keys.md @@ -19,23 +19,23 @@ We offer 13 distinct types of **'base' circuits**, including Vm, Decommitter, an each corresponding to a basic type, while one is a 'node,' and another is a 'scheduler' that oversees all others. You can find more details in the [full list here][recursive_circuit_list]. 
-In our new proof system, there's also a final element known as the compressor, or **snark wrapper**, representing an +In our new proof system, there are also final steps known as the compressor and **snark wrapper**, representing an additional type of circuit. It's essential to note that each circuit type requires its unique set of keys. Also, the base circuits, leaves, node and scheduler are STARK based with FRI commitments, while the snark wrapper is SNARK based with KZG commitment. This results in slightly different contents of the keys, but their role stays the same. +More info about commitment schemes can be found [here](https://en.wikipedia.org/wiki/Commitment_scheme). ## Keys -### Setup key (big, 14GB) +### Setup keys (big, >700MB each) -> In the following [CPU](https://github.com/matter-labs/zksync-era/blob/main/prover/setup-data-cpu-keys.json) and -> [GPU](https://github.com/matter-labs/zksync-era/blob/main/prover/setup-data-gpu-keys.json) links, you'll find GCS -> buckets containing the latest keys. +The following [link](https://github.com/matter-labs/zksync-era/blob/main/prover/setup-data-gpu-keys.json) provides the +GCS buckets containing the latest setup keys. -The primary key for a given circuit is called `setup key`. These keys can be substantial in size - approximately 14GB +The primary key for a given circuit is called `setup key`. These keys can be substantial in size - approximately 700MB for our circuits. Due to their size, we don't store them directly on GitHub; instead, they need to be generated. If you’re wondering what these setup keys contain, think of them as the 'source code of the circuit.' @@ -117,8 +117,7 @@ friendly hash function (currently Poseidon2).
https://github.com/matter-labs/era-zkevm_test_harness/blob/3cd647aa57fc2e1180bab53f7a3b61ec47502a46/circuit_definitions/src/circuit_definitions/base_layer/mod.rs#L77 [recursive_circuit_list]: https://github.com/matter-labs/era-zkevm_test_harness/blob/3cd647aa57fc2e1180bab53f7a3b61ec47502a46/circuit_definitions/src/circuit_definitions/recursion_layer/mod.rs#L29 -[verification_key_list]: - https://github.com/matter-labs/zksync-era/tree/6d18061df4a18803d3c6377305ef711ce60317e1/prover/data/keys +[verification_key_list]: https://github.com/matter-labs/zksync-era/tree/main/prover/data/keys [env_variables_for_hash]: https://github.com/matter-labs/zksync-era/blob/6d18061df4a18803d3c6377305ef711ce60317e1/etc/env/base/contracts.toml#L61 [prover_setup_data]: diff --git a/docs/src/guides/external-node/00_quick_start.md b/docs/src/guides/external-node/00_quick_start.md index 07e52085cf4f..547d65fcbc0c 100644 --- a/docs/src/guides/external-node/00_quick_start.md +++ b/docs/src/guides/external-node/00_quick_start.md @@ -6,7 +6,15 @@ Install `docker compose` and `Docker` ## Running ZKsync node locally -These commands start ZKsync node locally inside docker. +These commands start ZKsync Node locally inside docker. + +For adjusting the Dockerfiles to use them with other chains setup using ZK Stack, see +[setup_for_other_chains](11_setup_for_other_chains.md) + +> [!NOTE] +> +> If you want to run Node for a chain different than ZKsync ERA, you can ask the company hosting the chains for the +> ready docker-compose files. To start a mainnet instance, run: @@ -58,11 +66,16 @@ The HTTP JSON-RPC API can be accessed on port `3060` and WebSocket API can be ac > setup). > > For requirements for nodes running from DB dump see the [running](03_running.md) section. DB dumps are a way to start -> ZKsync node with full historical transactions history. +> Node with full historical transactions history. 
> > For nodes with pruning disabled, expect the storage requirements on mainnet to grow at 1TB per month. If you want to > stop historical DB pruning you can read more about this in the [pruning](08_pruning.md) section. +> [!NOTE] +> +> For chains other than ZKSync Era, the system requirements can be slightly lower (CPU and RAM) or even much lower +> (storage), depending on the chain. + - 32 GB of RAM and a relatively modern CPU - 50 GB of storage for testnet nodes - 500 GB of storage for mainnet nodes diff --git a/docs/src/guides/external-node/01_intro.md b/docs/src/guides/external-node/01_intro.md index 10fc55acac21..b5842e160b6c 100644 --- a/docs/src/guides/external-node/01_intro.md +++ b/docs/src/guides/external-node/01_intro.md @@ -1,39 +1,36 @@ -# ZkSync Node Documentation +# Node Documentation -This documentation explains the basics of the ZKsync Node. +The Node (sometimes referred to as External Node or EN) is a read-only replica of the main node. -## Disclaimers +## What is the Node -- The ZKsync node is in the alpha phase, and should be used with caution. -- The ZKsync node is a read-only replica of the main node. +The Node is a read-replica of the main (centralized) node that can be run by external parties. It functions by receiving +blocks from the given ZK Stack chain and re-applying transactions locally, starting from the genesis block. The Node +shares most of its codebase with the main node. Consequently, when it re-applies transactions, it does so exactly as the +main node did in the past. -## What is the ZKsync node +**It has three modes of initialization:** -The ZKsync node is a read-replica of the main (centralized) node that can be run by external parties. It functions by -receiving blocks from the ZKsync network and re-applying transactions locally, starting from the genesis block. The -ZKsync node shares most of its codebase with the main node. 
Consequently, when it re-applies transactions, it does so -exactly as the main node did in the past. - -**It has two modes of initialization:** - -- recovery from a DB dump, in Ethereum terms this corresponds to archival node +- recovery from genesis (Not supported on ZKsync Era), in Ethereum terms this corresponds to archival node, this option + is slower than recovery from DB dump, but is the easiest way to spin up new Node. +- recovery from a DB dump, in Ethereum terms this corresponds to archival node. - recovery from a snapshot, in Ethereum terms this corresponds to light node, such nodes will only have access to transactions data from after the node was initialized. The database can be pruned on such nodes. ## High-level overview -At a high level, the ZKsync node can be seen as an application that has the following modules: +At a high level, the Node can be seen as an application that has the following modules: - API server that provides the publicly available Web3 interface. -- Consensus layer that interacts with the peer network and retrieves transactions and blocks to re-execute. +- Consensus layer (ZKsync Era only for now) that interacts with the peer network and retrieves transactions and blocks + to re-execute. - Sequencer component that actually executes and persists transactions received from the synchronization layer. -- Several checker modules that ensure the consistency of the ZKsync node state. +- Several checker modules that ensure the consistency of the Node state. With the EN, you are able to: -- Locally recreate and verify the ZKsync Era mainnet/testnet state. -- Interact with the recreated state in a trustless way (in a sense that the validity is locally verified, and you should - not rely on a third-party API ZKsync Era provides). +- Locally recreate and verify a ZK Stack chain's, for example ZKsync Era's mainnet/testnet state. +- Interact with the recreated state in a trustless way. The validity is locally verified. 
- Use the Web3 API without having to query the main node. - Send L2 transactions (that will be proxied to the main node). @@ -43,12 +40,12 @@ With the EN, you _can not_: - Generate proofs. - Submit data to L1. -A more detailed overview of the EN's components is provided in the [components](06_components.md) section. +A more detailed overview of the Node's components is provided in the [components](06_components.md) section. ## API overview -API exposed by the ZKsync node strives to be Web3-compliant. If some method is exposed but behaves differently compared -to Ethereum, it should be considered a bug. Please [report][contact_us] such cases. +API exposed by the Node strives to be Web3-compliant. If some method is exposed but behaves differently compared to +Ethereum, it should be considered a bug. Please [report][contact_us] such cases. [contact_us]: https://zksync.io/contact @@ -58,42 +55,42 @@ Data getters in this namespace operate in the L2 space: require/return L2 block Available methods: -| Method | Notes | -| ----------------------------------------- | ---------------------------------------------------------------------------------- | -| `eth_blockNumber` | | -| `eth_chainId` | | -| `eth_call` | | -| `eth_estimateGas` | | -| `eth_gasPrice` | | -| `eth_newFilter` | Maximum amount of installed filters is configurable | -| `eth_newBlockFilter` | Same as above | -| `eth_newPendingTransactionsFilter` | Same as above | -| `eth_uninstallFilter` | | -| `eth_getLogs` | Maximum amount of returned entities can be configured | -| `eth_getFilterLogs` | Same as above | -| `eth_getFilterChanges` | Same as above | -| `eth_getBalance` | | -| `eth_getBlockByNumber` | | -| `eth_getBlockByHash` | | -| `eth_getBlockTransactionCountByNumber` | | -| `eth_getBlockTransactionCountByHash` | | -| `eth_getCode` | | -| `eth_getStorageAt` | | -| `eth_getTransactionCount` | | -| `eth_getTransactionByHash` | | -| `eth_getTransactionByBlockHashAndIndex` | | -| 
`eth_getTransactionByBlockNumberAndIndex` | | -| `eth_getTransactionReceipt` | | -| `eth_protocolVersion` | | -| `eth_sendRawTransaction` | | -| `eth_syncing` | ZKsync node is considered synced if it's less than 11 blocks behind the main node. | -| `eth_coinbase` | Always returns a zero address | -| `eth_accounts` | Always returns an empty list | -| `eth_getCompilers` | Always returns an empty list | -| `eth_hashrate` | Always returns zero | -| `eth_getUncleCountByBlockHash` | Always returns zero | -| `eth_getUncleCountByBlockNumber` | Always returns zero | -| `eth_mining` | Always returns false | +| Method | Notes | +| ----------------------------------------- | --------------------------------------------------------------------------- | +| `eth_blockNumber` | | +| `eth_chainId` | | +| `eth_call` | | +| `eth_estimateGas` | | +| `eth_gasPrice` | | +| `eth_newFilter` | Maximum amount of installed filters is configurable | +| `eth_newBlockFilter` | Same as above | +| `eth_newPendingTransactionsFilter` | Same as above | +| `eth_uninstallFilter` | | +| `eth_getLogs` | Maximum amount of returned entities can be configured | +| `eth_getFilterLogs` | Same as above | +| `eth_getFilterChanges` | Same as above | +| `eth_getBalance` | | +| `eth_getBlockByNumber` | | +| `eth_getBlockByHash` | | +| `eth_getBlockTransactionCountByNumber` | | +| `eth_getBlockTransactionCountByHash` | | +| `eth_getCode` | | +| `eth_getStorageAt` | | +| `eth_getTransactionCount` | | +| `eth_getTransactionByHash` | | +| `eth_getTransactionByBlockHashAndIndex` | | +| `eth_getTransactionByBlockNumberAndIndex` | | +| `eth_getTransactionReceipt` | | +| `eth_protocolVersion` | | +| `eth_sendRawTransaction` | | +| `eth_syncing` | Node is considered synced if it's less than 11 blocks behind the main node. 
| +| `eth_coinbase` | Always returns a zero address | +| `eth_accounts` | Always returns an empty list | +| `eth_getCompilers` | Always returns an empty list | +| `eth_hashrate` | Always returns zero | +| `eth_getUncleCountByBlockHash` | Always returns zero | +| `eth_getUncleCountByBlockNumber` | Always returns zero | +| `eth_mining` | Always returns false | ### PubSub @@ -153,5 +150,5 @@ Always refer to the documentation linked above to see the list of stabilized met ### `en` namespace -This namespace contains methods that ZKsync nodes call on the main node while syncing. If this namespace is enabled, -other ENs can sync from this node. +This namespace contains methods that Nodes call on the main node while syncing. If this namespace is enabled, other ENs +can sync from this node. diff --git a/docs/src/guides/external-node/02_configuration.md b/docs/src/guides/external-node/02_configuration.md index 5b8b7512eb3e..90da7c1eea79 100644 --- a/docs/src/guides/external-node/02_configuration.md +++ b/docs/src/guides/external-node/02_configuration.md @@ -1,7 +1,7 @@ -# ZkSync Node Configuration +# Node Configuration -This document outlines various configuration options for the EN. Currently, the ZKsync node requires the definition of -numerous environment variables. To streamline this process, we provide prepared configs for the ZKsync Era - for both +This document outlines various configuration options for the EN. Currently, the Node requires the definition of numerous +environment variables. To streamline this process, we provide prepared configs for the ZKsync Era - for both [mainnet](prepared_configs/mainnet-config.env) and [testnet](prepared_configs/testnet-sepolia-config.env). You can use these files as a starting point and modify only the necessary sections. @@ -10,7 +10,7 @@ default settings.** ## Database -The ZKsync node uses two databases: PostgreSQL and RocksDB. +The Node uses two databases: PostgreSQL and RocksDB. 
PostgreSQL serves as the main source of truth in the EN, so all the API requests fetch the state from there. The PostgreSQL connection is configured by the `DATABASE_URL`. Additionally, the `DATABASE_POOL_SIZE` variable defines the @@ -22,12 +22,12 @@ recommended to use an NVME SSD for RocksDB. RocksDB requires two variables to be ## L1 Web3 client -ZKsync node requires a connection to an Ethereum node. The corresponding env variable is `EN_ETH_CLIENT_URL`. Make sure -to set the URL corresponding to the correct L1 network (L1 mainnet for L2 mainnet and L1 sepolia for L2 testnet). +Node requires a connection to an Ethereum node. The corresponding env variable is `EN_ETH_CLIENT_URL`. Make sure to set +the URL corresponding to the correct L1 network (L1 mainnet for L2 mainnet and L1 sepolia for L2 testnet). -Note: Currently, the ZKsync node makes 2 requests to the L1 per L1 batch, so the Web3 client usage for a synced node -should not be high. However, during the synchronization phase the new batches would be persisted on the ZKsync node -quickly, so make sure that the L1 client won't exceed any limits (e.g. in case you use Infura). +Note: Currently, the Node makes 2 requests to the L1 per L1 batch, so the Web3 client usage for a synced node should not +be high. However, during the synchronization phase the new batches would be persisted on the Node quickly, so make sure +that the L1 client won't exceed any limits (e.g. in case you use Infura). ## Exposed ports @@ -50,13 +50,13 @@ the metrics, leave this port not configured, and the metrics won't be collected. There are variables that allow you to fine-tune the limits of the RPC servers, such as limits on the number of returned entries or the limit for the accepted transaction size. Provided files contain sane defaults that are recommended for -use, but these can be edited, e.g. to make the ZKsync node more/less restrictive. +use, but these can be edited, e.g. to make the Node more/less restrictive. 
## JSON-RPC API namespaces There are 7 total supported API namespaces: `eth`, `net`, `web3`, `debug` - standard ones; `zks` - rollup-specific one; -`pubsub` - a.k.a. `eth_subscribe`; `en` - used by ZKsync nodes while syncing. You can configure what namespaces you want -to enable using `EN_API_NAMESPACES` and specifying namespace names in a comma-separated list. By default, all but the +`pubsub` - a.k.a. `eth_subscribe`; `en` - used by Nodes while syncing. You can configure what namespaces you want to +enable using `EN_API_NAMESPACES` and specifying namespace names in a comma-separated list. By default, all but the `debug` namespace are enabled. ## Logging and observability @@ -64,8 +64,8 @@ to enable using `EN_API_NAMESPACES` and specifying namespace names in a comma-se `MISC_LOG_FORMAT` defines the format in which logs are shown: `plain` corresponds to the human-readable format, while the other option is `json` (recommended for deployments). -`RUST_LOG` variable allows you to set up the logs granularity (e.g. make the ZKsync node emit fewer logs). You can read -about the format [here](https://docs.rs/env_logger/0.10.0/env_logger/#enabling-logging). +`RUST_LOG` variable allows you to set up the logs granularity (e.g. make the Node emit fewer logs). You can read about +the format [here](https://docs.rs/env_logger/0.10.0/env_logger/#enabling-logging). `MISC_SENTRY_URL` and `MISC_OTLP_URL` variables can be configured to set up Sentry and OpenTelemetry exporters. diff --git a/docs/src/guides/external-node/03_running.md b/docs/src/guides/external-node/03_running.md index caa528238aea..c59733825c4d 100644 --- a/docs/src/guides/external-node/03_running.md +++ b/docs/src/guides/external-node/03_running.md @@ -1,8 +1,9 @@ -# Running the ZkSync Node +# Running the Node > [!NOTE] > -> If you want to just run node with recommended default setting, please see the [quick start](00_quick_start.md) page. 
+> If you want to just run ZKsync node with recommended default settings, please see the [quick start](00_quick_start.md) +> page. This section assumes that you have prepared a configuration file as described on the [previous page](02_configuration.md). @@ -14,12 +15,14 @@ This configuration is approximate and should be considered as **minimal** requir - 32-core CPU - 64GB RAM - SSD storage (NVME recommended): - - Sepolia Testnet - 10GB ZKsync node + 50GB PostgreSQL (at the time of writing, will grow over time, so should be + - ZKsync Sepolia Testnet - 10GB Node + 50GB PostgreSQL (at the time of writing, will grow over time, so should be constantly monitored) - - Mainnet - 3TB ZKsync node + 8TB PostgreSQL (at the time of writing, will grow over time, so should be constantly + - ZKsync Mainnet - 3TB Node + 8TB PostgreSQL (at the time of writing, will grow over time, so should be constantly monitored) - 100 Mbps connection (1 Gbps+ recommended) +For smaller chains, less powerful hardware may be sufficient, especially in terms of disk space. + ## A note about PostgreSQL storage By far, the heaviest table to maintain is the `call_traces` table. This table is only required for the `debug` @@ -36,23 +39,23 @@ it in Docker. There are many of guides on that, [here's one example](https://www.docker.com/blog/how-to-use-the-postgres-docker-official-image/). Note however that if you run PostgresSQL as a stand-alone Docker image (e.g. not in Docker-compose with a network shared -between ZKsync node and Postgres), ZKsync node won't be able to access Postgres via `localhost` or `127.0.0.1` URLs. To -make it work, you'll have to either run it with a `--network host` (on Linux) or use `host.docker.internal` instead of -`localhost` in the ZKsync node configuration ([official docs][host_docker_internal]).
To make it work, +you'll have to either run it with a `--network host` (on Linux) or use `host.docker.internal` instead of `localhost` in +the Node configuration ([official docs][host_docker_internal]). Besides running Postgres, you are expected to have a DB dump from a corresponding env. You can restore it using `pg_restore -O -C --dbname=`. You can also refer to -[ZKsync Node configuration management blueprint](https://github.com/matter-labs/zksync-era/blob/main/docs/guides/external-node/00_quick_start.md#advanced-setup) +[Node configuration management blueprint](https://github.com/matter-labs/zksync-era/blob/main/docs/src/guides/external-node/00_quick_start.md#advanced-setup) for advanced DB instance configurations. [host_docker_internal](https://docs.docker.com/desktop/networking/#i-want-to-connect-from-a-container-to-a-service-on-the-host) ## Running -Assuming you have the ZKsync node Docker image, an env file with the prepared configuration, and you have restored your -DB with the pg dump, that is all you need. +Assuming you have the Node Docker image, an env file with the prepared configuration, and you have restored your DB with +the pg dump, that is all you need. Sample running command: @@ -69,9 +72,9 @@ in RocksDB (mainly the Merkle tree) is absent. Before the node can make any prog RocksDB and verify consistency. The exact time required for that depends on the hardware configuration, but it is reasonable to expect the state rebuild on the mainnet to take more than 20 hours. 
-## Redeploying the ZKsync node with a new PG dump +## Redeploying the Node with a new PG dump -If you've been running the ZKsync node for some time and are going to redeploy it using a new PG dump, you should +If you've been running the Node for some time and are going to redeploy it using a new PG dump, you should - Stop the EN - Remove SK cache (corresponding to `EN_STATE_CACHE_PATH`) diff --git a/docs/src/guides/external-node/04_observability.md b/docs/src/guides/external-node/04_observability.md index 05b39b74c5d2..cdf0f9dd25f8 100644 --- a/docs/src/guides/external-node/04_observability.md +++ b/docs/src/guides/external-node/04_observability.md @@ -1,6 +1,6 @@ -# ZKsync node Observability +# Node Observability -The ZKsync node provides several options for setting up observability. Configuring logs and sentry is described in the +The Node provides several options for setting up observability. Configuring logs and sentry is described in the [configuration](02_configuration.md) section, so this section focuses on the exposed metrics. This section is written with the assumption that you're familiar with @@ -16,8 +16,8 @@ By default, latency histograms are distributed in the following buckets (in seco ## Metrics -ZKsync node exposes a lot of metrics, a significant amount of which aren't interesting outside the development flow. -This section's purpose is to highlight metrics that may be worth observing in the external setup. +Node exposes a lot of metrics, a significant amount of which aren't interesting outside the development flow. This +section's purpose is to highlight metrics that may be worth observing in the external setup. If you are not planning to scrape Prometheus metrics, please unset `EN_PROMETHEUS_PORT` environment variable to prevent memory leaking. @@ -25,7 +25,7 @@ memory leaking. 
| Metric name | Type | Labels | Description | | ---------------------------------------------- | --------- | ------------------------------------- | ------------------------------------------------------------------ | | `external_node_synced` | Gauge | - | 1 if synced, 0 otherwise. Matches `eth_call` behavior | -| `external_node_sync_lag` | Gauge | - | How many blocks behind the main node the ZKsync node is | +| `external_node_sync_lag` | Gauge | - | How many blocks behind the main node the Node is | | `external_node_fetcher_requests` | Histogram | `stage`, `actor` | Duration of requests performed by the different fetcher components | | `external_node_fetcher_cache_requests` | Histogram | - | Duration of requests performed by the fetcher cache layer | | `external_node_fetcher_miniblock` | Gauge | `status` | The number of the last L2 block update fetched from the main node | diff --git a/docs/src/guides/external-node/05_troubleshooting.md b/docs/src/guides/external-node/05_troubleshooting.md index 43d6ae26b135..49369897143f 100644 --- a/docs/src/guides/external-node/05_troubleshooting.md +++ b/docs/src/guides/external-node/05_troubleshooting.md @@ -1,8 +1,8 @@ -# ZKsync node Troubleshooting +# Node Troubleshooting -The ZKsync node tries to follow the fail-fast principle: if an anomaly is discovered, instead of attempting state -recovery, in most cases it will restart. Most of the time it will manifest as crashes, and if it happens once, it -shouldn't be treated as a problem. +The Node tries to follow the fail-fast principle: if an anomaly is discovered, instead of attempting state recovery, in +most cases it will restart. Most of the time it will manifest as crashes, and if it happens once, it shouldn't be +treated as a problem. However, if the node enters the crash loop or otherwise behaves unexpectedly, it may indicate either a bug in the implementation or a problem with configuration. This section tries to cover common problems. 
@@ -24,8 +24,8 @@ Other kinds of panic aren't normally expected. While in most cases, the state wi ## Genesis Issues -The ZKsync node is supposed to start with an applied DB dump. If you see any genesis-related errors, it probably means -the ZKsync node was started without an applied dump. +The Node is supposed to start with an applied DB dump. If you see any genesis-related errors, it probably means the Node +was started without an applied dump. [contact_us]: https://zksync.io/contact @@ -43,7 +43,7 @@ you don't consider actionable, you may disable logs for a component by tweaking | WARN | "Following transport error occurred" | There was a problem with fetching data from the main node. | | WARN | "Unable to get the gas price" | There was a problem with fetching data from the main node. | | WARN | "Consistency checker error" | There are problems querying L1, check the Web3 URL you specified in the config. | -| WARN | "Reorg detected" | Reorg was detected on the main node, the ZKsync node will rollback and restart | +| WARN | "Reorg detected" | Reorg was detected on the main node, the Node will rollback and restart | Same as with panics, normally it's only a problem if a WARN+ level log appears many times in a row. diff --git a/docs/src/guides/external-node/06_components.md b/docs/src/guides/external-node/06_components.md index 733400058a82..ec6ff4f9936a 100644 --- a/docs/src/guides/external-node/06_components.md +++ b/docs/src/guides/external-node/06_components.md @@ -1,58 +1,56 @@ -# ZKsync node components +# Node components This section contains an overview of the EN's main components. ## API -The ZKsync node can serve both the HTTP and the WS Web3 API, as well as PubSub. Whenever possible, it provides data -based on the local state, with a few exceptions: +The Node can serve both the HTTP and the WS Web3 API, as well as PubSub. 
Whenever possible, it provides data based on +the local state, with a few exceptions: - Submitting transactions: Since it is a read replica, submitted transactions are proxied to the main node, and the - response is returned from the main node. -- Querying transactions: The ZKsync node is not aware of the main node's mempool, and it does not sync rejected - transactions. Therefore, if a local lookup for a transaction or its receipt fails, the ZKsync node will attempt the - same query on the main node. + response is returned from the main node. +- Querying transactions: The Node is not + aware of the main node's mempool, and it does not sync rejected transactions. Therefore, if a local lookup for a + transaction or its receipt fails, the Node will attempt the same query on the main node. Apart from these cases, the API does not depend on the main node. Even if the main node is temporarily unavailable, the -ZKsync node can continue to serve the state it has locally. +Node can continue to serve the state it has locally. ## Fetcher -The Fetcher component is responsible for maintaining synchronization between the ZKsync node and the main node. Its -primary task is to fetch new blocks in order to update the local chain state. However, its responsibilities extend -beyond that. For instance, the Fetcher is also responsible for keeping track of L1 batch statuses. This involves -monitoring whether locally applied batches have been committed, proven, or executed on L1. +The Fetcher component is responsible for maintaining synchronization between the Node and the main node. Its primary +task is to fetch new blocks in order to update the local chain state. However, its responsibilities extend beyond that. +For instance, the Fetcher is also responsible for keeping track of L1 batch statuses. This involves monitoring whether +locally applied batches have been committed, proven, or executed on L1.
-It is worth noting that in addition to fetching the _state_, the ZKsync node also retrieves the L1 gas price from the -main node for the purpose of estimating fees for L2 transactions (since this also happens based on the local state). -This information is necessary to ensure that gas estimations are performed in the exact same manner as the main node, -thereby reducing the chances of a transaction not being included in a block. +It is worth noting that in addition to fetching the _state_, the Node also retrieves the L1 gas price from the main node +for the purpose of estimating fees for L2 transactions (since this also happens based on the local state). This +information is necessary to ensure that gas estimations are performed in the exact same manner as the main node, thereby +reducing the chances of a transaction not being included in a block. ## State Keeper / VM The State Keeper component serves as the "sequencer" part of the node. It shares most of its functionality with the main node, with one key distinction. The main node retrieves transactions from the mempool and has the authority to decide -when a specific L2 block or L1 batch should be sealed. On the other hand, the ZKsync node retrieves transactions from -the queue populated by the Fetcher and seals the corresponding blocks/batches based on the data obtained from the -Fetcher queue. +when a specific L2 block or L1 batch should be sealed. On the other hand, the Node retrieves transactions from the queue +populated by the Fetcher and seals the corresponding blocks/batches based on the data obtained from the Fetcher queue. -The actual execution of batches takes place within the VM, which is identical in both the Main and ZKsync nodes. +The actual execution of batches takes place within the VM, which is identical in both the Main Node and the Node.
## Reorg Detector -In ZKsync Era, it is theoretically possible for L1 batches to be reverted before the corresponding "execute" operation -is applied on L1, that is before the block is [final][finality]. Such situations are highly uncommon and typically occur -due to significant issues: e.g. a bug in the sequencer implementation preventing L1 batch commitment. Prior to batch -finality, the ZKsync operator can perform a rollback, reverting one or more batches and restoring the blockchain state -to a previous point. Finalized batches cannot be reverted at all. +In a ZK Stack chain, it is theoretically possible for L1 batches to be reverted before the corresponding "execute" +operation is applied on L1, that is before the block is [final][finality]. Such situations are highly uncommon and +typically occur due to significant issues: e.g. a bug in the sequencer implementation preventing L1 batch commitment. +Prior to batch finality, the chain operator can perform a rollback, reverting one or more batches and restoring the +blockchain state to a previous point. Finalized batches cannot be reverted at all. -However, even though such situations are rare, the ZKsync node must handle them correctly. +However, even though such situations are rare, the Node must handle them correctly. -To address this, the ZKsync node incorporates a Reorg Detector component. This module keeps track of all L1 batches that -have not yet been finalized. It compares the locally obtained state root hashes with those provided by the main node's -API. If the root hashes for the latest available L1 batch do not match, the Reorg Detector searches for the specific L1 -batch responsible for the divergence. Subsequently, it rolls back the local state and restarts the node. Upon restart, -the EN resumes normal operation. +To address this, the Node incorporates a Reorg Detector component. This module keeps track of all L1 batches that have +not yet been finalized. 
It compares the locally obtained state root hashes with those provided by the main node's API. +If the root hashes for the latest available L1 batch do not match, the Reorg Detector searches for the specific L1 batch +responsible for the divergence. Subsequently, it rolls back the local state and restarts the node. Upon restart, the EN +resumes normal operation. [finality]: https://docs.zksync.io/zk-stack/concepts/finality @@ -67,13 +65,12 @@ When the Consistency Checker detects that a particular batch has been sent to L1 known as the "block commitment" for the L1 transaction. The block commitment contains crucial data such as the state root and batch number, and is the same commitment that is used for generating a proof for the batch. The Consistency Checker then compares the locally obtained commitment with the actual commitment sent to L1. If the data does not match, -it indicates a potential bug in either the main node or ZKsync node implementation or that the main node API has -provided incorrect data. In either case, the state of the ZKsync node cannot be trusted, and the ZKsync node enters a -crash loop until the issue is resolved. +it indicates a potential bug in either the main node or Node implementation or that the main node API has provided +incorrect data. In either case, the state of the Node cannot be trusted, and the Node enters a crash loop until the +issue is resolved. ## Health check server -The ZKsync node also exposes an additional server that returns HTTP 200 response when the ZKsync node is operating -normally, and HTTP 503 response when some of the health checks don't pass (e.g. when the ZKsync node is not fully -initialized yet). This server can be used, for example, to implement the readiness probe in an orchestration solution -you use. +The Node also exposes an additional server that returns HTTP 200 response when the Node is operating normally, and HTTP +503 response when some of the health checks don't pass (e.g. 
when the Node is not fully initialized yet). This server +can be used, for example, to implement the readiness probe in an orchestration solution you use. diff --git a/docs/src/guides/external-node/07_snapshots_recovery.md b/docs/src/guides/external-node/07_snapshots_recovery.md index 0053717af063..ecab9c3702df 100644 --- a/docs/src/guides/external-node/07_snapshots_recovery.md +++ b/docs/src/guides/external-node/07_snapshots_recovery.md @@ -39,10 +39,10 @@ error mentioning the first locally retained block or L1 batch if queried this mi used for [pruning](08_pruning.md) because logically, recovering from a snapshot is equivalent to pruning node storage to the snapshot L1 batch. -## Configuration +## Configuration (for ZKsync Era) -To enable snapshot recovery on mainnet, you need to set environment variables for a node before starting it for the -first time: +To enable snapshot recovery on ZKsync mainnet, you need to set environment variables for a node before starting it for +the first time: ```yaml EN_SNAPSHOTS_RECOVERY_ENABLED: 'true' @@ -50,7 +50,7 @@ EN_SNAPSHOTS_OBJECT_STORE_BUCKET_BASE_URL: 'zksync-era-mainnet-external-node-sna EN_SNAPSHOTS_OBJECT_STORE_MODE: 'GCSAnonymousReadOnly' ``` -For the Sepolia testnet, use: +For the ZKsync Sepolia testnet, use: ```yaml EN_SNAPSHOTS_RECOVERY_ENABLED: 'true' @@ -58,7 +58,7 @@ EN_SNAPSHOTS_OBJECT_STORE_BUCKET_BASE_URL: 'zksync-era-boojnet-external-node-sna EN_SNAPSHOTS_OBJECT_STORE_MODE: 'GCSAnonymousReadOnly' ``` -For a working examples of a fully configured Nodes recovering from snapshots, see +For working examples of fully configured ZKsync Nodes recovering from snapshots, see [Docker Compose examples](docker-compose-examples) and [_Quick Start_](00_quick_start.md).
If a node is already recovered (does not matter whether from a snapshot or from a Postgres dump), setting these env diff --git a/docs/src/guides/external-node/08_pruning.md b/docs/src/guides/external-node/08_pruning.md index 06bd9f8d8a9d..7bb1a64dbed8 100644 --- a/docs/src/guides/external-node/08_pruning.md +++ b/docs/src/guides/external-node/08_pruning.md @@ -1,9 +1,8 @@ # Pruning -It is possible to configure a ZKsync node to periodically prune all data from L1 batches older than a configurable -threshold. Data is pruned both from Postgres and from tree (RocksDB). Pruning happens continuously (i.e., does not -require stopping the node) in the background during normal node operation. It is designed to not significantly impact -node performance. +It is possible to configure a Node to periodically prune all data from L1 batches older than a configurable threshold. +Data is pruned both from Postgres and from tree (RocksDB). Pruning happens continuously (i.e., does not require stopping +the node) in the background during normal node operation. It is designed to not significantly impact node performance. Types of pruned data in Postgres include: diff --git a/docs/src/guides/external-node/09_treeless_mode.md b/docs/src/guides/external-node/09_treeless_mode.md index ceeea6f86c67..05062c30abef 100644 --- a/docs/src/guides/external-node/09_treeless_mode.md +++ b/docs/src/guides/external-node/09_treeless_mode.md @@ -1,10 +1,10 @@ # Treeless Operation Mode -Normally, a ZKsync node needs to run the Merkle tree component (aka _metadata calculator_) in order to compute L1 batch -state root hashes. A state root hash from the previous batch can be accessed by L2 contracts, so processing transactions -in an L1 batch cannot start until the state root hash of the previous L1 batch is computed. Merkle tree requires -non-trivial storage space and RAM (roughly 3 TB and 32 GB respectively for an archival mainnet node as of July 2024). 
-While storage and RAM requirements can be significantly lowered with [snapshot recovery](07_snapshots_recovery.md) and +Normally, a Node needs to run the Merkle tree component (aka _metadata calculator_) in order to compute L1 batch state +root hashes. A state root hash from the previous batch can be accessed by L2 contracts, so processing transactions in an +L1 batch cannot start until the state root hash of the previous L1 batch is computed. Merkle tree requires non-trivial +storage space and RAM (roughly 3 TB and 32 GB respectively for an archival mainnet node as of July 2024). While storage +and RAM requirements can be significantly lowered with [snapshot recovery](07_snapshots_recovery.md) and [pruning](08_pruning.md), **treeless operation mode** allows to run a node without a local Merkle tree instance at all. ## How it works diff --git a/docs/src/guides/external-node/10_decentralization.md b/docs/src/guides/external-node/10_decentralization.md index 951538e6ab86..7f301cfbf045 100644 --- a/docs/src/guides/external-node/10_decentralization.md +++ b/docs/src/guides/external-node/10_decentralization.md @@ -1,8 +1,8 @@ # Decentralization -In the default setup, the ZKsync node will fetch data from the ZKsync API endpoint maintained by Matter Labs. To reduce -the reliance on this centralized endpoint we have developed a decentralized p2p networking stack (aka gossipnet) which -will eventually be used instead of ZKsync API for synchronizing data. +In the default setup, the Node will fetch data from the ZKsync API endpoint maintained by Matter Labs. To reduce the +reliance on this centralized endpoint we have developed a decentralized p2p networking stack (aka gossipnet) which will +eventually be used instead of ZKsync API for synchronizing data. On the gossipnet, the data integrity will be protected by the BFT (byzantine fault-tolerant) consensus algorithm (currently data is signed just by the main node though). 
@@ -35,9 +35,9 @@ chmod 600 consensus_secrets.yaml ### Preparing configuration file Copy the template of the consensus configuration file (for -[mainnet](https://github.com/matter-labs/zksync-era/blob/main/docs/guides/external-node/prepared_configs/mainnet_consensus_config.yaml) +[mainnet](https://github.com/matter-labs/zksync-era/blob/main/docs/src/guides/external-node/prepared_configs/mainnet_consensus_config.yaml) or -[testnet](https://github.com/matter-labs/zksync-era/blob/main/docs/guides/external-node/prepared_configs/testnet_consensus_config.yaml) +[testnet](https://github.com/matter-labs/zksync-era/blob/main/docs/src/guides/external-node/prepared_configs/testnet_consensus_config.yaml) ). > [!NOTE] diff --git a/docs/src/guides/external-node/11_setup_for_other_chains.md b/docs/src/guides/external-node/11_setup_for_other_chains.md new file mode 100644 index 000000000000..9d1c88ea91be --- /dev/null +++ b/docs/src/guides/external-node/11_setup_for_other_chains.md @@ -0,0 +1,50 @@ +# Steps to modify the docker-compose files to support Other Chains + +Below are the steps for adjusting ZKsync Era docker-compose files from [here](00_quick_start.md) to support chains other +than ZKsync Era. + +> [!NOTE] +> +> If you want to run Node for a given chain, you can first ask the company hosting the chains for the Dockerfiles. + +## 1. Update `EN_L2_CHAIN_ID` + +The `EN_L2_CHAIN_ID` environment variable specifies the Layer 2 chain ID of the blockchain. + +You can get it using main node rpc call `eth_chainId` or by asking the company hosting the chain. For example: + +``` +curl -X POST https://mainnet.era.zksync.io \ +-H "Content-Type: application/json" \ +-d '{"jsonrpc": "2.0", "method": "eth_chainId", "params": [], "id": 1}' +``` + +returns + +``` +{ "jsonrpc": "2.0", "result": "0x144", "id": 1} +``` + +where `0x144` is the chain ID (324 in decimal) + +## 2. 
Update `EN_MAIN_NODE_URL` + +The `EN_MAIN_NODE_URL` environment variable should point to the main node URL of the target chain. + +## 3. Update snapshots recovery settings + +Snapshots recovery is a feature that allows faster Node startup at the cost of no transaction history. By default the +ZKsync Era docker-compose file has this feature enabled, but it's only recommended to use if the Node first startup time +is too slow. It can be disabled by changing `EN_SNAPSHOTS_RECOVERY_ENABLED` to `false` + +If you want to keep this feature enabled for a Node, ask the company hosting the chain for the bucket name where the +snapshots are stored and update the value of `EN_SNAPSHOTS_OBJECT_STORE_BUCKET_BASE_URL` + +## 4. Disable consensus + +Chains other than ZKsync Era aren't currently running consensus (as of December 2024). You need to disable it by removing +`--enable-consensus` flag from `entrypoint.sh` invocation in docker-compose + +## 5. (Validium chains only) Set `EN_L1_BATCH_COMMIT_DATA_GENERATOR_MODE` + +For validium chains, you need to set `EN_L1_BATCH_COMMIT_DATA_GENERATOR_MODE: "Validium"` diff --git a/docs/src/specs/l1_smart_contracts.md b/docs/src/specs/l1_smart_contracts.md index 65c408714ba3..23fede090124 100644 --- a/docs/src/specs/l1_smart_contracts.md +++ b/docs/src/specs/l1_smart_contracts.md @@ -184,7 +184,7 @@ fee-on-transfer tokens or other custom logic for handling user balances. The owner of the L1ERC20Bridge is the Governance contract. -### L1SharedBridge +### L1AssetRouter The main bridge implementation handles transfers Ether, ERC20 tokens and of WETH tokens between the two domains.
It is designed to streamline and enhance the user experience for bridging WETH tokens by minimizing the number of transactions diff --git a/etc/env/base/chain.toml b/etc/env/base/chain.toml index 6d1fdae53cee..7b632c3ae3a4 100644 --- a/etc/env/base/chain.toml +++ b/etc/env/base/chain.toml @@ -90,8 +90,8 @@ fee_model_version = "V2" validation_computational_gas_limit = 300000 save_call_traces = true -bootloader_hash = "0x010008c3be57ae5800e077b6c2056d9d75ad1a7b4f0ce583407961cc6fe0b678" -default_aa_hash = "0x0100055dba11508480be023137563caec69debc85f826cb3a4b68246a7cabe30" +bootloader_hash = "0x010008c753336bc8d1ddca235602b9f31d346412b2d463cd342899f7bfb73baf" +default_aa_hash = "0x0100055d760f11a3d737e7fd1816e600a4cd874a9f17f7a225d1f1c537c51a1e" protective_reads_persistence_enabled = false diff --git a/etc/env/base/contracts.toml b/etc/env/base/contracts.toml index ef52ed4c711b..d3eaabf92bd8 100644 --- a/etc/env/base/contracts.toml +++ b/etc/env/base/contracts.toml @@ -15,7 +15,6 @@ DIAMOND_PROXY_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" L1_MULTICALL3_ADDR = "0xcA11bde05977b3631167028862bE2a173976CA11" L1_ERC20_BRIDGE_PROXY_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" L1_ERC20_BRIDGE_IMPL_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" -L2_ERC20_BRIDGE_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" L2_TESTNET_PAYMASTER_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" L1_ALLOW_LIST_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" CREATE2_FACTORY_ADDR = "0xce0042B868300000d44A59004Da54A005ffdcf9f" @@ -26,13 +25,10 @@ RECURSION_NODE_LEVEL_VK_HASH = "0x1186ec268d49f1905f8d9c1e9d39fc33e98c74f91d91a2 RECURSION_LEAF_LEVEL_VK_HASH = "0x101e08b00193e529145ee09823378ef51a3bc8966504064f1f6ba3f1ba863210" RECURSION_CIRCUITS_SET_VKS_HASH = "0x18c1639094f58177409186e8c48d9f577c9410901d2f1d486b3e7d6cf553ae4c" GENESIS_TX_HASH = "0xb99ebfea46cbe05a21cd80fe5597d97b204befc52a16303f579c607dc1ac2e2e" -GENESIS_ROOT = 
"0x7275936e5a0063b159d5d22734931fea07871e8d57e564d61ef56e4a6ee23e5c" -GENESIS_BATCH_COMMITMENT = "0xf5f9a5abe62e8a6e0cb2d34d27435c3e5a8fbd7e2e54ca1d108fc58cb86c708a" PRIORITY_TX_MAX_GAS_LIMIT = 72000000 DEPLOY_L2_BRIDGE_COUNTERPART_GAS_LIMIT = 10000000 -GENESIS_ROLLUP_LEAF_INDEX = "54" -GENESIS_PROTOCOL_VERSION = "25" -GENESIS_PROTOCOL_SEMANTIC_VERSION = "0.25.0" +GENESIS_PROTOCOL_VERSION = "26" +GENESIS_PROTOCOL_SEMANTIC_VERSION = "0.26.0" L1_WETH_BRIDGE_IMPL_ADDR = "0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9" L1_WETH_BRIDGE_PROXY_ADDR = "0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9" L1_WETH_TOKEN_ADDR = "0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9" @@ -41,6 +37,19 @@ L2_WETH_TOKEN_IMPL_ADDR = "0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9" L2_WETH_TOKEN_PROXY_ADDR = "0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9" BLOB_VERSIONED_HASH_RETRIEVER_ADDR = "0x0000000000000000000000000000000000000000" +GENESIS_ROOT = "0x09e68951458b18c24ae5f4100160b53c4888c9b3c3c1859cc674bc02236675ad" +GENESIS_BATCH_COMMITMENT = "0x7238eab6a0e9f5bb84421feae6b6b9ae80816d490c875d29ff3ded375a3e078f" +GENESIS_ROLLUP_LEAF_INDEX = "64" + +# Ecosystem-wide params +L1_ROLLUP_DA_VALIDATOR = "0x0000000000000000000000000000000000000000" +L1_VALIDIUM_DA_VALIDATOR = "0x0000000000000000000000000000000000000000" + +# Chain-specific params +L1_DA_VALIDATOR_ADDR = "0x0000000000000000000000000000000000000000" +L2_DA_VALIDATOR_ADDR = "0x0000000000000000000000000000000000000000" +L1_RELAYED_SL_DA_VALIDATOR = "0x0000000000000000000000000000000000000000" + L1_SHARED_BRIDGE_IMPL_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" # These are currently not used, but will be used once the shared bridge is up BRIDGEHUB_PROXY_ADDR = "0x0000000000000000000000000000000000000000" @@ -48,13 +57,29 @@ BRIDGEHUB_IMPL_ADDR = "0x0000000000000000000000000000000000000000" STATE_TRANSITION_PROXY_ADDR = "0x0000000000000000000000000000000000000000" STATE_TRANSITION_IMPL_ADDR = "0x0000000000000000000000000000000000000000" 
TRANSPARENT_PROXY_ADMIN_ADDR = "0x0000000000000000000000000000000000000000" +L2_PROXY_ADMIN_ADDR = "0x0000000000000000000000000000000000000000" BASE_TOKEN_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" BASE_TOKEN_BRIDGE_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" GENESIS_UPGRADE_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" -MAX_NUMBER_OF_HYPERCHAINS = 100 +MAX_NUMBER_OF_ZK_CHAINS = 100 L1_SHARED_BRIDGE_PROXY_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" -L2_SHARED_BRIDGE_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" -L2_SHARED_BRIDGE_IMPL_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +L1_NATIVE_TOKEN_VAULT_IMPL_ADDR ="0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +L1_NATIVE_TOKEN_VAULT_PROXY_ADDR ="0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +L2_NATIVE_TOKEN_VAULT_IMPL_ADDR = "0x0000000000000000000000000000000000010004" +L2_NATIVE_TOKEN_VAULT_PROXY_ADDR = "0x0000000000000000000000000000000000010004" +L2_SHARED_BRIDGE_IMPL_ADDR = "0x0000000000000000000000000000000000010003" +L2_SHARED_BRIDGE_ADDR = "0x0000000000000000000000000000000000010003" +L2_ERC20_BRIDGE_ADDR = "0x0000000000000000000000000000000000010003" +CTM_DEPLOYMENT_TRACKER_IMPL_ADDR ="0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +CTM_DEPLOYMENT_TRACKER_PROXY_ADDR ="0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +MESSAGE_ROOT_IMPL_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +MESSAGE_ROOT_PROXY_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +L1_NULLIFIER_IMPL_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +L1_NULLIFIER_PROXY_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +L1_BRIDGED_STANDARD_ERC20_IMPL_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +L1_BRIDGED_TOKEN_BEACON_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +L2_LEGACY_SHARED_BRIDGE_IMPL_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +L2_LEGACY_SHARED_BRIDGE_ADDR = "0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" FRI_RECURSION_LEAF_LEVEL_VK_HASH = 
"0xf9664f4324c1400fa5c3822d667f30e873f53f1b8033180cd15fe41c1e2355c6" FRI_RECURSION_NODE_LEVEL_VK_HASH = "0xf520cd5b37e74e19fdb369c8d676a04dce8a19457497ac6686d2bb95d94109c8" FRI_RECURSION_SCHEDULER_LEVEL_VK_HASH = "0x14f97b81e54b35fe673d8708cc1a19e1ea5b5e348e12d31e39824ed4f42bbca2" @@ -64,6 +89,9 @@ SHARED_BRIDGE_UPGRADE_STORAGE_SWITCH = 0 ERA_CHAIN_ID = 9 ERA_DIAMOND_PROXY_ADDR = "0x0000000000000000000000000000000000000000" CHAIN_ADMIN_ADDR = "0x0000000000000000000000000000000000000000" +CTM_ASSET_INFO = "0xf9664f4324c1400fa5c3822d667f30e873f53f1b8033180cd15fe41c1e2355c6" + +L1_CHAIN_ID = 9 [contracts.test] dummy_verifier = true easy_priority_mode = false diff --git a/etc/env/base/fri_proof_compressor.toml b/etc/env/base/fri_proof_compressor.toml index a8825ca98613..c8855d6b2a48 100644 --- a/etc/env/base/fri_proof_compressor.toml +++ b/etc/env/base/fri_proof_compressor.toml @@ -5,8 +5,8 @@ prometheus_pushgateway_url = "http://127.0.0.1:9091" prometheus_push_interval_ms = 100 generation_timeout_in_secs = 3600 max_attempts = 5 -universal_setup_path = "../keys/setup/setup_2^24.key" -universal_setup_download_url = "https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2^24.key" +universal_setup_path = "../keys/setup/setup_compact.key" +universal_setup_download_url = "https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_compact.key" verify_wrapper_proof = true universal_fflonk_setup_path = "../keys/setup/setup_fflonk_compact.key" universal_fflonk_setup_download_url = "https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_fflonk_compact.key" diff --git a/etc/env/file_based/general.yaml b/etc/env/file_based/general.yaml index 31dd1a0ed742..ece5a1156c5f 100644 --- a/etc/env/file_based/general.yaml +++ b/etc/env/file_based/general.yaml @@ -41,7 +41,7 @@ api: estimate_gas_scale_factor: 1.3 estimate_gas_acceptable_overestimation: 5000 max_tx_size: 1000000 - api_namespaces: [ en,eth,net,web3,zks,pubsub,debug ] + 
api_namespaces: [ en,eth,net,web3,zks,pubsub,debug,unstable ] state_keeper: transaction_slots: 8192 max_allowed_l2_tx_gas_limit: 15000000000 @@ -182,8 +182,8 @@ proof_compressor: prometheus_push_interval_ms: 100 generation_timeout_in_secs: 3600 max_attempts: 5 - universal_setup_path: keys/setup/setup_2^24.key - universal_setup_download_url: https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2^24.key + universal_setup_path: keys/setup/setup_compact.key + universal_setup_download_url: https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_compact.key verify_wrapper_proof: true universal_fflonk_setup_path: keys/setup/setup_fflonk_compact.key universal_fflonk_setup_download_url: https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_fflonk_compact.key diff --git a/etc/env/file_based/genesis.yaml b/etc/env/file_based/genesis.yaml index 9f94dd0c04b6..480f798ce4df 100644 --- a/etc/env/file_based/genesis.yaml +++ b/etc/env/file_based/genesis.yaml @@ -1,9 +1,9 @@ -genesis_root: 0x9b30c35100835c0d811c9d385cc9804816dbceb4461b8fe4cbb8d0d5ecdacdec -genesis_rollup_leaf_index: 54 -genesis_batch_commitment: 0x043d432c1b668e54ada198d683516109e45e4f7f81f216ff4c4f469117732e50 -genesis_protocol_version: 25 -default_aa_hash: 0x01000523eadd3061f8e701acda503defb7ac3734ae3371e4daf7494651d8b523 -bootloader_hash: 0x010008e15394cd83a8d463d61e00b4361afbc27c932b07a9d2100861b7d05e78 +genesis_root: 0x7bdb3d822ad837a3611c436d3be457363a08d06d83b74469831482353a7d8277 +genesis_rollup_leaf_index: 68 +genesis_batch_commitment: 0x81f5e324a4019e4161fb9dc5058a588aa364a551fdd5c0e8788521e64e7ad596 +genesis_protocol_version: 26 +default_aa_hash: 0x010004dbf8be36c421254d005352f8245146906919be0099e8a50d0e78df85e0 +bootloader_hash: 0x0100088580465d88420e6369230ee94a32ff356dbcdd407a4be49fc8009b2a81 l1_chain_id: 9 l2_chain_id: 270 fee_account: '0x0000000000000000000000000000000000000001' @@ -11,7 +11,7 @@ prover: fflonk_snark_wrapper_vk_hash: 
0x560b19cfd6bcf1049c6409c18d81db288ab7639db080ed3b48df17ddfbcc4666 dummy_verifier: true snark_wrapper_vk_hash: 0x14f97b81e54b35fe673d8708cc1a19e1ea5b5e348e12d31e39824ed4f42bbca2 -genesis_protocol_semantic_version: 0.25.0 +genesis_protocol_semantic_version: 0.26.0 l1_batch_commit_data_generator_mode: Rollup # TODO: uncomment once EVM emulator is present in the `contracts` submodule # evm_emulator_hash: 0x01000e53aa35d9d19fa99341c2e2901cf93b3668f01569dd5c6ca409c7696b91 diff --git a/etc/env/file_based/overrides/validium.yaml b/etc/env/file_based/overrides/validium.yaml index 1af02dd95893..516973f66689 100644 --- a/etc/env/file_based/overrides/validium.yaml +++ b/etc/env/file_based/overrides/validium.yaml @@ -4,3 +4,5 @@ eth: state_keeper: pubdata_overhead_part: 0 compute_overhead_part: 1 +da_client: + no_da: {} diff --git a/etc/multivm_bootloaders/vm_gateway/commit b/etc/multivm_bootloaders/vm_gateway/commit index a3547f577034..b6352645c93a 100644 --- a/etc/multivm_bootloaders/vm_gateway/commit +++ b/etc/multivm_bootloaders/vm_gateway/commit @@ -1 +1 @@ -a8bf0ca28d43899882a2e123e2fdf1379f0fd656 +16dedf6d77695ce00f81fce35a3066381b97fca1 diff --git a/etc/multivm_bootloaders/vm_gateway/fee_estimate.yul/fee_estimate.yul.zbin b/etc/multivm_bootloaders/vm_gateway/fee_estimate.yul/fee_estimate.yul.zbin index fb6017f69cf0..9f8ed5b9d676 100644 Binary files a/etc/multivm_bootloaders/vm_gateway/fee_estimate.yul/fee_estimate.yul.zbin and b/etc/multivm_bootloaders/vm_gateway/fee_estimate.yul/fee_estimate.yul.zbin differ diff --git a/etc/multivm_bootloaders/vm_gateway/gas_test.yul/gas_test.yul.zbin b/etc/multivm_bootloaders/vm_gateway/gas_test.yul/gas_test.yul.zbin index c1726d8301ff..3268a37a313c 100644 Binary files a/etc/multivm_bootloaders/vm_gateway/gas_test.yul/gas_test.yul.zbin and b/etc/multivm_bootloaders/vm_gateway/gas_test.yul/gas_test.yul.zbin differ diff --git a/etc/multivm_bootloaders/vm_gateway/playground_batch.yul/playground_batch.yul.zbin 
b/etc/multivm_bootloaders/vm_gateway/playground_batch.yul/playground_batch.yul.zbin index b154276bd611..ef3354d54659 100644 Binary files a/etc/multivm_bootloaders/vm_gateway/playground_batch.yul/playground_batch.yul.zbin and b/etc/multivm_bootloaders/vm_gateway/playground_batch.yul/playground_batch.yul.zbin differ diff --git a/etc/multivm_bootloaders/vm_gateway/proved_batch.yul/proved_batch.yul.zbin b/etc/multivm_bootloaders/vm_gateway/proved_batch.yul/proved_batch.yul.zbin index 2506ce065d74..e877b81cc2fe 100644 Binary files a/etc/multivm_bootloaders/vm_gateway/proved_batch.yul/proved_batch.yul.zbin and b/etc/multivm_bootloaders/vm_gateway/proved_batch.yul/proved_batch.yul.zbin differ diff --git a/etc/nix/tee_prover.nix b/etc/nix/tee_prover.nix index 55545d1bb8e4..5811297ce854 100644 --- a/etc/nix/tee_prover.nix +++ b/etc/nix/tee_prover.nix @@ -7,7 +7,7 @@ let in craneLib.buildPackage (commonArgs // { inherit pname; - version = (builtins.fromTOML (builtins.readFile ../../core/bin/zksync_tee_prover/Cargo.toml)).package.version; + version = (builtins.fromTOML (builtins.readFile ../../core/Cargo.toml)).workspace.package.version; inherit cargoExtraArgs; cargoArtifacts = craneLib.buildDepsOnly (commonArgs // { diff --git a/etc/nix/zksync.nix b/etc/nix/zksync.nix index 1ecac58b5d91..16d452c01bfd 100644 --- a/etc/nix/zksync.nix +++ b/etc/nix/zksync.nix @@ -3,7 +3,7 @@ }: craneLib.buildPackage (commonArgs // { pname = "zksync"; - version = (builtins.fromTOML (builtins.readFile ../../core/bin/zksync_tee_prover/Cargo.toml)).package.version; + version = (builtins.fromTOML (builtins.readFile ../../core/Cargo.toml)).workspace.package.version; cargoExtraArgs = "--all"; cargoArtifacts = craneLib.buildDepsOnly (commonArgs // { diff --git a/flake.nix b/flake.nix index 8c08e880910d..630d719aa4df 100644 --- a/flake.nix +++ b/flake.nix @@ -84,17 +84,7 @@ snappy.dev ]; - src = with pkgs.lib.fileset; toSource { - root = ./.; - fileset = unions [ - ./Cargo.lock - ./Cargo.toml - ./core - 
./prover - ./zkstack_cli - ./.github/release-please/manifest.json - ]; - }; + src = ./core/.; env = { OPENSSL_NO_VENDOR = "1"; diff --git a/infrastructure/local-gateway-upgrade-testing/README.md b/infrastructure/local-gateway-upgrade-testing/README.md new file mode 100644 index 000000000000..d878055f5ff4 --- /dev/null +++ b/infrastructure/local-gateway-upgrade-testing/README.md @@ -0,0 +1,118 @@ +# Local upgrade testing + +While it is theoretically possible to do it in CI-like style, it generally leads to needless recompilations, esp of rust +programs. + +Here we contain the files/instructions needed to test the gateway upgrade locally. + +## Step 0 + +- pull zksync-era to ~/zksync-era +- pull zksync-era-private to ~/zksync-era-private + +## Step 1: Preparation + +To easiest way to avoid needless is caching. There are two ways to avoid caching: + +- Cache target/etc in a separate directory +- Have two folders of zksync-era and switch between those + +We use the second approach for robustness and simplicity. + +### Enabling `era-cacher` + +Copy `era-cacher` to some other folder (as the zksync-era one will change) and add it to PATH, so it can be invoked. + +You should download a clone of zksync-era, put it into the `zksync-era-old` directory. It should point to the commit of +`main` we will upgrade from. + +## Step 2: spawning old chain + +Run `use-old-era.sh`. The old contents of the zksync-era will be moved to `zksync-era-new` folder (there the gateway +version is stored), while the old one will be present in `zksync-era-new`. + +## Step 3: Move to new chain and upgrade it + +Use upgrade scripts as in the example below. 
+ +## Full flow + +``` +# make sure that there are 2 folders: zksync-era with old era and zksync-era-private with new era +# if test was run previously you probably need to move folder +mv ~/zksync-era-current ~/zksync-era-private + +cd ~ && use-old-era.sh && cd ./zksync-era-current + +zkstackup --local && zkstack dev clean all && zkstack up --observability false + +zkstack ecosystem init --deploy-paymaster --deploy-erc20 \ + --deploy-ecosystem --l1-rpc-url=http://127.0.0.1:8545 \ + --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ + --server-db-name=zksync_server_localhost_era \ + --ignore-prerequisites --verbose \ + --observability=false + +cd ~ && use-new-era.sh && cd ./zksync-era-current + +zkstackup --local +zkstack dev contracts +zkstack dev database migrate + +zkstack chain gateway-upgrade -- adapt-config + +# Server should be started in a different window for consistency +zkstack server --ignore-prerequisites --chain era + +zkstack e gateway-upgrade --ecosystem-upgrade-stage no-governance-prepare + +# only if chain has weth deployed before upgrade. +# i.e. you must run it iff `predeployed_l2_wrapped_base_token_address` is set in config. +zkstack chain gateway-upgrade -- set-l2weth-for-chain + +zkstack e gateway-upgrade --ecosystem-upgrade-stage governance-stage1 + +zkstack chain gateway-upgrade -- prepare-stage1 + +# restart the server. wait for all L1 txs to exeucte!!!! + +zkstack chain gateway-upgrade -- schedule-stage1 + +# turn off the server => we need it because we need to somehow update validator timelock +# also getPriorityTreeStartIndex needs to be updated. 
+ +zkstack chain gateway-upgrade -- finalize-stage1 + +# restart the server + +cd ~/zksync-era +zkstack dev test integration --no-deps --ignore-prerequisites --chain era +cd ~/zksync-era-current + +zkstack ecosystem gateway-upgrade --ecosystem-upgrade-stage governance-stage2 +zkstack ecosystem gateway-upgrade --ecosystem-upgrade-stage no-governance-stage2 + +# turn off the server + +zkstack chain gateway-upgrade -- finalize-stage2 + +# turn on the server + +zkstack dev test integration --no-deps --ignore-prerequisites --chain era + + + +zkstack ecosystem gateway-upgrade --ecosystem-upgrade-stage governance-stage3 +zkstack ecosystem gateway-upgrade --ecosystem-upgrade-stage no-governance-stage3 + +# in separate window +zkstack server --ignore-prerequisites --chain gateway + +# wait for era server to finalize all L1 txs +# stop era server! + +zkstack chain migrate-to-gateway --chain era --gateway-chain-name gateway + +# restart era server! +zkstack dev test integration --no-deps --ignore-prerequisites --chain era +``` diff --git a/infrastructure/local-gateway-upgrade-testing/era-cacher/use-new-era.sh b/infrastructure/local-gateway-upgrade-testing/era-cacher/use-new-era.sh new file mode 100755 index 000000000000..7b2bc9ad495a --- /dev/null +++ b/infrastructure/local-gateway-upgrade-testing/era-cacher/use-new-era.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +OLD_REPO=~/zksync-era +NEW_REPO=~/zksync-era-private + +WORKING_DIRECTORY=~/zksync-era-current + +# Check if the folder exists +if [ ! -d "$NEW_REPO" ]; then + echo "Error: The folder '$NEW_REPO' does not exist." 
+ exit 1 +else + echo "Updating to use new era" +fi + +rm -rf $NEW_REPO/chains +mkdir $NEW_REPO/chains +cp -rf $WORKING_DIRECTORY/chains $NEW_REPO + + +rm -rf $NEW_REPO/configs +mkdir $NEW_REPO/configs +cp -rf $WORKING_DIRECTORY/configs $NEW_REPO + + +mv $WORKING_DIRECTORY $OLD_REPO +mv $NEW_REPO $WORKING_DIRECTORY diff --git a/infrastructure/local-gateway-upgrade-testing/era-cacher/use-old-era.sh b/infrastructure/local-gateway-upgrade-testing/era-cacher/use-old-era.sh new file mode 100755 index 000000000000..e52d6ad278b7 --- /dev/null +++ b/infrastructure/local-gateway-upgrade-testing/era-cacher/use-old-era.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +OLD_REPO=~/zksync-era +NEW_REPO=~/zksync-era-private + +WORKING_DIRECTORY=~/zksync-era-current + +# Check if the folder exists +if [ ! -d "$OLD_REPO" ]; then + echo "Error: The folder '$OLD_REPO' does not exist." + exit 1 +else + echo "Updating to use old era." +fi + +mv $OLD_REPO $WORKING_DIRECTORY diff --git a/infrastructure/zk/src/server.ts b/infrastructure/zk/src/server.ts index 8b10559361ae..14a27f6a48be 100644 --- a/infrastructure/zk/src/server.ts +++ b/infrastructure/zk/src/server.ts @@ -17,7 +17,7 @@ export async function server(rebuildTree: boolean, uring: boolean, components?: if (components) { options += ` --components=${components}`; } - await utils.spawn(`cargo run --bin zksync_server --release ${options}`); + await utils.spawn(`cargo run --manifest-path core/Cargo.toml--bin zksync_server --release ${options}`); } export async function externalNode(reinit: boolean = false, args: string[]) { @@ -37,7 +37,9 @@ export async function externalNode(reinit: boolean = false, args: string[]) { clean(path.dirname(process.env.EN_MERKLE_TREE_PATH!)); } - await utils.spawn(`cargo run --release --bin zksync_external_node -- ${args.join(' ')}`); + await utils.spawn( + `cargo run --manifest-path core/Cargo.toml --release --bin zksync_external_node -- ${args.join(' ')}` + ); } async function create_genesis(cmd: string) { @@ -61,7 
+63,7 @@ async function create_genesis(cmd: string) { export async function genesisFromSources() { // Note that that all the chains have the same chainId at genesis. It will be changed // via an upgrade transaction during the registration of the chain. - await create_genesis('cargo run --bin zksync_server --release -- --genesis'); + await create_genesis('cargo run --manifest-path core/Cargo.toml --bin zksync_server --release -- --genesis'); } export async function genesisFromBinary() { diff --git a/package.json b/package.json index b293bedd8f69..6c7457ba29c0 100644 --- a/package.json +++ b/package.json @@ -7,6 +7,7 @@ "packages": [ "contracts", "contracts/l1-contracts", + "contracts/da-contracts", "contracts/l2-contracts", "contracts/system-contracts", "etc/ERC20", @@ -28,6 +29,7 @@ "local-prep": "yarn workspace local-setup-preparation", "l1-contracts": "yarn workspace l1-contracts", "l2-contracts": "yarn workspace l2-contracts", + "da-contracts": "yarn workspace da-contracts", "revert-test": "yarn workspace revert-test", "upgrade-test": "yarn workspace upgrade-test", "recovery-test": "yarn workspace recovery-test", diff --git a/prover/Cargo.lock b/prover/Cargo.lock index 6fa199e7d5f4..19223f360857 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -625,9 +625,9 @@ dependencies = [ [[package]] name = "boojum" -version = "0.30.12" +version = "0.30.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14bd053feb7173130679a2119e105b5e78af7eb6b0e752de6793e4ee63d8e899" +checksum = "d689807d79092f8f7cfcb72a2313a43da77d56314e41324810566f385875c185" dependencies = [ "arrayvec 0.7.6", "bincode", @@ -657,9 +657,9 @@ dependencies = [ [[package]] name = "boojum-cuda" -version = "0.152.10" +version = "0.152.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbd43bc7fc457920cb3b823e4f95ffbbf180b2c48b8d643125cd121325cdd8db" +checksum = "896aeb550e4b92e6c96858c1d0aa8413c00f97fb91f321a2bf3ed912942870f8" dependencies 
= [ "boojum", "cmake", @@ -805,9 +805,9 @@ dependencies = [ [[package]] name = "circuit_definitions" -version = "0.150.19" +version = "0.150.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10ebc81d5c2f6ee8de436c242f6466fb315fe25afcbc81aa1c47dfca39a55403" +checksum = "1f04f9c7c6b39255199aaba49802c5f40f95bcff24f5a456446a912d254f4bb1" dependencies = [ "circuit_encodings", "crossbeam", @@ -819,26 +819,26 @@ dependencies = [ [[package]] name = "circuit_encodings" -version = "0.150.19" +version = "0.150.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33375d2448a78c1aed9b8755f7939a6b6f19e2fa80f44f4930a5b4c2bb7cbb44" +checksum = "fc3399f1981164c3c687ea15b1eedd35a16f28069c845a24530de21f996f3fdd" dependencies = [ "derivative", "serde", - "zk_evm 0.150.19", + "zk_evm 0.150.20", "zkevm_circuits", ] [[package]] name = "circuit_sequencer_api" -version = "0.150.19" +version = "0.150.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca2fec5c28e5a9f085279e70e13b2eebb63a95ee0bfb99d58095ac01c1c7b256" +checksum = "b5583037ec61607ac481b0c887b7fb4f860e65c92c6f3f7be74f6bab7c40c3ce" dependencies = [ "derivative", "rayon", "serde", - "zk_evm 0.150.19", + "zk_evm 0.150.20", "zksync_bellman", ] @@ -1706,9 +1706,9 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "era_cudart" -version = "0.152.10" +version = "0.152.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6f881cf689ba889bb0fa04c0e71aba701acd7fafd3fa545e3f2782f2a8c0ba0" +checksum = "6ff6fc4fba6bf756cdebd6750161d280af2859d217dad89bfb2823ac760bf0e8" dependencies = [ "bitflags 2.6.0", "era_cudart_sys", @@ -1717,11 +1717,11 @@ dependencies = [ [[package]] name = "era_cudart_sys" -version = "0.152.10" +version = "0.152.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5f29cbd9e6d97fc1f05b484f960e921fe69548b4773a361b2e403e4cb9d6d575" +checksum = "7e0daeb39d2111a868a50e0bd7d90fa355f93022038088c0dd865bbdda1113ef" dependencies = [ - "serde_json", + "regex-lite", ] [[package]] @@ -1838,9 +1838,9 @@ dependencies = [ [[package]] name = "fflonk" -version = "0.30.12" +version = "0.30.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e63d70f1cbf9e572ccaf22ca1dfce4b93ff48b9a5e8dd70de50d87edb960d173" +checksum = "b36c5fa909ab71b7eb4b8f7fd092f72ed83b93f2615e42f245ca808d8f308917" dependencies = [ "bincode", "byteorder", @@ -1855,9 +1855,9 @@ dependencies = [ [[package]] name = "fflonk-cuda" -version = "0.152.10" +version = "0.152.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b008e6158c95747b3b135adbd7f6d563c406849a10c00abfef109b4d0442a589" +checksum = "890b635123fe176814ddbda1fbe006c55ca02375e5dde83539018f283219a8ba" dependencies = [ "bincode", "byteorder", @@ -1975,9 +1975,9 @@ dependencies = [ [[package]] name = "franklin-crypto" -version = "0.30.12" +version = "0.30.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82d7b8e5864df7f3747e5e64a5b87b4a57aa2a4a20c55c9e96a3a305a8143c45" +checksum = "8309d8fc22fc389d831390473b0ee9fe94e85f19a8b9229b9aec8aa73f5bcee3" dependencies = [ "arr_macro", "bit-vec 0.6.3", @@ -4550,9 +4550,9 @@ dependencies = [ [[package]] name = "proof-compression" -version = "0.152.10" +version = "0.152.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40624617ed1535349cf31671a7091703d3a31e64d6a7760a5e952c68ee1f4f0e" +checksum = "de4c8014afcd29bfe93ac3bd1ea0d9f2da06fa9895337bead3f3d0d904080e36" dependencies = [ "bincode", "byteorder", @@ -4562,6 +4562,7 @@ dependencies = [ "serde", "serde_json", "shivini", + "zksync-gpu-prover", ] [[package]] @@ -4705,7 +4706,7 @@ dependencies = [ [[package]] name = "prover_cli" -version = "0.1.0" +version = "17.1.1" dependencies = [ "anyhow", "assert_cmd", @@ 
-4738,7 +4739,7 @@ dependencies = [ [[package]] name = "prover_version" -version = "0.1.0" +version = "17.1.1" dependencies = [ "zksync_prover_fri_types", ] @@ -4945,6 +4946,12 @@ dependencies = [ "regex-syntax 0.8.5", ] +[[package]] +name = "regex-lite" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53a49587ad06b26609c52e423de037e7f57f20d53535d66e08c695f347df952a" + [[package]] name = "regex-syntax" version = "0.6.29" @@ -5070,9 +5077,9 @@ dependencies = [ [[package]] name = "rescue_poseidon" -version = "0.30.12" +version = "0.30.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c250446885c257bee70bc0f2600229ce72f03073b87fb8f5dd278dba16b11f30" +checksum = "5e631fd184b6d2f2c04f9dc75405289d99fd0d6612d8dfbb478c01bfbab648fb" dependencies = [ "addchain", "arrayvec 0.7.6", @@ -5860,9 +5867,9 @@ checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde" [[package]] name = "shivini" -version = "0.152.10" +version = "0.152.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c336213e4ec7651d2984e892326d09c195ee166493c192b0d8aad36288e5c5f" +checksum = "8937f1fe25a1ea33a40bdf560847b934fe68322c40cd54dd77ab433128022cce" dependencies = [ "bincode", "boojum", @@ -5951,9 +5958,9 @@ dependencies = [ [[package]] name = "snark_wrapper" -version = "0.30.12" +version = "0.30.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f361c2c47b71ee43f62954ce69f7730e14acb7fb3b0f2c697da02f97327c569" +checksum = "eddb498315057210abd25e2fbe2ea30ab69a07ca0c166406a3e7c056ec8fbbfd" dependencies = [ "derivative", "rand 0.4.6", @@ -7839,9 +7846,9 @@ dependencies = [ [[package]] name = "zk_evm" -version = "0.150.19" +version = "0.150.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84ee848aa90ae045457795b1c0afeb388fbd9fa1e57aa0e8791b28f405e7cc2c" +checksum = 
"f11d0310228af78e804e5e7deccd1ad6797fce1c44c3b8016722ab78dc183c4a" dependencies = [ "anyhow", "lazy_static", @@ -7849,7 +7856,7 @@ dependencies = [ "serde", "serde_json", "static_assertions", - "zk_evm_abstractions 0.150.19", + "zk_evm_abstractions 0.150.20", ] [[package]] @@ -7880,22 +7887,22 @@ dependencies = [ [[package]] name = "zk_evm_abstractions" -version = "0.150.19" +version = "0.150.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f08feaa3e3d99e1e57234fe6ba2aa062609492c6499b2344121c4a699292ab7" +checksum = "d7616edbdeeeb214211e9bdc4346b6a62c6c6118c3d2b83b7db24c01f65f6e25" dependencies = [ "anyhow", "num_enum 0.6.1", "serde", "static_assertions", - "zkevm_opcode_defs 0.150.19", + "zkevm_opcode_defs 0.150.20", ] [[package]] name = "zkevm-assembly" -version = "0.150.19" +version = "0.150.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecd4bc83f3a711d820829dccce24fa59ab4c588c2745203ec6a6ad8c871362b7" +checksum = "c2dc9539ce7f550231934e6b1faae23387fd132f1ac053b8e674d30968158bff" dependencies = [ "env_logger 0.9.3", "hex", @@ -7908,14 +7915,14 @@ dependencies = [ "smallvec", "structopt", "thiserror 1.0.69", - "zkevm_opcode_defs 0.150.19", + "zkevm_opcode_defs 0.150.20", ] [[package]] name = "zkevm_circuits" -version = "0.150.19" +version = "0.150.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "760cfbbce18f42bbecd2565de9bf658234cac2431cce9b0c1df08e9df645d467" +checksum = "6f36004572f5086c513715e11f38230e2538c159d4f5d90dc518833c6fc78293" dependencies = [ "arrayvec 0.7.6", "boojum", @@ -7927,7 +7934,7 @@ dependencies = [ "seq-macro", "serde", "smallvec", - "zkevm_opcode_defs 0.150.19", + "zkevm_opcode_defs 0.150.20", "zksync_cs_derive", ] @@ -7975,9 +7982,9 @@ dependencies = [ [[package]] name = "zkevm_opcode_defs" -version = "0.150.19" +version = "0.150.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4f2bd8ef52c8f9911dd034b91d29f087ab52f80a80f9d996deb881abbb953793" +checksum = "ce6b4a47c0e7f95b51d29ca336821321cec4bbba0acdd412c3a209270a0d37fe" dependencies = [ "bitflags 2.6.0", "blake2 0.10.6", @@ -7992,9 +7999,9 @@ dependencies = [ [[package]] name = "zkevm_test_harness" -version = "0.150.19" +version = "0.150.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e24b28c5b855e4e28d85455b48f346e9d46a00c6af84d5bbc38e5b5f7410b5cb" +checksum = "36ed8dd80455d90a51a6618a5bc07685beaad582cabca71ccef25866cd73993b" dependencies = [ "bincode", "circuit_definitions", @@ -8020,9 +8027,9 @@ dependencies = [ [[package]] name = "zksync-gpu-ffi" -version = "0.152.10" +version = "0.152.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36f6a84e4e361977a2dc5dbe783e3856e40b1050dc1b9bb3e9833a5e59c20697" +checksum = "6c3fbd4c8df140131d28b05581b19418bc5e561beb21dec6f24ca2f34343399c" dependencies = [ "cmake", "crossbeam", @@ -8035,9 +8042,9 @@ dependencies = [ [[package]] name = "zksync-gpu-prover" -version = "0.152.10" +version = "0.152.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ba58bbaf4920c635553d3dfb7796636223f55e75ae6512eb9c98f48f0a03215" +checksum = "0256175ceb3ea675d4c0ebcd690fdd45138bab1c5bc298b2e0db320e5abc0bdb" dependencies = [ "bit-vec 0.6.3", "cfg-if", @@ -8052,9 +8059,9 @@ dependencies = [ [[package]] name = "zksync-wrapper-prover" -version = "0.152.10" +version = "0.152.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4bcf41946f95a1e64ce99cde1d54966a04c5ef2c89d9a87f0fa61e39987510b" +checksum = "390d8f99cf47fade7f2fe38925f9787b3d27641a878887ae980e4ab5f6731ac0" dependencies = [ "circuit_definitions", "zkevm_test_harness", @@ -8063,7 +8070,7 @@ dependencies = [ [[package]] name = "zksync_basic_types" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "chrono", @@ -8084,9 +8091,9 @@ dependencies = [ [[package]] 
name = "zksync_bellman" -version = "0.30.12" +version = "0.30.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d06d424f7e3862d7a6715179bafffbe7a5dce17129f95ac4124502ab9f1edfb8" +checksum = "78fc3c598daf718b6fc791bfbb01c4634199e479ea9b2c82d06cd108b967d441" dependencies = [ "arrayvec 0.7.6", "bit-vec 0.6.3", @@ -8107,7 +8114,7 @@ dependencies = [ [[package]] name = "zksync_circuit_prover" -version = "0.1.0" +version = "17.1.1" dependencies = [ "anyhow", "async-trait", @@ -8137,7 +8144,7 @@ dependencies = [ [[package]] name = "zksync_circuit_prover_service" -version = "0.1.0" +version = "17.1.1" dependencies = [ "anyhow", "async-trait", @@ -8157,9 +8164,9 @@ dependencies = [ [[package]] name = "zksync_concurrency" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8312ab73d3caa55775bd531795b507fa8f76bd9dabfaeb0954fe43e8fc1323b" +checksum = "cec98400a9e8ba02bfd029eacfe7d6fb7b85b8ef00de59d6bb119d29cc9f7442" dependencies = [ "anyhow", "once_cell", @@ -8176,7 +8183,7 @@ dependencies = [ [[package]] name = "zksync_config" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "rand 0.8.5", @@ -8192,9 +8199,9 @@ dependencies = [ [[package]] name = "zksync_consensus_crypto" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b539960de98df3c3bd27d2d9b97de862027686bbb3bdfc5aaad5b74bb929a1" +checksum = "c04840825dfbe3b9f708d245c87618d5dcf28f29d7b58922971351068a0b8231" dependencies = [ "anyhow", "blst", @@ -8213,9 +8220,9 @@ dependencies = [ [[package]] name = "zksync_consensus_roles" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c49949546895a10431b9daec6ec4208ef0917ace006446d304b51f5b234ba462" +checksum = "05498eab1de26869028b5822cfa4490cac625508d427d59668dc73e8162de65f" dependencies = [ "anyhow", "bit-vec 0.6.3", @@ 
-8235,9 +8242,9 @@ dependencies = [ [[package]] name = "zksync_consensus_storage" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "feb0d6a54e7d8d2adeee4ba38662161e9309180ad497299092e5641db9fb1c1e" +checksum = "b20eb99fdd0e171a370214d2b7c99b5d4e8c11b9828a6b5705423bf653849a70" dependencies = [ "anyhow", "async-trait", @@ -8255,9 +8262,9 @@ dependencies = [ [[package]] name = "zksync_consensus_utils" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "723e2a4b056cc5af192a83163c89a6951ee75c098cc5c4a4cdc435f4232d88bd" +checksum = "f2f9fa69ef68e6a1955a1d7b33077103fb6d106b560fec0d599c6de268f5be03" dependencies = [ "anyhow", "rand 0.8.5", @@ -8267,7 +8274,7 @@ dependencies = [ [[package]] name = "zksync_contracts" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "envy", "hex", @@ -8280,7 +8287,7 @@ dependencies = [ [[package]] name = "zksync_core_leftovers" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "ctrlc", @@ -8294,7 +8301,7 @@ dependencies = [ [[package]] name = "zksync_crypto_primitives" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "blake2 0.10.6", @@ -8310,9 +8317,9 @@ dependencies = [ [[package]] name = "zksync_cs_derive" -version = "0.30.12" +version = "0.30.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23237b019a469bfa59c11108beff84a63a43f52fa3afbf1b461527031fc47644" +checksum = "97ab7469afcd9e1cb220fe17b3c9f2abe031648b94add97da37065c58be08554" dependencies = [ "proc-macro-error", "proc-macro2 1.0.92", @@ -8322,7 +8329,7 @@ dependencies = [ [[package]] name = "zksync_dal" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "bigdecimal", @@ -8357,7 +8364,7 @@ dependencies = [ [[package]] name = "zksync_db_connection" -version = "0.1.0" +version = 
"26.1.0-non-semver-compat" dependencies = [ "anyhow", "rand 0.8.5", @@ -8373,7 +8380,7 @@ dependencies = [ [[package]] name = "zksync_env_config" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "envy", @@ -8384,7 +8391,7 @@ dependencies = [ [[package]] name = "zksync_eth_client" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "async-trait", "jsonrpsee", @@ -8401,7 +8408,7 @@ dependencies = [ [[package]] name = "zksync_eth_signer" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "async-trait", "rlp", @@ -8412,9 +8419,9 @@ dependencies = [ [[package]] name = "zksync_ff" -version = "0.30.12" +version = "0.30.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d5aa518ed0ea7ef737d50de02025f5a593dbb11104b3c1bf5a00f39581b47dc" +checksum = "6583c2db6dc787600879d27ec98d2eb628a757ee41831e54f8be1dae4acc599f" dependencies = [ "byteorder", "hex", @@ -8425,9 +8432,9 @@ dependencies = [ [[package]] name = "zksync_ff_derive" -version = "0.30.12" +version = "0.30.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e33b43100a1278e2f64820368db8751c2441860ea74ab5749074cf8f864647af" +checksum = "8f62e93dde881d8dd44d1864c7682394dde6d18e582fc5af78768221a1766fdf" dependencies = [ "num-bigint 0.4.6", "num-integer", @@ -8440,9 +8447,9 @@ dependencies = [ [[package]] name = "zksync_kzg" -version = "0.150.19" +version = "0.150.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9da880b8282a97d9dfd6ac9f0189d310c0602059a8de20aa66a883979d6adba" +checksum = "174f82592590901cbcf2b298059c89f817b404299ffbd050a3915ea72357f545" dependencies = [ "boojum", "derivative", @@ -8457,7 +8464,7 @@ dependencies = [ [[package]] name = "zksync_l1_contract_interface" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "circuit_definitions", @@ -8474,7 +8481,7 @@ dependencies = [ [[package]] name = 
"zksync_mini_merkle_tree" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "once_cell", "zksync_basic_types", @@ -8483,7 +8490,7 @@ dependencies = [ [[package]] name = "zksync_multivm" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "circuit_sequencer_api", @@ -8498,7 +8505,7 @@ dependencies = [ "zk_evm 0.133.0", "zk_evm 0.140.0", "zk_evm 0.141.0", - "zk_evm 0.150.19", + "zk_evm 0.150.20", "zksync_contracts", "zksync_mini_merkle_tree", "zksync_system_constants", @@ -8509,7 +8516,7 @@ dependencies = [ [[package]] name = "zksync_object_store" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -8532,9 +8539,9 @@ dependencies = [ [[package]] name = "zksync_pairing" -version = "0.30.12" +version = "0.30.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43f0d96f3e386f3b4c76a614d73b71714d6712e917d462bf8053b8af352da0b3" +checksum = "baafdd03ca7a48dc9b6808be3630f2d8a003aa425d71946e9158d8c0aeb1cc79" dependencies = [ "byteorder", "cfg-if", @@ -8545,7 +8552,7 @@ dependencies = [ [[package]] name = "zksync_proof_fri_compressor" -version = "0.1.0" +version = "17.1.1" dependencies = [ "anyhow", "async-trait", @@ -8582,9 +8589,9 @@ dependencies = [ [[package]] name = "zksync_protobuf" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8986ad796f8e00d8999fee72effba1a21bce40f5f877d681ac9cd89a94834d8" +checksum = "d9032e12528c2466293b206d6edb53b7e900e4a4cc4573e4d075ac2dc00e1b55" dependencies = [ "anyhow", "bit-vec 0.6.3", @@ -8603,9 +8610,9 @@ dependencies = [ [[package]] name = "zksync_protobuf_build" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d870b31995e3acb8e47afeb68ebeeffcf6121e70020e65b3d5d31692115d236" +checksum = "7c644fc8ef3c4d343ea42cebd5551e3562933f15dd9b0e68a52c2657603eb0f5" dependencies = [ 
"anyhow", "heck 0.5.0", @@ -8620,7 +8627,7 @@ dependencies = [ [[package]] name = "zksync_protobuf_config" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "hex", @@ -8640,7 +8647,7 @@ dependencies = [ [[package]] name = "zksync_prover_autoscaler" -version = "0.1.0" +version = "17.1.1" dependencies = [ "anyhow", "async-trait", @@ -8677,7 +8684,7 @@ dependencies = [ [[package]] name = "zksync_prover_dal" -version = "0.1.0" +version = "17.1.1" dependencies = [ "sqlx", "strum", @@ -8687,7 +8694,7 @@ dependencies = [ [[package]] name = "zksync_prover_fri" -version = "0.1.0" +version = "17.1.1" dependencies = [ "anyhow", "async-trait", @@ -8721,7 +8728,7 @@ dependencies = [ [[package]] name = "zksync_prover_fri_gateway" -version = "0.1.0" +version = "17.1.1" dependencies = [ "anyhow", "async-trait", @@ -8747,7 +8754,7 @@ dependencies = [ [[package]] name = "zksync_prover_fri_types" -version = "0.1.0" +version = "17.1.1" dependencies = [ "circuit_definitions", "serde", @@ -8757,7 +8764,7 @@ dependencies = [ [[package]] name = "zksync_prover_fri_utils" -version = "0.1.0" +version = "17.1.1" dependencies = [ "anyhow", "regex", @@ -8775,7 +8782,7 @@ dependencies = [ [[package]] name = "zksync_prover_interface" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "chrono", "circuit_definitions", @@ -8784,6 +8791,7 @@ dependencies = [ "serde", "serde_with", "strum", + "zksync_bellman", "zksync_object_store", "zksync_types", "zksync_vm_interface", @@ -8791,7 +8799,7 @@ dependencies = [ [[package]] name = "zksync_prover_job_monitor" -version = "0.1.0" +version = "17.1.1" dependencies = [ "anyhow", "async-trait", @@ -8813,7 +8821,7 @@ dependencies = [ [[package]] name = "zksync_prover_job_processor" -version = "0.1.0" +version = "17.1.1" dependencies = [ "anyhow", "async-trait", @@ -8828,7 +8836,7 @@ dependencies = [ [[package]] name = "zksync_prover_keystore" -version = "0.1.0" +version = "17.1.1" dependencies = [ 
"anyhow", "bincode", @@ -8841,6 +8849,7 @@ dependencies = [ "hex", "md5", "once_cell", + "proof-compression", "serde", "serde_json", "sha3 0.10.8", @@ -8855,7 +8864,7 @@ dependencies = [ [[package]] name = "zksync_queued_job_processor" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -8867,9 +8876,9 @@ dependencies = [ [[package]] name = "zksync_solidity_vk_codegen" -version = "0.30.12" +version = "0.30.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb10f377dcc24fe2268cc5f530c16af1c879a791570d8fe64064b58ba143c7cc" +checksum = "bb05a12f5552d7947427f755e29f548ce94733851f1fa16edaf8b75c28033e73" dependencies = [ "ethereum-types", "franklin-crypto", @@ -8884,7 +8893,7 @@ dependencies = [ [[package]] name = "zksync_system_constants" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "once_cell", "zksync_basic_types", @@ -8892,9 +8901,10 @@ dependencies = [ [[package]] name = "zksync_types" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", + "async-trait", "bigdecimal", "blake2 0.10.6", "chrono", @@ -8923,7 +8933,7 @@ dependencies = [ [[package]] name = "zksync_utils" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "futures 0.3.31", @@ -8938,7 +8948,7 @@ dependencies = [ [[package]] name = "zksync_vk_setup_data_generator_server_fri" -version = "0.1.0" +version = "17.1.1" dependencies = [ "anyhow", "bincode", @@ -8962,7 +8972,7 @@ dependencies = [ [[package]] name = "zksync_vlog" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "chrono", @@ -8992,8 +9002,8 @@ source = "git+https://github.com/matter-labs/vm2.git?rev=457d8a7eea9093af9440662 dependencies = [ "enum_dispatch", "primitive-types", - "zk_evm_abstractions 0.150.19", - "zkevm_opcode_defs 0.150.19", + "zk_evm_abstractions 0.150.20", + "zkevm_opcode_defs 0.150.20", "zksync_vm2_interface", ] @@ -9007,7 
+9017,7 @@ dependencies = [ [[package]] name = "zksync_vm_interface" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -9023,7 +9033,7 @@ dependencies = [ [[package]] name = "zksync_web3_decl" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -9044,7 +9054,7 @@ dependencies = [ [[package]] name = "zksync_witness_generator" -version = "0.1.0" +version = "17.1.1" dependencies = [ "anyhow", "async-trait", @@ -9082,7 +9092,7 @@ dependencies = [ [[package]] name = "zksync_witness_vector_generator" -version = "0.1.0" +version = "17.1.1" dependencies = [ "anyhow", "async-trait", diff --git a/prover/Cargo.toml b/prover/Cargo.toml index 7e571db3f025..194f6b90a2e7 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -4,7 +4,7 @@ members = ["crates/bin/*", "crates/lib/*"] resolver = "2" [workspace.package] -version = "0.1.0" +version = "17.1.1" # x-release-please-version edition = "2021" authors = ["The Matter Labs Team "] homepage = "https://zksync.io/" @@ -67,49 +67,47 @@ tracing-test = "0.2.5" url = "2.5.2" vise = "0.2.0" -# Proving dependencies -circuit_definitions = "=0.150.19" -circuit_sequencer_api = "=0.150.19" -zkevm_test_harness = "=0.150.19" -proof-compression-gpu = { package = "proof-compression", version = "=0.152.10"} -fflonk-gpu = { package = "fflonk-cuda", version = "=0.152.10"} -fflonk = "0.30.12" -franklin-crypto = "0.30.12" +circuit_definitions = "=0.150.20" +circuit_sequencer_api = "=0.150.20" +zkevm_test_harness = "=0.150.20" +fflonk = "=0.30.13" +franklin-crypto = "=0.30.13" # GPU proving dependencies -wrapper_prover = { package = "zksync-wrapper-prover", version = "=0.152.10"} -shivini = "=0.152.10" -boojum-cuda = "=0.152.10" +proof-compression-gpu = { package = "proof-compression", version = "=0.152.11"} +fflonk-gpu = { package = "fflonk-cuda", version = "=0.152.11"} +wrapper_prover = { package = "zksync-wrapper-prover", version = "=0.152.11"} 
+shivini = "=0.152.11" +boojum-cuda = "=0.152.11" # Core workspace dependencies -zksync_multivm = { path = "../core/lib/multivm", version = "0.1.0" } -zksync_vlog = { path = "../core/lib/vlog" } -zksync_basic_types = { path = "../core/lib/basic_types" } -zksync_config = { path = "../core/lib/config" } -zksync_dal = { path = "../core/lib/dal" } -zksync_db_connection = { path = "../core/lib/db_connection" } -zksync_env_config = { path = "../core/lib/env_config" } -zksync_object_store = { path = "../core/lib/object_store" } -zksync_prover_interface = { path = "../core/lib/prover_interface" } -zksync_queued_job_processor = { path = "../core/lib/queued_job_processor" } -zksync_system_constants = { path = "../core/lib/constants" } -zksync_types = { path = "../core/lib/types" } -zksync_utils = { path = "../core/lib/utils" } -zksync_eth_client = { path = "../core/lib/eth_client" } -zksync_contracts = { path = "../core/lib/contracts" } -zksync_core_leftovers = { path = "../core/lib/zksync_core_leftovers" } -zksync_periodic_job = { path = "../core/lib/periodic_job" } -zksync_protobuf_config = { path = "../core/lib/protobuf_config" } +zksync_multivm = { version = "=26.1.0-non-semver-compat", path = "../core/lib/multivm" } +zksync_vlog = { version = "=26.1.0-non-semver-compat", path = "../core/lib/vlog" } +zksync_basic_types = { version = "=26.1.0-non-semver-compat", path = "../core/lib/basic_types" } +zksync_config = { version = "=26.1.0-non-semver-compat", path = "../core/lib/config" } +zksync_dal = { version = "=26.1.0-non-semver-compat", path = "../core/lib/dal" } +zksync_db_connection = { version = "=26.1.0-non-semver-compat", path = "../core/lib/db_connection" } +zksync_env_config = { version = "=26.1.0-non-semver-compat", path = "../core/lib/env_config" } +zksync_object_store = { version = "=26.1.0-non-semver-compat", path = "../core/lib/object_store" } +zksync_prover_interface = { version = "=26.1.0-non-semver-compat", path = "../core/lib/prover_interface" } 
+zksync_queued_job_processor = { version = "=26.1.0-non-semver-compat", path = "../core/lib/queued_job_processor" } +zksync_system_constants = { version = "=26.1.0-non-semver-compat", path = "../core/lib/constants" } +zksync_types = { version = "=26.1.0-non-semver-compat", path = "../core/lib/types" } +zksync_utils = { version = "=26.1.0-non-semver-compat", path = "../core/lib/utils" } +zksync_eth_client = { version = "=26.1.0-non-semver-compat", path = "../core/lib/eth_client" } +zksync_contracts = { version = "=26.1.0-non-semver-compat", path = "../core/lib/contracts" } +zksync_core_leftovers = { version = "=26.1.0-non-semver-compat", path = "../core/lib/zksync_core_leftovers" } +zksync_protobuf_config = { version = "=26.1.0-non-semver-compat", path = "../core/lib/protobuf_config" } # Prover workspace dependencies -zksync_prover_dal = { path = "crates/lib/prover_dal" } -zksync_prover_fri_types = { path = "crates/lib/prover_fri_types" } -zksync_prover_fri_utils = { path = "crates/lib/prover_fri_utils" } -zksync_prover_keystore = { path = "crates/lib/keystore" } -zksync_vk_setup_data_generator_server_fri = { path = "crates/bin/vk_setup_data_generator_server_fri" } -zksync_prover_job_processor = { path = "crates/lib/prover_job_processor" } -zksync_circuit_prover_service = { path = "crates/lib/circuit_prover_service" } -zksync_prover_job_monitor = { path = "crates/bin/prover_job_monitor" } +zksync_prover_dal = { version = "17.1.1", path = "crates/lib/prover_dal" } +zksync_prover_fri_types = { version = "17.1.1", path = "crates/lib/prover_fri_types" } +zksync_prover_fri_utils = { version = "17.1.1", path = "crates/lib/prover_fri_utils" } +zksync_prover_keystore = { version = "17.1.1", path = "crates/lib/keystore" } +zksync_vk_setup_data_generator_server_fri = { version = "17.1.1", path = "crates/bin/vk_setup_data_generator_server_fri" } +zksync_prover_job_processor = { version = "17.1.1", path = "crates/lib/prover_job_processor" } +zksync_circuit_prover_service = { 
version = "17.1.1", path = "crates/lib/circuit_prover_service" } +zksync_prover_job_monitor = { version = "17.1.1", path = "crates/bin/prover_job_monitor" } # for `perf` profiling [profile.perf] diff --git a/prover/crates/bin/proof_fri_compressor/src/compressor.rs b/prover/crates/bin/proof_fri_compressor/src/compressor.rs index 3671fa183b5d..581e1fed8a48 100644 --- a/prover/crates/bin/proof_fri_compressor/src/compressor.rs +++ b/prover/crates/bin/proof_fri_compressor/src/compressor.rs @@ -2,27 +2,14 @@ use std::{sync::Arc, time::Instant}; use anyhow::Context as _; use async_trait::async_trait; -use circuit_sequencer_api::proof::FinalProof; -use fflonk_gpu::{FflonkSnarkVerifierCircuit, FflonkSnarkVerifierCircuitProof}; +use proof_compression_gpu::{run_proof_chain, SnarkWrapper, SnarkWrapperProof}; use tokio::task::JoinHandle; -use wrapper_prover::{GPUWrapperConfigs, WrapperProver}; -use zkevm_test_harness::proof_wrapper_utils::{get_trusted_setup, DEFAULT_WRAPPER_CONFIG}; use zksync_object_store::ObjectStore; use zksync_prover_dal::{ConnectionPool, Prover, ProverDal}; use zksync_prover_fri_types::{ circuit_definitions::{ boojum::field::goldilocks::GoldilocksField, - circuit_definitions::{ - aux_layer::{ - wrapper::ZkSyncCompressionWrapper, ZkSyncCompressionForWrapperCircuit, - ZkSyncCompressionLayerCircuit, ZkSyncCompressionProof, - ZkSyncCompressionProofForWrapper, ZkSyncCompressionVerificationKeyForWrapper, - }, - recursion_layer::{ - ZkSyncRecursionLayerProof, ZkSyncRecursionLayerStorageType, - ZkSyncRecursionVerificationKey, - }, - }, + circuit_definitions::recursion_layer::ZkSyncRecursionLayerProof, zkevm_circuits::scheduler::block_header::BlockAuxilaryOutputWitness, }, get_current_pod_name, AuxOutputWitnessWrapper, FriProofWrapper, @@ -39,23 +26,16 @@ use crate::metrics::METRICS; pub struct ProofCompressor { blob_store: Arc, pool: ConnectionPool, - compression_mode: u8, max_attempts: u32, protocol_version: ProtocolSemanticVersion, keystore: Keystore, is_fflonk: 
bool, } -pub enum Proof { - Plonk(Box), - Fflonk(FflonkSnarkVerifierCircuitProof), -} - impl ProofCompressor { pub fn new( blob_store: Arc, pool: ConnectionPool, - compression_mode: u8, max_attempts: u32, protocol_version: ProtocolSemanticVersion, keystore: Keystore, @@ -64,7 +44,6 @@ impl ProofCompressor { Self { blob_store, pool, - compression_mode, max_attempts, protocol_version, keystore, @@ -85,151 +64,6 @@ impl ProofCompressor { } array } - - #[tracing::instrument(skip(proof, _compression_mode))] - pub fn generate_plonk_proof( - proof: ZkSyncRecursionLayerProof, - _compression_mode: u8, - keystore: Keystore, - ) -> anyhow::Result { - let scheduler_vk = keystore - .load_recursive_layer_verification_key( - ZkSyncRecursionLayerStorageType::SchedulerCircuit as u8, - ) - .context("get_recursiver_layer_vk_for_circuit_type()")?; - - let wrapper_proof = { - let crs = get_trusted_setup(); - let wrapper_config = DEFAULT_WRAPPER_CONFIG; - let mut prover = WrapperProver::::new(&crs, wrapper_config).unwrap(); - - prover - .generate_setup_data(scheduler_vk.into_inner()) - .unwrap(); - prover.generate_proofs(proof.into_inner()).unwrap(); - - prover.get_wrapper_proof().unwrap() - }; - - // (Re)serialization should always succeed. - let serialized = bincode::serialize(&wrapper_proof) - .expect("Failed to serialize proof with ZkSyncSnarkWrapperCircuit"); - - // For sending to L1, we can use the `FinalProof` type, that has a generic circuit inside, that is not used for serialization. - // So `FinalProof` and `Proof>>` are compatible on serialization bytecode level. 
- let final_proof: FinalProof = - bincode::deserialize(&serialized).expect("Failed to deserialize final proof"); - Ok(final_proof) - } - - #[tracing::instrument(skip(proof, compression_mode, keystore))] - pub fn generate_fflonk_proof( - proof: ZkSyncRecursionLayerProof, - compression_mode: u8, - keystore: Keystore, - ) -> anyhow::Result { - let scheduler_vk = keystore - .load_recursive_layer_verification_key( - ZkSyncRecursionLayerStorageType::SchedulerCircuit as u8, - ) - .context("get_recursiver_layer_vk_for_circuit_type()")?; - - // compress proof step by step: 1 -> 2 -> 3 -> 4 -> 5(wrapper) - let (compression_wrapper_proof, compression_wrapper_vk) = Self::compress_proof( - &keystore, - proof.into_inner(), - scheduler_vk.into_inner(), - compression_mode, - )?; - - // construct fflonk snark verifier circuit - let wrapper_function = - ZkSyncCompressionWrapper::from_numeric_circuit_type(compression_mode); - let fixed_parameters = compression_wrapper_vk.fixed_parameters.clone(); - let circuit = FflonkSnarkVerifierCircuit { - witness: Some(compression_wrapper_proof), - vk: compression_wrapper_vk, - fixed_parameters, - transcript_params: (), - wrapper_function, - }; - - tracing::info!("Proving FFLONK snark verifier"); - - let setup = keystore.load_fflonk_snark_verifier_setup_data()?; - - tracing::info!("Loaded setup data for FFLONK verification"); - - let proof = fflonk_gpu::gpu_prove_fflonk_snark_verifier_circuit_with_precomputation( - &circuit, - &setup, - &setup.get_verification_key(), - ); - tracing::info!("Finished proof generation"); - Ok(proof) - } - - pub fn compress_proof( - keystore: &Keystore, - proof: ZkSyncCompressionProof, - vk: ZkSyncRecursionVerificationKey, - compression_steps: u8, - ) -> anyhow::Result<( - ZkSyncCompressionProofForWrapper, - ZkSyncCompressionVerificationKeyForWrapper, - )> { - let worker = franklin_crypto::boojum::worker::Worker::new(); - let mut compression_circuit = - ZkSyncCompressionLayerCircuit::from_witness_and_vk(Some(proof), 
vk.clone(), 1); - let mut compression_wrapper_circuit = None; - - for step_idx in 1..compression_steps { - tracing::info!("Proving compression {:?}", step_idx); - let setup_data = keystore.load_compression_setup_data(step_idx)?; - let (proof, vk) = - proof_compression_gpu::prove_compression_layer_circuit_with_precomputations( - compression_circuit.clone(), - &setup_data.setup, - setup_data.finalization_hint, - setup_data.vk, - &worker, - ); - tracing::info!("Proof for compression {:?} is generated!", step_idx); - - if step_idx + 1 == compression_steps { - compression_wrapper_circuit = - Some(ZkSyncCompressionForWrapperCircuit::from_witness_and_vk( - Some(proof), - vk, - compression_steps, - )); - } else { - compression_circuit = ZkSyncCompressionLayerCircuit::from_witness_and_vk( - Some(proof), - vk, - step_idx + 1, - ); - } - } - - // last wrapping step - tracing::info!("Proving compression {} for wrapper", compression_steps); - - let setup_data = keystore.load_compression_wrapper_setup_data(compression_steps)?; - let (proof, vk) = - proof_compression_gpu::prove_compression_wrapper_circuit_with_precomputations( - compression_wrapper_circuit.unwrap(), - &setup_data.setup, - setup_data.finalization_hint, - setup_data.vk, - &worker, - ); - tracing::info!( - "Proof for compression wrapper {} is generated!", - compression_steps - ); - Ok((proof, vk)) - } } #[async_trait] @@ -237,7 +71,7 @@ impl JobProcessor for ProofCompressor { type Job = ZkSyncRecursionLayerProof; type JobId = L1BatchNumber; - type JobArtifacts = Proof; + type JobArtifacts = SnarkWrapperProof; const SERVICE_NAME: &'static str = "ProofCompressor"; @@ -292,23 +126,19 @@ impl JobProcessor for ProofCompressor { job: ZkSyncRecursionLayerProof, _started_at: Instant, ) -> JoinHandle> { - let compression_mode = self.compression_mode; let keystore = self.keystore.clone(); - let is_fflonk = self.is_fflonk; + let snark_wrapper_mode = if self.is_fflonk { + SnarkWrapper::FFfonk + } else { + SnarkWrapper::Plonk + 
}; + tokio::task::spawn_blocking(move || { - if !is_fflonk { - Ok(Proof::Plonk(Box::new(Self::generate_plonk_proof( - job, - compression_mode, - keystore, - )?))) - } else { - Ok(Proof::Fflonk(Self::generate_fflonk_proof( - job, - compression_mode, - keystore, - )?)) - } + Ok(run_proof_chain( + snark_wrapper_mode, + &keystore, + job.into_inner(), + )) }) } @@ -333,16 +163,18 @@ impl JobProcessor for ProofCompressor { Self::aux_output_witness_to_array(aux_output_witness_wrapper.0); let l1_batch_proof = match artifacts { - Proof::Plonk(proof) => L1BatchProofForL1::Plonk(PlonkL1BatchProofForL1 { - aggregation_result_coords, - scheduler_proof: *proof, - protocol_version: self.protocol_version, - }), - Proof::Fflonk(proof) => L1BatchProofForL1::Fflonk(FflonkL1BatchProofForL1 { + SnarkWrapperProof::Plonk(proof) => L1BatchProofForL1::Plonk(PlonkL1BatchProofForL1 { aggregation_result_coords, scheduler_proof: proof, protocol_version: self.protocol_version, }), + SnarkWrapperProof::FFfonk(proof) => { + L1BatchProofForL1::Fflonk(FflonkL1BatchProofForL1 { + aggregation_result_coords, + scheduler_proof: proof, + protocol_version: self.protocol_version, + }) + } }; let blob_save_started_at = Instant::now(); diff --git a/prover/crates/bin/proof_fri_compressor/src/main.rs b/prover/crates/bin/proof_fri_compressor/src/main.rs index dae03ab41465..bb46c1e7cb75 100644 --- a/prover/crates/bin/proof_fri_compressor/src/main.rs +++ b/prover/crates/bin/proof_fri_compressor/src/main.rs @@ -1,7 +1,7 @@ #![allow(incomplete_features)] // We have to use generic const exprs. 
#![feature(generic_const_exprs)] -use std::{env, time::Duration}; +use std::time::Duration; use anyhow::Context as _; use clap::Parser; @@ -96,7 +96,6 @@ async fn main() -> anyhow::Result<()> { let proof_compressor = ProofCompressor::new( blob_store, pool, - config.compression_mode, config.max_attempts, protocol_version, keystore, @@ -114,7 +113,7 @@ async fn main() -> anyhow::Result<()> { }) .expect("Error setting Ctrl+C handler"); // Setting handler should always succeed. - setup_crs_keys(&config, is_fflonk); + setup_crs_keys(&config); tracing::info!("Starting proof compressor"); @@ -139,20 +138,10 @@ async fn main() -> anyhow::Result<()> { Ok(()) } -fn setup_crs_keys(config: &FriProofCompressorConfig, is_fflonk: bool) { - if is_fflonk { - download_initial_setup_keys_if_not_present( - &config.universal_fflonk_setup_path, - &config.universal_fflonk_setup_download_url, - ); - - env::set_var("COMPACT_CRS_FILE", &config.universal_fflonk_setup_path); - return; - } - +fn setup_crs_keys(config: &FriProofCompressorConfig) { download_initial_setup_keys_if_not_present( &config.universal_setup_path, &config.universal_setup_download_url, ); - env::set_var("CRS_FILE", &config.universal_setup_path); + std::env::set_var("COMPACT_CRS_FILE", &config.universal_setup_path); } diff --git a/prover/crates/bin/prover_cli/src/config/mod.rs b/prover/crates/bin/prover_cli/src/config/mod.rs index b3df2e7d2c56..88cf3a55f908 100644 --- a/prover/crates/bin/prover_cli/src/config/mod.rs +++ b/prover/crates/bin/prover_cli/src/config/mod.rs @@ -6,7 +6,7 @@ pub fn get_envfile() -> anyhow::Result { if let Ok(envfile) = std::env::var("PLI__CONFIG") { return Ok(envfile.into()); } - Ok(Workspace::locate().core().join("etc/pliconfig")) + Ok(Workspace::locate().root().join("etc/pliconfig")) } pub fn load_envfile(path: impl AsRef) -> anyhow::Result<()> { diff --git a/prover/crates/bin/prover_cli/src/helper.rs b/prover/crates/bin/prover_cli/src/helper.rs index 7fe0c990e4e0..b793ce5f2be1 100644 --- 
a/prover/crates/bin/prover_cli/src/helper.rs +++ b/prover/crates/bin/prover_cli/src/helper.rs @@ -24,7 +24,7 @@ fn read_file_to_json_value(path: &PathBuf) -> serde_json::Value { } fn load_contract_if_present(path: &str) -> Contract { - let path = Workspace::locate().core().join(path); + let path = Workspace::locate().root().join(path); path.exists() .then(|| { serde_json::from_value(read_file_to_json_value(&path)["abi"].take()).unwrap_or_else( diff --git a/prover/crates/bin/vk_setup_data_generator_server_fri/Cargo.toml b/prover/crates/bin/vk_setup_data_generator_server_fri/Cargo.toml index f385c33dd6ad..895d43ae42b7 100644 --- a/prover/crates/bin/vk_setup_data_generator_server_fri/Cargo.toml +++ b/prover/crates/bin/vk_setup_data_generator_server_fri/Cargo.toml @@ -40,4 +40,4 @@ proptest.workspace = true [features] default = [] -gpu = ["zksync_prover_keystore/gpu", "proof-compression-gpu", "shivini"] +gpu = ["zksync_prover_keystore/gpu", "proof-compression-gpu/allocator", "shivini"] diff --git a/prover/crates/bin/vk_setup_data_generator_server_fri/src/main.rs b/prover/crates/bin/vk_setup_data_generator_server_fri/src/main.rs index edd88846d1bc..3e5370b2c888 100644 --- a/prover/crates/bin/vk_setup_data_generator_server_fri/src/main.rs +++ b/prover/crates/bin/vk_setup_data_generator_server_fri/src/main.rs @@ -1,6 +1,5 @@ -#![feature(allocator_api)] -#![allow(dead_code)] // todo: remove after setup is generated -#![allow(unused_imports)] // todo: remove after setup is generated +#![feature(allocator_api, generic_const_exprs)] +#![allow(incomplete_features)] //! Tool to generate different types of keys used by the proving system. //! 
@@ -12,60 +11,41 @@ use clap::{Parser, Subcommand}; use commitment_generator::read_and_update_contract_toml; use indicatif::{ProgressBar, ProgressStyle}; #[cfg(feature = "gpu")] -use shivini::ProverContext; +use proof_compression_gpu::{ + precompute_proof_chain_with_fflonk, precompute_proof_chain_with_plonk, BlobStorageExt, +}; use tracing::level_filters::LevelFilter; use zkevm_test_harness::{ - boojum::worker::Worker, compute_setups::{ basic_vk_count, generate_base_layer_vks, generate_recursive_layer_vks, recursive_layer_vk_count, }, - data_source::{in_memory_data_source::InMemoryDataSource, SetupDataSource}, - proof_wrapper_utils::{ - check_trusted_setup_file_existace, get_wrapper_setup_and_vk_from_scheduler_vk, - WrapperConfig, - }, -}; -use zksync_prover_fri_types::{ - circuit_definitions::circuit_definitions::recursion_layer::ZkSyncRecursionLayerStorageType, - ProverServiceDataKey, + data_source::in_memory_data_source::InMemoryDataSource, }; -#[cfg(feature = "gpu")] -use zksync_prover_keystore::setup_data_generator::get_fflonk_snark_verifier_setup_and_vk; +use zksync_prover_fri_types::ProverServiceDataKey; use zksync_prover_keystore::{ keystore::Keystore, setup_data_generator::{CPUSetupDataGenerator, GPUSetupDataGenerator, SetupDataGenerator}, }; -#[cfg(feature = "gpu")] -use crate::utils::{ - generate_compression_for_wrapper_vks, generate_compression_vks, - get_plonk_wrapper_setup_and_vk_from_scheduler_vk, -}; - mod commitment_generator; -mod utils; mod vk_commitment_helper; #[cfg(test)] mod tests; + /// Generates new verification keys, and stores them in `keystore`. /// Jobs describe how many generators can run in parallel (each one is around 30 GB). /// If quiet is true, it doesn't display any progress bar. fn generate_vks(keystore: &Keystore, jobs: usize, quiet: bool) -> anyhow::Result<()> { - // Start by checking the trusted setup existence. - // This is used at the last step, but we want to fail early if user didn't configure everything - // correctly. 
- check_trusted_setup_file_existace(); - let progress_bar = if quiet { None } else { let count = basic_vk_count() + recursive_layer_vk_count() + 2; let progress_bar = ProgressBar::new(count as u64); progress_bar.set_style(ProgressStyle::default_bar() - .template("{spinner:.green} [{elapsed_precise}] [{wide_bar:.cyan/blue}] {pos:>7}/{len:7} ({eta})") - .progress_chars("#>-")); + .template("{spinner:.green} [{elapsed_precise}] [{wide_bar:.cyan/blue}] {pos:>7}/{len:7} ({eta})") + .progress_chars("#>-")); Some(progress_bar) }; @@ -89,66 +69,7 @@ fn generate_vks(keystore: &Keystore, jobs: usize, quiet: bool) -> anyhow::Result }) .map_err(|err| anyhow::anyhow!("Failed generating recursive vk's: {err}"))?; - #[cfg(feature = "gpu")] - { - let config = WrapperConfig::new(5); - let worker = Worker::new(); - - tracing::info!("Creating prover context"); - - let _context = ProverContext::create().context("failed initializing gpu prover context")?; - tracing::info!("Generating verification keys for compression layers."); - generate_compression_vks(config, &mut in_memory_source, &worker); - - tracing::info!("Generating verification keys for compression for wrapper."); - - generate_compression_for_wrapper_vks(config, &mut in_memory_source, &worker); - - tracing::info!("Saving keys & hints"); - } - - keystore.save_keys_from_data_source(&in_memory_source)?; - - // Generate snark VK - let scheduler_vk = in_memory_source - .get_recursion_layer_vk(ZkSyncRecursionLayerStorageType::SchedulerCircuit as u8) - .map_err(|err| anyhow::anyhow!("Failed to get scheduler vk: {err}"))?; - - tracing::info!("Generating PLONK verification keys for snark wrapper."); - - let (_, plonk_vk) = - get_wrapper_setup_and_vk_from_scheduler_vk(scheduler_vk.clone(), WrapperConfig::new(1)); - - keystore - .save_snark_verification_key(plonk_vk) - .context("save_plonk_snark_vk")?; - - if let Some(p) = pb.lock().unwrap().as_ref() { - p.inc(1) - } - - tracing::info!("PLONK vk is generated"); - - #[cfg(feature = 
"gpu")] - { - tracing::info!("Generating FFLONK verification keys for snark wrapper."); - - let (_, fflonk_vk) = get_fflonk_snark_verifier_setup_and_vk(&mut in_memory_source); - - keystore - .save_fflonk_snark_verification_key(fflonk_vk) - .context("save_fflonk_snark_vk")?; - - if let Some(p) = pb.lock().unwrap().as_ref() { - p.inc(1) - } - - tracing::info!("FFLONK vk is generated"); - } - - // Let's also update the commitments file. - let commitments = keystore.generate_commitments()?; - keystore.save_commitments(&commitments) + keystore.save_keys_from_data_source(&in_memory_source) } #[derive(Debug, Parser)] @@ -171,9 +92,6 @@ enum CircuitSelector { Recursive, /// Select circuits from basic group. Basic, - Compression, - CompressionWrapper, - Snark, } #[derive(Debug, Parser)] @@ -215,6 +133,10 @@ enum Command { #[arg(long)] quiet: bool, }, + #[command(name = "generate-compressor-data")] + GenerateCompressorPrecomputations, + #[command(name = "generate-crs")] + GenerateCompactCrs, /// Generates setup keys (used by the CPU prover). 
#[command(name = "generate-sk")] GenerateSetupKeys { @@ -280,17 +202,6 @@ fn generate_setup_keys( .numeric_circuit .expect("--numeric-circuit must be provided"), ), - CircuitSelector::Compression => ProverServiceDataKey::new_compression( - options - .numeric_circuit - .expect("--numeric-circuit must be provided"), - ), - CircuitSelector::CompressionWrapper => ProverServiceDataKey::new_compression_wrapper( - options - .numeric_circuit - .expect("--numeric-circuit must be provided"), - ), - CircuitSelector::Snark => ProverServiceDataKey::snark(), }; let digest = generator @@ -325,7 +236,6 @@ fn main() -> anyhow::Result<()> { read_and_update_contract_toml(&keystore, dryrun) } - Command::GenerateSetupKeys { options } => { let generator = CPUSetupDataGenerator { keystore: keystore_from_optional_path( @@ -344,5 +254,44 @@ fn main() -> anyhow::Result<()> { }; generate_setup_keys(&generator, &options) } + Command::GenerateCompressorPrecomputations => { + #[cfg(not(feature = "gpu"))] + { + anyhow::bail!("Must compile with --gpu feature to use this option.") + } + #[cfg(feature = "gpu")] + { + let keystore = Keystore::locate(); + precompute_proof_chain_with_plonk(&keystore); + precompute_proof_chain_with_fflonk(&keystore); + + let commitments = keystore.generate_commitments()?; + keystore.save_commitments(&commitments) + } + } + Command::GenerateCompactCrs => { + #[cfg(not(feature = "gpu"))] + { + anyhow::bail!("Must compile with --gpu feature to use this option.") + } + #[cfg(feature = "gpu")] + { + let keystore = Keystore::locate(); + + if std::env::var("COMPACT_CRS_FILE").is_err() { + return Err(anyhow::anyhow!("COMPACT_CRS_FILE env variable is not set")); + } + + if std::env::var("IGNITION_TRANSCRIPT_PATH").is_err() { + return Err(anyhow::anyhow!( + "IGNITION_TRANSCRIPT_PATH env variable is not set" + )); + } + + Ok(proof_compression_gpu::create_compact_raw_crs( + keystore.write_compact_raw_crs(), + )) + } + } } } diff --git 
a/prover/crates/bin/vk_setup_data_generator_server_fri/src/utils.rs b/prover/crates/bin/vk_setup_data_generator_server_fri/src/utils.rs deleted file mode 100644 index 85ce0b5be0b6..000000000000 --- a/prover/crates/bin/vk_setup_data_generator_server_fri/src/utils.rs +++ /dev/null @@ -1,141 +0,0 @@ -use circuit_definitions::{ - boojum::worker::Worker, - circuit_definitions::{ - aux_layer::{ - ZkSyncCompressionForWrapperCircuit, ZkSyncCompressionLayerCircuit, - ZkSyncCompressionLayerStorage, ZkSyncSnarkWrapperSetup, ZkSyncSnarkWrapperVK, - }, - recursion_layer::{ZkSyncRecursionLayerStorageType, ZkSyncRecursionLayerVerificationKey}, - }, -}; -#[cfg(feature = "gpu")] -use shivini::cs::gpu_setup_and_vk_from_base_setup_vk_params_and_hints; -use zkevm_test_harness::{ - data_source::{BlockDataSource, SetupDataSource}, - proof_wrapper_utils::{ - check_trusted_setup_file_existace, get_vk_for_previous_circuit, - get_wrapper_setup_and_vk_from_compression_vk, WrapperConfig, - }, - prover_utils::light::{ - create_light_compression_for_wrapper_setup_data, create_light_compression_layer_setup_data, - }, -}; - -#[cfg(feature = "gpu")] -pub(crate) fn generate_compression_vks( - config: WrapperConfig, - source: &mut DS, - worker: &Worker, -) { - for circuit_type in config.get_compression_types() { - let vk = get_vk_for_previous_circuit(source, circuit_type).unwrap_or_else(|_| { - panic!( - "VK of previous circuit should be present. 
Current circuit type: {}", - circuit_type - ) - }); - - let compression_circuit = - ZkSyncCompressionLayerCircuit::from_witness_and_vk(None, vk, circuit_type); - let proof_config = compression_circuit.proof_config_for_compression_step(); - - let (setup_base, vk_geometry, vars_hint, witness_hint, finalization_hint) = - create_light_compression_layer_setup_data( - compression_circuit, - worker, - proof_config.fri_lde_factor, - proof_config.merkle_tree_cap_size, - ); - - let (_, vk) = gpu_setup_and_vk_from_base_setup_vk_params_and_hints( - setup_base, - vk_geometry, - vars_hint.clone(), - witness_hint, - worker, - ) - .expect("failed creating GPU compression layer setup data"); - - source - .set_compression_vk(ZkSyncCompressionLayerStorage::from_inner( - circuit_type, - vk.clone(), - )) - .unwrap(); - source - .set_compression_hint(ZkSyncCompressionLayerStorage::from_inner( - circuit_type, - finalization_hint.clone(), - )) - .unwrap(); - } -} - -#[cfg(feature = "gpu")] -pub(crate) fn generate_compression_for_wrapper_vks( - config: WrapperConfig, - source: &mut DS, - worker: &Worker, -) { - let compression_for_wrapper_type = config.get_compression_for_wrapper_type(); - let vk = get_vk_for_previous_circuit(source, compression_for_wrapper_type).unwrap(); - - let circuit = ZkSyncCompressionForWrapperCircuit::from_witness_and_vk( - None, - vk, - compression_for_wrapper_type, - ); - - let proof_config = circuit.proof_config_for_compression_step(); - - let (setup_base, vk_geometry, vars_hint, witness_hint, finalization_hint) = - create_light_compression_for_wrapper_setup_data( - circuit, - worker, - proof_config.fri_lde_factor, - proof_config.merkle_tree_cap_size, - ); - - let (_, vk) = gpu_setup_and_vk_from_base_setup_vk_params_and_hints( - setup_base, - vk_geometry, - vars_hint.clone(), - witness_hint, - worker, - ) - .expect("failed creating GPU compression for wrapper layer setup data"); - - source - 
.set_compression_for_wrapper_vk(ZkSyncCompressionLayerStorage::from_inner( - compression_for_wrapper_type, - vk.clone(), - )) - .unwrap(); - source - .set_compression_for_wrapper_hint(ZkSyncCompressionLayerStorage::from_inner( - compression_for_wrapper_type, - finalization_hint.clone(), - )) - .unwrap(); -} - -/// Computes wrapper vk from scheduler vk -/// We store all vks in the RAM -pub fn get_plonk_wrapper_setup_and_vk_from_scheduler_vk( - source: &mut DS, - vk: ZkSyncRecursionLayerVerificationKey, - config: WrapperConfig, -) -> (ZkSyncSnarkWrapperSetup, ZkSyncSnarkWrapperVK) { - // Check trusted setup file for later - check_trusted_setup_file_existace(); - - // Check circuit type correctness - assert_eq!( - vk.numeric_circuit_type(), - ZkSyncRecursionLayerStorageType::SchedulerCircuit as u8 - ); - - let wrapper_type = config.get_wrapper_type(); - let wrapper_vk = source.get_compression_for_wrapper_vk(wrapper_type).unwrap(); - get_wrapper_setup_and_vk_from_compression_vk(wrapper_vk, config) -} diff --git a/prover/crates/bin/vk_setup_data_generator_server_fri/src/vk_commitment_helper.rs b/prover/crates/bin/vk_setup_data_generator_server_fri/src/vk_commitment_helper.rs index 2753799dc722..2f5bbf269267 100644 --- a/prover/crates/bin/vk_setup_data_generator_server_fri/src/vk_commitment_helper.rs +++ b/prover/crates/bin/vk_setup_data_generator_server_fri/src/vk_commitment_helper.rs @@ -24,6 +24,6 @@ pub fn read_contract_toml() -> anyhow::Result { pub fn get_contract_toml_path() -> PathBuf { Workspace::locate() - .core() + .root() .join("etc/env/base/contracts.toml") } diff --git a/prover/crates/lib/circuit_prover_service/Cargo.toml b/prover/crates/lib/circuit_prover_service/Cargo.toml index ca7d1ede02f1..3b152528ab68 100644 --- a/prover/crates/lib/circuit_prover_service/Cargo.toml +++ b/prover/crates/lib/circuit_prover_service/Cargo.toml @@ -13,7 +13,7 @@ categories.workspace = true [dependencies] zksync_prover_job_processor.workspace = true 
zksync_prover_fri_types.workspace = true -zksync_prover_keystore.workspace = true +zksync_prover_keystore = { workspace = true, features = ["gpu-light"] } zksync_prover_dal.workspace = true zksync_types.workspace = true zksync_object_store.workspace = true @@ -24,8 +24,6 @@ tokio = { workspace = true, features = ["macros", "time"] } tokio-util.workspace = true tracing.workspace = true -shivini = { workspace = true, features = [ - "circuit_definitions", -] } +shivini = { workspace = true, features = ["circuit_definitions"] } zkevm_test_harness.workspace = true vise.workspace = true diff --git a/prover/crates/lib/keystore/Cargo.toml b/prover/crates/lib/keystore/Cargo.toml index a247a24bdd8b..2c21067f2428 100644 --- a/prover/crates/lib/keystore/Cargo.toml +++ b/prover/crates/lib/keystore/Cargo.toml @@ -17,6 +17,7 @@ zkevm_test_harness.workspace = true circuit_definitions = { workspace = true, features = ["log_tracing"] } shivini = { workspace = true, optional = true } fflonk-gpu = { workspace = true, optional = true } +proof-compression-gpu = { workspace = true, optional = true } fflonk.workspace = true boojum-cuda = { workspace = true, optional = true } @@ -37,4 +38,4 @@ futures = { workspace = true, features = ["compat"] } default = [] # feature to not compile era-bellman-cuda, but to be able to use GPU features gpu-light = ["dep:shivini", "dep:boojum-cuda"] -gpu = ["dep:shivini", "dep:fflonk-gpu", "dep:boojum-cuda"] +gpu = ["dep:shivini", "dep:fflonk-gpu", "dep:boojum-cuda", "dep:proof-compression-gpu"] diff --git a/prover/crates/lib/keystore/src/compressor.rs b/prover/crates/lib/keystore/src/compressor.rs new file mode 100644 index 000000000000..ea4518238fd6 --- /dev/null +++ b/prover/crates/lib/keystore/src/compressor.rs @@ -0,0 +1,221 @@ +use std::{ + fs::File, + io::{Read, Write}, +}; + +use circuit_definitions::circuit_definitions::recursion_layer::ZkSyncRecursionLayerStorageType; +use zksync_prover_fri_types::ProverServiceDataKey; + +use 
crate::keystore::{Keystore, ProverServiceDataType}; + +const COMPACT_CRS_ENV_VAR: &str = "COMPACT_CRS_FILE"; + +impl proof_compression_gpu::BlobStorage for Keystore { + fn read_scheduler_vk(&self) -> Box { + let filepath = self.get_file_path( + ProverServiceDataKey::new_recursive( + ZkSyncRecursionLayerStorageType::SchedulerCircuit as u8, + ), + ProverServiceDataType::VerificationKey, + ); + + Box::new(File::open(filepath).unwrap()) + } + + fn read_compression_layer_finalization_hint(&self, circuit_id: u8) -> Box { + let filepath = self.get_file_path( + ProverServiceDataKey::new_compression(circuit_id), + ProverServiceDataType::FinalizationHints, + ); + + Box::new(File::open(filepath).unwrap()) + } + + fn read_compression_layer_vk(&self, circuit_id: u8) -> Box { + let filepath = self.get_file_path( + ProverServiceDataKey::new_compression(circuit_id), + ProverServiceDataType::VerificationKey, + ); + + Box::new(File::open(filepath).unwrap()) + } + + fn read_compression_layer_precomputation(&self, circuit_id: u8) -> Box { + let filepath = self.get_file_path( + ProverServiceDataKey::new_compression(circuit_id), + ProverServiceDataType::SetupData, + ); + + Box::new(File::open(filepath).unwrap()) + } + + fn read_compression_wrapper_finalization_hint(&self, circuit_id: u8) -> Box { + let filepath = self.get_file_path( + ProverServiceDataKey::new_compression_wrapper(circuit_id), + ProverServiceDataType::FinalizationHints, + ); + + Box::new(File::open(filepath).unwrap()) + } + + fn read_compression_wrapper_vk(&self, circuit_id: u8) -> Box { + let filepath = self.get_file_path( + ProverServiceDataKey::new_compression_wrapper(circuit_id), + ProverServiceDataType::VerificationKey, + ); + + Box::new(File::open(filepath).unwrap()) + } + + fn read_compression_wrapper_precomputation( + &self, + circuit_id: u8, + ) -> Box { + let filepath = self.get_file_path( + ProverServiceDataKey::new_compression_wrapper(circuit_id), + ProverServiceDataType::SetupData, + ); + + 
Box::new(File::open(filepath).unwrap()) + } + + fn read_fflonk_vk(&self) -> Box { + let filepath = self.get_file_path( + ProverServiceDataKey::snark(), + ProverServiceDataType::FflonkSnarkVerificationKey, + ); + + Box::new(File::open(filepath).unwrap()) + } + + fn read_fflonk_precomputation(&self) -> Box { + let filepath = self.get_file_path( + ProverServiceDataKey::snark(), + ProverServiceDataType::FflonkSetupData, + ); + + Box::new(File::open(filepath).unwrap()) + } + + fn read_plonk_vk(&self) -> Box { + let filepath = self.get_file_path( + ProverServiceDataKey::snark(), + ProverServiceDataType::SnarkVerificationKey, + ); + + Box::new(File::open(filepath).unwrap()) + } + + fn read_plonk_precomputation(&self) -> Box { + let filepath = self.get_file_path( + ProverServiceDataKey::snark(), + ProverServiceDataType::PlonkSetupData, + ); + + Box::new(File::open(filepath).unwrap()) + } + + fn read_compact_raw_crs(&self) -> Box { + let filepath = + std::env::var(COMPACT_CRS_ENV_VAR).expect("No compact CRS file path provided"); + Box::new(File::open(filepath).unwrap()) + } +} + +impl proof_compression_gpu::BlobStorageExt for Keystore { + fn write_compression_layer_finalization_hint(&self, circuit_id: u8) -> Box { + let filepath = self.get_file_path( + ProverServiceDataKey::new_compression(circuit_id), + ProverServiceDataType::FinalizationHints, + ); + + Box::new(File::create(filepath).unwrap()) + } + + fn write_compression_layer_vk(&self, circuit_id: u8) -> Box { + let filepath = self.get_file_path( + ProverServiceDataKey::new_compression(circuit_id), + ProverServiceDataType::VerificationKey, + ); + + Box::new(File::create(filepath).unwrap()) + } + + fn write_compression_layer_precomputation(&self, circuit_id: u8) -> Box { + let filepath = self.get_file_path( + ProverServiceDataKey::new_compression(circuit_id), + ProverServiceDataType::SetupData, + ); + + Box::new(File::create(filepath).unwrap()) + } + + fn write_compression_wrapper_finalization_hint(&self, circuit_id: u8) 
-> Box { + let filepath = self.get_file_path( + ProverServiceDataKey::new_compression_wrapper(circuit_id), + ProverServiceDataType::FinalizationHints, + ); + + Box::new(File::create(filepath).unwrap()) + } + + fn write_compression_wrapper_vk(&self, circuit_id: u8) -> Box { + let filepath = self.get_file_path( + ProverServiceDataKey::new_compression_wrapper(circuit_id), + ProverServiceDataType::VerificationKey, + ); + + Box::new(File::create(filepath).unwrap()) + } + + fn write_compression_wrapper_precomputation(&self, circuit_id: u8) -> Box { + let filepath = self.get_file_path( + ProverServiceDataKey::new_compression_wrapper(circuit_id), + ProverServiceDataType::SetupData, + ); + + Box::new(File::create(filepath).unwrap()) + } + + fn write_fflonk_vk(&self) -> Box { + let filepath = self.get_file_path( + ProverServiceDataKey::snark(), + ProverServiceDataType::FflonkSnarkVerificationKey, + ); + + Box::new(File::create(filepath).unwrap()) + } + + fn write_fflonk_precomputation(&self) -> Box { + let filepath = self.get_file_path( + ProverServiceDataKey::snark(), + ProverServiceDataType::FflonkSetupData, + ); + + Box::new(File::create(filepath).unwrap()) + } + + fn write_plonk_vk(&self) -> Box { + let filepath = self.get_file_path( + ProverServiceDataKey::snark(), + ProverServiceDataType::SnarkVerificationKey, + ); + + Box::new(File::create(filepath).unwrap()) + } + + fn write_plonk_precomputation(&self) -> Box { + let filepath = self.get_file_path( + ProverServiceDataKey::snark(), + ProverServiceDataType::PlonkSetupData, + ); + + Box::new(File::create(filepath).unwrap()) + } + + fn write_compact_raw_crs(&self) -> Box { + let filepath = + std::env::var(COMPACT_CRS_ENV_VAR).expect("No compact CRS file path provided"); + Box::new(File::create(filepath).unwrap()) + } +} diff --git a/prover/crates/lib/keystore/src/keystore.rs b/prover/crates/lib/keystore/src/keystore.rs index fb84436916e7..eec7b4c8344e 100644 --- a/prover/crates/lib/keystore/src/keystore.rs +++ 
b/prover/crates/lib/keystore/src/keystore.rs @@ -41,6 +41,8 @@ pub enum ProverServiceDataType { VerificationKey, SetupData, FinalizationHints, + PlonkSetupData, + FflonkSetupData, SnarkVerificationKey, FflonkSnarkVerificationKey, } @@ -78,7 +80,7 @@ impl Keystore { // - We're running from the core workspace. // - We're running the binary from the docker. let data_dir_path = match Workspace::locate() { - Workspace::None => { + Workspace::Root => { // We're running a binary, likely in a docker. // Keys can be in one of a few paths. // We want to be very conservative here, and checking @@ -114,7 +116,7 @@ impl Keystore { &self.basedir } - fn get_file_path( + pub(crate) fn get_file_path( &self, key: ProverServiceDataKey, service_data_type: ProverServiceDataType, @@ -130,6 +132,12 @@ impl Keystore { ProverServiceDataType::FinalizationHints => self .basedir .join(format!("finalization_hints_{}.bin", name)), + ProverServiceDataType::PlonkSetupData => { + self.basedir.join(format!("plonk_setup_{}_data.bin", name)) + } + ProverServiceDataType::FflonkSetupData => { + self.basedir.join(format!("fflonk_setup_{}_data.bin", name)) + } ProverServiceDataType::SnarkVerificationKey => { self.basedir.join(format!("verification_{}_key.json", name)) } @@ -187,6 +195,14 @@ impl Keystore { &self, circuit_type: u8, ) -> anyhow::Result { + if circuit_type == ZkSyncRecursionLayerStorageType::SchedulerCircuit as u8 { + let vk = Self::load_json_from_file(self.get_file_path( + ProverServiceDataKey::new_recursive(circuit_type), + ProverServiceDataType::VerificationKey, + ))?; + return Ok(ZkSyncRecursionLayerVerificationKey::SchedulerCircuit(vk)); + } + Self::load_json_from_file(self.get_file_path( ProverServiceDataKey::new_recursive(circuit_type), ProverServiceDataType::VerificationKey, @@ -197,20 +213,43 @@ impl Keystore { &self, circuit_type: u8, ) -> anyhow::Result { - Self::load_json_from_file(self.get_file_path( + let key = Self::load_json_from_file(self.get_file_path( 
ProverServiceDataKey::new_compression(circuit_type), ProverServiceDataType::VerificationKey, - )) + ))?; + + match circuit_type { + 1 => Ok(ZkSyncCompressionLayerVerificationKey::CompressionMode1Circuit(key)), + 2 => Ok(ZkSyncCompressionLayerVerificationKey::CompressionMode2Circuit(key)), + 3 => Ok(ZkSyncCompressionLayerVerificationKey::CompressionMode3Circuit(key)), + 4 => Ok(ZkSyncCompressionLayerVerificationKey::CompressionMode4Circuit(key)), + _ => Err(anyhow::anyhow!( + "Invalid compression circuit type: {}", + circuit_type + )), + } } pub fn load_compression_for_wrapper_vk( &self, circuit_type: u8, ) -> anyhow::Result { - Self::load_json_from_file(self.get_file_path( + let key = Self::load_json_from_file(self.get_file_path( ProverServiceDataKey::new_compression_wrapper(circuit_type), ProverServiceDataType::VerificationKey, - )) + ))?; + + match circuit_type { + 1 => Ok(ZkSyncCompressionForWrapperVerificationKey::CompressionMode1Circuit(key)), + 2 => Ok(ZkSyncCompressionForWrapperVerificationKey::CompressionMode2Circuit(key)), + 3 => Ok(ZkSyncCompressionForWrapperVerificationKey::CompressionMode3Circuit(key)), + 4 => Ok(ZkSyncCompressionForWrapperVerificationKey::CompressionMode4Circuit(key)), + 5 => Ok(ZkSyncCompressionForWrapperVerificationKey::CompressionMode5Circuit(key)), + _ => Err(anyhow::anyhow!( + "Invalid compression circuit type: {}", + circuit_type + )), + } } pub fn save_base_layer_verification_key( @@ -233,6 +272,12 @@ impl Keystore { ProverServiceDataKey::new_recursive(vk.numeric_circuit_type()), ProverServiceDataType::VerificationKey, ); + + if let ZkSyncRecursionLayerVerificationKey::SchedulerCircuit(key) = vk { + tracing::info!("saving recursive layer verification key to: {:?}", filepath); + return Self::save_json_pretty(filepath, &key); + } + tracing::info!("saving recursive layer verification key to: {:?}", filepath); Self::save_json_pretty(filepath, &vk) } @@ -249,7 +294,7 @@ impl Keystore { "saving compression layer verification key to: 
{:?}", filepath ); - Self::save_json_pretty(filepath, &vk) + Self::save_json_pretty(filepath, &vk.into_inner()) } pub fn save_compression_for_wrapper_vk( @@ -264,7 +309,7 @@ impl Keystore { "saving compression wrapper verification key to: {:?}", filepath ); - Self::save_json_pretty(filepath, &vk) + Self::save_json_pretty(filepath, &vk.into_inner()) } /// @@ -645,45 +690,6 @@ impl Keystore { ) .context("save_finalization_hints()")?; - // Compression - // todo: don't use hardcoded values - for circuit in 1..5 { - let vk = source - .get_compression_vk(circuit as u8) - .map_err(|err| anyhow::anyhow!("No vk exist for circuit type: {circuit}: {err}"))?; - - self.save_compression_vk(vk) - .context("save_compression_vk()")?; - - let hint = source.get_compression_hint(circuit as u8).map_err(|err| { - anyhow::anyhow!("No finalization hint exist for circuit type: {circuit}: {err}") - })?; - - self.save_finalization_hints( - ProverServiceDataKey::new_compression(circuit as u8), - &hint.into_inner(), - ) - .context("save_finalization_hints()")?; - } - - // Compression wrapper - let vk = source - .get_compression_for_wrapper_vk(5) - .map_err(|err| anyhow::anyhow!("No vk exist for circuit type: 5: {err}"))?; - - self.save_compression_for_wrapper_vk(vk) - .context("save_compression_wrapper_vk()")?; - - let hint = source.get_compression_for_wrapper_hint(5).map_err(|err| { - anyhow::anyhow!("No finalization hint exist for circuit type: 5: {err}") - })?; - - self.save_finalization_hints( - ProverServiceDataKey::new_compression_wrapper(5), - &hint.into_inner(), - ) - .context("save_finalization_hints()")?; - Ok(()) } diff --git a/prover/crates/lib/keystore/src/lib.rs b/prover/crates/lib/keystore/src/lib.rs index e5f00fd307ba..bc8bbdcd992c 100644 --- a/prover/crates/lib/keystore/src/lib.rs +++ b/prover/crates/lib/keystore/src/lib.rs @@ -28,6 +28,9 @@ pub mod keystore; pub mod setup_data_generator; pub mod utils; +#[cfg(feature = "gpu")] +pub mod compressor; + #[derive(Debug, Serialize, 
Deserialize)] #[serde( bound = "F: serde::Serialize + serde::de::DeserializeOwned, P: serde::Serialize + serde::de::DeserializeOwned" diff --git a/prover/crates/lib/keystore/src/setup_data_generator.rs b/prover/crates/lib/keystore/src/setup_data_generator.rs index 162d94ced6dd..ae926d17ca3f 100644 --- a/prover/crates/lib/keystore/src/setup_data_generator.rs +++ b/prover/crates/lib/keystore/src/setup_data_generator.rs @@ -7,21 +7,9 @@ use anyhow::Context as _; #[cfg(any(feature = "gpu", feature = "gpu-light"))] use boojum_cuda::poseidon2::GLHasher; #[cfg(any(feature = "gpu", feature = "gpu-light"))] -use circuit_definitions::circuit_definitions::aux_layer::{ - wrapper::ZkSyncCompressionWrapper, CompressionProofsTreeHasherForWrapper, -}; -#[cfg(feature = "gpu")] -use fflonk_gpu::{ - FflonkDeviceSetup, FflonkSnarkVerifierCircuit, FflonkSnarkVerifierCircuitDeviceSetup, - FflonkSnarkVerifierCircuitVK, -}; -#[cfg(any(feature = "gpu", feature = "gpu-light"))] use shivini::cs::gpu_setup_and_vk_from_base_setup_vk_params_and_hints; #[cfg(any(feature = "gpu", feature = "gpu-light"))] -use zkevm_test_harness::{ - compute_setups::light::generate_light_circuit_setup_data, - data_source::in_memory_data_source::InMemoryDataSource, -}; +use zkevm_test_harness::compute_setups::light::generate_light_circuit_setup_data; use zkevm_test_harness::{ compute_setups::{generate_circuit_setup_data, CircuitSetupData}, data_source::SetupDataSource, @@ -55,10 +43,10 @@ pub fn generate_setup_data_common( .into_inner(), ), ProvingStage::Compression => { - unreachable!("Compression stage should not be generated with CPU.") + unreachable!("Compression stage setup data should be generated with a generate-compressor-data command") } ProvingStage::CompressionWrapper => { - unreachable!("CompressionWrapper stage should not be generated with CPU.") + unreachable!("CompressionWrapper stage setup data should be generated with a generate-compressor-data command") } _ => ( 
Some(keystore.load_finalization_hints(circuit)?), @@ -108,23 +96,9 @@ pub trait SetupDataGenerator { } if circuit == ProverServiceDataKey::snark() { - #[cfg(not(feature = "gpu"))] - { - anyhow::bail!("Must compile with --gpu feature to use this option."); - } - #[cfg(feature = "gpu")] - { - let mut data_source = self.keystore().load_keys_to_data_source()?; - let (setup, _) = get_fflonk_snark_verifier_setup_and_vk(&mut data_source); - if !dry_run { - self.keystore() - .save_fflonk_snark_setup_data(setup) - .context("save_setup_data()")?; - } - return Ok(String::from( - "FFLONK is serialized differently, skipping hashing.", - )); - } + unreachable!( + "Snark setup data should be generated with generate-compressor-data command" + ) } let serialized = self.generate_setup_data(circuit)?; @@ -146,7 +120,7 @@ pub trait SetupDataGenerator { dry_run: bool, recompute_if_missing: bool, ) -> anyhow::Result> { - Ok(ProverServiceDataKey::all() + Ok(ProverServiceDataKey::all_boojum() .iter() .map(|circuit| { tracing::info!("Generating setup data for {:?}", circuit.name()); @@ -206,44 +180,10 @@ impl SetupDataGenerator for GPUSetupDataGenerator { let worker = Worker::new(); match circuit.stage { - ProvingStage::CompressionWrapper => { - let (gpu_setup_data, verification_key) = - gpu_setup_and_vk_from_base_setup_vk_params_and_hints::< - CompressionProofsTreeHasherForWrapper, - _, - >( - circuit_setup_data.setup_base, - circuit_setup_data.vk_geometry, - circuit_setup_data.vars_hint.clone(), - circuit_setup_data.wits_hint, - &worker, - ) - .context("failed creating GPU base layer setup data")?; - - let gpu_prover_setup_data = GpuProverSetupData { - setup: gpu_setup_data, - vk: verification_key.clone(), - finalization_hint: circuit_setup_data.finalization_hint, - }; - - let serialized_vk = get_vk_by_circuit(self.keystore.clone(), circuit)?; - - assert_eq!( - bincode::serialize(&verification_key) - .expect("Failed serializing setup data"), - serialized_vk, - "Verification key mismatch 
for circuit: {:?}", - circuit.name() - ); - - // Serialization should always succeed. - Ok(bincode::serialize(&gpu_prover_setup_data) - .expect("Failed serializing setup data")) - } - ProvingStage::Snark => { - unreachable!( - "We cannot serialize Fflonk data with bincode, it is done separately" - ) + ProvingStage::CompressionWrapper + | ProvingStage::Snark + | ProvingStage::Compression => { + unreachable!("Setup data for compression, compression-wrapper and snark stages should be generated with generate-compressor-data command") } _ => { let (gpu_setup_data, verification_key) = @@ -320,31 +260,3 @@ fn get_vk_by_circuit(keystore: Keystore, circuit: ProverServiceDataKey) -> anyho } } } - -#[cfg(feature = "gpu")] -pub fn get_fflonk_snark_verifier_setup_and_vk( - data_source: &mut InMemoryDataSource, -) -> ( - FflonkSnarkVerifierCircuitDeviceSetup, - FflonkSnarkVerifierCircuitVK, -) { - let vk = data_source - .get_compression_for_wrapper_vk(5) - .unwrap() - .into_inner(); - let fixed_parameters = vk.fixed_parameters.clone(); - // todo: do not hardcode this value - let wrapper_function = ZkSyncCompressionWrapper::from_numeric_circuit_type(5); - - let circuit = FflonkSnarkVerifierCircuit { - witness: None, - vk, - fixed_parameters, - transcript_params: (), - wrapper_function, - }; - let setup = FflonkDeviceSetup::<_, _, _>::create_setup_on_device(&circuit).unwrap(); - let snark_vk = setup.get_verification_key(); - - (setup, snark_vk) -} diff --git a/prover/crates/lib/prover_fri_types/src/lib.rs b/prover/crates/lib/prover_fri_types/src/lib.rs index 44c54da578ee..5b7a54a31052 100644 --- a/prover/crates/lib/prover_fri_types/src/lib.rs +++ b/prover/crates/lib/prover_fri_types/src/lib.rs @@ -30,7 +30,7 @@ pub mod queue; pub const MAX_COMPRESSION_CIRCUITS: u8 = 5; // THESE VALUES SHOULD BE UPDATED ON ANY PROTOCOL UPGRADE OF PROVERS -pub const PROVER_PROTOCOL_VERSION: ProtocolVersionId = ProtocolVersionId::Version25; +pub const PROVER_PROTOCOL_VERSION: ProtocolVersionId = 
ProtocolVersionId::Version26; pub const PROVER_PROTOCOL_PATCH: VersionPatch = VersionPatch(0); pub const PROVER_PROTOCOL_SEMANTIC_VERSION: ProtocolSemanticVersion = ProtocolSemanticVersion { minor: PROVER_PROTOCOL_VERSION, @@ -294,12 +294,6 @@ impl ProverServiceDataKey { for numeric_circuit in ZkSyncRecursionLayerStorageType::as_iter_u8() { results.push(ProverServiceDataKey::new_recursive(numeric_circuit)) } - - for numeric_circuit in 1..MAX_COMPRESSION_CIRCUITS { - results.push(ProverServiceDataKey::new_compression(numeric_circuit)); - } - results.push(ProverServiceDataKey::new_compression_wrapper(5)); - results } @@ -311,12 +305,6 @@ impl ProverServiceDataKey { } } - pub fn all() -> Vec { - let mut keys = Self::all_boojum(); - keys.push(Self::snark()); - keys - } - pub fn is_base_layer(&self) -> bool { self.stage == ProvingStage::BasicCircuits } diff --git a/prover/crates/lib/prover_job_processor/Cargo.toml b/prover/crates/lib/prover_job_processor/Cargo.toml index 5197b33b1f95..fea4c6195fd7 100644 --- a/prover/crates/lib/prover_job_processor/Cargo.toml +++ b/prover/crates/lib/prover_job_processor/Cargo.toml @@ -14,7 +14,7 @@ categories.workspace = true async-trait.workspace = true anyhow.workspace = true futures.workspace = true -tokio.workspace = true +tokio = { workspace = true, features = ["rt"] } tokio-stream.workspace = true tokio-util.workspace = true tracing.workspace = true diff --git a/prover/data/keys/finalization_hints_compression_1.bin b/prover/data/keys/finalization_hints_compression_1.bin index 8f5cadc55c57..d71029042a99 100644 Binary files a/prover/data/keys/finalization_hints_compression_1.bin and b/prover/data/keys/finalization_hints_compression_1.bin differ diff --git a/prover/data/keys/finalization_hints_compression_2.bin b/prover/data/keys/finalization_hints_compression_2.bin index c6a4253d81e6..5dc0776779f4 100644 Binary files a/prover/data/keys/finalization_hints_compression_2.bin and b/prover/data/keys/finalization_hints_compression_2.bin 
differ diff --git a/prover/data/keys/finalization_hints_compression_3.bin b/prover/data/keys/finalization_hints_compression_3.bin index ef3f24e18c23..f8148a2cb2ee 100644 Binary files a/prover/data/keys/finalization_hints_compression_3.bin and b/prover/data/keys/finalization_hints_compression_3.bin differ diff --git a/prover/data/keys/finalization_hints_compression_4.bin b/prover/data/keys/finalization_hints_compression_4.bin index ca6c08bab1f1..d15a487c3572 100644 Binary files a/prover/data/keys/finalization_hints_compression_4.bin and b/prover/data/keys/finalization_hints_compression_4.bin differ diff --git a/prover/data/keys/finalization_hints_compression_wrapper_1.bin b/prover/data/keys/finalization_hints_compression_wrapper_1.bin new file mode 100644 index 000000000000..d71029042a99 --- /dev/null +++ b/prover/data/keys/finalization_hints_compression_wrapper_1.bin @@ -0,0 +1,35 @@ +{ + "row_finalization_hints": [ + [ + 3, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ] + ], + "column_finalization_hints": [], + "nop_gates_to_add": 22419, + "final_trace_len": 65536, + "public_inputs": [ + [ + 0, + 43116 + ], + [ + 1, + 43116 + ], + [ + 2, + 43116 + ], + [ + 3, + 43116 + ] + ] +} \ No newline at end of file diff --git a/prover/data/keys/finalization_hints_compression_wrapper_5.bin b/prover/data/keys/finalization_hints_compression_wrapper_5.bin index cb61da940e2a..a9a54f2be7b2 100644 Binary files a/prover/data/keys/finalization_hints_compression_wrapper_5.bin and b/prover/data/keys/finalization_hints_compression_wrapper_5.bin differ diff --git a/prover/data/keys/verification_compression_1_key.json b/prover/data/keys/verification_compression_1_key.json index 3de9e823d17b..87ba550c1858 100644 --- a/prover/data/keys/verification_compression_1_key.json +++ b/prover/data/keys/verification_compression_1_key.json @@ -1,262 +1,260 @@ { - "CompressionMode1Circuit": { - "fixed_parameters": { - "parameters": { - "num_columns_under_copy_permutation": 52, - "num_witness_columns": 78, - 
"num_constant_columns": 4, - "max_allowed_constraint_degree": 8 - }, - "lookup_parameters": "NoLookup", - "domain_size": 65536, - "total_tables_len": 0, - "public_inputs_locations": [ - [ - 0, - 43116 - ], - [ - 1, - 43116 - ], - [ - 2, - 43116 - ], - [ - 3, - 43116 - ] + "fixed_parameters": { + "parameters": { + "num_columns_under_copy_permutation": 52, + "num_witness_columns": 78, + "num_constant_columns": 4, + "max_allowed_constraint_degree": 8 + }, + "lookup_parameters": "NoLookup", + "domain_size": 65536, + "total_tables_len": 0, + "public_inputs_locations": [ + [ + 0, + 43116 ], - "extra_constant_polys_for_selectors": 4, - "table_ids_column_idxes": [], - "quotient_degree": 8, - "selectors_placement": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 1, - "num_constants": 0, - "degree": 7, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "Fork": { - "left": { - "Fork": { - "left": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 4, - "num_constants": 4, - "degree": 3, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "Fork": { - "left": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 2, - "num_constants": 0, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "GateOnly": { - "gate_idx": 6, - "num_constants": 0, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } + [ + 1, + 43116 + ], + [ + 2, + 43116 + ], + [ + 3, + 43116 + ] + ], + "extra_constant_polys_for_selectors": 4, + "table_ids_column_idxes": [], + "quotient_degree": 8, + "selectors_placement": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 1, + "num_constants": 0, + "degree": 7, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "Fork": { + "left": { + "Fork": { + "left": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 4, + "num_constants": 4, + "degree": 3, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "Fork": { + "left": { + 
"Fork": { + "left": { + "GateOnly": { + "gate_idx": 2, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "GateOnly": { + "gate_idx": 6, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false } } - }, - "right": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 5, - "num_constants": 0, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "GateOnly": { - "gate_idx": 7, - "num_constants": 0, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } + } + }, + "right": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 5, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "GateOnly": { + "gate_idx": 7, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false } } } } } } - }, - "right": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 9, - "num_constants": 4, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "GateOnly": { - "gate_idx": 0, - "num_constants": 4, - "degree": 1, - "needs_selector": true, - "is_lookup": false - } + } + }, + "right": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 9, + "num_constants": 4, + "degree": 2, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "GateOnly": { + "gate_idx": 0, + "num_constants": 4, + "degree": 1, + "needs_selector": true, + "is_lookup": false } } } } - }, - "right": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 3, - "num_constants": 2, - "degree": 3, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "GateOnly": { - "gate_idx": 8, - "num_constants": 0, - "degree": 0, - "needs_selector": true, - "is_lookup": false - } + } + }, + "right": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 3, + "num_constants": 2, + "degree": 3, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + 
"GateOnly": { + "gate_idx": 8, + "num_constants": 0, + "degree": 0, + "needs_selector": true, + "is_lookup": false } } } } } } - }, - "fri_lde_factor": 32, - "cap_size": 16 + } }, - "setup_merkle_tree_cap": [ - [ - 18429053439012828355, - 13569717100398864647, - 5674934326174107161, - 17602965228088658523 - ], - [ - 1853596005538808278, - 4863986522266180298, - 1446747858708973345, - 6556557904065432671 - ], - [ - 15389763462077761347, - 347990863686799186, - 9600431685918762723, - 18230110007683373373 - ], - [ - 4488979291903240149, - 15827713959121695389, - 5212372431509901484, - 15330056117710268303 - ], - [ - 2154621009093329369, - 2697922564969508975, - 8553713447457143603, - 11909287098676194610 - ], - [ - 17675899931204043754, - 1900641380227488316, - 13799770936458315711, - 16155696965522802314 - ], - [ - 4325071727231912895, - 15495124842543512517, - 7693387508183911518, - 2229191662604882855 - ], - [ - 12528623798638620709, - 7099783078856962509, - 1026199882135084179, - 7487829827063298337 - ], - [ - 17775767559834649815, - 11648203906640999220, - 18372230812591541111, - 2380029132644171876 - ], - [ - 17282415096853816692, - 7048528242178291549, - 1168458073834679094, - 4050365242852861872 - ], - [ - 12458945294827240519, - 9750085849866836452, - 10571283035096727478, - 296889776702427073 - ], - [ - 3087401981646414061, - 5145016559209358338, - 198041048326501493, - 16681321004258095323 - ], - [ - 15873896825395209700, - 17464513449028297769, - 6391802199611869536, - 24117562251396832 - ], - [ - 5930377685985813311, - 11344757707146885659, - 17036003029203547492, - 15372566160771512018 - ], - [ - 3165044285750289057, - 15764455157826377125, - 729334742516289605, - 8335221493745507200 - ], - [ - 318679242859206967, - 16131519447675179661, - 1921616860721123326, - 9785286945407043240 - ] + "fri_lde_factor": 32, + "cap_size": 16 + }, + "setup_merkle_tree_cap": [ + [ + 18429053439012828355, + 13569717100398864647, + 5674934326174107161, + 
17602965228088658523 + ], + [ + 1853596005538808278, + 4863986522266180298, + 1446747858708973345, + 6556557904065432671 + ], + [ + 15389763462077761347, + 347990863686799186, + 9600431685918762723, + 18230110007683373373 + ], + [ + 4488979291903240149, + 15827713959121695389, + 5212372431509901484, + 15330056117710268303 + ], + [ + 2154621009093329369, + 2697922564969508975, + 8553713447457143603, + 11909287098676194610 + ], + [ + 17675899931204043754, + 1900641380227488316, + 13799770936458315711, + 16155696965522802314 + ], + [ + 4325071727231912895, + 15495124842543512517, + 7693387508183911518, + 2229191662604882855 + ], + [ + 12528623798638620709, + 7099783078856962509, + 1026199882135084179, + 7487829827063298337 + ], + [ + 17775767559834649815, + 11648203906640999220, + 18372230812591541111, + 2380029132644171876 + ], + [ + 17282415096853816692, + 7048528242178291549, + 1168458073834679094, + 4050365242852861872 + ], + [ + 12458945294827240519, + 9750085849866836452, + 10571283035096727478, + 296889776702427073 + ], + [ + 3087401981646414061, + 5145016559209358338, + 198041048326501493, + 16681321004258095323 + ], + [ + 15873896825395209700, + 17464513449028297769, + 6391802199611869536, + 24117562251396832 + ], + [ + 5930377685985813311, + 11344757707146885659, + 17036003029203547492, + 15372566160771512018 + ], + [ + 3165044285750289057, + 15764455157826377125, + 729334742516289605, + 8335221493745507200 + ], + [ + 318679242859206967, + 16131519447675179661, + 1921616860721123326, + 9785286945407043240 ] - } + ] } \ No newline at end of file diff --git a/prover/data/keys/verification_compression_2_key.json b/prover/data/keys/verification_compression_2_key.json index 6110bba910d2..d0ee06b55fa2 100644 --- a/prover/data/keys/verification_compression_2_key.json +++ b/prover/data/keys/verification_compression_2_key.json @@ -1,275 +1,273 @@ { - "CompressionMode2Circuit": { - "fixed_parameters": { - "parameters": { - "num_columns_under_copy_permutation": 56, - 
"num_witness_columns": 74, - "num_constant_columns": 4, - "max_allowed_constraint_degree": 8 - }, - "lookup_parameters": "NoLookup", - "domain_size": 8192, - "total_tables_len": 0, - "public_inputs_locations": [ - [ - 0, - 6733 - ], - [ - 1, - 6733 - ], - [ - 2, - 6733 - ], - [ - 3, - 6733 - ] + "fixed_parameters": { + "parameters": { + "num_columns_under_copy_permutation": 56, + "num_witness_columns": 74, + "num_constant_columns": 4, + "max_allowed_constraint_degree": 8 + }, + "lookup_parameters": "NoLookup", + "domain_size": 8192, + "total_tables_len": 0, + "public_inputs_locations": [ + [ + 0, + 6733 ], - "extra_constant_polys_for_selectors": 4, - "table_ids_column_idxes": [], - "quotient_degree": 8, - "selectors_placement": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 2, - "num_constants": 0, - "degree": 7, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "Fork": { - "left": { - "Fork": { - "left": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 5, - "num_constants": 4, - "degree": 3, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "Fork": { - "left": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 1, - "num_constants": 0, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "GateOnly": { - "gate_idx": 6, - "num_constants": 0, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } + [ + 1, + 6733 + ], + [ + 2, + 6733 + ], + [ + 3, + 6733 + ] + ], + "extra_constant_polys_for_selectors": 4, + "table_ids_column_idxes": [], + "quotient_degree": 8, + "selectors_placement": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 2, + "num_constants": 0, + "degree": 7, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "Fork": { + "left": { + "Fork": { + "left": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 5, + "num_constants": 4, + "degree": 3, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "Fork": 
{ + "left": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 1, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "GateOnly": { + "gate_idx": 6, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false } } - }, - "right": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 3, - "num_constants": 0, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "GateOnly": { - "gate_idx": 7, - "num_constants": 0, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } + } + }, + "right": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 3, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "GateOnly": { + "gate_idx": 7, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false } } } } } } - }, - "right": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 10, - "num_constants": 4, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 8, - "num_constants": 0, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "GateOnly": { - "gate_idx": 9, - "num_constants": 0, - "degree": 0, - "needs_selector": true, - "is_lookup": false - } + } + }, + "right": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 10, + "num_constants": 4, + "degree": 2, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 8, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "GateOnly": { + "gate_idx": 9, + "num_constants": 0, + "degree": 0, + "needs_selector": true, + "is_lookup": false } } } } } } - }, - "right": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 4, - "num_constants": 2, - "degree": 3, - "needs_selector": true, - 
"is_lookup": false - } - }, - "right": { - "GateOnly": { - "gate_idx": 0, - "num_constants": 4, - "degree": 1, - "needs_selector": true, - "is_lookup": false - } + } + }, + "right": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 4, + "num_constants": 2, + "degree": 3, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "GateOnly": { + "gate_idx": 0, + "num_constants": 4, + "degree": 1, + "needs_selector": true, + "is_lookup": false } } } } } } - }, - "fri_lde_factor": 512, - "cap_size": 16 + } }, - "setup_merkle_tree_cap": [ - [ - 547951636004245258, - 2067145936569319297, - 17315326678965070615, - 9012275275825325303 - ], - [ - 2486396189205338767, - 5385179748876990489, - 12789897155193748843, - 7497933664242847399 - ], - [ - 13702746436389342903, - 9589251880490084074, - 13207771356722054410, - 18277427829245158207 - ], - [ - 4132253692687914984, - 18227617432180132982, - 15881065677296839946, - 1403286368390823633 - ], - [ - 10537224762035068222, - 6013042199447780632, - 4527978597574086909, - 3232268373890188602 - ], - [ - 2586162944092407317, - 10215063509965051427, - 2471146708845362031, - 2843440288814318700 - ], - [ - 13819482850499157207, - 16859612470974380629, - 12731935288150853121, - 2902152945695153589 - ], - [ - 9392752661572461801, - 18377486933710861607, - 16200620130393023183, - 8365721844043961790 - ], - [ - 13954020715669265948, - 16716522360195023699, - 5205834515171426098, - 956788140632831896 - ], - [ - 3827733001679131898, - 17013298671136301019, - 3148453751188517867, - 16820158206874288178 - ], - [ - 2520856291381872645, - 4479900374678830382, - 13546063785562157656, - 3382667115615401371 - ], - [ - 15571586947246471933, - 17916455524698235528, - 4478607733734426679, - 12215920239082917056 - ], - [ - 12759564984587008435, - 5665565051416464627, - 4275508608287240845, - 7137116798231081511 - ], - [ - 5980650793630714101, - 9419587808931370936, - 7668041611691340219, - 1071524060976592787 - ], - [ - 
8785315220105920748, - 14191148990050265889, - 8665763888918223523, - 10691214928896807830 - ], - [ - 16389667087475658615, - 7098223971082400278, - 3821718345101781981, - 17410025916491040451 - ] + "fri_lde_factor": 512, + "cap_size": 16 + }, + "setup_merkle_tree_cap": [ + [ + 547951636004245258, + 2067145936569319297, + 17315326678965070615, + 9012275275825325303 + ], + [ + 2486396189205338767, + 5385179748876990489, + 12789897155193748843, + 7497933664242847399 + ], + [ + 13702746436389342903, + 9589251880490084074, + 13207771356722054410, + 18277427829245158207 + ], + [ + 4132253692687914984, + 18227617432180132982, + 15881065677296839946, + 1403286368390823633 + ], + [ + 10537224762035068222, + 6013042199447780632, + 4527978597574086909, + 3232268373890188602 + ], + [ + 2586162944092407317, + 10215063509965051427, + 2471146708845362031, + 2843440288814318700 + ], + [ + 13819482850499157207, + 16859612470974380629, + 12731935288150853121, + 2902152945695153589 + ], + [ + 9392752661572461801, + 18377486933710861607, + 16200620130393023183, + 8365721844043961790 + ], + [ + 13954020715669265948, + 16716522360195023699, + 5205834515171426098, + 956788140632831896 + ], + [ + 3827733001679131898, + 17013298671136301019, + 3148453751188517867, + 16820158206874288178 + ], + [ + 2520856291381872645, + 4479900374678830382, + 13546063785562157656, + 3382667115615401371 + ], + [ + 15571586947246471933, + 17916455524698235528, + 4478607733734426679, + 12215920239082917056 + ], + [ + 12759564984587008435, + 5665565051416464627, + 4275508608287240845, + 7137116798231081511 + ], + [ + 5980650793630714101, + 9419587808931370936, + 7668041611691340219, + 1071524060976592787 + ], + [ + 8785315220105920748, + 14191148990050265889, + 8665763888918223523, + 10691214928896807830 + ], + [ + 16389667087475658615, + 7098223971082400278, + 3821718345101781981, + 17410025916491040451 ] - } + ] } \ No newline at end of file diff --git a/prover/data/keys/verification_compression_3_key.json 
b/prover/data/keys/verification_compression_3_key.json index 63bcbca918d1..12ee585e2aa2 100644 --- a/prover/data/keys/verification_compression_3_key.json +++ b/prover/data/keys/verification_compression_3_key.json @@ -1,275 +1,273 @@ { - "CompressionMode3Circuit": { - "fixed_parameters": { - "parameters": { - "num_columns_under_copy_permutation": 68, - "num_witness_columns": 62, - "num_constant_columns": 4, - "max_allowed_constraint_degree": 8 - }, - "lookup_parameters": "NoLookup", - "domain_size": 4096, - "total_tables_len": 0, - "public_inputs_locations": [ - [ - 0, - 3921 - ], - [ - 1, - 3921 - ], - [ - 2, - 3921 - ], - [ - 3, - 3921 - ] + "fixed_parameters": { + "parameters": { + "num_columns_under_copy_permutation": 68, + "num_witness_columns": 62, + "num_constant_columns": 4, + "max_allowed_constraint_degree": 8 + }, + "lookup_parameters": "NoLookup", + "domain_size": 4096, + "total_tables_len": 0, + "public_inputs_locations": [ + [ + 0, + 3921 ], - "extra_constant_polys_for_selectors": 4, - "table_ids_column_idxes": [], - "quotient_degree": 8, - "selectors_placement": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 2, - "num_constants": 0, - "degree": 7, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "Fork": { - "left": { - "Fork": { - "left": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 5, - "num_constants": 4, - "degree": 3, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "Fork": { - "left": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 1, - "num_constants": 0, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "GateOnly": { - "gate_idx": 6, - "num_constants": 0, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } + [ + 1, + 3921 + ], + [ + 2, + 3921 + ], + [ + 3, + 3921 + ] + ], + "extra_constant_polys_for_selectors": 4, + "table_ids_column_idxes": [], + "quotient_degree": 8, + "selectors_placement": { + "Fork": { + "left": { + 
"GateOnly": { + "gate_idx": 2, + "num_constants": 0, + "degree": 7, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "Fork": { + "left": { + "Fork": { + "left": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 5, + "num_constants": 4, + "degree": 3, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "Fork": { + "left": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 1, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "GateOnly": { + "gate_idx": 6, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false } } - }, - "right": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 3, - "num_constants": 0, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "GateOnly": { - "gate_idx": 7, - "num_constants": 0, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } + } + }, + "right": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 3, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "GateOnly": { + "gate_idx": 7, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false } } } } } } - }, - "right": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 10, - "num_constants": 4, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 8, - "num_constants": 0, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "GateOnly": { - "gate_idx": 9, - "num_constants": 0, - "degree": 0, - "needs_selector": true, - "is_lookup": false - } + } + }, + "right": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 10, + "num_constants": 4, + "degree": 2, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 8, + "num_constants": 
0, + "degree": 2, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "GateOnly": { + "gate_idx": 9, + "num_constants": 0, + "degree": 0, + "needs_selector": true, + "is_lookup": false } } } } } } - }, - "right": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 4, - "num_constants": 2, - "degree": 3, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "GateOnly": { - "gate_idx": 0, - "num_constants": 4, - "degree": 1, - "needs_selector": true, - "is_lookup": false - } + } + }, + "right": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 4, + "num_constants": 2, + "degree": 3, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "GateOnly": { + "gate_idx": 0, + "num_constants": 4, + "degree": 1, + "needs_selector": true, + "is_lookup": false } } } } } } - }, - "fri_lde_factor": 1024, - "cap_size": 16 + } }, - "setup_merkle_tree_cap": [ - [ - 10641025459109778062, - 11883023202620503830, - 1683460580494782039, - 519148484945527748 - ], - [ - 16854836347499347615, - 303914895048281273, - 8827365744032766288, - 7504886056916538809 - ], - [ - 18314198719448317357, - 10244100031050759980, - 9715483427672900470, - 13743747809877920521 - ], - [ - 1369713858293079339, - 2549296113044324317, - 10668787197724003505, - 13374818625902068059 - ], - [ - 11103822364554965998, - 1026596418533733662, - 11884006841460801711, - 16731940920918762822 - ], - [ - 8630324660886613970, - 15367922833873963141, - 538631244507362153, - 7257912375828853679 - ], - [ - 9919593341926293968, - 3835923795731948402, - 11747327108631657899, - 16458305757432722328 - ], - [ - 2324222966121258034, - 7413735041546781516, - 3485204485068670103, - 13438812829186206067 - ], - [ - 14474408113900929346, - 18061817840123336299, - 13649805566841328142, - 7129805169327250752 - ], - [ - 7300284926102090815, - 9725670656634972183, - 8151388754725946636, - 16128844202426193104 - ], - [ - 14707729241338825087, - 3836468945858242082, - 
16768198593546824923, - 14062570092263697929 - ], - [ - 14327921344401839882, - 6479671009164937102, - 13089063982746955768, - 2683374136101896185 - ], - [ - 14787507835189907449, - 6738567890582174761, - 16728637974735308667, - 537766695323716501 - ], - [ - 8795483109816567655, - 14842674875555441461, - 11332449659964817297, - 4865859390541196365 - ], - [ - 11192585970910716125, - 3048970808099494060, - 7684470944992674010, - 9304044469675055850 - ], - [ - 5655924464102745207, - 8506077190272738679, - 8512156535337592685, - 15415747564657306809 - ] + "fri_lde_factor": 1024, + "cap_size": 16 + }, + "setup_merkle_tree_cap": [ + [ + 10641025459109778062, + 11883023202620503830, + 1683460580494782039, + 519148484945527748 + ], + [ + 16854836347499347615, + 303914895048281273, + 8827365744032766288, + 7504886056916538809 + ], + [ + 18314198719448317357, + 10244100031050759980, + 9715483427672900470, + 13743747809877920521 + ], + [ + 1369713858293079339, + 2549296113044324317, + 10668787197724003505, + 13374818625902068059 + ], + [ + 11103822364554965998, + 1026596418533733662, + 11884006841460801711, + 16731940920918762822 + ], + [ + 8630324660886613970, + 15367922833873963141, + 538631244507362153, + 7257912375828853679 + ], + [ + 9919593341926293968, + 3835923795731948402, + 11747327108631657899, + 16458305757432722328 + ], + [ + 2324222966121258034, + 7413735041546781516, + 3485204485068670103, + 13438812829186206067 + ], + [ + 14474408113900929346, + 18061817840123336299, + 13649805566841328142, + 7129805169327250752 + ], + [ + 7300284926102090815, + 9725670656634972183, + 8151388754725946636, + 16128844202426193104 + ], + [ + 14707729241338825087, + 3836468945858242082, + 16768198593546824923, + 14062570092263697929 + ], + [ + 14327921344401839882, + 6479671009164937102, + 13089063982746955768, + 2683374136101896185 + ], + [ + 14787507835189907449, + 6738567890582174761, + 16728637974735308667, + 537766695323716501 + ], + [ + 8795483109816567655, + 
14842674875555441461, + 11332449659964817297, + 4865859390541196365 + ], + [ + 11192585970910716125, + 3048970808099494060, + 7684470944992674010, + 9304044469675055850 + ], + [ + 5655924464102745207, + 8506077190272738679, + 8512156535337592685, + 15415747564657306809 ] - } + ] } \ No newline at end of file diff --git a/prover/data/keys/verification_compression_4_key.json b/prover/data/keys/verification_compression_4_key.json index 5bd28e4d6d6b..821fb25d4048 100644 --- a/prover/data/keys/verification_compression_4_key.json +++ b/prover/data/keys/verification_compression_4_key.json @@ -1,1741 +1,1739 @@ { - "CompressionMode4Circuit": { - "fixed_parameters": { - "parameters": { - "num_columns_under_copy_permutation": 48, - "num_witness_columns": 0, - "num_constant_columns": 4, - "max_allowed_constraint_degree": 8 - }, - "lookup_parameters": "NoLookup", - "domain_size": 32768, - "total_tables_len": 0, - "public_inputs_locations": [ - [ - 0, - 31025 - ], - [ - 1, - 31025 - ], - [ - 2, - 31025 - ], - [ - 3, - 31025 - ] + "fixed_parameters": { + "parameters": { + "num_columns_under_copy_permutation": 48, + "num_witness_columns": 0, + "num_constant_columns": 4, + "max_allowed_constraint_degree": 8 + }, + "lookup_parameters": "NoLookup", + "domain_size": 32768, + "total_tables_len": 0, + "public_inputs_locations": [ + [ + 0, + 31025 ], - "extra_constant_polys_for_selectors": 4, - "table_ids_column_idxes": [], - "quotient_degree": 8, - "selectors_placement": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 4, - "num_constants": 1, - "degree": 7, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "Fork": { - "left": { - "Fork": { - "left": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 7, - "num_constants": 4, - "degree": 3, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "Fork": { - "left": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 1, - "num_constants": 0, - "degree": 2, - "needs_selector": true, - 
"is_lookup": false - } - }, - "right": { - "GateOnly": { - "gate_idx": 8, - "num_constants": 0, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } + [ + 1, + 31025 + ], + [ + 2, + 31025 + ], + [ + 3, + 31025 + ] + ], + "extra_constant_polys_for_selectors": 4, + "table_ids_column_idxes": [], + "quotient_degree": 8, + "selectors_placement": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 4, + "num_constants": 1, + "degree": 7, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "Fork": { + "left": { + "Fork": { + "left": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 7, + "num_constants": 4, + "degree": 3, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "Fork": { + "left": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 1, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "GateOnly": { + "gate_idx": 8, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false } } - }, - "right": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 5, - "num_constants": 0, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "GateOnly": { - "gate_idx": 9, - "num_constants": 0, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } + } + }, + "right": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 5, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "GateOnly": { + "gate_idx": 9, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false } } } } } } - }, - "right": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 12, - "num_constants": 4, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "Fork": { - "left": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 10, - "num_constants": 0, - "degree": 2, - "needs_selector": true, - "is_lookup": 
false - } - }, - "right": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 3, - "num_constants": 0, - "degree": 1, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "GateOnly": { - "gate_idx": 11, - "num_constants": 0, - "degree": 0, - "needs_selector": true, - "is_lookup": false - } + } + }, + "right": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 12, + "num_constants": 4, + "degree": 2, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "Fork": { + "left": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 10, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 3, + "num_constants": 0, + "degree": 1, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "GateOnly": { + "gate_idx": 11, + "num_constants": 0, + "degree": 0, + "needs_selector": true, + "is_lookup": false } } } } - }, - "right": { - "GateOnly": { - "gate_idx": 2, - "num_constants": 0, - "degree": 1, - "needs_selector": true, - "is_lookup": false - } + } + }, + "right": { + "GateOnly": { + "gate_idx": 2, + "num_constants": 0, + "degree": 1, + "needs_selector": true, + "is_lookup": false } } } } } } - }, - "right": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 6, - "num_constants": 2, - "degree": 3, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "GateOnly": { - "gate_idx": 0, - "num_constants": 4, - "degree": 1, - "needs_selector": true, - "is_lookup": false - } + } + }, + "right": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 6, + "num_constants": 2, + "degree": 3, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "GateOnly": { + "gate_idx": 0, + "num_constants": 4, + "degree": 1, + "needs_selector": true, + "is_lookup": false } } } } } } - }, - "fri_lde_factor": 1024, - "cap_size": 256 + } }, - "setup_merkle_tree_cap": [ - [ - 4079970138244939135, - 
8104423815789893989, - 13632023733358255593, - 13346032064987220184 - ], - [ - 12809474467971181399, - 17625072709782170540, - 6594064732490183681, - 8714625183861721175 - ], - [ - 7171332554251640845, - 18284102552959382973, - 3637671217742590741, - 16765711635591410580 - ], - [ - 763060458426938791, - 16159161330183244037, - 2533596966746622429, - 13441156130846388257 - ], - [ - 5525639466278377590, - 11073269405581050439, - 4833809564449931762, - 9314656237570540457 - ], - [ - 1137979745697828455, - 14966651582384421379, - 13384164937770371175, - 14074716572336098280 - ], - [ - 11906323633157650458, - 1175046947414337453, - 4283188400738641426, - 13792950671402095073 - ], - [ - 9310952691940114113, - 4820182619662404377, - 9479969369442954044, - 8512369778696933060 - ], - [ - 16946138649176650488, - 9475913097060757863, - 6608972026504748865, - 7215353463715689998 - ], - [ - 349222139041408316, - 1507489261015035841, - 7596560681014222842, - 8808235055298411214 - ], - [ - 8007370340350072341, - 2549612240734498963, - 105233771877650291, - 8758305155554767568 - ], - [ - 17190433766023179459, - 1755290160765045579, - 5619893747453895498, - 9139477571875211116 - ], - [ - 15755591792726495281, - 17501027650687476798, - 5579351800673089158, - 5931628352091930640 - ], - [ - 7269556889579729096, - 8636006399519326592, - 15556402257631967994, - 12423945489648480054 - ], - [ - 6411602498376025591, - 16377643789139599240, - 1264958035077382258, - 16706608130239118866 - ], - [ - 14246313207191082462, - 2201136836754342130, - 12301078636732201867, - 10991169525653220176 - ], - [ - 2101214821629968022, - 11996351959015322539, - 15461759537099893272, - 5820786797852667153 - ], - [ - 10546547765646585412, - 16670337245061195679, - 14026017154393171841, - 15865302123450709636 - ], - [ - 14546229884213013754, - 1736742091912698149, - 9563343697509131991, - 15957923614096898715 - ], - [ - 10053685874691243725, - 4837706724113512400, - 2806343138575002644, - 1217140278849814125 - 
], - [ - 1445801437405723985, - 16370378782744105412, - 14937074551650631907, - 9349528097788576916 - ], - [ - 313624596753562026, - 2737902703720676400, - 1491628534561577719, - 4958865646973460786 - ], - [ - 4749220647213961766, - 1523283588343260013, - 14435216093380418825, - 12058673416937256217 - ], - [ - 12221430523773573392, - 2119312783391782040, - 113666136908336034, - 5557001496562966971 - ], - [ - 8943540790257615175, - 15963411684352909178, - 12833466372967679781, - 16755820399299554647 - ], - [ - 10568513175105063273, - 8152439411055527457, - 13191469024927433668, - 16583335250317103083 - ], - [ - 8728976170665901948, - 12047344511237318236, - 2665537844838432218, - 17220904479682973198 - ], - [ - 9924606303320583729, - 15168541434821629608, - 15267127988736394576, - 4612730729137963542 - ], - [ - 15187295020445613202, - 10979580190616738578, - 2487576517666265788, - 7799526392944971411 - ], - [ - 14001553124798608592, - 983427750240831173, - 10498633617369923042, - 15506856636873354247 - ], - [ - 9695584132680310661, - 15058940491468263542, - 16902767176197422459, - 4307070711535476522 - ], - [ - 16170129192711277380, - 15594325905653564554, - 8589261531750893019, - 16380506012444184509 - ], - [ - 1298482137029698068, - 7046350962355653091, - 4851290198964994943, - 9583994567704854822 - ], - [ - 12002196043506075596, - 6568364019768992619, - 2982180816490272604, - 11005221893883916442 - ], - [ - 11195444762924129211, - 4080035169749061493, - 12552903944803877103, - 1737834024116304977 - ], - [ - 14989371064091658414, - 7021573041416327989, - 10313274026717983047, - 12022812057768375145 - ], - [ - 4605818886224729183, - 1445887803225464457, - 10940822604464014966, - 9884898953608657154 - ], - [ - 16595525430044003662, - 4052207436631314262, - 10293047811043558578, - 12338411972889057265 - ], - [ - 17022547446139753073, - 10651710177937554340, - 6940830771318907626, - 6641453047926556761 - ], - [ - 10509711725088536487, - 4962100381463002257, - 
770256729671337313, - 5857473086438310769 - ], - [ - 13267845343537121240, - 12321261427265206453, - 7120567552214639265, - 17302478469072029437 - ], - [ - 15157833123384882226, - 8158551777234184706, - 12348570826200339644, - 14734791159756873167 - ], - [ - 3385809684799950973, - 12900259269431347094, - 17672545490781962423, - 8013657472810936941 - ], - [ - 9546288143416776531, - 8356139039634733121, - 17477514712681213981, - 9515108802170635886 - ], - [ - 15455406291521834876, - 18083571768045422071, - 2009769659787731673, - 7263333966403015872 - ], - [ - 61130843037975416, - 18121830552682131571, - 9667884638217866764, - 6932529860678316702 - ], - [ - 15509823136620654552, - 6676934950708893059, - 13351533805402596549, - 14340984679679191575 - ], - [ - 15880919573481609124, - 14836853565796210968, - 15058034487233879726, - 11000215022533975317 - ], - [ - 10611445668418674805, - 9013654201900818768, - 1617123938865368568, - 16484766808972651318 - ], - [ - 13652743593752436493, - 13843264938563223779, - 10417182020840583510, - 16106479734523413326 - ], - [ - 17003899699844258217, - 17283344455088140068, - 17379245612757089745, - 1779134255761065541 - ], - [ - 11263035503709897880, - 1042839549428969974, - 6638626002246118718, - 1733549590874866310 - ], - [ - 13054247239013491523, - 14047141927303128296, - 2417752406294216107, - 5255181314162279551 - ], - [ - 1784021430104795099, - 3912913366054617723, - 8208877227749479136, - 4259205675774136108 - ], - [ - 801501735568309733, - 3220301656144181340, - 6144806934797449899, - 10479314068206076631 - ], - [ - 864619228795552444, - 2705960387071331097, - 15702747436396598972, - 14131926486457006694 - ], - [ - 15349058561965444062, - 16738574548455051915, - 4609208407223910426, - 11352062762791244662 - ], - [ - 16513285945090237658, - 6443875617703638442, - 17271016792575259072, - 426511686464611712 - ], - [ - 13863640462921288570, - 3362196992389213358, - 3576502768827883720, - 49219399301262346 - ], - [ - 
14852014958673205840, - 11855705779815876870, - 8141476064801289266, - 15234506534160954104 - ], - [ - 6292328028167907211, - 6280576736728723621, - 1814915348986377963, - 3167789754322444841 - ], - [ - 6093870088272451581, - 12912333461662707605, - 16038365026642624017, - 10037265733136426182 - ], - [ - 5658473063427008398, - 18194712009700816671, - 3316188885192160812, - 15032222957557455487 - ], - [ - 10746739009272326386, - 4381896503060636995, - 13390900032103397758, - 11290296227744685851 - ], - [ - 7946268274905655961, - 12464655577446728520, - 3476152367809517039, - 3103729420470642525 - ], - [ - 15652670185218353632, - 17922284652588341925, - 3502474425690015164, - 3577240886292713575 - ], - [ - 13130871911189865405, - 2598654314674217849, - 6130707868677033708, - 11318789126458218054 - ], - [ - 14442525432094720658, - 10861520212227307580, - 10308956417568414174, - 101391938116250123 - ], - [ - 8015353735581004985, - 11791356411393233202, - 15134704188872104026, - 2916434958525680154 - ], - [ - 17776268955107890735, - 3978578384988358930, - 7512482176825678493, - 4207030036138354730 - ], - [ - 4778990000266723258, - 12646326430153649418, - 15602744563429649630, - 13225673449752707138 - ], - [ - 9874615904187846642, - 785813436574501824, - 7422887093860615814, - 12575067740028803899 - ], - [ - 2168314129068013776, - 5876170257821873353, - 6574425448964925595, - 13006818379253162445 - ], - [ - 18203396842093379784, - 1637929000957737331, - 16472903772026963266, - 10222221849709050007 - ], - [ - 8175241234035594021, - 11131701218615583700, - 2590544239124773213, - 13103205380700956240 - ], - [ - 10265760611171832222, - 10772001381712818941, - 9769892419500727073, - 3757944212165946207 - ], - [ - 12890674101364520820, - 17436315026775313277, - 17685632275460283213, - 11597862884205200021 - ], - [ - 6541476428026208530, - 1572664979326233234, - 7220008363221767753, - 5030515443926464143 - ], - [ - 1736392533925503753, - 4028348047388923757, - 
1322644146499193870, - 7899110541856172042 - ], - [ - 824249951832333568, - 3650442342240934725, - 9551872020421810878, - 4459093260544332906 - ], - [ - 4897538583957298940, - 9763535759998548001, - 17962120881583547780, - 16423532894596203353 - ], - [ - 15621981029420175013, - 12716027808121583594, - 5698997410684688521, - 14893034348453337964 - ], - [ - 57123003983559085, - 1764189019842202487, - 12021079431755992353, - 4611397927843211028 - ], - [ - 11271204071286517021, - 11016763947620063917, - 10240257375147160558, - 6936015967868091242 - ], - [ - 13263159746058683924, - 2115544844352440396, - 17037308825725243672, - 15392971905394245550 - ], - [ - 3040132672294734369, - 8799577028392802142, - 4698252977355096983, - 12029143421110018733 - ], - [ - 3367138533962530706, - 11052858577054708144, - 13777717310915519372, - 1118835749836496249 - ], - [ - 18247935773305597594, - 4791613470261596504, - 2831278673598556804, - 13054754323096013606 - ], - [ - 16660028838922677973, - 9396606308613201784, - 10417460426584440111, - 8380249813777095858 - ], - [ - 8666375557464313815, - 6304275044317841466, - 17005733772133350822, - 14343984316653970581 - ], - [ - 6123226050306352446, - 17155936354664339685, - 8617663439980301335, - 15168460408431057703 - ], - [ - 4711490317757564870, - 4007952793500570494, - 1619724888362180342, - 135640370275358583 - ], - [ - 3568489405188009952, - 10063008786740635782, - 3645030669998184283, - 11251876386972219611 - ], - [ - 8493411482257270591, - 2420069605766207080, - 4319637578909518850, - 7366398383772225801 - ], - [ - 10514279907222008963, - 4576784878418470993, - 13703409256143697610, - 12595458216589602418 - ], - [ - 6518196967791403281, - 6267315352755318878, - 17017845178414282191, - 6219869337455185408 - ], - [ - 9419176152740350408, - 7255025946876060056, - 13692775553690785222, - 6482964045212267647 - ], - [ - 10371771524251272238, - 7264554963155886115, - 11851596230763322123, - 9644034659422561803 - ], - [ - 
16478128758974022864, - 10586973455001136017, - 10940854718906381238, - 4322569144361190672 - ], - [ - 1408324526595066767, - 8225966379387133915, - 575234437450946120, - 4532159296152332514 - ], - [ - 14713730919085929141, - 8299030584912591200, - 12653081274697230155, - 7281510145820913452 - ], - [ - 13065256168386635886, - 11725981090441231815, - 9241488846717901227, - 2660860847185625194 - ], - [ - 17344786539017984359, - 16667262327893622443, - 2245517842543022928, - 5289295469095289187 - ], - [ - 7712996453163241402, - 13291387059438293554, - 15475873660461985862, - 421113001330390910 - ], - [ - 6600556276994686684, - 15236925279661890975, - 9085425445873596043, - 13489979627098239874 - ], - [ - 16522817509787049424, - 7880119001244729499, - 2065132977854154043, - 13961595284131100852 - ], - [ - 10136466656472241068, - 7462847284592418995, - 5211060891495522982, - 3429812446680794228 - ], - [ - 1290650595638649346, - 13577818566931833714, - 2388552128026682549, - 13062451014076475307 - ], - [ - 6070278064232182539, - 12399409837026713208, - 7006572737093555522, - 15229533042676039053 - ], - [ - 4546934482854074283, - 12178466512696871783, - 1691876649621188863, - 11473100481876466355 - ], - [ - 1236556729692669368, - 5549499687074217624, - 5958432331744161923, - 17960851426383425726 - ], - [ - 11313384441935481196, - 506230122531558484, - 17480105233549292653, - 3497216050489019068 - ], - [ - 14180330000703954943, - 17537654316605413864, - 13893109338811179145, - 12773435230801104558 - ], - [ - 1789329554161917105, - 3821166735342421302, - 492899993274707886, - 12499398353307226422 - ], - [ - 3089012429777109607, - 10341877957962585240, - 7745611914889843192, - 7156440096296115871 - ], - [ - 14047653524331542619, - 14986552290847971271, - 8344707263938736217, - 5768162811052933747 - ], - [ - 3282503115484594392, - 3455102204735354908, - 6152707383721113591, - 17881334209336571790 - ], - [ - 6895592993382091339, - 14944664838702526606, - 14158277091240311047, 
- 13527182248087302901 - ], - [ - 3709694612749923839, - 2612160447494745836, - 1406956200130224054, - 1366284088614500522 - ], - [ - 9948565747047514924, - 7052268914527763961, - 18019747825808196928, - 14284818641981927036 - ], - [ - 3586214080877747437, - 18259935751422321449, - 7615481956713457290, - 14815795422370035012 - ], - [ - 127402153735286355, - 14056365981282611643, - 13874770993027970720, - 619630456240824192 - ], - [ - 14160230678035047748, - 5889480608083619036, - 7255863117098942211, - 15199656156616971091 - ], - [ - 3095946492180700178, - 4006392168790405864, - 6879209997262175243, - 299198988833119182 - ], - [ - 7324876720724904977, - 16163169761447383767, - 12506750534396872240, - 2471214392724636123 - ], - [ - 9097827375986012190, - 14565705700237004602, - 14313871429102448506, - 11871591868576605558 - ], - [ - 9799079875206163527, - 16651435323107739422, - 12479220145998529239, - 3293893419326195678 - ], - [ - 10308258292354323463, - 12348814193023657478, - 7617037745166680507, - 4395321161975898727 - ], - [ - 5350571522462508856, - 13168312859432101164, - 16051585591307343397, - 14229124174436500603 - ], - [ - 7166265506509230502, - 7161154874163920078, - 8397077088165043439, - 11599821267858973994 - ], - [ - 7480649953471058826, - 10095090134750662786, - 10564643199160392009, - 13520621355659318250 - ], - [ - 10857779123272725120, - 15686674254635154618, - 8867730228951457290, - 10801878621148582310 - ], - [ - 15060256657026761959, - 9661131710054923901, - 672914961534049680, - 12834176932586128630 - ], - [ - 949448556211506716, - 15381404281513795275, - 11155631745566842701, - 2704703395424058389 - ], - [ - 9515248836992226419, - 8107063249314601218, - 7169007343668853680, - 4398533039543530267 - ], - [ - 5048521287079394266, - 469091126174822039, - 15224036421670482877, - 2109812614761568090 - ], - [ - 16374381239073062350, - 1415659237932604676, - 15755278916993952324, - 4477381067900828660 - ], - [ - 5908643369108814128, - 
16993840892601214320, - 9745154490046936812, - 12675544964328790510 - ], - [ - 13069705042519055121, - 114347527485448732, - 13607966080279761362, - 3198027414138631154 - ], - [ - 2126201229935790021, - 11762486874095747924, - 4405447136645256490, - 4546150485765128281 - ], - [ - 11517522298498019347, - 9683562397139382420, - 17997189851623366056, - 15075626419278367114 - ], - [ - 17300749044902256514, - 13180322067670835054, - 13510754977375972746, - 16789287824862069583 - ], - [ - 12482184120647536152, - 6673581288163589602, - 12763923036507066435, - 14094147320392535720 - ], - [ - 14259634259170511729, - 4875730172347810212, - 84468584086030163, - 8303744294867418687 - ], - [ - 12134644548742181831, - 8520649738990136053, - 16059641390155564470, - 5416979754786543764 - ], - [ - 2092318536537973522, - 11767101658840205425, - 6426865564773940591, - 14647807617379392454 - ], - [ - 450267335414320409, - 9743488482513949727, - 60939637019009091, - 17766639401038188429 - ], - [ - 10940698167282175513, - 14751185897733548414, - 12951179730146965260, - 10875205992060835228 - ], - [ - 13364210655207872649, - 6433202412249539021, - 9336639704508334838, - 13505754981533474450 - ], - [ - 13696994814871272283, - 4490009507613100808, - 13804331039068475494, - 5468505803402562249 - ], - [ - 4777271447313491592, - 9940020440702119485, - 1101181637482946058, - 644606693604161858 - ], - [ - 15097959041405784257, - 12307130809829041484, - 17258499521253983865, - 10749514694060779271 - ], - [ - 10190208760039486310, - 4226050742652743727, - 8941293745652956008, - 16590522505585423775 - ], - [ - 345192297192706835, - 446809322130030426, - 14301758245246751976, - 9719056934946360728 - ], - [ - 12881116110058824616, - 9527685388634025534, - 11091584611747550280, - 17824266919172201804 - ], - [ - 1574239507079237453, - 8582461342969153012, - 12264251495254783586, - 17637680562426650364 - ], - [ - 10192143269632239849, - 13007991288866360616, - 15140175217087410009, - 
12752906480866993968 - ], - [ - 1870988781431107523, - 12976682012942521853, - 16429062848935476734, - 5798003006437035985 - ], - [ - 14584766751103981854, - 17047930049374537599, - 16615811923791215213, - 4382675641211926136 - ], - [ - 2714557854446349417, - 7104188094330759435, - 4252584297915226480, - 8214575447584991866 - ], - [ - 11055975993893155083, - 4263965260378501220, - 14890629048371839452, - 12114106467257211085 - ], - [ - 1696215674801501046, - 2266423673248165355, - 17663731467117251495, - 3131727223950690444 - ], - [ - 9538270027605424981, - 14695627410990929758, - 1100103051852107643, - 7780696685697285282 - ], - [ - 3640909033400638252, - 4685184407249885115, - 1159593584695683466, - 5810155451150284456 - ], - [ - 2142365296746734893, - 15072293834204351367, - 10906328353503930623, - 567086761313849045 - ], - [ - 16792244974449676806, - 5281042885555694958, - 803668749758477336, - 9632569928427671141 - ], - [ - 11458024915503659884, - 12163640565717382175, - 5426693528062973190, - 5318451835514658298 - ], - [ - 17468129298890753615, - 16365877952312079431, - 4255216049796249864, - 17519162521801620751 - ], - [ - 16881958987441626136, - 5348165195536725074, - 1110205049250373278, - 3270960662287149636 - ], - [ - 1226689297335858879, - 5569006707448968175, - 12862213669554513549, - 13476251685033412618 - ], - [ - 17469709307599228055, - 10239286546577328519, - 11089348310818650527, - 16456908862000481769 - ], - [ - 1488822551903623063, - 1027724056630207939, - 17697306299429716060, - 16855149560650760331 - ], - [ - 6973362373999440967, - 10660889882557485296, - 10425405668156759777, - 6301066711999204638 - ], - [ - 414734469595850357, - 14255487988571755338, - 8593074929345797359, - 16605827151888774599 - ], - [ - 16875797800314241514, - 12421815564591930411, - 2652019482537730851, - 6038511721575984408 - ], - [ - 5083130506118840282, - 10236076700696608757, - 89798661707215807, - 11047945556430945234 - ], - [ - 9052849932438286930, - 
766869919370273028, - 9513938692410545262, - 9325822678740812785 - ], - [ - 12214954222172122496, - 2137993706327729295, - 9068121284446752470, - 16719368474204358065 - ], - [ - 12846496216910539260, - 14961944612175511231, - 4871177192578157967, - 871092876547424272 - ], - [ - 8432570778385641968, - 16742266319428376459, - 5202631662503988626, - 9179077767480984950 - ], - [ - 6907086971366422845, - 3630230185711868320, - 6363326632303072661, - 2155376917773117715 - ], - [ - 7260111458604834720, - 15869987976273960274, - 5471864801301693834, - 6186332987204233992 - ], - [ - 7579472070725254127, - 4175627325036864033, - 2726864388901619006, - 17182366690618521544 - ], - [ - 10366850615209571638, - 594983328964439422, - 5708701194817491298, - 8320252948121397632 - ], - [ - 7681701787398279771, - 8922827186766353002, - 16390646056989747875, - 16861016347708202354 - ], - [ - 18349539097929417681, - 11011444774390028397, - 14929165998832565332, - 4785028369071377058 - ], - [ - 12474665121325276393, - 6203136048752329670, - 8857782650797383532, - 1205842604249274162 - ], - [ - 4986065786927816995, - 16774907198741044886, - 3781179129421387661, - 10415439284245950953 - ], - [ - 11574881136629381449, - 12379735189792408408, - 16543138864606381298, - 2352837174543239308 - ], - [ - 2755808935111250351, - 11413691641496289631, - 7433067894962399074, - 9263092778396333619 - ], - [ - 510815690565069916, - 1326078112306964442, - 2280931398968953373, - 16372347667232436246 - ], - [ - 13049250986319352420, - 1312888884206484061, - 5191125907416704942, - 12910692959434040696 - ], - [ - 12641131533859353693, - 806990755667531292, - 8339582499246533062, - 15459494525405539126 - ], - [ - 5334506198163160487, - 12788298331318017466, - 3878039519170214619, - 14733573925349283487 - ], - [ - 5190557862976962645, - 13714749928322729293, - 10374841881342744627, - 15288181470367912639 - ], - [ - 16231478233162755783, - 12802076246758748329, - 14761367109514758527, - 14411193599867587859 - ], 
- [ - 8198731374774297764, - 3230920736551397516, - 11406632512199098782, - 10984385724696684374 - ], - [ - 2230082119129443391, - 15626857176238648658, - 10525636546408036753, - 713830293126230928 - ], - [ - 4947353515945045110, - 14890750024678328507, - 11992121003625623155, - 9247439145474008589 - ], - [ - 2476255618728017726, - 12423839440278091466, - 11131574346213868564, - 15878857548422208415 - ], - [ - 14063080495591128251, - 17832973426635180302, - 16234044432643152520, - 3790040177800561340 - ], - [ - 17654367747593125286, - 14771556851404254590, - 7054247606189603403, - 16389491650139306501 - ], - [ - 16945878684828151504, - 3218677612764542854, - 4129066622553533042, - 12411143505986390453 - ], - [ - 18139421373310678346, - 12565918292252415379, - 15834446535016534449, - 6148063108469358612 - ], - [ - 12292679265601130740, - 13386299384345137465, - 13364501573803460934, - 8733289102497600737 - ], - [ - 6160628888236701076, - 812243480588759160, - 13247424403141713486, - 10099500208290322254 - ], - [ - 10969580462260030094, - 16960852675866882693, - 14194600920049272873, - 2806447904271636378 - ], - [ - 5308612537476412691, - 2017232117327961782, - 4894043882170046108, - 7279530397891640384 - ], - [ - 4144733560007592583, - 5771344498053405563, - 15232642512723560772, - 17432794023851843115 - ], - [ - 13083133801966421336, - 6640909047775641455, - 7780326336877273740, - 3087015496142074629 - ], - [ - 4093431377560277258, - 11212918479165511440, - 8471957796338043904, - 10134266515436065987 - ], - [ - 7717602192687906239, - 16498238127262085325, - 2796866393834457756, - 6080730252731365434 - ], - [ - 15198975112515001105, - 3427384774999580491, - 9100144446909554927, - 12939101842468805683 - ], - [ - 3513270711153070627, - 6760090775854266850, - 2266114679123970134, - 2453164217927145379 - ], - [ - 8454909244339253322, - 3937159550392777548, - 17092850706817714179, - 14355781411083550660 - ], - [ - 14878805576521609961, - 14710363931287834639, - 
3026158727470434848, - 664513313042659381 - ], - [ - 8491751794767535837, - 13990906406579930747, - 10128524312616192734, - 11329057191619161908 - ], - [ - 14453858867510385195, - 7990714203132759977, - 5706371533563158927, - 4128865490009717707 - ], - [ - 7793399965923611298, - 13429890700846305668, - 6463577189314693043, - 1403793223780405881 - ], - [ - 14506683092001193039, - 2676502583153744002, - 432387115071868059, - 5324230977146090619 - ], - [ - 5163412501615965388, - 13723489526376938207, - 13122017355634831416, - 9867447233188068220 - ], - [ - 2169005641163984550, - 9186165665928554404, - 17910278673295197365, - 11381918494994166306 - ], - [ - 5758488670302072688, - 18181832326798847807, - 10253561012448568621, - 6796662601196927726 - ], - [ - 15384372095875884299, - 15758325895562962606, - 979979951038690002, - 4884709252008623452 - ], - [ - 9658033725016409161, - 11061216333798295999, - 10656853269529126466, - 10494481123210514991 - ], - [ - 10565302142069456280, - 4482395191206056917, - 1026499487185603956, - 7313927946426328835 - ], - [ - 914075172237814299, - 4712333083011398137, - 5903513285138462152, - 27234332817070790 - ], - [ - 6165097805071238615, - 12405114942064306895, - 644764352603959519, - 9890780672007958877 - ], - [ - 15139300154181340570, - 3763515407393500498, - 747278468332165545, - 12401993133389174873 - ], - [ - 1560524376846745097, - 4692895970840561536, - 8733218268882211058, - 11050746819680980390 - ], - [ - 2481724169828996194, - 13708290945708520871, - 4344699263192501279, - 7140288180697285542 - ], - [ - 296768972336373070, - 10813546555235346490, - 16170115378690748299, - 12802703395795280191 - ], - [ - 14251656741089442088, - 4214370918062569580, - 10032045984805229135, - 10373930023291015419 - ], - [ - 17093829866009459220, - 12859940170163163826, - 18210851741109329458, - 16336121553624015019 - ], - [ - 4407838941860440582, - 11854162025145257857, - 12199975997855699065, - 2502264419023857875 - ], - [ - 
12466276041107521420, - 5321886014476179588, - 7215900333286173827, - 6244069735947509039 - ], - [ - 5660993473650802623, - 11420246285415070761, - 4658057095848491886, - 6674509485613885322 - ], - [ - 16443753989364975722, - 553371339109155814, - 6176240530340905521, - 5112633271096324804 - ], - [ - 17344015413911877127, - 17921285748955845167, - 472207513366524064, - 9508903352311585055 - ], - [ - 2373580342727259876, - 17626197481532596235, - 5443748763702275935, - 16239948710647217063 - ], - [ - 1893590215294085348, - 16922448579707654020, - 3562803535149926022, - 5008057946422210179 - ], - [ - 9528486283906265098, - 16540897732900210948, - 7987610981843235045, - 8503796247003141361 - ], - [ - 7151582572260738324, - 8798367050545135378, - 17208414855521381994, - 16950417755338618580 - ], - [ - 16098392335079248174, - 15218667139563807688, - 3579977320329860139, - 5640122365751888962 - ], - [ - 9584075531015381817, - 16275032664923657166, - 13987470479712899701, - 2201274726914907785 - ], - [ - 9927398107804295577, - 5687396867124688978, - 10639332412514046012, - 10390174472889436172 - ], - [ - 7906944927470502930, - 9629467849018808831, - 16716405557811073978, - 2687565502976952246 - ], - [ - 2357326643063601119, - 12372080030488676843, - 17862973568880540528, - 9758076484635237383 - ], - [ - 6955909340644692347, - 9788637714503672561, - 10015565612607022102, - 5622624961359105679 - ], - [ - 8476474886341847071, - 14658488683343597259, - 14961921379651609802, - 14123545985510427091 - ], - [ - 1388357604146037902, - 15713286211316016536, - 5612470285625982657, - 6487364037172184435 - ], - [ - 15189267229737199628, - 6502744762576006736, - 13803864300857374808, - 5415597147102054081 - ], - [ - 11784930967835797340, - 11401415610557072973, - 1534900691166631710, - 4724373879472771719 - ], - [ - 10801273164646452905, - 11959510535773071656, - 13440563277297614072, - 11732355214966685480 - ], - [ - 709242285317259075, - 14093566268118334778, - 17068650979174969466, 
- 14497803119243527377 - ], - [ - 1887862044793269490, - 8537802901513975848, - 1791222441789963991, - 9563652556460700888 - ] + "fri_lde_factor": 1024, + "cap_size": 256 + }, + "setup_merkle_tree_cap": [ + [ + 4079970138244939135, + 8104423815789893989, + 13632023733358255593, + 13346032064987220184 + ], + [ + 12809474467971181399, + 17625072709782170540, + 6594064732490183681, + 8714625183861721175 + ], + [ + 7171332554251640845, + 18284102552959382973, + 3637671217742590741, + 16765711635591410580 + ], + [ + 763060458426938791, + 16159161330183244037, + 2533596966746622429, + 13441156130846388257 + ], + [ + 5525639466278377590, + 11073269405581050439, + 4833809564449931762, + 9314656237570540457 + ], + [ + 1137979745697828455, + 14966651582384421379, + 13384164937770371175, + 14074716572336098280 + ], + [ + 11906323633157650458, + 1175046947414337453, + 4283188400738641426, + 13792950671402095073 + ], + [ + 9310952691940114113, + 4820182619662404377, + 9479969369442954044, + 8512369778696933060 + ], + [ + 16946138649176650488, + 9475913097060757863, + 6608972026504748865, + 7215353463715689998 + ], + [ + 349222139041408316, + 1507489261015035841, + 7596560681014222842, + 8808235055298411214 + ], + [ + 8007370340350072341, + 2549612240734498963, + 105233771877650291, + 8758305155554767568 + ], + [ + 17190433766023179459, + 1755290160765045579, + 5619893747453895498, + 9139477571875211116 + ], + [ + 15755591792726495281, + 17501027650687476798, + 5579351800673089158, + 5931628352091930640 + ], + [ + 7269556889579729096, + 8636006399519326592, + 15556402257631967994, + 12423945489648480054 + ], + [ + 6411602498376025591, + 16377643789139599240, + 1264958035077382258, + 16706608130239118866 + ], + [ + 14246313207191082462, + 2201136836754342130, + 12301078636732201867, + 10991169525653220176 + ], + [ + 2101214821629968022, + 11996351959015322539, + 15461759537099893272, + 5820786797852667153 + ], + [ + 10546547765646585412, + 16670337245061195679, + 
14026017154393171841, + 15865302123450709636 + ], + [ + 14546229884213013754, + 1736742091912698149, + 9563343697509131991, + 15957923614096898715 + ], + [ + 10053685874691243725, + 4837706724113512400, + 2806343138575002644, + 1217140278849814125 + ], + [ + 1445801437405723985, + 16370378782744105412, + 14937074551650631907, + 9349528097788576916 + ], + [ + 313624596753562026, + 2737902703720676400, + 1491628534561577719, + 4958865646973460786 + ], + [ + 4749220647213961766, + 1523283588343260013, + 14435216093380418825, + 12058673416937256217 + ], + [ + 12221430523773573392, + 2119312783391782040, + 113666136908336034, + 5557001496562966971 + ], + [ + 8943540790257615175, + 15963411684352909178, + 12833466372967679781, + 16755820399299554647 + ], + [ + 10568513175105063273, + 8152439411055527457, + 13191469024927433668, + 16583335250317103083 + ], + [ + 8728976170665901948, + 12047344511237318236, + 2665537844838432218, + 17220904479682973198 + ], + [ + 9924606303320583729, + 15168541434821629608, + 15267127988736394576, + 4612730729137963542 + ], + [ + 15187295020445613202, + 10979580190616738578, + 2487576517666265788, + 7799526392944971411 + ], + [ + 14001553124798608592, + 983427750240831173, + 10498633617369923042, + 15506856636873354247 + ], + [ + 9695584132680310661, + 15058940491468263542, + 16902767176197422459, + 4307070711535476522 + ], + [ + 16170129192711277380, + 15594325905653564554, + 8589261531750893019, + 16380506012444184509 + ], + [ + 1298482137029698068, + 7046350962355653091, + 4851290198964994943, + 9583994567704854822 + ], + [ + 12002196043506075596, + 6568364019768992619, + 2982180816490272604, + 11005221893883916442 + ], + [ + 11195444762924129211, + 4080035169749061493, + 12552903944803877103, + 1737834024116304977 + ], + [ + 14989371064091658414, + 7021573041416327989, + 10313274026717983047, + 12022812057768375145 + ], + [ + 4605818886224729183, + 1445887803225464457, + 10940822604464014966, + 9884898953608657154 + ], + [ + 
16595525430044003662, + 4052207436631314262, + 10293047811043558578, + 12338411972889057265 + ], + [ + 17022547446139753073, + 10651710177937554340, + 6940830771318907626, + 6641453047926556761 + ], + [ + 10509711725088536487, + 4962100381463002257, + 770256729671337313, + 5857473086438310769 + ], + [ + 13267845343537121240, + 12321261427265206453, + 7120567552214639265, + 17302478469072029437 + ], + [ + 15157833123384882226, + 8158551777234184706, + 12348570826200339644, + 14734791159756873167 + ], + [ + 3385809684799950973, + 12900259269431347094, + 17672545490781962423, + 8013657472810936941 + ], + [ + 9546288143416776531, + 8356139039634733121, + 17477514712681213981, + 9515108802170635886 + ], + [ + 15455406291521834876, + 18083571768045422071, + 2009769659787731673, + 7263333966403015872 + ], + [ + 61130843037975416, + 18121830552682131571, + 9667884638217866764, + 6932529860678316702 + ], + [ + 15509823136620654552, + 6676934950708893059, + 13351533805402596549, + 14340984679679191575 + ], + [ + 15880919573481609124, + 14836853565796210968, + 15058034487233879726, + 11000215022533975317 + ], + [ + 10611445668418674805, + 9013654201900818768, + 1617123938865368568, + 16484766808972651318 + ], + [ + 13652743593752436493, + 13843264938563223779, + 10417182020840583510, + 16106479734523413326 + ], + [ + 17003899699844258217, + 17283344455088140068, + 17379245612757089745, + 1779134255761065541 + ], + [ + 11263035503709897880, + 1042839549428969974, + 6638626002246118718, + 1733549590874866310 + ], + [ + 13054247239013491523, + 14047141927303128296, + 2417752406294216107, + 5255181314162279551 + ], + [ + 1784021430104795099, + 3912913366054617723, + 8208877227749479136, + 4259205675774136108 + ], + [ + 801501735568309733, + 3220301656144181340, + 6144806934797449899, + 10479314068206076631 + ], + [ + 864619228795552444, + 2705960387071331097, + 15702747436396598972, + 14131926486457006694 + ], + [ + 15349058561965444062, + 16738574548455051915, + 
4609208407223910426, + 11352062762791244662 + ], + [ + 16513285945090237658, + 6443875617703638442, + 17271016792575259072, + 426511686464611712 + ], + [ + 13863640462921288570, + 3362196992389213358, + 3576502768827883720, + 49219399301262346 + ], + [ + 14852014958673205840, + 11855705779815876870, + 8141476064801289266, + 15234506534160954104 + ], + [ + 6292328028167907211, + 6280576736728723621, + 1814915348986377963, + 3167789754322444841 + ], + [ + 6093870088272451581, + 12912333461662707605, + 16038365026642624017, + 10037265733136426182 + ], + [ + 5658473063427008398, + 18194712009700816671, + 3316188885192160812, + 15032222957557455487 + ], + [ + 10746739009272326386, + 4381896503060636995, + 13390900032103397758, + 11290296227744685851 + ], + [ + 7946268274905655961, + 12464655577446728520, + 3476152367809517039, + 3103729420470642525 + ], + [ + 15652670185218353632, + 17922284652588341925, + 3502474425690015164, + 3577240886292713575 + ], + [ + 13130871911189865405, + 2598654314674217849, + 6130707868677033708, + 11318789126458218054 + ], + [ + 14442525432094720658, + 10861520212227307580, + 10308956417568414174, + 101391938116250123 + ], + [ + 8015353735581004985, + 11791356411393233202, + 15134704188872104026, + 2916434958525680154 + ], + [ + 17776268955107890735, + 3978578384988358930, + 7512482176825678493, + 4207030036138354730 + ], + [ + 4778990000266723258, + 12646326430153649418, + 15602744563429649630, + 13225673449752707138 + ], + [ + 9874615904187846642, + 785813436574501824, + 7422887093860615814, + 12575067740028803899 + ], + [ + 2168314129068013776, + 5876170257821873353, + 6574425448964925595, + 13006818379253162445 + ], + [ + 18203396842093379784, + 1637929000957737331, + 16472903772026963266, + 10222221849709050007 + ], + [ + 8175241234035594021, + 11131701218615583700, + 2590544239124773213, + 13103205380700956240 + ], + [ + 10265760611171832222, + 10772001381712818941, + 9769892419500727073, + 3757944212165946207 + ], + [ + 
12890674101364520820, + 17436315026775313277, + 17685632275460283213, + 11597862884205200021 + ], + [ + 6541476428026208530, + 1572664979326233234, + 7220008363221767753, + 5030515443926464143 + ], + [ + 1736392533925503753, + 4028348047388923757, + 1322644146499193870, + 7899110541856172042 + ], + [ + 824249951832333568, + 3650442342240934725, + 9551872020421810878, + 4459093260544332906 + ], + [ + 4897538583957298940, + 9763535759998548001, + 17962120881583547780, + 16423532894596203353 + ], + [ + 15621981029420175013, + 12716027808121583594, + 5698997410684688521, + 14893034348453337964 + ], + [ + 57123003983559085, + 1764189019842202487, + 12021079431755992353, + 4611397927843211028 + ], + [ + 11271204071286517021, + 11016763947620063917, + 10240257375147160558, + 6936015967868091242 + ], + [ + 13263159746058683924, + 2115544844352440396, + 17037308825725243672, + 15392971905394245550 + ], + [ + 3040132672294734369, + 8799577028392802142, + 4698252977355096983, + 12029143421110018733 + ], + [ + 3367138533962530706, + 11052858577054708144, + 13777717310915519372, + 1118835749836496249 + ], + [ + 18247935773305597594, + 4791613470261596504, + 2831278673598556804, + 13054754323096013606 + ], + [ + 16660028838922677973, + 9396606308613201784, + 10417460426584440111, + 8380249813777095858 + ], + [ + 8666375557464313815, + 6304275044317841466, + 17005733772133350822, + 14343984316653970581 + ], + [ + 6123226050306352446, + 17155936354664339685, + 8617663439980301335, + 15168460408431057703 + ], + [ + 4711490317757564870, + 4007952793500570494, + 1619724888362180342, + 135640370275358583 + ], + [ + 3568489405188009952, + 10063008786740635782, + 3645030669998184283, + 11251876386972219611 + ], + [ + 8493411482257270591, + 2420069605766207080, + 4319637578909518850, + 7366398383772225801 + ], + [ + 10514279907222008963, + 4576784878418470993, + 13703409256143697610, + 12595458216589602418 + ], + [ + 6518196967791403281, + 6267315352755318878, + 17017845178414282191, + 
6219869337455185408 + ], + [ + 9419176152740350408, + 7255025946876060056, + 13692775553690785222, + 6482964045212267647 + ], + [ + 10371771524251272238, + 7264554963155886115, + 11851596230763322123, + 9644034659422561803 + ], + [ + 16478128758974022864, + 10586973455001136017, + 10940854718906381238, + 4322569144361190672 + ], + [ + 1408324526595066767, + 8225966379387133915, + 575234437450946120, + 4532159296152332514 + ], + [ + 14713730919085929141, + 8299030584912591200, + 12653081274697230155, + 7281510145820913452 + ], + [ + 13065256168386635886, + 11725981090441231815, + 9241488846717901227, + 2660860847185625194 + ], + [ + 17344786539017984359, + 16667262327893622443, + 2245517842543022928, + 5289295469095289187 + ], + [ + 7712996453163241402, + 13291387059438293554, + 15475873660461985862, + 421113001330390910 + ], + [ + 6600556276994686684, + 15236925279661890975, + 9085425445873596043, + 13489979627098239874 + ], + [ + 16522817509787049424, + 7880119001244729499, + 2065132977854154043, + 13961595284131100852 + ], + [ + 10136466656472241068, + 7462847284592418995, + 5211060891495522982, + 3429812446680794228 + ], + [ + 1290650595638649346, + 13577818566931833714, + 2388552128026682549, + 13062451014076475307 + ], + [ + 6070278064232182539, + 12399409837026713208, + 7006572737093555522, + 15229533042676039053 + ], + [ + 4546934482854074283, + 12178466512696871783, + 1691876649621188863, + 11473100481876466355 + ], + [ + 1236556729692669368, + 5549499687074217624, + 5958432331744161923, + 17960851426383425726 + ], + [ + 11313384441935481196, + 506230122531558484, + 17480105233549292653, + 3497216050489019068 + ], + [ + 14180330000703954943, + 17537654316605413864, + 13893109338811179145, + 12773435230801104558 + ], + [ + 1789329554161917105, + 3821166735342421302, + 492899993274707886, + 12499398353307226422 + ], + [ + 3089012429777109607, + 10341877957962585240, + 7745611914889843192, + 7156440096296115871 + ], + [ + 14047653524331542619, + 
14986552290847971271, + 8344707263938736217, + 5768162811052933747 + ], + [ + 3282503115484594392, + 3455102204735354908, + 6152707383721113591, + 17881334209336571790 + ], + [ + 6895592993382091339, + 14944664838702526606, + 14158277091240311047, + 13527182248087302901 + ], + [ + 3709694612749923839, + 2612160447494745836, + 1406956200130224054, + 1366284088614500522 + ], + [ + 9948565747047514924, + 7052268914527763961, + 18019747825808196928, + 14284818641981927036 + ], + [ + 3586214080877747437, + 18259935751422321449, + 7615481956713457290, + 14815795422370035012 + ], + [ + 127402153735286355, + 14056365981282611643, + 13874770993027970720, + 619630456240824192 + ], + [ + 14160230678035047748, + 5889480608083619036, + 7255863117098942211, + 15199656156616971091 + ], + [ + 3095946492180700178, + 4006392168790405864, + 6879209997262175243, + 299198988833119182 + ], + [ + 7324876720724904977, + 16163169761447383767, + 12506750534396872240, + 2471214392724636123 + ], + [ + 9097827375986012190, + 14565705700237004602, + 14313871429102448506, + 11871591868576605558 + ], + [ + 9799079875206163527, + 16651435323107739422, + 12479220145998529239, + 3293893419326195678 + ], + [ + 10308258292354323463, + 12348814193023657478, + 7617037745166680507, + 4395321161975898727 + ], + [ + 5350571522462508856, + 13168312859432101164, + 16051585591307343397, + 14229124174436500603 + ], + [ + 7166265506509230502, + 7161154874163920078, + 8397077088165043439, + 11599821267858973994 + ], + [ + 7480649953471058826, + 10095090134750662786, + 10564643199160392009, + 13520621355659318250 + ], + [ + 10857779123272725120, + 15686674254635154618, + 8867730228951457290, + 10801878621148582310 + ], + [ + 15060256657026761959, + 9661131710054923901, + 672914961534049680, + 12834176932586128630 + ], + [ + 949448556211506716, + 15381404281513795275, + 11155631745566842701, + 2704703395424058389 + ], + [ + 9515248836992226419, + 8107063249314601218, + 7169007343668853680, + 4398533039543530267 + 
], + [ + 5048521287079394266, + 469091126174822039, + 15224036421670482877, + 2109812614761568090 + ], + [ + 16374381239073062350, + 1415659237932604676, + 15755278916993952324, + 4477381067900828660 + ], + [ + 5908643369108814128, + 16993840892601214320, + 9745154490046936812, + 12675544964328790510 + ], + [ + 13069705042519055121, + 114347527485448732, + 13607966080279761362, + 3198027414138631154 + ], + [ + 2126201229935790021, + 11762486874095747924, + 4405447136645256490, + 4546150485765128281 + ], + [ + 11517522298498019347, + 9683562397139382420, + 17997189851623366056, + 15075626419278367114 + ], + [ + 17300749044902256514, + 13180322067670835054, + 13510754977375972746, + 16789287824862069583 + ], + [ + 12482184120647536152, + 6673581288163589602, + 12763923036507066435, + 14094147320392535720 + ], + [ + 14259634259170511729, + 4875730172347810212, + 84468584086030163, + 8303744294867418687 + ], + [ + 12134644548742181831, + 8520649738990136053, + 16059641390155564470, + 5416979754786543764 + ], + [ + 2092318536537973522, + 11767101658840205425, + 6426865564773940591, + 14647807617379392454 + ], + [ + 450267335414320409, + 9743488482513949727, + 60939637019009091, + 17766639401038188429 + ], + [ + 10940698167282175513, + 14751185897733548414, + 12951179730146965260, + 10875205992060835228 + ], + [ + 13364210655207872649, + 6433202412249539021, + 9336639704508334838, + 13505754981533474450 + ], + [ + 13696994814871272283, + 4490009507613100808, + 13804331039068475494, + 5468505803402562249 + ], + [ + 4777271447313491592, + 9940020440702119485, + 1101181637482946058, + 644606693604161858 + ], + [ + 15097959041405784257, + 12307130809829041484, + 17258499521253983865, + 10749514694060779271 + ], + [ + 10190208760039486310, + 4226050742652743727, + 8941293745652956008, + 16590522505585423775 + ], + [ + 345192297192706835, + 446809322130030426, + 14301758245246751976, + 9719056934946360728 + ], + [ + 12881116110058824616, + 9527685388634025534, + 
11091584611747550280, + 17824266919172201804 + ], + [ + 1574239507079237453, + 8582461342969153012, + 12264251495254783586, + 17637680562426650364 + ], + [ + 10192143269632239849, + 13007991288866360616, + 15140175217087410009, + 12752906480866993968 + ], + [ + 1870988781431107523, + 12976682012942521853, + 16429062848935476734, + 5798003006437035985 + ], + [ + 14584766751103981854, + 17047930049374537599, + 16615811923791215213, + 4382675641211926136 + ], + [ + 2714557854446349417, + 7104188094330759435, + 4252584297915226480, + 8214575447584991866 + ], + [ + 11055975993893155083, + 4263965260378501220, + 14890629048371839452, + 12114106467257211085 + ], + [ + 1696215674801501046, + 2266423673248165355, + 17663731467117251495, + 3131727223950690444 + ], + [ + 9538270027605424981, + 14695627410990929758, + 1100103051852107643, + 7780696685697285282 + ], + [ + 3640909033400638252, + 4685184407249885115, + 1159593584695683466, + 5810155451150284456 + ], + [ + 2142365296746734893, + 15072293834204351367, + 10906328353503930623, + 567086761313849045 + ], + [ + 16792244974449676806, + 5281042885555694958, + 803668749758477336, + 9632569928427671141 + ], + [ + 11458024915503659884, + 12163640565717382175, + 5426693528062973190, + 5318451835514658298 + ], + [ + 17468129298890753615, + 16365877952312079431, + 4255216049796249864, + 17519162521801620751 + ], + [ + 16881958987441626136, + 5348165195536725074, + 1110205049250373278, + 3270960662287149636 + ], + [ + 1226689297335858879, + 5569006707448968175, + 12862213669554513549, + 13476251685033412618 + ], + [ + 17469709307599228055, + 10239286546577328519, + 11089348310818650527, + 16456908862000481769 + ], + [ + 1488822551903623063, + 1027724056630207939, + 17697306299429716060, + 16855149560650760331 + ], + [ + 6973362373999440967, + 10660889882557485296, + 10425405668156759777, + 6301066711999204638 + ], + [ + 414734469595850357, + 14255487988571755338, + 8593074929345797359, + 16605827151888774599 + ], + [ + 
16875797800314241514, + 12421815564591930411, + 2652019482537730851, + 6038511721575984408 + ], + [ + 5083130506118840282, + 10236076700696608757, + 89798661707215807, + 11047945556430945234 + ], + [ + 9052849932438286930, + 766869919370273028, + 9513938692410545262, + 9325822678740812785 + ], + [ + 12214954222172122496, + 2137993706327729295, + 9068121284446752470, + 16719368474204358065 + ], + [ + 12846496216910539260, + 14961944612175511231, + 4871177192578157967, + 871092876547424272 + ], + [ + 8432570778385641968, + 16742266319428376459, + 5202631662503988626, + 9179077767480984950 + ], + [ + 6907086971366422845, + 3630230185711868320, + 6363326632303072661, + 2155376917773117715 + ], + [ + 7260111458604834720, + 15869987976273960274, + 5471864801301693834, + 6186332987204233992 + ], + [ + 7579472070725254127, + 4175627325036864033, + 2726864388901619006, + 17182366690618521544 + ], + [ + 10366850615209571638, + 594983328964439422, + 5708701194817491298, + 8320252948121397632 + ], + [ + 7681701787398279771, + 8922827186766353002, + 16390646056989747875, + 16861016347708202354 + ], + [ + 18349539097929417681, + 11011444774390028397, + 14929165998832565332, + 4785028369071377058 + ], + [ + 12474665121325276393, + 6203136048752329670, + 8857782650797383532, + 1205842604249274162 + ], + [ + 4986065786927816995, + 16774907198741044886, + 3781179129421387661, + 10415439284245950953 + ], + [ + 11574881136629381449, + 12379735189792408408, + 16543138864606381298, + 2352837174543239308 + ], + [ + 2755808935111250351, + 11413691641496289631, + 7433067894962399074, + 9263092778396333619 + ], + [ + 510815690565069916, + 1326078112306964442, + 2280931398968953373, + 16372347667232436246 + ], + [ + 13049250986319352420, + 1312888884206484061, + 5191125907416704942, + 12910692959434040696 + ], + [ + 12641131533859353693, + 806990755667531292, + 8339582499246533062, + 15459494525405539126 + ], + [ + 5334506198163160487, + 12788298331318017466, + 3878039519170214619, + 
14733573925349283487 + ], + [ + 5190557862976962645, + 13714749928322729293, + 10374841881342744627, + 15288181470367912639 + ], + [ + 16231478233162755783, + 12802076246758748329, + 14761367109514758527, + 14411193599867587859 + ], + [ + 8198731374774297764, + 3230920736551397516, + 11406632512199098782, + 10984385724696684374 + ], + [ + 2230082119129443391, + 15626857176238648658, + 10525636546408036753, + 713830293126230928 + ], + [ + 4947353515945045110, + 14890750024678328507, + 11992121003625623155, + 9247439145474008589 + ], + [ + 2476255618728017726, + 12423839440278091466, + 11131574346213868564, + 15878857548422208415 + ], + [ + 14063080495591128251, + 17832973426635180302, + 16234044432643152520, + 3790040177800561340 + ], + [ + 17654367747593125286, + 14771556851404254590, + 7054247606189603403, + 16389491650139306501 + ], + [ + 16945878684828151504, + 3218677612764542854, + 4129066622553533042, + 12411143505986390453 + ], + [ + 18139421373310678346, + 12565918292252415379, + 15834446535016534449, + 6148063108469358612 + ], + [ + 12292679265601130740, + 13386299384345137465, + 13364501573803460934, + 8733289102497600737 + ], + [ + 6160628888236701076, + 812243480588759160, + 13247424403141713486, + 10099500208290322254 + ], + [ + 10969580462260030094, + 16960852675866882693, + 14194600920049272873, + 2806447904271636378 + ], + [ + 5308612537476412691, + 2017232117327961782, + 4894043882170046108, + 7279530397891640384 + ], + [ + 4144733560007592583, + 5771344498053405563, + 15232642512723560772, + 17432794023851843115 + ], + [ + 13083133801966421336, + 6640909047775641455, + 7780326336877273740, + 3087015496142074629 + ], + [ + 4093431377560277258, + 11212918479165511440, + 8471957796338043904, + 10134266515436065987 + ], + [ + 7717602192687906239, + 16498238127262085325, + 2796866393834457756, + 6080730252731365434 + ], + [ + 15198975112515001105, + 3427384774999580491, + 9100144446909554927, + 12939101842468805683 + ], + [ + 3513270711153070627, + 
6760090775854266850, + 2266114679123970134, + 2453164217927145379 + ], + [ + 8454909244339253322, + 3937159550392777548, + 17092850706817714179, + 14355781411083550660 + ], + [ + 14878805576521609961, + 14710363931287834639, + 3026158727470434848, + 664513313042659381 + ], + [ + 8491751794767535837, + 13990906406579930747, + 10128524312616192734, + 11329057191619161908 + ], + [ + 14453858867510385195, + 7990714203132759977, + 5706371533563158927, + 4128865490009717707 + ], + [ + 7793399965923611298, + 13429890700846305668, + 6463577189314693043, + 1403793223780405881 + ], + [ + 14506683092001193039, + 2676502583153744002, + 432387115071868059, + 5324230977146090619 + ], + [ + 5163412501615965388, + 13723489526376938207, + 13122017355634831416, + 9867447233188068220 + ], + [ + 2169005641163984550, + 9186165665928554404, + 17910278673295197365, + 11381918494994166306 + ], + [ + 5758488670302072688, + 18181832326798847807, + 10253561012448568621, + 6796662601196927726 + ], + [ + 15384372095875884299, + 15758325895562962606, + 979979951038690002, + 4884709252008623452 + ], + [ + 9658033725016409161, + 11061216333798295999, + 10656853269529126466, + 10494481123210514991 + ], + [ + 10565302142069456280, + 4482395191206056917, + 1026499487185603956, + 7313927946426328835 + ], + [ + 914075172237814299, + 4712333083011398137, + 5903513285138462152, + 27234332817070790 + ], + [ + 6165097805071238615, + 12405114942064306895, + 644764352603959519, + 9890780672007958877 + ], + [ + 15139300154181340570, + 3763515407393500498, + 747278468332165545, + 12401993133389174873 + ], + [ + 1560524376846745097, + 4692895970840561536, + 8733218268882211058, + 11050746819680980390 + ], + [ + 2481724169828996194, + 13708290945708520871, + 4344699263192501279, + 7140288180697285542 + ], + [ + 296768972336373070, + 10813546555235346490, + 16170115378690748299, + 12802703395795280191 + ], + [ + 14251656741089442088, + 4214370918062569580, + 10032045984805229135, + 10373930023291015419 + ], + [ 
+ 17093829866009459220, + 12859940170163163826, + 18210851741109329458, + 16336121553624015019 + ], + [ + 4407838941860440582, + 11854162025145257857, + 12199975997855699065, + 2502264419023857875 + ], + [ + 12466276041107521420, + 5321886014476179588, + 7215900333286173827, + 6244069735947509039 + ], + [ + 5660993473650802623, + 11420246285415070761, + 4658057095848491886, + 6674509485613885322 + ], + [ + 16443753989364975722, + 553371339109155814, + 6176240530340905521, + 5112633271096324804 + ], + [ + 17344015413911877127, + 17921285748955845167, + 472207513366524064, + 9508903352311585055 + ], + [ + 2373580342727259876, + 17626197481532596235, + 5443748763702275935, + 16239948710647217063 + ], + [ + 1893590215294085348, + 16922448579707654020, + 3562803535149926022, + 5008057946422210179 + ], + [ + 9528486283906265098, + 16540897732900210948, + 7987610981843235045, + 8503796247003141361 + ], + [ + 7151582572260738324, + 8798367050545135378, + 17208414855521381994, + 16950417755338618580 + ], + [ + 16098392335079248174, + 15218667139563807688, + 3579977320329860139, + 5640122365751888962 + ], + [ + 9584075531015381817, + 16275032664923657166, + 13987470479712899701, + 2201274726914907785 + ], + [ + 9927398107804295577, + 5687396867124688978, + 10639332412514046012, + 10390174472889436172 + ], + [ + 7906944927470502930, + 9629467849018808831, + 16716405557811073978, + 2687565502976952246 + ], + [ + 2357326643063601119, + 12372080030488676843, + 17862973568880540528, + 9758076484635237383 + ], + [ + 6955909340644692347, + 9788637714503672561, + 10015565612607022102, + 5622624961359105679 + ], + [ + 8476474886341847071, + 14658488683343597259, + 14961921379651609802, + 14123545985510427091 + ], + [ + 1388357604146037902, + 15713286211316016536, + 5612470285625982657, + 6487364037172184435 + ], + [ + 15189267229737199628, + 6502744762576006736, + 13803864300857374808, + 5415597147102054081 + ], + [ + 11784930967835797340, + 11401415610557072973, + 
1534900691166631710, + 4724373879472771719 + ], + [ + 10801273164646452905, + 11959510535773071656, + 13440563277297614072, + 11732355214966685480 + ], + [ + 709242285317259075, + 14093566268118334778, + 17068650979174969466, + 14497803119243527377 + ], + [ + 1887862044793269490, + 8537802901513975848, + 1791222441789963991, + 9563652556460700888 ] - } + ] } \ No newline at end of file diff --git a/prover/data/keys/verification_compression_wrapper_1_key.json b/prover/data/keys/verification_compression_wrapper_1_key.json new file mode 100644 index 000000000000..7962a007a353 --- /dev/null +++ b/prover/data/keys/verification_compression_wrapper_1_key.json @@ -0,0 +1,260 @@ +{ + "fixed_parameters": { + "parameters": { + "num_columns_under_copy_permutation": 52, + "num_witness_columns": 78, + "num_constant_columns": 4, + "max_allowed_constraint_degree": 8 + }, + "lookup_parameters": "NoLookup", + "domain_size": 65536, + "total_tables_len": 0, + "public_inputs_locations": [ + [ + 0, + 43116 + ], + [ + 1, + 43116 + ], + [ + 2, + 43116 + ], + [ + 3, + 43116 + ] + ], + "extra_constant_polys_for_selectors": 4, + "table_ids_column_idxes": [], + "quotient_degree": 8, + "selectors_placement": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 1, + "num_constants": 0, + "degree": 7, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "Fork": { + "left": { + "Fork": { + "left": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 4, + "num_constants": 4, + "degree": 3, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "Fork": { + "left": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 2, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "GateOnly": { + "gate_idx": 6, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false + } + } + } + }, + "right": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 5, + "num_constants": 0, + "degree": 
2, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "GateOnly": { + "gate_idx": 7, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false + } + } + } + } + } + } + } + }, + "right": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 9, + "num_constants": 4, + "degree": 2, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "GateOnly": { + "gate_idx": 0, + "num_constants": 4, + "degree": 1, + "needs_selector": true, + "is_lookup": false + } + } + } + } + } + }, + "right": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 3, + "num_constants": 2, + "degree": 3, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "GateOnly": { + "gate_idx": 8, + "num_constants": 0, + "degree": 0, + "needs_selector": true, + "is_lookup": false + } + } + } + } + } + } + } + }, + "fri_lde_factor": 2, + "cap_size": 16 + }, + "setup_merkle_tree_cap": [ + [ + 11089233109673513909, + 4694567683723191950, + 2252594152558732892, + 328970513026299243 + ], + [ + 13110006211642469395, + 294375540944797602, + 16182235873004377367, + 2953023061901047966 + ], + [ + 12954993379135284962, + 6922678615292520290, + 17867758875593058615, + 339647768012530579 + ], + [ + 12809050784791995014, + 16689513786002107471, + 11773267927233763048, + 2032047837067080396 + ], + [ + 7271809219891052194, + 5444455652423618380, + 1935299370455922934, + 3289711575126119664 + ], + [ + 11181297440700596994, + 10603106417644946600, + 665809790647260411, + 187324941596609709 + ], + [ + 9724160063639293155, + 1608086271555753498, + 16260908485634719227, + 632759144306004522 + ], + [ + 16171710219953295253, + 11735348771071823498, + 13328414849155243388, + 388220022403515252 + ], + [ + 6347515368180647794, + 3011684189305320549, + 17039076561062895245, + 2001704612642658194 + ], + [ + 3707095019654850078, + 11362163228164094076, + 1561353676808431273, + 3424316751369411482 + ], + [ + 716932689692970095, + 18233950677301858032, 
+ 17408689052397185323, + 2942664824563666170 + ], + [ + 12477682224945956203, + 420380906955838894, + 12174965353807932632, + 2912634883207646728 + ], + [ + 17124977403072675, + 18022072095965759474, + 364660436009892494, + 3385316467253708000 + ], + [ + 16889669530683164245, + 523943222152237493, + 1642074572079780044, + 993946077205588770 + ], + [ + 4746369334258751576, + 10488207333534533554, + 13843593047124360807, + 1554413355203877957 + ], + [ + 674378918528041149, + 9245737942013154408, + 5431441209257727894, + 970073226355719652 + ] + ] +} \ No newline at end of file diff --git a/prover/data/keys/verification_compression_wrapper_5_key.json b/prover/data/keys/verification_compression_wrapper_5_key.json index 8dd3feeabf37..b9a682b5ed3b 100644 --- a/prover/data/keys/verification_compression_wrapper_5_key.json +++ b/prover/data/keys/verification_compression_wrapper_5_key.json @@ -1,240 +1,238 @@ { - "CompressionMode5Circuit": { - "fixed_parameters": { - "parameters": { - "num_columns_under_copy_permutation": 28, - "num_witness_columns": 0, - "num_constant_columns": 4, - "max_allowed_constraint_degree": 4 - }, - "lookup_parameters": "NoLookup", - "domain_size": 131072, - "total_tables_len": 0, - "public_inputs_locations": [ - [ - 0, - 102321 - ], - [ - 1, - 102321 - ], - [ - 2, - 102321 - ], - [ - 3, - 102321 - ] + "fixed_parameters": { + "parameters": { + "num_columns_under_copy_permutation": 28, + "num_witness_columns": 0, + "num_constant_columns": 4, + "max_allowed_constraint_degree": 4 + }, + "lookup_parameters": "NoLookup", + "domain_size": 131072, + "total_tables_len": 0, + "public_inputs_locations": [ + [ + 0, + 102321 ], - "extra_constant_polys_for_selectors": 4, - "table_ids_column_idxes": [], - "quotient_degree": 8, - "selectors_placement": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 4, - "num_constants": 1, - "degree": 7, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "Fork": { - "left": { - "Fork": { - "left": { - 
"Fork": { - "left": { - "GateOnly": { - "gate_idx": 7, - "num_constants": 4, - "degree": 3, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "Fork": { - "left": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 1, - "num_constants": 0, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "GateOnly": { - "gate_idx": 8, - "num_constants": 0, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } + [ + 1, + 102321 + ], + [ + 2, + 102321 + ], + [ + 3, + 102321 + ] + ], + "extra_constant_polys_for_selectors": 4, + "table_ids_column_idxes": [], + "quotient_degree": 8, + "selectors_placement": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 4, + "num_constants": 1, + "degree": 7, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "Fork": { + "left": { + "Fork": { + "left": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 7, + "num_constants": 4, + "degree": 3, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "Fork": { + "left": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 1, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "GateOnly": { + "gate_idx": 8, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false } } - }, - "right": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 5, - "num_constants": 0, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "GateOnly": { - "gate_idx": 9, - "num_constants": 0, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } + } + }, + "right": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 5, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "GateOnly": { + "gate_idx": 9, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false } } } } } } - }, - "right": { - "Fork": { - 
"left": { - "GateOnly": { - "gate_idx": 11, - "num_constants": 4, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "GateOnly": { - "gate_idx": 0, - "num_constants": 4, - "degree": 1, - "needs_selector": true, - "is_lookup": false - } + } + }, + "right": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 11, + "num_constants": 4, + "degree": 2, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "GateOnly": { + "gate_idx": 0, + "num_constants": 4, + "degree": 1, + "needs_selector": true, + "is_lookup": false } } } } - }, - "right": { - "Fork": { - "left": { - "Fork": { - "left": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 6, - "num_constants": 2, - "degree": 3, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "GateOnly": { - "gate_idx": 10, - "num_constants": 0, - "degree": 0, - "needs_selector": true, - "is_lookup": false - } + } + }, + "right": { + "Fork": { + "left": { + "Fork": { + "left": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 6, + "num_constants": 2, + "degree": 3, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "GateOnly": { + "gate_idx": 10, + "num_constants": 0, + "degree": 0, + "needs_selector": true, + "is_lookup": false } - } - }, - "right": { - "GateOnly": { - "gate_idx": 3, - "num_constants": 0, - "degree": 1, - "needs_selector": true, - "is_lookup": false } } + }, + "right": { + "GateOnly": { + "gate_idx": 3, + "num_constants": 0, + "degree": 1, + "needs_selector": true, + "is_lookup": false + } } - }, - "right": { - "GateOnly": { - "gate_idx": 2, - "num_constants": 0, - "degree": 1, - "needs_selector": true, - "is_lookup": false - } + } + }, + "right": { + "GateOnly": { + "gate_idx": 2, + "num_constants": 0, + "degree": 1, + "needs_selector": true, + "is_lookup": false } } } } } } - }, - "fri_lde_factor": 512, - "cap_size": 8 + } }, - "setup_merkle_tree_cap": [ - [ - 16914330110517127429, - 16187587722999224490, - 
18023818126147572005, - 2874604292345707728 - ], - [ - 16768425182336866443, - 12214564422923037261, - 13582724864735257774, - 1643211286062672867 - ], - [ - 15218535791823658828, - 14934144541713644491, - 5667321981618564549, - 961076974096461270 - ], - [ - 12538243484307626811, - 154885234119673688, - 6664593122905080257, - 3057958811251130407 - ], - [ - 12286013330322000138, - 5761148084208728264, - 2870132557697656188, - 1515812049810917361 - ], - [ - 12416599860285890728, - 16995984316626848023, - 6201310732706348436, - 3434944201786144260 - ], - [ - 2716626386491435001, - 128784567533803364, - 15547067112370735343, - 1561596902164897368 - ], - [ - 8739964537157479045, - 17599924720345455572, - 12386657679573206086, - 835122317104749492 - ] + "fri_lde_factor": 512, + "cap_size": 8 + }, + "setup_merkle_tree_cap": [ + [ + 16914330110517127429, + 16187587722999224490, + 18023818126147572005, + 2874604292345707728 + ], + [ + 16768425182336866443, + 12214564422923037261, + 13582724864735257774, + 1643211286062672867 + ], + [ + 15218535791823658828, + 14934144541713644491, + 5667321981618564549, + 961076974096461270 + ], + [ + 12538243484307626811, + 154885234119673688, + 6664593122905080257, + 3057958811251130407 + ], + [ + 12286013330322000138, + 5761148084208728264, + 2870132557697656188, + 1515812049810917361 + ], + [ + 12416599860285890728, + 16995984316626848023, + 6201310732706348436, + 3434944201786144260 + ], + [ + 2716626386491435001, + 128784567533803364, + 15547067112370735343, + 1561596902164897368 + ], + [ + 8739964537157479045, + 17599924720345455572, + 12386657679573206086, + 835122317104749492 ] - } + ] } \ No newline at end of file diff --git a/prover/data/keys/verification_scheduler_key.json b/prover/data/keys/verification_scheduler_key.json index 8a52cc244bac..bda175b7e18c 100644 --- a/prover/data/keys/verification_scheduler_key.json +++ b/prover/data/keys/verification_scheduler_key.json @@ -1,270 +1,268 @@ { - "SchedulerCircuit": { - 
"fixed_parameters": { - "parameters": { - "num_columns_under_copy_permutation": 130, - "num_witness_columns": 0, - "num_constant_columns": 4, - "max_allowed_constraint_degree": 8 - }, - "lookup_parameters": { - "UseSpecializedColumnsWithTableIdAsConstant": { - "width": 3, - "num_repetitions": 4, - "share_table_id": true - } - }, - "domain_size": 1048576, - "total_tables_len": 132096, - "public_inputs_locations": [ - [ - 0, - 1043851 - ], - [ - 1, - 1043851 - ], - [ - 2, - 1043851 - ], - [ - 3, - 1043851 - ] + "fixed_parameters": { + "parameters": { + "num_columns_under_copy_permutation": 130, + "num_witness_columns": 0, + "num_constant_columns": 4, + "max_allowed_constraint_degree": 8 + }, + "lookup_parameters": { + "UseSpecializedColumnsWithTableIdAsConstant": { + "width": 3, + "num_repetitions": 4, + "share_table_id": true + } + }, + "domain_size": 1048576, + "total_tables_len": 132096, + "public_inputs_locations": [ + [ + 0, + 1043851 ], - "extra_constant_polys_for_selectors": 4, - "table_ids_column_idxes": [ - 8 + [ + 1, + 1043851 ], - "quotient_degree": 8, - "selectors_placement": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 1, - "num_constants": 0, - "degree": 7, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "Fork": { - "left": { - "Fork": { - "left": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 4, - "num_constants": 4, - "degree": 3, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "Fork": { - "left": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 5, - "num_constants": 1, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "GateOnly": { - "gate_idx": 6, - "num_constants": 0, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } + [ + 2, + 1043851 + ], + [ + 3, + 1043851 + ] + ], + "extra_constant_polys_for_selectors": 4, + "table_ids_column_idxes": [ + 8 + ], + "quotient_degree": 8, + "selectors_placement": { + "Fork": { + "left": { + 
"GateOnly": { + "gate_idx": 1, + "num_constants": 0, + "degree": 7, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "Fork": { + "left": { + "Fork": { + "left": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 4, + "num_constants": 4, + "degree": 3, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "Fork": { + "left": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 5, + "num_constants": 1, + "degree": 2, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "GateOnly": { + "gate_idx": 6, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false } } - }, - "right": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 2, - "num_constants": 0, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "GateOnly": { - "gate_idx": 7, - "num_constants": 0, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } + } + }, + "right": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 2, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "GateOnly": { + "gate_idx": 7, + "num_constants": 0, + "degree": 2, + "needs_selector": true, + "is_lookup": false } } } } } } - }, - "right": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 9, - "num_constants": 4, - "degree": 2, - "needs_selector": true, - "is_lookup": false - } - }, - "right": { - "GateOnly": { - "gate_idx": 0, - "num_constants": 4, - "degree": 1, - "needs_selector": true, - "is_lookup": false - } + } + }, + "right": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 9, + "num_constants": 4, + "degree": 2, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "GateOnly": { + "gate_idx": 0, + "num_constants": 4, + "degree": 1, + "needs_selector": true, + "is_lookup": false } } } } - }, - "right": { - "Fork": { - "left": { - "GateOnly": { - "gate_idx": 3, - "num_constants": 2, - "degree": 3, - 
"needs_selector": true, - "is_lookup": false - } - }, - "right": { - "GateOnly": { - "gate_idx": 8, - "num_constants": 0, - "degree": 0, - "needs_selector": true, - "is_lookup": false - } + } + }, + "right": { + "Fork": { + "left": { + "GateOnly": { + "gate_idx": 3, + "num_constants": 2, + "degree": 3, + "needs_selector": true, + "is_lookup": false + } + }, + "right": { + "GateOnly": { + "gate_idx": 8, + "num_constants": 0, + "degree": 0, + "needs_selector": true, + "is_lookup": false } } } } } } - }, - "fri_lde_factor": 2, - "cap_size": 16 + } }, - "setup_merkle_tree_cap": [ - [ - 9887208323851505217, - 1123001217986730435, - 343259880253311786, - 2151140818520262118 - ], - [ - 12495904531249642919, - 17232615797756148395, - 3335544159309667561, - 6261962261160675850 - ], - [ - 3290174806954782361, - 3957604867997030178, - 12129129725630125865, - 1636089896333385634 - ], - [ - 14645858759272203991, - 11653487901803110416, - 2499237237036147984, - 1841727833267838231 - ], - [ - 18193008520821522692, - 14508611443656176962, - 15201308762805005611, - 16051075400380887227 - ], - [ - 4504987266706704494, - 7397695837427186224, - 10067172051000661467, - 5044520361343796759 - ], - [ - 9408005523417633181, - 14924548137262927482, - 8927260223716946348, - 25087104176919469 - ], - [ - 11857324568001808264, - 5783626311717767938, - 10769426771780222703, - 8523712547334248178 - ], - [ - 18394924697039022030, - 3773697459649116941, - 6013511991919985339, - 17810626771729638933 - ], - [ - 13290121767754155136, - 11225142773614876536, - 4764911669339622945, - 17476639133556434478 - ], - [ - 11822797557540925718, - 17521847674855164779, - 18126641713175128985, - 3215884914057380988 - ], - [ - 15220380051263546850, - 7948573237324556416, - 264360501330239312, - 16455579027557250339 - ], - [ - 17738768733790921549, - 4021891743990340907, - 17352941271057641152, - 15584530612705924787 - ], - [ - 7157587680183062137, - 8837818432071888650, - 16467824236289155049, - 
17557580094049845697 - ], - [ - 15526977922222496027, - 5885713491624121557, - 8813450728670527813, - 10234120825800411733 - ], - [ - 12554317685609787988, - 4789370247234643566, - 16370523223191414986, - 9108687955872827734 - ] + "fri_lde_factor": 2, + "cap_size": 16 + }, + "setup_merkle_tree_cap": [ + [ + 9887208323851505217, + 1123001217986730435, + 343259880253311786, + 2151140818520262118 + ], + [ + 12495904531249642919, + 17232615797756148395, + 3335544159309667561, + 6261962261160675850 + ], + [ + 3290174806954782361, + 3957604867997030178, + 12129129725630125865, + 1636089896333385634 + ], + [ + 14645858759272203991, + 11653487901803110416, + 2499237237036147984, + 1841727833267838231 + ], + [ + 18193008520821522692, + 14508611443656176962, + 15201308762805005611, + 16051075400380887227 + ], + [ + 4504987266706704494, + 7397695837427186224, + 10067172051000661467, + 5044520361343796759 + ], + [ + 9408005523417633181, + 14924548137262927482, + 8927260223716946348, + 25087104176919469 + ], + [ + 11857324568001808264, + 5783626311717767938, + 10769426771780222703, + 8523712547334248178 + ], + [ + 18394924697039022030, + 3773697459649116941, + 6013511991919985339, + 17810626771729638933 + ], + [ + 13290121767754155136, + 11225142773614876536, + 4764911669339622945, + 17476639133556434478 + ], + [ + 11822797557540925718, + 17521847674855164779, + 18126641713175128985, + 3215884914057380988 + ], + [ + 15220380051263546850, + 7948573237324556416, + 264360501330239312, + 16455579027557250339 + ], + [ + 17738768733790921549, + 4021891743990340907, + 17352941271057641152, + 15584530612705924787 + ], + [ + 7157587680183062137, + 8837818432071888650, + 16467824236289155049, + 17557580094049845697 + ], + [ + 15526977922222496027, + 5885713491624121557, + 8813450728670527813, + 10234120825800411733 + ], + [ + 12554317685609787988, + 4789370247234643566, + 16370523223191414986, + 9108687955872827734 ] - } + ] } \ No newline at end of file diff --git 
a/prover/rust-toolchain b/prover/rust-toolchain index 03c040b91f1f..bc5d1d6bbd8e 100644 --- a/prover/rust-toolchain +++ b/prover/rust-toolchain @@ -1 +1,2 @@ -nightly-2024-08-01 +[toolchain] +channel = "nightly-2024-08-01" diff --git a/prover/setup-data-gpu-keys.json b/prover/setup-data-gpu-keys.json index 8a3bdeb262c1..6ec6570c6fa9 100644 --- a/prover/setup-data-gpu-keys.json +++ b/prover/setup-data-gpu-keys.json @@ -1,5 +1,5 @@ { - "us": "gs://matterlabs-setup-data-us/dc3bba7-gpu/", - "europe": "gs://matterlabs-setup-data-europe/dc3bba7-gpu/", - "asia": "gs://matterlabs-setup-data-asia/dc3bba7-gpu/" + "us": "gs://matterlabs-setup-data-us/a5d7741-gpu/", + "europe": "gs://matterlabs-setup-data-europe/a5d7741-gpu/", + "asia": "gs://matterlabs-setup-data-asia/a5d7741-gpu/" } diff --git a/yarn.lock b/yarn.lock index 5df8cb570e0f..eabdce369080 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1737,15 +1737,23 @@ sinon-chai "^3.7.0" ts-morph "^22.0.0" -"@matterlabs/hardhat-zksync-node@^0.0.1-beta.7": - version "0.0.1" - resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-node/-/hardhat-zksync-node-0.0.1.tgz#d44bda3c0069b149e2a67c9697eb81166b169ea6" - integrity sha512-rMabl+I813lzXINqTq5OvujQ30wsfO9mTLMPDXuYzEEhEzvnXlaVxuqynKBXrgXAxjmr+G79rqvcWgeKygtwBA== +"@matterlabs/hardhat-zksync-node@^1.2.0": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-node/-/hardhat-zksync-node-1.2.1.tgz#786d51b28ad3aa5b8b973831e016151326d844e4" + integrity sha512-BZDJyEB9iu54D6sOKTGeJrN5TRFLrg6k9E1x3lEwpOfewPwg1eTfb9e/LKGSCePbSremZIHzK3eDRr80hVdDjA== dependencies: - "@matterlabs/hardhat-zksync-solc" "^1.0.5" - axios "^1.4.0" - chalk "4.1.2" - fs-extra "^11.1.1" + "@matterlabs/hardhat-zksync-solc" "^1.2.5" + axios "^1.7.2" + chai "^4.3.4" + chalk "^4.1.2" + debug "^4.3.5" + fs-extra "^11.2.0" + proxyquire "^2.1.3" + semver "^7.6.2" + sinon "^18.0.0" + sinon-chai "^3.7.0" + source-map-support "^0.5.21" + undici "^6.18.2" 
"@matterlabs/hardhat-zksync-solc@0.4.2": version "0.4.2" @@ -1771,16 +1779,7 @@ proper-lockfile "^4.1.2" semver "^7.5.1" -"@matterlabs/hardhat-zksync-solc@^0.3.15": - version "0.3.17" - resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-solc/-/hardhat-zksync-solc-0.3.17.tgz#72f199544dc89b268d7bfc06d022a311042752fd" - integrity sha512-aZgQ0yfXW5xPkfuEH1d44ncWV4T2LzKZd0VVPo4PL5cUrYs2/II1FaEDp5zsf3FxOR1xT3mBsjuSrtJkk4AL8Q== - dependencies: - "@nomiclabs/hardhat-docker" "^2.0.0" - chalk "4.1.2" - dockerode "^3.3.4" - -"@matterlabs/hardhat-zksync-solc@^1.0.5", "@matterlabs/hardhat-zksync-solc@^1.1.4": +"@matterlabs/hardhat-zksync-solc@=1.1.4", "@matterlabs/hardhat-zksync-solc@^1.0.5": version "1.1.4" resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-solc/-/hardhat-zksync-solc-1.1.4.tgz#04a2fad6fb6b6944c64ad969080ee65b9af3f617" integrity sha512-4/usbogh9neewR2/v8Dn2OzqVblZMUuT/iH2MyPZgPRZYQlL4SlZtMvokU9UQjZT6iSoaKCbbdWESHDHSzfUjA== @@ -1797,6 +1796,15 @@ sinon-chai "^3.7.0" undici "^5.14.0" +"@matterlabs/hardhat-zksync-solc@^0.3.15": + version "0.3.17" + resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-solc/-/hardhat-zksync-solc-0.3.17.tgz#72f199544dc89b268d7bfc06d022a311042752fd" + integrity sha512-aZgQ0yfXW5xPkfuEH1d44ncWV4T2LzKZd0VVPo4PL5cUrYs2/II1FaEDp5zsf3FxOR1xT3mBsjuSrtJkk4AL8Q== + dependencies: + "@nomiclabs/hardhat-docker" "^2.0.0" + chalk "4.1.2" + dockerode "^3.3.4" + "@matterlabs/hardhat-zksync-solc@^1.2.0": version "1.2.0" resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-solc/-/hardhat-zksync-solc-1.2.0.tgz#c1ccd1eca0381840196f220b339da08320ad9583" @@ -1814,7 +1822,7 @@ sinon-chai "^3.7.0" undici "^6.18.2" -"@matterlabs/hardhat-zksync-solc@^1.2.4": +"@matterlabs/hardhat-zksync-solc@^1.2.4", "@matterlabs/hardhat-zksync-solc@^1.2.5": version "1.2.5" resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-solc/-/hardhat-zksync-solc-1.2.5.tgz#fbeeabc3fea0dd232fa3c8cb31bd93c103eba11a" 
integrity sha512-iZyznWl1Hoe/Z46hnUe1s2drBZBjJOS/eN+Ql2lIBX9B6NevBl9DYzkKzH5HEIMCLGnX9sWpRAJqUQJWy9UB6w== @@ -1882,6 +1890,11 @@ resolved "https://registry.yarnpkg.com/@matterlabs/prettier-config/-/prettier-config-1.0.3.tgz#3e2eb559c0112bbe9671895f935700dad2a15d38" integrity sha512-JW7nHREPqEtjBWz3EfxLarkmJBD8vi7Kx/1AQ6eBZnz12eHc1VkOyrc6mpR5ogTf0dOUNXFAfZut+cDe2dn4kQ== +"@matterlabs/zksync-contracts@^0.6.1": + version "0.6.1" + resolved "https://registry.yarnpkg.com/@matterlabs/zksync-contracts/-/zksync-contracts-0.6.1.tgz#39f061959d5890fd0043a2f1ae710f764b172230" + integrity sha512-+hucLw4DhGmTmQlXOTEtpboYCaOm/X2VJcWmnW4abNcOgQXEHX+mTxQrxEfPjIZT0ZE6z5FTUrOK9+RgUZwBMQ== + "@metamask/eth-sig-util@^4.0.0": version "4.0.1" resolved "https://registry.yarnpkg.com/@metamask/eth-sig-util/-/eth-sig-util-4.0.1.tgz#3ad61f6ea9ad73ba5b19db780d40d9aae5157088" @@ -2305,11 +2318,21 @@ resolved "https://registry.yarnpkg.com/@openzeppelin/contracts-upgradeable/-/contracts-upgradeable-4.9.5.tgz#572b5da102fc9be1d73f34968e0ca56765969812" integrity sha512-f7L1//4sLlflAN7fVzJLoRedrf5Na3Oal5PZfIq55NFcVZ90EpV1q5xOvL4lFvg3MNICSDr2hH0JUBxwlxcoPg== +"@openzeppelin/contracts-upgradeable@4.9.5": + version "4.9.5" + resolved "https://registry.yarnpkg.com/@openzeppelin/contracts-upgradeable/-/contracts-upgradeable-4.9.5.tgz#572b5da102fc9be1d73f34968e0ca56765969812" + integrity sha512-f7L1//4sLlflAN7fVzJLoRedrf5Na3Oal5PZfIq55NFcVZ90EpV1q5xOvL4lFvg3MNICSDr2hH0JUBxwlxcoPg== + "@openzeppelin/contracts-v4@npm:@openzeppelin/contracts@4.9.5": version "4.9.5" resolved "https://registry.yarnpkg.com/@openzeppelin/contracts/-/contracts-4.9.5.tgz#1eed23d4844c861a1835b5d33507c1017fa98de8" integrity sha512-ZK+W5mVhRppff9BE6YdR8CC52C8zAvsVAiWhEtQ5+oNxFE6h1WdeWo+FJSF8KKvtxxVYZ7MTP/5KoVpAU3aSWg== +"@openzeppelin/contracts@4.9.5": + version "4.9.5" + resolved "https://registry.yarnpkg.com/@openzeppelin/contracts/-/contracts-4.9.5.tgz#1eed23d4844c861a1835b5d33507c1017fa98de8" + integrity 
sha512-ZK+W5mVhRppff9BE6YdR8CC52C8zAvsVAiWhEtQ5+oNxFE6h1WdeWo+FJSF8KKvtxxVYZ7MTP/5KoVpAU3aSWg== + "@openzeppelin/contracts@^4.8.0": version "4.9.6" resolved "https://registry.yarnpkg.com/@openzeppelin/contracts/-/contracts-4.9.6.tgz#2a880a24eb19b4f8b25adc2a5095f2aa27f39677" @@ -2920,10 +2943,12 @@ resolved "https://registry.yarnpkg.com/@types/node/-/node-11.11.6.tgz#df929d1bb2eee5afdda598a41930fe50b43eaa6a" integrity sha512-Exw4yUWMBXM3X+8oqzJNRqZSwUAaS4+7NdvHqQuFi/d+synz++xmX3QIf+BFqneW8N31R8Ky+sikfZUXq07ggQ== -"@types/node@18.15.13": - version "18.15.13" - resolved "https://registry.yarnpkg.com/@types/node/-/node-18.15.13.tgz#f64277c341150c979e42b00e4ac289290c9df469" - integrity sha512-N+0kuo9KgrUQ1Sn/ifDXsvg0TTleP7rIy4zOBGECxAljqvqfqpTfzx0Q1NUedOixRMBfe2Whhb056a42cWs26Q== +"@types/node@22.7.5": + version "22.7.5" + resolved "https://registry.yarnpkg.com/@types/node/-/node-22.7.5.tgz#cfde981727a7ab3611a481510b473ae54442b92b" + integrity sha512-jML7s2NAzMWc//QSJ1a3prpk78cOPchGvXJsC3C6R6PSMoooztvRVQEz89gmBTBY1SPMaqo5teB4uNHPdetShQ== + dependencies: + undici-types "~6.19.2" "@types/node@^10.0.3": version "10.17.60" @@ -5464,18 +5489,18 @@ ethers@^5.0.2, ethers@^5.7.0, ethers@^5.7.2, ethers@~5.7.0, ethers@~5.7.2: "@ethersproject/web" "5.7.1" "@ethersproject/wordlists" "5.7.0" -ethers@^6.7.1: - version "6.12.1" - resolved "https://registry.yarnpkg.com/ethers/-/ethers-6.12.1.tgz#517ff6d66d4fd5433e38e903051da3e57c87ff37" - integrity sha512-j6wcVoZf06nqEcBbDWkKg8Fp895SS96dSnTCjiXT+8vt2o02raTn4Lo9ERUuIVU5bAjoPYeA+7ytQFexFmLuVw== +ethers@^6.13.5: + version "6.13.5" + resolved "https://registry.yarnpkg.com/ethers/-/ethers-6.13.5.tgz#8c1d6ac988ac08abc3c1d8fabbd4b8b602851ac4" + integrity sha512-+knKNieu5EKRThQJWwqaJ10a6HE9sSehGeqWN65//wE7j47ZpFhKAnHB/JJFibwwg61I/koxaPsXbXpD/skNOQ== dependencies: "@adraffy/ens-normalize" "1.10.1" "@noble/curves" "1.2.0" "@noble/hashes" "1.3.2" - "@types/node" "18.15.13" + "@types/node" "22.7.5" aes-js "4.0.0-beta.5" - tslib "2.4.0" - ws 
"8.5.0" + tslib "2.7.0" + ws "8.17.1" ethers@~5.5.0: version "5.5.4" @@ -5750,6 +5775,14 @@ file-entry-cache@^6.0.1: dependencies: flat-cache "^3.0.4" +fill-keys@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/fill-keys/-/fill-keys-1.0.2.tgz#9a8fa36f4e8ad634e3bf6b4f3c8882551452eb20" + integrity sha512-tcgI872xXjwFF4xgQmLxi76GnwJG3g/3isB1l4/G5Z4zrbddGpBjqZCO9oEAcB5wX0Hj/5iQB3toxfO7in1hHA== + dependencies: + is-object "~1.0.1" + merge-descriptors "~1.0.0" + fill-range@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" @@ -6827,6 +6860,13 @@ is-core-module@^2.11.0, is-core-module@^2.13.0, is-core-module@^2.13.1: dependencies: hasown "^2.0.0" +is-core-module@^2.16.0: + version "2.16.1" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.16.1.tgz#2a98801a849f43e2add644fbb6bc6229b19a4ef4" + integrity sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w== + dependencies: + hasown "^2.0.2" + is-data-view@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-data-view/-/is-data-view-1.0.1.tgz#4b4d3a511b70f3dc26d42c03ca9ca515d847759f" @@ -6895,6 +6935,11 @@ is-number@^7.0.0: resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== +is-object@~1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-object/-/is-object-1.0.2.tgz#a56552e1c665c9e950b4a025461da87e72f86fcf" + integrity sha512-2rRIahhZr2UWb45fIOuvZGpFtz0TyOZLf32KxBbSoUCeZR495zCKlWUKKUByk3geS2eAs7ZAABt0Y/Rx0GiQGA== + is-path-inside@^3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283" @@ -8123,6 +8168,11 @@ memorystream@^0.3.1: resolved 
"https://registry.yarnpkg.com/memorystream/-/memorystream-0.3.1.tgz#86d7090b30ce455d63fbae12dda51a47ddcaf9b2" integrity sha512-S3UwM3yj5mtUSEfP41UZmt/0SCoVYUcU1rkXv+BQ5Ig8ndL4sPoJNBUJERafdPb5jjHJGuMgytgKvKIf58XNBw== +merge-descriptors@~1.0.0: + version "1.0.3" + resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.3.tgz#d80319a65f3c7935351e5cfdac8f9318504dbed5" + integrity sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ== + merge-stream@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" @@ -8380,6 +8430,11 @@ mocha@^9.0.2: yargs-parser "20.2.4" yargs-unparser "2.0.0" +module-not-found-error@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/module-not-found-error/-/module-not-found-error-1.0.1.tgz#cf8b4ff4f29640674d6cdd02b0e3bc523c2bbdc0" + integrity sha512-pEk4ECWQXV6z2zjhRZUongnLJNUeGQJ3w6OQ5ctGwD+i5o93qjRQUk2Rt6VdNeu3sEP0AB4LcfvdebpxBRVr4g== + moo@^0.5.0: version "0.5.2" resolved "https://registry.yarnpkg.com/moo/-/moo-0.5.2.tgz#f9fe82473bc7c184b0d32e2215d3f6e67278733c" @@ -9220,6 +9275,15 @@ proxy-from-env@^1.1.0: resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2" integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg== +proxyquire@^2.1.3: + version "2.1.3" + resolved "https://registry.yarnpkg.com/proxyquire/-/proxyquire-2.1.3.tgz#2049a7eefa10a9a953346a18e54aab2b4268df39" + integrity sha512-BQWfCqYM+QINd+yawJz23tbBM40VIGXOdDw3X344KcclI/gtBbdWF6SlQ4nK/bYhF9d27KYug9WzljHC6B9Ysg== + dependencies: + fill-keys "^1.0.2" + module-not-found-error "^1.0.1" + resolve "^1.11.1" + prr@~1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" @@ -9547,6 +9611,15 @@ resolve@^1.1.6, resolve@^1.10.0, 
resolve@^1.12.0, resolve@^1.20.0, resolve@^1.22 path-parse "^1.0.7" supports-preserve-symlinks-flag "^1.0.0" +resolve@^1.11.1: + version "1.22.10" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.10.tgz#b663e83ffb09bbf2386944736baae803029b8b39" + integrity sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w== + dependencies: + is-core-module "^2.16.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + responselike@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/responselike/-/responselike-3.0.0.tgz#20decb6c298aff0dbee1c355ca95461d42823626" @@ -10068,7 +10141,7 @@ source-map-support@0.5.13: buffer-from "^1.0.0" source-map "^0.6.0" -source-map-support@^0.5.13: +source-map-support@^0.5.13, source-map-support@^0.5.21: version "0.5.21" resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== @@ -10196,7 +10269,7 @@ string-length@^4.0.1: char-regex "^1.0.2" strip-ansi "^6.0.0" -"string-width-cjs@npm:string-width@^4.2.0", string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.2, string-width@^4.2.3: +"string-width-cjs@npm:string-width@^4.2.0": version "4.2.3" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== @@ -10213,6 +10286,15 @@ string-width@^2.1.0, string-width@^2.1.1: is-fullwidth-code-point "^2.0.0" strip-ansi "^4.0.0" +string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.2, string-width@^4.2.3: + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity 
sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + string-width@^5.0.1, string-width@^5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-5.1.2.tgz#14f8daec6d81e7221d2a357e668cab73bdbca794" @@ -10279,7 +10361,7 @@ string_decoder@~1.1.1: dependencies: safe-buffer "~5.1.0" -"strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@^6.0.0, strip-ansi@^6.0.1: +"strip-ansi-cjs@npm:strip-ansi@^6.0.1": version "6.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== @@ -10300,6 +10382,13 @@ strip-ansi@^5.1.0: dependencies: ansi-regex "^4.1.0" +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + strip-ansi@^7.0.1: version "7.1.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" @@ -10400,7 +10489,7 @@ synckit@^0.8.6: version "0.1.0" dependencies: "@matterlabs/hardhat-zksync-deploy" "^0.7.0" - "@matterlabs/hardhat-zksync-solc" "^1.1.4" + "@matterlabs/hardhat-zksync-solc" "=1.1.4" "@matterlabs/hardhat-zksync-verify" "^1.4.3" commander "^9.4.1" eslint "^8.51.0" @@ -10582,6 +10671,11 @@ toidentifier@1.0.1: resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== +toml@^3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/toml/-/toml-3.0.0.tgz#342160f1af1904ec9d204d03a5d61222d762c5ee" + integrity sha512-y/mWCZinnvxjTKYhJ+pYxwD0mRLVvOtdS2Awbgxln6iEnt4rk0yBxeSBHkGJcPucRiG0e55mwWp+g/05rsrd6w== + tough-cookie@~2.5.0: version "2.5.0" resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2" @@ -10699,10 +10793,10 @@ tsconfig-paths@^3.15.0: minimist "^1.2.6" strip-bom "^3.0.0" -tslib@2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3" - integrity sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ== +tslib@2.7.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.7.0.tgz#d9b40c5c40ab59e8738f297df3087bf1a2690c01" + integrity sha512-gLXCKdN1/j47AiHiOkJN69hJmcbGTHI0ImLmbYLHykhgeN0jVGola9yVjFgzCUklsZQMW55o+dW7IXv3RCXDzA== tslib@^1.8.1, tslib@^1.9.0, tslib@^1.9.3: version "1.14.1" @@ -10915,6 +11009,11 @@ undici-types@~5.26.4: resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617" integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA== +undici-types@~6.19.2: + version "6.19.8" + resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-6.19.8.tgz#35111c9d1437ab83a7cdc0abae2f26d88eda0a02" + integrity sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw== + undici@^5.14.0: version "5.28.4" resolved "https://registry.yarnpkg.com/undici/-/undici-5.28.4.tgz#6b280408edb6a1a604a9b20340f45b422e373068" @@ -11150,7 +11249,16 @@ workerpool@6.2.1: resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-6.2.1.tgz#46fc150c17d826b86a008e5a4508656777e9c343" integrity sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw== -"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", 
wrap-ansi@^7.0.0: +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": + version "7.0.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + +wrap-ansi@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== @@ -11186,10 +11294,10 @@ ws@7.4.6: resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.6.tgz#5654ca8ecdeee47c33a9a4bf6d28e2be2980377c" integrity sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A== -ws@8.5.0: - version "8.5.0" - resolved "https://registry.yarnpkg.com/ws/-/ws-8.5.0.tgz#bfb4be96600757fe5382de12c670dab984a1ed4f" - integrity sha512-BWX0SWVgLPzYwF8lTzEy1egjhS4S4OEAHfsO8o65WOVsrnSRGaSiUaa9e0ggGlkMTtBlmOpEXiie9RUcBO86qg== +ws@8.17.1: + version "8.17.1" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.17.1.tgz#9293da530bb548febc95371d90f9c878727d919b" + integrity sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ== ws@^7.4.6: version "7.5.9" @@ -11231,6 +11339,11 @@ yaml@^2.4.2: resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.4.2.tgz#7a2b30f2243a5fc299e1f14ca58d475ed4bc5362" integrity sha512-B3VqDZ+JAg1nZpaEmWtTXUlBneoGx6CPM9b0TENK6aoSu5t73dItudwdgmi6tHlIZZId4dZ9skcAQ2UbcyAeVA== +yaml@^2.7.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.7.0.tgz#aef9bb617a64c937a9a748803786ad8d3ffe1e98" + integrity sha512-+hSoy/QHluxmC9kCIJyL/uyFmLmc+e5CFR5Wa+bpIhIj85LVb9ZH2nVnqrHoSvKogwODv0ClqZkmiSSaIH5LTA== + yargs-parser@20.2.4: version "20.2.4" resolved 
"https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.4.tgz#b42890f14566796f85ae8e3a25290d205f154a54" @@ -11315,3 +11428,7 @@ zksync-ethers@^6.9.0: version "6.9.0" resolved "https://registry.yarnpkg.com/zksync-ethers/-/zksync-ethers-6.9.0.tgz#efaff1d59e2cff837eeda84c4ba59fdca4972a91" integrity sha512-2CppwvLHtz689L7E9EhevbFtsqVukKC/lVicwdeUS2yqV46ET4iBR11rYdEfGW2oEo1h6yJuuwIBDFm2SybkIA== + +"zksync-ethers@https://github.com/zksync-sdk/zksync-ethers#sb-use-new-encoding-in-sdk": + version "6.12.1" + resolved "https://github.com/zksync-sdk/zksync-ethers#bc6e3ab201f743fcbb53e0216f3de421bb3a617f" diff --git a/zkstack_cli/Cargo.lock b/zkstack_cli/Cargo.lock index b31fc9f54421..bbdb50159076 100644 --- a/zkstack_cli/Cargo.lock +++ b/zkstack_cli/Cargo.lock @@ -798,35 +798,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "common" -version = "0.1.0" -dependencies = [ - "anyhow", - "async-trait", - "clap", - "cliclack", - "console", - "ethers", - "futures", - "git_version_macro", - "once_cell", - "serde", - "serde_json", - "serde_yaml", - "sqlx", - "strum", - "thiserror", - "tokio", - "toml", - "types", - "url", - "xshell", - "zksync_system_constants", - "zksync_types", - "zksync_web3_decl", -] - [[package]] name = "common-path" version = "1.0.0" @@ -848,29 +819,6 @@ dependencies = [ "crossbeam-utils", ] -[[package]] -name = "config" -version = "0.1.0" -dependencies = [ - "anyhow", - "clap", - "common", - "ethers", - "rand", - "serde", - "serde_json", - "serde_yaml", - "strum", - "thiserror", - "types", - "url", - "xshell", - "zksync_basic_types", - "zksync_config", - "zksync_protobuf", - "zksync_protobuf_config", -] - [[package]] name = "configparser" version = "3.1.0" @@ -2094,13 +2042,6 @@ version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" -[[package]] -name = "git_version_macro" -version = "0.1.0" -dependencies = [ - "chrono", -] - [[package]] name 
= "glob" version = "0.3.1" @@ -6282,18 +6223,6 @@ version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" -[[package]] -name = "types" -version = "0.1.0" -dependencies = [ - "clap", - "ethers", - "serde", - "strum", - "thiserror", - "zksync_basic_types", -] - [[package]] name = "uint" version = "0.9.5" @@ -7116,7 +7045,7 @@ dependencies = [ [[package]] name = "zkstack" -version = "0.1.0" +version = "0.1.2" dependencies = [ "anyhow", "chrono", @@ -7124,8 +7053,6 @@ dependencies = [ "clap-markdown", "clap_complete", "cliclack", - "common", - "config", "dirs", "ethers", "futures", @@ -7144,14 +7071,18 @@ dependencies = [ "thiserror", "tokio", "toml", - "types", "url", "xshell", + "zkstack_cli_common", + "zkstack_cli_config", + "zkstack_cli_types", "zksync_basic_types", "zksync_config", "zksync_consensus_crypto", "zksync_consensus_roles", "zksync_consensus_utils", + "zksync_contracts", + "zksync_eth_client", "zksync_protobuf", "zksync_protobuf_build", "zksync_protobuf_config", @@ -7160,9 +7091,81 @@ dependencies = [ "zksync_web3_decl", ] +[[package]] +name = "zkstack_cli_common" +version = "0.1.2" +dependencies = [ + "anyhow", + "async-trait", + "clap", + "cliclack", + "console", + "ethers", + "futures", + "once_cell", + "serde", + "serde_json", + "serde_yaml", + "sqlx", + "strum", + "thiserror", + "tokio", + "toml", + "url", + "xshell", + "zkstack_cli_git_version_macro", + "zkstack_cli_types", + "zksync_system_constants", + "zksync_types", + "zksync_web3_decl", +] + +[[package]] +name = "zkstack_cli_config" +version = "0.1.2" +dependencies = [ + "anyhow", + "clap", + "ethers", + "rand", + "serde", + "serde_json", + "serde_yaml", + "strum", + "thiserror", + "url", + "xshell", + "zkstack_cli_common", + "zkstack_cli_types", + "zksync_basic_types", + "zksync_config", + "zksync_protobuf", + "zksync_protobuf_config", + "zksync_system_constants", +] + +[[package]] +name 
= "zkstack_cli_git_version_macro" +version = "0.1.2" +dependencies = [ + "chrono", +] + +[[package]] +name = "zkstack_cli_types" +version = "0.1.2" +dependencies = [ + "clap", + "ethers", + "serde", + "strum", + "thiserror", + "zksync_basic_types", +] + [[package]] name = "zksync_basic_types" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "chrono", @@ -7183,9 +7186,9 @@ dependencies = [ [[package]] name = "zksync_concurrency" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8312ab73d3caa55775bd531795b507fa8f76bd9dabfaeb0954fe43e8fc1323b" +checksum = "cec98400a9e8ba02bfd029eacfe7d6fb7b85b8ef00de59d6bb119d29cc9f7442" dependencies = [ "anyhow", "once_cell", @@ -7202,7 +7205,7 @@ dependencies = [ [[package]] name = "zksync_config" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "rand", @@ -7216,9 +7219,9 @@ dependencies = [ [[package]] name = "zksync_consensus_crypto" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b539960de98df3c3bd27d2d9b97de862027686bbb3bdfc5aaad5b74bb929a1" +checksum = "c04840825dfbe3b9f708d245c87618d5dcf28f29d7b58922971351068a0b8231" dependencies = [ "anyhow", "blst", @@ -7237,9 +7240,9 @@ dependencies = [ [[package]] name = "zksync_consensus_roles" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c49949546895a10431b9daec6ec4208ef0917ace006446d304b51f5b234ba462" +checksum = "05498eab1de26869028b5822cfa4490cac625508d427d59668dc73e8162de65f" dependencies = [ "anyhow", "bit-vec", @@ -7259,9 +7262,9 @@ dependencies = [ [[package]] name = "zksync_consensus_utils" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "723e2a4b056cc5af192a83163c89a6951ee75c098cc5c4a4cdc435f4232d88bd" +checksum = 
"f2f9fa69ef68e6a1955a1d7b33077103fb6d106b560fec0d599c6de268f5be03" dependencies = [ "anyhow", "rand", @@ -7271,7 +7274,7 @@ dependencies = [ [[package]] name = "zksync_contracts" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "envy", "hex", @@ -7284,7 +7287,7 @@ dependencies = [ [[package]] name = "zksync_crypto_primitives" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "blake2", @@ -7298,9 +7301,37 @@ dependencies = [ "zksync_basic_types", ] +[[package]] +name = "zksync_eth_client" +version = "26.1.0-non-semver-compat" +dependencies = [ + "async-trait", + "jsonrpsee", + "rlp", + "thiserror", + "tracing", + "vise", + "zksync_config", + "zksync_contracts", + "zksync_eth_signer", + "zksync_types", + "zksync_web3_decl", +] + +[[package]] +name = "zksync_eth_signer" +version = "26.1.0-non-semver-compat" +dependencies = [ + "async-trait", + "rlp", + "thiserror", + "zksync_basic_types", + "zksync_crypto_primitives", +] + [[package]] name = "zksync_mini_merkle_tree" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "once_cell", "zksync_basic_types", @@ -7309,9 +7340,9 @@ dependencies = [ [[package]] name = "zksync_protobuf" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8986ad796f8e00d8999fee72effba1a21bce40f5f877d681ac9cd89a94834d8" +checksum = "d9032e12528c2466293b206d6edb53b7e900e4a4cc4573e4d075ac2dc00e1b55" dependencies = [ "anyhow", "bit-vec", @@ -7330,9 +7361,9 @@ dependencies = [ [[package]] name = "zksync_protobuf_build" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d870b31995e3acb8e47afeb68ebeeffcf6121e70020e65b3d5d31692115d236" +checksum = "7c644fc8ef3c4d343ea42cebd5551e3562933f15dd9b0e68a52c2657603eb0f5" dependencies = [ "anyhow", "heck", @@ -7347,7 +7378,7 @@ dependencies = [ [[package]] name = "zksync_protobuf_config" -version = 
"0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "hex", @@ -7367,7 +7398,7 @@ dependencies = [ [[package]] name = "zksync_system_constants" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "once_cell", "zksync_basic_types", @@ -7375,9 +7406,10 @@ dependencies = [ [[package]] name = "zksync_types" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", + "async-trait", "bigdecimal", "blake2", "chrono", @@ -7406,7 +7438,7 @@ dependencies = [ [[package]] name = "zksync_utils" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "futures", @@ -7421,7 +7453,7 @@ dependencies = [ [[package]] name = "zksync_vlog" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "chrono", @@ -7446,7 +7478,7 @@ dependencies = [ [[package]] name = "zksync_web3_decl" -version = "0.1.0" +version = "26.1.0-non-semver-compat" dependencies = [ "anyhow", "async-trait", diff --git a/zkstack_cli/Cargo.toml b/zkstack_cli/Cargo.toml index c382e2059007..0d726ccc3600 100644 --- a/zkstack_cli/Cargo.toml +++ b/zkstack_cli/Cargo.toml @@ -9,7 +9,7 @@ members = [ resolver = "2" [workspace.package] -version = "0.1.0" +version = "0.1.2" # x-release-please-version edition = "2021" homepage = "https://zksync.io/" license = "MIT OR Apache-2.0" @@ -21,24 +21,25 @@ keywords = ["zk", "cryptography", "blockchain", "ZKStack", "ZKsync"] [workspace.dependencies] # Local dependencies -common = { path = "crates/common" } -config = { path = "crates/config" } -types = { path = "crates/types" } -git_version_macro = { path = "crates/git_version_macro" } +zkstack_cli_common = { version = "0.1.2", path = "crates/common" } +zkstack_cli_config = { version = "0.1.2", path = "crates/config" } +zkstack_cli_types = { version = "0.1.2", path = "crates/types" } +zkstack_cli_git_version_macro = { version = "0.1.2", path = "crates/git_version_macro" } # ZkSync deps -zksync_config = { path = 
"../core/lib/config" } -zksync_protobuf_config = { path = "../core/lib/protobuf_config" } -zksync_basic_types = { path = "../core/lib/basic_types" } -zksync_system_constants = { path = "../core/lib/constants" } -zksync_types = { path = "../core/lib/types" } -zksync_web3_decl = { path = "../core/lib/web3_decl" } -zksync_eth_client = { path = "../core/lib/eth_client" } -zksync_consensus_roles = "=0.7.0" -zksync_consensus_crypto = "=0.7.0" -zksync_consensus_utils = "=0.7.0" -zksync_protobuf = "=0.7.0" -zksync_protobuf_build = "=0.7.0" +zksync_config = { version = "=26.1.0-non-semver-compat", path = "../core/lib/config" } +zksync_protobuf_config = { version = "=26.1.0-non-semver-compat", path = "../core/lib/protobuf_config" } +zksync_basic_types = { version = "=26.1.0-non-semver-compat", path = "../core/lib/basic_types" } +zksync_system_constants = { version = "=26.1.0-non-semver-compat", path = "../core/lib/constants" } +zksync_types = { version = "=26.1.0-non-semver-compat", path = "../core/lib/types" } +zksync_web3_decl = { version = "=26.1.0-non-semver-compat", path = "../core/lib/web3_decl" } +zksync_eth_client = { version = "=26.1.0-non-semver-compat", path = "../core/lib/eth_client" } +zksync_contracts = { version = "=26.1.0-non-semver-compat", path = "../core/lib/contracts" } +zksync_consensus_roles = "=0.8.0" +zksync_consensus_crypto = "=0.8.0" +zksync_consensus_utils = "=0.8.0" +zksync_protobuf = "=0.8.0" +zksync_protobuf_build = "=0.8.0" # External dependencies anyhow = "1.0.82" diff --git a/zkstack_cli/crates/common/Cargo.toml b/zkstack_cli/crates/common/Cargo.toml index c906c3d28d04..7af8fab92bb8 100644 --- a/zkstack_cli/crates/common/Cargo.toml +++ b/zkstack_cli/crates/common/Cargo.toml @@ -1,6 +1,6 @@ [package] -name = "common" -version = "0.1.0" +name = "zkstack_cli_common" +version.workspace = true edition.workspace = true homepage.workspace = true license.workspace = true @@ -24,12 +24,12 @@ serde_yaml.workspace = true sqlx.workspace = true 
tokio.workspace = true toml.workspace = true -types.workspace = true +zkstack_cli_types.workspace = true url.workspace = true xshell.workspace = true thiserror.workspace = true strum.workspace = true -git_version_macro.workspace = true +zkstack_cli_git_version_macro.workspace = true async-trait.workspace = true zksync_system_constants.workspace = true zksync_types.workspace = true diff --git a/zkstack_cli/crates/common/src/contracts.rs b/zkstack_cli/crates/common/src/contracts.rs index 4cef4467f382..268c1a7ae521 100644 --- a/zkstack_cli/crates/common/src/contracts.rs +++ b/zkstack_cli/crates/common/src/contracts.rs @@ -6,32 +6,23 @@ use crate::cmd::Cmd; pub fn build_l1_contracts(shell: Shell, link_to_code: PathBuf) -> anyhow::Result<()> { let _dir_guard = shell.push_dir(link_to_code.join("contracts/l1-contracts")); + Ok(Cmd::new(cmd!(shell, "yarn build:foundry")).run()?) +} + +pub fn build_l1_da_contracts(shell: Shell, link_to_code: PathBuf) -> anyhow::Result<()> { + let _dir_guard = shell.push_dir(link_to_code.join("contracts/da-contracts")); Ok(Cmd::new(cmd!(shell, "forge build")).run()?) } pub fn build_l2_contracts(shell: Shell, link_to_code: PathBuf) -> anyhow::Result<()> { let _dir_guard = shell.push_dir(link_to_code.join("contracts/l2-contracts")); - Ok(Cmd::new(cmd!( - shell, - "forge build --zksync --zk-enable-eravm-extensions" - )) - .run()?) 
+ Cmd::new(cmd!(shell, "yarn build:foundry")).run()?; + Ok(()) } pub fn build_system_contracts(shell: Shell, link_to_code: PathBuf) -> anyhow::Result<()> { let _dir_guard = shell.push_dir(link_to_code.join("contracts/system-contracts")); // Do not update era-contract's lockfile to avoid dirty submodule Cmd::new(cmd!(shell, "yarn install --frozen-lockfile")).run()?; - Cmd::new(cmd!(shell, "yarn preprocess:system-contracts")).run()?; - Cmd::new(cmd!( - shell, - "forge build --zksync --zk-enable-eravm-extensions" - )) - .run()?; - Cmd::new(cmd!(shell, "yarn preprocess:bootloader")).run()?; - Ok(Cmd::new(cmd!( - shell, - "forge build --zksync --zk-enable-eravm-extensions" - )) - .run()?) + Ok(Cmd::new(cmd!(shell, "yarn build:foundry")).run()?) } diff --git a/zkstack_cli/crates/common/src/ethereum.rs b/zkstack_cli/crates/common/src/ethereum.rs index 2100746fecff..96ec8b4f3597 100644 --- a/zkstack_cli/crates/common/src/ethereum.rs +++ b/zkstack_cli/crates/common/src/ethereum.rs @@ -8,7 +8,7 @@ use ethers::{ providers::Middleware, types::{Address, TransactionRequest}, }; -use types::TokenInfo; +use zkstack_cli_types::TokenInfo; use crate::{logger, wallets::Wallet}; diff --git a/zkstack_cli/crates/common/src/external_node.rs b/zkstack_cli/crates/common/src/external_node.rs index 8a5cbc3cd14c..7f9031bae4ff 100644 --- a/zkstack_cli/crates/common/src/external_node.rs +++ b/zkstack_cli/crates/common/src/external_node.rs @@ -17,7 +17,7 @@ pub fn run( let cmd = Cmd::new( cmd!( shell, - "cargo run --release --bin zksync_external_node -- + "cargo run --manifest-path ./core/Cargo.toml --release --bin zksync_external_node -- --config-path {config_path} --secrets-path {secrets_path} --external-node-config-path {en_config_path} diff --git a/zkstack_cli/crates/common/src/forge.rs b/zkstack_cli/crates/common/src/forge.rs index a7cf08a50bc0..1bf0570873d4 100644 --- a/zkstack_cli/crates/common/src/forge.rs +++ b/zkstack_cli/crates/common/src/forge.rs @@ -69,6 +69,17 @@ impl ForgeScript { 
return Ok(res?); } } + + // TODO: This line is very helpful for debugging purposes, + // maybe it makes sense to make it conditionally displayed. + let command = format!( + "forge script {} --legacy {}", + script_path.to_str().unwrap(), + args_no_resume.join(" ") + ); + + println!("Command: {}", command); + let mut cmd = Cmd::new(cmd!( shell, "forge script {script_path} --legacy {args_no_resume...}" @@ -291,6 +302,8 @@ pub struct ForgeScriptArgs { pub verifier_api_key: Option, #[clap(long)] pub resume: bool, + #[clap(long)] + pub zksync: bool, /// List of additional arguments that can be passed through the CLI. /// /// e.g.: `zkstack init -a --private-key=` @@ -304,6 +317,9 @@ impl ForgeScriptArgs { pub fn build(&mut self) -> Vec { self.add_verify_args(); self.cleanup_contract_args(); + if self.zksync { + self.add_arg(ForgeScriptArg::Zksync); + } self.args .iter() .map(|arg| arg.to_string()) @@ -399,6 +415,10 @@ impl ForgeScriptArgs { .iter() .any(|arg| WALLET_ARGS.contains(&arg.as_ref())) } + + pub fn with_zksync(&mut self) { + self.zksync = true; + } } #[derive(Debug, Clone, ValueEnum, Display, Serialize, Deserialize, Default)] diff --git a/zkstack_cli/crates/common/src/server.rs b/zkstack_cli/crates/common/src/server.rs index 7f8c2a90e589..0dd93bcd3324 100644 --- a/zkstack_cli/crates/common/src/server.rs +++ b/zkstack_cli/crates/common/src/server.rs @@ -70,7 +70,7 @@ impl Server { let mut cmd = Cmd::new( cmd!( shell, - "cargo run --release --bin zksync_server {uring...} -- + "cargo run --manifest-path ./core/Cargo.toml --release --bin zksync_server {uring...} -- --genesis-path {genesis_path} --wallets-path {wallets_path} --config-path {general_path} @@ -96,7 +96,7 @@ impl Server { /// Builds the server. 
pub fn build(&self, shell: &Shell) -> anyhow::Result<()> { - let _dir_guard = shell.push_dir(&self.code_path); + let _dir_guard = shell.push_dir(self.code_path.join("core")); Cmd::new(cmd!(shell, "cargo build --release --bin zksync_server")).run()?; Ok(()) } diff --git a/zkstack_cli/crates/common/src/version.rs b/zkstack_cli/crates/common/src/version.rs index 43be7a07b7ee..f21018a0d7dd 100644 --- a/zkstack_cli/crates/common/src/version.rs +++ b/zkstack_cli/crates/common/src/version.rs @@ -1,7 +1,7 @@ -const GIT_VERSION: &str = git_version_macro::build_git_revision!(); -const GIT_BRANCH: &str = git_version_macro::build_git_branch!(); -const GIT_SUBMODULES: &[(&str, &str)] = git_version_macro::build_git_submodules!(); -const BUILD_TIMESTAMP: &str = git_version_macro::build_timestamp!(); +const GIT_VERSION: &str = zkstack_cli_git_version_macro::build_git_revision!(); +const GIT_BRANCH: &str = zkstack_cli_git_version_macro::build_git_branch!(); +const GIT_SUBMODULES: &[(&str, &str)] = zkstack_cli_git_version_macro::build_git_submodules!(); +const BUILD_TIMESTAMP: &str = zkstack_cli_git_version_macro::build_timestamp!(); /// Returns a multi-line version message that includes: /// - provided crate version diff --git a/zkstack_cli/crates/common/src/wallets.rs b/zkstack_cli/crates/common/src/wallets.rs index 43a9864474cc..ba1f6d239724 100644 --- a/zkstack_cli/crates/common/src/wallets.rs +++ b/zkstack_cli/crates/common/src/wallets.rs @@ -4,7 +4,7 @@ use ethers::{ types::{Address, H256}, }; use serde::{Deserialize, Serialize}; -use types::parse_h256; +use zkstack_cli_types::parse_h256; #[derive(Serialize, Deserialize)] struct WalletSerde { diff --git a/zkstack_cli/crates/config/Cargo.toml b/zkstack_cli/crates/config/Cargo.toml index 9320beffef22..0926f2522cb2 100644 --- a/zkstack_cli/crates/config/Cargo.toml +++ b/zkstack_cli/crates/config/Cargo.toml @@ -1,6 +1,6 @@ [package] -name = "config" -version = "0.1.0" +name = "zkstack_cli_config" +version.workspace = true 
edition.workspace = true homepage.workspace = true license.workspace = true @@ -13,7 +13,7 @@ keywords.workspace = true [dependencies] anyhow.workspace = true clap.workspace = true -common.workspace = true +zkstack_cli_common.workspace = true ethers.workspace = true rand.workspace = true serde.workspace = true @@ -21,7 +21,7 @@ serde_json.workspace = true serde_yaml.workspace = true strum.workspace = true thiserror.workspace = true -types.workspace = true +zkstack_cli_types.workspace = true url.workspace = true xshell.workspace = true @@ -29,3 +29,4 @@ zksync_protobuf_config.workspace = true zksync_protobuf.workspace = true zksync_config.workspace = true zksync_basic_types.workspace = true +zksync_system_constants.workspace = true diff --git a/zkstack_cli/crates/config/src/chain.rs b/zkstack_cli/crates/config/src/chain.rs index cd93299cfc41..ec46fd3f5c30 100644 --- a/zkstack_cli/crates/config/src/chain.rs +++ b/zkstack_cli/crates/config/src/chain.rs @@ -4,10 +4,13 @@ use std::{ }; use serde::{Deserialize, Serialize, Serializer}; -use types::{BaseToken, L1BatchCommitmentMode, L1Network, ProverMode, WalletCreation}; use xshell::Shell; +use zkstack_cli_types::{BaseToken, L1BatchCommitmentMode, L1Network, ProverMode, WalletCreation}; use zksync_basic_types::L2ChainId; -use zksync_config::configs::{GatewayChainConfig, GatewayConfig}; +use zksync_config::{ + configs::{gateway::GatewayChainConfig, GatewayConfig}, + DAClientConfig::{Avail, Eigen, NoDA}, +}; use crate::{ consts::{ @@ -19,7 +22,7 @@ use crate::{ FileConfigWithDefaultName, ReadConfig, ReadConfigWithBasePath, SaveConfig, SaveConfigWithBasePath, ZkStackConfig, }, - ContractsConfig, GeneralConfig, GenesisConfig, SecretsConfig, WalletsConfig, + ContractsConfig, GeneralConfig, GenesisConfig, SecretsConfig, WalletsConfig, GATEWAY_FILE, }; /// Chain configuration file. 
This file is created in the chain @@ -67,6 +70,14 @@ pub struct ChainConfig { pub evm_emulator: bool, } +#[derive(Debug, Clone)] +pub enum DAValidatorType { + Rollup = 0, + NoDA = 1, + Avail = 2, + EigenDA = 3, +} + impl Serialize for ChainConfig { fn serialize(&self, serializer: S) -> Result where @@ -101,6 +112,21 @@ impl ChainConfig { } anyhow::bail!("Wallets configs has not been found"); } + + pub fn get_da_validator_type(&self) -> anyhow::Result { + let general = self.get_general_config().expect("General config not found"); + match ( + self.l1_batch_commit_data_generator_mode, + general.da_client_config, + ) { + (L1BatchCommitmentMode::Rollup, _) => Ok(DAValidatorType::Rollup), + (L1BatchCommitmentMode::Validium, None | Some(NoDA)) => Ok(DAValidatorType::NoDA), + (L1BatchCommitmentMode::Validium, Some(Avail(_))) => Ok(DAValidatorType::Avail), + (L1BatchCommitmentMode::Validium, Some(Eigen(_))) => Ok(DAValidatorType::EigenDA), + _ => anyhow::bail!("DAValidatorType is not supported"), + } + } + pub fn get_contracts_config(&self) -> anyhow::Result { ContractsConfig::read_with_base_path(self.get_shell(), &self.configs) } @@ -137,6 +163,10 @@ impl ChainConfig { self.configs.join(SECRETS_FILE) } + pub fn path_to_gateway_config(&self) -> PathBuf { + self.configs.join(GATEWAY_FILE) + } + pub fn save_general_config(&self, general_config: &GeneralConfig) -> anyhow::Result<()> { general_config.save_with_base_path(self.get_shell(), &self.configs) } diff --git a/zkstack_cli/crates/config/src/consts.rs b/zkstack_cli/crates/config/src/consts.rs index 6c5dfc8165ce..95c097bf4247 100644 --- a/zkstack_cli/crates/config/src/consts.rs +++ b/zkstack_cli/crates/config/src/consts.rs @@ -29,7 +29,7 @@ pub const ZKSYNC_ERA_GIT_REPO: &str = "https://github.com/matter-labs/zksync-era /// Name of the docker-compose file inside zksync repository pub const DOCKER_COMPOSE_FILE: &str = "docker-compose.yml"; /// Path to the config file with mnemonic for localhost wallets -pub(crate) const 
CONFIGS_PATH: &str = "etc/env/file_based"; +pub const CONFIGS_PATH: &str = "etc/env/file_based"; /// Path to the docker-compose file for grafana pub const ERA_OBSERVABILITY_COMPOSE_FILE: &str = "era-observability/docker-compose.yml"; /// Path to era observability repository diff --git a/zkstack_cli/crates/config/src/contracts.rs b/zkstack_cli/crates/config/src/contracts.rs index bac17958e3b7..f8b7b212ea4a 100644 --- a/zkstack_cli/crates/config/src/contracts.rs +++ b/zkstack_cli/crates/config/src/contracts.rs @@ -1,5 +1,6 @@ use ethers::types::{Address, H256}; use serde::{Deserialize, Serialize}; +use zksync_system_constants::{L2_ASSET_ROUTER_ADDRESS, L2_NATIVE_TOKEN_VAULT_ADDRESS}; use crate::{ consts::CONTRACTS_FILE, @@ -38,6 +39,12 @@ impl ContractsConfig { .deployed_addresses .bridges .shared_bridge_proxy_addr; + self.bridges.l1_nullifier_addr = Some( + deploy_l1_output + .deployed_addresses + .bridges + .l1_nullifier_proxy_addr, + ); self.ecosystem_contracts.bridgehub_proxy_addr = deploy_l1_output .deployed_addresses .bridgehub @@ -49,6 +56,26 @@ impl ContractsConfig { self.ecosystem_contracts.transparent_proxy_admin_addr = deploy_l1_output .deployed_addresses .transparent_proxy_admin_addr; + self.ecosystem_contracts.l1_bytecodes_supplier_addr = Some( + deploy_l1_output + .deployed_addresses + .state_transition + .bytecodes_supplier_addr, + ); + self.ecosystem_contracts.stm_deployment_tracker_proxy_addr = Some( + deploy_l1_output + .deployed_addresses + .bridgehub + .ctm_deployment_tracker_proxy_addr, + ); + self.ecosystem_contracts.force_deployments_data = Some( + deploy_l1_output + .contracts_config + .force_deployments_data + .clone(), + ); + self.ecosystem_contracts.expected_rollup_l2_da_validator = + Some(deploy_l1_output.expected_rollup_l2_da_validator_addr); self.l1.default_upgrade_addr = deploy_l1_output .deployed_addresses .state_transition @@ -61,6 +88,8 @@ impl ContractsConfig { self.l1.multicall3_addr = deploy_l1_output.multicall3_addr; 
self.ecosystem_contracts.validator_timelock_addr = deploy_l1_output.deployed_addresses.validator_timelock_addr; + self.ecosystem_contracts.native_token_vault_addr = + Some(deploy_l1_output.deployed_addresses.native_token_vault_addr); self.l1.verifier_addr = deploy_l1_output .deployed_addresses .state_transition @@ -70,6 +99,26 @@ impl ContractsConfig { self.ecosystem_contracts .diamond_cut_data .clone_from(&deploy_l1_output.contracts_config.diamond_cut_data); + self.l1.rollup_l1_da_validator_addr = Some( + deploy_l1_output + .deployed_addresses + .rollup_l1_da_validator_addr, + ); + self.l1.no_da_validium_l1_validator_addr = Some( + deploy_l1_output + .deployed_addresses + .no_da_validium_l1_validator_addr, + ); + self.l1.avail_l1_da_validator_addr = Some( + deploy_l1_output + .deployed_addresses + .avail_l1_da_validator_addr, + ); + self.l1.eigenda_l1_validator_addr = Some( + deploy_l1_output + .deployed_addresses + .eigenda_l1_validator_addr, + ); self.l1.chain_admin_addr = deploy_l1_output.deployed_addresses.chain_admin; } @@ -77,15 +126,20 @@ impl ContractsConfig { self.l1.diamond_proxy_addr = register_chain_output.diamond_proxy_addr; self.l1.governance_addr = register_chain_output.governance_addr; self.l1.chain_admin_addr = register_chain_output.chain_admin_addr; + self.l1.access_control_restriction_addr = + Some(register_chain_output.access_control_restriction_addr); + self.l1.chain_proxy_admin_addr = Some(register_chain_output.chain_proxy_admin_addr); + self.l2.legacy_shared_bridge_addr = register_chain_output.l2_legacy_shared_bridge_addr; } pub fn set_l2_shared_bridge( &mut self, initialize_bridges_output: &InitializeBridgeOutput, ) -> anyhow::Result<()> { - self.bridges.shared.l2_address = Some(initialize_bridges_output.l2_shared_bridge_proxy); - self.bridges.erc20.l2_address = Some(initialize_bridges_output.l2_shared_bridge_proxy); - self.l2.legacy_shared_bridge_addr = Some(initialize_bridges_output.l2_shared_bridge_proxy); + 
self.bridges.shared.l2_address = Some(L2_ASSET_ROUTER_ADDRESS); + self.bridges.erc20.l2_address = Some(L2_ASSET_ROUTER_ADDRESS); + self.l2.l2_native_token_vault_proxy_addr = Some(L2_NATIVE_TOKEN_VAULT_ADDRESS); + self.l2.da_validator_addr = Some(initialize_bridges_output.l2_da_validator_address); Ok(()) } @@ -135,27 +189,25 @@ pub struct EcosystemContracts { pub state_transition_proxy_addr: Address, pub transparent_proxy_admin_addr: Address, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub stm_deployment_tracker_proxy_addr: Option
, pub validator_timelock_addr: Address, pub diamond_cut_data: String, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub force_deployments_data: Option, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub native_token_vault_addr: Option
, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub l1_bytecodes_supplier_addr: Option
, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub expected_rollup_l2_da_validator: Option
, + // `Option` to be able to parse configs from pre-gateway protocol version. + #[serde(skip_serializing_if = "Option::is_none")] + pub l1_wrapped_base_token_store: Option
, } impl ZkStackConfig for EcosystemContracts {} @@ -165,7 +217,6 @@ pub struct BridgesContracts { pub erc20: BridgeContractsDefinition, pub shared: BridgeContractsDefinition, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub l1_nullifier_addr: Option
, } @@ -185,11 +236,9 @@ pub struct L1Contracts { #[serde(default)] pub chain_admin_addr: Address, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub access_control_restriction_addr: Option
, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub chain_proxy_admin_addr: Option
, pub multicall3_addr: Address, @@ -197,23 +246,21 @@ pub struct L1Contracts { pub validator_timelock_addr: Address, pub base_token_addr: Address, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub base_token_asset_id: Option, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub rollup_l1_da_validator_addr: Option
, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub avail_l1_da_validator_addr: Option
, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub no_da_validium_l1_validator_addr: Option
, + // `Option` to be able to parse configs from previous protocol version + #[serde(skip_serializing_if = "Option::is_none")] + pub eigenda_l1_validator_addr: Option
, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub transaction_filterer_addr: Option
, } @@ -223,19 +270,15 @@ pub struct L2Contracts { pub testnet_paymaster_addr: Address, pub default_l2_upgrader: Address, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub da_validator_addr: Option
, // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. #[serde(skip_serializing_if = "Option::is_none")] pub l2_native_token_vault_proxy_addr: Option
, + // `Option` to be able to parse configs from previous protocol version + #[serde(skip_serializing_if = "Option::is_none")] + pub legacy_shared_bridge_addr: Option
, pub consensus_registry: Option
, pub multicall3: Option
, - pub legacy_shared_bridge_addr: Option
, pub timestamp_asserter_addr: Option
, - // `Option` to be able to parse configs from pre-gateway protocol version. - // TODO(EVM-927): not used without gateway version. - #[serde(skip_serializing_if = "Option::is_none")] - pub predeployed_l2_wrapped_base_token_address: Option
, } diff --git a/zkstack_cli/crates/config/src/ecosystem.rs b/zkstack_cli/crates/config/src/ecosystem.rs index 5fe85b175de4..906cf8dd012b 100644 --- a/zkstack_cli/crates/config/src/ecosystem.rs +++ b/zkstack_cli/crates/config/src/ecosystem.rs @@ -3,11 +3,11 @@ use std::{ path::{Path, PathBuf}, }; -use common::{config::global_config, logger}; use serde::{Deserialize, Serialize, Serializer}; use thiserror::Error; -use types::{L1Network, ProverMode, WalletCreation}; use xshell::Shell; +use zkstack_cli_common::{config::global_config, logger}; +use zkstack_cli_types::{L1Network, ProverMode, WalletCreation}; use zksync_basic_types::L2ChainId; use crate::{ diff --git a/zkstack_cli/crates/config/src/explorer_compose.rs b/zkstack_cli/crates/config/src/explorer_compose.rs index 13dd665d2e3d..5c6593cd7a46 100644 --- a/zkstack_cli/crates/config/src/explorer_compose.rs +++ b/zkstack_cli/crates/config/src/explorer_compose.rs @@ -4,9 +4,9 @@ use std::{ }; use anyhow::Context; -use common::{db, docker::adjust_localhost_for_docker}; use serde::{Deserialize, Serialize}; use url::Url; +use zkstack_cli_common::{db, docker::adjust_localhost_for_docker}; use crate::{ consts::{ diff --git a/zkstack_cli/crates/config/src/forge_interface/deploy_ecosystem/input.rs b/zkstack_cli/crates/config/src/forge_interface/deploy_ecosystem/input.rs index 17b2bac38a3f..5cd67198aa70 100644 --- a/zkstack_cli/crates/config/src/forge_interface/deploy_ecosystem/input.rs +++ b/zkstack_cli/crates/config/src/forge_interface/deploy_ecosystem/input.rs @@ -6,6 +6,7 @@ use ethers::{ }; use rand::Rng; use serde::{Deserialize, Serialize}; +use zkstack_cli_types::L1Network; use zksync_basic_types::L2ChainId; use crate::{ @@ -111,6 +112,7 @@ pub struct DeployL1Config { pub era_chain_id: L2ChainId, pub owner_address: Address, pub testnet_verifier: bool, + pub support_l2_legacy_shared_bridge_test: bool, pub contracts: ContractsDeployL1Config, pub tokens: TokensDeployL1Config, } @@ -124,11 +126,14 @@ impl DeployL1Config { 
initial_deployment_config: &InitialDeploymentConfig, era_chain_id: L2ChainId, testnet_verifier: bool, + l1_network: L1Network, + support_l2_legacy_shared_bridge_test: bool, ) -> Self { Self { era_chain_id, testnet_verifier, owner_address: wallets_config.governor.address, + support_l2_legacy_shared_bridge_test, contracts: ContractsDeployL1Config { create2_factory_addr: initial_deployment_config.create2_factory_addr, create2_factory_salt: initial_deployment_config.create2_factory_salt, @@ -162,6 +167,8 @@ impl DeployL1Config { priority_tx_max_gas_limit: initial_deployment_config.priority_tx_max_gas_limit, validator_timelock_execution_delay: initial_deployment_config .validator_timelock_execution_delay, + avail_l1_da_validator_addr: l1_network.avail_l1_da_validator_addr(), + eigenda_l1_validator_addr: l1_network.eigenda_l1_validator_addr(), }, tokens: TokensDeployL1Config { token_weth_address: initial_deployment_config.token_weth_address, @@ -196,6 +203,10 @@ pub struct ContractsDeployL1Config { pub bootloader_hash: H256, pub default_aa_hash: H256, pub evm_emulator_hash: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub avail_l1_da_validator_addr: Option
, + #[serde(skip_serializing_if = "Option::is_none")] + pub eigenda_l1_validator_addr: Option
, } #[derive(Debug, Deserialize, Serialize, Clone)] diff --git a/zkstack_cli/crates/config/src/forge_interface/deploy_ecosystem/output.rs b/zkstack_cli/crates/config/src/forge_interface/deploy_ecosystem/output.rs index 7a922cbdf3c0..9294cba6ef8c 100644 --- a/zkstack_cli/crates/config/src/forge_interface/deploy_ecosystem/output.rs +++ b/zkstack_cli/crates/config/src/forge_interface/deploy_ecosystem/output.rs @@ -16,26 +16,10 @@ pub struct DeployL1Output { pub era_chain_id: u32, pub l1_chain_id: u32, pub multicall3_addr: Address, - pub owner_addr: Address, + pub owner_address: Address, pub contracts_config: DeployL1ContractsConfigOutput, pub deployed_addresses: DeployL1DeployedAddressesOutput, -} - -impl ZkStackConfig for DeployL1Output {} - -#[derive(Debug, Deserialize, Serialize, Clone)] -pub struct DeployL1ContractsConfigOutput { - pub diamond_init_max_l2_gas_per_batch: u64, - pub diamond_init_batch_overhead_l1_gas: u64, - pub diamond_init_max_pubdata_per_batch: u64, - pub diamond_init_minimal_l2_gas_price: u64, - pub diamond_init_priority_tx_max_pubdata: u64, - pub diamond_init_pubdata_pricing_mode: u64, - pub priority_tx_max_gas_limit: u64, - pub recursion_circuits_set_vks_hash: H256, - pub recursion_leaf_level_vk_hash: H256, - pub recursion_node_level_vk_hash: H256, - pub diamond_cut_data: String, + pub expected_rollup_l2_da_validator_addr: Address, } #[derive(Debug, Deserialize, Serialize, Clone)] @@ -45,15 +29,34 @@ pub struct DeployL1DeployedAddressesOutput { pub transparent_proxy_admin_addr: Address, pub validator_timelock_addr: Address, pub chain_admin: Address, + pub access_control_restriction_addr: Address, pub bridgehub: L1BridgehubOutput, pub bridges: L1BridgesOutput, pub state_transition: L1StateTransitionOutput, + pub rollup_l1_da_validator_addr: Address, + pub no_da_validium_l1_validator_addr: Address, + pub avail_l1_da_validator_addr: Address, + pub eigenda_l1_validator_addr: Address, + pub l1_rollup_da_manager: Address, + pub 
native_token_vault_addr: Address, +} + +impl ZkStackConfig for DeployL1Output {} + +#[derive(Debug, Deserialize, Serialize, Clone)] +pub struct DeployL1ContractsConfigOutput { + pub diamond_cut_data: String, + pub force_deployments_data: String, } #[derive(Debug, Deserialize, Serialize, Clone)] pub struct L1BridgehubOutput { pub bridgehub_implementation_addr: Address, pub bridgehub_proxy_addr: Address, + pub ctm_deployment_tracker_proxy_addr: Address, + pub ctm_deployment_tracker_implementation_addr: Address, + pub message_root_proxy_addr: Address, + pub message_root_implementation_addr: Address, } #[derive(Debug, Deserialize, Serialize, Clone)] @@ -62,21 +65,24 @@ pub struct L1BridgesOutput { pub erc20_bridge_proxy_addr: Address, pub shared_bridge_implementation_addr: Address, pub shared_bridge_proxy_addr: Address, + pub l1_nullifier_implementation_addr: Address, + pub l1_nullifier_proxy_addr: Address, } #[derive(Debug, Deserialize, Serialize, Clone)] pub struct L1StateTransitionOutput { + pub state_transition_proxy_addr: Address, + pub state_transition_implementation_addr: Address, + pub verifier_addr: Address, pub admin_facet_addr: Address, - pub default_upgrade_addr: Address, - pub diamond_init_addr: Address, - pub diamond_proxy_addr: Address, + pub mailbox_facet_addr: Address, pub executor_facet_addr: Address, - pub genesis_upgrade_addr: Address, pub getters_facet_addr: Address, - pub mailbox_facet_addr: Address, - pub state_transition_implementation_addr: Address, - pub state_transition_proxy_addr: Address, - pub verifier_addr: Address, + pub diamond_init_addr: Address, + pub genesis_upgrade_addr: Address, + pub default_upgrade_addr: Address, + pub diamond_proxy_addr: Address, + pub bytecodes_supplier_addr: Address, } #[derive(Debug, Deserialize, Serialize, Clone)] diff --git a/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/input.rs b/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/input.rs index 8f35180077a2..afd71cd97757 
100644 --- a/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/input.rs +++ b/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/input.rs @@ -1,7 +1,6 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. use ethers::abi::Address; use serde::{Deserialize, Serialize}; -use types::ProverMode; +use zkstack_cli_types::ProverMode; use zksync_basic_types::{H256, U256}; use zksync_config::GenesisConfig; @@ -91,7 +90,6 @@ impl DeployGatewayCTMInput { l1_chain_id: U256::from(ecosystem_config.l1_network.chain_id()), testnet_verifier: ecosystem_config.prover_version == ProverMode::NoProofs, - recursion_node_level_vk_hash: H256::zero(), recursion_leaf_level_vk_hash: H256::zero(), recursion_circuits_set_vks_hash: H256::zero(), diff --git a/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/mod.rs b/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/mod.rs index cb22b3529e85..7d1a54844d0c 100644 --- a/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/mod.rs +++ b/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/mod.rs @@ -1,3 +1,2 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. pub mod input; pub mod output; diff --git a/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/output.rs b/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/output.rs index ee85d11a5eb6..33661fb6ebef 100644 --- a/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/output.rs +++ b/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/output.rs @@ -1,4 +1,3 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. 
use ethers::abi::Address; use serde::{Deserialize, Serialize}; diff --git a/zkstack_cli/crates/config/src/forge_interface/deploy_l2_contracts/input.rs b/zkstack_cli/crates/config/src/forge_interface/deploy_l2_contracts/input.rs index 3836dca9d24c..78ffcd16eaa8 100644 --- a/zkstack_cli/crates/config/src/forge_interface/deploy_l2_contracts/input.rs +++ b/zkstack_cli/crates/config/src/forge_interface/deploy_l2_contracts/input.rs @@ -1,8 +1,8 @@ use ethers::types::Address; use serde::{Deserialize, Serialize}; -use zksync_basic_types::L2ChainId; +use zksync_basic_types::{L2ChainId, U256}; -use crate::{traits::ZkStackConfig, ChainConfig}; +use crate::{traits::ZkStackConfig, ChainConfig, ContractsConfig}; impl ZkStackConfig for DeployL2ContractsInput {} @@ -16,20 +16,27 @@ pub struct DeployL2ContractsInput { pub bridgehub: Address, pub governance: Address, pub erc20_bridge: Address, + pub da_validator_type: U256, pub consensus_registry_owner: Address, } impl DeployL2ContractsInput { - pub fn new(chain_config: &ChainConfig, era_chain_id: L2ChainId) -> anyhow::Result { + pub fn new( + chain_config: &ChainConfig, + contracts_config: &ContractsConfig, + era_chain_id: L2ChainId, + ) -> anyhow::Result { let contracts = chain_config.get_contracts_config()?; let wallets = chain_config.get_wallets_config()?; + Ok(Self { era_chain_id, chain_id: chain_config.chain_id, l1_shared_bridge: contracts.bridges.shared.l1_address, bridgehub: contracts.ecosystem_contracts.bridgehub_proxy_addr, - governance: wallets.governor.address, + governance: contracts_config.l1.governance_addr, erc20_bridge: contracts.bridges.erc20.l1_address, + da_validator_type: U256::from(chain_config.get_da_validator_type()? 
as u8), consensus_registry_owner: wallets.governor.address, }) } diff --git a/zkstack_cli/crates/config/src/forge_interface/deploy_l2_contracts/output.rs b/zkstack_cli/crates/config/src/forge_interface/deploy_l2_contracts/output.rs index 7b2b56c81548..e797686561ae 100644 --- a/zkstack_cli/crates/config/src/forge_interface/deploy_l2_contracts/output.rs +++ b/zkstack_cli/crates/config/src/forge_interface/deploy_l2_contracts/output.rs @@ -12,8 +12,7 @@ impl ZkStackConfig for TimestampAsserterOutput {} #[derive(Debug, Clone, Serialize, Deserialize)] pub struct InitializeBridgeOutput { - pub l2_shared_bridge_implementation: Address, - pub l2_shared_bridge_proxy: Address, + pub l2_da_validator_address: Address, } #[derive(Debug, Clone, Serialize, Deserialize)] diff --git a/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/input.rs b/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/input.rs index 5fce7ebe3388..41100c55a2ae 100644 --- a/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/input.rs +++ b/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/input.rs @@ -1,7 +1,6 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. use ethers::types::Address; use serde::{Deserialize, Serialize}; -use types::L1BatchCommitmentMode; +use zkstack_cli_types::L1BatchCommitmentMode; use zksync_basic_types::L2ChainId; use crate::{traits::ZkStackConfig, ChainConfig}; diff --git a/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/mod.rs b/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/mod.rs index cb22b3529e85..7d1a54844d0c 100644 --- a/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/mod.rs +++ b/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/mod.rs @@ -1,3 +1,2 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. 
pub mod input; pub mod output; diff --git a/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/output.rs b/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/output.rs index 57c82effcc47..94b6d25a52bf 100644 --- a/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/output.rs +++ b/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/output.rs @@ -1,4 +1,3 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. use ethers::types::Address; use serde::{Deserialize, Serialize}; diff --git a/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/input.rs b/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/input.rs index d71f327ede45..8bd300f50581 100644 --- a/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/input.rs +++ b/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/input.rs @@ -1,4 +1,3 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. use ethers::types::{Address, H256}; use serde::{Deserialize, Serialize}; use zksync_basic_types::L2ChainId; @@ -15,6 +14,7 @@ pub struct GatewayEcosystemUpgradeInput { pub testnet_verifier: bool, pub contracts: GatewayUpgradeContractsConfig, pub tokens: GatewayUpgradeTokensConfig, + pub governance_upgrade_timer_initial_delay: u64, } impl ZkStackConfig for GatewayEcosystemUpgradeInput {} @@ -33,6 +33,8 @@ impl GatewayEcosystemUpgradeInput { era_chain_id, testnet_verifier, owner_address: current_contracts_config.l1.governance_addr, + // TODO: for local testing, even 0 is fine - but before prod, we should load it from some configuration. 
+ governance_upgrade_timer_initial_delay: 0, contracts: GatewayUpgradeContractsConfig { create2_factory_addr: initial_deployment_config.create2_factory_addr, create2_factory_salt: initial_deployment_config.create2_factory_salt, diff --git a/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/mod.rs b/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/mod.rs index cb22b3529e85..7d1a54844d0c 100644 --- a/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/mod.rs +++ b/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/mod.rs @@ -1,3 +1,2 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. pub mod input; pub mod output; diff --git a/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/output.rs b/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/output.rs index 8f030eb47b73..2aab8a1e5422 100644 --- a/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/output.rs +++ b/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/output.rs @@ -1,4 +1,3 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. use ethers::types::{Address, H256}; use serde::{Deserialize, Serialize}; use zksync_basic_types::web3::Bytes; @@ -19,6 +18,10 @@ pub struct GatewayEcosystemUpgradeOutput { pub contracts_config: GatewayEcosystemUpgradeContractsOutput, pub deployed_addresses: GatewayEcosystemUpgradeDeployedAddresses, + /// List of transactions that were executed during the upgrade. + /// This is added later by the zkstack and not present in the toml file that solidity creates. 
+ #[serde(default)] + pub transactions: Vec, } impl FileConfigWithDefaultName for GatewayEcosystemUpgradeOutput { @@ -45,6 +48,12 @@ pub struct GatewayEcosystemUpgradeContractsOutput { pub recursion_circuits_set_vks_hash: H256, pub recursion_leaf_level_vk_hash: H256, pub recursion_node_level_vk_hash: H256, + + pub new_protocol_version: u64, + pub old_protocol_version: u64, + + pub old_validator_timelock: Address, + pub l1_legacy_shared_bridge: Address, } #[derive(Debug, Deserialize, Serialize, Clone)] diff --git a/zkstack_cli/crates/config/src/forge_interface/gateway_preparation/input.rs b/zkstack_cli/crates/config/src/forge_interface/gateway_preparation/input.rs index 263905d9fb35..6c4fc4d764a5 100644 --- a/zkstack_cli/crates/config/src/forge_interface/gateway_preparation/input.rs +++ b/zkstack_cli/crates/config/src/forge_interface/gateway_preparation/input.rs @@ -1,4 +1,3 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. use ethers::utils::hex; use serde::{Deserialize, Serialize}; use zksync_basic_types::{web3::Bytes, Address}; diff --git a/zkstack_cli/crates/config/src/forge_interface/gateway_preparation/mod.rs b/zkstack_cli/crates/config/src/forge_interface/gateway_preparation/mod.rs index cb22b3529e85..7d1a54844d0c 100644 --- a/zkstack_cli/crates/config/src/forge_interface/gateway_preparation/mod.rs +++ b/zkstack_cli/crates/config/src/forge_interface/gateway_preparation/mod.rs @@ -1,3 +1,2 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. 
pub mod input; pub mod output; diff --git a/zkstack_cli/crates/config/src/forge_interface/gateway_preparation/output.rs b/zkstack_cli/crates/config/src/forge_interface/gateway_preparation/output.rs index 7d27725b2825..c201625be28b 100644 --- a/zkstack_cli/crates/config/src/forge_interface/gateway_preparation/output.rs +++ b/zkstack_cli/crates/config/src/forge_interface/gateway_preparation/output.rs @@ -1,4 +1,3 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. use serde::{Deserialize, Serialize}; use zksync_basic_types::{Address, H256}; diff --git a/zkstack_cli/crates/config/src/forge_interface/register_chain/input.rs b/zkstack_cli/crates/config/src/forge_interface/register_chain/input.rs index 7d30c7f49a70..f44b8c1f50c3 100644 --- a/zkstack_cli/crates/config/src/forge_interface/register_chain/input.rs +++ b/zkstack_cli/crates/config/src/forge_interface/register_chain/input.rs @@ -1,39 +1,53 @@ use ethers::types::Address; use rand::Rng; use serde::{Deserialize, Serialize}; -use types::L1BatchCommitmentMode; -use zksync_basic_types::L2ChainId; +use zkstack_cli_types::L1BatchCommitmentMode; +use zksync_basic_types::{L2ChainId, H256}; use crate::{traits::ZkStackConfig, ChainConfig, ContractsConfig}; +#[derive(Debug, Deserialize, Serialize, Clone)] +pub struct RegisterChainL1Config { + contracts_config: Contracts, + deployed_addresses: DeployedAddresses, + chain: ChainL1Config, + owner_address: Address, + governance: Address, + create2_factory_address: Address, + create2_salt: H256, + initialize_legacy_bridge: bool, +} + #[derive(Debug, Deserialize, Serialize, Clone)] struct Bridgehub { bridgehub_proxy_addr: Address, } +#[derive(Debug, Deserialize, Serialize, Clone)] +struct Bridges { + shared_bridge_proxy_addr: Address, + l1_nullifier_proxy_addr: Address, + erc20_bridge_proxy_addr: Address, +} + #[derive(Debug, Deserialize, Serialize, Clone)] struct StateTransition { - state_transition_proxy_addr: Address, + 
chain_type_manager_proxy_addr: Address, } #[derive(Debug, Deserialize, Serialize, Clone)] struct DeployedAddresses { state_transition: StateTransition, bridgehub: Bridgehub, + bridges: Bridges, validator_timelock_addr: Address, + native_token_vault_addr: Address, } #[derive(Debug, Deserialize, Serialize, Clone)] struct Contracts { diamond_cut_data: String, -} - -#[derive(Debug, Deserialize, Serialize, Clone)] -pub struct RegisterChainL1Config { - contracts_config: Contracts, - deployed_addresses: DeployedAddresses, - chain: ChainL1Config, - owner_address: Address, + force_deployments_data: String, } #[derive(Debug, Deserialize, Serialize, Clone)] @@ -55,21 +69,39 @@ impl ZkStackConfig for RegisterChainL1Config {} impl RegisterChainL1Config { pub fn new(chain_config: &ChainConfig, contracts: &ContractsConfig) -> anyhow::Result { + let initialize_legacy_bridge = chain_config.legacy_bridge.unwrap_or_default(); let wallets_config = chain_config.get_wallets_config()?; Ok(Self { contracts_config: Contracts { diamond_cut_data: contracts.ecosystem_contracts.diamond_cut_data.clone(), + force_deployments_data: contracts + .ecosystem_contracts + .force_deployments_data + .clone() + .expect("force_deployment_data"), }, deployed_addresses: DeployedAddresses { state_transition: StateTransition { - state_transition_proxy_addr: contracts + chain_type_manager_proxy_addr: contracts .ecosystem_contracts .state_transition_proxy_addr, }, bridgehub: Bridgehub { bridgehub_proxy_addr: contracts.ecosystem_contracts.bridgehub_proxy_addr, }, + bridges: Bridges { + shared_bridge_proxy_addr: contracts.bridges.shared.l1_address, + l1_nullifier_proxy_addr: contracts + .bridges + .l1_nullifier_addr + .expect("l1_nullifier_addr"), + erc20_bridge_proxy_addr: contracts.bridges.erc20.l1_address, + }, validator_timelock_addr: contracts.ecosystem_contracts.validator_timelock_addr, + native_token_vault_addr: contracts + .ecosystem_contracts + .native_token_vault_addr + 
.expect("native_token_vault_addr"), }, chain: ChainL1Config { chain_chain_id: chain_config.chain_id, @@ -88,6 +120,10 @@ impl RegisterChainL1Config { allow_evm_emulator: chain_config.evm_emulator, }, owner_address: wallets_config.governor.address, + governance: contracts.l1.governance_addr, + create2_factory_address: contracts.create2_factory_addr, + create2_salt: H256::random(), + initialize_legacy_bridge, }) } } diff --git a/zkstack_cli/crates/config/src/forge_interface/register_chain/output.rs b/zkstack_cli/crates/config/src/forge_interface/register_chain/output.rs index a3e23f7bae42..951f36aa9fa8 100644 --- a/zkstack_cli/crates/config/src/forge_interface/register_chain/output.rs +++ b/zkstack_cli/crates/config/src/forge_interface/register_chain/output.rs @@ -8,6 +8,9 @@ pub struct RegisterChainOutput { pub diamond_proxy_addr: Address, pub governance_addr: Address, pub chain_admin_addr: Address, + pub l2_legacy_shared_bridge_addr: Option
, + pub access_control_restriction_addr: Address, + pub chain_proxy_admin_addr: Address, } impl ZkStackConfig for RegisterChainOutput {} diff --git a/zkstack_cli/crates/config/src/forge_interface/script_params.rs b/zkstack_cli/crates/config/src/forge_interface/script_params.rs index eb693c83a54c..b7496540ab18 100644 --- a/zkstack_cli/crates/config/src/forge_interface/script_params.rs +++ b/zkstack_cli/crates/config/src/forge_interface/script_params.rs @@ -39,9 +39,9 @@ pub const DEPLOY_L2_CONTRACTS_SCRIPT_PARAMS: ForgeScriptParams = ForgeScriptPara }; pub const REGISTER_CHAIN_SCRIPT_PARAMS: ForgeScriptParams = ForgeScriptParams { - input: "script-config/register-hyperchain.toml", - output: "script-out/output-register-hyperchain.toml", - script_path: "deploy-scripts/RegisterHyperchain.s.sol", + input: "script-config/register-zk-chain.toml", + output: "script-out/output-register-zk-chain.toml", + script_path: "deploy-scripts/RegisterZKChain.s.sol", }; pub const DEPLOY_ERC20_SCRIPT_PARAMS: ForgeScriptParams = ForgeScriptParams { @@ -74,39 +74,33 @@ pub const ENABLE_EVM_EMULATOR_PARAMS: ForgeScriptParams = ForgeScriptParams { script_path: "deploy-scripts/EnableEvmEmulator.s.sol", }; -// TODO(EVM-927): the following script does not work without gateway contracts. pub const DEPLOY_GATEWAY_CTM: ForgeScriptParams = ForgeScriptParams { input: "script-config/config-deploy-gateway-ctm.toml", output: "script-out/output-deploy-gateway-ctm.toml", script_path: "deploy-scripts/GatewayCTMFromL1.s.sol", }; -// TODO(EVM-927): the following script does not work without gateway contracts. pub const GATEWAY_PREPARATION: ForgeScriptParams = ForgeScriptParams { input: "script-config/gateway-preparation-l1.toml", output: "script-out/output-gateway-preparation-l1.toml", script_path: "deploy-scripts/GatewayPreparation.s.sol", }; -// TODO(EVM-927): the following script does not work without gateway contracts. 
pub const GATEWAY_GOVERNANCE_TX_PATH1: &str = "contracts/l1-contracts/script-out/gateway-deploy-governance-txs-1.json"; -// TODO(EVM-927): the following script does not work without gateway contracts. pub const GATEWAY_UPGRADE_ECOSYSTEM_PARAMS: ForgeScriptParams = ForgeScriptParams { input: "script-config/gateway-upgrade-ecosystem.toml", output: "script-out/gateway-upgrade-ecosystem.toml", script_path: "deploy-scripts/upgrade/EcosystemUpgrade.s.sol", }; -// TODO(EVM-927): the following script does not work without gateway contracts. pub const GATEWAY_UPGRADE_CHAIN_PARAMS: ForgeScriptParams = ForgeScriptParams { input: "script-config/gateway-upgrade-chain.toml", output: "script-out/gateway-upgrade-chain.toml", script_path: "deploy-scripts/upgrade/ChainUpgrade.s.sol", }; -// TODO(EVM-927): the following script does not work without gateway contracts. pub const FINALIZE_UPGRADE_SCRIPT_PARAMS: ForgeScriptParams = ForgeScriptParams { input: "script-config/gateway-finalize-upgrade.toml", output: "script-out/gateway-finalize-upgrade.toml", diff --git a/zkstack_cli/crates/config/src/forge_interface/setup_legacy_bridge/mod.rs b/zkstack_cli/crates/config/src/forge_interface/setup_legacy_bridge/mod.rs index 201cf86b734b..aa0764864606 100644 --- a/zkstack_cli/crates/config/src/forge_interface/setup_legacy_bridge/mod.rs +++ b/zkstack_cli/crates/config/src/forge_interface/setup_legacy_bridge/mod.rs @@ -8,6 +8,8 @@ pub struct SetupLegacyBridgeInput { pub bridgehub: Address, pub diamond_proxy: Address, pub shared_bridge_proxy: Address, + pub l1_nullifier_proxy: Address, + pub l1_native_token_vault: Address, pub transparent_proxy_admin: Address, pub erc20bridge_proxy: Address, pub token_weth_address: Address, diff --git a/zkstack_cli/crates/config/src/gateway.rs b/zkstack_cli/crates/config/src/gateway.rs index 0bdbcdf25475..67b5ad327cc2 100644 --- a/zkstack_cli/crates/config/src/gateway.rs +++ b/zkstack_cli/crates/config/src/gateway.rs @@ -1,4 +1,3 @@ -/// TODO(EVM-927): Note that 
the contents of this file are not useable without Gateway contracts. use ethers::utils::hex; use zksync_config::configs::{gateway::GatewayChainConfig, GatewayConfig}; diff --git a/zkstack_cli/crates/config/src/general.rs b/zkstack_cli/crates/config/src/general.rs index 0079105b66ca..c1639d6bea15 100644 --- a/zkstack_cli/crates/config/src/general.rs +++ b/zkstack_cli/crates/config/src/general.rs @@ -1,9 +1,9 @@ use std::path::{Path, PathBuf}; use anyhow::Context; -use common::yaml::merge_yaml; use url::Url; use xshell::Shell; +use zkstack_cli_common::yaml::merge_yaml; use zksync_config::configs::object_store::ObjectStoreMode; pub use zksync_config::configs::GeneralConfig; use zksync_protobuf_config::{encode_yaml_repr, read_yaml_repr}; diff --git a/zkstack_cli/crates/config/src/portal.rs b/zkstack_cli/crates/config/src/portal.rs index 2b6f0ffd5156..ab121e8e177d 100644 --- a/zkstack_cli/crates/config/src/portal.rs +++ b/zkstack_cli/crates/config/src/portal.rs @@ -1,8 +1,8 @@ use std::path::{Path, PathBuf}; use serde::{Deserialize, Serialize}; -use types::TokenInfo; use xshell::Shell; +use zkstack_cli_types::TokenInfo; use crate::{ consts::{ diff --git a/zkstack_cli/crates/config/src/secrets.rs b/zkstack_cli/crates/config/src/secrets.rs index cf0a9927c560..91e8964b4651 100644 --- a/zkstack_cli/crates/config/src/secrets.rs +++ b/zkstack_cli/crates/config/src/secrets.rs @@ -1,8 +1,8 @@ use std::{path::Path, str::FromStr}; use anyhow::Context; -use common::db::DatabaseConfig; use xshell::Shell; +use zkstack_cli_common::db::DatabaseConfig; use zksync_basic_types::url::SensitiveUrl; pub use zksync_config::configs::Secrets as SecretsConfig; use zksync_protobuf_config::{encode_yaml_repr, read_yaml_repr}; diff --git a/zkstack_cli/crates/config/src/traits.rs b/zkstack_cli/crates/config/src/traits.rs index a4a4ad22c613..d21641e33ff5 100644 --- a/zkstack_cli/crates/config/src/traits.rs +++ b/zkstack_cli/crates/config/src/traits.rs @@ -1,12 +1,12 @@ use std::path::{Path, PathBuf}; 
use anyhow::{bail, Context}; -use common::files::{ - read_json_file, read_toml_file, read_yaml_file, save_json_file, save_toml_file, save_yaml_file, -}; use serde::{de::DeserializeOwned, Serialize}; use url::Url; use xshell::Shell; +use zkstack_cli_common::files::{ + read_json_file, read_toml_file, read_yaml_file, save_json_file, save_toml_file, save_yaml_file, +}; // Configs that we use only inside ZK Stack CLI, we don't have protobuf implementation for them. pub trait ZkStackConfig {} diff --git a/zkstack_cli/crates/config/src/wallet_creation.rs b/zkstack_cli/crates/config/src/wallet_creation.rs index 6cfdf08a36d3..547b3a0e1c90 100644 --- a/zkstack_cli/crates/config/src/wallet_creation.rs +++ b/zkstack_cli/crates/config/src/wallet_creation.rs @@ -1,9 +1,9 @@ use std::path::{Path, PathBuf}; -use common::wallets::Wallet; use rand::thread_rng; -use types::WalletCreation; use xshell::Shell; +use zkstack_cli_common::wallets::Wallet; +use zkstack_cli_types::WalletCreation; use crate::{ consts::{BASE_PATH, TEST_CONFIG_PATH}, diff --git a/zkstack_cli/crates/config/src/wallets.rs b/zkstack_cli/crates/config/src/wallets.rs index 735848f6e34d..edd12b16bfe6 100644 --- a/zkstack_cli/crates/config/src/wallets.rs +++ b/zkstack_cli/crates/config/src/wallets.rs @@ -1,6 +1,6 @@ -use common::wallets::Wallet; use rand::{CryptoRng, Rng}; use serde::{Deserialize, Serialize}; +use zkstack_cli_common::wallets::Wallet; use crate::{ consts::WALLETS_FILE, diff --git a/zkstack_cli/crates/git_version_macro/Cargo.toml b/zkstack_cli/crates/git_version_macro/Cargo.toml index eb70b450a4cf..3a8352abafc1 100644 --- a/zkstack_cli/crates/git_version_macro/Cargo.toml +++ b/zkstack_cli/crates/git_version_macro/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "git_version_macro" +name = "zkstack_cli_git_version_macro" edition = "2021" description = "Procedural macro to generate metainformation about build in compile time" version.workspace = true diff --git a/zkstack_cli/crates/types/Cargo.toml 
b/zkstack_cli/crates/types/Cargo.toml index 97e00e1ba46b..2ce034f1cbd5 100644 --- a/zkstack_cli/crates/types/Cargo.toml +++ b/zkstack_cli/crates/types/Cargo.toml @@ -1,6 +1,6 @@ [package] -name = "types" -version = "0.1.0" +name = "zkstack_cli_types" +version.workspace = true edition.workspace = true homepage.workspace = true license.workspace = true diff --git a/zkstack_cli/crates/types/src/l1_network.rs b/zkstack_cli/crates/types/src/l1_network.rs index cc7b47147548..0578f685b6ec 100644 --- a/zkstack_cli/crates/types/src/l1_network.rs +++ b/zkstack_cli/crates/types/src/l1_network.rs @@ -1,4 +1,7 @@ +use std::str::FromStr; + use clap::ValueEnum; +use ethers::types::Address; use serde::{Deserialize, Serialize}; use strum::EnumIter; @@ -35,4 +38,25 @@ impl L1Network { L1Network::Mainnet => 1, } } + + pub fn avail_l1_da_validator_addr(&self) -> Option
{ + match self { + L1Network::Localhost => None, + L1Network::Sepolia | L1Network::Holesky => { + Some(Address::from_str("0xd99d6569785547ac72150d0309aeDb30C7871b51").unwrap()) + } + L1Network::Mainnet => None, // TODO: add mainnet address after it is known + } + } + + pub fn eigenda_l1_validator_addr(&self) -> Option
{ + match self { + L1Network::Localhost => None, + L1Network::Sepolia | L1Network::Holesky => { + None + //TODO: add real address + } + L1Network::Mainnet => None, // TODO: add mainnet address after it is known + } + } } diff --git a/zkstack_cli/crates/zkstack/Cargo.toml b/zkstack_cli/crates/zkstack/Cargo.toml index a19ced3f39c4..169fe593ba14 100644 --- a/zkstack_cli/crates/zkstack/Cargo.toml +++ b/zkstack_cli/crates/zkstack/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zkstack" -version = "0.1.0" +version.workspace = true edition.workspace = true homepage.workspace = true license.workspace = true @@ -17,8 +17,8 @@ clap.workspace = true clap_complete.workspace = true clap-markdown.workspace = true cliclack.workspace = true -common.workspace = true -config.workspace = true +zkstack_cli_common.workspace = true +zkstack_cli_config.workspace = true dirs.workspace = true ethers.workspace = true futures.workspace = true @@ -34,7 +34,7 @@ sqruff-lib = "0.19.0" thiserror.workspace = true tokio.workspace = true toml.workspace = true -types.workspace = true +zkstack_cli_types.workspace = true url.workspace = true xshell.workspace = true zksync_basic_types.workspace = true @@ -43,11 +43,13 @@ zksync_consensus_roles.workspace = true zksync_consensus_crypto.workspace = true zksync_protobuf.workspace = true zksync_protobuf_config.workspace = true -prost.workspace = true -reqwest = "0.12.8" zksync_types.workspace = true zksync_web3_decl.workspace = true zksync_system_constants.workspace = true +zksync_eth_client.workspace = true +zksync_contracts.workspace = true +prost.workspace = true +reqwest = "0.12.8" [dev-dependencies] rand.workspace = true @@ -60,3 +62,8 @@ dirs.workspace = true ethers.workspace = true xshell.workspace = true zksync_protobuf_build.workspace = true + +[features] +# Features that allows gateway-chain related actions. +# These should be available for outside users until stabilized. 
+gateway = [] diff --git a/zkstack_cli/crates/zkstack/build.rs b/zkstack_cli/crates/zkstack/build.rs index e52e952bf730..d2d478f80227 100644 --- a/zkstack_cli/crates/zkstack/build.rs +++ b/zkstack_cli/crates/zkstack/build.rs @@ -112,15 +112,22 @@ impl ShellAutocomplete for clap_complete::Shell { .context(format!("could not read .{}rc", shell))?; if !shell_rc_content.contains("# zkstack completion") { - std::fs::write( - shell_rc, + let completion_snippet = if shell == "zsh" { + format!( + "{}\n# zkstack completion\nautoload -Uz compinit\ncompinit\nsource \"{}\"\n", + shell_rc_content, + completion_file.to_str().unwrap() + ) + } else { format!( "{}\n# zkstack completion\nsource \"{}\"\n", shell_rc_content, completion_file.to_str().unwrap() - ), - ) - .context(format!("could not write .{}rc", shell))?; + ) + }; + + std::fs::write(shell_rc, completion_snippet) + .context(format!("could not write .{}rc", shell))?; } } else { println!( diff --git a/zkstack_cli/crates/zkstack/completion/_zkstack.zsh b/zkstack_cli/crates/zkstack/completion/_zkstack.zsh index 3b0bed93eb4f..7d9b405680e5 100644 --- a/zkstack_cli/crates/zkstack/completion/_zkstack.zsh +++ b/zkstack_cli/crates/zkstack/completion/_zkstack.zsh @@ -108,6 +108,7 @@ _arguments "${_arguments_options[@]}" : \ '*--additional-args=[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ '--resume[]' \ +'--zksync[]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores prerequisites checks]' \ @@ -133,13 +134,17 @@ _arguments "${_arguments_options[@]}" : \ '-o+[Enable Grafana]' \ '--observability=[Enable Grafana]' \ '--update-submodules=[]:UPDATE_SUBMODULES:(true false)' \ +'--validium-type=[Type of the Validium network]:VALIDIUM_TYPE:(no-da avail eigen-da)' \ +'--support-l2-legacy-shared-bridge-test=[]' \ '--chain=[Chain to use]:CHAIN:_default' \ '--resume[]' \ +'--zksync[]' \ '-d[]' \ '--dont-drop[]' \ 
'--ecosystem-only[Initialize ecosystem only and skip chain initialization (chain can be initialized later with \`chain init\` subcommand)]' \ '--dev[Use defaults for all options and flags. Suitable for local development]' \ '--no-port-reallocation[Do not reallocate ports]' \ +'--skip-contract-compilation-override[]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores prerequisites checks]' \ @@ -269,6 +274,7 @@ _arguments "${_arguments_options[@]}" : \ '--l1-rpc-url=[L1 RPC URL]:L1_RPC_URL:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ '--resume[]' \ +'--zksync[]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores prerequisites checks]' \ @@ -289,8 +295,10 @@ _arguments "${_arguments_options[@]}" : \ '--deploy-paymaster=[]' \ '--l1-rpc-url=[L1 RPC URL]:L1_RPC_URL:_default' \ '--update-submodules=[]:UPDATE_SUBMODULES:(true false)' \ +'--validium-type=[Type of the Validium network]:VALIDIUM_TYPE:(no-da avail eigen-da)' \ '--chain=[Chain to use]:CHAIN:_default' \ '--resume[]' \ +'--zksync[]' \ '-d[]' \ '--dont-drop[]' \ '--no-port-reallocation[Do not reallocate ports]' \ @@ -448,6 +456,7 @@ _arguments "${_arguments_options[@]}" : \ '*--additional-args=[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ '--resume[]' \ +'--zksync[]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores prerequisites checks]' \ @@ -465,6 +474,7 @@ _arguments "${_arguments_options[@]}" : \ '*--additional-args=[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ '--resume[]' \ +'--zksync[]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores prerequisites checks]' \ @@ -482,23 +492,7 @@ _arguments "${_arguments_options[@]}" : \ '*--additional-args=[List of additional arguments that can be passed 
through the CLI]:ADDITIONAL_ARGS:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ '--resume[]' \ -'-v[Verbose mode]' \ -'--verbose[Verbose mode]' \ -'--ignore-prerequisites[Ignores prerequisites checks]' \ -'-h[Print help (see more with '\''--help'\'')]' \ -'--help[Print help (see more with '\''--help'\'')]' \ -&& ret=0 -;; -(initialize-bridges) -_arguments "${_arguments_options[@]}" : \ -'--verify=[Verify deployed contracts]' \ -'--verifier=[Verifier to use]:VERIFIER:(etherscan sourcify blockscout oklink)' \ -'--verifier-url=[Verifier URL, if using a custom provider]:VERIFIER_URL:_default' \ -'--verifier-api-key=[Verifier API key]:VERIFIER_API_KEY:_default' \ -'*-a+[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ -'*--additional-args=[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ -'--chain=[Chain to use]:CHAIN:_default' \ -'--resume[]' \ +'--zksync[]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores prerequisites checks]' \ @@ -516,6 +510,7 @@ _arguments "${_arguments_options[@]}" : \ '*--additional-args=[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ '--resume[]' \ +'--zksync[]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores prerequisites checks]' \ @@ -533,6 +528,7 @@ _arguments "${_arguments_options[@]}" : \ '*--additional-args=[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ '--resume[]' \ +'--zksync[]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores prerequisites checks]' \ @@ -550,6 +546,7 @@ _arguments "${_arguments_options[@]}" : \ '*--additional-args=[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ 
'--resume[]' \ +'--zksync[]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores prerequisites checks]' \ @@ -567,6 +564,7 @@ _arguments "${_arguments_options[@]}" : \ '*--additional-args=[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ '--resume[]' \ +'--zksync[]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores prerequisites checks]' \ @@ -584,6 +582,7 @@ _arguments "${_arguments_options[@]}" : \ '*--additional-args=[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ '--resume[]' \ +'--zksync[]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores prerequisites checks]' \ @@ -601,6 +600,7 @@ _arguments "${_arguments_options[@]}" : \ '*--additional-args=[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ '--resume[]' \ +'--zksync[]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores prerequisites checks]' \ @@ -618,6 +618,7 @@ _arguments "${_arguments_options[@]}" : \ '*--additional-args=[List of additional arguments that can be passed through the CLI]:ADDITIONAL_ARGS:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ '--resume[]' \ +'--zksync[]' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ '--ignore-prerequisites[Ignores prerequisites checks]' \ @@ -701,10 +702,6 @@ _arguments "${_arguments_options[@]}" : \ _arguments "${_arguments_options[@]}" : \ && ret=0 ;; -(initialize-bridges) -_arguments "${_arguments_options[@]}" : \ -&& ret=0 -;; (deploy-consensus-registry) _arguments "${_arguments_options[@]}" : \ && ret=0 @@ -1496,6 +1493,7 @@ esac (contracts) _arguments "${_arguments_options[@]}" : \ '--l1-contracts=[Build L1 contracts]' \ +'--l1-da-contracts=[Build L1 DA contracts]' \ 
'--l2-contracts=[Build L2 contracts]' \ '--system-contracts=[Build system contracts]' \ '--chain=[Chain to use]:CHAIN:_default' \ @@ -1908,9 +1906,7 @@ _arguments "${_arguments_options[@]}" : \ '(--clone)--bellman-cuda-dir=[]:BELLMAN_CUDA_DIR:_default' \ '--bellman-cuda=[]' \ '--setup-compressor-key=[]' \ -'--plonk-path=[]:PLONK_PATH:_default' \ -'--fflonk-path=[]:FFLONK_PATH:_default' \ -'--compressor-type=[]:COMPRESSOR_TYPE:(fflonk plonk all)' \ +'--path=[]:PATH:_default' \ '--region=[]:REGION:(us europe asia)' \ '--mode=[]:MODE:(download generate)' \ '--setup-keys=[]' \ @@ -1981,9 +1977,7 @@ _arguments "${_arguments_options[@]}" : \ ;; (compressor-keys) _arguments "${_arguments_options[@]}" : \ -'--plonk-path=[]:PLONK_PATH:_default' \ -'--fflonk-path=[]:FFLONK_PATH:_default' \ -'--compressor-type=[]:COMPRESSOR_TYPE:(fflonk plonk all)' \ +'--path=[]:PATH:_default' \ '--chain=[Chain to use]:CHAIN:_default' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ @@ -2711,10 +2705,6 @@ _arguments "${_arguments_options[@]}" : \ _arguments "${_arguments_options[@]}" : \ && ret=0 ;; -(initialize-bridges) -_arguments "${_arguments_options[@]}" : \ -&& ret=0 -;; (deploy-consensus-registry) _arguments "${_arguments_options[@]}" : \ && ret=0 @@ -3264,7 +3254,6 @@ _zkstack__chain_commands() { 'register-chain:Register a new chain on L1 (executed by L1 governor). This command deploys and configures Governance, ChainAdmin, and DiamondProxy contracts, registers chain with BridgeHub and sets pending admin for DiamondProxy. Note\: After completion, L2 governor can accept ownership by running \`accept-chain-ownership\`' \ 'deploy-l2-contracts:Deploy all L2 contracts (executed by L1 governor)' \ 'accept-chain-ownership:Accept ownership of L2 chain (executed by L2 governor). 
This command should be run after \`register-chain\` to accept ownership of newly created DiamondProxy contract' \ -'initialize-bridges:Initialize bridges on L2' \ 'deploy-consensus-registry:Deploy L2 consensus registry' \ 'deploy-multicall3:Deploy L2 multicall3' \ 'deploy-timestamp-asserter:Deploy L2 TimestampAsserter' \ @@ -3379,7 +3368,6 @@ _zkstack__chain__help_commands() { 'register-chain:Register a new chain on L1 (executed by L1 governor). This command deploys and configures Governance, ChainAdmin, and DiamondProxy contracts, registers chain with BridgeHub and sets pending admin for DiamondProxy. Note\: After completion, L2 governor can accept ownership by running \`accept-chain-ownership\`' \ 'deploy-l2-contracts:Deploy all L2 contracts (executed by L1 governor)' \ 'accept-chain-ownership:Accept ownership of L2 chain (executed by L2 governor). This command should be run after \`register-chain\` to accept ownership of newly created DiamondProxy contract' \ -'initialize-bridges:Initialize bridges on L2' \ 'deploy-consensus-registry:Deploy L2 consensus registry' \ 'deploy-multicall3:Deploy L2 multicall3' \ 'deploy-timestamp-asserter:Deploy L2 TimestampAsserter' \ @@ -3476,11 +3464,6 @@ _zkstack__chain__help__init__configs_commands() { local commands; commands=() _describe -t commands 'zkstack chain help init configs commands' commands "$@" } -(( $+functions[_zkstack__chain__help__initialize-bridges_commands] )) || -_zkstack__chain__help__initialize-bridges_commands() { - local commands; commands=() - _describe -t commands 'zkstack chain help initialize-bridges commands' commands "$@" -} (( $+functions[_zkstack__chain__help__register-chain_commands] )) || _zkstack__chain__help__register-chain_commands() { local commands; commands=() @@ -3522,11 +3505,6 @@ _zkstack__chain__init__help__help_commands() { local commands; commands=() _describe -t commands 'zkstack chain init help help commands' commands "$@" } -(( 
$+functions[_zkstack__chain__initialize-bridges_commands] )) || -_zkstack__chain__initialize-bridges_commands() { - local commands; commands=() - _describe -t commands 'zkstack chain initialize-bridges commands' commands "$@" -} (( $+functions[_zkstack__chain__register-chain_commands] )) || _zkstack__chain__register-chain_commands() { local commands; commands=() @@ -4703,7 +4681,6 @@ _zkstack__help__chain_commands() { 'register-chain:Register a new chain on L1 (executed by L1 governor). This command deploys and configures Governance, ChainAdmin, and DiamondProxy contracts, registers chain with BridgeHub and sets pending admin for DiamondProxy. Note\: After completion, L2 governor can accept ownership by running \`accept-chain-ownership\`' \ 'deploy-l2-contracts:Deploy all L2 contracts (executed by L1 governor)' \ 'accept-chain-ownership:Accept ownership of L2 chain (executed by L2 governor). This command should be run after \`register-chain\` to accept ownership of newly created DiamondProxy contract' \ -'initialize-bridges:Initialize bridges on L2' \ 'deploy-consensus-registry:Deploy L2 consensus registry' \ 'deploy-multicall3:Deploy L2 multicall3' \ 'deploy-timestamp-asserter:Deploy L2 TimestampAsserter' \ @@ -4794,11 +4771,6 @@ _zkstack__help__chain__init__configs_commands() { local commands; commands=() _describe -t commands 'zkstack help chain init configs commands' commands "$@" } -(( $+functions[_zkstack__help__chain__initialize-bridges_commands] )) || -_zkstack__help__chain__initialize-bridges_commands() { - local commands; commands=() - _describe -t commands 'zkstack help chain initialize-bridges commands' commands "$@" -} (( $+functions[_zkstack__help__chain__register-chain_commands] )) || _zkstack__help__chain__register-chain_commands() { local commands; commands=() diff --git a/zkstack_cli/crates/zkstack/completion/zkstack.fish b/zkstack_cli/crates/zkstack/completion/zkstack.fish index 5f5249616c74..a4cfb31a62c4 100644 --- 
a/zkstack_cli/crates/zkstack/completion/zkstack.fish +++ b/zkstack_cli/crates/zkstack/completion/zkstack.fish @@ -44,7 +44,11 @@ complete -c zkstack -n "__fish_zkstack_needs_command" -f -a "consensus" -d 'Cons complete -c zkstack -n "__fish_zkstack_needs_command" -f -a "update" -d 'Update ZKsync' complete -c zkstack -n "__fish_zkstack_needs_command" -f -a "markdown" -d 'Print markdown help' complete -c zkstack -n "__fish_zkstack_needs_command" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' -complete -c zkstack -n "__fish_zkstack_using_subcommand autocomplete" -l generate -d 'The shell to generate the autocomplete script for' -r -f -a "{bash\t'',elvish\t'',fish\t'',powershell\t'',zsh\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand autocomplete" -l generate -d 'The shell to generate the autocomplete script for' -r -f -a "bash\t'' +elvish\t'' +fish\t'' +powershell\t'' +zsh\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand autocomplete" -s o -l out -d 'The out directory to write the autocomplete script to' -r -F complete -c zkstack -n "__fish_zkstack_using_subcommand autocomplete" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand autocomplete" -s v -l verbose -d 'Verbose mode' @@ -61,22 +65,35 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and not __fis complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and not __fish_seen_subcommand_from create build-transactions init change-default-chain setup-observability help" -f -a "setup-observability" -d 'Setup observability for the ecosystem, downloading Grafana dashboards from the era-observability repo' complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and not __fish_seen_subcommand_from create build-transactions init change-default-chain setup-observability help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' complete -c zkstack -n 
"__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l ecosystem-name -r -complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l l1-network -d 'L1 Network' -r -f -a "{localhost\t'',sepolia\t'',holesky\t'',mainnet\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l l1-network -d 'L1 Network' -r -f -a "localhost\t'' +sepolia\t'' +holesky\t'' +mainnet\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l link-to-code -d 'Code link' -r -f -a "(__fish_complete_directories)" complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l chain-name -r complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l chain-id -d 'Chain ID' -r -complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l prover-mode -d 'Prover options' -r -f -a "{no-proofs\t'',gpu\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l wallet-creation -d 'Wallet options' -r -f -a "{localhost\t'Load wallets from localhost mnemonic, they are funded for localhost env',random\t'Generate random wallets',empty\t'Generate placeholder wallets',in-file\t'Specify file with wallets'}" +complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l prover-mode -d 'Prover options' -r -f -a "no-proofs\t'' +gpu\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l wallet-creation -d 'Wallet options' -r -f -a "localhost\t'Load wallets from localhost mnemonic, they are funded for localhost env' +random\t'Generate random wallets' +empty\t'Generate placeholder wallets' +in-file\t'Specify file 
with wallets'" complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l wallet-path -d 'Wallet path' -r -F -complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l l1-batch-commit-data-generator-mode -d 'Commit data generation mode' -r -f -a "{rollup\t'',validium\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l l1-batch-commit-data-generator-mode -d 'Commit data generation mode' -r -f -a "rollup\t'' +validium\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l base-token-address -d 'Base token address' -r complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l base-token-price-nominator -d 'Base token nominator' -r complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l base-token-price-denominator -d 'Base token denominator' -r -complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l set-as-default -d 'Set as default chain' -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l evm-emulator -d 'Enable EVM emulator' -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l update-submodules -d 'Whether to update git submodules of repo' -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l start-containers -d 'Start reth and postgres containers after creation' -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l 
update-submodules -r -f -a "{true\t'',false\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l set-as-default -d 'Set as default chain' -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l evm-emulator -d 'Enable EVM emulator' -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l update-submodules -d 'Whether to update git submodules of repo' -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l start-containers -d 'Start reth and postgres containers after creation' -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l update-submodules -r -f -a "true\t'' +false\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -l legacy-bridge complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from create" -s v -l verbose -d 'Verbose mode' @@ -85,36 +102,57 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_se complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from build-transactions" -l sender -d 'Address of the transaction sender' -r complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from build-transactions" -l l1-rpc-url -d 'L1 RPC URL' -r complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from build-transactions" -s o -l out -d 'Output directory for the generated files' -r 
-F -complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from build-transactions" -l verify -d 'Verify deployed contracts' -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from build-transactions" -l verifier -d 'Verifier to use' -r -f -a "{etherscan\t'',sourcify\t'',blockscout\t'',oklink\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from build-transactions" -l verify -d 'Verify deployed contracts' -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from build-transactions" -l verifier -d 'Verifier to use' -r -f -a "etherscan\t'' +sourcify\t'' +blockscout\t'' +oklink\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from build-transactions" -l verifier-url -d 'Verifier URL, if using a custom provider' -r complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from build-transactions" -l verifier-api-key -d 'Verifier API key' -r complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from build-transactions" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from build-transactions" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from build-transactions" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from build-transactions" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from build-transactions" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n 
"__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from build-transactions" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from build-transactions" -s h -l help -d 'Print help (see more with \'--help\')' -complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l deploy-erc20 -d 'Deploy ERC20 contracts' -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l deploy-ecosystem -d 'Deploy ecosystem contracts' -r -f -a "{true\t'',false\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l deploy-erc20 -d 'Deploy ERC20 contracts' -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l deploy-ecosystem -d 'Deploy ecosystem contracts' -r -f -a "true\t'' +false\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l ecosystem-contracts-path -d 'Path to ecosystem contracts' -r -F complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l l1-rpc-url -d 'L1 RPC URL' -r -complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l verify -d 'Verify deployed contracts' -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l verifier -d 'Verifier to use' -r -f -a "{etherscan\t'',sourcify\t'',blockscout\t'',oklink\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l verify -d 'Verify deployed contracts' -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand 
ecosystem; and __fish_seen_subcommand_from init" -l verifier -d 'Verifier to use' -r -f -a "etherscan\t'' +sourcify\t'' +blockscout\t'' +oklink\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l verifier-url -d 'Verifier URL, if using a custom provider' -r complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l verifier-api-key -d 'Verifier API key' -r complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r -complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l deploy-paymaster -d 'Deploy Paymaster contract' -r -f -a "{true\t'',false\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l deploy-paymaster -d 'Deploy Paymaster contract' -r -f -a "true\t'' +false\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l server-db-url -d 'Server database url without database name' -r complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l server-db-name -d 'Server database name' -r -complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -s o -l observability -d 'Enable Grafana' -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l update-submodules -r -f -a "{true\t'',false\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -s o -l observability -d 'Enable Grafana' -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l 
update-submodules -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l validium-type -d 'Type of the Validium network' -r -f -a "no-da\t'' +avail\t'' +eigen-da\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l support-l2-legacy-shared-bridge-test -r -f -a "true\t'' +false\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -s d -l dont-drop complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l ecosystem-only -d 'Initialize ecosystem only and skip chain initialization (chain can be initialized later with `chain init` subcommand)' complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l dev -d 'Use defaults for all options and flags. 
Suitable for local development' complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l no-port-reallocation -d 'Do not reallocate ports' +complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l skip-contract-compilation-override complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from init" -s h -l help -d 'Print help (see more with \'--help\')' @@ -132,67 +170,89 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_se complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from help" -f -a "change-default-chain" -d 'Change the default chain' complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from help" -f -a "setup-observability" -d 'Setup observability for the ecosystem, downloading Grafana dashboards from the era-observability repo' complete -c zkstack -n "__fish_zkstack_using_subcommand ecosystem; and __fish_seen_subcommand_from help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -l chain -d 'Chain to use' -r -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create 
build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -s v -l verbose -d 'Verbose mode' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -l ignore-prerequisites -d 'Ignores prerequisites checks' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -s h -l help -d 'Print help' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "create" -d 'Create a new chain, setting the necessary configurations for later initialization' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f 
-a "build-transactions" -d 'Create unsigned transactions for chain deployment' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "init" -d 'Initialize chain, deploying necessary contracts and performing on-chain operations' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "genesis" -d 'Run server genesis' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "register-chain" -d 'Register a new chain on L1 (executed by L1 governor). This command deploys and configures Governance, ChainAdmin, and DiamondProxy contracts, registers chain with BridgeHub and sets pending admin for DiamondProxy. 
Note: After completion, L2 governor can accept ownership by running `accept-chain-ownership`' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "deploy-l2-contracts" -d 'Deploy all L2 contracts (executed by L1 governor)' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "accept-chain-ownership" -d 'Accept ownership of L2 chain (executed by L2 governor). 
This command should be run after `register-chain` to accept ownership of newly created DiamondProxy contract' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "initialize-bridges" -d 'Initialize bridges on L2' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "deploy-consensus-registry" -d 'Deploy L2 consensus registry' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "deploy-multicall3" -d 'Deploy L2 multicall3' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "deploy-timestamp-asserter" -d 'Deploy L2 TimestampAsserter' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis 
register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "deploy-upgrader" -d 'Deploy Default Upgrader' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "deploy-paymaster" -d 'Deploy paymaster smart contract' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "update-token-multiplier-setter" -d 'Update Token Multiplier Setter address on L1' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "enable-evm-emulator" -d 'Enable EVM emulation on chain (Not supported yet)' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster 
update-token-multiplier-setter enable-evm-emulator help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -l chain -d 'Chain to use' -r +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -s v -l verbose -d 'Verbose mode' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -l ignore-prerequisites -d 'Ignores prerequisites checks' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -s h -l help -d 'Print help' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader 
deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "create" -d 'Create a new chain, setting the necessary configurations for later initialization' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "build-transactions" -d 'Create unsigned transactions for chain deployment' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "init" -d 'Initialize chain, deploying necessary contracts and performing on-chain operations' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "genesis" -d 'Run server genesis' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "register-chain" -d 'Register a new chain on L1 (executed by L1 governor). 
This command deploys and configures Governance, ChainAdmin, and DiamondProxy contracts, registers chain with BridgeHub and sets pending admin for DiamondProxy. Note: After completion, L2 governor can accept ownership by running `accept-chain-ownership`' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "deploy-l2-contracts" -d 'Deploy all L2 contracts (executed by L1 governor)' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "accept-chain-ownership" -d 'Accept ownership of L2 chain (executed by L2 governor). 
This command should be run after `register-chain` to accept ownership of newly created DiamondProxy contract' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "deploy-consensus-registry" -d 'Deploy L2 consensus registry' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "deploy-multicall3" -d 'Deploy L2 multicall3' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "deploy-timestamp-asserter" -d 'Deploy L2 TimestampAsserter' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "deploy-upgrader" -d 'Deploy Default Upgrader' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership 
deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "deploy-paymaster" -d 'Deploy paymaster smart contract' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "update-token-multiplier-setter" -d 'Update Token Multiplier Setter address on L1' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "enable-evm-emulator" -d 'Enable EVM emulation on chain (Not supported yet)' +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and not __fish_seen_subcommand_from create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l chain-name -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l chain-id -d 'Chain ID' -r -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l prover-mode -d 'Prover options' -r -f -a "{no-proofs\t'',gpu\t''}" -complete -c zkstack -n 
"__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l wallet-creation -d 'Wallet options' -r -f -a "{localhost\t'Load wallets from localhost mnemonic, they are funded for localhost env',random\t'Generate random wallets',empty\t'Generate placeholder wallets',in-file\t'Specify file with wallets'}" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l prover-mode -d 'Prover options' -r -f -a "no-proofs\t'' +gpu\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l wallet-creation -d 'Wallet options' -r -f -a "localhost\t'Load wallets from localhost mnemonic, they are funded for localhost env' +random\t'Generate random wallets' +empty\t'Generate placeholder wallets' +in-file\t'Specify file with wallets'" complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l wallet-path -d 'Wallet path' -r -F -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l l1-batch-commit-data-generator-mode -d 'Commit data generation mode' -r -f -a "{rollup\t'',validium\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l l1-batch-commit-data-generator-mode -d 'Commit data generation mode' -r -f -a "rollup\t'' +validium\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l base-token-address -d 'Base token address' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l base-token-price-nominator -d 'Base token nominator' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l base-token-price-denominator -d 'Base token denominator' -r -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" 
-l set-as-default -d 'Set as default chain' -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l evm-emulator -d 'Enable EVM emulator' -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l update-submodules -d 'Whether to update git submodules of repo' -r -f -a "{true\t'',false\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l set-as-default -d 'Set as default chain' -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l evm-emulator -d 'Enable EVM emulator' -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l update-submodules -d 'Whether to update git submodules of repo' -r -f -a "true\t'' +false\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l legacy-bridge complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from create" -s h -l help -d 'Print help (see more with \'--help\')' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from build-transactions" -s o -l out -d 'Output directory for the generated files' -r -F -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from 
build-transactions" -l verify -d 'Verify deployed contracts' -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from build-transactions" -l verifier -d 'Verifier to use' -r -f -a "{etherscan\t'',sourcify\t'',blockscout\t'',oklink\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from build-transactions" -l verify -d 'Verify deployed contracts' -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from build-transactions" -l verifier -d 'Verifier to use' -r -f -a "etherscan\t'' +sourcify\t'' +blockscout\t'' +oklink\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from build-transactions" -l verifier-url -d 'Verifier URL, if using a custom provider' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from build-transactions" -l verifier-api-key -d 'Verifier API key' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from build-transactions" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from build-transactions" -l l1-rpc-url -d 'L1 RPC URL' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from build-transactions" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from build-transactions" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from build-transactions" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from build-transactions" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n 
"__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from build-transactions" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from build-transactions" -s h -l help -d 'Print help (see more with \'--help\')' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -l verify -d 'Verify deployed contracts' -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -l verifier -d 'Verifier to use' -r -f -a "{etherscan\t'',sourcify\t'',blockscout\t'',oklink\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -l verify -d 'Verify deployed contracts' -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -l verifier -d 'Verifier to use' -r -f -a "etherscan\t'' +sourcify\t'' +blockscout\t'' +oklink\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -l verifier-url -d 'Verifier URL, if using a custom provider' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -l verifier-api-key -d 'Verifier API key' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -l server-db-url -d 'Server database url without database name' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -l server-db-name -d 'Server database name' -r -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" 
-l deploy-paymaster -r -f -a "{true\t'',false\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -l deploy-paymaster -r -f -a "true\t'' +false\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -l l1-rpc-url -d 'L1 RPC URL' -r -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -l update-submodules -r -f -a "{true\t'',false\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -l update-submodules -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -l validium-type -d 'Type of the Validium network' -r -f -a "no-da\t'' +avail\t'' +eigen-da\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -s d -l dont-drop complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -l no-port-reallocation -d 'Do not reallocate ports' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from init" -l dev -d 'Use defaults for all options and flags. 
Suitable for local development' @@ -212,113 +272,153 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_s complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from genesis" -f -a "init-database" -d 'Initialize databases' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from genesis" -f -a "server" -d 'Runs server genesis' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from genesis" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from register-chain" -l verify -d 'Verify deployed contracts' -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from register-chain" -l verifier -d 'Verifier to use' -r -f -a "{etherscan\t'',sourcify\t'',blockscout\t'',oklink\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from register-chain" -l verify -d 'Verify deployed contracts' -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from register-chain" -l verifier -d 'Verifier to use' -r -f -a "etherscan\t'' +sourcify\t'' +blockscout\t'' +oklink\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from register-chain" -l verifier-url -d 'Verifier URL, if using a custom provider' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from register-chain" -l verifier-api-key -d 'Verifier API key' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from register-chain" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r complete -c zkstack -n "__fish_zkstack_using_subcommand 
chain; and __fish_seen_subcommand_from register-chain" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from register-chain" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from register-chain" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from register-chain" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from register-chain" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from register-chain" -s h -l help -d 'Print help (see more with \'--help\')' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-l2-contracts" -l verify -d 'Verify deployed contracts' -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-l2-contracts" -l verifier -d 'Verifier to use' -r -f -a "{etherscan\t'',sourcify\t'',blockscout\t'',oklink\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-l2-contracts" -l verify -d 'Verify deployed contracts' -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-l2-contracts" -l verifier -d 'Verifier to use' -r -f -a "etherscan\t'' +sourcify\t'' +blockscout\t'' +oklink\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-l2-contracts" -l verifier-url -d 'Verifier URL, if using a custom provider' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-l2-contracts" -l verifier-api-key -d 'Verifier API key' -r complete -c zkstack -n 
"__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-l2-contracts" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-l2-contracts" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-l2-contracts" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-l2-contracts" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-l2-contracts" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-l2-contracts" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-l2-contracts" -s h -l help -d 'Print help (see more with \'--help\')' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from accept-chain-ownership" -l verify -d 'Verify deployed contracts' -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from accept-chain-ownership" -l verifier -d 'Verifier to use' -r -f -a "{etherscan\t'',sourcify\t'',blockscout\t'',oklink\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from accept-chain-ownership" -l verify -d 'Verify deployed contracts' -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from accept-chain-ownership" -l verifier -d 'Verifier to use' -r -f -a "etherscan\t'' +sourcify\t'' +blockscout\t'' +oklink\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and 
__fish_seen_subcommand_from accept-chain-ownership" -l verifier-url -d 'Verifier URL, if using a custom provider' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from accept-chain-ownership" -l verifier-api-key -d 'Verifier API key' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from accept-chain-ownership" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from accept-chain-ownership" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from accept-chain-ownership" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from accept-chain-ownership" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from accept-chain-ownership" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from accept-chain-ownership" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from accept-chain-ownership" -s h -l help -d 'Print help (see more with \'--help\')' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from initialize-bridges" -l verify -d 'Verify deployed contracts' -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from initialize-bridges" -l verifier -d 'Verifier to use' -r -f -a "{etherscan\t'',sourcify\t'',blockscout\t'',oklink\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from initialize-bridges" -l verifier-url -d 'Verifier URL, if using a custom provider' -r 
-complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from initialize-bridges" -l verifier-api-key -d 'Verifier API key' -r -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from initialize-bridges" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from initialize-bridges" -l chain -d 'Chain to use' -r -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from initialize-bridges" -l resume -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from initialize-bridges" -s v -l verbose -d 'Verbose mode' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from initialize-bridges" -l ignore-prerequisites -d 'Ignores prerequisites checks' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from initialize-bridges" -s h -l help -d 'Print help (see more with \'--help\')' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-consensus-registry" -l verify -d 'Verify deployed contracts' -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-consensus-registry" -l verifier -d 'Verifier to use' -r -f -a "{etherscan\t'',sourcify\t'',blockscout\t'',oklink\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-consensus-registry" -l verify -d 'Verify deployed contracts' -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-consensus-registry" -l verifier -d 'Verifier to use' -r -f -a "etherscan\t'' +sourcify\t'' +blockscout\t'' +oklink\t''" complete -c zkstack -n 
"__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-consensus-registry" -l verifier-url -d 'Verifier URL, if using a custom provider' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-consensus-registry" -l verifier-api-key -d 'Verifier API key' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-consensus-registry" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-consensus-registry" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-consensus-registry" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-consensus-registry" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-consensus-registry" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-consensus-registry" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-consensus-registry" -s h -l help -d 'Print help (see more with \'--help\')' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-multicall3" -l verify -d 'Verify deployed contracts' -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-multicall3" -l verifier -d 'Verifier to use' -r -f -a "{etherscan\t'',sourcify\t'',blockscout\t'',oklink\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-multicall3" -l 
verify -d 'Verify deployed contracts' -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-multicall3" -l verifier -d 'Verifier to use' -r -f -a "etherscan\t'' +sourcify\t'' +blockscout\t'' +oklink\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-multicall3" -l verifier-url -d 'Verifier URL, if using a custom provider' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-multicall3" -l verifier-api-key -d 'Verifier API key' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-multicall3" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-multicall3" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-multicall3" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-multicall3" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-multicall3" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-multicall3" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-multicall3" -s h -l help -d 'Print help (see more with \'--help\')' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -l verify -d 'Verify deployed contracts' -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and 
__fish_seen_subcommand_from deploy-timestamp-asserter" -l verifier -d 'Verifier to use' -r -f -a "{etherscan\t'',sourcify\t'',blockscout\t'',oklink\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -l verify -d 'Verify deployed contracts' -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -l verifier -d 'Verifier to use' -r -f -a "etherscan\t'' +sourcify\t'' +blockscout\t'' +oklink\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -l verifier-url -d 'Verifier URL, if using a custom provider' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -l verifier-api-key -d 'Verifier API key' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-timestamp-asserter" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from 
deploy-timestamp-asserter" -s h -l help -d 'Print help (see more with \'--help\')' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-upgrader" -l verify -d 'Verify deployed contracts' -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-upgrader" -l verifier -d 'Verifier to use' -r -f -a "{etherscan\t'',sourcify\t'',blockscout\t'',oklink\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-upgrader" -l verify -d 'Verify deployed contracts' -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-upgrader" -l verifier -d 'Verifier to use' -r -f -a "etherscan\t'' +sourcify\t'' +blockscout\t'' +oklink\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-upgrader" -l verifier-url -d 'Verifier URL, if using a custom provider' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-upgrader" -l verifier-api-key -d 'Verifier API key' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-upgrader" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-upgrader" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-upgrader" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-upgrader" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-upgrader" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n 
"__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-upgrader" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-upgrader" -s h -l help -d 'Print help (see more with \'--help\')' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-paymaster" -l verify -d 'Verify deployed contracts' -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-paymaster" -l verifier -d 'Verifier to use' -r -f -a "{etherscan\t'',sourcify\t'',blockscout\t'',oklink\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-paymaster" -l verify -d 'Verify deployed contracts' -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-paymaster" -l verifier -d 'Verifier to use' -r -f -a "etherscan\t'' +sourcify\t'' +blockscout\t'' +oklink\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-paymaster" -l verifier-url -d 'Verifier URL, if using a custom provider' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-paymaster" -l verifier-api-key -d 'Verifier API key' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-paymaster" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-paymaster" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-paymaster" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and 
__fish_seen_subcommand_from deploy-paymaster" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-paymaster" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-paymaster" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from deploy-paymaster" -s h -l help -d 'Print help (see more with \'--help\')' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from update-token-multiplier-setter" -l verify -d 'Verify deployed contracts' -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from update-token-multiplier-setter" -l verifier -d 'Verifier to use' -r -f -a "{etherscan\t'',sourcify\t'',blockscout\t'',oklink\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from update-token-multiplier-setter" -l verify -d 'Verify deployed contracts' -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from update-token-multiplier-setter" -l verifier -d 'Verifier to use' -r -f -a "etherscan\t'' +sourcify\t'' +blockscout\t'' +oklink\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from update-token-multiplier-setter" -l verifier-url -d 'Verifier URL, if using a custom provider' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from update-token-multiplier-setter" -l verifier-api-key -d 'Verifier API key' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from update-token-multiplier-setter" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r complete -c 
zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from update-token-multiplier-setter" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from update-token-multiplier-setter" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from update-token-multiplier-setter" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from update-token-multiplier-setter" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from update-token-multiplier-setter" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from update-token-multiplier-setter" -s h -l help -d 'Print help (see more with \'--help\')' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from enable-evm-emulator" -l verify -d 'Verify deployed contracts' -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from enable-evm-emulator" -l verifier -d 'Verifier to use' -r -f -a "{etherscan\t'',sourcify\t'',blockscout\t'',oklink\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from enable-evm-emulator" -l verify -d 'Verify deployed contracts' -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from enable-evm-emulator" -l verifier -d 'Verifier to use' -r -f -a "etherscan\t'' +sourcify\t'' +blockscout\t'' +oklink\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from enable-evm-emulator" -l verifier-url -d 'Verifier URL, if using a custom provider' -r complete -c zkstack -n "__fish_zkstack_using_subcommand 
chain; and __fish_seen_subcommand_from enable-evm-emulator" -l verifier-api-key -d 'Verifier API key' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from enable-evm-emulator" -s a -l additional-args -d 'List of additional arguments that can be passed through the CLI' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from enable-evm-emulator" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from enable-evm-emulator" -l resume +complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from enable-evm-emulator" -l zksync complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from enable-evm-emulator" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from enable-evm-emulator" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from enable-evm-emulator" -s h -l help -d 'Print help (see more with \'--help\')' @@ -329,7 +429,6 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_s complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "register-chain" -d 'Register a new chain on L1 (executed by L1 governor). This command deploys and configures Governance, ChainAdmin, and DiamondProxy contracts, registers chain with BridgeHub and sets pending admin for DiamondProxy. 
Note: After completion, L2 governor can accept ownership by running `accept-chain-ownership`' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "deploy-l2-contracts" -d 'Deploy all L2 contracts (executed by L1 governor)' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "accept-chain-ownership" -d 'Accept ownership of L2 chain (executed by L2 governor). This command should be run after `register-chain` to accept ownership of newly created DiamondProxy contract' -complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "initialize-bridges" -d 'Initialize bridges on L2' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "deploy-consensus-registry" -d 'Deploy L2 consensus registry' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "deploy-multicall3" -d 'Deploy L2 multicall3' complete -c zkstack -n "__fish_zkstack_using_subcommand chain; and __fish_seen_subcommand_from help" -f -a "deploy-timestamp-asserter" -d 'Deploy L2 TimestampAsserter' @@ -397,7 +496,14 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_sub complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from snapshot" -s h -l help -d 'Print help' complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from snapshot" -f -a "create" complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from snapshot" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' -complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from lint" -s t -l targets -r -f -a "{md\t'',sol\t'',js\t'',ts\t'',rs\t'',contracts\t'',autocompletion\t'',rust-toolchain\t''}" +complete -c zkstack 
-n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from lint" -s t -l targets -r -f -a "md\t'' +sol\t'' +js\t'' +ts\t'' +rs\t'' +contracts\t'' +autocompletion\t'' +rust-toolchain\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from lint" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from lint" -s c -l check complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from lint" -s v -l verbose -d 'Verbose mode' @@ -420,9 +526,14 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_sub complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from prover" -f -a "insert-batch" complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from prover" -f -a "insert-version" complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from prover" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' -complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from contracts" -l l1-contracts -d 'Build L1 contracts' -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from contracts" -l l2-contracts -d 'Build L2 contracts' -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from contracts" -l system-contracts -d 'Build system contracts' -r -f -a "{true\t'',false\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from contracts" -l l1-contracts -d 'Build L1 contracts' -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from contracts" -l l1-da-contracts -d 'Build L1 DA contracts' -r -f -a "true\t'' +false\t''" 
+complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from contracts" -l l2-contracts -d 'Build L2 contracts' -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from contracts" -l system-contracts -d 'Build system contracts' -r -f -a "true\t'' +false\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from contracts" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from contracts" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand dev; and __fish_seen_subcommand_from contracts" -l ignore-prerequisites -d 'Ignores prerequisites checks' @@ -480,7 +591,8 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_ complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l bucket-name -r complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l location -r complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l project-id -r -complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l shall-save-to-public-bucket -r -f -a "{true\t'',false\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l shall-save-to-public-bucket -r -f -a "true\t'' +false\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l public-store-dir -r complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l public-bucket-base-url -r complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l public-credentials-file -r @@ -488,41 +600,65 @@ 
complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_ complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l public-location -r complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l public-project-id -r complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l bellman-cuda-dir -r -complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l bellman-cuda -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l setup-compressor-key -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l plonk-path -r -complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l fflonk-path -r -complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l compressor-type -r -f -a "{fflonk\t'',plonk\t'',all\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l region -r -f -a "{us\t'',europe\t'',asia\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l mode -r -f -a "{download\t'',generate\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l setup-keys -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l setup-database -r -f -a "{true\t'',false\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l bellman-cuda -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and 
__fish_seen_subcommand_from init" -l setup-compressor-key -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l path -r +complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l region -r -f -a "us\t'' +europe\t'' +asia\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l mode -r -f -a "download\t'' +generate\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l setup-keys -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l setup-database -r -f -a "true\t'' +false\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l prover-db-url -d 'Prover database url without database name' -r complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l prover-db-name -d 'Prover database name' -r -complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -s u -l use-default -d 'Use default database urls and names' -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -s d -l dont-drop -r -f -a "{true\t'',false\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l cloud-type -r -f -a "{gcp\t'',local\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -s u -l use-default -d 'Use default database urls and names' -r -f -a "true\t'' +false\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -s d -l dont-drop -r -f -a "true\t'' +false\t''" +complete -c zkstack -n 
"__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l cloud-type -r -f -a "gcp\t'' +local\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l dev complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l clone complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -s h -l help -d 'Print help' -complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from setup-keys" -l region -r -f -a "{us\t'',europe\t'',asia\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from setup-keys" -l mode -r -f -a "{download\t'',generate\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from setup-keys" -l region -r -f -a "us\t'' +europe\t'' +asia\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from setup-keys" -l mode -r -f -a "download\t'' +generate\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from setup-keys" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from setup-keys" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from setup-keys" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n 
"__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from setup-keys" -s h -l help -d 'Print help' -complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from run" -l component -r -f -a "{gateway\t'',witness-generator\t'',witness-vector-generator\t'',prover\t'',circuit-prover\t'',compressor\t'',prover-job-monitor\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from run" -l round -r -f -a "{all-rounds\t'',basic-circuits\t'',leaf-aggregation\t'',node-aggregation\t'',recursion-tip\t'',scheduler\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from run" -l component -r -f -a "gateway\t'' +witness-generator\t'' +witness-vector-generator\t'' +prover\t'' +circuit-prover\t'' +compressor\t'' +prover-job-monitor\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from run" -l round -r -f -a "all-rounds\t'' +basic-circuits\t'' +leaf-aggregation\t'' +node-aggregation\t'' +recursion-tip\t'' +scheduler\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from run" -l threads -r complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from run" -l max-allocation -d 'Memory allocation limit in bytes (for prover component)' -r complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from run" -s l -l light-wvg-count -r complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from run" -s h -l heavy-wvg-count -r complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from run" -s m -l max-allocation -r -complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from run" -l mode -r -f -a "{fflonk\t'',plonk\t''}" -complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and 
__fish_seen_subcommand_from run" -l docker -r -f -a "{true\t'',false\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from run" -l mode -r -f -a "fflonk\t'' +plonk\t''" +complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from run" -l docker -r -f -a "true\t'' +false\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from run" -l tag -r complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from run" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from run" -s v -l verbose -d 'Verbose mode' @@ -534,9 +670,7 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_ complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init-bellman-cuda" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init-bellman-cuda" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init-bellman-cuda" -s h -l help -d 'Print help' -complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from compressor-keys" -l plonk-path -r -complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from compressor-keys" -l fflonk-path -r -complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from compressor-keys" -l compressor-type -r -f -a "{fflonk\t'',plonk\t'',all\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from compressor-keys" -l path -r complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from compressor-keys" -l chain -d 'Chain 
to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from compressor-keys" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from compressor-keys" -l ignore-prerequisites -d 'Ignores prerequisites checks' @@ -609,7 +743,8 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fis complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from build" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from build" -s h -l help -d 'Print help' complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from run" -l components -d 'Components of server to run' -r -complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from run" -l enable-consensus -d 'Enable consensus' -r -f -a "{true\t'',false\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from run" -l enable-consensus -d 'Enable consensus' -r -f -a "true\t'' +false\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from run" -s a -l additional-args -d 'Additional arguments that can be passed through the CLI' -r complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from run" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from run" -l reinit @@ -628,7 +763,8 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fis complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from help" -f -a "run" -d 'Run external node' complete -c zkstack -n 
"__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from help" -f -a "wait" -d 'Wait for external node to start' complete -c zkstack -n "__fish_zkstack_using_subcommand external-node; and __fish_seen_subcommand_from help" -f -a "help" -d 'Print this message or the help of the given subcommand(s)' -complete -c zkstack -n "__fish_zkstack_using_subcommand containers" -s o -l observability -d 'Enable Grafana' -r -f -a "{true\t'',false\t''}" +complete -c zkstack -n "__fish_zkstack_using_subcommand containers" -s o -l observability -d 'Enable Grafana' -r -f -a "true\t'' +false\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand containers" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand containers" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand containers" -l ignore-prerequisites -d 'Ignores prerequisites checks' @@ -763,7 +899,6 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_su complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "register-chain" -d 'Register a new chain on L1 (executed by L1 governor). This command deploys and configures Governance, ChainAdmin, and DiamondProxy contracts, registers chain with BridgeHub and sets pending admin for DiamondProxy. Note: After completion, L2 governor can accept ownership by running `accept-chain-ownership`' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "deploy-l2-contracts" -d 'Deploy all L2 contracts (executed by L1 governor)' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "accept-chain-ownership" -d 'Accept ownership of L2 chain (executed by L2 governor). 
This command should be run after `register-chain` to accept ownership of newly created DiamondProxy contract' -complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "initialize-bridges" -d 'Initialize bridges on L2' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "deploy-consensus-registry" -d 'Deploy L2 consensus registry' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "deploy-multicall3" -d 'Deploy L2 multicall3' complete -c zkstack -n "__fish_zkstack_using_subcommand help; and __fish_seen_subcommand_from chain" -f -a "deploy-timestamp-asserter" -d 'Deploy L2 TimestampAsserter' diff --git a/zkstack_cli/crates/zkstack/completion/zkstack.sh b/zkstack_cli/crates/zkstack/completion/zkstack.sh index ae934b0e5d3a..4fa85e45f3d8 100644 --- a/zkstack_cli/crates/zkstack/completion/zkstack.sh +++ b/zkstack_cli/crates/zkstack/completion/zkstack.sh @@ -96,9 +96,6 @@ _zkstack() { zkstack__chain,init) cmd="zkstack__chain__init" ;; - zkstack__chain,initialize-bridges) - cmd="zkstack__chain__initialize__bridges" - ;; zkstack__chain,register-chain) cmd="zkstack__chain__register__chain" ;; @@ -162,9 +159,6 @@ _zkstack() { zkstack__chain__help,init) cmd="zkstack__chain__help__init" ;; - zkstack__chain__help,initialize-bridges) - cmd="zkstack__chain__help__initialize__bridges" - ;; zkstack__chain__help,register-chain) cmd="zkstack__chain__help__register__chain" ;; @@ -804,9 +798,6 @@ _zkstack() { zkstack__help__chain,init) cmd="zkstack__help__chain__init" ;; - zkstack__help__chain,initialize-bridges) - cmd="zkstack__help__chain__initialize__bridges" - ;; zkstack__help__chain,register-chain) cmd="zkstack__help__chain__register__chain" ;; @@ -1144,7 +1135,7 @@ _zkstack() { return 0 ;; zkstack__chain) - opts="-v -h --verbose --chain --ignore-prerequisites --help create build-transactions init genesis register-chain 
deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" + opts="-v -h --verbose --chain --ignore-prerequisites --help create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1162,7 +1153,7 @@ _zkstack() { return 0 ;; zkstack__chain__accept__chain__ownership) - opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" + opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --zksync --additional-args --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1204,7 +1195,7 @@ _zkstack() { return 0 ;; zkstack__chain__build__transactions) - opts="-o -a -v -h --out --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --l1-rpc-url --verbose --chain --ignore-prerequisites --help" + opts="-o -a -v -h --out --verify --verifier --verifier-url --verifier-api-key --resume --zksync --additional-args --l1-rpc-url --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1335,7 +1326,7 @@ _zkstack() { return 0 ;; zkstack__chain__deploy__consensus__registry) - opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" + opts="-a -v -h --verify --verifier --verifier-url 
--verifier-api-key --resume --zksync --additional-args --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1377,7 +1368,7 @@ _zkstack() { return 0 ;; zkstack__chain__deploy__l2__contracts) - opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" + opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --zksync --additional-args --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1419,7 +1410,7 @@ _zkstack() { return 0 ;; zkstack__chain__deploy__multicall3) - opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" + opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --zksync --additional-args --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1461,7 +1452,7 @@ _zkstack() { return 0 ;; zkstack__chain__deploy__paymaster) - opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" + opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --zksync --additional-args --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1503,7 +1494,7 @@ _zkstack() { return 0 ;; zkstack__chain__deploy__timestamp__asserter) - opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" + opts="-a -v -h --verify --verifier --verifier-url 
--verifier-api-key --resume --zksync --additional-args --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1545,7 +1536,7 @@ _zkstack() { return 0 ;; zkstack__chain__deploy__upgrader) - opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" + opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --zksync --additional-args --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1587,7 +1578,7 @@ _zkstack() { return 0 ;; zkstack__chain__enable__evm__emulator) - opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" + opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --zksync --additional-args --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1755,7 +1746,7 @@ _zkstack() { return 0 ;; zkstack__chain__help) - opts="create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" + opts="create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1992,20 +1983,6 @@ _zkstack() { COMPREPLY=( $(compgen -W 
"${opts}" -- "${cur}") ) return 0 ;; - zkstack__chain__help__initialize__bridges) - opts="" - if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then - COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) - return 0 - fi - case "${prev}" in - *) - COMPREPLY=() - ;; - esac - COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) - return 0 - ;; zkstack__chain__help__register__chain) opts="" if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then @@ -2035,7 +2012,7 @@ _zkstack() { return 0 ;; zkstack__chain__init) - opts="-a -d -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --server-db-url --server-db-name --dont-drop --deploy-paymaster --l1-rpc-url --no-port-reallocation --update-submodules --dev --verbose --chain --ignore-prerequisites --help configs help" + opts="-a -d -v -h --verify --verifier --verifier-url --verifier-api-key --resume --zksync --additional-args --server-db-url --server-db-name --dont-drop --deploy-paymaster --l1-rpc-url --no-port-reallocation --update-submodules --dev --validium-type --verbose --chain --ignore-prerequisites --help configs help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -2085,6 +2062,10 @@ _zkstack() { COMPREPLY=($(compgen -W "true false" -- "${cur}")) return 0 ;; + --validium-type) + COMPREPLY=($(compgen -W "no-da avail eigen-da" -- "${cur}")) + return 0 + ;; --chain) COMPREPLY=($(compgen -f "${cur}")) return 0 @@ -2168,50 +2149,8 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; - zkstack__chain__initialize__bridges) - opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" - if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then - COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) - return 0 - fi - case "${prev}" in - --verify) - COMPREPLY=($(compgen -W "true false" -- "${cur}")) - return 0 - ;; - --verifier) - 
COMPREPLY=($(compgen -W "etherscan sourcify blockscout oklink" -- "${cur}")) - return 0 - ;; - --verifier-url) - COMPREPLY=($(compgen -f "${cur}")) - return 0 - ;; - --verifier-api-key) - COMPREPLY=($(compgen -f "${cur}")) - return 0 - ;; - --additional-args) - COMPREPLY=($(compgen -f "${cur}")) - return 0 - ;; - -a) - COMPREPLY=($(compgen -f "${cur}")) - return 0 - ;; - --chain) - COMPREPLY=($(compgen -f "${cur}")) - return 0 - ;; - *) - COMPREPLY=() - ;; - esac - COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) - return 0 - ;; zkstack__chain__register__chain) - opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" + opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --zksync --additional-args --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -2253,7 +2192,7 @@ _zkstack() { return 0 ;; zkstack__chain__update__token__multiplier__setter) - opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" + opts="-a -v -h --verify --verifier --verifier-url --verifier-api-key --resume --zksync --additional-args --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -2871,7 +2810,7 @@ _zkstack() { return 0 ;; zkstack__dev__contracts) - opts="-v -h --l1-contracts --l2-contracts --system-contracts --verbose --chain --ignore-prerequisites --help" + opts="-v -h --l1-contracts --l1-da-contracts --l2-contracts --system-contracts --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -2881,6 +2820,10 @@ _zkstack() { COMPREPLY=($(compgen -W "true false" -- 
"${cur}")) return 0 ;; + --l1-da-contracts) + COMPREPLY=($(compgen -W "true false" -- "${cur}")) + return 0 + ;; --l2-contracts) COMPREPLY=($(compgen -W "true false" -- "${cur}")) return 0 @@ -4907,7 +4850,7 @@ _zkstack() { return 0 ;; zkstack__ecosystem__build__transactions) - opts="-o -a -v -h --sender --l1-rpc-url --out --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --verbose --chain --ignore-prerequisites --help" + opts="-o -a -v -h --sender --l1-rpc-url --out --verify --verifier --verifier-url --verifier-api-key --resume --zksync --additional-args --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -5181,7 +5124,7 @@ _zkstack() { return 0 ;; zkstack__ecosystem__init) - opts="-a -d -o -v -h --deploy-erc20 --deploy-ecosystem --ecosystem-contracts-path --l1-rpc-url --verify --verifier --verifier-url --verifier-api-key --resume --additional-args --deploy-paymaster --server-db-url --server-db-name --dont-drop --ecosystem-only --dev --observability --no-port-reallocation --update-submodules --verbose --chain --ignore-prerequisites --help" + opts="-a -d -o -v -h --deploy-erc20 --deploy-ecosystem --ecosystem-contracts-path --l1-rpc-url --verify --verifier --verifier-url --verifier-api-key --resume --zksync --additional-args --deploy-paymaster --server-db-url --server-db-name --dont-drop --ecosystem-only --dev --observability --no-port-reallocation --update-submodules --validium-type --support-l2-legacy-shared-bridge-test --skip-contract-compilation-override --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -5251,6 +5194,14 @@ _zkstack() { COMPREPLY=($(compgen -W "true false" -- "${cur}")) return 0 ;; + --validium-type) + COMPREPLY=($(compgen -W "no-da avail eigen-da" -- "${cur}")) + return 0 + ;; + 
--support-l2-legacy-shared-bridge-test) + COMPREPLY=($(compgen -W "true false" -- "${cur}")) + return 0 + ;; --chain) COMPREPLY=($(compgen -f "${cur}")) return 0 @@ -5701,7 +5652,7 @@ _zkstack() { return 0 ;; zkstack__help__chain) - opts="create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership initialize-bridges deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator" + opts="create build-transactions init genesis register-chain deploy-l2-contracts accept-chain-ownership deploy-consensus-registry deploy-multicall3 deploy-timestamp-asserter deploy-upgrader deploy-paymaster update-token-multiplier-setter enable-evm-emulator" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -5924,20 +5875,6 @@ _zkstack() { COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 ;; - zkstack__help__chain__initialize__bridges) - opts="" - if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then - COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) - return 0 - fi - case "${prev}" in - *) - COMPREPLY=() - ;; - esac - COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) - return 0 - ;; zkstack__help__chain__register__chain) opts="" if [[ ${cur} == -* || ${COMP_CWORD} -eq 4 ]] ; then @@ -7169,24 +7106,16 @@ _zkstack() { return 0 ;; zkstack__prover__compressor__keys) - opts="-v -h --plonk-path --fflonk-path --compressor-type --verbose --chain --ignore-prerequisites --help" + opts="-v -h --path --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 fi case "${prev}" in - --plonk-path) - COMPREPLY=($(compgen -f "${cur}")) - return 0 - ;; - --fflonk-path) + --path) COMPREPLY=($(compgen -f "${cur}")) return 0 ;; - --compressor-type) - COMPREPLY=($(compgen -W "fflonk plonk all" -- "${cur}")) - 
return 0 - ;; --chain) COMPREPLY=($(compgen -f "${cur}")) return 0 @@ -7297,7 +7226,7 @@ _zkstack() { return 0 ;; zkstack__prover__init) - opts="-u -d -v -h --dev --proof-store-dir --bucket-base-url --credentials-file --bucket-name --location --project-id --shall-save-to-public-bucket --public-store-dir --public-bucket-base-url --public-credentials-file --public-bucket-name --public-location --public-project-id --clone --bellman-cuda-dir --bellman-cuda --setup-compressor-key --plonk-path --fflonk-path --compressor-type --region --mode --setup-keys --setup-database --prover-db-url --prover-db-name --use-default --dont-drop --cloud-type --verbose --chain --ignore-prerequisites --help" + opts="-u -d -v -h --dev --proof-store-dir --bucket-base-url --credentials-file --bucket-name --location --project-id --shall-save-to-public-bucket --public-store-dir --public-bucket-base-url --public-credentials-file --public-bucket-name --public-location --public-project-id --clone --bellman-cuda-dir --bellman-cuda --setup-compressor-key --path --region --mode --setup-keys --setup-database --prover-db-url --prover-db-name --use-default --dont-drop --cloud-type --verbose --chain --ignore-prerequisites --help" if [[ ${cur} == -* || ${COMP_CWORD} -eq 3 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -7367,18 +7296,10 @@ _zkstack() { COMPREPLY=($(compgen -W "true false" -- "${cur}")) return 0 ;; - --plonk-path) - COMPREPLY=($(compgen -f "${cur}")) - return 0 - ;; - --fflonk-path) + --path) COMPREPLY=($(compgen -f "${cur}")) return 0 ;; - --compressor-type) - COMPREPLY=($(compgen -W "fflonk plonk all" -- "${cur}")) - return 0 - ;; --region) COMPREPLY=($(compgen -W "us europe asia" -- "${cur}")) return 0 diff --git a/zkstack_cli/crates/zkstack/src/accept_ownership.rs b/zkstack_cli/crates/zkstack/src/accept_ownership.rs index ab13661d6adb..1123df0f2240 100644 --- a/zkstack_cli/crates/zkstack/src/accept_ownership.rs +++ 
b/zkstack_cli/crates/zkstack/src/accept_ownership.rs @@ -1,13 +1,4 @@ use anyhow::Context; -use common::{ - forge::{Forge, ForgeScript, ForgeScriptArgs}, - spinner::Spinner, - wallets::Wallet, -}; -use config::{ - forge_interface::script_params::ACCEPT_GOVERNANCE_SCRIPT_PARAMS, ChainConfig, ContractsConfig, - EcosystemConfig, -}; use ethers::{ abi::{parse_abi, Token}, contract::BaseContract, @@ -15,6 +6,15 @@ use ethers::{ }; use lazy_static::lazy_static; use xshell::Shell; +use zkstack_cli_common::{ + forge::{Forge, ForgeScript, ForgeScriptArgs}, + spinner::Spinner, + wallets::Wallet, +}; +use zkstack_cli_config::{ + forge_interface::script_params::ACCEPT_GOVERNANCE_SCRIPT_PARAMS, ChainConfig, ContractsConfig, + EcosystemConfig, +}; use zksync_basic_types::U256; use crate::{ @@ -27,6 +27,12 @@ lazy_static! { parse_abi(&[ "function governanceAcceptOwner(address governor, address target) public", "function chainAdminAcceptAdmin(address admin, address target) public", + "function setDAValidatorPair(address chainAdmin, address target, address l1DaValidator, address l2DaValidator) public", + "function makePermanentRollup(address chainAdmin, address target) public", + "function governanceExecuteCalls(bytes calldata callsToExecute, address target) public", + "function adminExecuteUpgrade(bytes memory diamondCut, address adminAddr, address accessControlRestriction, address chainDiamondProxy)", + "function adminScheduleUpgrade(address adminAddr, address accessControlRestriction, uint256 newProtocolVersion, uint256 timestamp)", + "function updateValidator(address adminAddr,address accessControlRestriction,address validatorTimelock,uint256 chainId,address validatorAddress,bool addValidator) public" ]) .unwrap(), ); @@ -92,8 +98,6 @@ pub async fn accept_owner( accept_ownership(shell, governor, forge).await } -// TODO(EVM-927): this function does not work without the Gateway contracts. 
-#[allow(unused)] #[allow(clippy::too_many_arguments)] pub async fn set_da_validator_pair( shell: &Shell, @@ -106,9 +110,6 @@ pub async fn set_da_validator_pair( forge_args: &ForgeScriptArgs, l1_rpc_url: String, ) -> anyhow::Result<()> { - // TODO(EVM-927): this function does not work without the Gateway contracts. - anyhow::bail!("Gateway upgrade not supported yet!"); - // resume doesn't properly work here. let mut forge_args = forge_args.clone(); forge_args.resume = false; @@ -137,8 +138,6 @@ pub async fn set_da_validator_pair( accept_ownership(shell, governor, forge).await } -// TODO(EVM-927): this function does not work without the Gateway contracts. -#[allow(unused)] #[allow(clippy::too_many_arguments)] pub async fn make_permanent_rollup( shell: &Shell, @@ -149,9 +148,6 @@ pub async fn make_permanent_rollup( forge_args: &ForgeScriptArgs, l1_rpc_url: String, ) -> anyhow::Result<()> { - // TODO(EVM-927): this function does not work without the Gateway contracts. - anyhow::bail!("Gateway upgrade not supported yet!"); - // resume doesn't properly work here. let mut forge_args = forge_args.clone(); forge_args.resume = false; @@ -175,8 +171,6 @@ pub async fn make_permanent_rollup( accept_ownership(shell, governor, forge).await } -// TODO(EVM-927): this function does not work without the Gateway contracts. -#[allow(unused)] #[allow(clippy::too_many_arguments)] pub async fn governance_execute_calls( shell: &Shell, @@ -186,9 +180,6 @@ pub async fn governance_execute_calls( forge_args: &ForgeScriptArgs, l1_rpc_url: String, ) -> anyhow::Result<()> { - // TODO(EVM-927): this function does not work without the Gateway contracts. - anyhow::bail!("Gateway upgrade not supported yet!"); - // resume doesn't properly work here. let mut forge_args = forge_args.clone(); forge_args.resume = false; @@ -214,8 +205,6 @@ pub async fn governance_execute_calls( accept_ownership(shell, governor, forge).await } -// TODO(EVM-927): this function does not work without the Gateway contracts. 
-#[allow(unused)] #[allow(clippy::too_many_arguments)] pub async fn admin_execute_upgrade( shell: &Shell, @@ -226,9 +215,6 @@ pub async fn admin_execute_upgrade( forge_args: &ForgeScriptArgs, l1_rpc_url: String, ) -> anyhow::Result<()> { - // TODO(EVM-927): this function does not work without the Gateway contracts. - anyhow::bail!("Gateway upgrade not supported yet!"); - // resume doesn't properly work here. let mut forge_args = forge_args.clone(); forge_args.resume = false; @@ -264,8 +250,6 @@ pub async fn admin_execute_upgrade( accept_ownership(shell, governor, forge).await } -// TODO(EVM-927): this function does not work without the Gateway contracts. -#[allow(unused)] #[allow(clippy::too_many_arguments)] pub async fn admin_schedule_upgrade( shell: &Shell, @@ -277,9 +261,6 @@ pub async fn admin_schedule_upgrade( forge_args: &ForgeScriptArgs, l1_rpc_url: String, ) -> anyhow::Result<()> { - // TODO(EVM-927): this function does not work without the Gateway contracts. - anyhow::bail!("Gateway upgrade not supported yet!"); - // resume doesn't properly work here. let mut forge_args = forge_args.clone(); forge_args.resume = false; @@ -314,8 +295,6 @@ pub async fn admin_schedule_upgrade( accept_ownership(shell, governor, forge).await } -// TODO(EVM-927): this function does not work without the Gateway contracts. -#[allow(unused)] #[allow(clippy::too_many_arguments)] pub async fn admin_update_validator( shell: &Shell, @@ -328,9 +307,6 @@ pub async fn admin_update_validator( forge_args: &ForgeScriptArgs, l1_rpc_url: String, ) -> anyhow::Result<()> { - // TODO(EVM-927): this function does not work without the Gateway contracts. - anyhow::bail!("Gateway upgrade not supported yet!"); - // resume doesn't properly work here. 
let mut forge_args = forge_args.clone(); forge_args.resume = false; diff --git a/zkstack_cli/crates/zkstack/src/commands/args/containers.rs b/zkstack_cli/crates/zkstack/src/commands/args/containers.rs index c996d65598ff..bff596f93cfe 100644 --- a/zkstack_cli/crates/zkstack/src/commands/args/containers.rs +++ b/zkstack_cli/crates/zkstack/src/commands/args/containers.rs @@ -16,7 +16,7 @@ pub struct ContainersArgsFinal { impl ContainersArgs { pub fn fill_values_with_prompt(self) -> ContainersArgsFinal { let observability = self.observability.unwrap_or_else(|| { - common::PromptConfirm::new(MSG_OBSERVABILITY_RUN_PROMPT) + zkstack_cli_common::PromptConfirm::new(MSG_OBSERVABILITY_RUN_PROMPT) .default(true) .ask() }); diff --git a/zkstack_cli/crates/zkstack/src/commands/args/wait.rs b/zkstack_cli/crates/zkstack/src/commands/args/wait.rs index a3a7e32ae8b4..315552cdb72a 100644 --- a/zkstack_cli/crates/zkstack/src/commands/args/wait.rs +++ b/zkstack_cli/crates/zkstack/src/commands/args/wait.rs @@ -2,10 +2,10 @@ use std::{fmt, future::Future, time::Duration}; use anyhow::Context as _; use clap::Parser; -use common::logger; use reqwest::StatusCode; use serde::{Deserialize, Serialize}; use tokio::time::MissedTickBehavior; +use zkstack_cli_common::logger; use crate::messages::{ msg_wait_connect_err, msg_wait_non_successful_response, msg_wait_not_healthy, diff --git a/zkstack_cli/crates/zkstack/src/commands/autocomplete.rs b/zkstack_cli/crates/zkstack/src/commands/autocomplete.rs index 0f2105cd5efa..ee6526e5aa80 100644 --- a/zkstack_cli/crates/zkstack/src/commands/autocomplete.rs +++ b/zkstack_cli/crates/zkstack/src/commands/autocomplete.rs @@ -6,7 +6,7 @@ use std::{ use anyhow::Context; use clap::CommandFactory; use clap_complete::{generate, Generator}; -use common::logger; +use zkstack_cli_common::logger; use super::args::AutocompleteArgs; use crate::{ diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/accept_chain_ownership.rs 
b/zkstack_cli/crates/zkstack/src/commands/chain/accept_chain_ownership.rs index cf3e2981b3c7..46b92248ae3f 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/accept_chain_ownership.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/accept_chain_ownership.rs @@ -1,7 +1,7 @@ use anyhow::Context; -use common::{forge::ForgeScriptArgs, logger, spinner::Spinner}; -use config::EcosystemConfig; use xshell::Shell; +use zkstack_cli_common::{forge::ForgeScriptArgs, logger, spinner::Spinner}; +use zkstack_cli_config::EcosystemConfig; use crate::{ accept_ownership::accept_admin, diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/args/build_transactions.rs b/zkstack_cli/crates/zkstack/src/commands/chain/args/build_transactions.rs index 793bea487f7e..ab92c91bbda3 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/args/build_transactions.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/args/build_transactions.rs @@ -1,9 +1,9 @@ use std::path::PathBuf; use clap::Parser; -use common::{config::global_config, forge::ForgeScriptArgs, Prompt}; use serde::{Deserialize, Serialize}; use url::Url; +use zkstack_cli_common::{config::global_config, forge::ForgeScriptArgs, Prompt}; use crate::{ consts::DEFAULT_UNSIGNED_TRANSACTIONS_DIR, diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/args/create.rs b/zkstack_cli/crates/zkstack/src/commands/chain/args/create.rs index 5310a46d23c4..c29696b92dec 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/args/create.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/args/create.rs @@ -2,12 +2,12 @@ use std::{path::PathBuf, str::FromStr}; use anyhow::{bail, Context}; use clap::{Parser, ValueEnum, ValueHint}; -use common::{Prompt, PromptConfirm, PromptSelect}; -use config::forge_interface::deploy_ecosystem::output::Erc20Token; use serde::{Deserialize, Serialize}; use slugify_rs::slugify; use strum::{Display, EnumIter, IntoEnumIterator}; -use types::{BaseToken, L1BatchCommitmentMode, L1Network, 
ProverMode, WalletCreation}; +use zkstack_cli_common::{Prompt, PromptConfirm, PromptSelect}; +use zkstack_cli_config::forge_interface::deploy_ecosystem::output::Erc20Token; +use zkstack_cli_types::{BaseToken, L1BatchCommitmentMode, L1Network, ProverMode, WalletCreation}; use zksync_basic_types::H160; use crate::{ diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/args/genesis.rs b/zkstack_cli/crates/zkstack/src/commands/chain/args/genesis.rs index f990cbfd77da..ef98a777352e 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/args/genesis.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/args/genesis.rs @@ -1,10 +1,10 @@ use anyhow::Context; use clap::Parser; -use common::{db::DatabaseConfig, Prompt}; -use config::ChainConfig; use serde::{Deserialize, Serialize}; use slugify_rs::slugify; use url::Url; +use zkstack_cli_common::{db::DatabaseConfig, Prompt}; +use zkstack_cli_config::ChainConfig; use crate::{ defaults::{generate_db_names, DBNames, DATABASE_SERVER_URL}, diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/args/init/configs.rs b/zkstack_cli/crates/zkstack/src/commands/chain/args/init/configs.rs index b34809643cf5..828ad4ebcd43 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/args/init/configs.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/args/init/configs.rs @@ -1,14 +1,14 @@ use clap::Parser; -use common::Prompt; -use config::ChainConfig; use serde::{Deserialize, Serialize}; -use types::L1Network; use url::Url; +use zkstack_cli_common::Prompt; +use zkstack_cli_config::ChainConfig; +use zkstack_cli_types::L1Network; use crate::{ commands::chain::args::{ genesis::{GenesisArgs, GenesisArgsFinal}, - init::InitArgsFinal, + init::{da_configs::ValidiumType, InitArgsFinal}, }, defaults::LOCAL_RPC_URL, messages::{ @@ -28,11 +28,12 @@ pub struct InitConfigsArgs { pub no_port_reallocation: bool, } -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Clone)] pub struct InitConfigsArgsFinal { pub genesis_args: 
GenesisArgsFinal, pub l1_rpc_url: String, pub no_port_reallocation: bool, + pub validium_config: Option, } impl InitConfigsArgs { @@ -55,6 +56,7 @@ impl InitConfigsArgs { genesis_args: self.genesis_args.fill_values_with_prompt(config), l1_rpc_url, no_port_reallocation: self.no_port_reallocation, + validium_config: Some(ValidiumType::read()), } } } @@ -65,6 +67,7 @@ impl InitConfigsArgsFinal { genesis_args: init_args.genesis_args.clone(), l1_rpc_url: init_args.l1_rpc_url.clone(), no_port_reallocation: init_args.no_port_reallocation, + validium_config: init_args.validium_config.clone(), } } } diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/args/init/da_configs.rs b/zkstack_cli/crates/zkstack/src/commands/chain/args/init/da_configs.rs new file mode 100644 index 000000000000..7a6466b802a7 --- /dev/null +++ b/zkstack_cli/crates/zkstack/src/commands/chain/args/init/da_configs.rs @@ -0,0 +1,149 @@ +use clap::{Parser, ValueEnum}; +use serde::{Deserialize, Serialize}; +use strum::{Display, EnumIter, IntoEnumIterator}; +use url::Url; +use zkstack_cli_common::{Prompt, PromptSelect}; +use zksync_config::{ + configs::da_client::avail::{ + AvailClientConfig, AvailDefaultConfig, AvailGasRelayConfig, AvailSecrets, + }, + AvailConfig, +}; + +use crate::{ + defaults::{AVAIL_BRIDGE_API_URL, AVAIL_RPC_URL}, + messages::{ + MSG_AVAIL_API_NODE_URL_PROMPT, MSG_AVAIL_API_TIMEOUT_MS, MSG_AVAIL_APP_ID_PROMPT, + MSG_AVAIL_BRIDGE_API_URL_PROMPT, MSG_AVAIL_CLIENT_TYPE_PROMPT, + MSG_AVAIL_FINALITY_STATE_PROMPT, MSG_AVAIL_GAS_RELAY_API_KEY_PROMPT, + MSG_AVAIL_GAS_RELAY_API_URL_PROMPT, MSG_AVAIL_GAS_RELAY_MAX_RETRIES_PROMPT, + MSG_AVAIL_SEED_PHRASE_PROMPT, MSG_INVALID_URL_ERR, MSG_VALIDIUM_TYPE_PROMPT, + }, +}; + +#[derive(Debug, Serialize, Deserialize, Parser, Clone)] +pub struct ValidiumTypeArgs { + #[clap(long, help = "Type of the Validium network")] + pub validium_type: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, EnumIter, Display, ValueEnum)] +pub 
enum ValidiumTypeInternal { + NoDA, + Avail, + EigenDA, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, EnumIter, Display, ValueEnum)] +pub enum AvailClientTypeInternal { + FullClient, + GasRelay, +} + +#[derive(Debug, Clone, PartialEq)] +pub enum ValidiumType { + NoDA, + Avail((AvailConfig, AvailSecrets)), + EigenDA, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, EnumIter, Display, ValueEnum)] +pub enum AvailFinalityState { + InBlock, + Finalized, +} + +impl ValidiumType { + pub fn read() -> Self { + match PromptSelect::new(MSG_VALIDIUM_TYPE_PROMPT, ValidiumTypeInternal::iter()).ask() { + ValidiumTypeInternal::EigenDA => ValidiumType::EigenDA, + ValidiumTypeInternal::NoDA => ValidiumType::NoDA, + ValidiumTypeInternal::Avail => { + let avail_client_type = PromptSelect::new( + MSG_AVAIL_CLIENT_TYPE_PROMPT, + AvailClientTypeInternal::iter(), + ) + .ask(); + + let client_config = + match avail_client_type { + AvailClientTypeInternal::FullClient => { + AvailClientConfig::FullClient(AvailDefaultConfig { + api_node_url: Prompt::new(MSG_AVAIL_API_NODE_URL_PROMPT) + .default(AVAIL_RPC_URL.as_str()) + .validate_with(url_validator) + .ask(), + app_id: Prompt::new(MSG_AVAIL_APP_ID_PROMPT) + .validate_with(|input: &String| -> Result<(), String> { + input.parse::().map(|_| ()).map_err(|_| { + "Please enter a positive number".to_string() + }) + }) + .ask(), + finality_state: Some( + PromptSelect::new( + MSG_AVAIL_FINALITY_STATE_PROMPT, + AvailFinalityState::iter(), + ) + .ask() + .to_string(), + ), + }) + } + AvailClientTypeInternal::GasRelay => { + AvailClientConfig::GasRelay(AvailGasRelayConfig { + gas_relay_api_url: Prompt::new(MSG_AVAIL_GAS_RELAY_API_URL_PROMPT) + .validate_with(url_validator) + .ask(), + max_retries: Prompt::new(MSG_AVAIL_GAS_RELAY_MAX_RETRIES_PROMPT) + .validate_with(|input: &String| -> Result<(), String> { + input.parse::().map(|_| ()).map_err(|_| { + "Please enter a positive number".to_string() + }) + }) + .ask(), 
+ }) + } + }; + + let avail_config = AvailConfig { + bridge_api_url: Prompt::new(MSG_AVAIL_BRIDGE_API_URL_PROMPT) + .default(AVAIL_BRIDGE_API_URL.as_str()) + .validate_with(url_validator) + .ask(), + timeout_ms: Prompt::new(MSG_AVAIL_API_TIMEOUT_MS) + .validate_with(|input: &String| -> Result<(), String> { + input + .parse::() + .map(|_| ()) + .map_err(|_| "Please enter a positive number".to_string()) + }) + .ask(), + config: client_config, + }; + + let avail_secrets = match avail_client_type { + AvailClientTypeInternal::FullClient => AvailSecrets { + seed_phrase: Some(Prompt::new(MSG_AVAIL_SEED_PHRASE_PROMPT).ask()), + gas_relay_api_key: None, + }, + AvailClientTypeInternal::GasRelay => AvailSecrets { + seed_phrase: None, + gas_relay_api_key: Some( + Prompt::new(MSG_AVAIL_GAS_RELAY_API_KEY_PROMPT).ask(), + ), + }, + }; + + ValidiumType::Avail((avail_config, avail_secrets)) + } + } + } +} + +#[allow(clippy::ptr_arg)] +fn url_validator(val: &String) -> Result<(), String> { + Url::parse(val) + .map(|_| ()) + .map_err(|_| MSG_INVALID_URL_ERR.to_string()) +} diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/args/init/mod.rs b/zkstack_cli/crates/zkstack/src/commands/chain/args/init/mod.rs index 59810964d2c3..770ac50996a2 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/args/init/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/args/init/mod.rs @@ -1,12 +1,15 @@ use clap::Parser; -use common::{forge::ForgeScriptArgs, Prompt}; -use config::ChainConfig; use serde::{Deserialize, Serialize}; -use types::L1Network; use url::Url; +use zkstack_cli_common::{forge::ForgeScriptArgs, Prompt}; +use zkstack_cli_config::ChainConfig; +use zkstack_cli_types::{L1BatchCommitmentMode, L1Network}; use crate::{ - commands::chain::args::genesis::{GenesisArgs, GenesisArgsFinal}, + commands::chain::args::{ + genesis::{GenesisArgs, GenesisArgsFinal}, + init::da_configs::ValidiumType, + }, defaults::LOCAL_RPC_URL, messages::{ MSG_DEPLOY_PAYMASTER_PROMPT, 
MSG_DEV_ARG_HELP, MSG_L1_RPC_URL_HELP, @@ -16,6 +19,7 @@ use crate::{ }; pub mod configs; +pub(crate) mod da_configs; #[derive(Debug, Clone, Serialize, Deserialize, Parser)] pub struct InitArgs { @@ -39,6 +43,8 @@ pub struct InitArgs { pub update_submodules: Option, #[clap(long, help = MSG_DEV_ARG_HELP)] pub dev: bool, + #[clap(flatten)] + pub validium_args: da_configs::ValidiumTypeArgs, } impl InitArgs { @@ -58,7 +64,7 @@ impl InitArgs { true } else { self.deploy_paymaster.unwrap_or_else(|| { - common::PromptConfirm::new(MSG_DEPLOY_PAYMASTER_PROMPT) + zkstack_cli_common::PromptConfirm::new(MSG_DEPLOY_PAYMASTER_PROMPT) .default(true) .ask() }) @@ -82,23 +88,35 @@ impl InitArgs { }) }; + let validium_config = match config.l1_batch_commit_data_generator_mode { + L1BatchCommitmentMode::Validium => match self.validium_args.validium_type { + None => Some(ValidiumType::read()), + Some(da_configs::ValidiumTypeInternal::NoDA) => Some(ValidiumType::NoDA), + Some(da_configs::ValidiumTypeInternal::Avail) => panic!( + "Avail is not supported via CLI args, use interactive mode" // TODO: Add support for configuration via CLI args + ), + Some(da_configs::ValidiumTypeInternal::EigenDA) => Some(ValidiumType::EigenDA), + }, + _ => None, + }; + InitArgsFinal { forge_args: self.forge_args, genesis_args: genesis.fill_values_with_prompt(config), deploy_paymaster, l1_rpc_url, no_port_reallocation: self.no_port_reallocation, - dev: self.dev, + validium_config, } } } -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Clone)] pub struct InitArgsFinal { pub forge_args: ForgeScriptArgs, pub genesis_args: GenesisArgsFinal, pub deploy_paymaster: bool, pub l1_rpc_url: String, pub no_port_reallocation: bool, - pub dev: bool, + pub validium_config: Option, } diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/build_transactions.rs b/zkstack_cli/crates/zkstack/src/commands/chain/build_transactions.rs index d3953c656596..3bf4db7188f7 100644 --- 
a/zkstack_cli/crates/zkstack/src/commands/chain/build_transactions.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/build_transactions.rs @@ -1,10 +1,10 @@ use anyhow::Context; -use common::{git, logger, spinner::Spinner}; -use config::{ - copy_configs, traits::SaveConfigWithBasePath, update_from_chain_config, EcosystemConfig, -}; use ethers::utils::hex::ToHex; use xshell::Shell; +use zkstack_cli_common::{git, logger, spinner::Spinner}; +use zkstack_cli_config::{ + copy_configs, traits::SaveConfigWithBasePath, update_from_chain_config, EcosystemConfig, +}; use crate::{ commands::chain::{ @@ -18,13 +18,12 @@ use crate::{ }, }; -const REGISTER_CHAIN_TXNS_FILE_SRC: &str = - "contracts/l1-contracts/broadcast/RegisterHyperchain.s.sol/9/dry-run/run-latest.json"; -const REGISTER_CHAIN_TXNS_FILE_DST: &str = "register-hyperchain-txns.json"; +pub const REGISTER_CHAIN_TXNS_FILE_SRC: &str = + "contracts/l1-contracts/broadcast/RegisterZKChain.s.sol/9/dry-run/run-latest.json"; +pub const REGISTER_CHAIN_TXNS_FILE_DST: &str = "register-zk-chain-txns.json"; -const SCRIPT_CONFIG_FILE_SRC: &str = - "contracts/l1-contracts/script-config/register-hyperchain.toml"; -const SCRIPT_CONFIG_FILE_DST: &str = "register-hyperchain.toml"; +const SCRIPT_CONFIG_FILE_SRC: &str = "contracts/l1-contracts/script-config/register-zk-chain.toml"; +const SCRIPT_CONFIG_FILE_DST: &str = "register-zk-chain.toml"; pub(crate) async fn run(args: BuildTransactionsArgs, shell: &Shell) -> anyhow::Result<()> { let config = EcosystemConfig::from_file(shell)?; diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/common.rs b/zkstack_cli/crates/zkstack/src/commands/chain/common.rs index 0c35b3ee4fe0..e14c2460afcd 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/common.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/common.rs @@ -1,6 +1,6 @@ -use common::spinner::Spinner; -use config::{ChainConfig, EcosystemConfig}; -use types::{BaseToken, L1Network, WalletCreation}; +use 
zkstack_cli_common::spinner::Spinner; +use zkstack_cli_config::{ChainConfig, EcosystemConfig}; +use zkstack_cli_types::{BaseToken, L1Network, WalletCreation}; use crate::{ consts::AMOUNT_FOR_DISTRIBUTION_TO_WALLETS, @@ -30,7 +30,7 @@ pub async fn distribute_eth( if let Some(setter) = chain_wallets.token_multiplier_setter { addresses.push(setter.address) } - common::ethereum::distribute_eth( + zkstack_cli_common::ethereum::distribute_eth( wallets.operator, addresses, l1_rpc_url, @@ -59,7 +59,7 @@ pub async fn mint_base_token( let addresses = vec![wallets.governor.address, chain_wallets.governor.address]; let amount = AMOUNT_FOR_DISTRIBUTION_TO_WALLETS * base_token.nominator as u128 / base_token.denominator as u128; - common::ethereum::mint_token( + zkstack_cli_common::ethereum::mint_token( wallets.governor, base_token.address, addresses, diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/convert_to_gateway.rs b/zkstack_cli/crates/zkstack/src/commands/chain/convert_to_gateway.rs index 3043f3a52837..0b06cd8de3c2 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/convert_to_gateway.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/convert_to_gateway.rs @@ -1,11 +1,13 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. 
use anyhow::Context; -use common::{ +use ethers::{abi::parse_abi, contract::BaseContract, types::Bytes, utils::hex}; +use lazy_static::lazy_static; +use xshell::Shell; +use zkstack_cli_common::{ config::global_config, forge::{Forge, ForgeScriptArgs}, wallets::Wallet, }; -use config::{ +use zkstack_cli_config::{ forge_interface::{ deploy_ecosystem::input::InitialDeploymentConfig, deploy_gateway_ctm::{input::DeployGatewayCTMInput, output::DeployGatewayCTMOutput}, @@ -15,9 +17,6 @@ use config::{ traits::{ReadConfig, SaveConfig, SaveConfigWithBasePath}, ChainConfig, EcosystemConfig, GenesisConfig, }; -use ethers::{abi::parse_abi, contract::BaseContract, types::Bytes, utils::hex}; -use lazy_static::lazy_static; -use xshell::Shell; use zksync_basic_types::H256; use zksync_config::configs::GatewayConfig; @@ -49,11 +48,7 @@ lazy_static! { ); } -#[allow(unused)] pub async fn run(args: ForgeScriptArgs, shell: &Shell) -> anyhow::Result<()> { - // TODO(EVM-927): this function does not work without the Gateway contracts. 
- anyhow::bail!("Gateway upgrade not supported yet!"); - let chain_name = global_config().chain_name.clone(); let ecosystem_config = EcosystemConfig::from_file(shell)?; let chain_config = ecosystem_config @@ -163,7 +158,7 @@ pub async fn run(args: ForgeScriptArgs, shell: &Shell) -> anyhow::Result<()> { Ok(()) } -async fn calculate_gateway_ctm( +pub async fn calculate_gateway_ctm( shell: &Shell, forge_args: ForgeScriptArgs, config: &EcosystemConfig, @@ -214,7 +209,7 @@ async fn calculate_gateway_ctm( Ok(gateway_config) } -async fn deploy_gateway_ctm( +pub async fn deploy_gateway_ctm( shell: &Shell, forge_args: ForgeScriptArgs, config: &EcosystemConfig, @@ -259,7 +254,7 @@ async fn deploy_gateway_ctm( Ok(()) } -async fn gateway_governance_whitelisting( +pub async fn gateway_governance_whitelisting( shell: &Shell, forge_args: ForgeScriptArgs, config: &EcosystemConfig, @@ -397,7 +392,7 @@ async fn gateway_governance_whitelisting( } #[allow(clippy::too_many_arguments)] -async fn call_script( +pub async fn call_script( shell: &Shell, forge_args: ForgeScriptArgs, data: &Bytes, diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/create.rs b/zkstack_cli/crates/zkstack/src/commands/chain/create.rs index 367dddd95884..529d861a2559 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/create.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/create.rs @@ -1,13 +1,13 @@ use std::cell::OnceCell; use anyhow::Context; -use common::{logger, spinner::Spinner}; -use config::{ +use xshell::Shell; +use zkstack_cli_common::{logger, spinner::Spinner}; +use zkstack_cli_config::{ create_local_configs_dir, create_wallets, traits::{ReadConfigWithBasePath, SaveConfigWithBasePath}, ChainConfig, EcosystemConfig, GenesisConfig, }; -use xshell::Shell; use zksync_basic_types::L2ChainId; use crate::{ @@ -25,7 +25,7 @@ pub fn run(args: ChainCreateArgs, shell: &Shell) -> anyhow::Result<()> { create(args, &mut ecosystem_config, shell) } -fn create( +pub fn create( args: 
ChainCreateArgs, ecosystem_config: &mut EcosystemConfig, shell: &Shell, diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/deploy_l2_contracts.rs b/zkstack_cli/crates/zkstack/src/commands/chain/deploy_l2_contracts.rs index 09732145d1db..7cf628b1170b 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/deploy_l2_contracts.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/deploy_l2_contracts.rs @@ -1,12 +1,13 @@ use std::path::Path; use anyhow::Context; -use common::{ +use xshell::Shell; +use zkstack_cli_common::{ contracts::build_l2_contracts, forge::{Forge, ForgeScriptArgs}, spinner::Spinner, }; -use config::{ +use zkstack_cli_config::{ forge_interface::{ deploy_l2_contracts::{ input::DeployL2ContractsInput, @@ -20,7 +21,6 @@ use config::{ traits::{ReadConfig, SaveConfig, SaveConfigWithBasePath}, ChainConfig, ContractsConfig, EcosystemConfig, }; -use xshell::Shell; use crate::{ messages::{ @@ -33,7 +33,6 @@ use crate::{ pub enum Deploy2ContractsOption { All, Upgrader, - InitiailizeBridges, ConsensusRegistry, Multicall3, TimestampAsserter, @@ -61,6 +60,7 @@ pub async fn run( &ecosystem_config, &mut contracts, args, + true, ) .await?; } @@ -104,16 +104,6 @@ pub async fn run( ) .await?; } - Deploy2ContractsOption::InitiailizeBridges => { - initialize_bridges( - shell, - &chain_config, - &ecosystem_config, - &mut contracts, - args, - ) - .await? 
- } } contracts.save_with_base_path(shell, &chain_config.configs)?; @@ -131,39 +121,23 @@ async fn build_and_deploy( forge_args: ForgeScriptArgs, signature: Option<&str>, mut update_config: impl FnMut(&Shell, &Path) -> anyhow::Result<()>, + with_broadcast: bool, ) -> anyhow::Result<()> { build_l2_contracts(shell.clone(), ecosystem_config.link_to_code.clone())?; - call_forge(shell, chain_config, ecosystem_config, forge_args, signature).await?; - update_config( - shell, - &DEPLOY_L2_CONTRACTS_SCRIPT_PARAMS.output(&chain_config.link_to_code), - )?; - Ok(()) -} - -pub async fn initialize_bridges( - shell: &Shell, - chain_config: &ChainConfig, - ecosystem_config: &EcosystemConfig, - contracts_config: &mut ContractsConfig, - forge_args: ForgeScriptArgs, -) -> anyhow::Result<()> { - let signature = if let Some(true) = chain_config.legacy_bridge { - Some("runDeployLegacySharedBridge") - } else { - Some("runDeploySharedBridge") - }; - build_and_deploy( + call_forge( shell, chain_config, ecosystem_config, forge_args, signature, - |shell, out| { - contracts_config.set_l2_shared_bridge(&InitializeBridgeOutput::read(shell, out)?) - }, + with_broadcast, ) - .await + .await?; + update_config( + shell, + &DEPLOY_L2_CONTRACTS_SCRIPT_PARAMS.output(&chain_config.link_to_code), + )?; + Ok(()) } pub async fn deploy_upgrader( @@ -182,6 +156,7 @@ pub async fn deploy_upgrader( |shell, out| { contracts_config.set_default_l2_upgrade(&DefaultL2UpgradeOutput::read(shell, out)?) }, + true, ) .await } @@ -202,6 +177,7 @@ pub async fn deploy_consensus_registry( |shell, out| { contracts_config.set_consensus_registry(&ConsensusRegistryOutput::read(shell, out)?) 
}, + true, ) .await } @@ -220,6 +196,7 @@ pub async fn deploy_multicall3( forge_args, Some("runDeployMulticall3"), |shell, out| contracts_config.set_multicall3(&Multicall3Output::read(shell, out)?), + true, ) .await } @@ -241,6 +218,7 @@ pub async fn deploy_timestamp_asserter( contracts_config .set_timestamp_asserter_addr(&TimestampAsserterOutput::read(shell, out)?) }, + true, ) .await } @@ -251,18 +229,14 @@ pub async fn deploy_l2_contracts( ecosystem_config: &EcosystemConfig, contracts_config: &mut ContractsConfig, forge_args: ForgeScriptArgs, + with_broadcast: bool, ) -> anyhow::Result<()> { - let signature = if let Some(true) = chain_config.legacy_bridge { - Some("runWithLegacyBridge") - } else { - None - }; build_and_deploy( shell, chain_config, ecosystem_config, forge_args, - signature, + None, |shell, out| { contracts_config.set_l2_shared_bridge(&InitializeBridgeOutput::read(shell, out)?)?; contracts_config.set_default_l2_upgrade(&DefaultL2UpgradeOutput::read(shell, out)?)?; @@ -272,6 +246,7 @@ pub async fn deploy_l2_contracts( .set_timestamp_asserter_addr(&TimestampAsserterOutput::read(shell, out)?)?; Ok(()) }, + with_broadcast, ) .await } @@ -282,8 +257,13 @@ async fn call_forge( ecosystem_config: &EcosystemConfig, forge_args: ForgeScriptArgs, signature: Option<&str>, + with_broadcast: bool, ) -> anyhow::Result<()> { - let input = DeployL2ContractsInput::new(chain_config, ecosystem_config.era_chain_id)?; + let input = DeployL2ContractsInput::new( + chain_config, + &ecosystem_config.get_contracts_config()?, + ecosystem_config.era_chain_id, + )?; let foundry_contracts_path = chain_config.path_to_l1_foundry(); let secrets = chain_config.get_secrets_config()?; input.save( @@ -304,8 +284,10 @@ async fn call_forge( .l1_rpc_url .expose_str() .to_string(), - ) - .with_broadcast(); + ); + if with_broadcast { + forge = forge.with_broadcast(); + } if let Some(signature) = signature { forge = forge.with_signature(signature); diff --git 
a/zkstack_cli/crates/zkstack/src/commands/chain/deploy_paymaster.rs b/zkstack_cli/crates/zkstack/src/commands/chain/deploy_paymaster.rs index c6b48ca87856..1c103ea29910 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/deploy_paymaster.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/deploy_paymaster.rs @@ -1,6 +1,7 @@ use anyhow::Context; -use common::forge::{Forge, ForgeScriptArgs}; -use config::{ +use xshell::Shell; +use zkstack_cli_common::forge::{Forge, ForgeScriptArgs}; +use zkstack_cli_config::{ forge_interface::{ paymaster::{DeployPaymasterInput, DeployPaymasterOutput}, script_params::DEPLOY_PAYMASTER_SCRIPT_PARAMS, @@ -8,7 +9,6 @@ use config::{ traits::{ReadConfig, SaveConfig, SaveConfigWithBasePath}, ChainConfig, ContractsConfig, EcosystemConfig, }; -use xshell::Shell; use crate::{ messages::{MSG_CHAIN_NOT_INITIALIZED, MSG_L1_SECRETS_MUST_BE_PRESENTED}, diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/enable_evm_emulator.rs b/zkstack_cli/crates/zkstack/src/commands/chain/enable_evm_emulator.rs index a6bbd2c9dc70..e15cdbacf103 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/enable_evm_emulator.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/enable_evm_emulator.rs @@ -1,7 +1,7 @@ use anyhow::Context; -use common::{forge::ForgeScriptArgs, logger}; -use config::{traits::ReadConfigWithBasePath, EcosystemConfig, GenesisConfig}; use xshell::Shell; +use zkstack_cli_common::{forge::ForgeScriptArgs, logger}; +use zkstack_cli_config::{traits::ReadConfigWithBasePath, EcosystemConfig, GenesisConfig}; use crate::{ enable_evm_emulator::enable_evm_emulator, diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/gateway_upgrade.rs b/zkstack_cli/crates/zkstack/src/commands/chain/gateway_upgrade.rs index 8153bcfb0b7e..f096daef032a 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/gateway_upgrade.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/gateway_upgrade.rs @@ -1,38 +1,37 @@ -/// TODO(EVM-927): Note that the 
contents of this file are not useable without Gateway contracts. use anyhow::Context; use clap::{Parser, ValueEnum}; -use common::{ - config::global_config, - forge::{Forge, ForgeScriptArgs}, -}; -use config::{ - forge_interface::{ - gateway_chain_upgrade::{ - input::GatewayChainUpgradeInput, output::GatewayChainUpgradeOutput, - }, - gateway_ecosystem_upgrade::output::GatewayEcosystemUpgradeOutput, - script_params::{GATEWAY_UPGRADE_CHAIN_PARAMS, GATEWAY_UPGRADE_ECOSYSTEM_PARAMS}, - }, - traits::{ReadConfig, ReadConfigWithBasePath, SaveConfig, SaveConfigWithBasePath}, - ChainConfig, EcosystemConfig, -}; use ethers::{ - abi::{encode, parse_abi}, + abi::parse_abi, contract::BaseContract, + providers::{Http, Middleware, Provider}, + signers::Signer, + types::{transaction::eip2718::TypedTransaction, Eip1559TransactionRequest}, utils::hex, }; use lazy_static::lazy_static; use serde::{Deserialize, Serialize}; use strum::EnumIter; -use types::L1BatchCommitmentMode; use xshell::Shell; -use zksync_basic_types::{H256, U256}; -use zksync_types::{web3::keccak256, Address, L2_NATIVE_TOKEN_VAULT_ADDRESS}; +use zkstack_cli_common::{ + config::global_config, + forge::{Forge, ForgeScriptArgs}, +}; +use zkstack_cli_config::{ + forge_interface::{ + gateway_ecosystem_upgrade::output::GatewayEcosystemUpgradeOutput, + script_params::{ACCEPT_GOVERNANCE_SCRIPT_PARAMS, GATEWAY_UPGRADE_ECOSYSTEM_PARAMS}, + }, + traits::{ReadConfig, ReadConfigWithBasePath, SaveConfigWithBasePath}, + ChainConfig, EcosystemConfig, +}; +use zkstack_cli_types::L1BatchCommitmentMode; +use zksync_basic_types::U256; +use zksync_types::Address; use crate::{ - accept_ownership::{ - admin_execute_upgrade, admin_schedule_upgrade, admin_update_validator, - set_da_validator_pair, + commands::dev::commands::gateway::{ + check_chain_readiness, fetch_chain_info, get_admin_call_builder, + set_upgrade_timestamp_calldata, DAMode, GatewayUpgradeArgsInner, GatewayUpgradeInfo, }, messages::{MSG_CHAIN_NOT_INITIALIZED, 
MSG_L1_SECRETS_MUST_BE_PRESENTED}, utils::forge::{fill_forge_private_key, WalletOwner}, @@ -47,9 +46,6 @@ lazy_static! { Debug, Serialize, Deserialize, Clone, Copy, ValueEnum, EnumIter, strum::Display, PartialEq, Eq, )] pub enum GatewayChainUpgradeStage { - // some config paaram - AdaptConfig, - // Does not require admin, still needs to be done to update configs, etc PrepareStage1, @@ -78,6 +74,8 @@ pub struct GatewayUpgradeArgs { pub forge_args: ForgeScriptArgs, chain_upgrade_stage: GatewayChainUpgradeStage, + + l2_wrapped_base_token_addr: Option
, } lazy_static! { @@ -90,11 +88,7 @@ lazy_static! { ); } -#[allow(unused)] pub async fn run(args: GatewayUpgradeArgs, shell: &Shell) -> anyhow::Result<()> { - // TODO(EVM-927): this function does not work without the Gateway contracts. - anyhow::bail!("Gateway upgrade not supported yet!"); - let ecosystem_config = EcosystemConfig::from_file(shell)?; let chain_name = global_config().chain_name.clone(); @@ -111,15 +105,14 @@ pub async fn run(args: GatewayUpgradeArgs, shell: &Shell) -> anyhow::Result<()> .to_string(); match args.chain_upgrade_stage { - GatewayChainUpgradeStage::AdaptConfig => adapt_config(shell, chain_config).await, GatewayChainUpgradeStage::PrepareStage1 => { - prepare_stage1(shell, args, ecosystem_config, chain_config, l1_url).await + prepare_stage1(shell, ecosystem_config, chain_config, l1_url).await } GatewayChainUpgradeStage::ScheduleStage1 => { - schedule_stage1(shell, args, ecosystem_config, chain_config, l1_url).await + schedule_stage1(shell, ecosystem_config, chain_config, l1_url).await } GatewayChainUpgradeStage::FinalizeStage1 => { - finalize_stage1(shell, args, ecosystem_config, chain_config, l1_url).await + finalize_stage1(shell, ecosystem_config, chain_config, l1_url).await } GatewayChainUpgradeStage::FinalizeStage2 => { finalize_stage2(shell, ecosystem_config, chain_config).await @@ -133,173 +126,130 @@ pub async fn run(args: GatewayUpgradeArgs, shell: &Shell) -> anyhow::Result<()> } } -fn encode_ntv_asset_id(l1_chain_id: U256, addr: Address) -> H256 { - let encoded_data = encode(&[ - ethers::abi::Token::Uint(l1_chain_id), - ethers::abi::Token::Address(L2_NATIVE_TOKEN_VAULT_ADDRESS), - ethers::abi::Token::Address(addr), - ]); - - H256(keccak256(&encoded_data)) -} - -async fn adapt_config(shell: &Shell, chain_config: ChainConfig) -> anyhow::Result<()> { - println!("Adapting config"); - let mut contracts_config = chain_config.get_contracts_config()?; - let genesis_config = chain_config.get_genesis_config()?; - - 
contracts_config.l2.legacy_shared_bridge_addr = contracts_config.bridges.shared.l2_address; - contracts_config.l1.base_token_asset_id = Some(encode_ntv_asset_id( - genesis_config.l1_chain_id.0.into(), - contracts_config.l1.base_token_addr, - )); - - contracts_config.save_with_base_path(shell, &chain_config.configs)?; - println!("Done"); - - Ok(()) -} - async fn prepare_stage1( shell: &Shell, - args: GatewayUpgradeArgs, ecosystem_config: EcosystemConfig, chain_config: ChainConfig, l1_url: String, ) -> anyhow::Result<()> { - let chain_upgrade_config_path = - GATEWAY_UPGRADE_CHAIN_PARAMS.input(&ecosystem_config.link_to_code); - - let gateway_upgrade_input = GatewayChainUpgradeInput::new(&chain_config); - gateway_upgrade_input.save(shell, chain_upgrade_config_path.clone())?; - - let mut forge = Forge::new(&ecosystem_config.path_to_l1_foundry()) - .script( - &GATEWAY_UPGRADE_CHAIN_PARAMS.script(), - args.forge_args.clone(), - ) - .with_ffi() - .with_rpc_url(l1_url) - .with_slow() - .with_broadcast(); - - forge = fill_forge_private_key( - forge, - Some(&chain_config.get_wallets_config()?.governor), - WalletOwner::Governor, - )?; - - println!("Preparing the chain for the upgrade!"); - - forge.run(shell)?; - - println!("done!"); - - let chain_output = GatewayChainUpgradeOutput::read( - shell, - GATEWAY_UPGRADE_CHAIN_PARAMS.output(&ecosystem_config.link_to_code), - )?; - let gateway_ecosystem_preparation_output = GatewayEcosystemUpgradeOutput::read_with_base_path(shell, ecosystem_config.config)?; // No need to save it, we have enough for now let mut contracts_config = chain_config.get_contracts_config()?; + let general_config = chain_config.get_general_config()?; + let genesis_config = chain_config.get_genesis_config()?; - contracts_config - .ecosystem_contracts - .stm_deployment_tracker_proxy_addr = Some( - gateway_ecosystem_preparation_output - .deployed_addresses - .bridgehub - .ctm_deployment_tracker_proxy_addr, + let upgrade_info = 
GatewayUpgradeInfo::from_gateway_ecosystem_upgrade( + contracts_config.ecosystem_contracts.bridgehub_proxy_addr, + gateway_ecosystem_preparation_output, ); - // This is force deployment data for creating new contracts, not really relevant here tbh, - contracts_config.ecosystem_contracts.force_deployments_data = Some(hex::encode( - &gateway_ecosystem_preparation_output - .contracts_config - .force_deployments_data - .0, - )); - contracts_config.ecosystem_contracts.native_token_vault_addr = Some( - gateway_ecosystem_preparation_output - .deployed_addresses - .native_token_vault_addr, - ); - contracts_config - .ecosystem_contracts - .l1_bytecodes_supplier_addr = Some( - gateway_ecosystem_preparation_output - .deployed_addresses - .l1_bytecodes_supplier_addr, - ); - contracts_config.l1.access_control_restriction_addr = - Some(chain_output.access_control_restriction); - contracts_config.l1.chain_admin_addr = chain_output.chain_admin_addr; - contracts_config.l1.rollup_l1_da_validator_addr = Some( - gateway_ecosystem_preparation_output - .deployed_addresses - .rollup_l1_da_validator_addr, - ); - contracts_config.l1.no_da_validium_l1_validator_addr = Some( - gateway_ecosystem_preparation_output - .deployed_addresses - .validium_l1_da_validator_addr, - ); + let da_mode: DAMode = + if genesis_config.l1_batch_commit_data_generator_mode == L1BatchCommitmentMode::Rollup { + DAMode::PermanentRollup + } else { + DAMode::Validium + }; + + let chain_info = fetch_chain_info( + &upgrade_info, + &GatewayUpgradeArgsInner { + chain_id: chain_config.chain_id.as_u64(), + l1_rpc_url: l1_url, + l2_rpc_url: general_config + .api_config + .context("api config")? + .web3_json_rpc + .http_url, + validator_addr1: chain_config.get_wallets_config()?.operator.address, + validator_addr2: chain_config.get_wallets_config()?.blob_operator.address, + da_mode, + dangerous_no_cross_check: false, + }, + ) + .await?; - let validum = chain_config - .get_genesis_config()? 
- .l1_batch_commit_data_generator_mode - == L1BatchCommitmentMode::Validium; - - // We do not use chain output because IMHO we should delete it altogether from there - contracts_config.l2.da_validator_addr = if !validum { - Some( - gateway_ecosystem_preparation_output - .contracts_config - .expected_rollup_l2_da_validator, - ) - } else { - Some( - gateway_ecosystem_preparation_output - .contracts_config - .expected_validium_l2_da_validator, - ) - }; - contracts_config.l2.l2_native_token_vault_proxy_addr = Some(L2_NATIVE_TOKEN_VAULT_ADDRESS); - contracts_config.l2.legacy_shared_bridge_addr = contracts_config.bridges.shared.l2_address; + upgrade_info.update_contracts_config(&mut contracts_config, &chain_info, da_mode, true); contracts_config.save_with_base_path(shell, chain_config.configs)?; Ok(()) } -const NEW_PROTOCOL_VERSION: u64 = 0x1b00000000; +async fn call_chain_admin( + l1_url: String, + chain_config: ChainConfig, + data: Vec, +) -> anyhow::Result<()> { + let wallet = chain_config + .get_wallets_config()? 
+ .governor + .private_key + .context("gov pk missing")?; + let contracts_config = chain_config.get_contracts_config()?; + + // Initialize provider + let provider = Provider::::try_from(l1_url)?; + + // Initialize wallet + let chain_id = provider.get_chainid().await?.as_u64(); + let wallet = wallet.with_chain_id(chain_id); + + let tx = TypedTransaction::Eip1559(Eip1559TransactionRequest { + to: Some(contracts_config.l1.chain_admin_addr.into()), + // 10m should be always enough + gas: Some(U256::from(10_000_000)), + data: Some(data.into()), + value: Some(U256::zero()), + nonce: Some( + provider + .get_transaction_count(wallet.address(), None) + .await?, + ), + max_fee_per_gas: Some(provider.get_gas_price().await?), + max_priority_fee_per_gas: Some(U256::zero()), + chain_id: Some(chain_id.into()), + ..Default::default() + }); + + let signed_tx = wallet.sign_transaction(&tx).await.unwrap(); + + let tx = provider + .send_raw_transaction(tx.rlp_signed(&signed_tx)) + .await + .unwrap(); + println!("Sent tx with hash: {}", hex::encode(tx.0)); + + let receipt = tx.await?.context("receipt not present")?; + + if receipt.status.unwrap() != 1.into() { + anyhow::bail!("Transaction failed!"); + } + + Ok(()) +} async fn schedule_stage1( shell: &Shell, - args: GatewayUpgradeArgs, ecosystem_config: EcosystemConfig, chain_config: ChainConfig, l1_url: String, ) -> anyhow::Result<()> { + let gateway_ecosystem_preparation_output = + GatewayEcosystemUpgradeOutput::read_with_base_path(shell, ecosystem_config.config)?; + println!("Schedule stage1 of the upgrade!!"); + let calldata = set_upgrade_timestamp_calldata( + gateway_ecosystem_preparation_output + .contracts_config + .new_protocol_version, + // Immediatelly + 0, + ); - admin_schedule_upgrade( - shell, - &ecosystem_config, - &chain_config.get_contracts_config()?, - // For now it is hardcoded both in scripts and here - U256::from(NEW_PROTOCOL_VERSION), - // We only do instant upgrades for now - U256::zero(), - 
&chain_config.get_wallets_config()?.governor, - &args.forge_args, - l1_url.clone(), - ) - .await?; + call_chain_admin(l1_url, chain_config, calldata).await?; println!("done!"); @@ -308,109 +258,69 @@ async fn schedule_stage1( async fn finalize_stage1( shell: &Shell, - args: GatewayUpgradeArgs, ecosystem_config: EcosystemConfig, chain_config: ChainConfig, l1_url: String, ) -> anyhow::Result<()> { println!("Finalizing stage1 of chain upgrade!"); - let mut contracts_config = chain_config.get_contracts_config()?; + let contracts_config = chain_config.get_contracts_config()?; + let general_config = chain_config.get_general_config()?; + let genesis_config = chain_config.get_genesis_config()?; + + println!("Checking chain readiness..."); + check_chain_readiness( + l1_url.clone(), + general_config + .api_config + .as_ref() + .context("api")? + .web3_json_rpc + .http_url + .clone(), + chain_config.chain_id.as_u64(), + ) + .await?; + + println!("The chain is ready!"); + let gateway_ecosystem_preparation_output = GatewayEcosystemUpgradeOutput::read_with_base_path(shell, &ecosystem_config.config)?; - let old_validator_timelock = contracts_config.l1.validator_timelock_addr; - let new_validator_timelock = gateway_ecosystem_preparation_output - .deployed_addresses - .validator_timelock_addr; - - let validators = [ - chain_config.get_wallets_config()?.operator.address, - chain_config.get_wallets_config()?.blob_operator.address, - ]; - - println!("Setting new validators!"); - for val in validators { - admin_update_validator( - shell, - &ecosystem_config, - &chain_config, - old_validator_timelock, - val, - false, - &chain_config.get_wallets_config()?.governor, - &args.forge_args, - l1_url.clone(), - ) - .await?; - - admin_update_validator( - shell, - &ecosystem_config, - &chain_config, - new_validator_timelock, - val, - true, - &chain_config.get_wallets_config()?.governor, - &args.forge_args, - l1_url.clone(), - ) - .await?; - } + let da_mode: DAMode = + if 
genesis_config.l1_batch_commit_data_generator_mode == L1BatchCommitmentMode::Rollup { + DAMode::PermanentRollup + } else { + DAMode::Validium + }; - println!("Setting new validators done!"); + let upgrade_info = GatewayUpgradeInfo::from_gateway_ecosystem_upgrade( + contracts_config.ecosystem_contracts.bridgehub_proxy_addr, + gateway_ecosystem_preparation_output, + ); + let args = GatewayUpgradeArgsInner { + chain_id: chain_config.chain_id.as_u64(), + l1_rpc_url: l1_url.clone(), + l2_rpc_url: general_config + .api_config + .context("api config")? + .web3_json_rpc + .http_url, + validator_addr1: chain_config.get_wallets_config()?.operator.address, + validator_addr2: chain_config.get_wallets_config()?.blob_operator.address, + da_mode, + dangerous_no_cross_check: false, + }; - contracts_config.l1.validator_timelock_addr = gateway_ecosystem_preparation_output - .deployed_addresses - .validator_timelock_addr; + let chain_info = fetch_chain_info(&upgrade_info, &args).await?; - admin_execute_upgrade( - shell, - &ecosystem_config, - &chain_config.get_contracts_config()?, - &chain_config.get_wallets_config()?.governor, - gateway_ecosystem_preparation_output - .chain_upgrade_diamond_cut - .0, - &args.forge_args, - l1_url.clone(), - ) - .await?; + let admin_calls_finalize = get_admin_call_builder(&upgrade_info, &chain_info, args); - let l1_da_validator_contract = if chain_config - .get_genesis_config()? - .l1_batch_commit_data_generator_mode - == L1BatchCommitmentMode::Rollup - { - ecosystem_config - .get_contracts_config()? - .l1 - .rollup_l1_da_validator_addr - } else { - ecosystem_config - .get_contracts_config()? 
- .l1 - .no_da_validium_l1_validator_addr - } - .context("l1 da validator")?; + admin_calls_finalize.display(); - set_da_validator_pair( - shell, - &ecosystem_config, - contracts_config.l1.chain_admin_addr, - &chain_config.get_wallets_config()?.governor, - contracts_config.l1.diamond_proxy_addr, - l1_da_validator_contract, - contracts_config - .l2 - .da_validator_addr - .context("l2_da_validator_addr")?, - &args.forge_args, - l1_url, - ) - .await?; + let admin_calldata = admin_calls_finalize.compile_full_calldata(); - contracts_config.save_with_base_path(shell, &chain_config.configs)?; + call_chain_admin(l1_url, chain_config, admin_calldata).await?; println!("done!"); @@ -460,7 +370,6 @@ async fn set_weth_for_chain( ]) .unwrap(), ); - let contracts_config = chain_config.get_contracts_config()?; let calldata = contract .encode( "addL2WethToStore", @@ -474,17 +383,15 @@ async fn set_weth_for_chain( .l1 .chain_admin_addr, chain_config.chain_id.as_u64(), - contracts_config - .l2 - .predeployed_l2_wrapped_base_token_address - .expect("No predeployed_l2_wrapped_base_token_address"), + args.l2_wrapped_base_token_addr + .context("l2_wrapped_base_token_addr")?, ), ) .unwrap(); let mut forge = Forge::new(&ecosystem_config.path_to_l1_foundry()) .script( - &GATEWAY_UPGRADE_ECOSYSTEM_PARAMS.script(), + &ACCEPT_GOVERNANCE_SCRIPT_PARAMS.script(), forge_args.clone(), ) .with_ffi() diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/genesis/database.rs b/zkstack_cli/crates/zkstack/src/commands/chain/genesis/database.rs index edf480946be1..fe12bc017a45 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/genesis/database.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/genesis/database.rs @@ -1,17 +1,17 @@ use std::path::PathBuf; use anyhow::Context; -use common::{ +use xshell::Shell; +use zkstack_cli_common::{ config::global_config, db::{drop_db_if_exists, init_db, migrate_db, DatabaseConfig}, logger, }; -use config::{ +use zkstack_cli_config::{ override_config, 
set_file_artifacts, set_rocks_db_config, set_server_database, traits::SaveConfigWithBasePath, ChainConfig, EcosystemConfig, FileArtifacts, }; -use types::ProverMode; -use xshell::Shell; +use zkstack_cli_types::ProverMode; use zksync_basic_types::commitment::L1BatchCommitmentMode; use crate::{ diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/genesis/mod.rs b/zkstack_cli/crates/zkstack/src/commands/chain/genesis/mod.rs index c1cc03174aeb..5cb289d32609 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/genesis/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/genesis/mod.rs @@ -1,8 +1,8 @@ use anyhow::Context; use clap::{command, Parser, Subcommand}; -use common::{logger, spinner::Spinner}; -use config::{ChainConfig, EcosystemConfig}; use xshell::Shell; +use zkstack_cli_common::{logger, spinner::Spinner}; +use zkstack_cli_config::{ChainConfig, EcosystemConfig}; use crate::{ commands::chain::{ diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/genesis/server.rs b/zkstack_cli/crates/zkstack/src/commands/chain/genesis/server.rs index 090792e8007a..9a52595c978c 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/genesis/server.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/genesis/server.rs @@ -1,14 +1,14 @@ use anyhow::Context; -use common::{ +use xshell::Shell; +use zkstack_cli_common::{ logger, server::{Server, ServerMode}, spinner::Spinner, }; -use config::{ +use zkstack_cli_config::{ traits::FileConfigWithDefaultName, ChainConfig, ContractsConfig, EcosystemConfig, GeneralConfig, GenesisConfig, SecretsConfig, WalletsConfig, }; -use xshell::Shell; use crate::messages::{ MSG_CHAIN_NOT_INITIALIZED, MSG_FAILED_TO_RUN_SERVER_ERR, MSG_GENESIS_COMPLETED, diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/init/configs.rs b/zkstack_cli/crates/zkstack/src/commands/chain/init/configs.rs index 31c5c681e7d3..eff256284d13 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/init/configs.rs +++ 
b/zkstack_cli/crates/zkstack/src/commands/chain/init/configs.rs @@ -1,17 +1,22 @@ use anyhow::Context; -use common::logger; -use config::{ +use ethers::types::Address; +use xshell::Shell; +use zkstack_cli_common::logger; +use zkstack_cli_config::{ copy_configs, set_l1_rpc_url, traits::SaveConfigWithBasePath, update_from_chain_config, ChainConfig, ContractsConfig, EcosystemConfig, }; -use ethers::types::Address; -use xshell::Shell; +use zksync_config::configs::DataAvailabilitySecrets; use crate::{ commands::{ chain::{ - args::init::configs::{InitConfigsArgs, InitConfigsArgsFinal}, + args::init::{ + configs::{InitConfigsArgs, InitConfigsArgsFinal}, + da_configs::ValidiumType, + }, genesis, + utils::encode_ntv_asset_id, }, portal::update_portal_config, }, @@ -56,6 +61,13 @@ pub async fn init_configs( )?; } + let consensus_keys = generate_consensus_keys(); + + // Initialize secrets config + let mut secrets = chain_config.get_secrets_config()?; + set_l1_rpc_url(&mut secrets, init_args.l1_rpc_url.clone())?; + secrets.consensus = Some(get_consensus_secrets(&consensus_keys)); + let mut general_config = chain_config.get_general_config()?; if general_config.proof_data_handler_config.is_some() && general_config.prover_gateway.is_some() @@ -73,10 +85,23 @@ pub async fn init_configs( .consensus_config .context(MSG_CONSENSUS_CONFIG_MISSING_ERR)?; - let consensus_keys = generate_consensus_keys(); consensus_config.genesis_spec = Some(get_genesis_specs(chain_config, &consensus_keys)); general_config.consensus_config = Some(consensus_config); + if let Some(validium_config) = init_args.validium_config.clone() { + match validium_config { + ValidiumType::NoDA => { + general_config.da_client_config = None; + } + ValidiumType::Avail((avail_config, avail_secrets)) => { + general_config.da_client_config = Some(avail_config.into()); + secrets.data_availability = Some(DataAvailabilitySecrets::Avail(avail_secrets)); + } + ValidiumType::EigenDA => {} // This is left blank to be able to define 
the config by file instead of giving it via CLI + } + } + + secrets.save_with_base_path(shell, &chain_config.configs)?; general_config.save_with_base_path(shell, &chain_config.configs)?; // Initialize genesis config @@ -90,14 +115,12 @@ pub async fn init_configs( contracts_config.l1.governance_addr = Address::zero(); contracts_config.l1.chain_admin_addr = Address::zero(); contracts_config.l1.base_token_addr = chain_config.base_token.address; + contracts_config.l1.base_token_asset_id = Some(encode_ntv_asset_id( + genesis_config.l1_chain_id.0.into(), + contracts_config.l1.base_token_addr, + )); contracts_config.save_with_base_path(shell, &chain_config.configs)?; - // Initialize secrets config - let mut secrets = chain_config.get_secrets_config()?; - set_l1_rpc_url(&mut secrets, init_args.l1_rpc_url.clone())?; - secrets.consensus = Some(get_consensus_secrets(&consensus_keys)); - secrets.save_with_base_path(shell, &chain_config.configs)?; - genesis::database::update_configs(init_args.genesis_args.clone(), shell, chain_config)?; update_portal_config(shell, chain_config) diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/init/mod.rs b/zkstack_cli/crates/zkstack/src/commands/chain/init/mod.rs index a4c21b5bc5d1..938c24a8ac1b 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/init/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/init/mod.rs @@ -1,12 +1,14 @@ use anyhow::Context; use clap::{command, Parser, Subcommand}; -use common::{git, logger, spinner::Spinner}; -use config::{traits::SaveConfigWithBasePath, ChainConfig, EcosystemConfig}; -use types::BaseToken; use xshell::Shell; +use zkstack_cli_common::{git, logger, spinner::Spinner}; +use zkstack_cli_config::{traits::SaveConfigWithBasePath, ChainConfig, EcosystemConfig}; +use zkstack_cli_types::{BaseToken, L1BatchCommitmentMode}; +use zksync_config::DAClientConfig; +use zksync_types::Address; use crate::{ - accept_ownership::accept_admin, + accept_ownership::{accept_admin, 
make_permanent_rollup, set_da_validator_pair}, commands::chain::{ args::init::{ configs::{InitConfigsArgs, InitConfigsArgsFinal}, @@ -23,8 +25,8 @@ use crate::{ enable_evm_emulator::enable_evm_emulator, messages::{ msg_initializing_chain, MSG_ACCEPTING_ADMIN_SPINNER, MSG_CHAIN_INITIALIZED, - MSG_CHAIN_NOT_FOUND_ERR, MSG_DEPLOYING_PAYMASTER, MSG_GENESIS_DATABASE_ERR, - MSG_REGISTERING_CHAIN_SPINNER, MSG_SELECTED_CONFIG, + MSG_CHAIN_NOT_FOUND_ERR, MSG_DA_PAIR_REGISTRATION_SPINNER, MSG_DEPLOYING_PAYMASTER, + MSG_GENESIS_DATABASE_ERR, MSG_REGISTERING_CHAIN_SPINNER, MSG_SELECTED_CONFIG, MSG_UPDATING_TOKEN_MULTIPLIER_SETTER_SPINNER, MSG_WALLET_TOKEN_MULTIPLIER_SETTER_NOT_FOUND, }, }; @@ -123,18 +125,24 @@ pub async fn init( // Set token multiplier setter address (run by L2 Governor) if chain_config.base_token != BaseToken::eth() { let spinner = Spinner::new(MSG_UPDATING_TOKEN_MULTIPLIER_SETTER_SPINNER); + let chain_contracts = chain_config.get_contracts_config()?; set_token_multiplier_setter( shell, ecosystem_config, &chain_config.get_wallets_config()?.governor, - contracts_config.l1.chain_admin_addr, + chain_contracts + .l1 + .access_control_restriction_addr + .context("chain_contracts.l1.access_control_restriction_addr")?, + chain_contracts.l1.diamond_proxy_addr, chain_config .get_wallets_config() .unwrap() .token_multiplier_setter .context(MSG_WALLET_TOKEN_MULTIPLIER_SETTER_NOT_FOUND)? 
.address, - &init_args.forge_args, + chain_contracts.l1.chain_admin_addr, + &init_args.forge_args.clone(), init_args.l1_rpc_url.clone(), ) .await?; @@ -162,10 +170,46 @@ pub async fn init( ecosystem_config, &mut contracts_config, init_args.forge_args.clone(), + true, ) .await?; contracts_config.save_with_base_path(shell, &chain_config.configs)?; + let l1_da_validator_addr = get_l1_da_validator(chain_config); + + let spinner = Spinner::new(MSG_DA_PAIR_REGISTRATION_SPINNER); + set_da_validator_pair( + shell, + ecosystem_config, + contracts_config.l1.chain_admin_addr, + &chain_config.get_wallets_config()?.governor, + contracts_config.l1.diamond_proxy_addr, + l1_da_validator_addr.context("l1_da_validator_addr")?, + contracts_config + .l2 + .da_validator_addr + .context("da_validator_addr")?, + &init_args.forge_args.clone(), + init_args.l1_rpc_url.clone(), + ) + .await?; + spinner.finish(); + + if chain_config.l1_batch_commit_data_generator_mode == L1BatchCommitmentMode::Rollup { + println!("Making permanent rollup!"); + make_permanent_rollup( + shell, + ecosystem_config, + contracts_config.l1.chain_admin_addr, + &chain_config.get_wallets_config()?.governor, + contracts_config.l1.diamond_proxy_addr, + &init_args.forge_args.clone(), + init_args.l1_rpc_url.clone(), + ) + .await?; + println!("Done"); + } + // Setup legacy bridge - shouldn't be used for new chains (run by L1 Governor) if let Some(true) = chain_config.legacy_bridge { setup_legacy_bridge( @@ -200,3 +244,27 @@ pub async fn init( Ok(()) } + +pub(crate) fn get_l1_da_validator(chain_config: &ChainConfig) -> anyhow::Result
{ + let contracts_config = chain_config.get_contracts_config()?; + + let l1_da_validator_contract = match chain_config.l1_batch_commit_data_generator_mode { + L1BatchCommitmentMode::Rollup => contracts_config.l1.rollup_l1_da_validator_addr, + L1BatchCommitmentMode::Validium => { + let general_config = chain_config.get_general_config()?; + if let Some(da_client_config) = general_config.da_client_config { + match da_client_config { + DAClientConfig::Avail(_) => contracts_config.l1.avail_l1_da_validator_addr, + DAClientConfig::NoDA => contracts_config.l1.no_da_validium_l1_validator_addr, + DAClientConfig::Eigen(_) => contracts_config.l1.eigenda_l1_validator_addr, + _ => anyhow::bail!("DA client config is not supported"), + } + } else { + contracts_config.l1.no_da_validium_l1_validator_addr + } + } + } + .context("l1 da validator")?; + + Ok(l1_da_validator_contract) +} diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/migrate_from_gateway.rs b/zkstack_cli/crates/zkstack/src/commands/chain/migrate_from_gateway.rs index 71521e62c3e6..cf9b9e8e6399 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/migrate_from_gateway.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/migrate_from_gateway.rs @@ -1,13 +1,22 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. 
use anyhow::Context; use clap::Parser; -use common::{ +use ethers::{ + abi::parse_abi, + contract::BaseContract, + providers::{Http, Middleware, Provider}, + types::Bytes, + utils::hex, +}; +use lazy_static::lazy_static; +use serde::{Deserialize, Serialize}; +use xshell::Shell; +use zkstack_cli_common::{ config::global_config, forge::{Forge, ForgeScriptArgs}, wallets::Wallet, zks_provider::ZKSProvider, }; -use config::{ +use zkstack_cli_config::{ forge_interface::{ gateway_preparation::{input::GatewayPreparationConfig, output::GatewayPreparationOutput}, script_params::GATEWAY_PREPARATION, @@ -15,17 +24,7 @@ use config::{ traits::{ReadConfig, SaveConfig, SaveConfigWithBasePath}, EcosystemConfig, }; -use ethers::{ - abi::parse_abi, - contract::BaseContract, - providers::{Http, Middleware, Provider}, - types::Bytes, - utils::hex, -}; -use lazy_static::lazy_static; -use serde::{Deserialize, Serialize}; -use types::L1BatchCommitmentMode; -use xshell::Shell; +use zkstack_cli_types::L1BatchCommitmentMode; use zksync_basic_types::{ pubdata_da::PubdataSendingMode, settlement::SettlementMode, H256, U256, U64, }; @@ -58,11 +57,7 @@ lazy_static! { ); } -#[allow(unused)] pub async fn run(args: MigrateFromGatewayArgs, shell: &Shell) -> anyhow::Result<()> { - // TODO(EVM-927): this function does not work without the Gateway contracts. 
- anyhow::bail!("Gateway upgrade not supported yet!"); - let ecosystem_config = EcosystemConfig::from_file(shell)?; let chain_name = global_config().chain_name.clone(); diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/migrate_to_gateway.rs b/zkstack_cli/crates/zkstack/src/commands/chain/migrate_to_gateway.rs index e5202c570183..c51f6414ce97 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/migrate_to_gateway.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/migrate_to_gateway.rs @@ -1,11 +1,21 @@ use anyhow::Context; use clap::Parser; -use common::{ +use ethers::{ + abi::parse_abi, + contract::BaseContract, + providers::{Http, Middleware, Provider}, + types::Bytes, + utils::hex, +}; +use lazy_static::lazy_static; +use serde::{Deserialize, Serialize}; +use xshell::Shell; +use zkstack_cli_common::{ config::global_config, forge::{Forge, ForgeScriptArgs}, wallets::Wallet, }; -use config::{ +use zkstack_cli_config::{ forge_interface::{ gateway_preparation::{input::GatewayPreparationConfig, output::GatewayPreparationOutput}, script_params::GATEWAY_PREPARATION, @@ -13,17 +23,7 @@ use config::{ traits::{ReadConfig, SaveConfig, SaveConfigWithBasePath}, EcosystemConfig, }; -use ethers::{ - abi::parse_abi, - contract::BaseContract, - providers::{Http, Middleware, Provider}, - types::Bytes, - utils::hex, -}; -use lazy_static::lazy_static; -use serde::{Deserialize, Serialize}; -use types::L1BatchCommitmentMode; -use xshell::Shell; +use zkstack_cli_types::L1BatchCommitmentMode; use zksync_basic_types::{ pubdata_da::PubdataSendingMode, settlement::SettlementMode, Address, H256, U256, U64, }; @@ -67,12 +67,7 @@ lazy_static! { ); } -// TODO(EVM-927): merge gateway contracts -#[allow(unused)] pub async fn run(args: MigrateToGatewayArgs, shell: &Shell) -> anyhow::Result<()> { - // TODO(EVM-927): this function does not work without the Gateway contracts. 
- anyhow::bail!("Gateway upgrade not supported yet!"); - let ecosystem_config = EcosystemConfig::from_file(shell)?; let chain_name = global_config().chain_name.clone(); diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/mod.rs b/zkstack_cli/crates/zkstack/src/commands/chain/mod.rs index 474f1c779016..d6c1851d0c96 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/mod.rs @@ -1,4 +1,4 @@ -use ::common::forge::ForgeScriptArgs; +use ::zkstack_cli_common::forge::ForgeScriptArgs; use args::build_transactions::BuildTransactionsArgs; pub(crate) use args::create::ChainCreateArgsFinal; use clap::{command, Subcommand}; @@ -13,20 +13,25 @@ use crate::commands::chain::{ mod accept_chain_ownership; pub(crate) mod args; mod build_transactions; -mod common; -mod convert_to_gateway; -mod create; +pub(crate) mod common; +#[cfg(feature = "gateway")] +pub(crate) mod convert_to_gateway; +pub(crate) mod create; pub mod deploy_l2_contracts; pub mod deploy_paymaster; mod enable_evm_emulator; +#[cfg(feature = "gateway")] mod gateway_upgrade; pub mod genesis; pub mod init; +#[cfg(feature = "gateway")] mod migrate_from_gateway; +#[cfg(feature = "gateway")] mod migrate_to_gateway; pub mod register_chain; mod set_token_multiplier_setter; mod setup_legacy_bridge; +mod utils; #[derive(Subcommand, Debug)] pub enum ChainCommands { @@ -52,9 +57,6 @@ pub enum ChainCommands { /// DiamondProxy contract. 
#[command(alias = "accept-ownership")] AcceptChainOwnership(ForgeScriptArgs), - /// Initialize bridges on L2 - #[command(alias = "bridge")] - InitializeBridges(ForgeScriptArgs), /// Deploy L2 consensus registry #[command(alias = "consensus")] DeployConsensusRegistry(ForgeScriptArgs), @@ -72,6 +74,18 @@ pub enum ChainCommands { DeployPaymaster(ForgeScriptArgs), /// Update Token Multiplier Setter address on L1 UpdateTokenMultiplierSetter(ForgeScriptArgs), + /// Prepare chain to be an eligible gateway + #[cfg(feature = "gateway")] + ConvertToGateway(ForgeScriptArgs), + /// Migrate chain to gateway + #[cfg(feature = "gateway")] + MigrateToGateway(migrate_to_gateway::MigrateToGatewayArgs), + /// Migrate chain from gateway + #[cfg(feature = "gateway")] + MigrateFromGateway(migrate_from_gateway::MigrateFromGatewayArgs), + /// Upgrade to the protocol version that supports Gateway + #[cfg(feature = "gateway")] + GatewayUpgrade(gateway_upgrade::GatewayUpgradeArgs), /// Enable EVM emulation on chain (Not supported yet) EnableEvmEmulator(ForgeScriptArgs), } @@ -99,13 +113,18 @@ pub(crate) async fn run(shell: &Shell, args: ChainCommands) -> anyhow::Result<() ChainCommands::DeployUpgrader(args) => { deploy_l2_contracts::run(args, shell, Deploy2ContractsOption::Upgrader).await } - ChainCommands::InitializeBridges(args) => { - deploy_l2_contracts::run(args, shell, Deploy2ContractsOption::InitiailizeBridges).await - } ChainCommands::DeployPaymaster(args) => deploy_paymaster::run(args, shell).await, ChainCommands::UpdateTokenMultiplierSetter(args) => { set_token_multiplier_setter::run(args, shell).await } + #[cfg(feature = "gateway")] + ChainCommands::ConvertToGateway(args) => convert_to_gateway::run(args, shell).await, + #[cfg(feature = "gateway")] + ChainCommands::MigrateToGateway(args) => migrate_to_gateway::run(args, shell).await, + #[cfg(feature = "gateway")] + ChainCommands::MigrateFromGateway(args) => migrate_from_gateway::run(args, shell).await, + #[cfg(feature = "gateway")] 
+ ChainCommands::GatewayUpgrade(args) => gateway_upgrade::run(args, shell).await, ChainCommands::EnableEvmEmulator(args) => enable_evm_emulator::run(args, shell).await, } } diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/register_chain.rs b/zkstack_cli/crates/zkstack/src/commands/chain/register_chain.rs index 6269b0af0ea9..626d25438385 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/register_chain.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/register_chain.rs @@ -1,10 +1,11 @@ use anyhow::Context; -use common::{ +use xshell::Shell; +use zkstack_cli_common::{ forge::{Forge, ForgeScriptArgs}, logger, spinner::Spinner, }; -use config::{ +use zkstack_cli_config::{ forge_interface::{ register_chain::{input::RegisterChainL1Config, output::RegisterChainOutput}, script_params::REGISTER_CHAIN_SCRIPT_PARAMS, @@ -12,7 +13,6 @@ use config::{ traits::{ReadConfig, SaveConfig, SaveConfigWithBasePath}, ChainConfig, ContractsConfig, EcosystemConfig, }; -use xshell::Shell; use crate::{ messages::{ diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/set_token_multiplier_setter.rs b/zkstack_cli/crates/zkstack/src/commands/chain/set_token_multiplier_setter.rs index bff3cfe467b5..e1a57dcd0f00 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/set_token_multiplier_setter.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/set_token_multiplier_setter.rs @@ -1,14 +1,16 @@ use anyhow::Context; -use common::{ +use ethers::{abi::parse_abi, contract::BaseContract, utils::hex}; +use lazy_static::lazy_static; +use xshell::Shell; +use zkstack_cli_common::{ forge::{Forge, ForgeScript, ForgeScriptArgs}, logger, spinner::Spinner, wallets::Wallet, }; -use config::{forge_interface::script_params::ACCEPT_GOVERNANCE_SCRIPT_PARAMS, EcosystemConfig}; -use ethers::{abi::parse_abi, contract::BaseContract, utils::hex}; -use lazy_static::lazy_static; -use xshell::Shell; +use zkstack_cli_config::{ + 
forge_interface::script_params::ACCEPT_GOVERNANCE_SCRIPT_PARAMS, EcosystemConfig, +}; use zksync_basic_types::Address; use crate::{ @@ -23,7 +25,7 @@ use crate::{ lazy_static! { static ref SET_TOKEN_MULTIPLIER_SETTER: BaseContract = BaseContract::from( parse_abi(&[ - "function chainSetTokenMultiplierSetter(address chainAdmin, address target) public" + "function chainSetTokenMultiplierSetter(address chainAdmin, address accessControlRestriction, address diamondProxyAddress, address setter) public" ]) .unwrap(), ); @@ -54,8 +56,13 @@ pub async fn run(args: ForgeScriptArgs, shell: &Shell) -> anyhow::Result<()> { shell, &ecosystem_config, &chain_config.get_wallets_config()?.governor, - contracts_config.l1.chain_admin_addr, + contracts_config + .l1 + .access_control_restriction_addr + .context("access_control_restriction_addr")?, + contracts_config.l1.diamond_proxy_addr, token_multiplier_setter_address, + contracts_config.l1.chain_admin_addr, &args.clone(), l1_url, ) @@ -70,12 +77,15 @@ pub async fn run(args: ForgeScriptArgs, shell: &Shell) -> anyhow::Result<()> { Ok(()) } +#[allow(clippy::too_many_arguments)] pub async fn set_token_multiplier_setter( shell: &Shell, ecosystem_config: &EcosystemConfig, governor: &Wallet, - chain_admin_address: Address, - target_address: Address, + access_control_restriction_address: Address, + diamond_proxy_address: Address, + new_setter_address: Address, + chain_admin_addr: Address, forge_args: &ForgeScriptArgs, l1_rpc_url: String, ) -> anyhow::Result<()> { @@ -88,7 +98,12 @@ pub async fn set_token_multiplier_setter( let calldata = SET_TOKEN_MULTIPLIER_SETTER .encode( "chainSetTokenMultiplierSetter", - (chain_admin_address, target_address), + ( + chain_admin_addr, + access_control_restriction_address, + diamond_proxy_address, + new_setter_address, + ), ) .unwrap(); let foundry_contracts_path = ecosystem_config.path_to_l1_foundry(); diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/setup_legacy_bridge.rs 
b/zkstack_cli/crates/zkstack/src/commands/chain/setup_legacy_bridge.rs index 8973fccced86..24ef9d3c16d9 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/setup_legacy_bridge.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/setup_legacy_bridge.rs @@ -1,16 +1,16 @@ use anyhow::Context; -use common::{ +use xshell::Shell; +use zkstack_cli_common::{ forge::{Forge, ForgeScriptArgs}, spinner::Spinner, }; -use config::{ +use zkstack_cli_config::{ forge_interface::{ script_params::SETUP_LEGACY_BRIDGE, setup_legacy_bridge::SetupLegacyBridgeInput, }, traits::SaveConfig, ChainConfig, ContractsConfig, EcosystemConfig, }; -use xshell::Shell; use crate::{ messages::{MSG_DEPLOYING_PAYMASTER, MSG_L1_SECRETS_MUST_BE_PRESENTED}, @@ -31,6 +31,14 @@ pub async fn setup_legacy_bridge( transparent_proxy_admin: contracts_config .ecosystem_contracts .transparent_proxy_admin_addr, + l1_nullifier_proxy: contracts_config + .bridges + .l1_nullifier_addr + .context("`l1_nullifier` missing")?, + l1_native_token_vault: contracts_config + .ecosystem_contracts + .native_token_vault_addr + .context("`native_token_vault` missing")?, erc20bridge_proxy: contracts_config.bridges.erc20.l1_address, token_weth_address: Default::default(), chain_id: chain_config.chain_id, diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/utils.rs b/zkstack_cli/crates/zkstack/src/commands/chain/utils.rs new file mode 100644 index 000000000000..830be06a7bb3 --- /dev/null +++ b/zkstack_cli/crates/zkstack/src/commands/chain/utils.rs @@ -0,0 +1,12 @@ +use ethers::abi::encode; +use zksync_types::{web3::keccak256, Address, H256, L2_NATIVE_TOKEN_VAULT_ADDRESS, U256}; + +pub fn encode_ntv_asset_id(l1_chain_id: U256, addr: Address) -> H256 { + let encoded_data = encode(&[ + ethers::abi::Token::Uint(l1_chain_id), + ethers::abi::Token::Address(L2_NATIVE_TOKEN_VAULT_ADDRESS), + ethers::abi::Token::Address(addr), + ]); + + H256(keccak256(&encoded_data)) +} diff --git 
a/zkstack_cli/crates/zkstack/src/commands/consensus/mod.rs b/zkstack_cli/crates/zkstack/src/commands/consensus/mod.rs index 7a998efedbf2..ad64207b481c 100644 --- a/zkstack_cli/crates/zkstack/src/commands/consensus/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/consensus/mod.rs @@ -3,8 +3,6 @@ use std::{borrow::Borrow, collections::HashMap, path::PathBuf, sync::Arc}; /// Consensus registry contract operations. /// Includes code duplicated from `zksync_node_consensus::registry::abi`. use anyhow::Context as _; -use common::{config::global_config, logger, wallets::Wallet}; -use config::EcosystemConfig; use conv::*; use ethers::{ abi::Detokenize, @@ -16,6 +14,8 @@ use ethers::{ }; use tokio::time::MissedTickBehavior; use xshell::Shell; +use zkstack_cli_common::{config::global_config, logger, wallets::Wallet}; +use zkstack_cli_config::EcosystemConfig; use zksync_consensus_crypto::ByteFmt; use zksync_consensus_roles::{attester, validator}; @@ -144,10 +144,10 @@ fn print_attesters(committee: &attester::Committee) { } struct Setup { - chain: config::ChainConfig, - contracts: config::ContractsConfig, - general: config::GeneralConfig, - genesis: config::GenesisConfig, + chain: zkstack_cli_config::ChainConfig, + contracts: zkstack_cli_config::ContractsConfig, + general: zkstack_cli_config::GeneralConfig, + genesis: zkstack_cli_config::GenesisConfig, } impl Setup { diff --git a/zkstack_cli/crates/zkstack/src/commands/containers.rs b/zkstack_cli/crates/zkstack/src/commands/containers.rs index 8367289bd67f..8469365a295f 100644 --- a/zkstack_cli/crates/zkstack/src/commands/containers.rs +++ b/zkstack_cli/crates/zkstack/src/commands/containers.rs @@ -1,9 +1,9 @@ use std::path::PathBuf; use anyhow::{anyhow, Context}; -use common::{docker, logger, spinner::Spinner}; -use config::{EcosystemConfig, DOCKER_COMPOSE_FILE, ERA_OBSERVABILITY_COMPOSE_FILE}; use xshell::Shell; +use zkstack_cli_common::{docker, logger, spinner::Spinner}; +use zkstack_cli_config::{EcosystemConfig, 
DOCKER_COMPOSE_FILE, ERA_OBSERVABILITY_COMPOSE_FILE}; use super::args::ContainersArgs; use crate::{ @@ -46,7 +46,10 @@ pub fn initialize_docker(shell: &Shell, ecosystem: &EcosystemConfig) -> anyhow:: fn start_container(shell: &Shell, compose_file: &str, retry_msg: &str) -> anyhow::Result<()> { while let Err(err) = docker::up(shell, compose_file, true) { logger::error(err.to_string()); - if !common::PromptConfirm::new(retry_msg).default(true).ask() { + if !zkstack_cli_common::PromptConfirm::new(retry_msg) + .default(true) + .ask() + { return Err(err); } } diff --git a/zkstack_cli/crates/zkstack/src/commands/contract_verifier/args/init.rs b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/args/init.rs index 7ba7d3cb40cf..a3aadfbce4ae 100644 --- a/zkstack_cli/crates/zkstack/src/commands/contract_verifier/args/init.rs +++ b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/args/init.rs @@ -1,7 +1,7 @@ use anyhow::Context; use clap::Parser; -use common::PromptSelect; use xshell::Shell; +use zkstack_cli_common::PromptSelect; use super::releases::{get_releases_with_arch, Arch, Version}; use crate::messages::{ diff --git a/zkstack_cli/crates/zkstack/src/commands/contract_verifier/args/releases.rs b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/args/releases.rs index ab169220f299..a8199372fc2c 100644 --- a/zkstack_cli/crates/zkstack/src/commands/contract_verifier/args/releases.rs +++ b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/args/releases.rs @@ -1,8 +1,8 @@ use std::str::FromStr; -use common::spinner::Spinner; use serde::Deserialize; use xshell::Shell; +use zkstack_cli_common::spinner::Spinner; use crate::messages::{MSG_INVALID_ARCH_ERR, MSG_NO_RELEASES_FOUND_ERR}; diff --git a/zkstack_cli/crates/zkstack/src/commands/contract_verifier/build.rs b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/build.rs index 0ba72f6b2257..2e4107383d0a 100644 --- a/zkstack_cli/crates/zkstack/src/commands/contract_verifier/build.rs +++ 
b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/build.rs @@ -1,7 +1,7 @@ use anyhow::Context; -use common::{cmd::Cmd, logger}; -use config::EcosystemConfig; use xshell::{cmd, Shell}; +use zkstack_cli_common::{cmd::Cmd, logger}; +use zkstack_cli_config::EcosystemConfig; use crate::messages::{ MSG_BUILDING_CONTRACT_VERIFIER, MSG_CHAIN_NOT_FOUND_ERR, @@ -13,7 +13,7 @@ pub(crate) async fn build(shell: &Shell) -> anyhow::Result<()> { let chain = ecosystem .load_current_chain() .context(MSG_CHAIN_NOT_FOUND_ERR)?; - let _dir_guard = shell.push_dir(&chain.link_to_code); + let _dir_guard = shell.push_dir(chain.link_to_code.join("core")); logger::info(MSG_BUILDING_CONTRACT_VERIFIER); diff --git a/zkstack_cli/crates/zkstack/src/commands/contract_verifier/init.rs b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/init.rs index b173ad9bbb7f..56f02745050c 100644 --- a/zkstack_cli/crates/zkstack/src/commands/contract_verifier/init.rs +++ b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/init.rs @@ -1,8 +1,8 @@ use std::path::{Path, PathBuf}; -use common::{cmd::Cmd, logger, spinner::Spinner}; -use config::EcosystemConfig; use xshell::{cmd, Shell}; +use zkstack_cli_common::{cmd::Cmd, logger, spinner::Spinner}; +use zkstack_cli_config::EcosystemConfig; use super::args::{init::InitContractVerifierArgs, releases::Version}; use crate::messages::{msg_binary_already_exists, msg_downloading_binary_spinner}; diff --git a/zkstack_cli/crates/zkstack/src/commands/contract_verifier/run.rs b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/run.rs index ebc33840bdea..fe4221e83a46 100644 --- a/zkstack_cli/crates/zkstack/src/commands/contract_verifier/run.rs +++ b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/run.rs @@ -1,7 +1,7 @@ use anyhow::Context; -use common::{cmd::Cmd, logger}; -use config::EcosystemConfig; use xshell::{cmd, Shell}; +use zkstack_cli_common::{cmd::Cmd, logger}; +use zkstack_cli_config::EcosystemConfig; use 
crate::messages::{ MSG_CHAIN_NOT_FOUND_ERR, MSG_FAILED_TO_RUN_CONTRACT_VERIFIER_ERR, MSG_RUNNING_CONTRACT_VERIFIER, @@ -22,7 +22,7 @@ pub(crate) async fn run(shell: &Shell) -> anyhow::Result<()> { let mut cmd = Cmd::new(cmd!( shell, - "cargo run --release --bin zksync_contract_verifier -- --config-path={config_path} --secrets-path={secrets_path}" + "cargo run --manifest-path ./core/Cargo.toml --release --bin zksync_contract_verifier -- --config-path={config_path} --secrets-path={secrets_path}" )); cmd = cmd.with_force_run(); cmd.run().context(MSG_FAILED_TO_RUN_CONTRACT_VERIFIER_ERR) diff --git a/zkstack_cli/crates/zkstack/src/commands/contract_verifier/wait.rs b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/wait.rs index 011c888d3041..0b844df61f4c 100644 --- a/zkstack_cli/crates/zkstack/src/commands/contract_verifier/wait.rs +++ b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/wait.rs @@ -1,7 +1,7 @@ use anyhow::Context as _; -use common::{config::global_config, logger}; -use config::EcosystemConfig; use xshell::Shell; +use zkstack_cli_common::{config::global_config, logger}; +use zkstack_cli_config::EcosystemConfig; use crate::{commands::args::WaitArgs, messages::MSG_CHAIN_NOT_FOUND_ERR}; diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/clean/mod.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/clean/mod.rs index 06dff541f94e..b45272cc97a8 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/clean/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/clean/mod.rs @@ -1,8 +1,8 @@ use anyhow::Context; use clap::Subcommand; -use common::{docker, logger}; -use config::{EcosystemConfig, DOCKER_COMPOSE_FILE}; use xshell::Shell; +use zkstack_cli_common::{docker, logger}; +use zkstack_cli_config::{EcosystemConfig, DOCKER_COMPOSE_FILE}; use crate::commands::dev::messages::{ MSG_CONTRACTS_CLEANING, MSG_CONTRACTS_CLEANING_FINISHED, MSG_DOCKER_COMPOSE_DOWN, diff --git 
a/zkstack_cli/crates/zkstack/src/commands/dev/commands/config_writer.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/config_writer.rs index 70238ed15f32..01a521334383 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/config_writer.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/config_writer.rs @@ -1,8 +1,8 @@ use anyhow::Context; use clap::Parser; -use common::{logger, Prompt}; -use config::{override_config, EcosystemConfig}; use xshell::Shell; +use zkstack_cli_common::{logger, Prompt}; +use zkstack_cli_config::{override_config, EcosystemConfig}; use crate::commands::dev::messages::{ msg_overriding_config, MSG_CHAIN_NOT_FOUND_ERR, MSG_OVERRIDE_CONFIG_PATH_HELP, diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/contracts.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/contracts.rs index 8e0384cbca99..a3150bc96235 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/contracts.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/contracts.rs @@ -1,18 +1,21 @@ use std::path::PathBuf; use clap::Parser; -use common::{ - contracts::{build_l1_contracts, build_l2_contracts, build_system_contracts}, +use xshell::Shell; +use zkstack_cli_common::{ + contracts::{ + build_l1_contracts, build_l1_da_contracts, build_l2_contracts, build_system_contracts, + }, logger, spinner::Spinner, }; -use config::EcosystemConfig; -use xshell::Shell; +use zkstack_cli_config::EcosystemConfig; use crate::commands::dev::messages::{ MSG_BUILDING_CONTRACTS, MSG_BUILDING_CONTRACTS_SUCCESS, MSG_BUILDING_L1_CONTRACTS_SPINNER, - MSG_BUILDING_L2_CONTRACTS_SPINNER, MSG_BUILDING_SYSTEM_CONTRACTS_SPINNER, - MSG_BUILD_L1_CONTRACTS_HELP, MSG_BUILD_L2_CONTRACTS_HELP, MSG_BUILD_SYSTEM_CONTRACTS_HELP, + MSG_BUILDING_L1_DA_CONTRACTS_SPINNER, MSG_BUILDING_L2_CONTRACTS_SPINNER, + MSG_BUILDING_SYSTEM_CONTRACTS_SPINNER, MSG_BUILD_L1_CONTRACTS_HELP, + MSG_BUILD_L1_DA_CONTRACTS_HELP, MSG_BUILD_L2_CONTRACTS_HELP, 
MSG_BUILD_SYSTEM_CONTRACTS_HELP, MSG_NOTHING_TO_BUILD_MSG, }; @@ -20,6 +23,8 @@ use crate::commands::dev::messages::{ pub struct ContractsArgs { #[clap(long, alias = "l1", help = MSG_BUILD_L1_CONTRACTS_HELP, default_missing_value = "true", num_args = 0..=1)] pub l1_contracts: Option, + #[clap(long, alias = "l1-da", help = MSG_BUILD_L1_DA_CONTRACTS_HELP, default_missing_value = "true", num_args = 0..=1)] + pub l1_da_contracts: Option, #[clap(long, alias = "l2", help = MSG_BUILD_L2_CONTRACTS_HELP, default_missing_value = "true", num_args = 0..=1)] pub l2_contracts: Option, #[clap(long, alias = "sc", help = MSG_BUILD_SYSTEM_CONTRACTS_HELP, default_missing_value = "true", num_args = 0..=1)] @@ -31,9 +36,11 @@ impl ContractsArgs { if self.l1_contracts.is_none() && self.l2_contracts.is_none() && self.system_contracts.is_none() + && self.l1_da_contracts.is_none() { return vec![ ContractType::L1, + ContractType::L1DA, ContractType::L2, ContractType::SystemContracts, ]; @@ -43,6 +50,9 @@ impl ContractsArgs { if self.l1_contracts.unwrap_or(false) { contracts.push(ContractType::L1); } + if self.l1_da_contracts.unwrap_or(false) { + contracts.push(ContractType::L1DA); + } if self.l2_contracts.unwrap_or(false) { contracts.push(ContractType::L2); } @@ -56,6 +66,7 @@ impl ContractsArgs { #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum ContractType { L1, + L1DA, L2, SystemContracts, } @@ -74,6 +85,11 @@ impl ContractBuilder { msg: MSG_BUILDING_L1_CONTRACTS_SPINNER.to_string(), link_to_code: ecosystem.link_to_code.clone(), }, + ContractType::L1DA => Self { + cmd: Box::new(build_l1_da_contracts), + msg: MSG_BUILDING_L1_DA_CONTRACTS_SPINNER.to_string(), + link_to_code: ecosystem.link_to_code.clone(), + }, ContractType::L2 => Self { cmd: Box::new(build_l2_contracts), msg: MSG_BUILDING_L2_CONTRACTS_SPINNER.to_string(), diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/args/new_migration.rs 
b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/args/new_migration.rs index b91b048be784..74e40ab53303 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/args/new_migration.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/args/new_migration.rs @@ -1,6 +1,6 @@ use clap::{Parser, ValueEnum}; -use common::{Prompt, PromptSelect}; use strum::{Display, EnumIter, IntoEnumIterator}; +use zkstack_cli_common::{Prompt, PromptSelect}; use crate::commands::dev::messages::{ MSG_DATABASE_NEW_MIGRATION_DATABASE_HELP, MSG_DATABASE_NEW_MIGRATION_DB_PROMPT, diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/check_sqlx_data.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/check_sqlx_data.rs index 990fca78641f..abead3fe00d6 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/check_sqlx_data.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/check_sqlx_data.rs @@ -1,8 +1,8 @@ use std::path::Path; -use common::{cmd::Cmd, logger, spinner::Spinner}; -use config::EcosystemConfig; use xshell::{cmd, Shell}; +use zkstack_cli_common::{cmd::Cmd, logger, spinner::Spinner}; +use zkstack_cli_config::EcosystemConfig; use super::args::DatabaseCommonArgs; use crate::commands::dev::{ diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/drop.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/drop.rs index a5578d41f77a..e46a434cec06 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/drop.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/drop.rs @@ -1,9 +1,9 @@ -use common::{ +use xshell::Shell; +use zkstack_cli_common::{ db::{drop_db_if_exists, DatabaseConfig}, logger, spinner::Spinner, }; -use xshell::Shell; use super::args::DatabaseCommonArgs; use crate::commands::dev::{ diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/migrate.rs 
b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/migrate.rs index fd22f769742e..8c21262c0712 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/migrate.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/migrate.rs @@ -1,8 +1,8 @@ use std::path::Path; -use common::{cmd::Cmd, logger, spinner::Spinner}; -use config::EcosystemConfig; use xshell::{cmd, Shell}; +use zkstack_cli_common::{cmd::Cmd, logger, spinner::Spinner}; +use zkstack_cli_config::EcosystemConfig; use super::args::DatabaseCommonArgs; use crate::commands::dev::{ @@ -42,11 +42,6 @@ fn migrate_database(shell: &Shell, link_to_code: impl AsRef, dal: Dal) -> MSG_DATABASE_MIGRATE_GERUND, &dal.path, )); - Cmd::new(cmd!( - shell, - "cargo sqlx database create --database-url {url}" - )) - .run()?; Cmd::new(cmd!(shell, "cargo sqlx migrate run --database-url {url}")).run()?; spinner.finish(); diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/new_migration.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/new_migration.rs index 2d9fa1030538..655a841e060a 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/new_migration.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/new_migration.rs @@ -1,8 +1,8 @@ use std::path::Path; -use common::{cmd::Cmd, logger, spinner::Spinner}; -use config::EcosystemConfig; use xshell::{cmd, Shell}; +use zkstack_cli_common::{cmd::Cmd, logger, spinner::Spinner}; +use zkstack_cli_config::EcosystemConfig; use super::args::new_migration::{DatabaseNewMigrationArgs, SelectedDatabase}; use crate::commands::dev::{ diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/prepare.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/prepare.rs index 288a68452fd5..82c9ed2e338b 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/prepare.rs +++ 
b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/prepare.rs @@ -1,8 +1,8 @@ use std::path::Path; -use common::{cmd::Cmd, logger, spinner::Spinner}; -use config::EcosystemConfig; use xshell::{cmd, Shell}; +use zkstack_cli_common::{cmd::Cmd, logger, spinner::Spinner}; +use zkstack_cli_config::EcosystemConfig; use super::args::DatabaseCommonArgs; use crate::commands::dev::{ diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/reset.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/reset.rs index 55d5ab1cbfcb..4a9ec022d723 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/reset.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/reset.rs @@ -1,8 +1,8 @@ use std::path::Path; -use common::logger; -use config::EcosystemConfig; use xshell::Shell; +use zkstack_cli_common::logger; +use zkstack_cli_config::EcosystemConfig; use super::{args::DatabaseCommonArgs, drop::drop_database, setup::setup_database}; use crate::commands::dev::{ diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/setup.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/setup.rs index 74ade66ba481..4eba9b615fc0 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/setup.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/setup.rs @@ -1,8 +1,8 @@ use std::path::Path; -use common::{cmd::Cmd, logger, spinner::Spinner}; -use config::EcosystemConfig; use xshell::{cmd, Shell}; +use zkstack_cli_common::{cmd::Cmd, logger, spinner::Spinner}; +use zkstack_cli_config::EcosystemConfig; use super::args::DatabaseCommonArgs; use crate::commands::dev::{ diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/fmt.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/fmt.rs index 569d2a61294e..82594cccf484 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/fmt.rs +++ 
b/zkstack_cli/crates/zkstack/src/commands/dev/commands/fmt.rs @@ -1,9 +1,9 @@ use std::path::PathBuf; use clap::Parser; -use common::{cmd::Cmd, logger, spinner::Spinner}; -use config::EcosystemConfig; use xshell::{cmd, Shell}; +use zkstack_cli_common::{cmd::Cmd, logger, spinner::Spinner}; +use zkstack_cli_config::EcosystemConfig; use super::sql_fmt::format_sql; use crate::commands::dev::{ @@ -42,7 +42,7 @@ async fn prettier_contracts(shell: Shell, check: bool) -> anyhow::Result<()> { } async fn rustfmt(shell: Shell, check: bool, link_to_code: PathBuf) -> anyhow::Result<()> { - for dir in [".", "prover", "zkstack_cli"] { + for dir in ["core", "prover", "zkstack_cli"] { let spinner = Spinner::new(&msg_running_rustfmt_for_dir_spinner(dir)); let _dir = shell.push_dir(link_to_code.join(dir)); let mut cmd = cmd!(shell, "cargo fmt -- --config imports_granularity=Crate --config group_imports=StdExternalCrate"); diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/gateway.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/gateway.rs new file mode 100644 index 000000000000..ea8f96de6bcd --- /dev/null +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/gateway.rs @@ -0,0 +1,815 @@ +use std::{num::NonZeroUsize, str::FromStr, sync::Arc}; + +use anyhow::Context; +use clap::{Parser, ValueEnum}; +use ethers::{ + abi::{encode, parse_abi, Token}, + contract::{abigen, BaseContract}, + providers::{Http, Middleware, Provider}, + utils::hex, +}; +use serde::{Deserialize, Serialize}; +use strum::EnumIter; +use xshell::Shell; +use zkstack_cli_config::{ + forge_interface::gateway_ecosystem_upgrade::output::GatewayEcosystemUpgradeOutput, + traits::{ReadConfig, ZkStackConfig}, + ContractsConfig, +}; +use zksync_contracts::{chain_admin_contract, hyperchain_contract, DIAMOND_CUT}; +use zksync_types::{ + ethabi, + url::SensitiveUrl, + web3::{keccak256, Bytes}, + Address, L1BatchNumber, L2BlockNumber, L2ChainId, ProtocolVersionId, H256, + L2_NATIVE_TOKEN_VAULT_ADDRESS, 
U256, +}; +use zksync_web3_decl::{ + client::{Client, DynClient, L2}, + namespaces::{UnstableNamespaceClient, ZksNamespaceClient}, +}; + +/// To support both functionality of assignment inside local tests +/// and to print out the changes to the user the following function is used. +#[macro_export] +macro_rules! assign_or_print { + ($statement:expr, $value:expr, $should_assign:expr) => { + if $should_assign { + $statement = $value; + } else { + println!("{} = {:#?}", stringify!($statement), $value); + } + }; +} + +#[macro_export] +macro_rules! amend_config_pre_upgrade { + () => { + assign_or_print!() + }; +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub(crate) struct GatewayUpgradeInfo { + // Information about pre-upgrade contracts. + l1_chain_id: u32, + bridgehub_addr: Address, + old_validator_timelock: Address, + l1_legacy_shared_bridge: Address, + + // Information about the post-upgrade contracts. + ctm_deployment_tracker_proxy_addr: Address, + native_token_vault_addr: Address, + l1_bytecodes_supplier_addr: Address, + rollup_l1_da_validator_addr: Address, + no_da_validium_l1_validator_addr: Address, + expected_rollup_l2_da_validator: Address, + expected_validium_l2_da_validator: Address, + new_validator_timelock: Address, + + l1_wrapped_base_token_store: Address, + chain_upgrade_diamond_cut: Bytes, + + new_protocol_version: u64, + old_protocol_version: u64, +} + +#[derive(Debug, Default)] +pub struct FetchedChainInfo { + l2_legacy_shared_bridge_addr: Address, + hyperchain_addr: Address, + base_token_addr: Address, +} + +// Bridgehub ABI +abigen!( + BridgehubAbi, + r"[ + function getHyperchain(uint256)(address) +]" +); + +// L1SharedBridgeLegacyStore ABI +abigen!( + L1SharedBridgeLegacyAbi, + r"[ + function l2BridgeAddress(uint256 _chainId)(address) +]" +); + +// L2WrappedBaseTokenStore ABI +abigen!( + L2WrappedBaseTokenStoreAbi, + r"[ + function l2WBaseTokenAddress(uint256 _chainId)(address) +]" +); + +// ZKChain ABI +abigen!( + ZKChainAbi, + r"[ + 
function getPubdataPricingMode()(uint256) + function getBaseToken()(address) + function getTotalBatchesCommitted() external view returns (uint256) + function getTotalBatchesVerified() external view returns (uint256) +]" +); + +// ZKChain ABI +abigen!( + ValidatorTimelockAbi, + r"[ + function validators(uint256 _chainId, address _validator)(bool) +]" +); + +async fn verify_next_batch_new_version( + batch_number: u32, + main_node_client: &DynClient, +) -> anyhow::Result<()> { + let (_, right_bound) = main_node_client + .get_l2_block_range(L1BatchNumber(batch_number)) + .await? + .context("Range must be present for a batch")?; + + let next_l2_block = right_bound + 1; + + let block_details = main_node_client + .get_block_details(L2BlockNumber(next_l2_block.as_u32())) + .await? + .with_context(|| format!("No L2 block is present after the batch {}", batch_number))?; + + let protocol_version = block_details.protocol_version.with_context(|| { + format!( + "Protocol version not present for block {}", + next_l2_block.as_u64() + ) + })?; + anyhow::ensure!( + protocol_version >= ProtocolVersionId::gateway_upgrade(), + "THe block does not yet contain the gateway upgrade" + ); + + Ok(()) +} + +pub async fn check_chain_readiness( + l1_rpc_url: String, + l2_rpc_url: String, + l2_chain_id: u64, +) -> anyhow::Result<()> { + let l1_provider = match Provider::::try_from(&l1_rpc_url) { + Ok(provider) => provider, + Err(err) => { + anyhow::bail!("Connection error: {:#?}", err); + } + }; + let l1_client = Arc::new(l1_provider); + + let l2_client = Client::http(SensitiveUrl::from_str(&l2_rpc_url).unwrap()) + .context("failed creating JSON-RPC client for main node")? 
+ .for_network(L2ChainId::new(l2_chain_id).unwrap().into()) + .with_allowed_requests_per_second(NonZeroUsize::new(100_usize).unwrap()) + .build(); + let l2_client = Box::new(l2_client) as Box>; + + let inflight_txs_count: usize = l2_client.get_unconfirmed_txs_count().await?; + let diamond_proxy_addr = l2_client.get_main_contract().await?; + + if inflight_txs_count != 0 { + anyhow::bail!("Chain not ready since there are inflight txs!"); + } + + let zkchain = ZKChainAbi::new(diamond_proxy_addr, l1_client.clone()); + let batches_committed = zkchain.get_total_batches_committed().await?.as_u32(); + let batches_verified = zkchain.get_total_batches_verified().await?.as_u32(); + + verify_next_batch_new_version(batches_committed, l2_client.as_ref()).await?; + verify_next_batch_new_version(batches_verified, l2_client.as_ref()).await?; + + Ok(()) +} + +async fn verify_correct_l2_wrapped_base_token( + l2_rpc_url: String, + addr: Address, +) -> anyhow::Result<()> { + // Connect to the L1 Ethereum network + let l2_provider = match Provider::::try_from(&l2_rpc_url) { + Ok(provider) => provider, + Err(err) => { + anyhow::bail!("Connection error: {:#?}", err); + } + }; + + let code = l2_provider.get_code(addr, None).await?; + + if code.len() == 0 { + anyhow::bail!("L2 wrapped base token code can not be empty"); + } + + // TODO(EVM-939): also verify that the code is correct. 
+ + Ok(()) +} + +pub async fn fetch_chain_info( + upgrade_info: &GatewayUpgradeInfo, + args: &GatewayUpgradeArgsInner, +) -> anyhow::Result { + // Connect to the L1 Ethereum network + let provider = match Provider::::try_from(&args.l1_rpc_url) { + Ok(provider) => provider, + Err(err) => { + anyhow::bail!("Connection error: {:#?}", err); + } + }; + + let client = Arc::new(provider); + let chain_id = U256::from(args.chain_id); + + let bridgehub = BridgehubAbi::new(upgrade_info.bridgehub_addr, client.clone()); + let hyperchain_addr = bridgehub.get_hyperchain(chain_id).await?; + if hyperchain_addr == Address::zero() { + anyhow::bail!("Chain not present in bridgehub"); + } + let l1_legacy_bridge = + L1SharedBridgeLegacyAbi::new(upgrade_info.l1_legacy_shared_bridge, client.clone()); + + let l2_legacy_shared_bridge_addr = l1_legacy_bridge.l_2_bridge_address(chain_id).await?; + // Creation of the shared bridge is one of the steps for chain creation, + // so it is very weird that a chain does not have it, so we fail here. + anyhow::ensure!( + l2_legacy_shared_bridge_addr != Address::zero(), + "Chain not registered inside the L1 shared bridge!" + ); + + let l2_wrapped_base_token_store = + L2WrappedBaseTokenStoreAbi::new(upgrade_info.l1_wrapped_base_token_store, client.clone()); + + let l2_predeployed_wrapped_base_token = l2_wrapped_base_token_store + .l_2w_base_token_address(chain_id) + .await?; + + // Even in case the user does not want the script to fail due to this issue, + // we still display it just in case. + if l2_predeployed_wrapped_base_token == Address::zero() && args.dangerous_no_cross_check { + println!("\n\nWARNING: the chain does not contain wrapped base token. 
It is dangerous since the security of it depends on the ecosystem admin\n\n"); + } + + let zkchain = ZKChainAbi::new(hyperchain_addr, client.clone()); + + let base_token_addr = zkchain.get_base_token().await?; + + if !args.dangerous_no_cross_check { + // Firstly, check that the validators are present in the current timelock + let old_timelock = + ValidatorTimelockAbi::new(upgrade_info.old_validator_timelock, client.clone()); + + if !old_timelock + .validators(chain_id, args.validator_addr1) + .await? + { + anyhow::bail!( + "{} not validator", + hex_address_display(args.validator_addr1) + ); + } + if !old_timelock + .validators(chain_id, args.validator_addr2) + .await? + { + anyhow::bail!( + "{} not validator", + hex_address_display(args.validator_addr2) + ); + } + + if l2_predeployed_wrapped_base_token == Address::zero() { + anyhow::bail!("the chain does not contain wrapped base token. It is dangerous since the security of it depends on the ecosystem admin"); + } + + verify_correct_l2_wrapped_base_token( + args.l2_rpc_url.clone(), + l2_predeployed_wrapped_base_token, + ) + .await?; + + // Secondly, we check that the DA layer corresponds to the current pubdata pricing mode. + + // On L1 it is an enum with 0 meaaning a rollup and 1 meaning a validium. + // In the old version, it denoted how the pubdata will be checked. 
We use it to cross-check the + // user's input + let pricing_mode = zkchain.get_pubdata_pricing_mode().await?; + let pricing_mode_rollup = pricing_mode == U256::zero(); + + if args.da_mode.is_rollup() != pricing_mode_rollup { + anyhow::bail!("DA mode in consistent with the current system"); + } + } + + Ok(FetchedChainInfo { + l2_legacy_shared_bridge_addr, + hyperchain_addr, + base_token_addr, + }) +} + +impl ZkStackConfig for GatewayUpgradeInfo {} + +pub fn encode_ntv_asset_id(l1_chain_id: U256, addr: Address) -> H256 { + let encoded_data = encode(&[ + ethers::abi::Token::Uint(l1_chain_id), + ethers::abi::Token::Address(L2_NATIVE_TOKEN_VAULT_ADDRESS), + ethers::abi::Token::Address(addr), + ]); + + H256(keccak256(&encoded_data)) +} + +impl GatewayUpgradeInfo { + pub fn from_gateway_ecosystem_upgrade( + bridgehub_addr: Address, + gateway_ecosystem_upgrade: GatewayEcosystemUpgradeOutput, + ) -> Self { + Self { + l1_chain_id: gateway_ecosystem_upgrade.l1_chain_id, + bridgehub_addr, + old_validator_timelock: gateway_ecosystem_upgrade + .contracts_config + .old_validator_timelock, + l1_legacy_shared_bridge: gateway_ecosystem_upgrade + .contracts_config + .l1_legacy_shared_bridge, + ctm_deployment_tracker_proxy_addr: gateway_ecosystem_upgrade + .deployed_addresses + .bridgehub + .ctm_deployment_tracker_proxy_addr, + native_token_vault_addr: gateway_ecosystem_upgrade + .deployed_addresses + .native_token_vault_addr, + l1_bytecodes_supplier_addr: gateway_ecosystem_upgrade + .deployed_addresses + .l1_bytecodes_supplier_addr, + rollup_l1_da_validator_addr: gateway_ecosystem_upgrade + .deployed_addresses + .rollup_l1_da_validator_addr, + no_da_validium_l1_validator_addr: gateway_ecosystem_upgrade + .deployed_addresses + .validium_l1_da_validator_addr, + expected_rollup_l2_da_validator: gateway_ecosystem_upgrade + .contracts_config + .expected_rollup_l2_da_validator, + expected_validium_l2_da_validator: gateway_ecosystem_upgrade + .contracts_config + 
.expected_validium_l2_da_validator, + new_validator_timelock: gateway_ecosystem_upgrade + .deployed_addresses + .validator_timelock_addr, + // Note that on the contract side of things this contract is called `L2WrappedBaseTokenStore`, + // while on the server side for consistency with the conventions, where the prefix denotes + // the location of the contracts we call it `l1_wrapped_base_token_store` + l1_wrapped_base_token_store: gateway_ecosystem_upgrade + .deployed_addresses + .l2_wrapped_base_token_store_addr, + chain_upgrade_diamond_cut: gateway_ecosystem_upgrade.chain_upgrade_diamond_cut, + new_protocol_version: gateway_ecosystem_upgrade + .contracts_config + .new_protocol_version, + old_protocol_version: gateway_ecosystem_upgrade + .contracts_config + .old_protocol_version, + } + } + + fn get_l1_da_validator(&self, da_mode: DAMode) -> Address { + if da_mode.is_rollup() { + self.rollup_l1_da_validator_addr + } else { + self.no_da_validium_l1_validator_addr + } + } + + fn get_l2_da_validator(&self, da_mode: DAMode) -> Address { + if da_mode.is_rollup() { + self.expected_rollup_l2_da_validator + } else { + self.expected_validium_l2_da_validator + } + } + + pub fn update_contracts_config( + &self, + contracts_config: &mut ContractsConfig, + chain_info: &FetchedChainInfo, + da_mode: DAMode, + assign: bool, + ) { + assign_or_print!( + contracts_config.l2.legacy_shared_bridge_addr, + Some(chain_info.l2_legacy_shared_bridge_addr), + assign + ); + + let base_token_id = + encode_ntv_asset_id(U256::from(self.l1_chain_id), chain_info.base_token_addr); + assign_or_print!( + contracts_config.l1.base_token_asset_id, + Some(base_token_id), + assign + ); + + assign_or_print!( + contracts_config + .ecosystem_contracts + .l1_wrapped_base_token_store, + Some(self.l1_wrapped_base_token_store), + assign + ); + + assign_or_print!( + contracts_config + .ecosystem_contracts + .stm_deployment_tracker_proxy_addr, + Some(self.ctm_deployment_tracker_proxy_addr), + assign + ); + 
assign_or_print!( + contracts_config + .ecosystem_contracts + .stm_deployment_tracker_proxy_addr, + Some(self.ctm_deployment_tracker_proxy_addr), + assign + ); + assign_or_print!( + contracts_config.ecosystem_contracts.native_token_vault_addr, + Some(self.native_token_vault_addr), + assign + ); + assign_or_print!( + contracts_config + .ecosystem_contracts + .l1_bytecodes_supplier_addr, + Some(self.l1_bytecodes_supplier_addr), + assign + ); + assign_or_print!( + contracts_config.l1.rollup_l1_da_validator_addr, + Some(self.rollup_l1_da_validator_addr), + assign + ); + assign_or_print!( + contracts_config.l1.no_da_validium_l1_validator_addr, + Some(self.no_da_validium_l1_validator_addr), + assign + ); + + assign_or_print!( + contracts_config.l2.da_validator_addr, + Some(self.get_l2_da_validator(da_mode)), + assign + ); + + assign_or_print!( + contracts_config.l2.l2_native_token_vault_proxy_addr, + Some(L2_NATIVE_TOKEN_VAULT_ADDRESS), + assign + ); + } + + // Updates to the config that should be done somewhere after the upgrade is fully over. + // They do not have to updated for the system to work smoothly during the upgrade, but after + // "stage 2" they are desirable to be updated for consistency + pub fn _post_upgrade_update_contracts_config( + &self, + _config: &mut ContractsConfig, + _assign: bool, + ) { + todo!() + } +} + +#[derive( + Debug, Serialize, Deserialize, Clone, Copy, ValueEnum, EnumIter, strum::Display, PartialEq, Eq, +)] +pub(crate) enum DAMode { + Validium, + TemporaryRollup, + PermanentRollup, +} + +impl DAMode { + fn is_rollup(&self) -> bool { + matches!(self, Self::TemporaryRollup | Self::PermanentRollup) + } +} + +#[derive(Debug, Clone, Serialize)] +struct AdminCall { + description: String, + target: Address, + #[serde(serialize_with = "serialize_hex")] + data: Vec, + value: U256, +} + +impl AdminCall { + fn into_token(self) -> Token { + let Self { + target, + data, + value, + .. 
+ } = self; + Token::Tuple(vec![ + Token::Address(target), + Token::Uint(value), + Token::Bytes(data), + ]) + } +} + +fn hex_address_display(addr: Address) -> String { + format!("0x{}", hex::encode(addr.0)) +} + +fn serialize_hex(bytes: &Vec, serializer: S) -> Result +where + S: serde::Serializer, +{ + let hex_string = format!("0x{}", hex::encode(bytes)); + serializer.serialize_str(&hex_string) +} + +#[derive(Debug, Clone)] +pub struct AdminCallBuilder { + calls: Vec, + validator_timelock_abi: BaseContract, + zkchain_abi: ethabi::Contract, + chain_admin_abi: ethabi::Contract, +} + +impl AdminCallBuilder { + pub fn new() -> Self { + Self { + calls: vec![], + validator_timelock_abi: BaseContract::from( + parse_abi(&[ + "function addValidator(uint256 _chainId, address _newValidator) external", + ]) + .unwrap(), + ), + zkchain_abi: hyperchain_contract(), + chain_admin_abi: chain_admin_contract(), + } + } + + pub fn append_validator( + &mut self, + chain_id: u64, + validator_timelock_addr: Address, + validator_addr: Address, + ) { + let data = self + .validator_timelock_abi + .encode("addValidator", (U256::from(chain_id), validator_addr)) + .unwrap(); + let description = format!( + "Adding validator 0x{}", + hex::encode(validator_timelock_addr.0) + ); + + let call = AdminCall { + description, + data: data.to_vec(), + target: validator_timelock_addr, + value: U256::zero(), + }; + + self.calls.push(call); + } + + pub fn append_execute_upgrade( + &mut self, + hyperchain_addr: Address, + protocol_version: u64, + diamond_cut_data: Bytes, + ) { + let diamond_cut = DIAMOND_CUT.decode_input(&diamond_cut_data.0).unwrap()[0].clone(); + + let data = self + .zkchain_abi + .function("upgradeChainFromVersion") + .unwrap() + .encode_input(&[Token::Uint(protocol_version.into()), diamond_cut]) + .unwrap(); + let description = "Executing upgrade:".to_string(); + + let call = AdminCall { + description, + data: data.to_vec(), + target: hyperchain_addr, + value: U256::zero(), + }; + + 
self.calls.push(call); + } + + pub fn append_set_da_validator_pair( + &mut self, + hyperchain_addr: Address, + l1_da_validator: Address, + l2_da_validator: Address, + ) { + let data = self + .zkchain_abi + .function("setDAValidatorPair") + .unwrap() + .encode_input(&[ + Token::Address(l1_da_validator), + Token::Address(l2_da_validator), + ]) + .unwrap(); + let description = "Executing upgrade:".to_string(); + + let call = AdminCall { + description, + data: data.to_vec(), + target: hyperchain_addr, + value: U256::zero(), + }; + + self.calls.push(call); + } + + pub fn append_make_permanent_rollup(&mut self, hyperchain_addr: Address) { + let data = self + .zkchain_abi + .function("makePermanentRollup") + .unwrap() + .encode_input(&[]) + .unwrap(); + let description = "Make permanent rollup:".to_string(); + + let call = AdminCall { + description, + data: data.to_vec(), + target: hyperchain_addr, + value: U256::zero(), + }; + + self.calls.push(call); + } + + pub fn display(&self) { + // Serialize with pretty printing + let serialized = serde_json::to_string_pretty(&self.calls).unwrap(); + + // Output the serialized JSON + println!("{}", serialized); + } + + pub fn compile_full_calldata(self) -> Vec { + let tokens: Vec<_> = self.calls.into_iter().map(|x| x.into_token()).collect(); + + let data = self + .chain_admin_abi + .function("multicall") + .unwrap() + .encode_input(&[Token::Array(tokens), Token::Bool(true)]) + .unwrap(); + + data.to_vec() + } +} + +fn chain_admin_abi() -> BaseContract { + BaseContract::from( + parse_abi(&[ + "function setUpgradeTimestamp(uint256 _protocolVersion, uint256 _upgradeTimestamp) external", + ]) + .unwrap(), + ) +} + +pub fn set_upgrade_timestamp_calldata(packed_protocol_version: u64, timestamp: u64) -> Vec { + let chain_admin = chain_admin_abi(); + + chain_admin + .encode("setUpgradeTimestamp", (packed_protocol_version, timestamp)) + .unwrap() + .to_vec() +} + +#[derive(Parser, Debug, Clone)] +pub struct GatewayUpgradeCalldataArgs { + 
upgrade_description_path: String, + chain_id: u64, + l1_rpc_url: String, + l2_rpc_url: String, + validator_addr1: Address, + validator_addr2: Address, + server_upgrade_timestamp: u64, + da_mode: DAMode, + #[clap(long, default_missing_value = "false")] + dangerous_no_cross_check: Option, +} + +pub struct GatewayUpgradeArgsInner { + pub chain_id: u64, + pub l1_rpc_url: String, + pub l2_rpc_url: String, + pub validator_addr1: Address, + pub validator_addr2: Address, + pub da_mode: DAMode, + pub dangerous_no_cross_check: bool, +} + +impl From for GatewayUpgradeArgsInner { + fn from(value: GatewayUpgradeCalldataArgs) -> Self { + Self { + chain_id: value.chain_id, + l1_rpc_url: value.l1_rpc_url, + l2_rpc_url: value.l2_rpc_url, + validator_addr1: value.validator_addr1, + validator_addr2: value.validator_addr2, + da_mode: value.da_mode, + dangerous_no_cross_check: value.dangerous_no_cross_check.unwrap_or_default(), + } + } +} + +pub fn get_admin_call_builder( + upgrade_info: &GatewayUpgradeInfo, + chain_info: &FetchedChainInfo, + args: GatewayUpgradeArgsInner, +) -> AdminCallBuilder { + let mut admin_calls_finalize = AdminCallBuilder::new(); + + admin_calls_finalize.append_validator( + args.chain_id, + upgrade_info.new_validator_timelock, + args.validator_addr1, + ); + admin_calls_finalize.append_validator( + args.chain_id, + upgrade_info.new_validator_timelock, + args.validator_addr2, + ); + + admin_calls_finalize.append_execute_upgrade( + chain_info.hyperchain_addr, + upgrade_info.old_protocol_version, + upgrade_info.chain_upgrade_diamond_cut.clone(), + ); + + admin_calls_finalize.append_set_da_validator_pair( + chain_info.hyperchain_addr, + upgrade_info.get_l1_da_validator(args.da_mode), + upgrade_info.get_l2_da_validator(args.da_mode), + ); + + if args.da_mode == DAMode::PermanentRollup { + admin_calls_finalize.append_make_permanent_rollup(chain_info.hyperchain_addr); + } + + admin_calls_finalize +} + +pub(crate) async fn run(shell: &Shell, args: 
GatewayUpgradeCalldataArgs) -> anyhow::Result<()> { + // 0. Read the GatewayUpgradeInfo + + let upgrade_info = GatewayUpgradeInfo::read(shell, &args.upgrade_description_path)?; + + // 1. Update all the configs + + let chain_info = fetch_chain_info(&upgrade_info, &args.clone().into()).await?; + + upgrade_info.update_contracts_config(&mut Default::default(), &chain_info, args.da_mode, false); + + // 2. Generate calldata + + let schedule_calldata = set_upgrade_timestamp_calldata( + args.server_upgrade_timestamp, + upgrade_info.new_protocol_version, + ); + + println!( + "Calldata to schedule upgrade: {}", + hex::encode(&schedule_calldata) + ); + + let admin_calls_finalize = get_admin_call_builder(&upgrade_info, &chain_info, args.into()); + + admin_calls_finalize.display(); + + let chain_admin_calldata = admin_calls_finalize.compile_full_calldata(); + + println!( + "Full calldata to call `ChainAdmin` with : {}", + hex::encode(&chain_admin_calldata) + ); + + Ok(()) +} diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/genesis.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/genesis.rs index 683ffe199161..8e7a3973e037 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/genesis.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/genesis.rs @@ -1,7 +1,7 @@ use anyhow::Context; -use common::{cmd::Cmd, spinner::Spinner}; -use config::EcosystemConfig; use xshell::{cmd, Shell}; +use zkstack_cli_common::{cmd::Cmd, spinner::Spinner}; +use zkstack_cli_config::EcosystemConfig; use crate::{ commands::dev::{ diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/lint.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/lint.rs index fa17ef518901..7ba6fe27481a 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/lint.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/lint.rs @@ -6,9 +6,9 @@ use std::{ use anyhow::{bail, Context}; use clap::Parser; -use common::{cmd::Cmd, logger, spinner::Spinner}; 
-use config::EcosystemConfig; use xshell::{cmd, Shell}; +use zkstack_cli_common::{cmd::Cmd, logger, spinner::Spinner}; +use zkstack_cli_config::EcosystemConfig; use crate::commands::{ autocomplete::{autocomplete_file_name, generate_completions}, @@ -70,12 +70,12 @@ pub fn run(shell: &Shell, args: LintArgs) -> anyhow::Result<()> { fn lint_rs(shell: &Shell, ecosystem: &EcosystemConfig, check: bool) -> anyhow::Result<()> { let spinner = Spinner::new(&msg_running_linter_for_extension_spinner(&Target::Rs)); - let link_to_code = &ecosystem.link_to_code; + let link_to_core = &ecosystem.link_to_code.join("core"); let lint_to_prover = &ecosystem.link_to_code.join("prover"); let link_to_zkstack = &ecosystem.link_to_code.join("zkstack_cli"); spinner.freeze(); - for path in [link_to_code, lint_to_prover, link_to_zkstack] { + for path in [link_to_core, lint_to_prover, link_to_zkstack] { let _dir_guard = shell.push_dir(path); let mut cmd = cmd!(shell, "cargo clippy"); let mut common_args = vec!["--locked", "--", "-D", "warnings"]; diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/mod.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/mod.rs index a292168dc6e0..ac41d76b3c4e 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/mod.rs @@ -3,6 +3,8 @@ pub mod config_writer; pub mod contracts; pub mod database; pub mod fmt; +#[cfg(feature = "gateway")] +pub mod gateway; pub mod genesis; pub mod lint; pub(crate) mod lint_utils; diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/prover/args/insert_batch.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/prover/args/insert_batch.rs index e837bbe9eb86..355a750b9e09 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/prover/args/insert_batch.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/prover/args/insert_batch.rs @@ -19,7 +19,7 @@ pub struct InsertBatchArgsFinal { impl InsertBatchArgs { 
pub(crate) fn fill_values_with_prompts(self, era_version: String) -> InsertBatchArgsFinal { let number = self.number.unwrap_or_else(|| { - common::Prompt::new("Enter the number of the batch to insert").ask() + zkstack_cli_common::Prompt::new("Enter the number of the batch to insert").ask() }); if self.default { @@ -30,7 +30,7 @@ impl InsertBatchArgs { } let version = self.version.unwrap_or_else(|| { - common::Prompt::new("Enter the version of the batch to insert") + zkstack_cli_common::Prompt::new("Enter the version of the batch to insert") .default(&era_version) .ask() }); diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/prover/args/insert_version.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/prover/args/insert_version.rs index 7af98c4a7a43..72a6cd14bcba 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/prover/args/insert_version.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/prover/args/insert_version.rs @@ -35,21 +35,23 @@ impl InsertVersionArgs { } let version = self.version.unwrap_or_else(|| { - common::Prompt::new("Enter the version of the protocol to insert") + zkstack_cli_common::Prompt::new("Enter the version of the protocol to insert") .default(&era_version) .ask() }); let snark_wrapper = self.snark_wrapper.unwrap_or_else(|| { - common::Prompt::new("Enter the snark wrapper of the protocol to insert") + zkstack_cli_common::Prompt::new("Enter the snark wrapper of the protocol to insert") .default(&snark_wrapper) .ask() }); let fflonk_snark_wrapper = self.fflonk_snark_wrapper.unwrap_or_else(|| { - common::Prompt::new("Enter the fflonk snark wrapper of the protocol to insert") - .default(&fflonk_snark_wrapper) - .ask() + zkstack_cli_common::Prompt::new( + "Enter the fflonk snark wrapper of the protocol to insert", + ) + .default(&fflonk_snark_wrapper) + .ask() }); InsertVersionArgsFinal { diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/prover/info.rs 
b/zkstack_cli/crates/zkstack/src/commands/dev/commands/prover/info.rs index baf0e6f881cd..44d80d48d0ce 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/prover/info.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/prover/info.rs @@ -4,9 +4,9 @@ use std::{ }; use anyhow::Context as _; -use common::logger; -use config::{ChainConfig, EcosystemConfig}; use xshell::{cmd, Shell}; +use zkstack_cli_common::logger; +use zkstack_cli_config::{ChainConfig, EcosystemConfig}; use crate::commands::dev::messages::MSG_CHAIN_NOT_FOUND_ERR; diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/prover/insert_batch.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/prover/insert_batch.rs index 0e0c0ba33af4..60c4dd3b88c1 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/prover/insert_batch.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/prover/insert_batch.rs @@ -1,6 +1,6 @@ -use common::{check_prerequisites, cmd::Cmd, logger, PROVER_CLI_PREREQUISITE}; -use config::{get_link_to_prover, EcosystemConfig}; use xshell::{cmd, Shell}; +use zkstack_cli_common::{check_prerequisites, cmd::Cmd, logger, PROVER_CLI_PREREQUISITE}; +use zkstack_cli_config::{get_link_to_prover, EcosystemConfig}; use crate::commands::dev::{ commands::prover::{ diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/prover/insert_version.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/prover/insert_version.rs index 86cd73c926b2..18c182ca9057 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/prover/insert_version.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/prover/insert_version.rs @@ -1,6 +1,6 @@ -use common::{check_prerequisites, cmd::Cmd, logger, PROVER_CLI_PREREQUISITE}; -use config::{get_link_to_prover, EcosystemConfig}; use xshell::{cmd, Shell}; +use zkstack_cli_common::{check_prerequisites, cmd::Cmd, logger, PROVER_CLI_PREREQUISITE}; +use zkstack_cli_config::{get_link_to_prover, 
EcosystemConfig}; use crate::commands::dev::{ commands::prover::{ diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/send_transactions/args/mod.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/send_transactions/args/mod.rs index 03d9ec9b7360..a7bb415fd4da 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/send_transactions/args/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/send_transactions/args/mod.rs @@ -1,8 +1,8 @@ use std::path::PathBuf; use clap::Parser; -use common::Prompt; use url::Url; +use zkstack_cli_common::Prompt; use crate::commands::dev::{ defaults::LOCAL_RPC_URL, diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/send_transactions/mod.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/send_transactions/mod.rs index 2f54579ade9e..0607747c8a1c 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/send_transactions/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/send_transactions/mod.rs @@ -9,12 +9,12 @@ use std::{ use anyhow::Context; use args::SendTransactionsArgs; use chrono::Local; -use common::{ethereum::create_ethers_client, logger}; -use config::EcosystemConfig; use ethers::{abi::Bytes, providers::Middleware, types::TransactionRequest, utils::hex}; use serde::Deserialize; use tokio::time::sleep; use xshell::Shell; +use zkstack_cli_common::{ethereum::create_ethers_client, logger}; +use zkstack_cli_config::EcosystemConfig; use zksync_basic_types::{H160, U256}; use crate::commands::dev::{ diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/snapshot.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/snapshot.rs index 8e4c7183cb55..85aaec103c0b 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/snapshot.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/snapshot.rs @@ -1,8 +1,8 @@ use anyhow::Context; use clap::Subcommand; -use common::{cmd::Cmd, logger}; -use config::EcosystemConfig; use 
xshell::{cmd, Shell}; +use zkstack_cli_common::{cmd::Cmd, logger}; +use zkstack_cli_config::EcosystemConfig; use crate::commands::dev::messages::{MSG_CHAIN_NOT_FOUND_ERR, MSG_RUNNING_SNAPSHOT_CREATOR}; @@ -32,7 +32,7 @@ async fn create(shell: &Shell) -> anyhow::Result<()> { logger::info(MSG_RUNNING_SNAPSHOT_CREATOR); - let mut cmd = Cmd::new(cmd!(shell, "cargo run --bin snapshots_creator --release -- --config-path={config_path} --secrets-path={secrets_path}")) + let mut cmd = Cmd::new(cmd!(shell, "cargo run --manifest-path ./core/Cargo.toml --bin snapshots_creator --release -- --config-path={config_path} --secrets-path={secrets_path}")) .env("RUST_LOG", "snapshots_creator=debug"); cmd = cmd.with_force_run(); diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/sql_fmt.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/sql_fmt.rs index 0f7ce061ce18..388d733a5f3d 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/sql_fmt.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/sql_fmt.rs @@ -1,9 +1,9 @@ use std::mem::take; use anyhow::{bail, Result}; -use common::spinner::Spinner; use sqruff_lib::{api::simple::get_simple_config, core::linter::core::Linter}; use xshell::Shell; +use zkstack_cli_common::spinner::Spinner; use super::lint_utils::{get_unignored_files, IgnoredData, Target}; use crate::commands::dev::messages::{msg_file_is_not_formatted, MSG_RUNNING_SQL_FMT_SPINNER}; diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/status/args.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/status/args.rs index 5ac52bf854a6..1cc65f194cdc 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/status/args.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/status/args.rs @@ -1,7 +1,7 @@ use anyhow::Context; use clap::Parser; -use config::EcosystemConfig; use xshell::Shell; +use zkstack_cli_config::EcosystemConfig; use crate::{ commands::dev::messages::{ diff --git 
a/zkstack_cli/crates/zkstack/src/commands/dev/commands/status/mod.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/status/mod.rs index 8687fcb04763..7f2db7533655 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/status/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/status/mod.rs @@ -2,12 +2,12 @@ use std::collections::HashMap; use anyhow::Context; use args::{StatusArgs, StatusSubcommands}; -use common::logger; use draw::{bordered_boxes, format_port_info}; use serde::Deserialize; use serde_json::Value; use utils::deslugify; use xshell::Shell; +use zkstack_cli_common::logger; use crate::{ commands::dev::messages::{ diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/build.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/build.rs index dea6a46bbef6..7465d7ca851a 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/build.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/build.rs @@ -1,5 +1,5 @@ -use config::EcosystemConfig; use xshell::Shell; +use zkstack_cli_config::EcosystemConfig; use super::utils::{build_contracts, install_and_build_dependencies}; diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/db.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/db.rs index 19f6307019b8..89adb3febe8a 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/db.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/db.rs @@ -1,7 +1,7 @@ use std::path::Path; -use common::{cmd::Cmd, db::wait_for_db, logger}; use xshell::{cmd, Shell}; +use zkstack_cli_common::{cmd::Cmd, db::wait_for_db, logger}; use crate::commands::dev::{commands::database, dals::Dal, messages::MSG_RESETTING_TEST_DATABASES}; diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/fees.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/fees.rs index e58a70e6b7cb..c4d08c6ef9d7 100644 --- 
a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/fees.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/fees.rs @@ -1,9 +1,9 @@ use std::path::PathBuf; use anyhow::Context; -use common::{cmd::Cmd, config::global_config, logger}; -use config::EcosystemConfig; use xshell::{cmd, Shell}; +use zkstack_cli_common::{cmd::Cmd, config::global_config, logger}; +use zkstack_cli_config::EcosystemConfig; use super::{ args::fees::FeesArgs, diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/integration.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/integration.rs index 8e9e421c2f4e..67976c340cb5 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/integration.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/integration.rs @@ -1,9 +1,9 @@ use std::path::PathBuf; use anyhow::Context; -use common::{cmd::Cmd, config::global_config, logger}; -use config::EcosystemConfig; use xshell::{cmd, Shell}; +use zkstack_cli_common::{cmd::Cmd, config::global_config, logger}; +use zkstack_cli_config::EcosystemConfig; use super::{ args::integration::IntegrationArgs, @@ -43,7 +43,7 @@ pub async fn run(shell: &Shell, args: IntegrationArgs) -> anyhow::Result<()> { let test_pattern = args.test_pattern; let mut command = cmd!( shell, - "yarn jest --forceExit --testTimeout 120000 -t {test_pattern...}" + "yarn jest --forceExit --testTimeout 350000 -t {test_pattern...}" ) .env("CHAIN_NAME", ecosystem_config.current_chain()) .env("MASTER_WALLET_PK", wallets.get_test_pk(&chain_config)?); diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/l1_contracts.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/l1_contracts.rs index 7d163daed671..439926a74b80 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/l1_contracts.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/l1_contracts.rs @@ -1,6 +1,6 @@ -use common::{cmd::Cmd, logger}; -use 
config::EcosystemConfig; use xshell::{cmd, Shell}; +use zkstack_cli_common::{cmd::Cmd, logger}; +use zkstack_cli_config::EcosystemConfig; use crate::commands::dev::messages::MSG_L1_CONTRACTS_TEST_SUCCESS; diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/loadtest.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/loadtest.rs index 72a8f97ff97d..64ea474fa2fa 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/loadtest.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/loadtest.rs @@ -1,7 +1,7 @@ use anyhow::Context; -use common::{cmd::Cmd, config::global_config, logger}; -use config::EcosystemConfig; use xshell::{cmd, Shell}; +use zkstack_cli_common::{cmd::Cmd, config::global_config, logger}; +use zkstack_cli_config::EcosystemConfig; use crate::commands::dev::messages::MSG_CHAIN_NOT_FOUND_ERR; @@ -17,28 +17,31 @@ pub fn run(shell: &Shell) -> anyhow::Result<()> { .api_config .context("API config is not found")?; - let mut command = cmd!(shell, "cargo run --release --bin loadnext") - .env( - "L2_CHAIN_ID", - chain_config - .get_genesis_config()? - .l2_chain_id - .as_u64() - .to_string(), - ) - .env( - "MAIN_TOKEN", - format!( - "{:?}", - ecosystem_config - .get_erc20_tokens() - .first() - .context("NO Erc20 tokens were deployed")? - .address - ), - ) - .env("L2_RPC_ADDRESS", general_api.web3_json_rpc.http_url) - .env("L2_WS_RPC_ADDRESS", general_api.web3_json_rpc.ws_url); + let mut command = cmd!( + shell, + "cargo run --manifest-path ./core/Cargo.toml --release --bin loadnext" + ) + .env( + "L2_CHAIN_ID", + chain_config + .get_genesis_config()? + .l2_chain_id + .as_u64() + .to_string(), + ) + .env( + "MAIN_TOKEN", + format!( + "{:?}", + ecosystem_config + .get_erc20_tokens() + .first() + .context("NO Erc20 tokens were deployed")? 
+ .address + ), + ) + .env("L2_RPC_ADDRESS", general_api.web3_json_rpc.http_url) + .env("L2_WS_RPC_ADDRESS", general_api.web3_json_rpc.ws_url); if global_config().verbose { command = command.env("RUST_LOG", "loadnext=info") diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/prover.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/prover.rs index 200baf57215c..b74494c9d915 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/prover.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/prover.rs @@ -1,9 +1,9 @@ use std::str::FromStr; -use common::{cmd::Cmd, logger}; -use config::EcosystemConfig; use url::Url; use xshell::{cmd, Shell}; +use zkstack_cli_common::{cmd::Cmd, logger}; +use zkstack_cli_config::EcosystemConfig; use crate::commands::dev::{ commands::test::db::reset_test_databases, diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/recovery.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/recovery.rs index ae889969fd2c..e62d6cf9cdc5 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/recovery.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/recovery.rs @@ -1,9 +1,9 @@ use std::path::PathBuf; use anyhow::Context; -use common::{cmd::Cmd, logger, server::Server, spinner::Spinner}; -use config::EcosystemConfig; use xshell::{cmd, Shell}; +use zkstack_cli_common::{cmd::Cmd, logger, server::Server, spinner::Spinner}; +use zkstack_cli_config::EcosystemConfig; use super::{ args::recovery::RecoveryArgs, diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/revert.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/revert.rs index dc95c88db205..ae8b5df33222 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/revert.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/revert.rs @@ -1,9 +1,9 @@ use std::path::PathBuf; use anyhow::Context; -use common::{cmd::Cmd, logger, 
spinner::Spinner}; -use config::EcosystemConfig; use xshell::{cmd, Shell}; +use zkstack_cli_common::{cmd::Cmd, logger, spinner::Spinner}; +use zkstack_cli_config::EcosystemConfig; use super::{ args::revert::RevertArgs, diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/rust.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/rust.rs index 8c0c707f6a2e..dce3cd9022da 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/rust.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/rust.rs @@ -1,10 +1,10 @@ use std::str::FromStr; use anyhow::Context; -use common::{cmd::Cmd, logger}; -use config::EcosystemConfig; use url::Url; use xshell::{cmd, Shell}; +use zkstack_cli_common::{cmd::Cmd, logger}; +use zkstack_cli_config::EcosystemConfig; use super::args::rust::RustArgs; use crate::commands::dev::{ @@ -59,7 +59,7 @@ pub async fn run(shell: &Shell, args: RustArgs) -> anyhow::Result<()> { reset_test_databases(shell, &link_to_code, dals).await?; - let _dir_guard = shell.push_dir(&link_to_code); + let _dir_guard = shell.push_dir(link_to_code.join("core")); logger::info(MSG_USING_CARGO_NEXTEST); let cmd = cmd!(shell, "cargo nextest run --release"); diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/upgrade.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/upgrade.rs index 707e0086ed15..c20bbe163d7f 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/upgrade.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/upgrade.rs @@ -1,6 +1,6 @@ -use common::{cmd::Cmd, logger, spinner::Spinner}; -use config::EcosystemConfig; use xshell::{cmd, Shell}; +use zkstack_cli_common::{cmd::Cmd, logger, spinner::Spinner}; +use zkstack_cli_config::EcosystemConfig; use super::{args::upgrade::UpgradeArgs, utils::install_and_build_dependencies}; use crate::commands::dev::messages::{MSG_UPGRADE_TEST_RUN_INFO, MSG_UPGRADE_TEST_RUN_SUCCESS}; diff --git 
a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/utils.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/utils.rs index 8435b437169d..7c042fad1fa9 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/utils.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/utils.rs @@ -1,14 +1,14 @@ use std::collections::HashMap; use anyhow::Context; -use common::{cmd::Cmd, spinner::Spinner, wallets::Wallet}; -use config::{ChainConfig, EcosystemConfig}; use ethers::{ providers::{Http, Middleware, Provider}, utils::hex::ToHex, }; use serde::Deserialize; use xshell::{cmd, Shell}; +use zkstack_cli_common::{cmd::Cmd, spinner::Spinner, wallets::Wallet}; +use zkstack_cli_config::{ChainConfig, EcosystemConfig}; use crate::commands::dev::messages::{ MSG_INTEGRATION_TESTS_BUILDING_CONTRACTS, MSG_INTEGRATION_TESTS_BUILDING_DEPENDENCIES, @@ -67,7 +67,7 @@ impl TestWallets { let balance = provider.get_balance(wallet.address, None).await?; if balance.is_zero() { - common::ethereum::distribute_eth( + zkstack_cli_common::ethereum::distribute_eth( self.get_main_wallet()?, vec![wallet.address], l1_rpc, diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/wallet.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/wallet.rs index 6953014bf92b..ebacc5ead532 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/wallet.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/wallet.rs @@ -1,9 +1,9 @@ use std::path::PathBuf; use anyhow::Context; -use common::logger; -use config::EcosystemConfig; use xshell::Shell; +use zkstack_cli_common::logger; +use zkstack_cli_config::EcosystemConfig; use super::utils::{TestWallets, TEST_WALLETS_PATH}; use crate::commands::dev::messages::{ diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/dals.rs b/zkstack_cli/crates/zkstack/src/commands/dev/dals.rs index 9626edfed732..199c44bbeb70 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/dals.rs +++ 
b/zkstack_cli/crates/zkstack/src/commands/dev/dals.rs @@ -1,7 +1,7 @@ use anyhow::Context as _; -use config::{EcosystemConfig, SecretsConfig}; use url::Url; use xshell::Shell; +use zkstack_cli_config::{EcosystemConfig, SecretsConfig}; use super::{ commands::database::args::DalUrls, diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/messages.rs b/zkstack_cli/crates/zkstack/src/commands/dev/messages.rs index 235aa95ee492..b65750b34341 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/messages.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/messages.rs @@ -13,6 +13,9 @@ pub(super) const MSG_SUBCOMMAND_CLEAN: &str = "Clean artifacts"; pub(super) const MSG_SUBCOMMAND_LINT_ABOUT: &str = "Lint code"; pub(super) const MSG_CONTRACTS_ABOUT: &str = "Build contracts"; pub(super) const MSG_CONFIG_WRITER_ABOUT: &str = "Overwrite general config"; +#[cfg(feature = "gateway")] +pub(super) const MSG_GATEWAY_UPGRADE_CALLDATA: &str = + "Gateway upgrade checker and calldata generator"; pub(super) const MSG_SUBCOMMAND_FMT_ABOUT: &str = "Format code"; @@ -110,9 +113,11 @@ pub(super) const MSG_NOTHING_TO_BUILD_MSG: &str = "Nothing to build!"; pub(super) const MSG_BUILDING_CONTRACTS: &str = "Building contracts"; pub(super) const MSG_BUILDING_L2_CONTRACTS_SPINNER: &str = "Building L2 contracts.."; pub(super) const MSG_BUILDING_L1_CONTRACTS_SPINNER: &str = "Building L1 contracts.."; +pub(super) const MSG_BUILDING_L1_DA_CONTRACTS_SPINNER: &str = "Building L1 DA contracts.."; pub(super) const MSG_BUILDING_SYSTEM_CONTRACTS_SPINNER: &str = "Building system contracts.."; pub(super) const MSG_BUILDING_CONTRACTS_SUCCESS: &str = "Contracts built successfully"; pub(super) const MSG_BUILD_L1_CONTRACTS_HELP: &str = "Build L1 contracts"; +pub(super) const MSG_BUILD_L1_DA_CONTRACTS_HELP: &str = "Build L1 DA contracts"; pub(super) const MSG_BUILD_L2_CONTRACTS_HELP: &str = "Build L2 contracts"; pub(super) const MSG_BUILD_SYSTEM_CONTRACTS_HELP: &str = "Build system contracts"; diff --git 
a/zkstack_cli/crates/zkstack/src/commands/dev/mod.rs b/zkstack_cli/crates/zkstack/src/commands/dev/mod.rs index 409c3a764eb1..45e429129082 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/mod.rs @@ -1,5 +1,7 @@ use clap::Subcommand; use commands::status::args::StatusArgs; +#[cfg(feature = "gateway")] +use messages::MSG_GATEWAY_UPGRADE_CALLDATA; use messages::MSG_STATUS_ABOUT; use xshell::Shell; @@ -15,7 +17,7 @@ use crate::commands::dev::messages::{ MSG_SUBCOMMAND_SNAPSHOTS_CREATOR_ABOUT, MSG_SUBCOMMAND_TESTS_ABOUT, }; -mod commands; +pub(crate) mod commands; mod consts; mod dals; mod defaults; @@ -47,6 +49,9 @@ pub enum DevCommands { Status(StatusArgs), #[command(about = MSG_GENERATE_GENESIS_ABOUT, alias = "genesis")] GenerateGenesis, + #[cfg(feature = "gateway")] + #[command(about = MSG_GATEWAY_UPGRADE_CALLDATA)] + GatewayUpgradeCalldata(commands::gateway::GatewayUpgradeCalldataArgs), } pub async fn run(shell: &Shell, args: DevCommands) -> anyhow::Result<()> { @@ -65,6 +70,8 @@ pub async fn run(shell: &Shell, args: DevCommands) -> anyhow::Result<()> { } DevCommands::Status(args) => commands::status::run(shell, args).await?, DevCommands::GenerateGenesis => commands::genesis::run(shell).await?, + #[cfg(feature = "gateway")] + DevCommands::GatewayUpgradeCalldata(args) => commands::gateway::run(shell, args).await?, } Ok(()) } diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/build_transactions.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/build_transactions.rs index 697fa518b6e4..3abf0a0e3124 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/build_transactions.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/build_transactions.rs @@ -1,9 +1,9 @@ use std::{path::PathBuf, str::FromStr}; use clap::Parser; -use common::{forge::ForgeScriptArgs, Prompt}; use serde::{Deserialize, Serialize}; use url::Url; +use 
zkstack_cli_common::{forge::ForgeScriptArgs, Prompt}; use zksync_basic_types::H160; use crate::{ diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/create.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/create.rs index 9665516945e0..d997230d5611 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/create.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/create.rs @@ -1,12 +1,12 @@ use std::path::PathBuf; use clap::{Parser, ValueHint}; -use common::{Prompt, PromptConfirm, PromptSelect}; use serde::{Deserialize, Serialize}; use slugify_rs::slugify; use strum::IntoEnumIterator; -use types::{L1Network, WalletCreation}; use xshell::Shell; +use zkstack_cli_common::{Prompt, PromptConfirm, PromptSelect}; +use zkstack_cli_types::{L1Network, WalletCreation}; use crate::{ commands::chain::{args::create::ChainCreateArgs, ChainCreateArgsFinal}, @@ -70,7 +70,7 @@ impl EcosystemCreateArgs { link_to_code, wallet_creation: chain.wallet_creation, wallet_path: chain.wallet_path.clone(), - chain_args: chain, + chain_args: chain.clone(), start_containers, update_submodules: self.update_submodules, }) diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/gateway_upgrade.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/gateway_upgrade.rs index 21fb714bc491..50be243ab704 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/gateway_upgrade.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/gateway_upgrade.rs @@ -1,12 +1,11 @@ -/// TODO(EVM-927): Note that the contents of this file are not useable without Gateway contracts. 
use std::path::PathBuf; use clap::{Parser, ValueEnum}; -use common::{forge::ForgeScriptArgs, Prompt}; use serde::{Deserialize, Serialize}; use strum::EnumIter; -use types::L1Network; use url::Url; +use zkstack_cli_common::{forge::ForgeScriptArgs, Prompt}; +use zkstack_cli_types::L1Network; use crate::{ defaults::LOCAL_RPC_URL, diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/init.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/init.rs index 9b7ab5abf089..cb4206c8a995 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/init.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/args/init.rs @@ -1,13 +1,13 @@ use std::path::PathBuf; use clap::Parser; -use common::{forge::ForgeScriptArgs, Prompt, PromptConfirm}; use serde::{Deserialize, Serialize}; -use types::L1Network; use url::Url; +use zkstack_cli_common::{forge::ForgeScriptArgs, Prompt, PromptConfirm}; +use zkstack_cli_types::L1Network; use crate::{ - commands::chain::args::genesis::GenesisArgs, + commands::chain::args::{genesis::GenesisArgs, init::da_configs::ValidiumTypeArgs}, defaults::LOCAL_RPC_URL, messages::{ MSG_DEPLOY_ECOSYSTEM_PROMPT, MSG_DEPLOY_ERC20_PROMPT, MSG_DEV_ARG_HELP, @@ -105,6 +105,12 @@ pub struct EcosystemInitArgs { pub no_port_reallocation: bool, #[clap(long)] pub update_submodules: Option, + #[clap(flatten)] + pub validium_args: ValidiumTypeArgs, + #[clap(long, default_missing_value = "false", num_args = 0..=1)] + pub support_l2_legacy_shared_bridge_test: Option, + #[clap(long, default_missing_value = "false")] + pub skip_contract_compilation_override: bool, } impl EcosystemInitArgs { @@ -146,6 +152,11 @@ impl EcosystemInitArgs { observability, ecosystem_only: self.ecosystem_only, no_port_reallocation: self.no_port_reallocation, + skip_contract_compilation_override: self.skip_contract_compilation_override, + validium_args: self.validium_args, + support_l2_legacy_shared_bridge_test: self + .support_l2_legacy_shared_bridge_test + 
.unwrap_or_default(), } } } @@ -159,4 +170,7 @@ pub struct EcosystemInitArgsFinal { pub observability: bool, pub ecosystem_only: bool, pub no_port_reallocation: bool, + pub skip_contract_compilation_override: bool, + pub validium_args: ValidiumTypeArgs, + pub support_l2_legacy_shared_bridge_test: bool, } diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/build_transactions.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/build_transactions.rs index ff7132360972..1cd76debbe15 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/build_transactions.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/build_transactions.rs @@ -1,7 +1,7 @@ use anyhow::Context; -use common::{git, logger, spinner::Spinner}; -use config::{traits::SaveConfigWithBasePath, EcosystemConfig}; use xshell::Shell; +use zkstack_cli_common::{git, logger, spinner::Spinner}; +use zkstack_cli_config::{traits::SaveConfigWithBasePath, EcosystemConfig}; use super::{ args::build_transactions::BuildTransactionsArgs, @@ -49,6 +49,7 @@ pub async fn run(args: BuildTransactionsArgs, shell: &Shell) -> anyhow::Result<( &args.l1_rpc_url, Some(args.sender), false, + false, ) .await?; diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/change_default.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/change_default.rs index 3bd392c0558d..e5448187afd3 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/change_default.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/change_default.rs @@ -1,6 +1,6 @@ -use common::PromptSelect; -use config::{traits::SaveConfigWithBasePath, EcosystemConfig}; use xshell::Shell; +use zkstack_cli_common::PromptSelect; +use zkstack_cli_config::{traits::SaveConfigWithBasePath, EcosystemConfig}; use crate::{ commands::ecosystem::args::change_default::ChangeDefaultChain, diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/common.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/common.rs index 
e5ba18fe4c3e..7255ba9e1ca5 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/common.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/common.rs @@ -1,6 +1,7 @@ use anyhow::Context; -use common::forge::{Forge, ForgeScriptArgs}; -use config::{ +use xshell::Shell; +use zkstack_cli_common::forge::{Forge, ForgeScriptArgs}; +use zkstack_cli_config::{ forge_interface::{ deploy_ecosystem::{ input::{DeployL1Config, InitialDeploymentConfig}, @@ -11,11 +12,11 @@ use config::{ traits::{ReadConfig, ReadConfigWithBasePath, SaveConfig}, ContractsConfig, EcosystemConfig, GenesisConfig, }; -use types::{L1Network, ProverMode}; -use xshell::Shell; +use zkstack_cli_types::{L1Network, ProverMode}; use crate::utils::forge::{check_the_balance, fill_forge_private_key, WalletOwner}; +#[allow(clippy::too_many_arguments)] pub async fn deploy_l1( shell: &Shell, forge_args: &ForgeScriptArgs, @@ -24,8 +25,10 @@ pub async fn deploy_l1( l1_rpc_url: &str, sender: Option, broadcast: bool, + support_l2_legacy_shared_bridge_test: bool, ) -> anyhow::Result { let deploy_config_path = DEPLOY_ECOSYSTEM_SCRIPT_PARAMS.input(&config.link_to_code); + dbg!(config.get_default_configs_path()); let default_genesis_config = GenesisConfig::read_with_base_path(shell, config.get_default_configs_path()) .context("failed reading genesis config")?; @@ -38,6 +41,8 @@ pub async fn deploy_l1( initial_deployment_config, config.era_chain_id, config.prover_version == ProverMode::NoProofs, + config.l1_network, + support_l2_legacy_shared_bridge_test, ); deploy_config.save(shell, deploy_config_path)?; diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/create.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/create.rs index d7c6005cd1aa..404589afac2d 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/create.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/create.rs @@ -1,10 +1,10 @@ use anyhow::{bail, Context}; -use common::{logger, spinner::Spinner}; -use config::{ 
+use xshell::Shell; +use zkstack_cli_common::{logger, spinner::Spinner}; +use zkstack_cli_config::{ create_local_configs_dir, create_wallets, get_default_era_chain_id, traits::SaveConfigWithBasePath, EcosystemConfig, EcosystemConfigFromFileError, }; -use xshell::Shell; use crate::{ commands::{ diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/create_configs.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/create_configs.rs index 38358355ff97..a58e0d38bf11 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/create_configs.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/create_configs.rs @@ -1,11 +1,11 @@ use std::path::Path; -use config::{ +use xshell::Shell; +use zkstack_cli_config::{ forge_interface::deploy_ecosystem::input::{Erc20DeploymentConfig, InitialDeploymentConfig}, traits::{SaveConfigWithBasePath, SaveConfigWithCommentAndBasePath}, AppsEcosystemConfig, }; -use xshell::Shell; use crate::messages::{MSG_SAVE_ERC20_CONFIG_ATTENTION, MSG_SAVE_INITIAL_CONFIG_ATTENTION}; diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/gateway_upgrade.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/gateway_upgrade.rs new file mode 100644 index 000000000000..01905afb9a5d --- /dev/null +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/gateway_upgrade.rs @@ -0,0 +1,645 @@ +use anyhow::Context; +use ethers::{abi::parse_abi, contract::BaseContract, utils::hex}; +use lazy_static::lazy_static; +use serde::Deserialize; +use xshell::Shell; +use zkstack_cli_common::{db::DatabaseConfig, forge::Forge, git, spinner::Spinner}; +use zkstack_cli_config::{ + forge_interface::{ + gateway_ecosystem_upgrade::{ + input::GatewayEcosystemUpgradeInput, output::GatewayEcosystemUpgradeOutput, + }, + gateway_preparation::input::GatewayPreparationConfig, + script_params::{ + FINALIZE_UPGRADE_SCRIPT_PARAMS, GATEWAY_PREPARATION, GATEWAY_UPGRADE_ECOSYSTEM_PARAMS, + }, + }, + traits::{ReadConfig, ReadConfigWithBasePath, SaveConfig, 
SaveConfigWithBasePath}, + EcosystemConfig, GenesisConfig, CONFIGS_PATH, +}; +use zkstack_cli_types::ProverMode; +use zksync_basic_types::commitment::L1BatchCommitmentMode; +use zksync_types::{H160, L2_NATIVE_TOKEN_VAULT_ADDRESS, SHARED_BRIDGE_ETHER_TOKEN_ADDRESS, U256}; + +use super::args::gateway_upgrade::{GatewayUpgradeArgs, GatewayUpgradeArgsFinal}; +use crate::{ + accept_ownership::{ + accept_admin, governance_execute_calls, make_permanent_rollup, set_da_validator_pair, + }, + commands::{ + chain, + chain::{ + args::genesis::GenesisArgsFinal, + convert_to_gateway::{ + calculate_gateway_ctm, call_script, GATEWAY_PREPARATION_INTERFACE, + }, + genesis::genesis, + }, + ecosystem::args::gateway_upgrade::GatewayUpgradeStage, + }, + defaults::{generate_db_names, DBNames, DATABASE_SERVER_URL}, + messages::{MSG_CHAIN_NOT_FOUND_ERR, MSG_GENESIS_DATABASE_ERR, MSG_INTALLING_DEPS_SPINNER}, + utils::forge::{fill_forge_private_key, WalletOwner}, +}; + +pub async fn run(args: GatewayUpgradeArgs, shell: &Shell) -> anyhow::Result<()> { + println!("Running ecosystem gateway upgrade args"); + + let mut ecosystem_config = EcosystemConfig::from_file(shell)?; + git::submodule_update(shell, ecosystem_config.link_to_code.clone())?; + + let mut final_ecosystem_args = args.fill_values_with_prompt(ecosystem_config.l1_network, true); + + match final_ecosystem_args.ecosystem_upgrade_stage { + GatewayUpgradeStage::NoGovernancePrepare => { + no_governance_prepare(&mut final_ecosystem_args, shell, &ecosystem_config).await?; + no_governance_prepare_gateway(shell, &mut ecosystem_config).await?; + } + GatewayUpgradeStage::GovernanceStage1 => { + governance_stage_1(&mut final_ecosystem_args, shell, &ecosystem_config).await?; + } + GatewayUpgradeStage::GovernanceStage2 => { + governance_stage_2(&mut final_ecosystem_args, shell, &ecosystem_config).await?; + } + GatewayUpgradeStage::NoGovernanceStage2 => { + no_governance_stage_2(&mut final_ecosystem_args, shell, &ecosystem_config).await?; + } + 
GatewayUpgradeStage::GovernanceStage3 => { + governance_stage_3(&mut final_ecosystem_args, shell, &ecosystem_config).await?; + } + GatewayUpgradeStage::NoGovernanceStage3 => { + no_governance_stage_3(&mut final_ecosystem_args, shell, &ecosystem_config).await?; + } + } + + Ok(()) +} + +#[derive(Debug, Deserialize)] +struct BroadcastFile { + pub transactions: Vec, +} +#[derive(Debug, Deserialize)] +struct BroadcastFileTransactions { + pub hash: String, +} + +async fn no_governance_prepare( + init_args: &mut GatewayUpgradeArgsFinal, + shell: &Shell, + ecosystem_config: &EcosystemConfig, +) -> anyhow::Result<()> { + let spinner = Spinner::new(MSG_INTALLING_DEPS_SPINNER); + spinner.finish(); + + let forge_args = init_args.forge_args.clone(); + let l1_rpc_url = init_args.l1_rpc_url.clone(); + + let new_genesis_config = GenesisConfig::read_with_base_path(shell, CONFIGS_PATH)?; + let current_contracts_config = ecosystem_config.get_contracts_config()?; + let initial_deployment_config = ecosystem_config.get_initial_deployment_config()?; + + let ecosystem_upgrade_config_path = + GATEWAY_UPGRADE_ECOSYSTEM_PARAMS.input(&ecosystem_config.link_to_code); + + let era_config = ecosystem_config + .load_chain(Some("era".to_string())) + .context("No era")?; + + // FIXME: we will have to force this in production environment + // assert_eq!(era_config.chain_id, ecosystem_config.era_chain_id); + + let gateway_upgrade_input = GatewayEcosystemUpgradeInput::new( + &new_genesis_config, + ¤t_contracts_config, + &initial_deployment_config, + ecosystem_config.era_chain_id, + era_config.get_contracts_config()?.l1.diamond_proxy_addr, + ecosystem_config.prover_version == ProverMode::NoProofs, + ); + gateway_upgrade_input.save(shell, ecosystem_upgrade_config_path.clone())?; + + let mut forge = Forge::new(&ecosystem_config.path_to_l1_foundry()) + .script( + &GATEWAY_UPGRADE_ECOSYSTEM_PARAMS.script(), + forge_args.clone(), + ) + .with_ffi() + .with_rpc_url(l1_rpc_url) + .with_slow() + 
.with_gas_limit(1_000_000_000_000) + .with_broadcast(); + + forge = fill_forge_private_key( + forge, + ecosystem_config.get_wallets()?.deployer.as_ref(), + WalletOwner::Deployer, + )?; + + println!("Preparing the ecosystem for the upgrade!"); + + forge.run(shell)?; + + println!("done!"); + + let l1_chain_id = era_config.l1_network.chain_id(); + + let broadcast_file: BroadcastFile = { + let file_content = std::fs::read_to_string( + ecosystem_config + .link_to_code + .join("contracts/l1-contracts") + .join(format!( + "broadcast/EcosystemUpgrade.s.sol/{}/run-latest.json", + l1_chain_id + )), + ) + .context("Failed to read broadcast file")?; + serde_json::from_str(&file_content).context("Failed to parse broadcast file")? + }; + + let mut output = GatewayEcosystemUpgradeOutput::read( + shell, + GATEWAY_UPGRADE_ECOSYSTEM_PARAMS.output(&ecosystem_config.link_to_code), + )?; + + // Add all the transaction hashes. + for tx in broadcast_file.transactions { + output.transactions.push(tx.hash); + } + + output.save_with_base_path(shell, &ecosystem_config.config)?; + + Ok(()) +} + +async fn no_governance_prepare_gateway( + shell: &Shell, + ecosystem_config: &mut EcosystemConfig, +) -> anyhow::Result<()> { + let spinner = Spinner::new(MSG_INTALLING_DEPS_SPINNER); + spinner.finish(); + + let mut contracts_config = ecosystem_config.get_contracts_config()?; + + let output = GatewayEcosystemUpgradeOutput::read( + shell, + GATEWAY_UPGRADE_ECOSYSTEM_PARAMS.output(&ecosystem_config.link_to_code), + )?; + + let mut s: String = "0x".to_string(); + s += &hex::encode(output.contracts_config.diamond_cut_data.0); + contracts_config.ecosystem_contracts.diamond_cut_data = s; + + s = "0x".to_string(); + s += &hex::encode(output.contracts_config.force_deployments_data.0); + contracts_config.ecosystem_contracts.force_deployments_data = Some(s); + + contracts_config.l1.rollup_l1_da_validator_addr = + Some(output.deployed_addresses.rollup_l1_da_validator_addr); + 
contracts_config.l1.no_da_validium_l1_validator_addr = + Some(output.deployed_addresses.validium_l1_da_validator_addr); + + contracts_config + .ecosystem_contracts + .stm_deployment_tracker_proxy_addr = Some( + output + .deployed_addresses + .bridgehub + .ctm_deployment_tracker_proxy_addr, + ); + contracts_config.ecosystem_contracts.native_token_vault_addr = + Some(output.deployed_addresses.native_token_vault_addr); + contracts_config + .ecosystem_contracts + .l1_bytecodes_supplier_addr = Some(output.deployed_addresses.l1_bytecodes_supplier_addr); + contracts_config.bridges.l1_nullifier_addr = Some(contracts_config.bridges.shared.l1_address); + contracts_config.ecosystem_contracts.validator_timelock_addr = + output.deployed_addresses.validator_timelock_addr; + contracts_config.l1.validator_timelock_addr = output.deployed_addresses.validator_timelock_addr; + contracts_config.bridges.shared.l1_address = + output.deployed_addresses.bridges.shared_bridge_proxy_addr; + contracts_config + .ecosystem_contracts + .expected_rollup_l2_da_validator = + Some(output.contracts_config.expected_rollup_l2_da_validator); + + contracts_config.save_with_base_path(shell, &ecosystem_config.config)?; + Ok(()) +} + +// Governance has approved the proposal, now it will insert the new protocol version into our STM (CTM) +async fn governance_stage_1( + init_args: &mut GatewayUpgradeArgsFinal, + shell: &Shell, + ecosystem_config: &EcosystemConfig, +) -> anyhow::Result<()> { + println!("Executing governance stage 1!"); + + let previous_output = GatewayEcosystemUpgradeOutput::read( + shell, + GATEWAY_UPGRADE_ECOSYSTEM_PARAMS.output(&ecosystem_config.link_to_code), + )?; + previous_output.save_with_base_path(shell, &ecosystem_config.config)?; + + // These are ABI-encoded + let stage1_calls = previous_output.governance_stage1_calls; + + governance_execute_calls( + shell, + ecosystem_config, + &ecosystem_config.get_wallets()?.governor, + stage1_calls.0, + &init_args.forge_args.clone(), + 
init_args.l1_rpc_url.clone(), + ) + .await?; + + let gateway_ecosystem_preparation_output = + GatewayEcosystemUpgradeOutput::read_with_base_path(shell, &ecosystem_config.config)?; + + let mut contracts_config = ecosystem_config.get_contracts_config()?; + + contracts_config + .ecosystem_contracts + .stm_deployment_tracker_proxy_addr = Some( + gateway_ecosystem_preparation_output + .deployed_addresses + .bridgehub + .ctm_deployment_tracker_proxy_addr, + ); + // This is force deployment data for creating new contracts, not really relevant here tbh, + contracts_config.ecosystem_contracts.force_deployments_data = Some(hex::encode( + &gateway_ecosystem_preparation_output + .contracts_config + .force_deployments_data + .0, + )); + contracts_config.ecosystem_contracts.native_token_vault_addr = Some( + gateway_ecosystem_preparation_output + .deployed_addresses + .native_token_vault_addr, + ); + contracts_config + .ecosystem_contracts + .l1_bytecodes_supplier_addr = Some( + gateway_ecosystem_preparation_output + .deployed_addresses + .l1_bytecodes_supplier_addr, + ); + + contracts_config.l1.rollup_l1_da_validator_addr = Some( + gateway_ecosystem_preparation_output + .deployed_addresses + .rollup_l1_da_validator_addr, + ); + + contracts_config.l1.no_da_validium_l1_validator_addr = Some( + gateway_ecosystem_preparation_output + .deployed_addresses + .validium_l1_da_validator_addr, + ); + + // This value is meaningless for the ecosystem, but we'll populate it for consistency + contracts_config.l2.da_validator_addr = Some(H160::zero()); + contracts_config.l2.l2_native_token_vault_proxy_addr = Some(L2_NATIVE_TOKEN_VAULT_ADDRESS); + contracts_config.l2.legacy_shared_bridge_addr = contracts_config.bridges.shared.l2_address; + + contracts_config.save_with_base_path(shell, &ecosystem_config.config)?; + + Ok(()) +} + +// Governance has approved the proposal, now it will insert the new protocol version into our STM (CTM) +async fn governance_stage_2( + init_args: &mut 
GatewayUpgradeArgsFinal, + shell: &Shell, + ecosystem_config: &EcosystemConfig, +) -> anyhow::Result<()> { + println!("Executing governance stage 2!"); + + let previous_output = + GatewayEcosystemUpgradeOutput::read_with_base_path(shell, &ecosystem_config.config)?; + + // These are ABI-encoded + let stage2_calls = previous_output.governance_stage2_calls; + + governance_execute_calls( + shell, + ecosystem_config, + &ecosystem_config.get_wallets()?.governor, + stage2_calls.0, + &init_args.forge_args.clone(), + init_args.l1_rpc_url.clone(), + ) + .await?; + + let mut contracts_config = ecosystem_config.get_contracts_config()?; + contracts_config.bridges.shared.l1_address = previous_output + .deployed_addresses + .bridges + .shared_bridge_proxy_addr; + + contracts_config.save_with_base_path(shell, &ecosystem_config.config)?; + println!("Stage2 finalized!"); + + Ok(()) +} + +lazy_static! { + static ref FINALIZE_UPGRADE: BaseContract = BaseContract::from( + parse_abi(&[ + "function initChains(address bridgehub, uint256[] chains) public", + "function initTokens(address l1NativeTokenVault, address[] tokens, uint256[] chains) public", + ]) + .unwrap(), + ); +} + +// Governance has approved the proposal, now it will insert the new protocol version into our STM (CTM) +async fn no_governance_stage_2( + init_args: &mut GatewayUpgradeArgsFinal, + shell: &Shell, + ecosystem_config: &EcosystemConfig, +) -> anyhow::Result<()> { + let contracts_config = ecosystem_config.get_contracts_config()?; + let wallets = ecosystem_config.get_wallets()?; + let deployer_private_key = wallets + .deployer + .context("deployer_wallet")? 
+ .private_key_h256() + .context("deployer_priuvate_key")?; + + println!("Finalizing stage2 of the upgrade!"); + + let chains: Vec<_> = ecosystem_config + .list_of_chains() + .into_iter() + .filter_map(|name| { + let chain = ecosystem_config + .load_chain(Some(name)) + .expect("Invalid chain"); + (chain.name != "gateway").then_some(chain) + }) + .collect(); + + let chain_ids: Vec<_> = chains + .into_iter() + .map(|c| ethers::abi::Token::Uint(U256::from(c.chain_id.as_u64()))) + .collect(); + let mut tokens: Vec<_> = ecosystem_config + .get_erc20_tokens() + .into_iter() + .map(|t| ethers::abi::Token::Address(t.address)) + .collect(); + tokens.push(ethers::abi::Token::Address( + SHARED_BRIDGE_ETHER_TOKEN_ADDRESS, + )); + + // Resume for accept admin doesn't work properly. Foundry assumes that if signature of the function is the same, + // than it's the same call, but because we are calling this function multiple times during the init process, + // code assumes that doing only once is enough, but actually we need to accept admin multiple times + let mut forge_args = init_args.forge_args.clone(); + forge_args.resume = false; + + let init_chains_calldata = FINALIZE_UPGRADE + .encode( + "initChains", + ( + ethers::abi::Token::Address( + contracts_config.ecosystem_contracts.bridgehub_proxy_addr, + ), + ethers::abi::Token::Array(chain_ids.clone()), + ), + ) + .unwrap(); + let init_tokens_calldata = FINALIZE_UPGRADE + .encode( + "initTokens", + ( + ethers::abi::Token::Address( + contracts_config + .ecosystem_contracts + .native_token_vault_addr + .context("native_token_vault_addr")?, + ), + ethers::abi::Token::Array(tokens), + ethers::abi::Token::Array(chain_ids), + ), + ) + .unwrap(); + + println!("Initiing chains!"); + let foundry_contracts_path = ecosystem_config.path_to_l1_foundry(); + let forge = Forge::new(&foundry_contracts_path) + .script(&FINALIZE_UPGRADE_SCRIPT_PARAMS.script(), forge_args.clone()) + .with_ffi() + .with_rpc_url(init_args.l1_rpc_url.clone()) + 
.with_broadcast() + .with_calldata(&init_chains_calldata) + .with_private_key(deployer_private_key); + + forge.run(shell)?; + + println!("Initiing tokens!"); + + let forge = Forge::new(&foundry_contracts_path) + .script(&FINALIZE_UPGRADE_SCRIPT_PARAMS.script(), forge_args.clone()) + .with_ffi() + .with_rpc_url(init_args.l1_rpc_url.clone()) + .with_broadcast() + .with_calldata(&init_tokens_calldata) + .with_private_key(deployer_private_key); + + forge.run(shell)?; + + println!("Done!"); + + Ok(()) +} + +async fn governance_stage_3( + init_args: &mut GatewayUpgradeArgsFinal, + shell: &Shell, + ecosystem_config: &EcosystemConfig, +) -> anyhow::Result<()> { + let chain_config = ecosystem_config + .load_chain(Some("gateway".to_string())) + .context(MSG_CHAIN_NOT_FOUND_ERR)?; + call_script( + shell, + init_args.forge_args.clone(), + &GATEWAY_PREPARATION_INTERFACE + .encode("executeGovernanceTxs", ()) + .unwrap(), + ecosystem_config, + &chain_config, + &ecosystem_config.get_wallets()?.governor, + init_args.l1_rpc_url.clone(), + true, + ) + .await?; + + Ok(()) +} + +async fn no_governance_stage_3( + init_args: &mut GatewayUpgradeArgsFinal, + shell: &Shell, + ecosystem_config: &EcosystemConfig, +) -> anyhow::Result<()> { + let chain_config = ecosystem_config + .load_chain(Some("gateway".to_string())) + .context(MSG_CHAIN_NOT_FOUND_ERR)?; + + let chain_genesis_config = chain_config.get_genesis_config()?; + let mut chain_contracts_config = chain_config.get_contracts_config()?; + + // Fund gateway's governor (chain_config.get_wallets_config()?.governor) + chain::common::distribute_eth( + ecosystem_config, + &chain_config, + init_args.l1_rpc_url.clone(), + ) + .await?; + + // Accept ownership for DiamondProxy (run by L2 Governor) + accept_admin( + shell, + ecosystem_config, + chain_contracts_config.l1.chain_admin_addr, + &chain_config.get_wallets_config()?.governor, + chain_contracts_config.l1.diamond_proxy_addr, + &init_args.forge_args.clone(), + init_args.l1_rpc_url.clone(), 
+ ) + .await?; + + // prepare script input + let gateway_config = calculate_gateway_ctm( + shell, + init_args.forge_args.clone(), + ecosystem_config, + &chain_config, + &chain_genesis_config, + &ecosystem_config.get_initial_deployment_config().unwrap(), + init_args.l1_rpc_url.clone(), + ) + .await?; + + let gateway_preparation_config_path = GATEWAY_PREPARATION.input(&chain_config.link_to_code); + let preparation_config = GatewayPreparationConfig::new( + &chain_config, + &chain_contracts_config, + &ecosystem_config.get_contracts_config()?, + &gateway_config, + )?; + preparation_config.save(shell, gateway_preparation_config_path)?; + + // deploy filterer + let output = call_script( + shell, + init_args.forge_args.clone(), + &GATEWAY_PREPARATION_INTERFACE + .encode("deployAndSetGatewayTransactionFilterer", ()) + .unwrap(), + ecosystem_config, + &chain_config, + &chain_config.get_wallets_config()?.governor, + init_args.l1_rpc_url.clone(), + true, + ) + .await?; + + chain_contracts_config.set_transaction_filterer(output.gateway_transaction_filterer_proxy); + + // whitelist deployer + call_script( + shell, + init_args.forge_args.clone(), + &GATEWAY_PREPARATION_INTERFACE + .encode( + "grantWhitelist", + ( + output.gateway_transaction_filterer_proxy, + vec![ + ecosystem_config.get_contracts_config()?.l1.governance_addr, + ecosystem_config + .get_wallets()? + .deployer + .context("no deployer addr")? 
+ .address, + ], + ), + ) + .unwrap(), + ecosystem_config, + &chain_config, + &chain_config.get_wallets_config()?.governor, + init_args.l1_rpc_url.clone(), + true, + ) + .await?; + + // deploy ctm + chain::convert_to_gateway::deploy_gateway_ctm( + shell, + init_args.forge_args.clone(), + ecosystem_config, + &chain_config, + &chain_genesis_config, + &ecosystem_config.get_initial_deployment_config().unwrap(), + init_args.l1_rpc_url.clone(), + ) + .await?; + + chain_contracts_config.save_with_base_path(shell, &chain_config.configs)?; + + // Set da validators + let validium_mode = + chain_config.l1_batch_commit_data_generator_mode == L1BatchCommitmentMode::Validium; + let l1_da_validator_addr = if validium_mode { + chain_contracts_config.l1.no_da_validium_l1_validator_addr + } else { + chain_contracts_config.l1.rollup_l1_da_validator_addr + }; + set_da_validator_pair( + shell, + ecosystem_config, + chain_contracts_config.l1.chain_admin_addr, + &chain_config.get_wallets_config()?.governor, + chain_contracts_config.l1.diamond_proxy_addr, + l1_da_validator_addr.context("l1_da_validator_addr")?, + chain_contracts_config + .l2 + .da_validator_addr + .context("da_validator_addr")?, + &init_args.forge_args.clone(), + init_args.l1_rpc_url.clone(), + ) + .await?; + if !validium_mode { + make_permanent_rollup( + shell, + ecosystem_config, + chain_contracts_config.l1.chain_admin_addr, + &chain_config.get_wallets_config()?.governor, + chain_contracts_config.l1.diamond_proxy_addr, + &init_args.forge_args.clone(), + init_args.l1_rpc_url.clone(), + ) + .await?; + } + + let DBNames { server_name, .. 
} = generate_db_names(&chain_config); + let args = GenesisArgsFinal { + server_db: DatabaseConfig::new(DATABASE_SERVER_URL.clone(), server_name), + dont_drop: false, + }; + // Run genesis (create DB and run server with --genesis) + genesis(args, shell, &chain_config) + .await + .context(MSG_GENESIS_DATABASE_ERR)?; + + Ok(()) +} diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/init.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/init.rs index 309dda7abe5b..ac66d47a831c 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/init.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/init.rs @@ -1,15 +1,16 @@ use std::{path::PathBuf, str::FromStr}; use anyhow::Context; -use common::{ +use xshell::Shell; +use zkstack_cli_common::{ config::global_config, - contracts::build_system_contracts, + contracts::{build_l1_contracts, build_l2_contracts, build_system_contracts}, forge::{Forge, ForgeScriptArgs}, git, logger, spinner::Spinner, Prompt, }; -use config::{ +use zkstack_cli_config::{ forge_interface::{ deploy_ecosystem::{ input::{DeployErc20Config, Erc20DeploymentConfig, InitialDeploymentConfig}, @@ -20,13 +21,13 @@ use config::{ traits::{FileConfigWithDefaultName, ReadConfig, SaveConfig, SaveConfigWithBasePath}, ContractsConfig, EcosystemConfig, }; -use types::L1Network; -use xshell::Shell; +use zkstack_cli_types::L1Network; use super::{ args::init::{EcosystemArgsFinal, EcosystemInitArgs, EcosystemInitArgsFinal}, common::deploy_l1, setup_observability, + utils::{build_da_contracts, install_yarn_dependencies}, }; use crate::{ accept_ownership::{accept_admin, accept_owner}, @@ -110,7 +111,13 @@ async fn init_ecosystem( initial_deployment_config: &InitialDeploymentConfig, ) -> anyhow::Result { let spinner = Spinner::new(MSG_INTALLING_DEPS_SPINNER); - build_system_contracts(shell.clone(), ecosystem_config.link_to_code.clone())?; + install_yarn_dependencies(shell, &ecosystem_config.link_to_code)?; + if 
!init_args.skip_contract_compilation_override { + build_da_contracts(shell, &ecosystem_config.link_to_code)?; + build_l1_contracts(shell.clone(), ecosystem_config.link_to_code.clone())?; + build_system_contracts(shell.clone(), ecosystem_config.link_to_code.clone())?; + build_l2_contracts(shell.clone(), ecosystem_config.link_to_code.clone())?; + } spinner.finish(); let contracts = deploy_ecosystem( @@ -119,6 +126,7 @@ async fn init_ecosystem( init_args.forge_args.clone(), ecosystem_config, initial_deployment_config, + init_args.support_l2_legacy_shared_bridge_test, ) .await?; contracts.save_with_base_path(shell, &ecosystem_config.config)?; @@ -177,6 +185,7 @@ async fn deploy_ecosystem( forge_args: ForgeScriptArgs, ecosystem_config: &EcosystemConfig, initial_deployment_config: &InitialDeploymentConfig, + support_l2_legacy_shared_bridge_test: bool, ) -> anyhow::Result { if ecosystem.deploy_ecosystem { return deploy_ecosystem_inner( @@ -185,6 +194,7 @@ async fn deploy_ecosystem( ecosystem_config, initial_deployment_config, ecosystem.l1_rpc_url.clone(), + support_l2_legacy_shared_bridge_test, ) .await; } @@ -246,6 +256,7 @@ async fn deploy_ecosystem_inner( config: &EcosystemConfig, initial_deployment_config: &InitialDeploymentConfig, l1_rpc_url: String, + support_l2_legacy_shared_bridge_test: bool, ) -> anyhow::Result { let spinner = Spinner::new(MSG_DEPLOYING_ECOSYSTEM_CONTRACTS_SPINNER); let contracts_config = deploy_l1( @@ -256,6 +267,7 @@ async fn deploy_ecosystem_inner( &l1_rpc_url, None, true, + support_l2_legacy_shared_bridge_test, ) .await?; spinner.finish(); @@ -293,21 +305,26 @@ async fn deploy_ecosystem_inner( ) .await?; - accept_admin( + // Note, that there is no admin in L1 asset router, so we do + // need to accept it + + accept_owner( shell, config, - contracts_config.l1.chain_admin_addr, + contracts_config.l1.governance_addr, &config.get_wallets()?.governor, - contracts_config.bridges.shared.l1_address, + contracts_config + .ecosystem_contracts + 
.state_transition_proxy_addr, &forge_args, l1_rpc_url.clone(), ) .await?; - accept_owner( + accept_admin( shell, config, - contracts_config.l1.governance_addr, + contracts_config.l1.chain_admin_addr, &config.get_wallets()?.governor, contracts_config .ecosystem_contracts @@ -317,14 +334,15 @@ async fn deploy_ecosystem_inner( ) .await?; - accept_admin( + accept_owner( shell, config, - contracts_config.l1.chain_admin_addr, + contracts_config.l1.governance_addr, &config.get_wallets()?.governor, contracts_config .ecosystem_contracts - .state_transition_proxy_addr, + .stm_deployment_tracker_proxy_addr + .context("stm_deployment_tracker_proxy_addr")?, &forge_args, l1_rpc_url.clone(), ) @@ -373,6 +391,7 @@ async fn init_chains( no_port_reallocation: final_init_args.no_port_reallocation, update_submodules: init_args.update_submodules, dev: final_init_args.dev, + validium_args: final_init_args.validium_args.clone(), }; let final_chain_init_args = chain_init_args.fill_values_with_prompt(&chain_config); diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/mod.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/mod.rs index 3f4aa7565e19..19c2888edd0d 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/mod.rs @@ -12,6 +12,8 @@ mod change_default; mod common; mod create; pub mod create_configs; +#[cfg(feature = "gateway")] +mod gateway_upgrade; pub(crate) mod init; pub(crate) mod setup_observability; mod utils; @@ -34,6 +36,9 @@ pub enum EcosystemCommands { /// downloading Grafana dashboards from the era-observability repo #[command(alias = "obs")] SetupObservability, + /// Gateway version upgrade + #[cfg(feature = "gateway")] + GatewayUpgrade(crate::commands::ecosystem::args::gateway_upgrade::GatewayUpgradeArgs), } pub(crate) async fn run(shell: &Shell, args: EcosystemCommands) -> anyhow::Result<()> { @@ -43,5 +48,7 @@ pub(crate) async fn run(shell: &Shell, args: EcosystemCommands) -> 
anyhow::Resul EcosystemCommands::Init(args) => init::run(args, shell).await, EcosystemCommands::ChangeDefaultChain(args) => change_default::run(args, shell), EcosystemCommands::SetupObservability => setup_observability::run(shell), + #[cfg(feature = "gateway")] + EcosystemCommands::GatewayUpgrade(args) => gateway_upgrade::run(args, shell).await, } } diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/setup_observability.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/setup_observability.rs index f20c3c24157e..23d2b6e2cca4 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/setup_observability.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/setup_observability.rs @@ -1,6 +1,6 @@ -use common::{git, logger, spinner::Spinner}; -use config::{ERA_OBSERBAVILITY_DIR, ERA_OBSERBAVILITY_GIT_REPO}; use xshell::Shell; +use zkstack_cli_common::{git, logger, spinner::Spinner}; +use zkstack_cli_config::{ERA_OBSERBAVILITY_DIR, ERA_OBSERBAVILITY_GIT_REPO}; use crate::messages::{ MSG_DOWNLOADING_ERA_OBSERVABILITY_SPINNER, MSG_ERA_OBSERVABILITY_ALREADY_SETUP, diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/utils.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/utils.rs index a51adc75fb42..77fc45ff9f83 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/utils.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/utils.rs @@ -1,7 +1,7 @@ use std::path::Path; -use common::cmd::Cmd; use xshell::{cmd, Shell}; +use zkstack_cli_common::cmd::Cmd; pub(super) fn install_yarn_dependencies(shell: &Shell, link_to_code: &Path) -> anyhow::Result<()> { let _dir_guard = shell.push_dir(link_to_code); @@ -12,3 +12,8 @@ pub(super) fn build_system_contracts(shell: &Shell, link_to_code: &Path) -> anyh let _dir_guard = shell.push_dir(link_to_code.join("contracts")); Ok(Cmd::new(cmd!(shell, "yarn sc build")).run()?) 
} + +pub(super) fn build_da_contracts(shell: &Shell, link_to_code: &Path) -> anyhow::Result<()> { + let _dir_guard = shell.push_dir(link_to_code.join("contracts")); + Ok(Cmd::new(cmd!(shell, "yarn da build:foundry")).run()?) +} diff --git a/zkstack_cli/crates/zkstack/src/commands/explorer/backend.rs b/zkstack_cli/crates/zkstack/src/commands/explorer/backend.rs index 29cc2ecfbff0..b40205cdbe5e 100644 --- a/zkstack_cli/crates/zkstack/src/commands/explorer/backend.rs +++ b/zkstack_cli/crates/zkstack/src/commands/explorer/backend.rs @@ -1,9 +1,9 @@ use std::path::Path; use anyhow::Context; -use common::docker; -use config::{explorer_compose::ExplorerBackendComposeConfig, EcosystemConfig}; use xshell::Shell; +use zkstack_cli_common::docker; +use zkstack_cli_config::{explorer_compose::ExplorerBackendComposeConfig, EcosystemConfig}; use crate::messages::{ msg_explorer_chain_not_initialized, MSG_CHAIN_NOT_FOUND_ERR, diff --git a/zkstack_cli/crates/zkstack/src/commands/explorer/init.rs b/zkstack_cli/crates/zkstack/src/commands/explorer/init.rs index 096c45da5d8f..8bac0b84d982 100644 --- a/zkstack_cli/crates/zkstack/src/commands/explorer/init.rs +++ b/zkstack_cli/crates/zkstack/src/commands/explorer/init.rs @@ -1,14 +1,14 @@ use anyhow::Context; -use common::{config::global_config, db, logger, Prompt}; -use config::{ +use slugify_rs::slugify; +use url::Url; +use xshell::Shell; +use zkstack_cli_common::{config::global_config, db, logger, Prompt}; +use zkstack_cli_config::{ explorer::{ExplorerChainConfig, ExplorerConfig}, explorer_compose::{ExplorerBackendComposeConfig, ExplorerBackendConfig, ExplorerBackendPorts}, traits::{ConfigWithL2RpcUrl, SaveConfig}, ChainConfig, EcosystemConfig, }; -use slugify_rs::slugify; -use url::Url; -use xshell::Shell; use crate::{ consts::L2_BASE_TOKEN_ADDRESS, diff --git a/zkstack_cli/crates/zkstack/src/commands/explorer/run.rs b/zkstack_cli/crates/zkstack/src/commands/explorer/run.rs index a6519f62edba..4fbc8e1799fb 100644 --- 
a/zkstack_cli/crates/zkstack/src/commands/explorer/run.rs +++ b/zkstack_cli/crates/zkstack/src/commands/explorer/run.rs @@ -1,9 +1,9 @@ use std::path::Path; use anyhow::Context; -use common::{config::global_config, docker, logger}; -use config::{explorer::*, traits::SaveConfig, AppsEcosystemConfig, EcosystemConfig}; use xshell::Shell; +use zkstack_cli_common::{config::global_config, docker, logger}; +use zkstack_cli_config::{explorer::*, traits::SaveConfig, AppsEcosystemConfig, EcosystemConfig}; use crate::{ consts::{EXPLORER_APP_DOCKER_CONFIG_PATH, EXPLORER_APP_DOCKER_IMAGE}, diff --git a/zkstack_cli/crates/zkstack/src/commands/external_node/args/prepare_configs.rs b/zkstack_cli/crates/zkstack/src/commands/external_node/args/prepare_configs.rs index b1759702c461..2f688ae2eea5 100644 --- a/zkstack_cli/crates/zkstack/src/commands/external_node/args/prepare_configs.rs +++ b/zkstack_cli/crates/zkstack/src/commands/external_node/args/prepare_configs.rs @@ -1,9 +1,9 @@ use clap::Parser; -use common::{db::DatabaseConfig, Prompt}; -use config::ChainConfig; use serde::{Deserialize, Serialize}; use slugify_rs::slugify; use url::Url; +use zkstack_cli_common::{db::DatabaseConfig, Prompt}; +use zkstack_cli_config::ChainConfig; use crate::{ defaults::{generate_external_node_db_name, DATABASE_SERVER_URL, LOCAL_RPC_URL}, diff --git a/zkstack_cli/crates/zkstack/src/commands/external_node/build.rs b/zkstack_cli/crates/zkstack/src/commands/external_node/build.rs index ff15c0c77f30..581f08565162 100644 --- a/zkstack_cli/crates/zkstack/src/commands/external_node/build.rs +++ b/zkstack_cli/crates/zkstack/src/commands/external_node/build.rs @@ -1,7 +1,7 @@ use anyhow::Context; -use common::{cmd::Cmd, logger}; -use config::EcosystemConfig; use xshell::{cmd, Shell}; +use zkstack_cli_common::{cmd::Cmd, logger}; +use zkstack_cli_config::EcosystemConfig; use crate::messages::{MSG_BUILDING_EN, MSG_CHAIN_NOT_FOUND_ERR, MSG_FAILED_TO_BUILD_EN_ERR}; @@ -10,7 +10,7 @@ pub(crate) async fn 
build(shell: &Shell) -> anyhow::Result<()> { let chain = ecosystem .load_current_chain() .context(MSG_CHAIN_NOT_FOUND_ERR)?; - let _dir_guard = shell.push_dir(&chain.link_to_code); + let _dir_guard = shell.push_dir(chain.link_to_code.join("core")); logger::info(MSG_BUILDING_EN); diff --git a/zkstack_cli/crates/zkstack/src/commands/external_node/init.rs b/zkstack_cli/crates/zkstack/src/commands/external_node/init.rs index 184151764961..526e9fd4bc5f 100644 --- a/zkstack_cli/crates/zkstack/src/commands/external_node/init.rs +++ b/zkstack_cli/crates/zkstack/src/commands/external_node/init.rs @@ -1,10 +1,12 @@ use anyhow::Context; -use common::{ +use xshell::Shell; +use zkstack_cli_common::{ db::{drop_db_if_exists, init_db, migrate_db, DatabaseConfig}, spinner::Spinner, }; -use config::{traits::ReadConfigWithBasePath, ChainConfig, EcosystemConfig, SecretsConfig}; -use xshell::Shell; +use zkstack_cli_config::{ + traits::ReadConfigWithBasePath, ChainConfig, EcosystemConfig, SecretsConfig, +}; use crate::{ consts::SERVER_MIGRATIONS, diff --git a/zkstack_cli/crates/zkstack/src/commands/external_node/prepare_configs.rs b/zkstack_cli/crates/zkstack/src/commands/external_node/prepare_configs.rs index 8e937e3903d4..ae36c1c9a1f4 100644 --- a/zkstack_cli/crates/zkstack/src/commands/external_node/prepare_configs.rs +++ b/zkstack_cli/crates/zkstack/src/commands/external_node/prepare_configs.rs @@ -1,14 +1,14 @@ use std::{collections::BTreeMap, path::Path, str::FromStr}; use anyhow::Context; -use common::logger; -use config::{ +use xshell::Shell; +use zkstack_cli_common::logger; +use zkstack_cli_config::{ external_node::ENConfig, set_rocks_db_config, traits::{FileConfigWithDefaultName, SaveConfigWithBasePath}, ChainConfig, EcosystemConfig, GeneralConfig, SecretsConfig, }; -use xshell::Shell; use zksync_basic_types::url::SensitiveUrl; use zksync_config::configs::{ consensus::{ConsensusConfig, ConsensusSecrets, NodeSecretKey, Secret}, @@ -60,6 +60,7 @@ fn prepare_configs( let mut 
ports = EcosystemPortsScanner::scan(shell)?; let genesis = config.get_genesis_config()?; let general = config.get_general_config()?; + let gateway = config.get_gateway_chain_config().ok(); let en_config = ENConfig { l2_chain_id: genesis.l2_chain_id, l1_chain_id: genesis.l1_chain_id, @@ -74,7 +75,7 @@ fn prepare_configs( )?, main_node_rate_limit_rps: None, bridge_addresses_refresh_interval_sec: None, - gateway_chain_id: None, + gateway_chain_id: gateway.map(|g| g.gateway_chain_id), }; let mut general_en = general.clone(); general_en.consensus_config = None; diff --git a/zkstack_cli/crates/zkstack/src/commands/external_node/run.rs b/zkstack_cli/crates/zkstack/src/commands/external_node/run.rs index 46c98119f893..21714c335fa0 100644 --- a/zkstack_cli/crates/zkstack/src/commands/external_node/run.rs +++ b/zkstack_cli/crates/zkstack/src/commands/external_node/run.rs @@ -1,7 +1,7 @@ use anyhow::Context; -use common::logger; -use config::{ChainConfig, EcosystemConfig}; use xshell::Shell; +use zkstack_cli_common::logger; +use zkstack_cli_config::{ChainConfig, EcosystemConfig}; use crate::{ commands::external_node::{args::run::RunExternalNodeArgs, init}, diff --git a/zkstack_cli/crates/zkstack/src/commands/external_node/wait.rs b/zkstack_cli/crates/zkstack/src/commands/external_node/wait.rs index 72568c36f363..b645314dc9c2 100644 --- a/zkstack_cli/crates/zkstack/src/commands/external_node/wait.rs +++ b/zkstack_cli/crates/zkstack/src/commands/external_node/wait.rs @@ -1,7 +1,7 @@ use anyhow::Context as _; -use common::{config::global_config, logger}; -use config::{traits::ReadConfigWithBasePath, EcosystemConfig}; use xshell::Shell; +use zkstack_cli_common::{config::global_config, logger}; +use zkstack_cli_config::{traits::ReadConfigWithBasePath, EcosystemConfig}; use zksync_config::configs::GeneralConfig; use crate::{ diff --git a/zkstack_cli/crates/zkstack/src/commands/portal.rs b/zkstack_cli/crates/zkstack/src/commands/portal.rs index f9e7fe358609..d534498aaacd 100644 --- 
a/zkstack_cli/crates/zkstack/src/commands/portal.rs +++ b/zkstack_cli/crates/zkstack/src/commands/portal.rs @@ -1,15 +1,15 @@ use std::path::Path; use anyhow::Context; -use common::{config::global_config, docker, ethereum, logger}; -use config::{ +use ethers::types::Address; +use xshell::Shell; +use zkstack_cli_common::{config::global_config, docker, ethereum, logger}; +use zkstack_cli_config::{ portal::*, traits::{ConfigWithL2RpcUrl, SaveConfig}, AppsEcosystemConfig, ChainConfig, EcosystemConfig, }; -use ethers::types::Address; -use types::{BaseToken, TokenInfo}; -use xshell::Shell; +use zkstack_cli_types::{BaseToken, TokenInfo}; use crate::{ consts::{L2_BASE_TOKEN_ADDRESS, PORTAL_DOCKER_CONFIG_PATH, PORTAL_DOCKER_IMAGE}, diff --git a/zkstack_cli/crates/zkstack/src/commands/prover/args/compressor_keys.rs b/zkstack_cli/crates/zkstack/src/commands/prover/args/compressor_keys.rs index f393a89882af..9de616657b20 100644 --- a/zkstack_cli/crates/zkstack/src/commands/prover/args/compressor_keys.rs +++ b/zkstack_cli/crates/zkstack/src/commands/prover/args/compressor_keys.rs @@ -1,57 +1,22 @@ -use clap::{Parser, ValueEnum}; -use common::Prompt; -use strum::EnumIter; +use clap::Parser; +use zkstack_cli_common::Prompt; use crate::messages::MSG_SETUP_COMPRESSOR_KEY_PATH_PROMPT; #[derive(Debug, Clone, Parser, Default)] pub struct CompressorKeysArgs { #[clap(long)] - pub plonk_path: Option, - #[clap(long)] - pub fflonk_path: Option, - #[clap(long, default_value = "plonk")] - pub compressor_type: CompressorType, -} - -#[derive(Debug, Clone, ValueEnum, EnumIter, strum::Display, PartialEq, Eq, Default)] -pub enum CompressorType { - Fflonk, - #[default] - Plonk, - All, + pub path: Option, } impl CompressorKeysArgs { - pub fn fill_values_with_prompt( - self, - default_plonk_path: &str, - default_fflonk_path: &str, - ) -> CompressorKeysArgs { - let plonk_path = if self.compressor_type != CompressorType::Fflonk { - Some(self.plonk_path.unwrap_or_else(|| { - 
Prompt::new(MSG_SETUP_COMPRESSOR_KEY_PATH_PROMPT) - .default(default_plonk_path) - .ask() - })) - } else { - None - }; - - let fflonk_path = if self.compressor_type != CompressorType::Plonk { - Some(self.fflonk_path.unwrap_or_else(|| { - Prompt::new(MSG_SETUP_COMPRESSOR_KEY_PATH_PROMPT) - .default(default_fflonk_path) - .ask() - })) - } else { - None - }; + pub fn fill_values_with_prompt(self, default_path: &str) -> CompressorKeysArgs { + let path = self.path.unwrap_or_else(|| { + Prompt::new(MSG_SETUP_COMPRESSOR_KEY_PATH_PROMPT) + .default(default_path) + .ask() + }); - CompressorKeysArgs { - plonk_path, - fflonk_path, - ..self - } + CompressorKeysArgs { path: Some(path) } } } diff --git a/zkstack_cli/crates/zkstack/src/commands/prover/args/init.rs b/zkstack_cli/crates/zkstack/src/commands/prover/args/init.rs index b40dc180124d..4956a23ac987 100644 --- a/zkstack_cli/crates/zkstack/src/commands/prover/args/init.rs +++ b/zkstack_cli/crates/zkstack/src/commands/prover/args/init.rs @@ -1,11 +1,11 @@ use clap::{Parser, ValueEnum}; -use common::{db::DatabaseConfig, logger, Prompt, PromptConfirm, PromptSelect}; -use config::ChainConfig; use serde::{Deserialize, Serialize}; use slugify_rs::slugify; use strum::{EnumIter, IntoEnumIterator}; use url::Url; use xshell::Shell; +use zkstack_cli_common::{db::DatabaseConfig, logger, Prompt, PromptConfirm, PromptSelect}; +use zkstack_cli_config::ChainConfig; use zksync_config::configs::fri_prover::CloudConnectionMode; use super::{ @@ -202,16 +202,13 @@ impl ProverInitArgs { pub(crate) fn fill_values_with_prompt( &self, shell: &Shell, - default_plonk_key_path: &str, - default_fflonk_key_path: &str, + default_compressor_key_path: &str, chain_config: &ChainConfig, ) -> anyhow::Result { let proof_store = self.fill_proof_storage_values_with_prompt(shell)?; let public_store = self.fill_public_storage_values_with_prompt(shell)?; - let compressor_key_args = self.fill_setup_compressor_key_values_with_prompt( - default_plonk_key_path, - 
default_fflonk_key_path, - ); + let compressor_key_args = + self.fill_setup_compressor_key_values_with_prompt(default_compressor_key_path); let bellman_cuda_config = self.fill_bellman_cuda_values_with_prompt(); let cloud_type = self.get_cloud_type_with_prompt(); let database_config = self.fill_database_values_with_prompt(chain_config); @@ -358,14 +355,11 @@ impl ProverInitArgs { fn fill_setup_compressor_key_values_with_prompt( &self, - default_plonk_path: &str, - default_fflonk_path: &str, + default_path: &str, ) -> Option { if self.dev { return Some(CompressorKeysArgs { - plonk_path: Some(default_plonk_path.to_string()), - fflonk_path: Some(default_fflonk_path.to_string()), - ..self.compressor_keys_args.clone() + path: Some(default_path.to_string()), }); } @@ -379,7 +373,7 @@ impl ProverInitArgs { Some( self.compressor_keys_args .clone() - .fill_values_with_prompt(default_plonk_path, default_fflonk_path), + .fill_values_with_prompt(default_path), ) } else { None diff --git a/zkstack_cli/crates/zkstack/src/commands/prover/args/init_bellman_cuda.rs b/zkstack_cli/crates/zkstack/src/commands/prover/args/init_bellman_cuda.rs index 98a5c78be2a6..aa653fc62ade 100644 --- a/zkstack_cli/crates/zkstack/src/commands/prover/args/init_bellman_cuda.rs +++ b/zkstack_cli/crates/zkstack/src/commands/prover/args/init_bellman_cuda.rs @@ -1,7 +1,7 @@ use clap::Parser; -use common::{Prompt, PromptSelect}; use serde::{Deserialize, Serialize}; use strum::{EnumIter, IntoEnumIterator}; +use zkstack_cli_common::{Prompt, PromptSelect}; use crate::messages::{ MSG_BELLMAN_CUDA_DIR_PROMPT, MSG_BELLMAN_CUDA_ORIGIN_SELECT, MSG_BELLMAN_CUDA_SELECTION_CLONE, diff --git a/zkstack_cli/crates/zkstack/src/commands/prover/args/run.rs b/zkstack_cli/crates/zkstack/src/commands/prover/args/run.rs index a356f1874b7a..3aa3d090e4dc 100644 --- a/zkstack_cli/crates/zkstack/src/commands/prover/args/run.rs +++ b/zkstack_cli/crates/zkstack/src/commands/prover/args/run.rs @@ -2,10 +2,10 @@ use std::path::Path; use 
anyhow::anyhow; use clap::{Parser, ValueEnum}; -use common::{Prompt, PromptSelect}; -use config::ChainConfig; use serde::{Deserialize, Serialize}; use strum::{EnumIter, IntoEnumIterator}; +use zkstack_cli_common::{Prompt, PromptSelect}; +use zkstack_cli_config::ChainConfig; use crate::{ consts::{ diff --git a/zkstack_cli/crates/zkstack/src/commands/prover/args/setup_keys.rs b/zkstack_cli/crates/zkstack/src/commands/prover/args/setup_keys.rs index 155977b8812a..914533e47144 100644 --- a/zkstack_cli/crates/zkstack/src/commands/prover/args/setup_keys.rs +++ b/zkstack_cli/crates/zkstack/src/commands/prover/args/setup_keys.rs @@ -1,6 +1,6 @@ use clap::{Parser, ValueEnum}; -use common::PromptSelect; use strum::{EnumIter, IntoEnumIterator}; +use zkstack_cli_common::PromptSelect; use crate::messages::{MSG_SETUP_KEYS_DOWNLOAD_SELECTION_PROMPT, MSG_SETUP_KEYS_REGION_PROMPT}; diff --git a/zkstack_cli/crates/zkstack/src/commands/prover/compressor_keys.rs b/zkstack_cli/crates/zkstack/src/commands/prover/compressor_keys.rs index 31b970de3a1d..88eec0688da7 100644 --- a/zkstack_cli/crates/zkstack/src/commands/prover/compressor_keys.rs +++ b/zkstack_cli/crates/zkstack/src/commands/prover/compressor_keys.rs @@ -1,9 +1,9 @@ use anyhow::Context; -use common::{logger, spinner::Spinner}; -use config::{get_link_to_prover, EcosystemConfig, GeneralConfig}; use xshell::Shell; +use zkstack_cli_common::{logger, spinner::Spinner}; +use zkstack_cli_config::{get_link_to_prover, EcosystemConfig, GeneralConfig}; -use super::args::compressor_keys::{CompressorKeysArgs, CompressorType}; +use super::args::compressor_keys::CompressorKeysArgs; use crate::messages::{ MSG_CHAIN_NOT_FOUND_ERR, MSG_DOWNLOADING_SETUP_COMPRESSOR_KEY_SPINNER, MSG_PROOF_COMPRESSOR_CONFIG_NOT_FOUND_ERR, MSG_SETUP_KEY_PATH_ERROR, @@ -16,39 +16,12 @@ pub(crate) async fn run(shell: &Shell, args: CompressorKeysArgs) -> anyhow::Resu .context(MSG_CHAIN_NOT_FOUND_ERR)?; let mut general_config = chain_config.get_general_config()?; - let 
default_plonk_path = get_default_plonk_compressor_keys_path(&ecosystem_config)?; - let default_fflonk_path = get_default_fflonk_compressor_keys_path(&ecosystem_config)?; - let args = args.fill_values_with_prompt(&default_plonk_path, &default_fflonk_path); - - match args.compressor_type { - CompressorType::Fflonk => { - let path = args.clone().fflonk_path.context(MSG_SETUP_KEY_PATH_ERROR)?; - - download_compressor_key(shell, &mut general_config, CompressorType::Fflonk, &path)?; - } - CompressorType::Plonk => { - let path = args.plonk_path.context(MSG_SETUP_KEY_PATH_ERROR)?; - - download_compressor_key(shell, &mut general_config, CompressorType::Plonk, &path)?; - } - CompressorType::All => { - let plonk_path = args.clone().plonk_path.context(MSG_SETUP_KEY_PATH_ERROR)?; - let fflonk_path = args.clone().fflonk_path.context(MSG_SETUP_KEY_PATH_ERROR)?; - - download_compressor_key( - shell, - &mut general_config, - CompressorType::Fflonk, - &fflonk_path, - )?; - download_compressor_key( - shell, - &mut general_config, - CompressorType::Plonk, - &plonk_path, - )?; - } - } + let default_path = get_default_compressor_keys_path(&ecosystem_config)?; + let args = args.fill_values_with_prompt(&default_path); + + let path = args.path.context(MSG_SETUP_KEY_PATH_ERROR)?; + + download_compressor_key(shell, &mut general_config, &path)?; chain_config.save_general_config(&general_config)?; @@ -58,7 +31,6 @@ pub(crate) async fn run(shell: &Shell, args: CompressorKeysArgs) -> anyhow::Resu pub(crate) fn download_compressor_key( shell: &Shell, general_config: &mut GeneralConfig, - r#type: CompressorType, path: &str, ) -> anyhow::Result<()> { let spinner = Spinner::new(MSG_DOWNLOADING_SETUP_COMPRESSOR_KEY_SPINNER); @@ -68,50 +40,35 @@ pub(crate) fn download_compressor_key( .expect(MSG_PROOF_COMPRESSOR_CONFIG_NOT_FOUND_ERR) .clone(); - let url = match r#type { - CompressorType::Fflonk => { - compressor_config.universal_fflonk_setup_path = path.to_string(); - 
general_config.proof_compressor_config = Some(compressor_config.clone()); - compressor_config.universal_fflonk_setup_download_url - } - CompressorType::Plonk => { - compressor_config.universal_setup_path = path.to_string(); - general_config.proof_compressor_config = Some(compressor_config.clone()); - compressor_config.universal_setup_download_url - } - _ => unreachable!("Invalid compressor type"), - }; + compressor_config.universal_setup_path = path.to_string(); + general_config.proof_compressor_config = Some(compressor_config.clone()); let path = std::path::Path::new(path); - logger::info(format!("Downloading setup key by URL: {}", url)); + logger::info(format!( + "Downloading setup key by URL: {}", + compressor_config.universal_setup_download_url + )); let client = reqwest::blocking::Client::builder() .timeout(std::time::Duration::from_secs(600)) .build()?; - let response = client.get(url).send()?.bytes()?; + let response = client + .get(compressor_config.universal_setup_download_url) + .send()? 
+ .bytes()?; shell.write_file(path, &response)?; spinner.finish(); Ok(()) } -pub fn get_default_plonk_compressor_keys_path( - ecosystem_config: &EcosystemConfig, -) -> anyhow::Result { - let link_to_prover = get_link_to_prover(ecosystem_config); - let path = link_to_prover.join("keys/setup/setup_2^24.key"); - let string = path.to_str().unwrap(); - - Ok(String::from(string)) -} - -pub fn get_default_fflonk_compressor_keys_path( +pub fn get_default_compressor_keys_path( ecosystem_config: &EcosystemConfig, ) -> anyhow::Result { let link_to_prover = get_link_to_prover(ecosystem_config); - let path = link_to_prover.join("keys/setup/setup_fflonk_compact.key"); + let path = link_to_prover.join("keys/setup/setup_compact.key"); let string = path.to_str().unwrap(); Ok(String::from(string)) diff --git a/zkstack_cli/crates/zkstack/src/commands/prover/gcs.rs b/zkstack_cli/crates/zkstack/src/commands/prover/gcs.rs index f28c44504b56..5d82647f98d1 100644 --- a/zkstack_cli/crates/zkstack/src/commands/prover/gcs.rs +++ b/zkstack_cli/crates/zkstack/src/commands/prover/gcs.rs @@ -1,5 +1,7 @@ -use common::{check_prerequisites, cmd::Cmd, logger, spinner::Spinner, GCLOUD_PREREQUISITE}; use xshell::{cmd, Shell}; +use zkstack_cli_common::{ + check_prerequisites, cmd::Cmd, logger, spinner::Spinner, GCLOUD_PREREQUISITE, +}; use zksync_config::{configs::object_store::ObjectStoreMode, ObjectStoreConfig}; use super::args::init::ProofStorageGCSCreateBucket; diff --git a/zkstack_cli/crates/zkstack/src/commands/prover/init.rs b/zkstack_cli/crates/zkstack/src/commands/prover/init.rs index d0d9238321a4..51034e02a213 100644 --- a/zkstack_cli/crates/zkstack/src/commands/prover/init.rs +++ b/zkstack_cli/crates/zkstack/src/commands/prover/init.rs @@ -1,18 +1,18 @@ use std::path::PathBuf; use anyhow::Context; -use common::{ +use xshell::{cmd, Shell}; +use zkstack_cli_common::{ cmd::Cmd, config::global_config, db::{drop_db_if_exists, init_db, migrate_db, DatabaseConfig}, logger, spinner::Spinner, }; -use 
config::{ +use zkstack_cli_config::{ copy_configs, get_link_to_prover, set_prover_database, traits::SaveConfigWithBasePath, EcosystemConfig, }; -use xshell::{cmd, Shell}; use zksync_config::{configs::object_store::ObjectStoreMode, ObjectStoreConfig}; use super::{ @@ -24,10 +24,7 @@ use super::{ }; use crate::{ commands::prover::{ - args::{compressor_keys::CompressorType, init::ProofStorageFileBacked}, - compressor_keys::{ - get_default_fflonk_compressor_keys_path, get_default_plonk_compressor_keys_path, - }, + args::init::ProofStorageFileBacked, compressor_keys::get_default_compressor_keys_path, }, consts::{PROVER_MIGRATIONS, PROVER_STORE_MAX_RETRIES}, messages::{ @@ -41,18 +38,12 @@ use crate::{ pub(crate) async fn run(args: ProverInitArgs, shell: &Shell) -> anyhow::Result<()> { let ecosystem_config = EcosystemConfig::from_file(shell)?; - let default_plonk_key_path = get_default_plonk_compressor_keys_path(&ecosystem_config)?; - let default_fflonk_key_path = get_default_fflonk_compressor_keys_path(&ecosystem_config)?; + let default_compressor_key_path = get_default_compressor_keys_path(&ecosystem_config)?; let chain_config = ecosystem_config .load_current_chain() .context(MSG_CHAIN_NOT_FOUND_ERR)?; - let args = args.fill_values_with_prompt( - shell, - &default_plonk_key_path, - &default_fflonk_key_path, - &chain_config, - )?; + let args = args.fill_values_with_prompt(shell, &default_compressor_key_path, &chain_config)?; if chain_config.get_general_config().is_err() || chain_config.get_secrets_config().is_err() { copy_configs(shell, &ecosystem_config.link_to_code, &chain_config.configs)?; @@ -66,35 +57,9 @@ pub(crate) async fn run(args: ProverInitArgs, shell: &Shell) -> anyhow::Result<( let public_object_store_config = get_object_store_config(shell, args.public_store)?; if let Some(args) = args.compressor_key_args { - match args.compressor_type { - CompressorType::Fflonk => { - let path = args.clone().fflonk_path.context(MSG_SETUP_KEY_PATH_ERROR)?; - - 
download_compressor_key(shell, &mut general_config, CompressorType::Fflonk, &path)?; - } - CompressorType::Plonk => { - let path = args.plonk_path.context(MSG_SETUP_KEY_PATH_ERROR)?; - - download_compressor_key(shell, &mut general_config, CompressorType::Plonk, &path)?; - } - CompressorType::All => { - let fflonk_path = args.clone().fflonk_path.context(MSG_SETUP_KEY_PATH_ERROR)?; - let plonk_path = args.clone().plonk_path.context(MSG_SETUP_KEY_PATH_ERROR)?; - - download_compressor_key( - shell, - &mut general_config, - CompressorType::Fflonk, - &fflonk_path, - )?; - download_compressor_key( - shell, - &mut general_config, - CompressorType::Plonk, - &plonk_path, - )?; - } - } + let path = args.path.context(MSG_SETUP_KEY_PATH_ERROR)?; + + download_compressor_key(shell, &mut general_config, &path)?; } if let Some(args) = args.setup_keys { diff --git a/zkstack_cli/crates/zkstack/src/commands/prover/init_bellman_cuda.rs b/zkstack_cli/crates/zkstack/src/commands/prover/init_bellman_cuda.rs index 615ef841488b..16e33f8d7d3f 100644 --- a/zkstack_cli/crates/zkstack/src/commands/prover/init_bellman_cuda.rs +++ b/zkstack_cli/crates/zkstack/src/commands/prover/init_bellman_cuda.rs @@ -1,7 +1,9 @@ use anyhow::Context; -use common::{check_prerequisites, cmd::Cmd, git, logger, spinner::Spinner, GPU_PREREQUISITES}; -use config::{traits::SaveConfigWithBasePath, EcosystemConfig}; use xshell::{cmd, Shell}; +use zkstack_cli_common::{ + check_prerequisites, cmd::Cmd, git, logger, spinner::Spinner, GPU_PREREQUISITES, +}; +use zkstack_cli_config::{traits::SaveConfigWithBasePath, EcosystemConfig}; use super::args::init_bellman_cuda::InitBellmanCudaArgs; use crate::{ diff --git a/zkstack_cli/crates/zkstack/src/commands/prover/run.rs b/zkstack_cli/crates/zkstack/src/commands/prover/run.rs index c9bf837f787d..495c41ef8255 100644 --- a/zkstack_cli/crates/zkstack/src/commands/prover/run.rs +++ b/zkstack_cli/crates/zkstack/src/commands/prover/run.rs @@ -1,9 +1,9 @@ use std::path::{Path, 
PathBuf}; use anyhow::{anyhow, Context}; -use common::{check_prerequisites, cmd::Cmd, logger, GPU_PREREQUISITES}; -use config::{get_link_to_prover, ChainConfig, EcosystemConfig}; use xshell::{cmd, Shell}; +use zkstack_cli_common::{check_prerequisites, cmd::Cmd, logger, GPU_PREREQUISITES}; +use zkstack_cli_config::{get_link_to_prover, ChainConfig, EcosystemConfig}; use super::args::run::{ProverComponent, ProverRunArgs}; use crate::messages::{ diff --git a/zkstack_cli/crates/zkstack/src/commands/prover/setup_keys.rs b/zkstack_cli/crates/zkstack/src/commands/prover/setup_keys.rs index ae0480e872dd..463dd788777e 100644 --- a/zkstack_cli/crates/zkstack/src/commands/prover/setup_keys.rs +++ b/zkstack_cli/crates/zkstack/src/commands/prover/setup_keys.rs @@ -1,9 +1,9 @@ use anyhow::Ok; -use common::{ +use xshell::{cmd, Shell}; +use zkstack_cli_common::{ check_prerequisites, cmd::Cmd, logger, spinner::Spinner, GCLOUD_PREREQUISITE, GPU_PREREQUISITES, }; -use config::{get_link_to_prover, EcosystemConfig}; -use xshell::{cmd, Shell}; +use zkstack_cli_config::{get_link_to_prover, EcosystemConfig}; use crate::{ commands::prover::args::setup_keys::{Mode, Region, SetupKeysArgs}, diff --git a/zkstack_cli/crates/zkstack/src/commands/server.rs b/zkstack_cli/crates/zkstack/src/commands/server.rs index 702897edbbc1..e1e4ca3ff99d 100644 --- a/zkstack_cli/crates/zkstack/src/commands/server.rs +++ b/zkstack_cli/crates/zkstack/src/commands/server.rs @@ -1,15 +1,16 @@ use anyhow::Context; -use common::{ +use xshell::{cmd, Shell}; +use zkstack_cli_common::{ cmd::Cmd, config::global_config, logger, server::{Server, ServerMode}, }; -use config::{ +use zkstack_cli_config::{ traits::FileConfigWithDefaultName, ChainConfig, ContractsConfig, EcosystemConfig, GeneralConfig, GenesisConfig, SecretsConfig, WalletsConfig, }; -use xshell::{cmd, Shell}; +use zksync_config::configs::gateway::GatewayChainConfig; use crate::{ commands::args::{RunServerArgs, ServerArgs, ServerCommand, WaitArgs}, @@ -34,7 +35,7 
@@ pub async fn run(shell: &Shell, args: ServerArgs) -> anyhow::Result<()> { } fn build_server(chain_config: &ChainConfig, shell: &Shell) -> anyhow::Result<()> { - let _dir_guard = shell.push_dir(&chain_config.link_to_code); + let _dir_guard = shell.push_dir(chain_config.link_to_code.join("core")); logger::info(MSG_BUILDING_SERVER); @@ -60,6 +61,19 @@ fn run_server( } else { ServerMode::Normal }; + + let gateway_config = chain_config.get_gateway_chain_config().ok(); + let mut gateway_contracts = None; + if let Some(gateway_config) = gateway_config { + gateway_contracts = if gateway_config.gateway_chain_id.0 != 0_u64 { + Some(GatewayChainConfig::get_path_with_base_path( + &chain_config.configs, + )) + } else { + None + }; + } + server .run( shell, @@ -69,7 +83,7 @@ fn run_server( GeneralConfig::get_path_with_base_path(&chain_config.configs), SecretsConfig::get_path_with_base_path(&chain_config.configs), ContractsConfig::get_path_with_base_path(&chain_config.configs), - None, + gateway_contracts, vec![], ) .context(MSG_FAILED_TO_RUN_SERVER_ERR) diff --git a/zkstack_cli/crates/zkstack/src/commands/update.rs b/zkstack_cli/crates/zkstack/src/commands/update.rs index 534d490e6cae..0e1d385f8fef 100644 --- a/zkstack_cli/crates/zkstack/src/commands/update.rs +++ b/zkstack_cli/crates/zkstack/src/commands/update.rs @@ -1,17 +1,17 @@ use std::path::Path; use anyhow::{Context, Ok}; -use common::{ +use xshell::Shell; +use zkstack_cli_common::{ db::migrate_db, git, logger, spinner::Spinner, yaml::{merge_yaml, ConfigDiff}, }; -use config::{ +use zkstack_cli_config::{ ChainConfig, EcosystemConfig, CONTRACTS_FILE, EN_CONFIG_FILE, ERA_OBSERBAVILITY_DIR, GENERAL_FILE, GENESIS_FILE, SECRETS_FILE, }; -use xshell::Shell; use super::args::UpdateArgs; use crate::{ diff --git a/zkstack_cli/crates/zkstack/src/defaults.rs b/zkstack_cli/crates/zkstack/src/defaults.rs index 2b43009f5594..843a15e047e9 100644 --- a/zkstack_cli/crates/zkstack/src/defaults.rs +++ 
b/zkstack_cli/crates/zkstack/src/defaults.rs @@ -1,6 +1,6 @@ -use config::ChainConfig; use lazy_static::lazy_static; use url::Url; +use zkstack_cli_config::ChainConfig; lazy_static! { pub static ref DATABASE_SERVER_URL: Url = @@ -9,6 +9,9 @@ lazy_static! { Url::parse("postgres://postgres:notsecurepassword@localhost:5432").unwrap(); pub static ref DATABASE_EXPLORER_URL: Url = Url::parse("postgres://postgres:notsecurepassword@localhost:5432").unwrap(); + pub static ref AVAIL_RPC_URL: Url = Url::parse("wss://turing-rpc.avail.so/ws").unwrap(); + pub static ref AVAIL_BRIDGE_API_URL: Url = + Url::parse("https://turing-bridge-api.avail.so").unwrap(); } pub const DEFAULT_OBSERVABILITY_PORT: u16 = 3000; diff --git a/zkstack_cli/crates/zkstack/src/enable_evm_emulator.rs b/zkstack_cli/crates/zkstack/src/enable_evm_emulator.rs index bda1bfb3fc81..57e78a30b418 100644 --- a/zkstack_cli/crates/zkstack/src/enable_evm_emulator.rs +++ b/zkstack_cli/crates/zkstack/src/enable_evm_emulator.rs @@ -1,11 +1,13 @@ -use common::{ +use ethers::{abi::parse_abi, contract::BaseContract, types::Address}; +use xshell::Shell; +use zkstack_cli_common::{ forge::{Forge, ForgeScript, ForgeScriptArgs}, spinner::Spinner, wallets::Wallet, }; -use config::{forge_interface::script_params::ENABLE_EVM_EMULATOR_PARAMS, EcosystemConfig}; -use ethers::{abi::parse_abi, contract::BaseContract, types::Address}; -use xshell::Shell; +use zkstack_cli_config::{ + forge_interface::script_params::ENABLE_EVM_EMULATOR_PARAMS, EcosystemConfig, +}; use crate::{ messages::MSG_ENABLING_EVM_EMULATOR, diff --git a/zkstack_cli/crates/zkstack/src/external_node.rs b/zkstack_cli/crates/zkstack/src/external_node.rs index 5ff4ce070250..21d4e0db5592 100644 --- a/zkstack_cli/crates/zkstack/src/external_node.rs +++ b/zkstack_cli/crates/zkstack/src/external_node.rs @@ -1,11 +1,11 @@ use std::path::PathBuf; use anyhow::Context; -use config::{ +use xshell::Shell; +use zkstack_cli_config::{ external_node::ENConfig, 
traits::FileConfigWithDefaultName, ChainConfig, GeneralConfig, SecretsConfig, }; -use xshell::Shell; use zksync_config::configs::consensus::ConsensusConfig; use crate::messages::MSG_FAILED_TO_RUN_SERVER_ERR; @@ -63,7 +63,7 @@ impl RunExternalNode { consensus_args.push(format!("--consensus-path={}", consensus_config)) } - common::external_node::run( + zkstack_cli_common::external_node::run( shell, code_path, config_general_config, diff --git a/zkstack_cli/crates/zkstack/src/main.rs b/zkstack_cli/crates/zkstack/src/main.rs index ff4589a99cc5..98970e2be682 100644 --- a/zkstack_cli/crates/zkstack/src/main.rs +++ b/zkstack_cli/crates/zkstack/src/main.rs @@ -4,15 +4,15 @@ use commands::{ contract_verifier::ContractVerifierCommands, dev::DevCommands, }; -use common::{ +use xshell::Shell; +use zkstack_cli_common::{ check_general_prerequisites, config::{global_config, init_global_config, GlobalConfig}, error::log_error, init_prompt_theme, logger, version::version_message, }; -use config::EcosystemConfig; -use xshell::Shell; +use zkstack_cli_config::EcosystemConfig; use crate::commands::{ args::ServerArgs, chain::ChainCommands, consensus, ecosystem::EcosystemCommands, diff --git a/zkstack_cli/crates/zkstack/src/messages.rs b/zkstack_cli/crates/zkstack/src/messages.rs index 216c4bd64d3a..179f7100ef9e 100644 --- a/zkstack_cli/crates/zkstack/src/messages.rs +++ b/zkstack_cli/crates/zkstack/src/messages.rs @@ -72,6 +72,7 @@ pub(super) const MSG_DEPLOY_ECOSYSTEM_PROMPT: &str = "Do you want to deploy ecosystem contracts? 
(Not needed if you already have an existing one)"; pub(super) const MSG_L1_RPC_URL_PROMPT: &str = "What is the RPC URL of the L1 network?"; pub(super) const MSG_DEPLOY_PAYMASTER_PROMPT: &str = "Do you want to deploy Paymaster contract?"; +pub(super) const MSG_VALIDIUM_TYPE_PROMPT: &str = "Select the Validium type"; pub(super) const MSG_DEPLOY_ERC20_PROMPT: &str = "Do you want to deploy some test ERC20s?"; pub(super) const MSG_ECOSYSTEM_CONTRACTS_PATH_PROMPT: &str = "Provide the path to the ecosystem contracts or keep it empty and you will use ZKsync ecosystem config. \ For using this config, you need to have governance wallet"; @@ -97,6 +98,7 @@ pub(super) const MSG_DEPLOYING_ECOSYSTEM_CONTRACTS_SPINNER: &str = "Deploying ecosystem contracts..."; pub(super) const MSG_REGISTERING_CHAIN_SPINNER: &str = "Registering chain..."; pub(super) const MSG_ACCEPTING_ADMIN_SPINNER: &str = "Accepting admin..."; +pub(super) const MSG_DA_PAIR_REGISTRATION_SPINNER: &str = "Registering DA pair..."; pub(super) const MSG_UPDATING_TOKEN_MULTIPLIER_SETTER_SPINNER: &str = "Updating token multiplier setter..."; pub(super) const MSG_TOKEN_MULTIPLIER_SETTER_UPDATED_TO: &str = @@ -592,3 +594,16 @@ pub(super) fn msg_wait_consensus_registry_started_polling(addr: Address, url: &U pub(super) fn msg_consensus_registry_wait_success(addr: Address, code_len: usize) -> String { format!("Consensus registry is deployed at {addr:?}: {code_len} bytes") } + +/// DA clients related messages +pub(super) const MSG_AVAIL_CLIENT_TYPE_PROMPT: &str = "Avail client type"; +pub(super) const MSG_AVAIL_API_TIMEOUT_MS: &str = "Avail API timeout in milliseconds"; +pub(super) const MSG_AVAIL_API_NODE_URL_PROMPT: &str = "Avail API node URL"; +pub(super) const MSG_AVAIL_APP_ID_PROMPT: &str = "Avail app id"; +pub(super) const MSG_AVAIL_FINALITY_STATE_PROMPT: &str = "Avail finality state"; +pub(super) const MSG_AVAIL_GAS_RELAY_API_URL_PROMPT: &str = "Gas relay API URL"; +pub(super) const 
MSG_AVAIL_GAS_RELAY_MAX_RETRIES_PROMPT: &str = "Gas relay max retries"; +pub(super) const MSG_AVAIL_BRIDGE_API_URL_PROMPT: &str = "Attestation bridge API URL"; +pub(super) const MSG_AVAIL_SEED_PHRASE_PROMPT: &str = "Seed phrase"; +pub(super) const MSG_AVAIL_GAS_RELAY_API_KEY_PROMPT: &str = "Gas relay API key"; +pub(super) const MSG_INVALID_URL_ERR: &str = "Invalid URL format"; diff --git a/zkstack_cli/crates/zkstack/src/utils/consensus.rs b/zkstack_cli/crates/zkstack/src/utils/consensus.rs index 946d28a33fbd..0a1287067434 100644 --- a/zkstack_cli/crates/zkstack/src/utils/consensus.rs +++ b/zkstack_cli/crates/zkstack/src/utils/consensus.rs @@ -1,6 +1,6 @@ use anyhow::Context as _; -use config::ChainConfig; use secrecy::{ExposeSecret, Secret}; +use zkstack_cli_config::ChainConfig; use zksync_config::configs::consensus::{ AttesterPublicKey, AttesterSecretKey, ConsensusSecrets, GenesisSpec, NodePublicKey, NodeSecretKey, ProtocolVersion, ValidatorPublicKey, ValidatorSecretKey, WeightedAttester, diff --git a/zkstack_cli/crates/zkstack/src/utils/forge.rs b/zkstack_cli/crates/zkstack/src/utils/forge.rs index 76f045f82b9e..ccf5c3c84cd8 100644 --- a/zkstack_cli/crates/zkstack/src/utils/forge.rs +++ b/zkstack_cli/crates/zkstack/src/utils/forge.rs @@ -1,6 +1,6 @@ use anyhow::Context as _; -use common::{forge::ForgeScript, wallets::Wallet}; use ethers::types::U256; +use zkstack_cli_common::{forge::ForgeScript, wallets::Wallet}; use crate::{ consts::MINIMUM_BALANCE_FOR_WALLET, @@ -37,7 +37,7 @@ pub async fn check_the_balance(forge: &ForgeScript) -> anyhow::Result<()> { if balance >= expected_balance { return Ok(()); } - if !common::PromptConfirm::new(msg_address_doesnt_have_enough_money_prompt( + if !zkstack_cli_common::PromptConfirm::new(msg_address_doesnt_have_enough_money_prompt( &address, balance, expected_balance, diff --git a/zkstack_cli/crates/zkstack/src/utils/link_to_code.rs b/zkstack_cli/crates/zkstack/src/utils/link_to_code.rs index fcae429966dc..522e0d5e9c84 100644 
--- a/zkstack_cli/crates/zkstack/src/utils/link_to_code.rs +++ b/zkstack_cli/crates/zkstack/src/utils/link_to_code.rs @@ -4,10 +4,12 @@ use std::{ }; use anyhow::bail; -use common::{cmd::Cmd, git, logger, spinner::Spinner, Prompt, PromptConfirm, PromptSelect}; -use config::ZKSYNC_ERA_GIT_REPO; use strum::{EnumIter, IntoEnumIterator}; use xshell::{cmd, Shell}; +use zkstack_cli_common::{ + cmd::Cmd, git, logger, spinner::Spinner, Prompt, PromptConfirm, PromptSelect, +}; +use zkstack_cli_config::ZKSYNC_ERA_GIT_REPO; use crate::messages::{ msg_path_to_zksync_does_not_exist_err, MSG_CLONING_ERA_REPO_SPINNER, diff --git a/zkstack_cli/crates/zkstack/src/utils/ports.rs b/zkstack_cli/crates/zkstack/src/utils/ports.rs index 6c299b999136..f46acc9402e3 100644 --- a/zkstack_cli/crates/zkstack/src/utils/ports.rs +++ b/zkstack_cli/crates/zkstack/src/utils/ports.rs @@ -1,13 +1,13 @@ use std::{collections::HashMap, fmt, net::SocketAddr, ops::Range, path::Path}; use anyhow::{bail, Context, Result}; -use config::{ - explorer_compose::ExplorerBackendPorts, EcosystemConfig, DEFAULT_EXPLORER_API_PORT, - DEFAULT_EXPLORER_DATA_FETCHER_PORT, DEFAULT_EXPLORER_WORKER_PORT, -}; use serde_yaml::Value; use url::Url; use xshell::Shell; +use zkstack_cli_config::{ + explorer_compose::ExplorerBackendPorts, EcosystemConfig, DEFAULT_EXPLORER_API_PORT, + DEFAULT_EXPLORER_DATA_FETCHER_PORT, DEFAULT_EXPLORER_WORKER_PORT, +}; use crate::defaults::{DEFAULT_OBSERVABILITY_PORT, PORT_RANGE_END, PORT_RANGE_START}; diff --git a/zkstack_cli/crates/zkstack/src/utils/rocks_db.rs b/zkstack_cli/crates/zkstack/src/utils/rocks_db.rs index 1b7e29dd9722..e365d3650952 100644 --- a/zkstack_cli/crates/zkstack/src/utils/rocks_db.rs +++ b/zkstack_cli/crates/zkstack/src/utils/rocks_db.rs @@ -1,7 +1,7 @@ use std::path::Path; -use config::RocksDbs; use xshell::Shell; +use zkstack_cli_config::RocksDbs; use crate::defaults::{ EN_ROCKS_DB_PREFIX, MAIN_ROCKS_DB_PREFIX, ROCKS_DB_BASIC_WITNESS_INPUT_PRODUCER, diff --git 
a/zkstack_cli/zkstackup/zkstackup b/zkstack_cli/zkstackup/zkstackup index e91bbc17905c..2c928d8b1194 100755 --- a/zkstack_cli/zkstackup/zkstackup +++ b/zkstack_cli/zkstackup/zkstackup @@ -86,6 +86,10 @@ parse_args() { shift ZKSTACKUP_VERSION=$1 ;; + --cargo-features) + shift + ZKSTACKUP_FEATURES=$1 + ;; -h | --help) usage exit 0 @@ -114,10 +118,12 @@ Options: -b, --branch Git branch to use when installing from a repository. Ignored if --commit or --version is provided. -c, --commit Git commit hash to use when installing from a repository. Ignored if --branch or --version is provided. -v, --version Git tag to use when installing from a repository. Ignored if --branch or --commit is provided. + --cargo-features One or more features passed to cargo install (e.g., "gateway"). -h, --help Show this help message and exit. Examples: $(basename "$0") --repo matter-labs/zksync-era --version 0.1.1 + $(basename "$0") --local --cargo-features "gateway" EOF } @@ -143,7 +149,10 @@ install_local() { for bin in "${BINS[@]}"; do say "Installing $bin" - ensure cargo install --root $LOCAL_DIR --path ./crates/$bin --force + ensure cargo install --root "$LOCAL_DIR" \ + --path "./crates/$bin" \ + --force \ + ${ZKSTACKUP_FEATURES:+--features "$ZKSTACKUP_FEATURES"} chmod +x "$BIN_DIR/$bin" done } @@ -161,19 +170,42 @@ install_from_repo() { if [ -n "$ZKSTACKUP_COMMIT" ] || [ -n "$ZKSTACKUP_BRANCH" ]; then warn "Ignoring --commit and --branch arguments when installing by version" fi - ensure cargo install --root $LOCAL_DIR --git "https://github.com/$ZKSTACKUP_REPO" --tag "zkstack_cli-v$ZKSTACKUP_VERSION" --locked "${BINS[@]}" --force + ensure cargo install --root "$LOCAL_DIR" \ + --git "https://github.com/$ZKSTACKUP_REPO" \ + --tag "zkstack_cli-v$ZKSTACKUP_VERSION" \ + --locked "${BINS[@]}" \ + --force \ + ${ZKSTACKUP_FEATURES:+--features "$ZKSTACKUP_FEATURES"} + elif [ -n "$ZKSTACKUP_COMMIT" ]; then if [ -n "$ZKSTACKUP_BRANCH" ]; then warn "Ignoring --branch argument when installing by 
commit" fi - ensure cargo install --root $LOCAL_DIR --git "https://github.com/$ZKSTACKUP_REPO" --rev "$ZKSTACKUP_COMMIT" --locked "${BINS[@]}" --force + ensure cargo install --root "$LOCAL_DIR" \ + --git "https://github.com/$ZKSTACKUP_REPO" \ + --rev "$ZKSTACKUP_COMMIT" \ + --locked "${BINS[@]}" \ + --force \ + ${ZKSTACKUP_FEATURES:+--features "$ZKSTACKUP_FEATURES"} + elif [ -n "$ZKSTACKUP_BRANCH" ]; then - ensure cargo install --root $LOCAL_DIR --git "https://github.com/$ZKSTACKUP_REPO" --branch "$ZKSTACKUP_BRANCH" --locked "${BINS[@]}" --force + ensure cargo install --root "$LOCAL_DIR" \ + --git "https://github.com/$ZKSTACKUP_REPO" \ + --branch "$ZKSTACKUP_BRANCH" \ + --locked "${BINS[@]}" \ + --force \ + ${ZKSTACKUP_FEATURES:+--features "$ZKSTACKUP_FEATURES"} + else - ensure cargo install --root $LOCAL_DIR --git "https://github.com/$ZKSTACKUP_REPO" --locked "${BINS[@]}" --force + ensure cargo install --root "$LOCAL_DIR" \ + --git "https://github.com/$ZKSTACKUP_REPO" \ + --locked "${BINS[@]}" \ + --force \ + ${ZKSTACKUP_FEATURES:+--features "$ZKSTACKUP_FEATURES"} fi } + add_bin_folder_to_path() { if [[ ":$PATH:" == *":${BIN_DIR}:"* ]]; then echo "found ${BIN_DIR} in PATH"