diff --git a/.github/workflows/rusk_build.yml b/.github/workflows/rusk_build.yml index 5edb9703e0..9d46d8325e 100644 --- a/.github/workflows/rusk_build.yml +++ b/.github/workflows/rusk_build.yml @@ -48,11 +48,14 @@ jobs: include: - os: ubuntu-24.04 target: linux-x64 + target_folder: target - os: macos-15 target: macos-arm64 + target_folder: target/aarch64-apple-darwin flags: --target=aarch64-apple-darwin - os: arm-linux target: linux-arm64 + target_folder: target/aarch64-unknown-linux-gnu flags: --target=aarch64-unknown-linux-gnu fail-fast: false @@ -89,15 +92,6 @@ jobs: echo "SKIP_WASM=false" >> $GITHUB_ENV fi - - name: Compile keys - shell: bash - run: make keys - - - name: Compile WASM Contracts - if: ${{ env.SKIP_WASM != 'true' }} - shell: bash - run: make wasm - - name: Build Rusk binary shell: bash working-directory: ./rusk @@ -105,13 +99,14 @@ jobs: - name: Extract Version run: | - export SEMVER=$(cargo pkgid --manifest-path ./rusk/Cargo.toml | sed -E 's/.*#([0-9]+\.[0-9]+\.[0-9]+).*/\1/') + export SEMVER=$(cargo pkgid --manifest-path ./rusk/Cargo.toml | sed -E 's/.*@([0-9]+\.[0-9]+\.[0-9]+).*/\1/') echo "SEMVER=$SEMVER" >> $GITHUB_ENV - name: Package Binaries run: | + find . -name "rusk" mkdir rusk-${{ env.SEMVER }}-${{ matrix.target }}-${{ matrix.feature }} - mv target/release/rusk rusk-${{ env.SEMVER }}-${{ matrix.target }}-${{ matrix.feature }} + mv ${{ matrix.target_folder }}/release/rusk rusk-${{ env.SEMVER }}-${{ matrix.target }}-${{ matrix.feature }} tar -czvf rusk-${{ env.SEMVER }}-${{ matrix.target }}-${{ matrix.feature }}.tar.gz \ rusk-${{ env.SEMVER }}-${{ matrix.target }}-${{ matrix.feature }} diff --git a/Cargo.lock b/Cargo.lock index ace2446fc4..8fcb6c50d6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -122,7 +122,7 @@ dependencies = [ name = "alice" version = "0.3.0" dependencies = [ - "dusk-core 1.0.0", + "dusk-core 1.1.0", ] [[package]] @@ -734,12 +734,30 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a38440ec581131c35809ddb8554f63ede8a051d238cfe974ff21c5aa6ee47a36" dependencies = [ "bytecheck", - "dusk-bls12_381", + "dusk-bls12_381 0.13.0", + "dusk-bytes", + "ff", + "rand_core 0.6.4", + "rkyv", + "zeroize", +] + +[[package]] +name = "bls12_381-bls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b53eb2c00ecaf0635c278ed7701386a6e83e9b578181ee248ed52f339957b9c" +dependencies = [ + "bs58", + "bytecheck", + "dusk-bls12_381 0.14.1", "dusk-bytes", "ff", "rand_core 0.6.4", "rayon", "rkyv", + "serde", + "serde_json", "zeroize", ] @@ -749,7 +767,7 @@ version = "0.3.0" dependencies = [ "bytecheck", "dusk-bytes", - "dusk-core 1.0.0", + "dusk-core 1.1.0", "rkyv", ] @@ -869,7 +887,7 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" name = "charlie" version = "0.3.0" dependencies = [ - "dusk-core 1.0.0", + "dusk-core 1.1.0", "rkyv", ] @@ -1556,6 +1574,24 @@ name = "dusk-bls12_381" version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc2bd68f9bed48c2e64860e1fd1f5dbb7733eb07d0288fbdcc3763678c742139" +dependencies = [ + "blake2b_simd", + "bytecheck", + "dusk-bytes", + "ff", + "group", + "pairing", + "rand_core 0.6.4", + "rkyv", + "subtle", + "zeroize", +] + +[[package]] +name = "dusk-bls12_381" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6321527ff3adf7664566879499556bcf476fec7eaf9b1e62d82db2f7054dee82" dependencies = [ "blake2b_simd", "bytecheck", @@ -1563,10 +1599,12 @@ 
dependencies = [ "dusk-bytes", "ff", "group", + "hex", "pairing", "rand_core 0.6.4", "rayon", "rkyv", + "serde", "subtle", "zeroize", ] @@ -1589,9 +1627,9 @@ dependencies = [ "anyhow", "async-trait", "dusk-bytes", - "dusk-core 1.0.0", + "dusk-core 1.1.0", "dusk-merkle", - "dusk-node-data 1.0.1", + "dusk-node-data 1.1.0", "hex", "num-bigint", "sha3", @@ -1608,9 +1646,9 @@ dependencies = [ "async-trait", "criterion", "dusk-bytes", - "dusk-core 1.0.0", + "dusk-core 1.1.0", "dusk-merkle", - "dusk-node-data 1.0.1", + "dusk-node-data 1.1.0", "hex", "num-bigint", "rand 0.8.5", @@ -1622,17 +1660,17 @@ dependencies = [ [[package]] name = "dusk-core" -version = "1.0.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "925616ff2955a30ce32c97c789d43ea9fcecba722352252d32e6c05fbd9cb244" +checksum = "9f59efed0e9cae4161db7ab01609e59177c6b416282e37c75287ea2a165a31be" dependencies = [ "ark-bn254", "ark-groth16", "ark-relations", "ark-serialize", - "bls12_381-bls", + "bls12_381-bls 0.5.0", "bytecheck", - "dusk-bls12_381", + "dusk-bls12_381 0.14.1", "dusk-bytes", "dusk-jubjub", "dusk-plonk", @@ -1649,20 +1687,21 @@ dependencies = [ [[package]] name = "dusk-core" -version = "1.0.1-alpha.1" +version = "1.1.1-alpha.1" dependencies = [ "ark-bn254", "ark-groth16", "ark-relations", "ark-serialize", - "bls12_381-bls", + "bls12_381-bls 0.5.0", "bytecheck", - "dusk-bls12_381", + "dusk-bls12_381 0.14.1", "dusk-bytes", "dusk-jubjub", "dusk-plonk", "dusk-poseidon", "ff", + "hex", "jubjub-schnorr", "phoenix-circuits", "phoenix-core", @@ -1670,23 +1709,28 @@ dependencies = [ "poseidon-merkle", "rand 0.8.5", "rkyv", + "serde", + "serde_json", ] [[package]] name = "dusk-jubjub" -version = "0.14.1" +version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3f0c0f56e6f8aebf174915945eb1a105f1deee866f614c5c7c0a316c54011c" +checksum = "0d0e6995ff48af44bbcb810e4a49ed4ddb0800601c9c9722fd00d7f7e0b77c3a" dependencies = [ "bitvec", "blake2b_simd", "bytecheck", - "dusk-bls12_381", + "dusk-bls12_381 0.14.1", "dusk-bytes", "ff", "group", + "hex", "rand_core 0.6.4", "rkyv", + "serde", + "serde_json", "subtle", "zeroize", ] @@ -1704,17 +1748,17 @@ dependencies = [ [[package]] name = "dusk-node" -version = "1.0.1" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed2116ad4caa07060bdb3348dbb848144d69bbae941b53cbe4fd3e094a1961e6" +checksum = "0441c97b1730ba96bd6eda42a8bfdfb40d4e6727a45a65924752a16f6ae69fad" dependencies = [ "anyhow", "async-channel", "async-trait", "dusk-bytes", "dusk-consensus 1.0.1", - "dusk-core 1.0.0", - "dusk-node-data 1.0.1", + "dusk-core 1.1.0", + "dusk-node-data 1.1.0", "hex", "humantime-serde", "kadcast", @@ -1736,7 +1780,7 @@ dependencies = [ [[package]] name = "dusk-node" -version = "1.0.2-alpha.1" +version = "1.1.1-alpha.1" dependencies = [ "anyhow", "async-channel", @@ -1744,8 +1788,8 @@ dependencies = [ "criterion", "dusk-bytes", "dusk-consensus 1.0.1", - "dusk-core 1.0.0", - "dusk-node-data 1.0.1", + "dusk-core 1.1.0", + "dusk-node-data 1.1.0", "fake", "hex", "humantime-serde", @@ -1761,7 +1805,7 @@ dependencies = [ "serde_with", "smallvec", "sqlx", - "tempdir", + "tempfile", "thiserror", "time-util", "tokio", @@ -1770,9 +1814,9 @@ dependencies = [ [[package]] name = "dusk-node-data" -version = "1.0.1" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e19cc13157634f8b092f5a10119c2c1047066192efd8bbfd98a736c120405d9b" +checksum = 
"c9898926e038094f5b0a23af0f61ea0271fedbb86d9cbb3b9c5d9eb47f02f41d" dependencies = [ "aes 0.7.5", "anyhow", @@ -1782,7 +1826,7 @@ dependencies = [ "bs58", "chrono", "dusk-bytes", - "dusk-core 1.0.0", + "dusk-core 1.1.0", "fake", "hex", "rand 0.8.5", @@ -1797,7 +1841,7 @@ dependencies = [ [[package]] name = "dusk-node-data" -version = "1.0.2-alpha.1" +version = "1.1.1-alpha.1" dependencies = [ "aes 0.7.5", "anyhow", @@ -1807,7 +1851,7 @@ dependencies = [ "bs58", "chrono", "dusk-bytes", - "dusk-core 1.0.0", + "dusk-core 1.1.0", "fake", "hex", "rand 0.8.5", @@ -1822,13 +1866,13 @@ dependencies = [ [[package]] name = "dusk-plonk" -version = "0.20.2" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fbb5ba0c35fd58d44ae5191e512bc9fef6fd06fff77cc500e5c98918abe08e5" +checksum = "c425aa88fb1fd44c4bb72c0fe6abd37eae9de5b1f42d375d16c95aa1efceb327" dependencies = [ "bytecheck", "cfg-if", - "dusk-bls12_381", + "dusk-bls12_381 0.14.1", "dusk-bytes", "dusk-jubjub", "ff", @@ -1846,11 +1890,11 @@ dependencies = [ [[package]] name = "dusk-poseidon" -version = "0.40.0" +version = "0.41.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5acb405df5e90ca91e8ce3c5aa48117e27632396665aaf9e8950f2af971acfc9" +checksum = "f66925505adc9db3671ebed59f5f2f196306efbe6ede8de9761e7830af5ddb84" dependencies = [ - "dusk-bls12_381", + "dusk-bls12_381 0.14.1", "dusk-jubjub", "dusk-plonk", "dusk-safe", @@ -1858,13 +1902,11 @@ dependencies = [ [[package]] name = "dusk-rusk" -version = "1.0.2-dev" +version = "1.1.1-alpha.1" dependencies = [ "anyhow", "async-graphql", "async-trait", - "base64 0.22.1", - "blake2b_simd", "blake3", "bs58", "bytecheck", @@ -1873,11 +1915,11 @@ dependencies = [ "dirs", "dusk-bytes", "dusk-consensus 1.0.1", - "dusk-core 1.0.0", - "dusk-node 1.0.1", - "dusk-node-data 1.0.1", - "dusk-vm 1.0.0", - "dusk-wallet-core 1.0.1", + "dusk-core 1.1.0", + "dusk-node 1.1.0", + "dusk-node-data 1.1.0", + "dusk-vm 1.1.0", + "dusk-wallet-core 1.1.0", "ff", "futures", "futures-util", @@ -1894,15 +1936,14 @@ dependencies = [ "reqwest", "rkyv", "rusk-profile 1.0.1", - "rusk-prover 1.0.1", - "rusk-recovery 1.0.2", + "rusk-prover 1.1.0", + "rusk-recovery 1.0.3", "rustc_tools_util", "rustls-pemfile", "semver", "serde", "serde_json", "serde_with", - "sha3", "tempfile", "tokio", "tokio-rustls", @@ -1917,23 +1958,23 @@ dependencies = [ [[package]] name = "dusk-safe" -version = "0.2.1" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b1984b4b78f1f5d862dc482c73ab5f4e26bca829e8d30dfa443131c53933088" +checksum = "2c3954d110d0d0f20555048d7171c2e6dfb54fd9a4220d30cc73dc66a88af42d" dependencies = [ "zeroize", ] [[package]] name = "dusk-vm" -version = "1.0.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c46f00b4fa84beda4e492b25f8e8556d93c942ab40a83bdeccf7458aa2e5965a" +checksum = "0fd93de74b9906727d9d58af528e34650a37b9d7edee6116b4bf24682370cffe" dependencies = [ "blake2b_simd", "blake3", "dusk-bytes", - "dusk-core 1.0.0", + "dusk-core 1.1.0", "dusk-poseidon", "lru", "piecrust", @@ -1942,12 +1983,12 @@ dependencies = [ [[package]] name = "dusk-vm" -version = "1.0.1-alpha.1" +version = "1.1.1-alpha.1" dependencies = [ "blake2b_simd", "blake3", "dusk-bytes", - "dusk-core 1.0.0", + "dusk-core 1.1.0", "dusk-poseidon", "ff", "lru", @@ -1959,15 +2000,15 @@ dependencies = [ [[package]] name = "dusk-wallet-core" -version = "1.0.1" +version = "1.1.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd32198d16b63e52444f138a0f554754eb40cd18e8fe78f968e7c3ea1bb2e461" +checksum = "4533c6298486bf3378dc0cd231613811a8973a037f6d94975f32d6e3a0f07f4f" dependencies = [ "blake3", "bytecheck", "dlmalloc", "dusk-bytes", - "dusk-core 1.0.0", + "dusk-core 1.1.0", "ff", "rand 0.8.5", "rand_chacha 0.3.1", @@ -1978,13 +2019,13 @@ dependencies = [ [[package]] name = "dusk-wallet-core" -version = "1.0.2-alpha.1" +version = "1.1.1-alpha.1" dependencies = [ "blake3", "bytecheck", "dlmalloc", "dusk-bytes", - "dusk-core 1.0.0", + "dusk-core 1.1.0", "ff", "rand 0.8.5", "rand_chacha 0.3.1", @@ -2296,12 +2337,6 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" -[[package]] -name = "fuchsia-cprng" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" - [[package]] name = "funty" version = "2.0.0" @@ -2673,7 +2708,7 @@ name = "host_fn" version = "0.2.0" dependencies = [ "dusk-bytes", - "dusk-core 1.0.0", + "dusk-core 1.1.0", ] [[package]] @@ -3010,9 +3045,9 @@ dependencies = [ [[package]] name = "jubjub-elgamal" -version = "0.1.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c81b13ed2b476df1cd4618fe3a3d3463e521a21731954d47b186f6eaffc0ece6" +checksum = "0c2b02b5859a6ebd729e4b7f9970963d3f3be4f0aebc700d0b2d392b9fd8201c" dependencies = [ "dusk-jubjub", "dusk-plonk", @@ -3020,12 +3055,12 @@ dependencies = [ [[package]] name = "jubjub-schnorr" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c365e50a449633ca6032128be417f8298678c58ebee76dfe1f923ad29a07d646" +checksum = "008885a6d1b73ef18a827ad0faf7cf48d1c5a28d52a9e863b90a421a92799443" dependencies = [ "bytecheck", - "dusk-bls12_381", + "dusk-bls12_381 0.14.1", "dusk-bytes", "dusk-jubjub", "dusk-plonk", @@ -3799,11 +3834,11 @@ dependencies = [ [[package]] name = "phoenix-circuits" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c015f89a28b159853fa2b177991776ac40671a0f6415243f90a442b469a3282" +checksum = "d42a8482526bf7bcb3a6d34ad171a26f495319a520b9eb8a68837d837e3b0cf9" dependencies = [ - "dusk-bls12_381", + "dusk-bls12_381 0.14.1", "dusk-bytes", "dusk-jubjub", "dusk-plonk", @@ -3816,23 +3851,28 @@ dependencies = [ [[package]] name = "phoenix-core" -version = "0.33.1" +version = "0.34.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1880ff8ba7cbb7e82843b98e3759134f5f7c9b0e5ffe9ff73703eced0360467" +checksum = "5b45ed942d9d166f7a45d16630529d68d68cfde7f37191d4646dfbe4b350b9cc" dependencies = [ "aes-gcm", - "bls12_381-bls", + "base64 0.22.1", + "bls12_381-bls 0.4.0", + "bs58", "bytecheck", - "dusk-bls12_381", + "dusk-bls12_381 0.14.1", "dusk-bytes", "dusk-jubjub", "dusk-poseidon", "ff", + "hex", "hkdf", "jubjub-elgamal", "jubjub-schnorr", "rand 0.8.5", "rkyv", + "serde", + "serde_json", "sha2 0.10.8", "subtle", "zeroize", @@ -3840,9 +3880,9 @@ dependencies = [ [[package]] name = "piecrust" -version = "0.27.0" +version = "0.27.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7615b52ada11b2f89751c19fa55ddbfa84c625bd8103528179be514874c32d3b" +checksum = "10c5b83e833e13d1be1a5bfb7183a3edd4dbd786ec3524918c78bdc982a32c7e" dependencies = [ 
"blake3", "bytecheck", @@ -3866,9 +3906,13 @@ version = "0.17.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e681bcea1ea7f82f8b66649da42d1fb7a23ab85f9d0acbf99fca4e560a2f1400" dependencies = [ + "base64 0.22.1", "bytecheck", "dlmalloc", + "hex", "rkyv", + "serde", + "serde_json", ] [[package]] @@ -3978,12 +4022,12 @@ checksum = "cc9c68a3f6da06753e9335d63e27f6b9754dd1920d941135b7ea8224f141adb2" [[package]] name = "poseidon-merkle" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae0f84bdbdbeebfeeadfb7c28c07c14315713b9206a84e4d8ce310827691511e" +checksum = "6316347b2cf759ff0303ad280008ac89a36ed9f970e4cdfd1585d8ba5ed60134" dependencies = [ "bytecheck", - "dusk-bls12_381", + "dusk-bls12_381 0.14.1", "dusk-bytes", "dusk-merkle", "dusk-plonk", @@ -4094,19 +4138,6 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" -[[package]] -name = "rand" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" -dependencies = [ - "fuchsia-cprng", - "libc", - "rand_core 0.3.1", - "rdrand", - "winapi", -] - [[package]] name = "rand" version = "0.7.3" @@ -4151,21 +4182,6 @@ dependencies = [ "rand_core 0.6.4", ] -[[package]] -name = "rand_core" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" -dependencies = [ - "rand_core 0.4.2", -] - -[[package]] -name = "rand_core" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" - [[package]] name = "rand_core" version = "0.5.1" @@ -4234,15 +4250,6 @@ dependencies = [ "crossbeam-utils", ] -[[package]] -name = "rdrand" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" -dependencies = [ - "rand_core 0.3.1", -] - [[package]] name = "redox_syscall" version = "0.5.7" @@ -4320,15 +4327,6 @@ version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" -[[package]] -name = "remove_dir_all" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" -dependencies = [ - "winapi", -] - [[package]] name = "rend" version = "0.3.6" @@ -4493,12 +4491,12 @@ dependencies = [ [[package]] name = "rusk-prover" -version = "1.0.1" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5b66e742b2c4bac238aedf523e026a07393a8c021b704bb60b4030f32c6ff94" +checksum = "469cbbdd11c2fde62fabe5ca710eb274d36aa01941a12625ac1b6c97320568db" dependencies = [ "dusk-bytes", - "dusk-core 1.0.0", + "dusk-core 1.1.0", "dusk-plonk", "hex", "once_cell", @@ -4509,10 +4507,10 @@ dependencies = [ [[package]] name = "rusk-prover" -version = "1.0.2-alpha.1" +version = "1.1.1-alpha.1" dependencies = [ "dusk-bytes", - "dusk-core 1.0.0", + "dusk-core 1.1.0", "dusk-plonk", "hex", "once_cell", @@ -4523,16 +4521,16 @@ dependencies = [ [[package]] name = "rusk-recovery" -version = "1.0.2" +version = "1.0.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "774e04ab0731b731a52858f25d32aa9976d539f120d603723df360aa0c127251" +checksum = "1e42cccdbd038aa1d17c4eb053a4817813adf2c6d68ea392bcc20efee1de78bc" dependencies = [ "bs58", "cargo_toml", "dusk-bytes", - "dusk-core 1.0.0", + "dusk-core 1.1.0", "dusk-plonk", - "dusk-vm 1.0.0", + "dusk-vm 1.1.0", "ff", "flate2", "hex", @@ -4552,14 +4550,14 @@ dependencies = [ [[package]] name = "rusk-recovery" -version = "1.0.3-alpha.1" +version = "1.0.4-alpha.1" dependencies = [ "bs58", "cargo_toml", "dusk-bytes", - "dusk-core 1.0.0", + "dusk-core 1.1.0", "dusk-plonk", - "dusk-vm 1.0.0", + "dusk-vm 1.1.0", "ff", "flate2", "hex", @@ -4570,7 +4568,7 @@ dependencies = [ "serde", "serde_derive", "tar", - "tempdir", + "tempfile", "tokio", "toml", "tracing", @@ -4593,8 +4591,8 @@ dependencies = [ "crossterm", "dirs", "dusk-bytes", - "dusk-core 1.0.0", - "dusk-wallet-core 1.0.1", + "dusk-core 1.1.0", + "dusk-wallet-core 1.1.0", "flume 0.10.14", "futures", "hex", @@ -5248,13 +5246,13 @@ version = "0.8.0" dependencies = [ "criterion", "dusk-bytes", - "dusk-core 1.0.0", - "dusk-vm 1.0.0", - "dusk-wallet-core 1.0.1", + "dusk-core 1.1.0", + "dusk-vm 1.1.0", + "dusk-wallet-core 1.1.0", "ff", "rand 0.8.5", "rkyv", - "rusk-prover 1.0.1", + "rusk-prover 1.1.0", ] [[package]] @@ -5367,16 +5365,6 @@ version = "0.12.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1" -[[package]] -name = "tempdir" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15f2b5fb00ccdf689e0149d1b1b3c03fead81c2b37735d812fa8bddbbf41b6d8" -dependencies = [ - "rand 0.4.6", - "remove_dir_all", -] - [[package]] name = "tempfile" version = "3.12.0" @@ -5743,14 +5731,14 @@ name = "transfer-contract" version = "0.10.1" dependencies = [ "dusk-bytes", - "dusk-core 1.0.0", - "dusk-vm 1.0.0", + "dusk-core 1.1.0", + "dusk-vm 1.1.0", "ff", "rand 0.8.5", "ringbuffer", "rkyv", "rusk-profile 1.0.1", - "rusk-prover 1.0.1", + "rusk-prover 1.1.0", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 933c3097fd..efde38294f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -32,38 +32,38 @@ resolver = "2" # Workspace internal dependencies dusk-consensus = "1.0.1" # dusk-consensus = { version = "1.0.2-alpha.1", path = "./consensus/" } -dusk-core = "1.0.0" -# dusk-core = { version = "1.0.1-alpha.1", path = "./core/" } -dusk-vm = "1.0.0" -# dusk-vm = { version = "1.0.1-alpha.1", path = "./vm/" } -node = { version = "1.0.1", package = "dusk-node" } -# node = { version = "1.0.2-alpha.1", path = "./node/", package = "dusk-node" } -node-data = { version = "1.0.1", package = "dusk-node-data" } -# node-data = { version = "1.0.2-alpha.1", path = "./node-data/", package = "dusk-node-data" } +dusk-core = "1.1.0" +# dusk-core = { version = "1.1.0", path = "./core/" } +dusk-vm = "1.1.0" +# dusk-vm = { version = "1.1.0", path = "./vm/" } +node = { version = "1.1.0", package = "dusk-node" } +# node = { version = "1.1.0", path = "./node/", package = "dusk-node" } +node-data = { version = "1.1.0", package = "dusk-node-data" } +# node-data = { version = "1.1.0", path = "./node-data/", package = "dusk-node-data" } rusk-profile = "1.0.1" -# rusk-profile = { version = "1.0.1", path = "./rusk-profile/" } -rusk-prover = "1.0.1" -# rusk-prover = { version = "1.0.2-alpha.1", path = "./rusk-prover/" } -rusk-recovery = "1.0.2" -# rusk-recovery = { version = "1.0.3-alpha.1", path = "./rusk-recovery/" 
} -wallet-core = { version = "1.0.1", package = "dusk-wallet-core" } -# wallet-core = { version = "1.0.2-alpha.1", path = "./wallet-core/", package = "dusk-wallet-core" } +# rusk-profile = { version = "1.0.2-alpha.1", path = "./rusk-profile/" } +rusk-prover = "1.1.0" +# rusk-prover = { version = "1.1.1-alpha.1", path = "./rusk-prover/" } +rusk-recovery = "1.0.3" +# rusk-recovery = { version = "1.0.4-alpha.1", path = "./rusk-recovery/" } +wallet-core = { version = "1.1.0", package = "dusk-wallet-core" } +# wallet-core = { version = "1.1.1-alpha.1", path = "./wallet-core/", package = "dusk-wallet-core" } # Dusk dependencies outside the workspace -bls12_381-bls = { version = "0.4", default-features = false } -dusk-bls12_381 = { version = "0.13", default-features = false } +bls12_381-bls = { version = "0.5", default-features = false } +dusk-bls12_381 = { version = "0.14", default-features = false } dusk-bytes = "0.1.7" -dusk-jubjub = { version = "0.14.1", default-features = false } +dusk-jubjub = { version = "0.15.0", default-features = false } dusk-merkle = "0.5.3" -dusk-plonk = { version = "0.20.2", default-features = false } -dusk-poseidon = "0.40" -jubjub-schnorr = { version = "0.5", default-features = false } +dusk-plonk = { version = "0.21.0", default-features = false } +dusk-poseidon = "0.41" +jubjub-schnorr = { version = "0.6", default-features = false } kadcast = "0.7" -phoenix-circuits = { version = "0.5", default-features = false } -phoenix-core = { version = "0.33.1", default-features = false } -piecrust = "0.27" +phoenix-circuits = { version = "0.6", default-features = false } +phoenix-core = { version = "0.34.0", default-features = false } +piecrust = "0.27.1" piecrust-uplink = "0.17.3" -poseidon-merkle = "0.7" +poseidon-merkle = "0.8" # External dependencies aes = "0.7.5" @@ -132,7 +132,6 @@ sha3 = "0.10.8" smallvec = "1.13.2" sqlx = "0.8.2" tar = "0.4.42" -tempdir = "0.3.7" tempfile = "3.12" thiserror = "1.0.64" time-util = "0.3.4" diff --git a/Dockerfile b/Dockerfile.ephemeral similarity index 94% rename from Dockerfile rename to Dockerfile.ephemeral index 888ccf2adf..c9ba3f1fc1 100644 --- a/Dockerfile +++ b/Dockerfile.ephemeral @@ -35,9 +35,9 @@ RUN make keys RUN make wasm # Build rusk with default features and include CARGO_FEATURES RUN if [ -n "$CARGO_FEATURES" ]; then \ - cargo build --release --features "$CARGO_FEATURES" -p rusk; \ + cargo build --release --features "$CARGO_FEATURES" -p dusk-rusk; \ else \ - cargo build --release -p rusk; \ + cargo build --release -p dusk-rusk; \ fi # --- Run stage --- diff --git a/Dockerfile.persistent b/Dockerfile.persistent new file mode 100644 index 0000000000..aa71eaffa8 --- /dev/null +++ b/Dockerfile.persistent @@ -0,0 +1,59 @@ +# --- Build stage --- +FROM ubuntu:24.04 AS build-stage + +RUN apt-get update && apt-get install -y unzip curl build-essential openssl libssl-dev pkg-config && rm -rf /var/lib/apt/lists/* + +RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y + +WORKDIR /opt/rusk +ENV RUSK_PROFILE_PATH=/opt/dusk/rusk/ +ENV PATH="$PATH:/root/.cargo/bin" + +RUN apt-get update && apt-get install -y clang && rm -rf /var/lib/apt/lists/* + +# Using this to modify rusk config file before running a node +RUN cargo install toml-cli --version 0.2.3 + +COPY . . 
+ +ARG TARGETPLATFORM +# See also https://github.com/docker/buildx/issues/510 +ENV TARGETPLATFORM=${TARGETPLATFORM:-linux/amd64} + +# Generate keys and compile genesis contracts +RUN make keys +RUN make wasm + +ARG NODE_TYPE="provisioner" + +RUN case "$NODE_TYPE" in \ + "provisioner") cargo build --release -p dusk-rusk ;; \ + "archive") cargo build --release --features archive -p dusk-rusk ;; \ + "prover") cargo build --release --no-default-features --features prover -p dusk-rusk ;; \ + *) echo "Unrecognized node type: $NODE_TYPE. Expected one of 'provisioner', 'archive' and 'prover'"; exit 1 ;; \ + esac + +# --- Run stage --- +FROM ubuntu:24.04 AS run-stage + +RUN apt-get update && apt-get install -y unzip curl net-tools libssl-dev && rm -rf /var/lib/apt/lists/* + +WORKDIR /opt/dusk + +ENV RUSK_PROFILE_PATH=/opt/dusk/rusk +ENV RUSK_RECOVERY_INPUT=/opt/dusk/conf/genesis.toml +ENV RUST_BACKTRACE=full +ENV NETWORK=mainnet + +EXPOSE 9000/udp +EXPOSE 8080/tcp + +# Copy only the necessary files from the build stage +COPY --from=build-stage /opt/rusk/target/release/rusk /opt/dusk/bin/rusk +COPY --from=build-stage /opt/rusk/scripts/persistent-docker-setup/setup.sh /opt/dusk/setup.sh +COPY --from=build-stage /opt/rusk/scripts/persistent-docker-setup/detect_ips.sh /opt/dusk/detect_ips.sh +COPY --from=build-stage /root/.cargo/bin/toml /usr/bin/toml-cli + +RUN chmod +x /opt/dusk/setup.sh /opt/dusk/detect_ips.sh + +CMD [ "./setup.sh" ] diff --git a/README.md b/README.md index 8281dcb00c..66c5ce0d7d 100644 --- a/README.md +++ b/README.md @@ -160,18 +160,20 @@ make wasm for=transfer ## 🐳 Docker support +### Local Ephemeral Node + It's also possible to run a local ephemeral node with Docker. To build the Docker image with archive: ```bash -docker build -t rusk . +docker build -f Dockerfile.ephemeral -t rusk . ``` To build the Docker image **without** archive: ```bash -docker build -t rusk --build-arg CARGO_FEATURES="" . +docker build -f Dockerfile.ephemeral -t rusk --build-arg CARGO_FEATURES="" . ``` To run Rusk inside a Docker container: @@ -182,6 +184,66 @@ docker run -p 9000:9000/udp -p 8080:8080/tcp rusk Port 9000 is used for Kadcast, port 8080 for the HTTP and GraphQL APIs. +### Persistent Node + +To build the Docker image for a provisioner: +```bash +docker build -f Dockerfile.persistent -t rusk --build-arg NODE_TYPE=provisioner . +``` + +To build for an archiver or prover instead, build with NODE_TYPE=archive or NODE_TYPE=prover, +respectively. + +To run: + +```bash +docker run -it \ + -v /path/to/consensus.keys:/opt/dusk/conf/consensus.keys \ + -v /path/to/rusk/profile:/opt/dusk/rusk \ + -e NETWORK= \ + -e DUSK_CONSENSUS_KEYS_PASS= \ + -p 9000:9000/udp \ + -p 8080:8080/tcp \ + rusk +``` + +#### Customizing Configuration + +The configuration used for rusk is based on the template file at `https://raw.githubusercontent.com/dusk-network/node-installer/ac1dd78eb31be4dba1c9c0986f6d6a06b5bd4fcc/conf/mainnet.toml` for mainnet and `https://raw.githubusercontent.com/dusk-network/node-installer/ac1dd78eb31be4dba1c9c0986f6d6a06b5bd4fcc/conf/testnet.toml` for testnet. +As part of the node setup process when the container is started, the IP addresses used for listening in kadcast and, if +configured, http will be detected and automatically configured. + +To customize the configuration, the configuration template file can be copied and modified. The custom configuration template +should be mounted on `/opt/dusk/conf/rusk.template.toml`.
+ +```bash +docker run -it \ + -v /path/to/consensus.keys:/opt/dusk/conf/consensus.keys \ + -v /path/to/rusk/profile:/opt/dusk/rusk \ + -v /path/to/rusk.modified-template.toml:/opt/dusk/conf/rusk.template.toml \ + -e NETWORK= \ + -e DUSK_CONSENSUS_KEYS_PASS= \ + -p 9000:9000/udp \ + -p 8080:8080/tcp \ + rusk +``` + +##### IP Addresses + +When using a custom configuration file, properties that use IP addresses should be set to 'N/A'. For example, if +you want HTTP to be configured: + +```toml +[http] +listen_address = 'N/A' +``` + +This entry should be present in the template configuration file. When the node is starting, the address to be used +will be detected and this configuration will be set to listen at port 8080. + +Likewise, the `kadcast.public_address` and `kadcast.listen_address` properties in the configuration file should be set +to 'N/A'. During node startup, they will be detected and set to use port 9000. + ## License The Rusk software is licensed under the diff --git a/contracts/stake/tests/partial_stake.rs b/contracts/stake/tests/partial_stake.rs index e4759b8679..c45b5c1bfa 100644 --- a/contracts/stake/tests/partial_stake.rs +++ b/contracts/stake/tests/partial_stake.rs @@ -10,7 +10,9 @@ use dusk_core::signatures::bls::{ }; use dusk_core::stake::{Reward, RewardReason, EPOCH, STAKE_CONTRACT}; use dusk_core::transfer::TRANSFER_CONTRACT; -use dusk_vm::{execute, ContractData, Error as VMError, Session, VM}; +use dusk_vm::{ + execute, ContractData, Error as VMError, ExecutionConfig, Session, VM, +}; use rand::rngs::StdRng; use rand::SeedableRng; use wallet_core::transaction::{ @@ -26,6 +28,8 @@ const GENESIS_VALUE: u64 = dusk(1_000_000.0); const STAKE_VALUE: u64 = GENESIS_VALUE / 2; const GENESIS_NONCE: u64 = 0; +const NO_CONFIG: ExecutionConfig = ExecutionConfig::DEFAULT; + #[test] fn stake() -> Result<(), VMError> { // ------ @@ -59,7 +63,7 @@ fn stake() -> Result<(), VMError> { CHAIN_ID, ) .expect("tx creation should pass"); - let receipt = execute(&mut session, &tx, 0, 0, 0)?; + let receipt = execute(&mut session, &tx, &NO_CONFIG)?; // verify 1st stake transaction let gas_spent_1 = receipt.gas_spent; @@ -87,7 +91,7 @@ fn stake() -> Result<(), VMError> { CHAIN_ID, ) .expect("tx creation should pass"); - let receipt = execute(&mut session, &tx, 0, 0, 0)?; + let receipt = execute(&mut session, &tx, &NO_CONFIG)?; // verify 2nd stake transaction let gas_spent_2 = receipt.gas_spent; @@ -121,7 +125,7 @@ fn stake() -> Result<(), VMError> { CHAIN_ID, ) .expect("tx creation should pass"); - let receipt = execute(&mut session, &tx, 0, 0, 0)?; + let receipt = execute(&mut session, &tx, &NO_CONFIG)?; // verify 3rd stake transaction let gas_spent_3 = receipt.gas_spent; @@ -156,7 +160,7 @@ fn stake() -> Result<(), VMError> { CHAIN_ID, ) .expect("tx creation should pass"); - assert!(execute(&mut session, &tx, 0, 0, 0).is_err()); + assert!(execute(&mut session, &tx, &NO_CONFIG).is_err()); Ok(()) } @@ -190,7 +194,7 @@ fn unstake() -> Result<(), VMError> { CHAIN_ID, ) .expect("tx creation should pass"); - let receipt = execute(&mut session, &tx, 0, 0, 0)?; + let receipt = execute(&mut session, &tx, &NO_CONFIG)?; let mut moonlight_balance = GENESIS_VALUE - STAKE_VALUE - receipt.gas_spent; assert_moonlight(&mut session, &moonlight_pk, moonlight_balance, nonce); @@ -212,7 +216,7 @@ fn unstake() -> Result<(), VMError> { CHAIN_ID, ) .expect("tx creation should pass"); - let receipt = execute(&mut session, &tx, 0, 0, 0)?; + let receipt = execute(&mut session, &tx, &NO_CONFIG)?; // verify 1st unstake
transaction let gas_spent_1 = receipt.gas_spent; @@ -242,7 +246,7 @@ fn unstake() -> Result<(), VMError> { CHAIN_ID, ) .expect("tx creation should pass"); - let receipt = execute(&mut session, &tx, 0, 0, 0)?; + let receipt = execute(&mut session, &tx, &NO_CONFIG)?; total_stake = STAKE_VALUE; let mut locked = unstake_1 / 10; assert_stake(&mut session, &stake_pk, total_stake, locked, 0); @@ -268,7 +272,7 @@ fn unstake() -> Result<(), VMError> { CHAIN_ID, ) .expect("tx creation should pass"); - let receipt = execute(&mut session, &tx, 0, 0, 0)?; + let receipt = execute(&mut session, &tx, &NO_CONFIG)?; // verify 2nd unstake transaction let gas_spent_2 = receipt.gas_spent; @@ -306,7 +310,7 @@ fn unstake() -> Result<(), VMError> { CHAIN_ID, ) .expect("tx creation should pass"); - let receipt = execute(&mut session, &tx, 0, 0, 0)?; + let receipt = execute(&mut session, &tx, &NO_CONFIG)?; // verify 3rd unstake transaction let gas_spent_3 = receipt.gas_spent; @@ -349,7 +353,7 @@ fn withdraw_reward() -> Result<(), VMError> { CHAIN_ID, ) .expect("tx creation should pass"); - let receipt = execute(&mut session, &tx, 0, 0, 0)?; + let receipt = execute(&mut session, &tx, &NO_CONFIG)?; let mut moonlight_balance = GENESIS_VALUE - STAKE_VALUE - receipt.gas_spent; assert_moonlight(&mut session, &moonlight_pk, moonlight_balance, nonce); // add a reward to the staked key @@ -374,7 +378,7 @@ fn withdraw_reward() -> Result<(), VMError> { CHAIN_ID, ) .expect("tx creation should pass"); - let receipt = execute(&mut session, &tx, 0, 0, 0)?; + let receipt = execute(&mut session, &tx, &NO_CONFIG)?; // verify 1st reward withdrawal let gas_spent_1 = receipt.gas_spent; @@ -409,7 +413,7 @@ fn withdraw_reward() -> Result<(), VMError> { CHAIN_ID, ) .expect("tx creation should pass"); - let receipt = execute(&mut session, &tx, 0, 0, 0)?; + let receipt = execute(&mut session, &tx, &NO_CONFIG)?; // verify 1st reward withdrawal let gas_spent_2 = receipt.gas_spent; diff --git a/contracts/stake/tests/stake.rs b/contracts/stake/tests/stake.rs index 18ba44bc02..2e5798be1e 100644 --- a/contracts/stake/tests/stake.rs +++ b/contracts/stake/tests/stake.rs @@ -19,7 +19,7 @@ use dusk_core::transfer::withdraw::{ Withdraw, WithdrawReceiver, WithdrawReplayToken, }; use dusk_core::{dusk, JubJubScalar}; -use dusk_vm::{execute, VM}; +use dusk_vm::{execute, ExecutionConfig, VM}; use ff::Field; use rand::rngs::StdRng; use rand::SeedableRng; @@ -34,6 +34,8 @@ use crate::common::utils::*; const GENESIS_VALUE: u64 = dusk(1_000_000.0); const INITIAL_STAKE: u64 = GENESIS_VALUE / 2; +const NO_CONFIG: ExecutionConfig = ExecutionConfig::DEFAULT; + #[test] fn stake_withdraw_unstake() { // ------ @@ -85,7 +87,7 @@ fn stake_withdraw_unstake() { contract_call, ); - let receipt = execute(&mut session, &tx, 0, 0, 0) + let receipt = execute(&mut session, &tx, &NO_CONFIG) .expect("Executing TX should succeed"); let gas_spent = receipt.gas_spent; @@ -181,7 +183,7 @@ fn stake_withdraw_unstake() { .session(base, CHAIN_ID, 2) .expect("Instantiating new session should succeed"); - let receipt = execute(&mut session, &tx, 0, 0, 0) + let receipt = execute(&mut session, &tx, &NO_CONFIG) .expect("Executing TX should succeed"); let gas_spent = receipt.gas_spent; @@ -278,7 +280,7 @@ fn stake_withdraw_unstake() { .session(base, CHAIN_ID, 3) .expect("Instantiating new session should succeed"); - let receipt = execute(&mut session, &tx, 0, 0, 0) + let receipt = execute(&mut session, &tx, &NO_CONFIG) .expect("Executing TX should succeed"); update_root(&mut 
session).expect("Updating the root should succeed"); diff --git a/contracts/transfer/tests/moonlight.rs b/contracts/transfer/tests/moonlight.rs index b2ab55c4fa..6398913541 100644 --- a/contracts/transfer/tests/moonlight.rs +++ b/contracts/transfer/tests/moonlight.rs @@ -21,7 +21,7 @@ use dusk_core::transfer::{ ContractToAccount, ContractToContract, Transaction, TRANSFER_CONTRACT, }; use dusk_core::{dusk, JubJubScalar, LUX}; -use dusk_vm::{execute, ContractData, Session, VM}; +use dusk_vm::{execute, ContractData, ExecutionConfig, Session, VM}; use ff::Field; use rand::rngs::StdRng; use rand::SeedableRng; @@ -52,6 +52,8 @@ const BOB_ID: ContractId = { const OWNER: [u8; 32] = [0; 32]; const CHAIN_ID: u8 = 0xFA; +const NO_CONFIG: ExecutionConfig = ExecutionConfig::DEFAULT; + /// Instantiate the virtual machine with the transfer contract deployed, with a /// moonlight account owning the `MOONLIGHT_GENESIS_VALUE` and alice and bob /// contracts deployed with alice contract owning `ALICE_GENESIS_VALUE`. @@ -183,7 +185,7 @@ fn transfer() { ) .expect("Creating moonlight transaction should succeed"); - let gas_spent = execute(session, &transaction, 0, 0, 0) + let gas_spent = execute(session, &transaction, &NO_CONFIG) .expect("Transaction should succeed") .gas_spent; @@ -248,7 +250,7 @@ fn transfer_with_refund() { .into(); let max_gas = GAS_LIMIT * LUX; - let gas_spent = execute(session, &transaction, 0, 0, 0) + let gas_spent = execute(session, &transaction, &NO_CONFIG) .expect("Transaction should succeed") .gas_spent; let gas_refund = max_gas - gas_spent; @@ -313,7 +315,7 @@ fn transfer_gas_fails() { ) .expect("Creating moonlight transaction should succeed"); - let result = execute(session, &transaction, 0, 0, 0); + let result = execute(session, &transaction, &NO_CONFIG); assert!( result.is_err(), @@ -365,7 +367,7 @@ fn alice_ping() { ) .expect("Creating moonlight transaction should succeed"); - let gas_spent = execute(session, &transaction, 0, 0, 0) + let gas_spent = execute(session, &transaction, &NO_CONFIG) .expect("Transaction should succeed") .gas_spent; @@ -446,7 +448,7 @@ fn convert_to_phoenix() { ) .expect("Creating moonlight transaction should succeed"); - let gas_spent = execute(&mut session, &tx, 0, 0, 0) + let gas_spent = execute(&mut session, &tx, &NO_CONFIG) .expect("Executing transaction should succeed") .gas_spent; update_root(session).expect("Updating the root should succeed"); @@ -566,7 +568,7 @@ fn convert_to_moonlight_fails() { ) .expect("Creating moonlight transaction should succeed"); - let receipt = execute(&mut session, &tx, 0, 0, 0) + let receipt = execute(&mut session, &tx, &NO_CONFIG) .expect("Executing TX should succeed"); // check that the transaction execution panicked with the correct message @@ -672,7 +674,7 @@ fn convert_wrong_contract_targeted() { ) .expect("Creating moonlight transaction should succeed"); - let receipt = execute(&mut session, &tx, 0, 0, 0) + let receipt = execute(&mut session, &tx, &NO_CONFIG) .expect("Executing transaction should succeed"); update_root(session).expect("Updating the root should succeed"); @@ -744,7 +746,7 @@ fn contract_to_contract() { ) .expect("Creating moonlight transaction should succeed"); - let receipt = execute(session, &transaction, 0, 0, 0) + let receipt = execute(session, &transaction, &NO_CONFIG) .expect("Transaction should succeed"); let gas_spent = receipt.gas_spent; @@ -811,7 +813,7 @@ fn contract_to_account() { ) .expect("Creating moonlight transaction should succeed"); - let receipt = execute(session, &transaction, 
0, 0, 0) + let receipt = execute(session, &transaction, &NO_CONFIG) .expect("Transaction should succeed"); let gas_spent = receipt.gas_spent; @@ -875,7 +877,7 @@ fn contract_to_account_insufficient_funds() { ) .expect("Creating moonlight transaction should succeed"); - let receipt = execute(session, &transaction, 0, 0, 0) + let receipt = execute(session, &transaction, &NO_CONFIG) .expect("Transaction should succeed"); let gas_spent = receipt.gas_spent; @@ -946,7 +948,7 @@ fn contract_to_account_direct_call() { ) .expect("Creating moonlight transaction should succeed"); - let receipt = execute(session, &transaction, 0, 0, 0) + let receipt = execute(session, &transaction, &NO_CONFIG) .expect("Transaction should succeed"); let gas_spent = receipt.gas_spent; diff --git a/contracts/transfer/tests/phoenix.rs b/contracts/transfer/tests/phoenix.rs index e481c57164..3e2b187f88 100644 --- a/contracts/transfer/tests/phoenix.rs +++ b/contracts/transfer/tests/phoenix.rs @@ -27,7 +27,9 @@ use dusk_core::transfer::{ ContractToAccount, ContractToContract, Transaction, TRANSFER_CONTRACT, }; use dusk_core::{BlsScalar, JubJubScalar, LUX}; -use dusk_vm::{execute, ContractData, Error as VMError, Session, VM}; +use dusk_vm::{ + execute, ContractData, Error as VMError, ExecutionConfig, Session, VM, +}; use ff::Field; use rand::rngs::StdRng; use rand::{CryptoRng, RngCore, SeedableRng}; @@ -59,6 +61,8 @@ const BOB_ID: ContractId = { const OWNER: [u8; 32] = [0; 32]; const CHAIN_ID: u8 = 0xFA; +const NO_CONFIG: ExecutionConfig = ExecutionConfig::DEFAULT; + /// Instantiate the virtual machine with the transfer contract deployed, and the /// notes tree populated with `N` notes, each carrying `PHOENIX_GENESIS_VALUE / /// N`, all owned by the given public key, and alice and bob contracts deployed @@ -252,7 +256,7 @@ fn transfer_1_2() { contract_call, ); - let gas_spent = execute(session, &tx, 0, 0, 0) + let gas_spent = execute(session, &tx, &NO_CONFIG) .expect("Executing TX should succeed") .gas_spent; update_root(session).expect("Updating the root should succeed"); @@ -359,7 +363,7 @@ fn transfer_2_2() { contract_call, ); - let gas_spent = execute(session, &tx, 0, 0, 0) + let gas_spent = execute(session, &tx, &NO_CONFIG) .expect("Executing TX should succeed") .gas_spent; update_root(session).expect("Updating the root should succeed"); @@ -467,7 +471,7 @@ fn transfer_3_2() { contract_call, ); - let gas_spent = execute(session, &tx, 0, 0, 0) + let gas_spent = execute(session, &tx, &NO_CONFIG) .expect("Executing TX should succeed") .gas_spent; update_root(session).expect("Updating the root should succeed"); @@ -575,7 +579,7 @@ fn transfer_4_2() { contract_call, ); - let gas_spent = execute(session, &tx, 0, 0, 0) + let gas_spent = execute(session, &tx, &NO_CONFIG) .expect("Executing TX should succeed") .gas_spent; update_root(session).expect("Updating the root should succeed"); @@ -679,7 +683,7 @@ fn transfer_gas_fails() { let total_num_notes_before_tx = num_notes(session).expect("Getting num_notes should succeed"); - let result = execute(session, &tx, 0, 0, 0); + let result = execute(session, &tx, &NO_CONFIG); assert!( result.is_err(), @@ -750,7 +754,7 @@ fn alice_ping() { contract_call, ); - let gas_spent = execute(session, &tx, 0, 0, 0) + let gas_spent = execute(session, &tx, &NO_CONFIG) .expect("Executing TX should succeed") .gas_spent; update_root(session).expect("Updating the root should succeed"); @@ -820,7 +824,7 @@ fn contract_deposit() { contract_call, ); - let gas_spent = execute(session, &tx, 0, 0, 0) + let 
gas_spent = execute(session, &tx, &NO_CONFIG) .expect("Executing TX should succeed") .gas_spent; update_root(session).expect("Updating the root should succeed"); @@ -928,7 +932,7 @@ fn contract_withdraw() { contract_call, ); - let gas_spent = execute(session, &tx, 0, 0, 0) + let gas_spent = execute(session, &tx, &NO_CONFIG) .expect("Executing TX should succeed") .gas_spent; update_root(session).expect("Updating the root should succeed"); @@ -1055,7 +1059,7 @@ fn convert_to_phoenix_fails() { ); let receipt = - execute(session, &tx, 0, 0, 0).expect("Executing TX should succeed"); + execute(session, &tx, &NO_CONFIG).expect("Executing TX should succeed"); // check that the transaction execution panicked with the correct message assert!(receipt.data.is_err()); @@ -1175,7 +1179,7 @@ fn convert_to_moonlight() { Some(contract_call), ); - let gas_spent = execute(session, &tx, 0, 0, 0) + let gas_spent = execute(session, &tx, &NO_CONFIG) .expect("Executing TX should succeed") .gas_spent; update_root(session).expect("Updating the root should succeed"); @@ -1285,7 +1289,7 @@ fn convert_wrong_contract_targeted() { Some(contract_call), ); - let receipt = execute(&mut session, &tx, 0, 0, 0) + let receipt = execute(&mut session, &tx, &NO_CONFIG) .expect("Executing transaction should succeed"); update_root(session).expect("Updating the root should succeed"); @@ -1377,7 +1381,7 @@ fn contract_to_contract() { ); let receipt = - execute(session, &tx, 0, 0, 0).expect("Transaction should succeed"); + execute(session, &tx, &NO_CONFIG).expect("Transaction should succeed"); let gas_spent = receipt.gas_spent; println!("CONTRACT TO CONTRACT: {gas_spent} gas"); @@ -1471,7 +1475,7 @@ fn contract_to_account() { ); let receipt = - execute(session, &tx, 0, 0, 0).expect("Transaction should succeed"); + execute(session, &tx, &NO_CONFIG).expect("Transaction should succeed"); let gas_spent = receipt.gas_spent; println!("CONTRACT TO ACCOUNT: {gas_spent} gas"); diff --git a/core/CHANGELOG.md b/core/CHANGELOG.md index c35e0f3b3d..0ca978bb34 100644 --- a/core/CHANGELOG.md +++ b/core/CHANGELOG.md @@ -7,7 +7,19 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] -[1.0.0] - 2025-01-23 +## [1.1.0] - 2025-02-14 + +### Changed + +- Change `dusk_core::transfer::moonlight::Transaction::data` fn visibility to public + +### Added + +- Add `METADATA::PUBLIC_SENDER` [#3341] +- Add `abi::public_sender` host fn [#3341] +- Add serde feature for event serialization [#2773] + +## [1.0.0] - 2025-01-23 ### Changed @@ -22,7 +34,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 [#3405]: https://github.com/dusk-network/rusk/issues/3405 +[#3341]: https://github.com/dusk-network/rusk/issues/3341 +[#2773]: https://github.com/dusk-network/rusk/issues/2773 -[Unreleased]: https://github.com/dusk-network/rusk/compare/dusk-core-1.0.0...HEAD +[Unreleased]: https://github.com/dusk-network/rusk/compare/dusk-core-1.1.0...HEAD +[1.1.0]: https://github.com/dusk-network/rusk/compare/dusk-core-1.0.0...dusk-core-1.1.0 [1.0.0]: https://github.com/dusk-network/rusk/compare/dusk-core-0.1.0...dusk-core-1.0.0 [0.1.0]: https://github.com/dusk-network/rusk/tree/dusk-core-0.1.0 diff --git a/core/Cargo.toml b/core/Cargo.toml index 95f367e1bf..8a01611cc2 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "dusk-core" -version = "1.0.1-alpha.1" +version = "1.1.1-alpha.1" edition = "2021" description = "Types used for interacting with Dusk's transfer and stake 
contracts." @@ -32,8 +32,13 @@ ark-bn254 = { workspace = true, features = ["curve"], optional = true } ark-relations = { workspace = true, optional = true } ark-serialize = { workspace = true, optional = true } +# serde support dependencies +serde = { workspace = true, features = ["derive"], optional = true } +hex = { workspace = true, optional = true } + [dev-dependencies] rand = { workspace = true, features = ["std", "std_rng"] } +serde_json = { workspace = true } [features] parallel = [ @@ -67,3 +72,11 @@ groth16 = [ # Enables std feature for dusk-plonk std = ["dusk-plonk/std"] + +serde = [ + "dep:serde", + "hex", + "piecrust-uplink/serde", + "bls12_381-bls/serde", + "phoenix-core/serde", +] diff --git a/core/Makefile b/core/Makefile index c226c0fc73..c466e9cd91 100644 --- a/core/Makefile +++ b/core/Makefile @@ -5,11 +5,11 @@ help: ## Display this help screen @grep -h -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-15s\033[0m %s\n", $$1, $$2}' test: - cargo test --release --features zk + cargo test --release --features zk,serde cargo test --release --no-run clippy: ## Run clippy - @cargo clippy --release -- -D warnings + @cargo clippy --release --features serde -- -D warnings @cargo clippy --no-default-features --release -- -D warnings doc: ## Run doc gen diff --git a/core/src/abi.rs b/core/src/abi.rs index 94a5232d4c..39aece8dbb 100644 --- a/core/src/abi.rs +++ b/core/src/abi.rs @@ -22,6 +22,8 @@ impl Metadata { pub const CHAIN_ID: &'static str = "chain_id"; /// The current block-height. pub const BLOCK_HEIGHT: &'static str = "block_height"; + /// The sender of the transaction, if the transaction is public. + pub const PUBLIC_SENDER: &'static str = "public_sender"; } /// Enum storing the available host-queries. @@ -156,6 +158,18 @@ pub(crate) mod host_queries { meta_data(Metadata::BLOCK_HEIGHT).unwrap() } + /// Get the public sender of the ongoing tx. Returns `None` if the + /// transaction is shielded. + /// + /// # Panics + /// Panics if the chain doesn't store a `Option` + /// `PUBLIC_SENDER` in the metadata. + #[must_use] + pub fn public_sender() -> Option { + meta_data(Metadata::PUBLIC_SENDER) + .expect("moonlight sender metadata to be set") + } + /// Query owner of a given contract. /// Returns none if contract is not found. /// diff --git a/core/src/lib.rs b/core/src/lib.rs index d458155328..0e2c327ec1 100644 --- a/core/src/lib.rs +++ b/core/src/lib.rs @@ -28,6 +28,9 @@ pub use error::Error; mod dusk; pub use dusk::{dusk, from_dusk, Dusk, LUX}; +#[cfg(feature = "serde")] +mod serde_support; + // elliptic curve types pub use dusk_bls12_381::BlsScalar; pub use dusk_jubjub::{ @@ -135,3 +138,10 @@ fn read_arr(buf: &mut &[u8]) -> Result<[u8; N], BytesError> { *buf = &buf[N..]; Ok(a) } + +#[cfg(test)] +mod tests { + // the `unused_crate_dependencies` lint complains for dev-dependencies that + // are only used in integration tests, so adding this work-around here + use serde_json as _; +} diff --git a/core/src/serde_support.rs b/core/src/serde_support.rs new file mode 100644 index 0000000000..646f479beb --- /dev/null +++ b/core/src/serde_support.rs @@ -0,0 +1,439 @@ +// This Source Code Form is subject to the terms of the Mozilla Public +// License, v. 2.0. If a copy of the MPL was not distributed with this +// file, You can obtain one at http://mozilla.org/MPL/2.0/. +// +// Copyright (c) DUSK NETWORK. All rights reserved. 
+ +use alloc::format; +use alloc::string::String; +use alloc::vec::Vec; + +use serde::de::{Error as SerdeError, Unexpected}; +use serde::ser::SerializeStruct; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; + +use crate::abi::ContractId; +use crate::signatures::bls::PublicKey as AccountPublicKey; +use crate::stake::{Reward, RewardReason, SlashEvent, StakeEvent, StakeKeys}; +use crate::transfer::phoenix::Note; +use crate::transfer::withdraw::WithdrawReceiver; +use crate::transfer::{ + ContractToAccountEvent, ContractToContractEvent, ConvertEvent, + DepositEvent, MoonlightTransactionEvent, PhoenixTransactionEvent, + WithdrawEvent, +}; +use crate::BlsScalar; + +// To serialize and deserialize u64s as big ints: +#[derive(Debug)] +struct Bigint(u64); + +impl Serialize for Bigint { + fn serialize<S: Serializer>( + &self, + serializer: S, + ) -> Result<S::Ok, S::Error> { + let s: String = format!("{}", self.0); + s.serialize(serializer) + } +} + +impl<'de> Deserialize<'de> for Bigint { + fn deserialize<D: Deserializer<'de>>( + deserializer: D, + ) -> Result<Self, D::Error> { + let s = String::deserialize(deserializer)?; + if s.is_empty() { + return Err(SerdeError::invalid_value( + Unexpected::Str(&s), + &"a non-empty string", + )); + } + let parsed_number = s.parse::<u64>().map_err(|e| { + SerdeError::custom(format!("failed to deserialize u64: {e}")) + })?; + Ok(Self(parsed_number)) + } +} + +impl Serialize for StakeEvent { + fn serialize<S: Serializer>( + &self, + serializer: S, + ) -> Result<S::Ok, S::Error> { + let mut ser_struct = serializer.serialize_struct("StakeEvent", 3)?; + ser_struct.serialize_field("keys", &self.keys)?; + ser_struct.serialize_field("value", &Bigint(self.value))?; + ser_struct.serialize_field("locked", &Bigint(self.locked))?; + ser_struct.end() + } +} + +impl<'de> Deserialize<'de> for StakeEvent { + fn deserialize<D: Deserializer<'de>>( + deserializer: D, + ) -> Result<Self, D::Error> { + #[derive(Deserialize)] + struct Intermediate { + keys: StakeKeys, + value: Bigint, + locked: Bigint, + } + let intermediate_event = Intermediate::deserialize(deserializer)?; + Ok(StakeEvent { + keys: intermediate_event.keys, + value: intermediate_event.value.0, + locked: intermediate_event.locked.0, + }) + } +} + +impl Serialize for SlashEvent { + fn serialize<S: Serializer>( + &self, + serializer: S, + ) -> Result<S::Ok, S::Error> { + let mut ser_struct = serializer.serialize_struct("SlashEvent", 3)?; + ser_struct.serialize_field("account", &self.account)?; + ser_struct.serialize_field("value", &Bigint(self.value))?; + ser_struct.serialize_field( + "next_eligibility", + &Bigint(self.next_eligibility), + )?; + ser_struct.end() + } +} + +impl<'de> Deserialize<'de> for SlashEvent { + fn deserialize<D: Deserializer<'de>>( + deserializer: D, + ) -> Result<Self, D::Error> { + #[derive(Deserialize)] + struct Intermediate { + pub account: AccountPublicKey, + pub value: Bigint, + pub next_eligibility: Bigint, + } + let intermediate_event = Intermediate::deserialize(deserializer)?; + Ok(SlashEvent { + account: intermediate_event.account, + value: intermediate_event.value.0, + next_eligibility: intermediate_event.next_eligibility.0, + }) + } +} + +impl Serialize for Reward { + fn serialize<S: Serializer>( + &self, + serializer: S, + ) -> Result<S::Ok, S::Error> { + let mut ser_struct = serializer.serialize_struct("Reward", 3)?; + ser_struct.serialize_field("account", &self.account)?; + ser_struct.serialize_field("value", &Bigint(self.value))?; + ser_struct.serialize_field("reason", &self.reason)?; + ser_struct.end() + } +} + +impl<'de> Deserialize<'de> for Reward { + fn deserialize<D: Deserializer<'de>>( + deserializer: D, + ) -> Result<Self, D::Error> { + #[derive(Deserialize)] + struct Intermediate { + account: AccountPublicKey, + value: Bigint, +
reason: RewardReason, + } + let intermediate_reward = Intermediate::deserialize(deserializer)?; + Ok(Reward { + account: intermediate_reward.account, + value: intermediate_reward.value.0, + reason: intermediate_reward.reason, + }) + } +} + +impl Serialize for WithdrawEvent { + fn serialize<S: Serializer>( + &self, + serializer: S, + ) -> Result<S::Ok, S::Error> { + let mut ser_struct = serializer.serialize_struct("WithdrawEvent", 3)?; + ser_struct.serialize_field("sender", &self.sender)?; + ser_struct.serialize_field("receiver", &self.receiver)?; + ser_struct.serialize_field("value", &Bigint(self.value))?; + ser_struct.end() + } +} + +impl<'de> Deserialize<'de> for WithdrawEvent { + fn deserialize<D: Deserializer<'de>>( + deserializer: D, + ) -> Result<Self, D::Error> { + #[derive(Deserialize)] + struct Intermediate { + pub sender: ContractId, + pub receiver: WithdrawReceiver, + pub value: Bigint, + } + let intermediate_event = Intermediate::deserialize(deserializer)?; + Ok(WithdrawEvent { + sender: intermediate_event.sender, + receiver: intermediate_event.receiver, + value: intermediate_event.value.0, + }) + } +} + +impl Serialize for ConvertEvent { + fn serialize<S: Serializer>( + &self, + serializer: S, + ) -> Result<S::Ok, S::Error> { + let mut ser_struct = serializer.serialize_struct("ConvertEvent", 3)?; + ser_struct.serialize_field("sender", &self.sender)?; + ser_struct.serialize_field("receiver", &self.receiver)?; + ser_struct.serialize_field("value", &Bigint(self.value))?; + ser_struct.end() + } +} + +impl<'de> Deserialize<'de> for ConvertEvent { + fn deserialize<D: Deserializer<'de>>( + deserializer: D, + ) -> Result<Self, D::Error> { + #[derive(Deserialize)] + struct Intermediate { + sender: Option<AccountPublicKey>, + receiver: WithdrawReceiver, + value: Bigint, + } + let intermediate_event = Intermediate::deserialize(deserializer)?; + Ok(ConvertEvent { + sender: intermediate_event.sender, + receiver: intermediate_event.receiver, + value: intermediate_event.value.0, + }) + } +} + +impl Serialize for DepositEvent { + fn serialize<S: Serializer>( + &self, + serializer: S, + ) -> Result<S::Ok, S::Error> { + let mut ser_struct = serializer.serialize_struct("DepositEvent", 3)?; + ser_struct.serialize_field("sender", &self.sender)?; + ser_struct.serialize_field("receiver", &self.receiver)?; + ser_struct.serialize_field("value", &Bigint(self.value))?; + ser_struct.end() + } +} + +impl<'de> Deserialize<'de> for DepositEvent { + fn deserialize<D: Deserializer<'de>>( + deserializer: D, + ) -> Result<Self, D::Error> { + #[derive(Deserialize)] + struct Intermediate { + sender: Option<AccountPublicKey>, + receiver: ContractId, + value: Bigint, + } + let intermediate_event = Intermediate::deserialize(deserializer)?; + Ok(DepositEvent { + sender: intermediate_event.sender, + receiver: intermediate_event.receiver, + value: intermediate_event.value.0, + }) + } +} + +impl Serialize for ContractToContractEvent { + fn serialize<S: Serializer>( + &self, + serializer: S, + ) -> Result<S::Ok, S::Error> { + let mut ser_struct = + serializer.serialize_struct("ContractToContractEvent", 3)?; + ser_struct.serialize_field("sender", &self.sender)?; + ser_struct.serialize_field("receiver", &self.receiver)?; + ser_struct.serialize_field("value", &Bigint(self.value))?; + ser_struct.end() + } +} + +impl<'de> Deserialize<'de> for ContractToContractEvent { + fn deserialize<D: Deserializer<'de>>( + deserializer: D, + ) -> Result<Self, D::Error> { + #[derive(Deserialize)] + struct Intermediate { + sender: ContractId, + receiver: ContractId, + value: Bigint, + } + let intermediate_event = Intermediate::deserialize(deserializer)?; + Ok(ContractToContractEvent { + sender: intermediate_event.sender, + receiver: intermediate_event.receiver, + value: intermediate_event.value.0, + }) + } +} + +impl Serialize for
ContractToAccountEvent { + fn serialize<S: Serializer>( + &self, + serializer: S, + ) -> Result<S::Ok, S::Error> { + let mut ser_struct = + serializer.serialize_struct("ContractToAccountEvent", 3)?; + ser_struct.serialize_field("sender", &self.sender)?; + ser_struct.serialize_field("receiver", &self.receiver)?; + ser_struct.serialize_field("value", &Bigint(self.value))?; + ser_struct.end() + } +} + +impl<'de> Deserialize<'de> for ContractToAccountEvent { + fn deserialize<D: Deserializer<'de>>( + deserializer: D, + ) -> Result<Self, D::Error> { + #[derive(Deserialize)] + struct Intermediate { + sender: ContractId, + receiver: AccountPublicKey, + value: Bigint, + } + let intermediate_event = Intermediate::deserialize(deserializer)?; + Ok(ContractToAccountEvent { + sender: intermediate_event.sender, + receiver: intermediate_event.receiver, + value: intermediate_event.value.0, + }) + } +} + +impl Serialize for PhoenixTransactionEvent { + fn serialize<S: Serializer>( + &self, + serializer: S, + ) -> Result<S::Ok, S::Error> { + let mut ser_struct = + serializer.serialize_struct("PhoenixTransactionEvent", 5)?; + ser_struct.serialize_field("nullifiers", &self.nullifiers)?; + ser_struct.serialize_field("notes", &self.notes)?; + ser_struct.serialize_field("memo", &hex::encode(&self.memo))?; + ser_struct.serialize_field("gas_spent", &Bigint(self.gas_spent))?; + ser_struct.serialize_field("refund_note", &self.refund_note)?; + ser_struct.end() + } +} + +impl<'de> Deserialize<'de> for PhoenixTransactionEvent { + fn deserialize<D: Deserializer<'de>>( + deserializer: D, + ) -> Result<Self, D::Error> { + #[derive(Deserialize)] + struct Intermediate { + pub nullifiers: Vec<BlsScalar>, + pub notes: Vec<Note>, + pub memo: String, + pub gas_spent: Bigint, + pub refund_note: Option<Note>, + } + let intermediate_event = Intermediate::deserialize(deserializer)?; + let memo = + hex::decode(intermediate_event.memo).map_err(SerdeError::custom)?; + Ok(PhoenixTransactionEvent { + nullifiers: intermediate_event.nullifiers, + notes: intermediate_event.notes, + memo, + gas_spent: intermediate_event.gas_spent.0, + refund_note: intermediate_event.refund_note, + }) + } +} + +impl Serialize for MoonlightTransactionEvent { + fn serialize<S: Serializer>( + &self, + serializer: S, + ) -> Result<S::Ok, S::Error> { + let mut ser_struct = + serializer.serialize_struct("MoonlightTransactionEvent", 6)?; + let refund_info = + self.refund_info.map(|(pk, number)| (pk, Bigint(number))); + ser_struct.serialize_field("sender", &self.sender)?; + ser_struct.serialize_field("receiver", &self.receiver)?; + ser_struct.serialize_field("value", &Bigint(self.value))?; + ser_struct.serialize_field("memo", &hex::encode(&self.memo))?; + ser_struct.serialize_field("gas_spent", &Bigint(self.gas_spent))?; + ser_struct.serialize_field("refund_info", &refund_info)?; + ser_struct.end() + } +} + +impl<'de> Deserialize<'de> for MoonlightTransactionEvent { + fn deserialize<D: Deserializer<'de>>( + deserializer: D, + ) -> Result<Self, D::Error> { + #[derive(Deserialize)] + struct Intermediate { + sender: AccountPublicKey, + receiver: Option<AccountPublicKey>, + value: Bigint, + memo: String, + gas_spent: Bigint, + refund_info: Option<(AccountPublicKey, Bigint)>, + } + let intermediate_event = Intermediate::deserialize(deserializer)?; + let memo = + hex::decode(intermediate_event.memo).map_err(SerdeError::custom)?; + let refund_info = intermediate_event + .refund_info + .map(|(pk, bigint)| (pk, bigint.0)); + Ok(MoonlightTransactionEvent { + sender: intermediate_event.sender, + receiver: intermediate_event.receiver, + value: intermediate_event.value.0, + memo, + gas_spent: intermediate_event.gas_spent.0, + refund_info, + }) + } +} + +#[cfg(test)] +mod tests { + use rand::rngs::StdRng; + use
rand::{RngCore, SeedableRng}; + + use super::*; + + #[test] + fn serde_bigint() { + let mut rng = StdRng::seed_from_u64(42); + let n = Bigint(rng.next_u64()); + let ser = serde_json::to_string(&n).unwrap(); + let deser: Bigint = serde_json::from_str(&ser).unwrap(); + assert_eq!(n.0, deser.0); + } + + #[test] + fn serde_bigint_max() { + let n = Bigint(u64::MAX); + let ser = serde_json::to_string(&n).unwrap(); + let deser: Bigint = serde_json::from_str(&ser).unwrap(); + assert_eq!(n.0, deser.0); + } + + #[test] + fn serde_bigint_empty() { + let deser: Result = serde_json::from_str("\"\""); + assert!(deser.is_err()); + } +} diff --git a/core/src/stake.rs b/core/src/stake.rs index efb3385f5a..d18a461b8d 100644 --- a/core/src/stake.rs +++ b/core/src/stake.rs @@ -358,7 +358,7 @@ impl Withdraw { } /// Event emitted after a stake contract operation is performed. -#[derive(Debug, Clone, Archive, Deserialize, Serialize)] +#[derive(Debug, Clone, Archive, Deserialize, Serialize, PartialEq)] #[archive_attr(derive(CheckBytes))] pub struct StakeEvent { /// Keys associated to the event. @@ -401,7 +401,7 @@ impl StakeEvent { } /// Event emitted after a slash operation is performed. -#[derive(Debug, Clone, Archive, Deserialize, Serialize)] +#[derive(Debug, Clone, Archive, Deserialize, Serialize, PartialEq)] #[archive_attr(derive(CheckBytes))] pub struct SlashEvent { /// Account slashed. @@ -442,6 +442,7 @@ pub struct StakeData { #[derive( Debug, Default, Clone, Copy, PartialEq, Eq, Archive, Deserialize, Serialize, )] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] #[archive_attr(derive(CheckBytes))] pub struct StakeKeys { /// Key used for consensus operations, such as voting or producing blocks. @@ -488,6 +489,7 @@ impl StakeKeys { #[derive( Debug, Clone, Copy, PartialEq, Eq, Archive, Deserialize, Serialize, )] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] #[archive_attr(derive(CheckBytes))] pub enum StakeFundOwner { /// Represents an account-based owner identified by a BLS public key. @@ -673,6 +675,7 @@ pub struct Reward { /// The reason that a reward is issued. #[derive(Debug, Clone, PartialEq, Eq, Archive, Serialize, Deserialize)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] #[archive_attr(derive(CheckBytes))] pub enum RewardReason { /// The fixed amount awarded to a generator. diff --git a/core/src/transfer.rs b/core/src/transfer.rs index 4b2d8f8914..c88e6251be 100644 --- a/core/src/transfer.rs +++ b/core/src/transfer.rs @@ -151,8 +151,16 @@ impl Transaction { } } - /// Return the receiver of the transaction for Moonlight transactions, if it + /// Get the receiver of the transaction for Moonlight transactions, if it /// exists. + /// + /// # Returns + /// + /// - `Some(&AccountPublicKey)` if the transaction is a Moonlight + /// transaction and the receiver is different from the sender. + /// - `None` if the transaction is a Moonlight transaction and the receiver + /// is the same as the sender. + /// - `None` if the transaction is a Phoenix transaction. #[must_use] pub fn moonlight_receiver(&self) -> Option<&AccountPublicKey> { match self { @@ -246,6 +254,15 @@ impl Transaction { } /// Return the contract call data, if there is any. + /// + /// Call data is present only when inter-contract calls happen. + /// + /// # Returns + /// + /// - `Some(&ContractCall)` if the transaction invokes another call to a + /// contract. 
+ /// - `None` if the transaction is an entrypoint call to a protocol contract + /// without a second call attached to it. #[must_use] pub fn call(&self) -> Option<&ContractCall> { match self { diff --git a/core/src/transfer/moonlight.rs b/core/src/transfer/moonlight.rs index 2ad88c38da..cb86f830be 100644 --- a/core/src/transfer/moonlight.rs +++ b/core/src/transfer/moonlight.rs @@ -163,7 +163,7 @@ impl Transaction { } /// Return the receiver of the transaction if it's different from the - /// sender. + /// sender. Otherwise, return None. #[must_use] pub fn receiver(&self) -> Option<&AccountPublicKey> { if self.payload.sender == self.payload.receiver { @@ -240,7 +240,7 @@ impl Transaction { /// Returns the transaction data, if it exists. #[must_use] - fn data(&self) -> Option<&TransactionData> { + pub fn data(&self) -> Option<&TransactionData> { self.payload.data.as_ref() } diff --git a/core/src/transfer/withdraw.rs b/core/src/transfer/withdraw.rs index 1e72fdeac7..e6c69dcbda 100644 --- a/core/src/transfer/withdraw.rs +++ b/core/src/transfer/withdraw.rs @@ -169,6 +169,7 @@ impl Withdraw { /// The receiver of the [`Withdraw`] value. #[derive(Debug, Clone, Copy, PartialEq, Archive, Serialize, Deserialize)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] #[archive_attr(derive(CheckBytes))] pub enum WithdrawReceiver { /// The stealth address to withdraw to, when the withdrawal is into Phoenix diff --git a/core/tests/serde.rs b/core/tests/serde.rs new file mode 100644 index 0000000000..b51d3ecfed --- /dev/null +++ b/core/tests/serde.rs @@ -0,0 +1,309 @@ +// This Source Code Form is subject to the terms of the Mozilla Public +// License, v. 2.0. If a copy of the MPL was not distributed with this +// file, You can obtain one at http://mozilla.org/MPL/2.0/. +// +// Copyright (c) DUSK NETWORK. All rights reserved. 
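The manual impls above route every `u64` field through the `Bigint` wrapper and an `Intermediate` mirror struct. As an aside, a minimal self-contained sketch of that pattern, assuming `Bigint` encodes the value as a decimal JSON string (which is what the `serde_bigint_empty` test above implies; the real wrapper is defined earlier in the serde module and may differ in detail):

```rust
use serde::de::Error as SerdeError;
use serde::{Deserialize, Deserializer, Serialize, Serializer};

/// Hypothetical stand-in for the `Bigint` wrapper used by the event impls:
/// a u64 carried as a decimal string so JSON consumers never lose precision.
struct Bigint(u64);

impl Serialize for Bigint {
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        serializer.serialize_str(&self.0.to_string())
    }
}

impl<'de> Deserialize<'de> for Bigint {
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        let s = String::deserialize(deserializer)?;
        // An empty or non-numeric string fails, matching `serde_bigint_empty`.
        s.parse::<u64>().map(Bigint).map_err(SerdeError::custom)
    }
}
```

Round-tripping an event then only needs the intermediate struct to swap `Bigint` back into `u64`, as the impls above do with `intermediate_event.value.0`.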
+ +#![cfg(feature = "serde")] + +use bls12_381_bls::{ + PublicKey as AccountPublicKey, SecretKey as AccountSecretKey, +}; +use dusk_core::stake::{ + Reward, RewardReason, SlashEvent, StakeEvent, StakeFundOwner, StakeKeys, +}; +use dusk_core::transfer::withdraw::WithdrawReceiver; +use dusk_core::transfer::WithdrawEvent; +use dusk_core::transfer::{ + ContractToAccountEvent, ContractToContractEvent, ConvertEvent, + DepositEvent, MoonlightTransactionEvent, PhoenixTransactionEvent, +}; +use dusk_core::{BlsScalar, JubJubScalar}; +use ff::Field; +use phoenix_core::{ + Note, PublicKey as PhoenixPublicKey, SecretKey as PhoenixSecretKey, +}; +use piecrust_uplink::{ContractId, CONTRACT_ID_BYTES}; +use rand::rngs::StdRng; +use rand::Rng; +use rand::{RngCore, SeedableRng}; + +#[test] +fn serde_stake_event() { + let mut rng = StdRng::seed_from_u64(42); + let mut contract_id_bytes = [0; CONTRACT_ID_BYTES]; + rng.fill_bytes(&mut contract_id_bytes); + let pk = AccountPublicKey::from(&AccountSecretKey::random(&mut rng)); + let owner1 = StakeFundOwner::Account(pk); + let owner2 = + StakeFundOwner::Contract(ContractId::from_bytes(contract_id_bytes)); + let event1 = StakeEvent { + keys: StakeKeys::new(pk, owner1), + value: rng.next_u64(), + locked: rng.next_u64(), + }; + let event2 = StakeEvent { + keys: StakeKeys::new(pk, owner2), + value: rng.next_u64(), + locked: rng.next_u64(), + }; + + let ser1 = serde_json::to_string(&event1).unwrap(); + let ser2 = serde_json::to_string(&event2).unwrap(); + let deser1 = serde_json::from_str(&ser1).unwrap(); + let deser2 = serde_json::from_str(&ser2).unwrap(); + + assert_eq!(event1, deser1); + assert_eq!(event2, deser2); + assert_ne!(deser1, deser2); +} + +#[test] +fn serde_slash_event() { + let mut rng = StdRng::seed_from_u64(42); + let event = SlashEvent { + account: AccountPublicKey::from(&AccountSecretKey::random(&mut rng)), + value: rng.next_u64(), + next_eligibility: rng.next_u64(), + }; + let ser = serde_json::to_string(&event).unwrap(); + let deser = serde_json::from_str(&ser).unwrap(); + assert_eq!(event, deser); +} + +#[test] +fn serde_reward() { + use RewardReason::*; + let mut rng = StdRng::seed_from_u64(42); + let account = AccountPublicKey::from(&AccountSecretKey::random(&mut rng)); + let mut events = vec![]; + for reason in [GeneratorExtra, GeneratorFixed, Voter, Other] { + events.push(Reward { + account, + value: rng.next_u64(), + reason, + }); + } + let mut desers = vec![]; + for event in &events { + let ser = serde_json::to_string(event).unwrap(); + let deser = serde_json::from_str(&ser).unwrap(); + desers.push(deser); + } + assert_eq!(events, desers); +} + +#[test] +fn serde_withdraw_event() { + let mut rng = StdRng::seed_from_u64(42); + let mut contract_id_bytes = [0; CONTRACT_ID_BYTES]; + rng.fill_bytes(&mut contract_id_bytes); + let sender = ContractId::from_bytes(contract_id_bytes); + let scalar = JubJubScalar::random(&mut rng); + let pk = PhoenixPublicKey::from(&PhoenixSecretKey::random(&mut rng)); + let stealth_addr = pk.gen_stealth_address(&scalar); + + let event1 = WithdrawEvent { + sender, + receiver: WithdrawReceiver::Moonlight(AccountPublicKey::from( + &AccountSecretKey::random(&mut rng), + )), + value: rng.next_u64(), + }; + let event2 = WithdrawEvent { + sender, + receiver: WithdrawReceiver::Phoenix(stealth_addr), + value: rng.next_u64(), + }; + + let ser1 = serde_json::to_string(&event1).unwrap(); + let ser2 = serde_json::to_string(&event2).unwrap(); + let deser1 = serde_json::from_str(&ser1).unwrap(); + let deser2 = 
serde_json::from_str(&ser2).unwrap(); + + assert_eq!(event1, deser1); + assert_eq!(event2, deser2); + assert_ne!(deser1, deser2); +} + +#[test] +fn serde_convert_event() { + let mut rng = StdRng::seed_from_u64(42); + let scalar = JubJubScalar::random(&mut rng); + let account_pk = + AccountPublicKey::from(&AccountSecretKey::random(&mut rng)); + let pk = PhoenixPublicKey::from(&PhoenixSecretKey::random(&mut rng)); + let stealth_addr = pk.gen_stealth_address(&scalar); + + let event1 = ConvertEvent { + sender: None, + receiver: WithdrawReceiver::Moonlight(account_pk.clone()), + value: rng.next_u64(), + }; + let event2 = ConvertEvent { + sender: Some(account_pk), + receiver: WithdrawReceiver::Phoenix(stealth_addr), + value: rng.next_u64(), + }; + + let ser1 = serde_json::to_string(&event1).unwrap(); + let ser2 = serde_json::to_string(&event2).unwrap(); + let deser1 = serde_json::from_str(&ser1).unwrap(); + let deser2 = serde_json::from_str(&ser2).unwrap(); + + assert_eq!(event1, deser1); + assert_eq!(event2, deser2); + assert_ne!(deser1, deser2); +} + +#[test] +fn serde_deposit_event() { + let mut rng = StdRng::seed_from_u64(42); + let mut contract_id_bytes = [0; CONTRACT_ID_BYTES]; + rng.fill_bytes(&mut contract_id_bytes); + let pk = AccountPublicKey::from(&AccountSecretKey::random(&mut rng)); + let contract_id = ContractId::from_bytes(contract_id_bytes); + + let event1 = DepositEvent { + sender: None, + receiver: contract_id, + value: rng.next_u64(), + }; + let event2 = DepositEvent { + sender: Some(pk), + receiver: contract_id, + value: rng.next_u64(), + }; + + let ser1 = serde_json::to_string(&event1).unwrap(); + let ser2 = serde_json::to_string(&event2).unwrap(); + let deser1 = serde_json::from_str(&ser1).unwrap(); + let deser2 = serde_json::from_str(&ser2).unwrap(); + + assert_eq!(event1, deser1); + assert_eq!(event2, deser2); + assert_ne!(deser1, deser2); +} + +#[test] +fn serde_contract_to_contract_event() { + let mut rng = StdRng::seed_from_u64(42); + let mut contract_id_bytes1 = [0; CONTRACT_ID_BYTES]; + let mut contract_id_bytes2 = [0; CONTRACT_ID_BYTES]; + rng.fill_bytes(&mut contract_id_bytes1); + rng.fill_bytes(&mut contract_id_bytes2); + let sender = ContractId::from_bytes(contract_id_bytes1); + let receiver = ContractId::from_bytes(contract_id_bytes2); + + let event = ContractToContractEvent { + sender, + receiver, + value: rng.next_u64(), + }; + + let ser = serde_json::to_string(&event).unwrap(); + let deser = serde_json::from_str(&ser).unwrap(); + assert_eq!(event, deser); +} + +#[test] +fn serde_contract_to_account_event() { + let mut rng = StdRng::seed_from_u64(42); + let mut contract_id_bytes = [0; CONTRACT_ID_BYTES]; + rng.fill_bytes(&mut contract_id_bytes); + let sender = ContractId::from_bytes(contract_id_bytes); + let receiver = AccountPublicKey::from(&AccountSecretKey::random(&mut rng)); + + let event = ContractToAccountEvent { + sender, + receiver, + value: rng.next_u64(), + }; + + let ser = serde_json::to_string(&event).unwrap(); + let deser = serde_json::from_str(&ser).unwrap(); + assert_eq!(event, deser); +} + +fn rand_note() -> Note { + let mut rng = StdRng::seed_from_u64(42); + let pk = PhoenixPublicKey::from(&PhoenixSecretKey::random(&mut rng)); + let blinder = JubJubScalar::random(&mut rng); + let sender_blinder = [ + JubJubScalar::random(&mut rng), + JubJubScalar::random(&mut rng), + ]; + Note::obfuscated(&mut rng, &pk, &pk, 42, blinder, sender_blinder) +} + +#[test] +fn serde_phoenix_transaction_event() { + let mut rng = StdRng::seed_from_u64(42); + let mut 
nullifiers = vec![]; + for _ in 0..rng.gen_range(0..10) { + nullifiers.push(BlsScalar::random(&mut rng)); + } + let mut notes = vec![]; + for _ in 0..rng.gen_range(0..10) { + notes.push(rand_note()); + } + let mut memo = vec![0; 50]; + rng.fill_bytes(&mut memo); + + let event1 = PhoenixTransactionEvent { + nullifiers: nullifiers.clone(), + notes: notes.clone(), + memo: memo.clone(), + gas_spent: rng.next_u64(), + refund_note: None, + }; + let event2 = PhoenixTransactionEvent { + nullifiers: nullifiers.clone(), + notes: notes.clone(), + memo: memo.clone(), + gas_spent: rng.next_u64(), + refund_note: Some(rand_note()), + }; + + let ser1 = serde_json::to_string(&event1).unwrap(); + let ser2 = serde_json::to_string(&event2).unwrap(); + let deser1 = serde_json::from_str(&ser1).unwrap(); + let deser2 = serde_json::from_str(&ser2).unwrap(); + + assert_eq!(event1, deser1); + assert_eq!(event2, deser2); + assert_ne!(deser1, deser2); +} + +#[test] +fn serde_moonlight_transaction_event_serde() { + let mut rng = StdRng::seed_from_u64(42); + let mut memo = vec![0; 50]; + rng.fill_bytes(&mut memo); + let pk = AccountPublicKey::from(&AccountSecretKey::random(&mut rng)); + + let event1 = MoonlightTransactionEvent { + sender: pk.clone(), + receiver: Some(pk.clone()), + value: rng.next_u64(), + memo: memo.clone(), + gas_spent: rng.next_u64(), + refund_info: Some((pk, rng.next_u64())), + }; + let event2 = MoonlightTransactionEvent { + sender: pk, + receiver: None, + value: rng.next_u64(), + memo, + gas_spent: rng.next_u64(), + refund_info: None, + }; + + let ser1 = serde_json::to_string(&event1).unwrap(); + let ser2 = serde_json::to_string(&event2).unwrap(); + let deser1 = serde_json::from_str(&ser1).unwrap(); + let deser2 = serde_json::from_str(&ser2).unwrap(); + assert_eq!(event1, deser1); + assert_eq!(event2, deser2); + assert_ne!(deser1, deser2); +} diff --git a/explorer/CHANGELOG.md b/explorer/CHANGELOG.md index ca7bc103e8..b2ff90bc25 100644 --- a/explorer/CHANGELOG.md +++ b/explorer/CHANGELOG.md @@ -3,14 +3,19 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), -and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +and this project adheres to +[Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
## Unreleased ### Added +- Add Tokens page [#3415] + ### Changed +- Replace legacy event system with RUES [#3425] + ### Removed - Remove version number from app title [#3338] @@ -59,7 +64,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Change `raw` payload to `json` in transaction details [#2364] - Change average gas price display value to “lux” [#2416] -- Update blocks table headers – `FEE` to `GAS`, `AVG` to `AVG PRICE`, and `TOTAL` to `USED` [#2416] +- Update blocks table headers – `FEE` to `GAS`, `AVG` to `AVG PRICE`, and + `TOTAL` to `USED` [#2416] - Update block rewards tooltip information [#2166] - Hide "Show More" button when error occurs [#2585] - Update footer layout [#2640] @@ -143,6 +149,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 [#3305]: https://github.com/dusk-network/rusk/issues/3305 [#3338]: https://github.com/dusk-network/rusk/issues/3338 [#3377]: https://github.com/dusk-network/rusk/issues/3377 +[#3425]: https://github.com/dusk-network/rusk/issues/3425 +[#3415]: https://github.com/dusk-network/rusk/issues/3415 diff --git a/explorer/README.md b/explorer/README.md index 4944bd2328..aa4f92c813 100644 --- a/explorer/README.md +++ b/explorer/README.md @@ -35,6 +35,7 @@ VITE_REFETCH_INTERVAL=10000 VITE_RUSK_PATH="" # Optional, set to '/rusk' for dev mode VITE_STATS_REFETCH_INTERVAL=1000 VITE_TRANSACTIONS_LIST_ENTRIES=100 +VITE_FEATURE_TOKENS=true ``` ## Environment variables and dev mode diff --git a/explorer/src/lib/chain-info/__tests__/transformBlock.spec.js b/explorer/src/lib/chain-info/__tests__/transformBlock.spec.js index a9440f3a30..cd15e216d0 100644 --- a/explorer/src/lib/chain-info/__tests__/transformBlock.spec.js +++ b/explorer/src/lib/chain-info/__tests__/transformBlock.spec.js @@ -30,12 +30,14 @@ describe("transformBlock", () => { blockheight: 495868, date: new Date(blockData.transactions[0].blockTimestamp * 1000), feepaid: 290866, + from: undefined, gaslimit: 500000000, gasprice: 1, gasspent: 290866, memo: blockData.transactions[0].tx.memo, method: "transfer", success: true, + to: undefined, txerror: "", txid: "3a3f6f90a1012ae751b4448bcb8e98def0ba2b18170239bd69fcf8e2e37f0602", txtype: "Moonlight", @@ -46,12 +48,14 @@ describe("transformBlock", () => { blockheight: 495868, date: new Date(blockData.transactions[1].blockTimestamp * 1000), feepaid: 289852, + from: undefined, gaslimit: 500000000, gasprice: 1, gasspent: 289852, memo: blockData.transactions[1].tx.memo, method: "transfer", success: true, + to: undefined, txerror: "", txid: "07bfabea1d94c16f2dc3697fa642f6cecea6e81bf76b9644efbb6e2723b76d00", txtype: "Phoenix", diff --git a/explorer/src/lib/chain-info/__tests__/transformTransaction.spec.js b/explorer/src/lib/chain-info/__tests__/transformTransaction.spec.js index 9c5c185c98..c5bc250f72 100644 --- a/explorer/src/lib/chain-info/__tests__/transformTransaction.spec.js +++ b/explorer/src/lib/chain-info/__tests__/transformTransaction.spec.js @@ -12,12 +12,14 @@ describe("transformTransaction", () => { blockheight: 487166, date: new Date(txData.blockTimestamp * 1000), feepaid: 290766, + from: undefined, gaslimit: 500000000, gasprice: 1, gasspent: 290766, memo: gqlTransaction.tx.tx.memo, method: "transfer", success: true, + to: undefined, txerror: "", txid: "4877687c2dbf154248d3ddee9ba0d81e3431f39056f82a46819da041d4ac0e04", txtype: "Moonlight", diff --git a/explorer/src/lib/chain-info/chain-info.d.ts b/explorer/src/lib/chain-info/chain-info.d.ts index be3924099b..97cee13ee6 100644 --- 
a/explorer/src/lib/chain-info/chain-info.d.ts +++ b/explorer/src/lib/chain-info/chain-info.d.ts @@ -35,6 +35,8 @@ type SearchResult = { }; type Transaction = { + from: string | undefined; + to: string | undefined; blockhash: string; blockheight: number; date: Date; diff --git a/explorer/src/lib/chain-info/transformTransaction.js b/explorer/src/lib/chain-info/transformTransaction.js index 53e42b315c..00e803775e 100644 --- a/explorer/src/lib/chain-info/transformTransaction.js +++ b/explorer/src/lib/chain-info/transformTransaction.js @@ -6,12 +6,14 @@ const transformTransaction = (tx) => ({ blockheight: tx.blockHeight, date: unixTsToDate(tx.blockTimestamp), feepaid: tx.gasSpent * tx.tx.gasPrice, + from: undefined, gaslimit: tx.tx.gasLimit, gasprice: tx.tx.gasPrice, gasspent: tx.gasSpent, memo: tx.tx.memo ?? "", method: tx.tx.isDeploy ? "deploy" : tx.tx.callData?.fnName ?? "transfer", success: tx.err === null, + to: undefined, txerror: tx.err ?? "", txid: tx.id, txtype: tx.tx.txType, diff --git a/explorer/src/lib/components/__tests__/__snapshots__/BlocksCard.spec.js.snap b/explorer/src/lib/components/__tests__/__snapshots__/BlocksCard.spec.js.snap index 6c588edde8..5d15072faa 100644 --- a/explorer/src/lib/components/__tests__/__snapshots__/BlocksCard.spec.js.snap +++ b/explorer/src/lib/components/__tests__/__snapshots__/BlocksCard.spec.js.snap @@ -31,7 +31,7 @@ exports[`Blocks Card > should disable the \`Show More\` button if there is no mo class="data-card__content" >
renders the Navbar component 1`] = ` > Provisioners + + + Tokens + diff --git a/explorer/src/lib/components/__tests__/__snapshots__/ProvisionersCard.spec.js.snap b/explorer/src/lib/components/__tests__/__snapshots__/ProvisionersCard.spec.js.snap index 4b52ce1989..3d671c2e8d 100644 --- a/explorer/src/lib/components/__tests__/__snapshots__/ProvisionersCard.spec.js.snap +++ b/explorer/src/lib/components/__tests__/__snapshots__/ProvisionersCard.spec.js.snap @@ -31,7 +31,7 @@ exports[`Provisioners Card > should disable the \`Show More\` button if there is class="data-card__content" >
should disable the \`Show More\` button if there is class="data-card__content" >
import { BlocksList, BlocksTable, DataCard } from "$lib/components"; - import "./BlocksCard.css"; - /** @type {Block[] | null}*/ export let blocks; @@ -50,12 +48,12 @@ }} > {#if isSmallScreen} -
+
{#each displayedBlocks as block (block)} {/each}
{:else} - + {/if} diff --git a/explorer/src/lib/components/data-card/DataCard.css b/explorer/src/lib/components/data-card/DataCard.css index bda3591005..468f582564 100644 --- a/explorer/src/lib/components/data-card/DataCard.css +++ b/explorer/src/lib/components/data-card/DataCard.css @@ -37,3 +37,8 @@ .data-card__progress-bar .dusk-progress-bar__filler { background-color: var(--primary-color); } + +.data-card__table, +.data-card__list { + overflow-y: auto; +} diff --git a/explorer/src/lib/components/data-card/DataCard.svelte b/explorer/src/lib/components/data-card/DataCard.svelte index cba1fb3bd2..129b736e3b 100644 --- a/explorer/src/lib/components/data-card/DataCard.svelte +++ b/explorer/src/lib/components/data-card/DataCard.svelte @@ -7,7 +7,7 @@ import "./DataCard.css"; - /** @type {Block[] | Transaction[] | HostProvisioner[] | Block | Transaction | null}*/ + /** @type {Block[] | Transaction[] | HostProvisioner[] | Block | Transaction | Token[] | null}*/ export let data; /** @type {Error | null}*/ diff --git a/explorer/src/lib/components/index.js b/explorer/src/lib/components/index.js index f2ea2e9319..e53a7cafd5 100644 --- a/explorer/src/lib/components/index.js +++ b/explorer/src/lib/components/index.js @@ -25,6 +25,12 @@ export { default as TransactionsCard } from "./transactions-card/TransactionsCar export { default as TransactionDetails } from "./transaction-details/TransactionDetails.svelte"; export { default as TransactionsList } from "./transactions-list/TransactionsList.svelte"; export { default as TransactionsTable } from "./transactions-table/TransactionsTable.svelte"; +export { default as TokensTable } from "./tokens-table/TokensTable.svelte"; +export { default as TokenOverview } from "./token-overview/TokenOverview.svelte"; +export { default as TokenDetailItem } from "./token-detail-item/TokenDetailItem.svelte"; +export { default as TokenDetailsTable } from "./token-details-table/TokenDetailsTable.svelte"; +export { default as TokenListDetails } from "./token-list-details/TokenListDetails.svelte"; +export { default as TokenTransactionsList } from "./token-transactions-list/TokenTransactionsList.svelte"; export { default as TransactionType } from "./transaction-type/TransactionType.svelte"; export { default as TransactionStatus } from "./transaction-status/TransactionStatus.svelte"; export { default as WorldMap } from "./world-map/WorldMap.svelte"; diff --git a/explorer/src/lib/components/navbar/Navbar.svelte b/explorer/src/lib/components/navbar/Navbar.svelte index 2d6d41a539..c941cb321f 100644 --- a/explorer/src/lib/components/navbar/Navbar.svelte +++ b/explorer/src/lib/components/navbar/Navbar.svelte @@ -22,7 +22,7 @@ /** @type {*} */ let notificationData; - const navigation = [ + let navigation = [ { link: "/", title: "Chain Info", @@ -41,6 +41,11 @@ }, ]; + $: if (import.meta.env.VITE_FEATURE_TOKENS === "true") { + navigation.push({ link: "/tokens", title: "Tokens" }); + navigation = navigation; + } + const dispatch = createEventDispatcher(); async function createEmptySpace() { diff --git a/explorer/src/lib/components/provisioners-card/ProvisionersCard.svelte b/explorer/src/lib/components/provisioners-card/ProvisionersCard.svelte index 4f53db6e02..167afdabcf 100644 --- a/explorer/src/lib/components/provisioners-card/ProvisionersCard.svelte +++ b/explorer/src/lib/components/provisioners-card/ProvisionersCard.svelte @@ -55,7 +55,7 @@ }} > {#if isSmallScreen} -
+
{#each displayedProvisioner as provisioner (provisioner)} {/each} @@ -63,7 +63,7 @@ {:else} {/if} diff --git a/explorer/src/lib/components/token-detail-item/TokenDetailItem.css b/explorer/src/lib/components/token-detail-item/TokenDetailItem.css new file mode 100644 index 0000000000..33537cf902 --- /dev/null +++ b/explorer/src/lib/components/token-detail-item/TokenDetailItem.css @@ -0,0 +1,24 @@ +.token-overview-panel__details-item { + flex: 1; + display: flex; + flex-direction: column; + justify-content: flex-start; + min-width: max-content; + margin: 0 1rem; +} + +@media (min-width: 1024px) { + .token-overview-panel__details-item:not(:last-child) { + border-right: 1px solid var(--taupe-grey); + } +} + +.token-overview-panel__details-item-title { + font-size: 1.125rem; + font-weight: 500; + margin-bottom: 0.3125rem; +} + +.token-overview-panel__details-item-subtitle { + font-size: 0.875rem; +} diff --git a/explorer/src/lib/components/token-detail-item/TokenDetailItem.svelte b/explorer/src/lib/components/token-detail-item/TokenDetailItem.svelte new file mode 100644 index 0000000000..19477d1a68 --- /dev/null +++ b/explorer/src/lib/components/token-detail-item/TokenDetailItem.svelte @@ -0,0 +1,11 @@ + + +
+

{title}

+

{subtitle}

+
diff --git a/explorer/src/lib/components/token-details-table/TokenDetailsTable.svelte b/explorer/src/lib/components/token-details-table/TokenDetailsTable.svelte new file mode 100644 index 0000000000..b02ba1758d --- /dev/null +++ b/explorer/src/lib/components/token-details-table/TokenDetailsTable.svelte @@ -0,0 +1,70 @@ + + + + +
+ + + From + To + ID + Fee (Dusk) + Status + Type + + + + {#each data as transaction (transaction)} + + {middleEllipsis( + transaction.from ? transaction.from : "", + HASH_CHARS_LENGTH + )} + {middleEllipsis( + transaction.to ? transaction.to : "", + HASH_CHARS_LENGTH + )} + {middleEllipsis(transaction.txid, HASH_CHARS_LENGTH)} + {transaction.gasprice} + + + + + + + + {/each} + +
diff --git a/explorer/src/lib/components/token-list-details/TokenListDetails.svelte b/explorer/src/lib/components/token-list-details/TokenListDetails.svelte new file mode 100644 index 0000000000..bcdb7bf5aa --- /dev/null +++ b/explorer/src/lib/components/token-list-details/TokenListDetails.svelte @@ -0,0 +1,107 @@ + + + + + + + + Token + + {data.token} + + + + + + Total Current Supply + + {data.totalCurrentSupply} + + + + + + Max Circulating Supply + + {data.maxCirculatingSupply} + + + + + + Ticker + + {data.ticker} + + + + + + Contract ID + + {tokensContractID} + + + + + + Price + + {data.price} + + + diff --git a/explorer/src/lib/components/token-overview/TokenOverview.css b/explorer/src/lib/components/token-overview/TokenOverview.css new file mode 100644 index 0000000000..256d72d99b --- /dev/null +++ b/explorer/src/lib/components/token-overview/TokenOverview.css @@ -0,0 +1,56 @@ +.token-overview-panel { + display: flex; + padding: 1rem 1.375rem; + flex-direction: column; + row-gap: 0.75rem; + border-radius: 1.5rem; + background-color: var(--surface-color); + width: 100%; + text-transform: uppercase; + position: relative; + margin-bottom: 1.25rem; +} + +.token-overview-panel__header { + display: flex; + gap: 1.25rem; + flex-direction: column; + align-items: flex-start; +} + +@media (min-width: 992px) { + .token-overview-panel__header { + flex-direction: row; + align-items: center; + } +} + +.token-overview-panel__token-name { + font-weight: 500; + font-size: 1.125em; + margin-bottom: 0.3125rem; +} + +.token-overview-panel__token-address-subheader { + font-size: 1.125em; + color: var(--text-color-secondary); +} + +.token-overview-panel__token-address { + font-weight: 500; +} + +.token-overview-panel__separator { + border: none; + border-top: 1px solid var(--taupe-grey); + margin: 0.5rem 0; +} + +.token-overview-panel__details { + display: flex; + flex-direction: row; + flex-wrap: wrap; + justify-content: space-between; + gap: 1rem; + width: 100%; +} diff --git a/explorer/src/lib/components/token-overview/TokenOverview.svelte b/explorer/src/lib/components/token-overview/TokenOverview.svelte new file mode 100644 index 0000000000..43db48739f --- /dev/null +++ b/explorer/src/lib/components/token-overview/TokenOverview.svelte @@ -0,0 +1,41 @@ + + +
+
+ +
+

{data.token}

+

+ Address: + + {middleEllipsis( + data.contractId, + calculateAdaptiveCharCount(screenWidth, 320, 1024, 4, 30) + )} + +

+
+
+
+
+ + + +
+
diff --git a/explorer/src/lib/components/token-transactions-list/TokenTransactionsList.svelte b/explorer/src/lib/components/token-transactions-list/TokenTransactionsList.svelte new file mode 100644 index 0000000000..9ffa1231e1 --- /dev/null +++ b/explorer/src/lib/components/token-transactions-list/TokenTransactionsList.svelte @@ -0,0 +1,132 @@ + + + + + + {#if data.from} + + From + + {middleEllipsis( + data.from, + calculateAdaptiveCharCount(screenWidth, 320, 1024, 4, 25) + )} + + + {/if} + {#if data.to} + + To + + {middleEllipsis( + data.to, + calculateAdaptiveCharCount(screenWidth, 320, 1024, 4, 25) + )} + + + {/if} + + + + ID + + {middleEllipsis( + data.txid, + calculateAdaptiveCharCount(screenWidth, 320, 1024, 4, 25) + )} + + + + + + relative time + + + + + + Fee + + {formatter(luxToDusk(data.feepaid))} DUSK + + + + + + Status + + + + + + + + Type + + + + + diff --git a/explorer/src/lib/components/tokens-table/TokensTable.svelte b/explorer/src/lib/components/tokens-table/TokensTable.svelte new file mode 100644 index 0000000000..7a77af08dc --- /dev/null +++ b/explorer/src/lib/components/tokens-table/TokensTable.svelte @@ -0,0 +1,54 @@ + + + + + + + + Token + Total Current Supply + Max Circulating Supply + Ticker + Contract ID + Price ($) + + + + {#each data as token (token)} + + + {token.token} + {token.totalCurrentSupply} + {token.maxCirculatingSupply} + {token.ticker} + {middleEllipsis(token.contractId, HASH_CHARS_LENGTH)} + {token.price} + + {/each} + +
diff --git a/explorer/src/lib/components/transactions-card/TransactionsCard.css b/explorer/src/lib/components/transactions-card/TransactionsCard.css deleted file mode 100644 index a74f796afc..0000000000 --- a/explorer/src/lib/components/transactions-card/TransactionsCard.css +++ /dev/null @@ -1,4 +0,0 @@ -.transactions-card__table, -.transactions-card__list { - overflow-y: auto; -} diff --git a/explorer/src/lib/components/transactions-card/TransactionsCard.svelte b/explorer/src/lib/components/transactions-card/TransactionsCard.svelte index 1e2440f135..4d0cb0b444 100644 --- a/explorer/src/lib/components/transactions-card/TransactionsCard.svelte +++ b/explorer/src/lib/components/transactions-card/TransactionsCard.svelte @@ -7,8 +7,6 @@ TransactionsTable, } from "$lib/components"; - import "./TransactionsCard.css"; - /** @type {Transaction[] | null}*/ export let txns; @@ -54,14 +52,13 @@ }} > {#if isSmallScreen} -
+
{#each displayedTxns as txn (txn)} {/each}
{:else} { }; /** @type {URL} */ - const gqlExpectedURL = new URL("/02/Chain", node); + const gqlExpectedURL = new URL("/on/graphql/query", node); const endpointEnvName = "VITE_API_ENDPOINT"; /** @type {(data: Record | number) => Response} */ @@ -72,7 +72,7 @@ describe("duskAPI", () => { expect(fetchSpy.mock.calls[0][0]).toStrictEqual(gqlExpectedURL); expect(fetchSpy.mock.calls[0][1]).toMatchInlineSnapshot(` { - "body": "{"data":"\\n \\n\\nfragment TransactionInfo on SpentTransaction {\\n\\tblockHash,\\n\\tblockHeight,\\n\\tblockTimestamp,\\n err,\\n\\tgasSpent,\\n\\tid,\\n tx {\\n callData {\\n contractId,\\n data,\\n fnName\\n },\\n gasLimit,\\n gasPrice,\\n id,\\n isDeploy,\\n memo,\\n txType\\n }\\n}\\n\\nfragment BlockInfo on Block {\\n header {\\n hash,\\n gasLimit,\\n height,\\n prevBlockHash,\\n seed,\\n stateHash,\\n timestamp,\\n version\\n },\\n fees,\\n gasSpent,\\n reward,\\n transactions {...TransactionInfo}\\n}\\n\\n query($id: String!) { block(hash: $id) {...BlockInfo} }\\n ","topic":"gql"}", + "body": "fragment TransactionInfo on SpentTransaction { blockHash, blockHeight, blockTimestamp, err, gasSpent, id, tx { callData { contractId, data, fnName }, gasLimit, gasPrice, id, isDeploy, memo, txType } } fragment BlockInfo on Block { header { hash, gasLimit, height, prevBlockHash, seed, stateHash, timestamp, version }, fees, gasSpent, reward, transactions {...TransactionInfo} } query($id: String!) { block(hash: $id) {...BlockInfo} }", "headers": { "Accept": "application/json", "Accept-Charset": "utf-8", @@ -86,7 +86,7 @@ describe("duskAPI", () => { expect(fetchSpy.mock.calls[1][0]).toStrictEqual(gqlExpectedURL); expect(fetchSpy.mock.calls[1][1]).toMatchInlineSnapshot(` { - "body": "{"data":"\\n query($height: Float!) { block(height: $height) { header { hash } } }\\n ","topic":"gql"}", + "body": "query($height: Float!) { block(height: $height) { header { hash } } }", "headers": { "Accept": "application/json", "Accept-Charset": "utf-8", @@ -130,7 +130,7 @@ describe("duskAPI", () => { expect(fetchSpy.mock.calls[0][0]).toStrictEqual(gqlExpectedURL); expect(fetchSpy.mock.calls[0][1]).toMatchInlineSnapshot(` { - "body": "{"data":"\\n query($height: Float!) { block(height: $height) { header { hash } } }\\n ","topic":"gql"}", + "body": "query($height: Float!) { block(height: $height) { header { hash } } }", "headers": { "Accept": "application/json", "Accept-Charset": "utf-8", @@ -144,7 +144,7 @@ describe("duskAPI", () => { // expect(fetchSpy.mock.calls[1][0]).toStrictEqual(gqlExpectedURL); expect(fetchSpy.mock.calls[1][1]).toMatchInlineSnapshot(` { - "body": "{"data":"\\n query($height: Float!) { block(height: $height) { header { hash } } }\\n ","topic":"gql"}", + "body": "query($height: Float!) { block(height: $height) { header { hash } } }", "headers": { "Accept": "application/json", "Accept-Charset": "utf-8", @@ -166,7 +166,7 @@ describe("duskAPI", () => { expect(fetchSpy.mock.calls[0][0]).toStrictEqual(gqlExpectedURL); expect(fetchSpy.mock.calls[0][1]).toMatchInlineSnapshot(` { - "body": "{"data":"query($id: String!) { block(hash: $id) { header { json } } }","topic":"gql"}", + "body": "query($id: String!) 
{ block(hash: $id) { header { json } } }", "headers": { "Accept": "application/json", "Accept-Charset": "utf-8", @@ -189,7 +189,7 @@ describe("duskAPI", () => { expect(fetchSpy.mock.calls[0][0]).toStrictEqual(gqlExpectedURL); expect(fetchSpy.mock.calls[0][1]).toMatchInlineSnapshot(` { - "body": "{"data":"\\n \\n\\nfragment TransactionInfo on SpentTransaction {\\n\\tblockHash,\\n\\tblockHeight,\\n\\tblockTimestamp,\\n err,\\n\\tgasSpent,\\n\\tid,\\n tx {\\n callData {\\n contractId,\\n data,\\n fnName\\n },\\n gasLimit,\\n gasPrice,\\n id,\\n isDeploy,\\n memo,\\n txType\\n }\\n}\\n\\nfragment BlockInfo on Block {\\n header {\\n hash,\\n gasLimit,\\n height,\\n prevBlockHash,\\n seed,\\n stateHash,\\n timestamp,\\n version\\n },\\n fees,\\n gasSpent,\\n reward,\\n transactions {...TransactionInfo}\\n}\\n\\n query($amount: Int!) { blocks(last: $amount) {...BlockInfo} }\\n ","topic":"gql"}", + "body": "fragment TransactionInfo on SpentTransaction { blockHash, blockHeight, blockTimestamp, err, gasSpent, id, tx { callData { contractId, data, fnName }, gasLimit, gasPrice, id, isDeploy, memo, txType } } fragment BlockInfo on Block { header { hash, gasLimit, height, prevBlockHash, seed, stateHash, timestamp, version }, fees, gasSpent, reward, transactions {...TransactionInfo} } query($amount: Int!) { blocks(last: $amount) {...BlockInfo} }", "headers": { "Accept": "application/json", "Accept-Charset": "utf-8", @@ -214,7 +214,7 @@ describe("duskAPI", () => { expect(fetchSpy.mock.calls[0][0]).toStrictEqual(gqlExpectedURL); expect(fetchSpy.mock.calls[0][1]).toMatchInlineSnapshot(` { - "body": "{"data":"\\n \\n\\nfragment TransactionInfo on SpentTransaction {\\n\\tblockHash,\\n\\tblockHeight,\\n\\tblockTimestamp,\\n err,\\n\\tgasSpent,\\n\\tid,\\n tx {\\n callData {\\n contractId,\\n data,\\n fnName\\n },\\n gasLimit,\\n gasPrice,\\n id,\\n isDeploy,\\n memo,\\n txType\\n }\\n}\\n\\nfragment BlockInfo on Block {\\n header {\\n hash,\\n gasLimit,\\n height,\\n prevBlockHash,\\n seed,\\n stateHash,\\n timestamp,\\n version\\n },\\n fees,\\n gasSpent,\\n reward,\\n transactions {...TransactionInfo}\\n}\\n\\n query($amount: Int!) {\\n blocks(last: $amount) {...BlockInfo},\\n transactions(last: $amount) {...TransactionInfo}\\n }\\n ","topic":"gql"}", + "body": "fragment TransactionInfo on SpentTransaction { blockHash, blockHeight, blockTimestamp, err, gasSpent, id, tx { callData { contractId, data, fnName }, gasLimit, gasPrice, id, isDeploy, memo, txType } } fragment BlockInfo on Block { header { hash, gasLimit, height, prevBlockHash, seed, stateHash, timestamp, version }, fees, gasSpent, reward, transactions {...TransactionInfo} } query($amount: Int!) 
{ blocks(last: $amount) {...BlockInfo}, transactions(last: $amount) {...TransactionInfo} }", "headers": { "Accept": "application/json", "Accept-Charset": "utf-8", @@ -314,7 +314,7 @@ describe("duskAPI", () => { expect(fetchSpy.mock.calls[1][0]).toStrictEqual(gqlExpectedURL); expect(fetchSpy.mock.calls[1][1]).toMatchInlineSnapshot(` { - "body": "{"data":"query { block(height: -1) { header { height } } }","topic":"gql"}", + "body": "query { block(height: -1) { header { height } } }", "headers": { "Accept": "application/json", "Accept-Charset": "utf-8", @@ -327,7 +327,7 @@ describe("duskAPI", () => { expect(fetchSpy.mock.calls[2][0]).toStrictEqual(gqlExpectedURL); expect(fetchSpy.mock.calls[2][1]).toMatchInlineSnapshot(` { - "body": "{"data":"query { blocks(last: 100) { transactions { err } } }","topic":"gql"}", + "body": "query { blocks(last: 100) { transactions { err } } }", "headers": { "Accept": "application/json", "Accept-Charset": "utf-8", @@ -349,7 +349,7 @@ describe("duskAPI", () => { expect(fetchSpy.mock.calls[0][0]).toStrictEqual(gqlExpectedURL); expect(fetchSpy.mock.calls[0][1]).toMatchInlineSnapshot(` { - "body": "{"data":"\\n \\nfragment TransactionInfo on SpentTransaction {\\n\\tblockHash,\\n\\tblockHeight,\\n\\tblockTimestamp,\\n err,\\n\\tgasSpent,\\n\\tid,\\n tx {\\n callData {\\n contractId,\\n data,\\n fnName\\n },\\n gasLimit,\\n gasPrice,\\n id,\\n isDeploy,\\n memo,\\n txType\\n }\\n}\\n\\n query($id: String!) { tx(hash: $id) {...TransactionInfo} }\\n ","topic":"gql"}", + "body": "fragment TransactionInfo on SpentTransaction { blockHash, blockHeight, blockTimestamp, err, gasSpent, id, tx { callData { contractId, data, fnName }, gasLimit, gasPrice, id, isDeploy, memo, txType } } query($id: String!) { tx(hash: $id) {...TransactionInfo} }", "headers": { "Accept": "application/json", "Accept-Charset": "utf-8", @@ -373,7 +373,7 @@ describe("duskAPI", () => { expect(fetchSpy.mock.calls[0][0]).toStrictEqual(gqlExpectedURL); expect(fetchSpy.mock.calls[0][1]).toMatchInlineSnapshot(` { - "body": "{"data":"query($id: String!) { tx(hash: $id) { tx { json } } }","topic":"gql"}", + "body": "query($id: String!) { tx(hash: $id) { tx { json } } }", "headers": { "Accept": "application/json", "Accept-Charset": "utf-8", @@ -396,7 +396,7 @@ describe("duskAPI", () => { expect(fetchSpy.mock.calls[0][0]).toStrictEqual(gqlExpectedURL); expect(fetchSpy.mock.calls[0][1]).toMatchInlineSnapshot(` { - "body": "{"data":"\\n \\nfragment TransactionInfo on SpentTransaction {\\n\\tblockHash,\\n\\tblockHeight,\\n\\tblockTimestamp,\\n err,\\n\\tgasSpent,\\n\\tid,\\n tx {\\n callData {\\n contractId,\\n data,\\n fnName\\n },\\n gasLimit,\\n gasPrice,\\n id,\\n isDeploy,\\n memo,\\n txType\\n }\\n}\\n\\n query($amount: Int!) { transactions(last: $amount) {...TransactionInfo} }\\n ","topic":"gql"}", + "body": "fragment TransactionInfo on SpentTransaction { blockHash, blockHeight, blockTimestamp, err, gasSpent, id, tx { callData { contractId, data, fnName }, gasLimit, gasPrice, id, isDeploy, memo, txType } } query($amount: Int!) { transactions(last: $amount) {...TransactionInfo} }", "headers": { "Accept": "application/json", "Accept-Charset": "utf-8", @@ -498,7 +498,7 @@ describe("duskAPI", () => { expect(fetchSpy.mock.calls[0][0]).toStrictEqual(gqlExpectedURL); expect(fetchSpy.mock.calls[0][1]).toMatchInlineSnapshot(` { - "body": "{"data":"\\n query($id: String!) {\\n block(hash: $id) { header { hash } },\\n tx(hash: $id) { id }\\n }\\n ","topic":"gql"}", + "body": "query($id: String!) 
{ block(hash: $id) { header { hash } }, tx(hash: $id) { id } }", "headers": { "Accept": "application/json", "Accept-Charset": "utf-8", @@ -512,7 +512,7 @@ describe("duskAPI", () => { expect(fetchSpy.mock.calls[1][0]).toStrictEqual(gqlExpectedURL); expect(fetchSpy.mock.calls[1][1]).toMatchInlineSnapshot(` { - "body": "{"data":"\\n query($height: Float!) { block(height: $height) { header { hash } } }\\n ","topic":"gql"}", + "body": "query($height: Float!) { block(height: $height) { header { hash } } }", "headers": { "Accept": "application/json", "Accept-Charset": "utf-8", @@ -541,7 +541,7 @@ describe("duskAPI", () => { expect(fetchSpy.mock.calls[0][0]).toStrictEqual(gqlExpectedURL); expect(fetchSpy.mock.calls[0][1]).toMatchInlineSnapshot(` { - "body": "{"data":"\\n query($id: String!) {\\n block(hash: $id) { header { hash } },\\n tx(hash: $id) { id }\\n }\\n ","topic":"gql"}", + "body": "query($id: String!) { block(hash: $id) { header { hash } }, tx(hash: $id) { id } }", "headers": { "Accept": "application/json", "Accept-Charset": "utf-8", @@ -569,7 +569,7 @@ describe("duskAPI", () => { expect(fetchSpy.mock.calls[0][0]).toStrictEqual(gqlExpectedURL); expect(fetchSpy.mock.calls[0][1]).toMatchInlineSnapshot(` { - "body": "{"data":"\\n query($height: Float!) { block(height: $height) { header { hash } } }\\n ","topic":"gql"}", + "body": "query($height: Float!) { block(height: $height) { header { hash } } }", "headers": { "Accept": "application/json", "Accept-Charset": "utf-8", diff --git a/explorer/src/lib/services/duskAPI.js b/explorer/src/lib/services/duskAPI.js index 65b5dc4a00..7cb610477f 100644 --- a/explorer/src/lib/services/duskAPI.js +++ b/explorer/src/lib/services/duskAPI.js @@ -54,11 +54,8 @@ const toHeadersVariables = unless( * @param {{ query: string, variables?: Record }} queryInfo */ const gqlGet = (queryInfo) => - fetch(makeNodeUrl("/02/Chain"), { - body: JSON.stringify({ - data: queryInfo.query, - topic: "gql", - }), + fetch(makeNodeUrl("/on/graphql/query"), { + body: queryInfo.query.replace(/\s+/g, " ").trim(), headers: { Accept: "application/json", "Accept-Charset": "utf-8", diff --git a/explorer/src/routes/+page.svelte b/explorer/src/routes/+page.svelte index c818935bc7..5b77864d63 100644 --- a/explorer/src/routes/+page.svelte +++ b/explorer/src/routes/+page.svelte @@ -35,9 +35,9 @@
-
+
-
+
should render the app's main layout 1`] = ` > Provisioners + + + Tokens + diff --git a/explorer/src/routes/__tests__/__snapshots__/page.spec.js.snap b/explorer/src/routes/__tests__/__snapshots__/page.spec.js.snap index 368960b2e0..483036db91 100644 --- a/explorer/src/routes/__tests__/__snapshots__/page.spec.js.snap +++ b/explorer/src/routes/__tests__/__snapshots__/page.spec.js.snap @@ -4,7 +4,7 @@ exports[`home page > should render the home page, start polling for the latest c
-
should render the home page, start polling for the latest c
-
+
should render the home page, start polling for the latest c
-
should render the home page, start polling for the latest c
-
+
should render the Blocks page with the mobile layout 1`] class="data-card__content" >
should render the Blocks page, start polling for blocks a class="data-card__content" >
should render the Provisioners page with the mobile class="data-card__content" >
should render the Provisioners page, start polling class="data-card__content" >
{ + const featureTokensEnabled = import.meta.env.VITE_FEATURE_TOKENS === "true"; + + if (!featureTokensEnabled) { + throw redirect(302, "/"); + } +}; diff --git a/explorer/src/routes/tokens/+page.svelte b/explorer/src/routes/tokens/+page.svelte new file mode 100644 index 0000000000..a9037e5dbc --- /dev/null +++ b/explorer/src/routes/tokens/+page.svelte @@ -0,0 +1,50 @@ + + +
+ loadMoreItems(), + disabled: isLoadMoreDisabled, + label: "Show More", + }} + > + {#if isSmallScreen} +
+ {#each displayedTokens as token (token)} + + {/each} +
+ {:else} + + {/if} +
+
diff --git a/explorer/src/routes/tokens/token/+page.svelte b/explorer/src/routes/tokens/token/+page.svelte new file mode 100644 index 0000000000..6303cdee1b --- /dev/null +++ b/explorer/src/routes/tokens/token/+page.svelte @@ -0,0 +1,95 @@ + + +
+ {#if tokenData} +
+ +
+ loadMoreItems(), + disabled: isLoadMoreDisabled, + label: "Show More", + }} + > + {#if isSmallScreen} +
+ {#each displayedTxns as txn (txn)} + + {/each} +
+ {:else} + + {/if} +
+ {:else} +

Token not found

+ {/if} +
diff --git a/explorer/src/routes/transactions/__tests__/__snapshots__/page.spec.js.snap b/explorer/src/routes/transactions/__tests__/__snapshots__/page.spec.js.snap index d6b0684468..a1b1062b58 100644 --- a/explorer/src/routes/transactions/__tests__/__snapshots__/page.spec.js.snap +++ b/explorer/src/routes/transactions/__tests__/__snapshots__/page.spec.js.snap @@ -31,7 +31,7 @@ exports[`Transactions page > should render the Transactions page with the mobile class="data-card__content" >
should render the Transactions page, start polling class="data-card__content" >
{ API_ENDPOINT: env.VITE_API_ENDPOINT, VITE_BLOCKS_LIST_ENTRIES: env.VITE_BLOCKS_LIST_ENTRIES, VITE_CHAIN_INFO_ENTRIES: env.VITE_CHAIN_INFO_ENTRIES, + VITE_FEATURE_TOKENS: env.VITE_FEATURE_TOKENS, VITE_MARKET_DATA_REFETCH_INTERVAL: env.VITE_MARKET_DATA_REFETCH_INTERVAL, VITE_NODE_URL: env.VITE_NODE_URL, @@ -59,6 +60,7 @@ export default defineConfig(({ mode }) => { VITE_API_ENDPOINT: "https://api.dusk.network/v1", VITE_BLOCKS_LIST_ENTRIES: "100", VITE_CHAIN_INFO_ENTRIES: "15", + VITE_FEATURE_TOKENS: "true", VITE_MARKET_DATA_REFETCH_INTERVAL: "120000", VITE_NODE_URL: "https://nodes.dusk.network", VITE_PROVISIONERS_REFETCH_INTERVAL: "30000", diff --git a/node-data/CHANGELOG.md b/node-data/CHANGELOG.md index fc2a8fbf78..3266884a6f 100644 --- a/node-data/CHANGELOG.md +++ b/node-data/CHANGELOG.md @@ -7,6 +7,17 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [1.1.0] - 2025-02-14 + +### Added + +- Add PartialEq, Eq to `BlockState` [#3359] +- Add `SpentTransaction::shielded` & `SpentTransaction::public` getter fn [#3464] + +### Removed + +- Removed `ArchivalData` together with archive module [#3359] + [1.0.1] - 2025-01-23 ### Changed @@ -20,8 +31,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Add Types used for interacting with Dusk node +[#3464]: https://github.com/dusk-network/rusk/issues/3464 +[#3359]: https://github.com/dusk-network/rusk/issues/3359 [#3405]: https://github.com/dusk-network/rusk/issues/3405 -[Unreleased]: https://github.com/dusk-network/rusk/compare/dusk-node-data-1.0.1...HEAD +[Unreleased]: https://github.com/dusk-network/rusk/compare/dusk-node-data-1.1.0...HEAD +[1.1.0]: https://github.com/dusk-network/rusk/compare/dusk-node-data-1.0.1...dusk-node-data-1.1.0 [1.0.1]: https://github.com/dusk-network/rusk/compare/dusk-node-data-1.0.0...dusk-node-data-1.0.1 [1.0.0]: https://github.com/dusk-network/rusk/tree/dusk-node-data-1.0.0 diff --git a/node-data/Cargo.toml b/node-data/Cargo.toml index b73884215a..e922251ae0 100644 --- a/node-data/Cargo.toml +++ b/node-data/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "dusk-node-data" -version = "1.0.2-alpha.1" +version = "1.1.1-alpha.1" edition = "2021" description = "Types used for interacting with Dusk node." diff --git a/node-data/src/archive.rs b/node-data/src/archive.rs deleted file mode 100644 index eef06af56a..0000000000 --- a/node-data/src/archive.rs +++ /dev/null @@ -1,25 +0,0 @@ -// This Source Code Form is subject to the terms of the Mozilla Public -// License, v. 2.0. If a copy of the MPL was not distributed with this -// file, You can obtain one at http://mozilla.org/MPL/2.0/. -// -// Copyright (c) DUSK NETWORK. All rights reserved. - -use crate::events::contract::ContractTxEvent; -use crate::ledger::Hash; - -type HexHash = String; - -/// Defined data, that the archivist will store. -/// -/// This is also the type of the mpsc channel where the archivist listens for -/// data to archive. -/// -/// Any data that archive nodes can store must be defined here -#[derive(Debug)] -pub enum ArchivalData { - /// List of contract events from one block together with the block height - /// and block hash. 
- ArchivedEvents(u64, Hash, Vec), - FinalizedBlock(u64, HexHash), - DeletedBlock(u64, HexHash), -} diff --git a/node-data/src/events/blocks.rs b/node-data/src/events/blocks.rs index 8083d98336..bd7e4ff052 100644 --- a/node-data/src/events/blocks.rs +++ b/node-data/src/events/blocks.rs @@ -19,7 +19,7 @@ use crate::ledger::{Block, Hash}; /// /// - `as_str() -> &'static str` - Returns the string representation of the /// block state. -#[derive(Clone, Debug)] +#[derive(Clone, Debug, PartialEq, Eq)] pub enum BlockState { Confirmed, Finalized, diff --git a/node-data/src/ledger/transaction.rs b/node-data/src/ledger/transaction.rs index e2e51019cb..9692a7a9f8 100644 --- a/node-data/src/ledger/transaction.rs +++ b/node-data/src/ledger/transaction.rs @@ -7,7 +7,9 @@ use std::io; use dusk_bytes::Serializable as DuskSerializable; -use dusk_core::signatures::bls; +use dusk_core::signatures::bls::PublicKey as AccountPublicKey; +use dusk_core::transfer::moonlight::Transaction as MoonlightTransaction; +use dusk_core::transfer::phoenix::Transaction as PhoenixTransaction; use dusk_core::transfer::Transaction as ProtocolTransaction; use serde::Serialize; use sha3::Digest; @@ -46,14 +48,42 @@ impl From for Transaction { } } +/// A spent transaction is a transaction that has been included in a block and +/// was executed. #[derive(Debug, Clone, Serialize)] pub struct SpentTransaction { + /// The transaction that was executed. pub inner: Transaction, + /// The height of the block in which the transaction was included. pub block_height: u64, + /// The amount of gas that was spent during the execution of the + /// transaction. pub gas_spent: u64, + /// An optional error message if the transaction execution yielded an + /// error. pub err: Option, } +impl SpentTransaction { + /// Returns the underlying public transaction, if it is one. Otherwise, + /// returns `None`. + pub fn public(&self) -> Option<&MoonlightTransaction> { + match &self.inner.inner { + ProtocolTransaction::Moonlight(public_tx) => Some(public_tx), + _ => None, + } + } + + /// Returns the underlying shielded transaction, if it is one. Otherwise, + /// returns `None`. + pub fn shielded(&self) -> Option<&PhoenixTransaction> { + match &self.inner.inner { + ProtocolTransaction::Phoenix(shielded_tx) => Some(shielded_tx), + _ => None, + } + } +} + impl Transaction { /// Computes the hash digest of the entire transaction data. /// @@ -71,8 +101,8 @@ impl Transaction { /// Computes the transaction ID. /// /// The transaction ID is a unique identifier for the transaction. - /// Unlike the [`hash()`](#method.hash) method, which is computed over the - /// entire transaction, the transaction ID is derived from specific + /// Unlike the [`digest()`](#method.digest) method, which is computed over + /// the entire transaction, the transaction ID is derived from specific /// fields of the transaction and serves as a unique identifier of the /// transaction itself. 
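The new `SpentTransaction::public` and `SpentTransaction::shielded` getters added above let callers branch on the transaction flavour without matching `ProtocolTransaction` themselves. A hedged usage sketch (the re-export path is assumed; only methods shown in this diff are used):

```rust
use node_data::ledger::SpentTransaction; // re-export path assumed

/// Hypothetical helper: label a spent transaction by its flavour.
fn flavour(spent: &SpentTransaction) -> &'static str {
    if spent.public().is_some() {
        "moonlight (public)"
    } else if spent.shielded().is_some() {
        "phoenix (shielded)"
    } else {
        // `ProtocolTransaction` only has the two variants handled above.
        unreachable!()
    }
}
```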
/// @@ -129,7 +159,7 @@ impl Eq for SpentTransaction {} pub enum SpendingId { Nullifier([u8; 32]), - AccountNonce(bls::PublicKey, u64), + AccountNonce(AccountPublicKey, u64), } impl SpendingId { diff --git a/node-data/src/lib.rs b/node-data/src/lib.rs index 9615f0d7b6..ea685697fb 100644 --- a/node-data/src/lib.rs +++ b/node-data/src/lib.rs @@ -7,7 +7,6 @@ #![deny(unused_crate_dependencies)] #![deny(unused_extern_crates)] -pub mod archive; pub mod bls; pub mod encoding; pub mod events; diff --git a/node/CHANGELOG.md b/node/CHANGELOG.md index 18bdee0951..7bf341c751 100644 --- a/node/CHANGELOG.md +++ b/node/CHANGELOG.md @@ -7,6 +7,24 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [1.1.0] - 2025-02-14 + +### Added + +- Add `ledger_txs` to `Ledger` trait and Backend implementation [#3491] + +### Fixed + +- Change the way the archive synchronizes with the node Acceptor [#3359] + +### Changed + +- Change deprecated `tempdir` with `tempfile` dependency [#3407] + +### Removed + +- Removed ArchivistSrv & archivist module [#3359] + ## [1.0.1] - 2025-01-23 ### Changed @@ -18,8 +36,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - First `dusk-node` release +[#3491]: https://github.com/dusk-network/rusk/issues/3491 +[#3359]: https://github.com/dusk-network/rusk/issues/3359 +[#3407]: https://github.com/dusk-network/rusk/issues/3407 [#3405]: https://github.com/dusk-network/rusk/issues/3405 -[Unreleased]: https://github.com/dusk-network/rusk/compare/dusk-node-1.0.1...HEAD +[Unreleased]: https://github.com/dusk-network/rusk/compare/dusk-node-1.1.0...HEAD +[1.1.0]: https://github.com/dusk-network/rusk/compare/dusk-node-1.0.1...dusk-node-1.1.0 [1.0.1]: https://github.com/dusk-network/rusk/compare/node-1.0.0...dusk-node-1.0.1 [1.0.0]: https://github.com/dusk-network/rusk/tree/node-1.0.0 diff --git a/node/Cargo.toml b/node/Cargo.toml index 4a9cd65eb8..ad6a2c1f99 100644 --- a/node/Cargo.toml +++ b/node/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "dusk-node" -version = "1.0.2-alpha.1" +version = "1.1.1-alpha.1" edition = "2021" autobins = false repository = "https://github.com/dusk-network/rusk" @@ -49,7 +49,7 @@ serde_with = { workspace = true, features = ["hex"], optional = true } fake = { workspace = true, features = ['derive'] } node-data = { workspace = true, features = ["faker"] } rand = { workspace = true } -tempdir = { workspace = true } +tempfile = { workspace = true } criterion = { workspace = true, features = ["async_futures"] } [features] diff --git a/node/src/archive.rs b/node/src/archive.rs index 6a4ed620c0..7d3868ff65 100644 --- a/node/src/archive.rs +++ b/node/src/archive.rs @@ -12,12 +12,10 @@ use rocksdb::OptimisticTransactionDB; use sqlx::sqlite::SqlitePool; use tracing::debug; -mod archivist; mod moonlight; mod sqlite; mod transformer; -pub use archivist::ArchivistSrv; pub use moonlight::{MoonlightGroup, Order}; // Archive folder containing the sqlite database and the moonlight database diff --git a/node/src/archive/archivist.rs b/node/src/archive/archivist.rs deleted file mode 100644 index d1febf1ffa..0000000000 --- a/node/src/archive/archivist.rs +++ /dev/null @@ -1,95 +0,0 @@ -// This Source Code Form is subject to the terms of the Mozilla Public -// License, v. 2.0. If a copy of the MPL was not distributed with this -// file, You can obtain one at http://mozilla.org/MPL/2.0/. -// -// Copyright (c) DUSK NETWORK. All rights reserved. 
- -use std::sync::Arc; - -use async_trait::async_trait; -use node_data::archive::ArchivalData; -use tokio::sync::mpsc::Receiver; -use tokio::sync::RwLock; -use tracing::error; - -use crate::archive::Archive; -use crate::{database, vm, LongLivedService, Network}; - -pub struct ArchivistSrv { - pub archive_receiver: Receiver, - pub archivist: Archive, -} - -#[async_trait] -impl - LongLivedService for ArchivistSrv -{ - async fn execute( - &mut self, - _: Arc>, - _: Arc>, - _: Arc>, - ) -> anyhow::Result { - loop { - if let Some(msg) = self.archive_receiver.recv().await { - match msg { - ArchivalData::ArchivedEvents( - blk_height, - blk_hash, - events, - ) => { - if let Err(e) = self - .archivist - .store_unfinalized_events( - blk_height, blk_hash, events, - ) - .await - { - error!( - "Failed to archive block vm events: {:?}", - e - ); - } - } - ArchivalData::DeletedBlock(blk_height, hex_blk_hash) => { - if let Err(e) = self - .archivist - .remove_block_and_events(blk_height, &hex_blk_hash) - .await - { - error!( - "Failed to delete block in archive: {:?}", - e - ); - } - } - ArchivalData::FinalizedBlock(blk_height, hex_blk_hash) => { - if let Err(e) = self - .archivist - .finalize_archive_data(blk_height, &hex_blk_hash) - .await - { - error!( - "Failed to finalize block in archive: {:?}", - e - ); - } - } - } - } else { - error!( - "Sending side of the archive data channel has been closed" - ); - - break; - } - } - - Ok(0) - } - - /// Returns service name. - fn name(&self) -> &'static str { - "archivist" - } -} diff --git a/node/src/archive/moonlight.rs b/node/src/archive/moonlight.rs index 78ff5c4e4f..f66bf2e5bf 100644 --- a/node/src/archive/moonlight.rs +++ b/node/src/archive/moonlight.rs @@ -21,7 +21,7 @@ use serde::{Deserialize, Serialize}; use tracing::{debug, error, info, warn}; use crate::archive::transformer::{ - self, EventIdentifier, MoonlightTxEvents, MoonlightTxMapping, + self, EventIdentifier, MoonlightTransferEvents, MoonlightTransferMapping, }; use crate::archive::{Archive, ArchiveOptions}; @@ -194,7 +194,9 @@ impl Archive { self.update_outflow_address_tx(pk, tx_hash)?; } - for MoonlightTxMapping(moonlight_tx, events) in moonlight_tx_mappings { + for MoonlightTransferMapping(moonlight_tx, events) in + moonlight_tx_mappings + { self.put_moonlight_events(moonlight_tx, events)?; } @@ -337,7 +339,7 @@ impl Archive { // Construct the MoonlightGroup from MoonlightTxEvents & // MoonlightTx let moonlight_tx_events = - serde_json::from_slice::(e)?; + serde_json::from_slice::(e)?; moonlight_groups.push(MoonlightGroup { events: moonlight_tx_events.events(), @@ -512,7 +514,7 @@ impl Archive { fn put_moonlight_events( &self, moonlight_tx: EventIdentifier, - events: MoonlightTxEvents, + events: MoonlightTransferEvents, ) -> Result<()> { let txn = self.moonlight_db.transaction(); let cf = self.cf_txhash_moonlight_events()?; @@ -753,12 +755,12 @@ mod tests { use std::env; use std::path::PathBuf; + use dusk_core::abi::{ContractId, CONTRACT_ID_BYTES}; use dusk_core::signatures::bls::SecretKey; use dusk_core::transfer::withdraw::WithdrawReceiver; use dusk_core::transfer::{ ConvertEvent, DepositEvent, MoonlightTransactionEvent, WithdrawEvent, }; - use dusk_core::{ContractId, CONTRACT_ID_BYTES}; use node_data::events::contract::{ ContractEvent, ContractTxEvent, WrappedContractId, ORIGIN_HASH_BYTES, }; @@ -768,7 +770,7 @@ mod tests { use super::transformer::{self, filter_and_convert, TransormerResult}; use super::{ - AccountPublicKey, Archive, EventIdentifier, MoonlightTxEvents, + AccountPublicKey, 
Archive, EventIdentifier, MoonlightTransferEvents, }; // Construct a random test directory path in the temp folder of the OS @@ -1153,7 +1155,7 @@ mod tests { .unwrap() .unwrap(); let fetched_events_by_moonlight_tx = - serde_json::from_slice::( + serde_json::from_slice::( &fetched_events_by_moonlight_tx, ) .unwrap(); diff --git a/node/src/archive/sqlite.rs b/node/src/archive/sqlite.rs index cb176e1579..feb9d88f57 100644 --- a/node/src/archive/sqlite.rs +++ b/node/src/archive/sqlite.rs @@ -226,7 +226,7 @@ impl Archive { impl Archive { /// Store the list of **all** unfinalized vm events from the block of the /// given height. - pub(super) async fn store_unfinalized_events( + pub(crate) async fn store_unfinalized_events( &self, block_height: u64, block_hash: Hash, @@ -276,7 +276,7 @@ impl Archive { /// This also triggers the loading of the MoonlightTxEvents into the /// moonlight db. This also updates the last finalized block height /// attribute. - pub(super) async fn finalize_archive_data( + pub(crate) async fn finalize_archive_data( &mut self, current_block_height: u64, hex_block_hash: &str, @@ -375,7 +375,7 @@ impl Archive { /// Remove the unfinalized block together with the unfinalized events of the /// given hash from the archive. - pub(super) async fn remove_block_and_events( + pub(crate) async fn remove_block_and_events( &self, current_block_height: u64, hex_block_hash: &str, @@ -501,7 +501,7 @@ mod tests { use std::env; use std::path::PathBuf; - use dusk_core::ContractId; + use dusk_core::abi::ContractId; use node_data::events::contract::{ContractEvent, WrappedContractId}; use rand::distributions::Alphanumeric; use rand::Rng; diff --git a/node/src/archive/transformer.rs b/node/src/archive/transformer.rs index cba996e826..5d35fc541e 100644 --- a/node/src/archive/transformer.rs +++ b/node/src/archive/transformer.rs @@ -9,20 +9,24 @@ use std::collections::BTreeMap; use dusk_core::signatures::bls::PublicKey as AccountPublicKey; use dusk_core::transfer::withdraw::WithdrawReceiver; use dusk_core::transfer::{ - ConvertEvent, MoonlightTransactionEvent, WithdrawEvent, CONVERT_TOPIC, - MINT_TOPIC, MOONLIGHT_TOPIC, TRANSFER_CONTRACT, WITHDRAW_TOPIC, + ContractToAccountEvent, ConvertEvent, MoonlightTransactionEvent, + WithdrawEvent, CONTRACT_TO_ACCOUNT_TOPIC, CONVERT_TOPIC, MINT_TOPIC, + MOONLIGHT_TOPIC, TRANSFER_CONTRACT, WITHDRAW_TOPIC, }; use node_data::events::contract::{ContractEvent, ContractTxEvent, OriginHash}; use serde::{Deserialize, Serialize}; -/// A group of events that belong to the same Moonlight transaction. +/// A group of events that belong to the same transaction. +/// +/// This transaction is guaranteed to have changed the balance of at least one +/// public account, therefore seen as a transfer. 
#[serde_with::serde_as] #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -pub(super) struct MoonlightTxEvents { +pub(super) struct MoonlightTransferEvents { events: Vec, } -impl MoonlightTxEvents { +impl MoonlightTransferEvents { // Private on purpose fn new(events: Vec) -> Self { Self { events } @@ -68,16 +72,16 @@ impl EventIdentifier { pub(super) type AddressMapping = (AccountPublicKey, EventIdentifier); pub(super) type MemoMapping = (Vec, EventIdentifier); -pub(super) struct MoonlightTxMapping( +pub(super) struct MoonlightTransferMapping( pub EventIdentifier, - pub MoonlightTxEvents, + pub MoonlightTransferEvents, ); pub(super) struct TransormerResult { pub address_outflow_mappings: Vec, pub address_inflow_mappings: Vec, pub memo_mappings: Vec, - pub moonlight_tx_mappings: Vec, + pub moonlight_tx_mappings: Vec, } /// Group a list of events from the same block by origin and block height @@ -107,140 +111,211 @@ pub(super) fn group_by_origins( pub(super) fn filter_and_convert( grouped_events: BTreeMap>, ) -> TransormerResult { - // Keep only the event groups which contain a moonlight in- - // or outflow + // TODO: We could add Ord to PublicKey / G2Affine for easy sort & dedup or + // use inside BTreeMap let mut address_inflow_mappings: Vec<(AccountPublicKey, EventIdentifier)> = vec![]; let mut address_outflow_mappings: Vec<(AccountPublicKey, EventIdentifier)> = vec![]; let mut memo_mappings: Vec<(Vec, EventIdentifier)> = vec![]; let mut moonlight_tx_mappings = vec![]; + // Iterate over the grouped events and push them to the groups vector in // the new format if they are moonlight events for (tx_ident, group) in grouped_events { - let is_moonlight = group.iter().any(|event| { - // Make sure that the events originate from the transfer contract. + let got_recorded = record_flows( + &mut address_inflow_mappings, + &mut address_outflow_mappings, + &mut memo_mappings, + tx_ident, + &group, + ); + + if got_recorded { + moonlight_tx_mappings.push(MoonlightTransferMapping( + tx_ident, + MoonlightTransferEvents::new(group), + )); + } + } + + TransormerResult { + address_outflow_mappings, + address_inflow_mappings, + memo_mappings, + moonlight_tx_mappings, + } +} + +/// Record moonlight inflows/outflows (transfers) from a transaction, based on +/// Event categorization +/// +/// # Categories of Events being looked for +/// +/// - **Only Moonlight Transaction** +/// - `MOONLIGHT_TOPIC` with `MoonlightTransactionEvent`: Captures all +/// Moonlight protocol transactions (outflows from normal transactions, +/// deposits, or converts). Sender recorded in outflow mapping; receiver in +/// inflow mapping. Refunds (if > 0) are recorded in inflow mapping. +/// +/// - **Only Phoenix Transaction** +/// - `CONVERT_TOPIC` with `ConvertEvent`: Receiver recorded in inflow +/// mapping. +/// +/// - **Moonlight or Phoenix Transaction** +/// - `WITHDRAW_TOPIC` with `WithdrawEvent`: Receiver recorded in inflow +/// mapping. +/// - `MINT_TOPIC` with `WithdrawEvent`: Receiver recorded in inflow mapping. +/// - `CONTRACT_TO_ACCOUNT_TOPIC` with `ContractToAccountEvent`: Receiver +/// recorded in inflow mapping. +/// +/// Mappings are recorded only once per transaction to prevent redundancy. +/// +/// # Returns +/// +/// Returns true if the group contains a moonlight inflow or outflow, false +/// otherwise. 
+fn record_flows( + address_inflow_mappings: &mut Vec<(AccountPublicKey, EventIdentifier)>, + address_outflow_mappings: &mut Vec<(AccountPublicKey, EventIdentifier)>, + memo_mappings: &mut Vec<(Vec<u8>, EventIdentifier)>, + tx_ident: EventIdentifier, + group: &Vec<ContractTxEvent>, +) -> bool { + // Helper to handle inflow mappings without pushing duplicates + let mut handle_inflow = |key: AccountPublicKey| { + if !address_inflow_mappings.contains(&(key, tx_ident)) { + address_inflow_mappings.push((key, tx_ident)); + } + }; + + // Helper to handle outflow mappings without pushing duplicates + let mut handle_outflow = |key: AccountPublicKey| { + if !address_outflow_mappings.contains(&(key, tx_ident)) { + address_outflow_mappings.push((key, tx_ident)); + } + }; + + let filtered_group = group + .iter() + .filter(|event| { + // Make sure that the events originate from the transfer + // contract. if event.target.0 != TRANSFER_CONTRACT { return false; } - /* - Cases of a moonlight in- or outflow: - 1. Any MoonlightTransactionEvent. This implicitly also catches a moonlight outflow for deposit, convert or refund (from moonlight) - 2a. Any withdraw event where the receiver is moonlight. (from phoenix) - 2b. Any mint event where the receiver is moonlight. (from staking) - 3. Any convert event where the receiver is moonlight. (from phoenix) - */ match event.topic.as_str() { MOONLIGHT_TOPIC => { - /* - This also catches deposits & converts. - For deposits & convert the sender will be Some(pk) where pk is the same as the from field of the MoonlightTransactionEvent - */ - if let Ok(moonlight_event) = - rkyv::from_bytes::<MoonlightTransactionEvent>( - &event.data, - ) - { - // An outflow from the sender address is always the case - address_outflow_mappings - .push((moonlight_event.sender, tx_ident)); - - // Exhaustively handle all inflow cases - match ( - moonlight_event.receiver, - moonlight_event.refund_info, - ) { - (None, refund) => { - // Note: Tx sent to self are also recorded as - // inflows. - // If a group only has one event & the event is - // "moonlight", it has to be a transaction to - // self. - if group.len() == 1 { - address_inflow_mappings.push(( - moonlight_event.sender, - tx_ident, - )); - } + // This also catches deposits or converts. + // The DepositEvent or ConvertEvent will have a sender + // Some(pk) equal to the sender field of + // this MoonlightTransactionEvent + let Ok(moonlight_event) = rkyv::from_bytes::< + MoonlightTransactionEvent, + >(&event.data) else { + return false; + }; - // addr != moonlight_event.sender to not record - // an inflow twice for the same tx - if let Some((addr, amt)) = refund { - if amt > 0 && addr != moonlight_event.sender - { - address_inflow_mappings - .push((addr, tx_ident)); - } - } + // An outflow from the sender pk is always + // recorded + handle_outflow(moonlight_event.sender); + + // Exhaustively handle all inflow cases + // - We don't record a refund to the sender as an inflow (no matter + // the amount) + // - We also don't record zero-refunds to anyone as an inflow + match ( + moonlight_event.receiver, + moonlight_event.refund_info, + ) { + (None, refund) => { + // If a group only has one event & + // the event is "moonlight", it has to be a + // transaction to self. + if group.len() == 1 { + // Txs sent to self are always recorded + // as inflows as well (even if the value is 0).
+ handle_inflow(moonlight_event.sender); } - (Some(receiver), None) => address_inflow_mappings - .push((receiver, tx_ident)), - (Some(receiver), Some((addr, amt))) => { - address_inflow_mappings - .push((receiver, tx_ident)); - - if amt > 0 - && addr != receiver - && addr != moonlight_event.sender - { - address_inflow_mappings - .push((addr, tx_ident)); + + if let Some((key, amt)) = refund { + if amt > 0 { + // We rely on the fact that the refund is only + // present if it differs from the sender + handle_inflow(key); + } + } + } + (Some(receiver), None) => { + handle_inflow(receiver); + } + (Some(receiver), Some((key, amt))) => { + handle_inflow(receiver); - if !moonlight_event.memo.is_empty() { - memo_mappings - .push((moonlight_event.memo, tx_ident)); + if amt > 0 // We rely on the fact that the refund is only present if it differs from the sender + && key != receiver + { + handle_inflow(key); + } } + } - return true; + if !moonlight_event.memo.is_empty() { + memo_mappings.push((moonlight_event.memo, tx_ident)); } - false + + true + } + CONVERT_TOPIC => { + let Ok(convert_event) = + rkyv::from_bytes::<ConvertEvent>(&event.data) + else { + return false; + }; + + let WithdrawReceiver::Moonlight(key) = + convert_event.receiver + else { + return false; + }; + + handle_inflow(key); + + true } WITHDRAW_TOPIC | MINT_TOPIC => { - if let Ok(withdraw_event) = + let Ok(withdraw_event) = rkyv::from_bytes::<WithdrawEvent>(&event.data) - { - if let WithdrawReceiver::Moonlight(key) = - withdraw_event.receiver - { - address_inflow_mappings.push((key, tx_ident)); - return true; - } - } - false + else { + return false; + }; + + let WithdrawReceiver::Moonlight(key) = + withdraw_event.receiver + else { + return false; + }; + + handle_inflow(key); + + true } - CONVERT_TOPIC => { - if let Ok(convert_event) = - rkyv::from_bytes::<ConvertEvent>(&event.data) - { - if let WithdrawReceiver::Moonlight(key) = - convert_event.receiver - { - address_inflow_mappings.push((key, tx_ident)); - return true; - } - } - false + CONTRACT_TO_ACCOUNT_TOPIC => { + let Ok(contract_to_account_event) = + rkyv::from_bytes::<ContractToAccountEvent>(&event.data) + else { + return false; + }; + + handle_inflow(contract_to_account_event.receiver); + + true } _ => false, } - }); + }) + .collect::<Vec<_>>(); - if is_moonlight { - moonlight_tx_mappings.push(MoonlightTxMapping( - tx_ident, - MoonlightTxEvents::new(group), - )); - } - } - - TransormerResult { - address_outflow_mappings, - address_inflow_mappings, - memo_mappings, - moonlight_tx_mappings, - } + !filtered_group.is_empty() } diff --git a/node/src/chain.rs b/node/src/chain.rs index 51f53bd939..4b97b4e7b9 100644 --- a/node/src/chain.rs +++ b/node/src/chain.rs @@ -34,6 +34,8 @@ use tracing::{debug, error, info, warn}; use self::acceptor::Acceptor; use self::fsm::SimpleFSM; +#[cfg(feature = "archive")] +use crate::archive::Archive; use crate::database::rocksdb::MD_HASH_KEY; use crate::database::{Ledger, Metadata}; use crate::{database, vm, LongLivedService, Message, Network}; @@ -59,6 +61,9 @@ pub struct ChainSrv { event_sender: Sender, genesis_timestamp: u64, dusk_key: BlsPublicKey, + finality_activation: u64, + #[cfg(feature = "archive")] + archive: Archive, } #[async_trait] impl @@ -89,9 +94,12 @@ impl db, network, vm, + #[cfg(feature = "archive")] + self.archive.clone(), self.max_consensus_queue_size, self.event_sender.clone(), self.dusk_key, + self.finality_activation, ) .await?; @@ -251,6 +259,8 @@ impl ChainSrv { event_sender: Sender, genesis_timestamp: u64, dusk_key: BlsPublicKey, + finality_activation: u64, + #[cfg(feature = "archive")] archive: Archive, ) -> Self { info!(
"ChainSrv::new with keys_path: {}, max_inbound_size: {}", @@ -265,6 +275,9 @@ impl ChainSrv { event_sender, genesis_timestamp, dusk_key, + finality_activation, + #[cfg(feature = "archive")] + archive, } } diff --git a/node/src/chain/acceptor.rs b/node/src/chain/acceptor.rs index 54661f8a93..8523018091 100644 --- a/node/src/chain/acceptor.rs +++ b/node/src/chain/acceptor.rs @@ -21,6 +21,7 @@ use dusk_consensus::operations::Voter; use dusk_consensus::user::provisioners::{ContextProvisioners, Provisioners}; use dusk_consensus::user::stake::Stake; use dusk_core::signatures::bls; +use dusk_core::stake::STAKE_CONTRACT; use dusk_core::stake::{SlashEvent, StakeAmount, StakeEvent}; use metrics::{counter, gauge, histogram}; use node_data::bls::PublicKey; @@ -38,6 +39,8 @@ use tokio::sync::{RwLock, RwLockReadGuard}; use tracing::{debug, error, info, trace, warn}; use super::consensus::Task; +#[cfg(feature = "archive")] +use crate::archive::Archive; use crate::chain::header_validation::{verify_att, verify_faults, Validator}; use crate::chain::metrics::AverageElapsedTime; use crate::database::rocksdb::{ @@ -53,7 +56,19 @@ const CANDIDATES_DELETION_OFFSET: u64 = 10; /// future message. const OFFSET_FUTURE_MSGS: u64 = 5; -pub type RollingFinalityResult = ([u8; 32], BTreeMap); +struct Identifiers { + /// Block hash of the newly finalized block + block_hash: [u8; 32], + /// State root of the newly finalized block + state_root: [u8; 32], +} + +struct RollingFinalityResult { + /// State root of the last finalized block + prev_final_state_root: [u8; 32], + /// New finalized blocks + new_finals: BTreeMap, +} #[allow(dead_code)] pub(crate) enum RevertTarget { @@ -78,10 +93,14 @@ pub(crate) struct Acceptor { pub(crate) db: Arc>, pub(crate) vm: Arc>, pub(crate) network: Arc>, + #[cfg(feature = "archive")] + pub(crate) archive: Archive, /// Sender channel for sending out RUES events event_sender: Sender, dusk_key: bls::PublicKey, + + finality_activation: u64, } impl Drop @@ -181,9 +200,11 @@ impl Acceptor { db: Arc>, network: Arc>, vm: Arc>, + #[cfg(feature = "archive")] archive: Archive, max_queue_size: usize, event_sender: Sender, dusk_key: bls::PublicKey, + finality_activation: u64, ) -> anyhow::Result { let tip_height = tip.inner().header().height; let tip_state_hash = tip.inner().header().state_hash; @@ -202,12 +223,15 @@ impl Acceptor { db: db.clone(), vm: vm.clone(), network: network.clone(), + #[cfg(feature = "archive")] + archive, task: RwLock::new(Task::new_with_keys( keys_path.to_string(), max_queue_size, )?), event_sender, dusk_key, + finality_activation, }; // NB. 
After restart, state_root returned by VM is always the last @@ -696,30 +720,83 @@ impl Acceptor { let vm = self.vm.write().await; - let (stakes, finality) = self.db.read().await.update(|db| { - let (txs, verification_output, stake_events) = vm.accept( - prev_header.state_hash, - blk, - &prev_block_voters[..], - )?; - for spent_tx in txs.iter() { - events.push(TransactionEvent::Executed(spent_tx).into()); - } - est_elapsed_time = start.elapsed(); + let (contract_events, finality) = + self.db.read().await.update(|db| { + let (txs, verification_output, contract_events) = vm + .accept( + prev_header.state_hash, + blk, + &prev_block_voters[..], + )?; + + for spent_tx in txs.iter() { + events + .push(TransactionEvent::Executed(spent_tx).into()); + } + est_elapsed_time = start.elapsed(); + + assert_eq!( + header.state_hash, + verification_output.state_root + ); + assert_eq!( + header.event_bloom, + verification_output.event_bloom + ); - assert_eq!(header.state_hash, verification_output.state_root); - assert_eq!(header.event_bloom, verification_output.event_bloom); + let finality = + self.rolling_finality::(pni, blk, db, &mut events)?; - let finality = - self.rolling_finality::(pni, blk, db, &mut events)?; + let label = finality.0; + // Store block with updated transactions with Error and + // GasSpent + block_size_on_disk = + db.store_block(header, &txs, blk.faults(), label)?; - let label = finality.0; - // Store block with updated transactions with Error and GasSpent - block_size_on_disk = - db.store_block(header, &txs, blk.faults(), label)?; + Ok((contract_events, finality)) + })?; - Ok((stake_events, finality)) - })?; + // use rolling_finality_events for archive + #[cfg(feature = "archive")] + { + if let Some(RollingFinalityResult { new_finals, .. }) = + &finality.1 + { + for (height, Identifiers { block_hash, .. 
}) in + new_finals.iter() + { + if let Err(e) = self + .archive + .finalize_archive_data( + *height, + &hex::encode(block_hash), + ) + .await + { + error!("Failed to finalize block in archive: {e:?}") + } + } + } + + // Store all events from this current block in the archive + self.archive + .store_unfinalized_events( + header.height, + header.hash, + contract_events.clone(), + ) + .await + .expect( + "Storing unfinalized events in archive should never fail", + ); + } + + let mut stakes = vec![]; + for event in contract_events { + if event.event.target.0 == STAKE_CONTRACT { + stakes.push(event.event); + } + } self.log_missing_iterations( provisioners_list.current(), @@ -757,14 +834,29 @@ impl Acceptor { let finalized = final_results.is_some(); - if let Some((prev_final_state, mut new_finals)) = final_results { + if let Some(RollingFinalityResult { + prev_final_state_root, + mut new_finals, + }) = final_results + { + let legacy = blk.header().height < self.finality_activation; + let (_, new_final_state) = new_finals.pop_last().expect("new_finals to be not empty"); - let old_finals_to_merge = new_finals + let new_final_state_root = new_final_state.state_root; + // old final state roots to merge too + let new_finals = new_finals .into_values() - .chain([prev_final_state]) + .map(|finalized_info| finalized_info.state_root) .collect::>(); - vm.finalize_state(new_final_state, old_finals_to_merge)?; + + let old_final_state_roots = if legacy { + [new_finals, vec![prev_final_state_root]].concat() + } else { + [vec![prev_final_state_root], new_finals].concat() + }; + + vm.finalize_state(new_final_state_root, old_final_state_roots)?; } anyhow::Ok((label, finalized)) @@ -890,7 +982,8 @@ impl Acceptor { /// Returns /// - Current accepted block label /// - Previous last finalized state root - /// - List of the new finalized state root + /// - List of the new finalized state root together with the respective + /// block hash fn rolling_finality( &self, pni: u8, // Previous Non-Attested Iterations @@ -926,7 +1019,7 @@ impl Acceptor { } let lfb_hash = lfb_hash.expect("Unable to find last finalized block hash"); - let lfb_state_root = db + let prev_final_state_root = db .block_header(&lfb_hash)? .ok_or(anyhow!( "Cannot get header for last finalized block hash {}", @@ -998,23 +1091,28 @@ impl Acceptor { events.push(event.into()); db.store_block_label(height, &hash, label)?; - let state_hash = db + let state_root = db .block_header(&hash)? .map(|h| h.state_hash) .ok_or(anyhow!( "Cannot get header for hash {}", to_str(&hash) ))?; + let finalized = Identifiers { + block_hash: hash, + state_root, + }; info!( event = "block finalized", src = "rolling_finality", current_height, height, finalized_after, - hash = to_str(&hash), - state_root = to_str(&state_hash), + hash = to_str(&finalized.block_hash), + state_root = to_str(&finalized.state_root), ); - finalized_blocks.insert(height, state_hash); + + finalized_blocks.insert(height, finalized); } } } @@ -1022,7 +1120,10 @@ impl Acceptor { let finalized_result = if finalized_blocks.is_empty() { None } else { - Some((lfb_state_root, finalized_blocks)) + Some(RollingFinalityResult { + prev_final_state_root, + new_finals: finalized_blocks, + }) }; Ok((block_label, finalized_result)) @@ -1067,6 +1168,8 @@ impl Acceptor { // VM was reverted to. 
// The blockchain tip after reverting + #[cfg(feature = "archive")] + let mut archive_revert_info: Vec<(u64, String)> = vec![]; let (blk, label) = self.db.read().await.update(|db| { let mut height = curr_height; loop { @@ -1098,6 +1201,10 @@ impl Acceptor { warn!("cannot notify event {e}") }; + // Temporarily store the reverted block info for the archive + #[cfg(feature = "archive")] + archive_revert_info.push((h.height, hex::encode(h.hash))); + info!( event = "block reverted", height = h.height, @@ -1136,6 +1243,19 @@ impl Acceptor { state_root = hex::encode(blk.header().state_hash) ); + // Remove the block and event entries for this block from the + // archive + #[cfg(feature = "archive")] + for (height, hex_hash) in archive_revert_info { + if let Err(e) = self + .archive + .remove_block_and_events(height, &hex_hash) + .await + { + error!("Failed to delete block & events in archive: {:?}", e); + } + } + self.update_tip(&blk, label).await } diff --git a/node/src/database.rs b/node/src/database.rs index 08bc8e1a06..668c240364 100644 --- a/node/src/database.rs +++ b/node/src/database.rs @@ -80,6 +80,10 @@ pub trait Ledger { fn block_exists(&self, hash: &[u8]) -> Result; fn ledger_tx(&self, tx_id: &[u8]) -> Result>; + fn ledger_txs( + &self, + tx_ids: Vec<&[u8; 32]>, + ) -> Result<Vec<SpentTransaction>>; fn ledger_tx_exists(&self, tx_id: &[u8]) -> Result; diff --git a/node/src/database/rocksdb.rs b/node/src/database/rocksdb.rs index 55f01d524a..8ea6cc6773 100644 --- a/node/src/database/rocksdb.rs +++ b/node/src/database/rocksdb.rs @@ -531,6 +531,43 @@ impl<'db, DB: DBAccess> Ledger for DBTransaction<'db, DB> { Ok(tx) } + /// Returns a list of transactions from the ledger + /// + /// This function expects a list of transaction IDs that are in the ledger. + /// + /// It will return an error if any of the transaction IDs are not found in + /// the ledger.
+ fn ledger_txs( + &self, + tx_ids: Vec<&[u8; 32]>, + ) -> Result<Vec<SpentTransaction>> { + let cf = self.ledger_txs_cf; + + let ids = tx_ids.into_iter().map(|id| (cf, id)).collect::<Vec<_>>(); + + let multi_get_results = self.inner.multi_get_cf(ids); + + let mut spent_transactions = + Vec::with_capacity(multi_get_results.len()); + for result in multi_get_results.into_iter() { + let opt_blob = result.map_err(|e| { + std::io::Error::new(std::io::ErrorKind::Other, e) + })?; + + let Some(blob) = opt_blob else { + return Err(anyhow::anyhow!( + "At least one Transaction ID was not found" + )); + }; + + let stx = SpentTransaction::read(&mut &blob[..])?; + + spent_transactions.push(stx); + } + + Ok(spent_transactions) + } + /// Returns true if the transaction exists in the /// ledger /// @@ -1741,12 +1778,12 @@ mod tests { .for_each(drop); } - struct TestWrapper(tempdir::TempDir); + struct TestWrapper(tempfile::TempDir); impl TestWrapper { fn new(path: &'static str) -> Self { Self( - tempdir::TempDir::new(path) + tempfile::TempDir::with_prefix(path) .expect("Temp directory to be created"), ) } diff --git a/node/src/vm.rs b/node/src/vm.rs index bc104b0a4b..dfccc23a45 100644 --- a/node/src/vm.rs +++ b/node/src/vm.rs @@ -10,7 +10,7 @@ use dusk_consensus::user::provisioners::Provisioners; use dusk_consensus::user::stake::Stake; use dusk_core::signatures::bls::PublicKey as BlsPublicKey; use dusk_core::transfer::moonlight::AccountData; -use node_data::events::contract::ContractEvent; +use node_data::events::contract::ContractTxEvent; use node_data::ledger::{Block, SpentTransaction, Transaction}; #[derive(Default)] @@ -42,7 +42,7 @@ pub trait VMExecution: Send + Sync + 'static { ) -> anyhow::Result<( Vec<SpentTransaction>, VerificationOutput, - Vec<ContractEvent>, + Vec<ContractTxEvent>, )>; fn finalize_state( diff --git a/rusk-prover/CHANGELOG.md b/rusk-prover/CHANGELOG.md index b655893bdc..4e027e8f8d 100644 --- a/rusk-prover/CHANGELOG.md +++ b/rusk-prover/CHANGELOG.md @@ -7,6 +7,20 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [1.1.0] - 2025-02-14 + +### Changed + +- Update `bls12_381-bls` to 0.5 [#2773] +- Update `dusk-bls12_381` to 0.14 [#2773] +- Update `dusk-jubjub` to 0.15.0 [#2773] +- Update `dusk-plonk` to 0.21.0 [#2773] +- Update `dusk-poseidon` to 0.41 [#2773] +- Update `jubjub-schnorr` to 0.6 [#2773] +- Update `phoenix-circuits` to 0.6 [#2773] +- Update `phoenix-core` to 0.34.0 [#2773] +- Update `poseidon-merkle` to 0.8 [#2773] + ## [1.0.1] - 2025-01-23 ### Changed @@ -22,6 +36,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 [#3405]: https://github.com/dusk-network/rusk/issues/3405 -[Unreleased]: https://github.com/dusk-network/rusk/compare/rusk-prover-1.0.1...HEAD +[Unreleased]: https://github.com/dusk-network/rusk/compare/rusk-prover-1.1.0...HEAD +[1.1.0]: https://github.com/dusk-network/rusk/compare/rusk-prover-1.0.1...rusk-prover-1.1.0 [1.0.1]: https://github.com/dusk-network/rusk/compare/rusk-prover-1.0.0...rusk-prover-1.0.1 [1.0.0]: https://github.com/dusk-network/rusk/tree/rusk-prover-1.0.0 diff --git a/rusk-prover/Cargo.toml b/rusk-prover/Cargo.toml index 06410bdc53..f694ff1f88 100644 --- a/rusk-prover/Cargo.toml +++ b/rusk-prover/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "rusk-prover" -version = "1.0.2-alpha.1" +version = "1.1.1-alpha.1" edition = "2021" autobins = false diff --git a/rusk-recovery/CHANGELOG.md b/rusk-recovery/CHANGELOG.md index 9b99c59719..f1c03dea73 100644 --- a/rusk-recovery/CHANGELOG.md +++ b/rusk-recovery/CHANGELOG.md @@ -7,7 +7,13 @@ and
this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] -[1.0.2] - 2025-01-23 +## [1.0.3] - 2025-02-14 + +### Changed + +- Change deprecated `tempdir` with `tempfile` dependency [#3407] + +## [1.0.2] - 2025-01-23 ### Changed @@ -18,8 +24,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - First `rusk-recovery` release +[#3407]: https://github.com/dusk-network/rusk/issues/3407 [#3405]: https://github.com/dusk-network/rusk/issues/3405 -[Unreleased]: https://github.com/dusk-network/rusk/compare/rusk-recovery-1.0.2...HEAD +[Unreleased]: https://github.com/dusk-network/rusk/compare/rusk-recovery-1.0.3...HEAD +[1.0.3]: https://github.com/dusk-network/rusk/compare/rusk-recovery-1.0.2...rusk-recovery-1.0.3 [1.0.2]: https://github.com/dusk-network/rusk/compare/rusk-recovery-1.0.1...rusk-recovery-1.0.2 [1.0.1]: https://github.com/dusk-network/rusk/tree/rusk-recovery-1.0.1 diff --git a/rusk-recovery/Cargo.toml b/rusk-recovery/Cargo.toml index 804e276745..20369a7c5b 100644 --- a/rusk-recovery/Cargo.toml +++ b/rusk-recovery/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "rusk-recovery" -version = "1.0.3-alpha.1" +version = "1.0.4-alpha.1" edition = "2021" autobins = false description = "Tool to restore Rusk to factory settings" @@ -43,7 +43,7 @@ reqwest = { workspace = true, optional = true } tokio = { workspace = true, features = ["full"], optional = true } [dev-dependencies] -tempdir = { workspace = true } +tempfile = { workspace = true } [build-dependencies] cargo_toml = { workspace = true } diff --git a/rusk-recovery/src/state.rs b/rusk-recovery/src/state.rs index be2dcf275e..8bad02a0b2 100644 --- a/rusk-recovery/src/state.rs +++ b/rusk-recovery/src/state.rs @@ -346,7 +346,7 @@ mod tests { #[test] fn mainnet_genesis() -> Result<(), Box> { let mainnet = mainnet_from_file()?; - let tmp = tempdir::TempDir::new("genesis") + let tmp = tempfile::TempDir::with_prefix("genesis") .expect("Should be able to create temporary directory"); let (_, root) = deploy(tmp.path(), &mainnet, dusk_mainnet_key(), |_| {})?; diff --git a/rusk-wallet/CHANGELOG.md b/rusk-wallet/CHANGELOG.md index 54b66ae002..36be6cd1ed 100644 --- a/rusk-wallet/CHANGELOG.md +++ b/rusk-wallet/CHANGELOG.md @@ -15,6 +15,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Add pagenation for transaction history to not pollute the stdout [#3292] +### Fix + +- Fix wrong lower limit for stake operation when performing topup [#3394] + ## [0.1.0] - 2025-01-20 ### Add @@ -46,6 +50,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Fix stake info for inactive stakes with rewards [#2766] - Fix Moonlight stake reward withdrawal [#2523] + [#3405]: https://github.com/dusk-network/rusk/issues/3405 [#3263]: https://github.com/dusk-network/rusk/issues/3263 @@ -68,6 +73,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 [#2340]: https://github.com/dusk-network/rusk/issues/2340 [#2288]: https://github.com/dusk-network/rusk/issues/2288 [#3292]: https://github.com/dusk-network/rusk/issues/3292 +[#3394]: https://github.com/dusk-network/rusk/issues/3394 [Unreleased]: https://github.com/dusk-network/rusk/compare/rusk-wallet-0.1.0...HEAD diff --git a/rusk-wallet/src/bin/interactive/command_menu.rs b/rusk-wallet/src/bin/interactive/command_menu.rs index 907fdc31f5..0eb5e5603e 100644 --- a/rusk-wallet/src/bin/interactive/command_menu.rs +++ b/rusk-wallet/src/bin/interactive/command_menu.rs @@ 
-6,6 +6,7 @@ use std::fmt::Display; +use dusk_core::stake::DEFAULT_MINIMUM_STAKE; use dusk_core::transfer::data::MAX_MEMO_SIZE; use inquire::{InquireError, Select}; use rusk_wallet::currency::Dusk; @@ -15,6 +16,7 @@ use rusk_wallet::gas::{ }; use rusk_wallet::{ Address, Error, Wallet, MAX_CONTRACT_INIT_ARG_SIZE, MAX_FUNCTION_NAME_SIZE, + MIN_CONVERTIBLE, }; use super::ProfileOp; @@ -169,6 +171,21 @@ pub(crate) async fn online( .public_key(stake_idx) .expect("public key to exists in interactive mode"); + let min_val = { + let has_stake = wallet + .stake_info(stake_idx) + .await? + .map(|s| s.amount.is_some()) + .unwrap_or_default(); + + // if the user has stake then they are performing a topup + if has_stake { + MIN_CONVERTIBLE + } else { + DEFAULT_MINIMUM_STAKE.into() + } + }; + let owner = match wallet.find_stake_owner_account(stake_pk).await { Ok(account) => account, Err(Error::NotStaked) => { @@ -177,6 +194,7 @@ pub(crate) async fn online( .iter() .map(|p| Address::Public(p.public_addr)) .collect(); + prompt::request_address(stake_idx, choices)? } e => e?, @@ -185,7 +203,7 @@ pub(crate) async fn online( ProfileOp::Run(Box::new(Command::Stake { address: Some(addr), owner: Some(owner), - amt: prompt::request_stake_token_amt(balance)?, + amt: prompt::request_stake_token_amt(balance, min_val)?, gas_limit: prompt::request_gas_limit(gas::DEFAULT_LIMIT_CALL)?, gas_price: prompt::request_gas_price( DEFAULT_PRICE, diff --git a/rusk-wallet/src/bin/io/prompt.rs b/rusk-wallet/src/bin/io/prompt.rs index 92884d472a..35ce3886ed 100644 --- a/rusk-wallet/src/bin/io/prompt.rs +++ b/rusk-wallet/src/bin/io/prompt.rs @@ -16,7 +16,6 @@ use crossterm::{ use anyhow::Result; use bip39::{ErrorKind, Language, Mnemonic}; -use dusk_core::stake::DEFAULT_MINIMUM_STAKE; use inquire::ui::RenderConfig; use inquire::validator::Validation; @@ -267,10 +266,12 @@ pub(crate) fn request_optional_token_amt( request_token(action, min, balance, None).map_err(Error::from) } -/// Request amount of tokens that can't be lower than MINIMUM_STAKE -pub(crate) fn request_stake_token_amt(balance: Dusk) -> Result { - let min: Dusk = DEFAULT_MINIMUM_STAKE.into(); - +/// Request amount of tokens that can't be lower than the `min` argument and +/// higher than `balance` +pub(crate) fn request_stake_token_amt( + balance: Dusk, + min: Dusk, +) -> Result { request_token("stake", min, balance, None).map_err(Error::from) } diff --git a/rusk/CHANGELOG.md b/rusk/CHANGELOG.md index b303cce770..0be844d1df 100644 --- a/rusk/CHANGELOG.md +++ b/rusk/CHANGELOG.md @@ -5,7 +5,46 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
-## Unreleased +## [Unreleased] + +## [1.1.0] - 2025-02-14 + +### Added + +- Add `abi::public_sender` [#3341] +- Add `[vm]` config section [#3341] +- Add CONTRACT_TO_ACCOUNT inflow case on archive moonlight filtering [#3494] +- Add Dockerfile for persistent state builds [#1080] +- Add `vm_config` section to `/on/node/info` [#3341] + +### Changed + +- Deprecate `[chain].gas_per_deploy_byte` config [#3341] +- Deprecate `[chain].min_deployment_gas_price` config [#3341] +- Deprecate `[chain].generation_timeout` config [#3341] +- Deprecate `[chain].min_deploy_points` config [#3341] +- Deprecate `[chain].block_gas_limit` config [#3341] +- Change how Rusk controls the archive for synchronization [#3359] +- Update `bls12_381-bls` to 0.5 [#2773] +- Update `dusk-bls12_381` to 0.14 [#2773] +- Update `dusk-jubjub` to 0.15.0 [#2773] +- Update `dusk-plonk` to 0.21.0 [#2773] +- Update `dusk-poseidon` to 0.41 [#2773] +- Update `jubjub-schnorr` to 0.6 [#2773] +- Update `phoenix-circuits` to 0.6 [#2773] +- Update `phoenix-core` to 0.34.0 [#2773] +- Update `poseidon-merkle` to 0.8 [#2773] + +### Fixed + +- Fix node unresponsiveness when querying contracts that take too long to terminate [#3481] + +### Removed + +- Remove legacy event system +- Remove archive mpsc channel & archive event forwarding [#3359] + +## [1.0.2] - 2025-01-27 ### Added @@ -300,10 +339,15 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Add build system that generates keys for circuits and caches them. +[#3494]: https://github.com/dusk-network/rusk/issues/3494 +[#3481]: https://github.com/dusk-network/rusk/issues/3481 +[#3359]: https://github.com/dusk-network/rusk/issues/3359 [#3422]: https://github.com/dusk-network/rusk/issues/3422 [#3405]: https://github.com/dusk-network/rusk/issues/3405 +[#3341]: https://github.com/dusk-network/rusk/issues/3341 [#3359]: https://github.com/dusk-network/rusk/issues/3359 [#3206]: https://github.com/dusk-network/rusk/issues/3206 +[#2773]: https://github.com/dusk-network/rusk/issues/2773 [#2597]: https://github.com/dusk-network/rusk/issues/2597 [#2536]: https://github.com/dusk-network/rusk/issues/2536 [#2207]: https://github.com/dusk-network/rusk/issues/2207 @@ -316,6 +360,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 [#1257]: https://github.com/dusk-network/rusk/pull/1257 [#1219]: https://github.com/dusk-network/rusk/issues/1219 [#1144]: https://github.com/dusk-network/rusk/issues/1144 +[#1080]: https://github.com/dusk-network/rusk/issues/1080 [#970]: https://github.com/dusk-network/rusk/issues/970 [#931]: https://github.com/dusk-network/rusk/issues/931 [#401]: https://github.com/dusk-network/rusk/issues/401 @@ -326,7 +371,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 [#290]: https://github.com/dusk-network/rusk/issues/290 -[unreleased]: https://github.com/dusk-network/rusk/compare/rusk-1.0.1...HEAD +[Unreleased]: https://github.com/dusk-network/rusk/compare/dusk-rusk-1.1.0...HEAD +[1.1.0]: https://github.com/dusk-network/rusk/compare/dusk-rusk-1.0.2...dusk-rusk-1.1.0 +[1.0.2]: https://github.com/dusk-network/rusk/compare/rusk-1.0.1...dusk-rusk-1.0.2 [1.0.1]: https://github.com/dusk-network/rusk/compare/rusk-1.0.0...rusk-1.0.1 [1.0.0]: https://github.com/dusk-network/rusk/compare/v0.8.0...rusk-1.0.0 [0.8.0]: https://github.com/dusk-network/rusk/compare/v0.7.0...v0.8.0 diff --git a/rusk/Cargo.toml b/rusk/Cargo.toml index 006e2eedbc..df5ba76bf6 100644 --- a/rusk/Cargo.toml +++ 
b/rusk/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "dusk-rusk" -version = "1.0.2-dev" +version = "1.1.1-alpha.1" edition = "2021" autobins = false @@ -39,16 +39,13 @@ serde_json = { workspace = true } serde_with = { workspace = true, features = ["hex"] } humantime-serde = { workspace = true } bs58 = { workspace = true } -base64 = { workspace = true } hex = { workspace = true } parking_lot = { workspace = true } rkyv = { workspace = true, features = ["size_32"] } bytecheck = { workspace = true } dirs = { workspace = true } blake3 = { workspace = true } -blake2b_simd = { workspace = true } -sha3 = { workspace = true } dusk-bytes = { workspace = true } kadcast = { workspace = true } pin-project = { workspace = true } diff --git a/rusk/benches/block_ingestion.rs b/rusk/benches/block_ingestion.rs index 0afabed639..21c85deb6d 100644 --- a/rusk/benches/block_ingestion.rs +++ b/rusk/benches/block_ingestion.rs @@ -23,8 +23,8 @@ use node_data::ledger::Transaction; use rand::prelude::StdRng; use rand::seq::SliceRandom; use rand::SeedableRng; -use rusk::Rusk; -use rusk::DUSK_CONSENSUS_KEY; +use rusk::node::RuskVmConfig; +use rusk::{Rusk, DUSK_CONSENSUS_KEY}; use tempfile::tempdir; use common::state::new_state; @@ -158,7 +158,8 @@ pub fn accept_benchmark(c: &mut Criterion) { let snapshot = toml::from_str(include_str!("../tests/config/bench.toml")) .expect("Cannot deserialize config"); - let rusk = new_state(&tmp, &snapshot, BLOCK_GAS_LIMIT) + let vm_config = RuskVmConfig::new().with_block_gas_limit(BLOCK_GAS_LIMIT); + let rusk = new_state(&tmp, &snapshot, vm_config) .expect("Creating state should work"); let phoenix_txs = load_phoenix_txs(); diff --git a/rusk/default.config.toml b/rusk/default.config.toml index 830c4d38b3..d9e316ea48 100644 --- a/rusk/default.config.toml +++ b/rusk/default.config.toml @@ -21,12 +21,21 @@ [chain] #db_path = '/home/user/.dusk/rusk' #consensus_keys_path = '/home/user/.dusk/rusk/consensus.keys' -#generation_timeout = '3s' -# Note: changing the gas per deploy byte parameter is equivalent to forking the chain. -#gas_per_deploy_byte = 100 -#min_deployment_gas_price = 2000 -#min_gas_limit = 75000 -#min_deploy_points = 5000000 +min_gas_limit = 150000 + +# Note: changing the vm settings is equivalent to forking the chain. 
+[vm] +generation_timeout = '3s' +gas_per_deploy_byte = 100 +min_deployment_gas_price = 2000 +min_deploy_points = 5000000 +block_gas_limit = 3000000000 + +[vm.features] +# ABI_PUBLIC_SENDER = +# key = activation_height +# key = activation_height +# key = activation_height [databroker] max_inv_entries = 100 diff --git a/rusk/src/bin/config.rs b/rusk/src/bin/config.rs index a0286a23b3..5b74910149 100644 --- a/rusk/src/bin/config.rs +++ b/rusk/src/bin/config.rs @@ -26,6 +26,9 @@ use self::{ mempool::MempoolConfig, telemetry::TelemetryConfig, }; +#[cfg(feature = "chain")] +use rusk::node::RuskVmConfig; + use serde::{Deserialize, Serialize}; use crate::args::Args; @@ -50,6 +53,10 @@ pub(crate) struct Config { #[serde(default = "ChainConfig::default")] pub(crate) chain: ChainConfig, + #[cfg(feature = "chain")] + #[serde(default = "RuskVmConfig::default")] + pub(crate) vm: RuskVmConfig, + #[serde(default = "HttpConfig::default")] pub(crate) http: HttpConfig, diff --git a/rusk/src/bin/config/chain.rs b/rusk/src/bin/config/chain.rs index 1e69ac6175..34f9db7d4d 100644 --- a/rusk/src/bin/config/chain.rs +++ b/rusk/src/bin/config/chain.rs @@ -12,8 +12,6 @@ use std::{ use node::database::DatabaseOptions; use serde::{Deserialize, Serialize}; -pub const DEFAULT_BLOCK_GAS_LIMIT: u64 = 5 * 1_000_000_000; - use crate::args::Args; #[derive(Serialize, Deserialize, Clone, Default)] @@ -24,16 +22,21 @@ pub(crate) struct ChainConfig { consensus_keys_path: Option, #[serde(with = "humantime_serde")] #[serde(default)] + #[deprecated(since = "1.0.3", note = "please use `RuskVmConfig` instead")] generation_timeout: Option, max_queue_size: Option, // NB: changing the gas_per_deploy_byte/block_gas_limit is equivalent to // forking the chain. + #[deprecated(since = "1.0.3", note = "please use `RuskVmConfig` instead")] gas_per_deploy_byte: Option, + #[deprecated(since = "1.0.3", note = "please use `RuskVmConfig` instead")] min_deployment_gas_price: Option, + #[deprecated(since = "1.0.3", note = "please use `RuskVmConfig` instead")] min_deploy_points: Option, min_gas_limit: Option, + #[deprecated(since = "1.0.3", note = "please use `RuskVmConfig` instead")] block_gas_limit: Option, #[serde(with = "humantime_serde")] @@ -82,19 +85,27 @@ impl ChainConfig { self.db_options.clone().unwrap_or_default() } + #[deprecated(since = "1.0.3", note = "please use `RuskVmConfig` instead")] pub(crate) fn generation_timeout(&self) -> Option { + #[allow(deprecated)] self.generation_timeout } + #[deprecated(since = "1.0.3", note = "please use `RuskVmConfig` instead")] pub(crate) fn gas_per_deploy_byte(&self) -> Option { + #[allow(deprecated)] self.gas_per_deploy_byte } + #[deprecated(since = "1.0.3", note = "please use `RuskVmConfig` instead")] pub(crate) fn min_deployment_gas_price(&self) -> Option { + #[allow(deprecated)] self.min_deployment_gas_price } + #[deprecated(since = "1.0.3", note = "please use `RuskVmConfig` instead")] pub(crate) fn min_deploy_points(&self) -> Option { + #[allow(deprecated)] self.min_deploy_points } @@ -106,8 +117,10 @@ impl ChainConfig { self.max_queue_size.unwrap_or(10_000) } - pub(crate) fn block_gas_limit(&self) -> u64 { - self.block_gas_limit.unwrap_or(DEFAULT_BLOCK_GAS_LIMIT) + #[deprecated(since = "1.0.3", note = "please use `RuskVmConfig` instead")] + pub(crate) fn block_gas_limit(&self) -> Option { + #[allow(deprecated)] + self.block_gas_limit } pub(crate) fn genesis_timestamp(&self) -> u64 { diff --git a/rusk/src/bin/config/http.rs b/rusk/src/bin/config/http.rs index 244b42e809..0e0eab03e6 100644 --- 
a/rusk/src/bin/config/http.rs +++ b/rusk/src/bin/config/http.rs @@ -79,7 +79,7 @@ impl Default for HttpConfig { } const fn default_feeder_call_gas() -> u64 { - u64::MAX + 3 * 1_000_000_000 } const fn default_listen() -> bool { diff --git a/rusk/src/bin/main.rs b/rusk/src/bin/main.rs index bc4780ae0e..1b3a26da7c 100644 --- a/rusk/src/bin/main.rs +++ b/rusk/src/bin/main.rs @@ -11,7 +11,7 @@ mod ephemeral; mod log; #[cfg(feature = "chain")] -use tracing::info; +use tracing::{info, warn}; use clap::Parser; @@ -69,6 +69,7 @@ async fn main() -> Result<(), Box> { let db_path = config.chain.db_path(); node_builder = node_builder + .with_vm_config(config.vm) .with_feeder_call_gas(config.http.feeder_call_gas) .with_db_path(db_path) .with_db_options(config.chain.db_options()) @@ -80,14 +81,32 @@ async fn main() -> Result<(), Box> { .with_genesis_timestamp(config.chain.genesis_timestamp()) .with_mempool(config.mempool.into()) .with_state_dir(state_dir) - .with_generation_timeout(config.chain.generation_timeout()) - .with_gas_per_deploy_byte(config.chain.gas_per_deploy_byte()) - .with_min_deployment_gas_price( - config.chain.min_deployment_gas_price(), - ) - .with_min_deploy_points(config.chain.min_deploy_points()) - .with_min_gas_limit(config.chain.min_gas_limit()) - .with_block_gas_limit(config.chain.block_gas_limit()); + .with_min_gas_limit(config.chain.min_gas_limit()); + + #[allow(deprecated)] + { + if let Some(gas_byte) = config.chain.gas_per_deploy_byte() { + warn!("[chain].gas_per_deploy_byte is deprecated, use [vm].gas_per_deploy_byte"); + node_builder = node_builder.with_gas_per_deploy_byte(gas_byte); + } + if let Some(price) = config.chain.min_deployment_gas_price() { + warn!("[chain].min_deployment_gas_price is deprecated, use [vm].min_deployment_gas_price"); + node_builder = + node_builder.with_min_deployment_gas_price(price); + } + if let Some(timeout) = config.chain.generation_timeout() { + warn!("[chain].generation_timeout is deprecated, use [vm].generation_timeout"); + node_builder = node_builder.with_generation_timeout(timeout); + } + if let Some(min) = config.chain.min_deploy_points() { + warn!("[chain].min_deploy_points is deprecated, use [vm].min_deploy_points"); + node_builder = node_builder.with_min_deploy_points(min); + } + if let Some(limit) = config.chain.block_gas_limit() { + warn!("[chain].block_gas_limit is deprecated, use [vm].block_gas_limit"); + node_builder = node_builder.with_block_gas_limit(limit); + } + } }; if config.http.listen { diff --git a/rusk/src/lib/builder/node.rs b/rusk/src/lib/builder/node.rs index 31e2e47ac6..b647fcef3c 100644 --- a/rusk/src/lib/builder/node.rs +++ b/rusk/src/lib/builder/node.rs @@ -19,13 +19,13 @@ use node::network::Kadcast; use node::telemetry::TelemetrySrv; use node::{LongLivedService, Node}; +#[cfg(feature = "archive")] +use node::archive::Archive; use tokio::sync::{broadcast, mpsc}; use tracing::info; -#[cfg(feature = "archive")] -use {node::archive::Archive, node::archive::ArchivistSrv}; use crate::http::{DataSources, HttpServer, HttpServerConfig}; -use crate::node::{ChainEventStreamer, RuskNode, Services}; +use crate::node::{ChainEventStreamer, RuskNode, RuskVmConfig, Services}; use crate::{Rusk, VERSION}; #[derive(Default)] @@ -39,13 +39,8 @@ pub struct RuskNodeBuilder { db_options: DatabaseOptions, max_chain_queue_size: usize, genesis_timestamp: u64, - - generation_timeout: Option, - gas_per_deploy_byte: Option, - min_deployment_gas_price: Option, + vm_config: RuskVmConfig, min_gas_limit: Option, - min_deploy_points: Option, - 
block_gas_limit: u64, feeder_call_gas: u64, state_dir: PathBuf, @@ -54,11 +49,7 @@ pub struct RuskNodeBuilder { command_revert: bool, } -const DEFAULT_GAS_PER_DEPLOY_BYTE: u64 = 100; -const DEFAULT_MIN_DEPLOYMENT_GAS_PRICE: u64 = 2000; const DEFAULT_MIN_GAS_LIMIT: u64 = 75000; -const DEFAULT_MIN_DEPLOY_POINTS: u64 = 5_000_000; - impl RuskNodeBuilder { pub fn with_consensus_keys(mut self, consensus_keys_path: String) -> Self { self.consensus_keys_path = consensus_keys_path; @@ -115,27 +106,34 @@ impl RuskNodeBuilder { self } - pub fn with_generation_timeout( + #[deprecated(since = "1.0.3", note = "please use `with_vm_config` instead")] + pub fn with_generation_timeout>>( mut self, - generation_timeout: Option, + generation_timeout: O, ) -> Self { - self.generation_timeout = generation_timeout; + self.vm_config.generation_timeout = generation_timeout.into(); self } - pub fn with_gas_per_deploy_byte( + #[deprecated(since = "1.0.3", note = "please use `with_vm_config` instead")] + pub fn with_gas_per_deploy_byte>>( mut self, - gas_per_deploy_byte: Option, + gas_per_deploy_byte: O, ) -> Self { - self.gas_per_deploy_byte = gas_per_deploy_byte; + if let Some(gas_per_deploy_byte) = gas_per_deploy_byte.into() { + self.vm_config.gas_per_deploy_byte = gas_per_deploy_byte; + } self } - pub fn with_min_deployment_gas_price( + #[deprecated(since = "1.0.3", note = "please use `with_vm_config` instead")] + pub fn with_min_deployment_gas_price>>( mut self, - min_deployment_gas_price: Option, + min_deployment_gas_price: O, ) -> Self { - self.min_deployment_gas_price = min_deployment_gas_price; + if let Some(min_deploy_gas_price) = min_deployment_gas_price.into() { + self.vm_config.min_deployment_gas_price = min_deploy_gas_price; + } self } @@ -144,16 +142,25 @@ impl RuskNodeBuilder { self } - pub fn with_min_deploy_points( + #[deprecated(since = "1.0.3", note = "please use `with_vm_config` instead")] + pub fn with_min_deploy_points>>( mut self, - min_deploy_points: Option, + min_deploy_points: O, ) -> Self { - self.min_deploy_points = min_deploy_points; + if let Some(min_deploy_points) = min_deploy_points.into() { + self.vm_config.min_deploy_points = min_deploy_points; + } self } - pub fn with_block_gas_limit(mut self, block_gas_limit: u64) -> Self { - self.block_gas_limit = block_gas_limit; + #[deprecated(since = "1.0.3", note = "please use `with_vm_config` instead")] + pub fn with_block_gas_limit>>( + mut self, + block_gas_limit: O, + ) -> Self { + if let Some(block_gas_limit) = block_gas_limit.into() { + self.vm_config.block_gas_limit = block_gas_limit; + } self } @@ -177,6 +184,11 @@ impl RuskNodeBuilder { self } + pub fn with_vm_config(mut self, vm_config: RuskVmConfig) -> Self { + self.vm_config = vm_config; + self + } + /// Build the RuskNode and corresponding services pub async fn build_and_run(self) -> anyhow::Result<()> { let channel_cap = self @@ -188,38 +200,27 @@ impl RuskNodeBuilder { let (node_sender, node_receiver) = mpsc::channel(1000); #[cfg(feature = "archive")] - let (archive_sender, archive_receiver) = mpsc::channel(10000); - - let gas_per_deploy_byte = self - .gas_per_deploy_byte - .unwrap_or(DEFAULT_GAS_PER_DEPLOY_BYTE); - let min_deployment_gas_price = self - .min_deployment_gas_price - .unwrap_or(DEFAULT_MIN_DEPLOYMENT_GAS_PRICE); + let archive = Archive::create_or_open(self.db_path.clone()).await; + let min_gas_limit = self.min_gas_limit.unwrap_or(DEFAULT_MIN_GAS_LIMIT); - let min_deploy_points = - self.min_deploy_points.unwrap_or(DEFAULT_MIN_DEPLOY_POINTS); + let 
finality_activation = self + .vm_config + .feature(crate::node::FEATURE_ABI_PUBLIC_SENDER) + .unwrap_or(u64::MAX); let rusk = Rusk::new( self.state_dir, self.kadcast.kadcast_id.unwrap_or_default(), - self.generation_timeout, - gas_per_deploy_byte, - min_deployment_gas_price, + self.vm_config, min_gas_limit, - min_deploy_points, - self.block_gas_limit, self.feeder_call_gas, rues_sender.clone(), #[cfg(feature = "archive")] - archive_sender.clone(), + archive.clone(), ) .map_err(|e| anyhow::anyhow!("Cannot instantiate VM {e}"))?; info!("Rusk VM loaded"); - #[cfg(feature = "archive")] - let archive = Archive::create_or_open(self.db_path.clone()).await; - let node = { let db = rocksdb::Backend::create_or_open( self.db_path.clone(), @@ -239,6 +240,9 @@ impl RuskNodeBuilder { node_sender.clone(), self.genesis_timestamp, *crate::DUSK_CONSENSUS_KEY, + finality_activation, + #[cfg(feature = "archive")] + archive.clone(), ); if self.command_revert { chain_srv @@ -265,8 +269,6 @@ impl RuskNodeBuilder { service_list.push(Box::new(ChainEventStreamer { node_receiver, rues_sender, - #[cfg(feature = "archive")] - archivist_sender: archive_sender, })); let mut handler = DataSources::default(); @@ -294,12 +296,6 @@ impl RuskNodeBuilder { ); } - #[cfg(feature = "archive")] - service_list.push(Box::new(ArchivistSrv { - archive_receiver, - archivist: archive, - })); - node.inner().initialize(&mut service_list).await?; node.inner().spawn_all(service_list).await?; diff --git a/rusk/src/lib/http.rs b/rusk/src/lib/http.rs index dec08ceacb..420af4da34 100644 --- a/rusk/src/lib/http.rs +++ b/rusk/src/lib/http.rs @@ -17,7 +17,7 @@ mod stream; pub(crate) use event::{ BinaryWrapper, DataType, ExecutionError, MessageResponse as EventResponse, - RequestData, Target, + RequestData, }; use dusk_core::abi::Event; @@ -74,7 +74,7 @@ use crate::VERSION; pub use self::event::{RuesDispatchEvent, RuesEvent, RUES_LOCATION_PREFIX}; -use self::event::{MessageRequest, ResponseData, RuesEventUri, SessionId}; +use self::event::{ResponseData, RuesEventUri, SessionId}; use self::stream::{Listener, Stream}; const RUSK_VERSION_HEADER: &str = "Rusk-Version"; @@ -148,26 +148,6 @@ pub struct DataSources { #[async_trait] impl HandleRequest for DataSources { - fn can_handle(&self, request: &MessageRequest) -> bool { - self.sources.iter().any(|s| s.can_handle(request)) - } - async fn handle( - &self, - request: &MessageRequest, - ) -> anyhow::Result { - info!( - "Received {:?}:{} request", - request.event.target, request.event.topic - ); - request.check_rusk_version()?; - for h in &self.sources { - if h.can_handle(request) { - return h.handle(request).await; - } - } - Err(anyhow::anyhow!("unsupported target type")) - } - fn can_handle_rues(&self, event: &RuesDispatchEvent) -> bool { self.sources.iter().any(|s| s.can_handle_rues(event)) } @@ -254,155 +234,6 @@ async fn listening_loop( } } -async fn handle_stream( - sources: Arc, - websocket: HyperWebsocket, - target: Target, - mut shutdown: broadcast::Receiver, -) { - let mut stream = match websocket.await { - Ok(stream) => stream, - Err(_) => return, - }; - - // Add this block to disable requests through websockets - // { - // let _ = stream - // .close(Some(CloseFrame { - // code: CloseCode::Unsupported, - // reason: Cow::from("Websocket is currently unsupported"), - // })) - // .await; - // #[allow(clippy::needless_return)] - // return; - // } - - let (responder, mut responses) = mpsc::unbounded_channel::(); - - 'outer: loop { - tokio::select! 
{ - // If the server shuts down we send a close frame to the client - // and stop. - _ = shutdown.recv() => { - let _ = stream.close(Some(CloseFrame { - code: CloseCode::Away, - reason: Cow::from("Shutting down"), - })).await; - break; - } - - rsp = responses.recv() => { - // `responder` is never dropped so this can never be `None` - let rsp = rsp.unwrap(); - - if let DataType::Channel(c) = rsp.data { - let mut datas = stream_iter(c).map(|e| { - EventResponse { - data: e.into(), - headers: rsp.headers.clone(), - error: None - } - });//.await; - while let Some(c) = datas.next().await { - let rsp = serde_json::to_string(&c).unwrap_or_else(|err| { - serde_json::to_string( - &EventResponse::from_error( - format!("Failed serializing response: {err}") - )).expect("serializing error response should succeed") - }); - - // If we error in sending the message we send a close frame - // to the client and stop. - if stream.send(Message::Text(rsp)).await.is_err() { - let _ = stream.close(Some(CloseFrame { - code: CloseCode::Error, - reason: Cow::from("Failed sending response"), - })).await; - // break; - } - } - - - } else { - // Serialize the response to text. If this does not succeed, - // we simply serialize an error response. - let rsp = serde_json::to_string(&rsp).unwrap_or_else(|err| { - serde_json::to_string( - &EventResponse::from_error( - format!("Failed serializing response: {err}") - )).expect("serializing error response should succeed") - }); - - // If we error in sending the message we send a close frame - // to the client and stop. - if stream.send(Message::Text(rsp)).await.is_err() { - let _ = stream.close(Some(CloseFrame { - code: CloseCode::Error, - reason: Cow::from("Failed sending response"), - })).await; - break; - } - } - } - - msg = stream.next() => { - - let mut req = match msg { - Some(Ok(msg)) => match msg { - // We received a text request. - Message::Text(msg) => { - serde_json::from_str(&msg) - .map_err(|err| anyhow::anyhow!("Failed deserializing request: {err}")) - }, - // We received a binary request. - Message::Binary(msg) => { - MessageRequest::parse(&msg) - .map_err(|err| anyhow::anyhow!("Failed deserializing request: {err}")) - } - // Any other type of message is unsupported. - _ => Err(anyhow::anyhow!("Only text and binary messages are supported")) - } - // Errored while receiving the message, we will - // close the stream and return a close frame. - Some(Err(err)) => { - Err(anyhow::anyhow!("Failed receiving message: {err}")) - } - // The stream has stopped producing messages, and we - // should close it and stop. The client likely has done - // this on purpose, and it's a part of the normal - // operation of the server. 
- None => { - let _ = stream.close(Some(CloseFrame { - code: CloseCode::Normal, - reason: Cow::from("Stream stopped"), - })).await; - break; - } - }; - match req { - // We received a valid request and should spawn a new task to handle it - Ok(mut req) => { - req.event.target=target.clone(); - task::spawn(handle_execution( - sources.clone(), - req, - responder.clone(), - )); - }, - Err(e) => { - let _ = stream.close(Some(CloseFrame { - code: CloseCode::Error, - reason: Cow::from(e.to_string()), - })).await; - break; - } - } - - } - } - } -} - struct ExecutionService { sources: Arc, sockets_map: @@ -627,41 +458,6 @@ async fn handle_stream_rues( sockets.remove(&sid); } -async fn handle_dispatch( - uri: RuesEventUri, - body: Incoming, - handler: Arc, - sender: mpsc::Sender>, -) { - let bytes = match body.collect().await { - Ok(bytes) => bytes.to_bytes(), - Err(err) => { - let _ = sender.send(Err(err.into())).await; - return; - } - }; - - let req = match MessageRequest::parse(&bytes) { - Ok(req) => req, - Err(err) => { - let _ = sender.send(Err(err)).await; - return; - } - }; - - let rsp = match handler.handle(&req).await { - Ok(rsp) => rsp, - Err(err) => { - let _ = sender.send(Err(err)).await; - return; - } - }; - - let (data, headers) = rsp.into_inner(); - - sender.send(Ok(RuesEvent { uri, data, headers })).await; -} - fn response( status: StatusCode, body: impl Into, @@ -830,66 +626,7 @@ where return Ok(response); } - if hyper_tungstenite::is_upgrade_request(&req) { - let target = req.uri().path().try_into()?; - - let (response, websocket) = hyper_tungstenite::upgrade(&mut req, None)?; - task::spawn(handle_stream(sources, websocket, target, shutdown)); - - Ok(response.map(Into::into)) - } else { - let (execution_request, binary_resp) = - MessageRequest::from_request(req).await?; - - let mut resp_headers = execution_request.x_headers(); - - let (responder, mut receiver) = mpsc::unbounded_channel(); - handle_execution(sources, execution_request, responder).await; - - let execution_response = receiver - .recv() - .await - .expect("An execution should always return a response"); - resp_headers.extend(execution_response.headers.clone()); - let mut resp = execution_response.into_http(binary_resp)?; - - for (k, v) in resp_headers { - let k = HeaderName::from_str(&k)?; - let v = match v { - serde_json::Value::String(s) => HeaderValue::from_str(&s), - serde_json::Value::Null => HeaderValue::from_str(""), - _ => HeaderValue::from_str(&v.to_string()), - }?; - resp.headers_mut().append(k, v); - } - - Ok(resp) - } -} - -async fn handle_execution( - sources: Arc, - request: MessageRequest, - responder: mpsc::UnboundedSender, -) where - H: HandleRequest, -{ - let mut rsp = sources - .handle(&request) - .await - .map(|data| { - let (data, mut headers) = data.into_inner(); - headers.append(&mut request.x_headers()); - EventResponse { - data, - error: None, - headers, - } - }) - .unwrap_or_else(|e| request.to_error(e.to_string())); - - rsp.set_header(RUSK_VERSION_HEADER, serde_json::json!(*VERSION)); - let _ = responder.send(rsp); + Err(ExecutionError::Generic(anyhow::anyhow!("Unsupported path"))) } async fn handle_execution_rues( @@ -923,12 +660,6 @@ async fn handle_execution_rues( #[async_trait] pub trait HandleRequest: Send + Sync + 'static { - fn can_handle(&self, request: &MessageRequest) -> bool; - async fn handle( - &self, - request: &MessageRequest, - ) -> anyhow::Result; - fn can_handle_rues(&self, request: &RuesDispatchEvent) -> bool; async fn handle_rues( &self, @@ -941,7 +672,6 @@ mod tests { use 
std::{fs, thread}; use super::*; - use event::Event as EventRequest; use dusk_core::abi::ContractId; use node_data::events::contract::{ @@ -962,26 +692,15 @@ mod tests { #[async_trait] impl HandleRequest for TestHandle { - fn can_handle(&self, _request: &MessageRequest) -> bool { - true - } - fn can_handle_rues(&self, request: &RuesDispatchEvent) -> bool { - false + true } async fn handle_rues( &self, request: &RuesDispatchEvent, ) -> anyhow::Result { - unimplemented!() - } - - async fn handle( - &self, - request: &MessageRequest, - ) -> anyhow::Result { - let response = match request.event.to_route() { - (_, _, "stream") => { + let response = match request.uri.inner() { + ("test", _, "stream") => { let (sender, rec) = std::sync::mpsc::channel(); thread::spawn(move || { for f in STREAMED_DATA.iter() { @@ -990,7 +709,10 @@ mod tests { }); ResponseData::new(rec) } - _ => ResponseData::new(request.event_data().to_vec()), + ("test", _, "echo") => { + ResponseData::new(request.data.as_bytes().to_vec()) + } + _ => anyhow::bail!("Unsupported"), }; Ok(response) } @@ -1017,19 +739,12 @@ mod tests { let data = Vec::from(&b"I am call data 0"[..]); let data = RequestData::Binary(BinaryWrapper { inner: data }); - let event = EventRequest { - target: Target::None, - data, - topic: "topic".into(), - }; - - let request = serde_json::to_vec(&event) - .expect("Serializing request should succeed"); + let request_bytes = data.as_bytes(); let client = reqwest::Client::new(); let response = client - .post(format!("http://{}/01/target", server.local_addr)) - .body(request) + .post(format!("http://{}/on/test/echo", server.local_addr)) + .body(request_bytes.to_vec()) .send() .await .expect("Requesting should succeed"); @@ -1038,7 +753,6 @@ mod tests { response.bytes().await.expect("There should be a response"); let response_bytes = hex::decode(response_bytes).expect("data to be hex encoded"); - let request_bytes = event.data.as_bytes(); assert_eq!( request_bytes, response_bytes, @@ -1071,15 +785,7 @@ mod tests { let data = Vec::from(&b"I am call data 0"[..]); let data = RequestData::Binary(BinaryWrapper { inner: data }); - - let event = EventRequest { - target: Target::None, - data, - topic: "topic".into(), - }; - - let request = serde_json::to_vec(&event) - .expect("Serializing request should succeed"); + let request_bytes = data.as_bytes().to_vec(); let client = reqwest::ClientBuilder::new() .add_root_certificate(certificate) @@ -1089,10 +795,10 @@ mod tests { let response = client .post(format!( - "https://localhost:{}/01/target", + "https://localhost:{}/on/test/echo", server.local_addr.port() )) - .body(request) + .body(request_bytes.clone()) .send() .await .expect("Requesting should succeed"); @@ -1101,7 +807,6 @@ mod tests { response.bytes().await.expect("There should be a response"); let response_bytes = hex::decode(response_bytes).expect("data to be hex encoded"); - let request_bytes = event.data.as_bytes(); assert_eq!( request_bytes, response_bytes, @@ -1109,90 +814,6 @@ mod tests { ); } - #[tokio::test(flavor = "multi_thread")] - async fn websocket_queries() { - let cert_and_key: Option<(String, String)> = None; - - let (_, event_receiver) = broadcast::channel(16); - let ws_event_channel_cap = 2; - - let server = HttpServer::bind( - TestHandle, - event_receiver, - ws_event_channel_cap, - "localhost:0", - HeaderMap::new(), - cert_and_key, - ) - .await - .expect("Binding the server to the address should succeed"); - - let stream = TcpStream::connect(server.local_addr) - .expect("Connecting to the server 
should succeed"); - - let ws_uri = format!("ws://{}/01/stream", server.local_addr); - let (mut stream, _) = client(ws_uri, stream) - .expect("Handshake with the server should succeed"); - - let event = EventRequest { - target: Target::None, - data: RequestData::Text("Not used".into()), - topic: "stream".into(), - }; - let request_x_header: serde_json::Map = - serde_json::from_str(r#"{"X-requestid": "100"}"#) - .expect("headers to be serialized"); - - let request = MessageRequest { - event, - headers: request_x_header.clone(), - }; - - let request = serde_json::to_string(&request).unwrap(); - - stream - .send(Message::Text(request)) - .expect("Sending request to the server should succeed"); - - let mut responses = vec![]; - - while responses.len() < STREAMED_DATA.len() { - let msg = stream - .read() - .expect("Response should be received without error"); - - let msg = match msg { - Message::Text(msg) => msg, - _ => panic!("Shouldn't receive anything but text"), - }; - let response: EventResponse = serde_json::from_str(&msg) - .expect("Response should deserialize successfully"); - - let mut response_x_header = response.headers.clone(); - response_x_header.retain(|k, _| k.to_lowercase().starts_with("x-")); - assert_eq!( - response_x_header, request_x_header, - "x-headers to be propagated back" - ); - assert!(response.error.is_none(), "There should be no error"); - match response.data { - DataType::Binary(BinaryWrapper { inner }) => { - responses.push(inner); - } - _ => panic!("WS stream is supposed to return binary data"), - } - } - - for (idx, response) in responses.iter().enumerate() { - let expected_data = STREAMED_DATA[idx]; - assert_eq!( - &response[..], - expected_data, - "Response data should be the same as the request `fn_args`" - ); - } - } - #[tokio::test(flavor = "multi_thread")] async fn websocket_rues() { let cert_and_key: Option<(String, String)> = None; diff --git a/rusk/src/lib/http/chain.rs b/rusk/src/lib/http/chain.rs index c345b8c623..722c4fc805 100644 --- a/rusk/src/lib/http/chain.rs +++ b/rusk/src/lib/http/chain.rs @@ -53,14 +53,6 @@ fn variables_from_headers(headers: &Map) -> Variables { #[async_trait] impl HandleRequest for RuskNode { - fn can_handle(&self, request: &MessageRequest) -> bool { - let route = request.event.to_route(); - if matches!(route, (Target::Host(_), "rusk", "preverify")) { - return true; - } - matches!(route, (Target::Host(_), "Chain", _)) - } - fn can_handle_rues(&self, request: &RuesDispatchEvent) -> bool { #[allow(clippy::match_like_matches_macro)] match request.uri.inner() { @@ -107,38 +99,6 @@ impl HandleRequest for RuskNode { _ => anyhow::bail!("Unsupported"), } } - async fn handle( - &self, - request: &MessageRequest, - ) -> anyhow::Result { - match &request.event.to_route() { - (Target::Host(_), "Chain", "gql") => { - self.handle_gql(&request.event.data, &request.headers).await - } - (Target::Host(_), "rusk", "preverify") => { - self.handle_preverify(request.event_data()).await - } - (Target::Host(_), "Chain", "propagate_tx") => { - self.propagate_tx(request.event_data()).await - } - (Target::Host(_), "Chain", "alive_nodes") => { - let amount = request.event.data.as_string().trim().parse()?; - self.alive_nodes(amount).await - } - (Target::Host(_), "Chain", "info") => self.get_info().await, - (Target::Host(_), "Chain", "gas") => { - let max_transactions = request - .event - .data - .as_string() - .trim() - .parse::() - .unwrap_or(usize::MAX); - self.get_gas_price(max_transactions).await - } - _ => anyhow::bail!("Unsupported"), - } - } } impl 
RuskNode { async fn handle_gql( @@ -223,6 +183,10 @@ impl RuskNode { info.insert("chain_id", n_conf.kadcast_id.into()); info.insert("kadcast_address", n_conf.public_address.into()); + let vm_conf = self.inner().vm_handler().read().await.vm_config.clone(); + let vm_conf = serde_json::to_value(vm_conf).unwrap_or_default(); + info.insert("vm_config", vm_conf); + Ok(ResponseData::new(serde_json::to_value(&info)?)) } diff --git a/rusk/src/lib/http/event.rs b/rusk/src/lib/http/event.rs index 04b2b51e2c..21184a062a 100644 --- a/rusk/src/lib/http/event.rs +++ b/rusk/src/lib/http/event.rs @@ -4,7 +4,6 @@ // // Copyright (c) DUSK NETWORK. All rights reserved. -use base64::engine::{general_purpose::STANDARD as BASE64, Engine}; use bytecheck::CheckBytes; use dusk_core::abi::ContractId; use futures_util::stream::Iter as StreamIter; @@ -32,144 +31,6 @@ use tungstenite::http::HeaderValue; use super::{RUSK_VERSION_HEADER, RUSK_VERSION_STRICT_HEADER}; -/// A request sent by the websocket client. -#[derive(Debug, Serialize, Deserialize)] -pub struct Event { - #[serde(skip)] - pub target: Target, - pub topic: String, - pub data: RequestData, -} - -impl Event { - pub fn to_route(&self) -> (&Target, &str, &str) { - (&self.target, self.target.inner(), self.topic.as_ref()) - } -} - -/// A request sent by the websocket client. -#[derive(Debug, Serialize, Deserialize)] -pub struct MessageRequest { - pub headers: serde_json::Map, - pub event: Event, -} - -impl MessageRequest { - pub fn to_error(&self, err: S) -> MessageResponse - where - S: AsRef, - { - MessageResponse { - headers: self.x_headers(), - data: DataType::None, - error: Some(err.as_ref().to_string()), - } - } - - pub fn event_data(&self) -> &[u8] { - self.event.data.as_bytes() - } -} - -#[derive(Debug, Deserialize, Serialize, Default, Clone)] -pub enum Target { - #[default] - None, - Contract(String), // 0x01 - Host(String), // 0x02 - Debugger(String), // 0x03 -} - -impl Target { - pub fn inner(&self) -> &str { - match self { - Self::None => "", - Self::Contract(s) => s, - Self::Host(s) => s, - Self::Debugger(s) => s, - } - } -} - -impl TryFrom<&str> for Target { - type Error = anyhow::Error; - fn try_from(value: &str) -> Result { - let paths: Vec<_> = - value.split('/').skip_while(|p| p.is_empty()).collect(); - let target_type: i32 = paths - .first() - .ok_or_else(|| anyhow::anyhow!("Missing target type"))? - .parse()?; - let target = paths - .get(1) - .ok_or_else(|| anyhow::anyhow!("Missing target"))? 
- .to_string(); - - let target = match target_type { - 0x01 => Target::Contract(target), - 0x02 => Target::Host(target), - 0x03 => Target::Debugger(target), - ty => { - return Err(anyhow::anyhow!("Unsupported target type '{ty}'")) - } - }; - - Ok(target) - } -} - -impl MessageRequest { - pub fn x_headers(&self) -> serde_json::Map { - let mut h = self.headers.clone(); - h.retain(|k, _| k.to_lowercase().starts_with("x-")); - h - } - - pub fn header(&self, name: &str) -> Option<&serde_json::Value> { - self.headers - .iter() - .find_map(|(k, v)| k.eq_ignore_ascii_case(name).then_some(v)) - } - - pub fn parse(bytes: &[u8]) -> anyhow::Result { - let (headers, bytes) = parse_header(bytes)?; - let event = Event::parse(bytes)?; - Ok(Self { event, headers }) - } - - pub async fn from_request( - req: Request, - ) -> anyhow::Result<(Self, bool)> { - let headers = req - .headers() - .iter() - .map(|(k, v)| { - let v = if v.is_empty() { - serde_json::Value::Null - } else { - serde_json::from_slice::(v.as_bytes()) - .unwrap_or(serde_json::Value::String( - v.to_str().unwrap().to_string(), - )) - }; - (k.to_string().to_lowercase(), v) - }) - .collect(); - let (event, binary_response) = Event::from_request(req).await?; - - let req = MessageRequest { event, headers }; - - Ok((req, binary_response)) - } - - pub fn check_rusk_version(&self) -> anyhow::Result<()> { - check_rusk_version( - self.header(RUSK_VERSION_HEADER), - self.header(RUSK_VERSION_STRICT_HEADER).is_some(), - ) - } -} - #[derive(Debug, Deserialize, Serialize)] pub struct MessageResponse { pub headers: serde_json::Map, @@ -465,52 +326,6 @@ pub struct BinaryWrapper { pub inner: Vec, } -impl Event { - pub fn parse(bytes: &[u8]) -> anyhow::Result { - let (topic, bytes) = parse_string(bytes)?; - let data = bytes.to_vec().into(); - - Ok(Self { - target: Target::None, - topic, - data, - }) - } - pub async fn from_request( - req: Request, - ) -> anyhow::Result<(Self, bool)> { - let (parts, req_body) = req.into_parts(); - // HTTP REQUEST - let binary_request = parts - .headers - .get(CONTENT_TYPE) - .and_then(|h| h.to_str().ok()) - .map(|v| v.eq_ignore_ascii_case(CONTENT_TYPE_BINARY)) - .unwrap_or_default(); - - let target = parts.uri.path().try_into()?; - - let body = req_body.collect().await?.to_bytes(); - - let mut event = match binary_request { - true => Event::parse(&body) - .map_err(|e| anyhow::anyhow!("Invalid data {e}"))?, - false => serde_json::from_slice(&body) - .map_err(|e| anyhow::anyhow!("Invalid data {e}"))?, - }; - event.target = target; - - let binary_response = binary_request - || parts - .headers - .get(ACCEPT) - .and_then(|h| h.to_str().ok()) - .map(|v| v.eq_ignore_ascii_case(CONTENT_TYPE_BINARY)) - .unwrap_or_default(); - - Ok((event, binary_response)) - } -} const CONTENT_TYPE: &str = "content-type"; const ACCEPT: &str = "accept"; const CONTENT_TYPE_BINARY: &str = "application/octet-stream"; @@ -979,17 +794,3 @@ pub fn check_rusk_version( } Ok(()) } - -#[cfg(test)] -mod tests { - - use super::*; - - #[test] - fn event() { - let data = - "120000006c65617665735f66726f6d5f6865696768740000000000000000"; - let data = hex::decode(data).unwrap(); - let event = Event::parse(&data).unwrap(); - } -} diff --git a/rusk/src/lib/http/prover.rs b/rusk/src/lib/http/prover.rs index b0a4ba5117..613fcbbfa4 100644 --- a/rusk/src/lib/http/prover.rs +++ b/rusk/src/lib/http/prover.rs @@ -13,9 +13,6 @@ use super::*; #[async_trait] impl HandleRequest for LocalProver { - fn can_handle(&self, request: &MessageRequest) -> bool { - 
matches!(request.event.to_route(), (_, "rusk", topic) | (_, "prover", topic) if topic.starts_with("prove_")) - } fn can_handle_rues(&self, request: &RuesDispatchEvent) -> bool { matches!(request.uri.inner(), ("prover", _, "prove")) } @@ -32,18 +29,4 @@ impl HandleRequest for LocalProver { }; Ok(ResponseData::new(response)) } - - async fn handle( - &self, - request: &MessageRequest, - ) -> anyhow::Result { - let topic = request.event.topic.as_str(); - let response = match topic { - "prove_execute" => LocalProver - .prove(request.event_data()) - .map_err(|e| anyhow!(e))?, - _ => anyhow::bail!("Unsupported"), - }; - Ok(ResponseData::new(response)) - } } diff --git a/rusk/src/lib/http/rusk.rs b/rusk/src/lib/http/rusk.rs index 4b101bdb18..88b1f1412f 100644 --- a/rusk/src/lib/http/rusk.rs +++ b/rusk/src/lib/http/rusk.rs @@ -4,7 +4,6 @@ // // Copyright (c) DUSK NETWORK. All rights reserved. -use super::event::Event; use super::*; use dusk_bytes::{DeserializableSlice, Serializable}; @@ -26,21 +25,6 @@ const RUSK_FEEDER_HEADER: &str = "Rusk-Feeder"; #[async_trait] impl HandleRequest for Rusk { - fn can_handle(&self, request: &MessageRequest) -> bool { - let route = request.event.to_route(); - if matches!(route, (Target::Host(_), "rusk", "preverify")) { - // moved to chain - // here just for backward compatibility - return false; - } - if route.2.starts_with("prove_") { - return false; - } - matches!( - route, - (Target::Contract(_), ..) | (Target::Host(_), "rusk", _) - ) - } fn can_handle_rues(&self, request: &RuesDispatchEvent) -> bool { #[allow(clippy::match_like_matches_macro)] match request.uri.inner() { @@ -68,37 +52,9 @@ impl HandleRequest for Rusk { _ => Err(anyhow::anyhow!("Unsupported")), } } - - async fn handle( - &self, - request: &MessageRequest, - ) -> anyhow::Result { - match &request.event.to_route() { - (Target::Contract(_), ..) 
=> { - let feeder = request.header(RUSK_FEEDER_HEADER).is_some(); - self.handle_contract_query_legacy(&request.event, feeder) - } - (Target::Host(_), "rusk", "provisioners") => { - self.get_provisioners() - } - (Target::Host(_), "rusk", "crs") => self.get_crs(), - _ => Err(anyhow::anyhow!("Unsupported")), - } - } } impl Rusk { - fn handle_contract_query_legacy( - &self, - event: &Event, - feeder: bool, - ) -> anyhow::Result { - let contract = event.target.inner(); - let topic = &event.topic; - let data = event.data.as_bytes(); - - self.handle_contract_query(contract, topic, data, feeder) - } fn handle_contract_query( &self, contract: &str, diff --git a/rusk/src/lib/node.rs b/rusk/src/lib/node.rs index 2cf27a4bb7..9c5e5b8ef4 100644 --- a/rusk/src/lib/node.rs +++ b/rusk/src/lib/node.rs @@ -10,7 +10,6 @@ mod vm; use std::path::PathBuf; use std::sync::Arc; -use std::time::Duration; use dusk_core::{dusk, Dusk}; @@ -20,13 +19,12 @@ use node::network::Kadcast; use node::LongLivedService; use parking_lot::RwLock; use tokio::sync::broadcast; +pub use vm::*; use crate::http::RuesEvent; pub(crate) use events::ChainEventStreamer; #[cfg(feature = "archive")] -use { - node::archive::Archive, node_data::archive::ArchivalData, tokio::sync::mpsc, -}; +use node::archive::Archive; #[derive(Debug, Clone, Copy)] pub struct RuskTip { @@ -40,16 +38,12 @@ pub struct Rusk { pub(crate) vm: Arc, dir: PathBuf, pub(crate) chain_id: u8, - pub(crate) generation_timeout: Option, - pub(crate) gas_per_deploy_byte: u64, - pub(crate) min_deployment_gas_price: u64, + pub(crate) vm_config: RuskVmConfig, pub(crate) min_gas_limit: u64, - pub(crate) min_deploy_points: u64, pub(crate) feeder_gas_limit: u64, - pub(crate) block_gas_limit: u64, pub(crate) event_sender: broadcast::Sender, #[cfg(feature = "archive")] - pub(crate) archive_sender: mpsc::Sender, + pub archive: Archive, } pub(crate) type Services = diff --git a/rusk/src/lib/node/events.rs b/rusk/src/lib/node/events.rs index 5c39ea55ff..a70028cf1d 100644 --- a/rusk/src/lib/node/events.rs +++ b/rusk/src/lib/node/events.rs @@ -13,19 +13,12 @@ use node_data::events::Event as ChainEvent; use tokio::sync::broadcast; use tokio::sync::mpsc::Receiver; use tracing::error; -#[cfg(feature = "archive")] -use { - node_data::archive::ArchivalData, node_data::events::BlockState, - serde_json::Value, tokio::sync::mpsc::Sender, -}; use crate::http::RuesEvent; pub(crate) struct ChainEventStreamer { pub node_receiver: Receiver, pub rues_sender: broadcast::Sender, - #[cfg(feature = "archive")] - pub archivist_sender: Sender, } #[async_trait] @@ -43,62 +36,6 @@ impl if let Err(e) = self.rues_sender.send(msg.clone().into()) { error!("Cannot send to rues {e:?}"); } - - #[cfg(feature = "archive")] - { - // NB: This is a temporary solution to send finalized and - // deleted blocks to the archivist in a decoupled way. - // We can remove this once the consensus acceptor can send - // these events directly to the archivist service. 
- match msg.topic { - // "statechange" & "deleted" are only in msg.component - // == "blocks" - "statechange" => { - if let Some(json_val) = msg.data { - let state = json_val - .get("state") - .and_then(Value::as_str) - .unwrap_or_default(); - let at_height = json_val - .get("atHeight") - .and_then(Value::as_u64) - .unwrap_or_default(); - - if state == BlockState::Finalized.as_str() { - if let Err(e) = self - .archivist_sender - .try_send(ArchivalData::FinalizedBlock( - at_height, - msg.entity.clone(), - )) - { - error!( - "Cannot send to archivist {e:?}" - ); - }; - } - }; - } - "deleted" => { - if let Some(json_val) = msg.data { - let at_height = json_val - .get("atHeight") - .and_then(Value::as_u64) - .unwrap_or_default(); - - if let Err(e) = self.archivist_sender.try_send( - ArchivalData::DeletedBlock( - at_height, - msg.entity.clone(), - ), - ) { - error!("Cannot send to archivist {e:?}"); - }; - }; - } - _ => (), - } - } } } } diff --git a/rusk/src/lib/node/rusk.rs b/rusk/src/lib/node/rusk.rs index 17041d80ca..3e06f8d4fa 100644 --- a/rusk/src/lib/node/rusk.rs +++ b/rusk/src/lib/node/rusk.rs @@ -6,7 +6,7 @@ use std::path::Path; use std::sync::{mpsc, Arc}; -use std::time::{Duration, Instant}; +use std::time::Instant; use std::{fs, io}; use dusk_bytes::Serializable; @@ -25,16 +25,19 @@ use dusk_core::transfer::{ moonlight::AccountData, PANIC_NONCE_NOT_READY, TRANSFER_CONTRACT, }; use dusk_core::{BlsScalar, Dusk}; -use dusk_vm::{execute, CallReceipt, Error as VMError, Session, VM}; -use node_data::events::contract::{ContractEvent, ContractTxEvent}; +use dusk_vm::{ + execute, CallReceipt, Error as VMError, ExecutionConfig, Session, VM, +}; +#[cfg(feature = "archive")] +use node::archive::Archive; +use node_data::events::contract::ContractTxEvent; use node_data::ledger::{Hash, Slash, SpentTransaction, Transaction}; use parking_lot::RwLock; use rusk_profile::to_rusk_state_id_path; use tokio::sync::broadcast; use tracing::info; -#[cfg(feature = "archive")] -use {node_data::archive::ArchivalData, tokio::sync::mpsc::Sender}; +use super::RuskVmConfig; use crate::bloom::Bloom; use crate::http::RuesEvent; use crate::node::{coinbase_value, Rusk, RuskTip}; @@ -42,19 +45,14 @@ use crate::Error::InvalidCreditsCount; use crate::{Error, Result, DUSK_CONSENSUS_KEY}; impl Rusk { - #[allow(clippy::too_many_arguments)] pub fn new>( dir: P, chain_id: u8, - generation_timeout: Option, - gas_per_deploy_byte: u64, - min_deployment_gas_price: u64, + vm_config: RuskVmConfig, min_gas_limit: u64, - min_deploy_points: u64, - block_gas_limit: u64, feeder_gas_limit: u64, event_sender: broadcast::Sender, - #[cfg(feature = "archive")] archive_sender: Sender, + #[cfg(feature = "archive")] archive: Archive, ) -> Result { let dir = dir.as_ref(); info!("Using state from {dir:?}"); @@ -87,16 +85,12 @@ impl Rusk { vm, dir: dir.into(), chain_id, - generation_timeout, - gas_per_deploy_byte, - min_deployment_gas_price, + vm_config, min_gas_limit, - min_deploy_points, feeder_gas_limit, event_sender, #[cfg(feature = "archive")] - archive_sender, - block_gas_limit, + archive, }) } @@ -109,7 +103,7 @@ impl Rusk { let started = Instant::now(); let block_height = params.round; - let block_gas_limit = self.block_gas_limit; + let block_gas_limit = self.vm_config.block_gas_limit; let generator = params.generator_pubkey.inner(); let to_slash = params.to_slash.clone(); let prev_state_root = params.prev_state_root; @@ -128,11 +122,13 @@ impl Rusk { let mut event_bloom = Bloom::new(); + let execution_config = 
self.vm_config.to_execution_config(block_height); + // We always write the faults len in a u32 let mut size_left = params.max_txs_bytes - u32::SIZE; for unspent_tx in txs { - if let Some(timeout) = self.generation_timeout { + if let Some(timeout) = self.vm_config.generation_timeout { if started.elapsed() > timeout { info!("execute_transactions timeout triggered {timeout:?}"); break; @@ -158,13 +154,7 @@ impl Rusk { continue; } - match execute( - &mut session, - &unspent_tx.inner, - self.gas_per_deploy_byte, - self.min_deploy_points, - self.min_deployment_gas_price, - ) { + match execute(&mut session, &unspent_tx.inner, &execution_config) { Ok(receipt) => { let gas_spent = receipt.gas_spent; @@ -182,9 +172,7 @@ impl Rusk { let _ = execute( &mut session, &spent_tx.inner.inner, - self.gas_per_deploy_byte, - self.min_deploy_points, - self.min_deployment_gas_price, + &execution_config, ); } @@ -263,6 +251,7 @@ impl Rusk { voters: &[Voter], ) -> Result<(Vec, VerificationOutput)> { let session = self.new_block_session(block_height, prev_commit)?; + let execution_config = self.vm_config.to_execution_config(block_height); accept( session, @@ -273,9 +262,7 @@ impl Rusk { txs, slashing, voters, - self.gas_per_deploy_byte, - self.min_deploy_points, - self.min_deployment_gas_price, + &execution_config, ) .map(|(a, b, _, _)| (a, b)) } @@ -285,6 +272,12 @@ impl Rusk { /// * `consistency_check` - represents a state_root, the caller expects to /// be returned on successful transactions execution. Passing a None /// value disables the check. + /// + /// # Returns + /// - Vec - The transactions that were spent. + /// - VerificationOutput - The verification output. + /// - Vec - All contract events that were emitted from the + /// given transactions. #[allow(clippy::too_many_arguments)] pub fn accept_transactions( &self, @@ -300,10 +293,12 @@ impl Rusk { ) -> Result<( Vec, VerificationOutput, - Vec, + Vec, )> { let session = self.new_block_session(block_height, prev_commit)?; + let execution_config = self.vm_config.to_execution_config(block_height); + let (spent_txs, verification_output, session, events) = accept( session, block_height, @@ -313,9 +308,7 @@ impl Rusk { &txs[..], slashing, voters, - self.gas_per_deploy_byte, - self.min_deploy_points, - self.min_deployment_gas_price, + &execution_config, )?; if let Some(expected_verification) = consistency_check { @@ -330,27 +323,15 @@ impl Rusk { self.set_current_commit(session.commit()?); - // Sent all events from this block to the archivist - #[cfg(feature = "archive")] - { - let _ = self.archive_sender.try_send(ArchivalData::ArchivedEvents( - block_height, - block_hash, - events.clone(), - )); - } - - let mut stake_events = vec![]; + let contract_events = events.clone(); for event in events { - if event.event.target.0 == STAKE_CONTRACT { - stake_events.push(event.event.clone()); - } // Send VM event to RUES let event = RuesEvent::from(event); let _ = self.event_sender.send(event); - } + } // TODO: move this also in acceptor (async fn try_accept_block) where + // stake events are filtered, to avoid looping twice? 
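As an aside to the hunks above: the per-transaction gas knobs (`gas_per_deploy_byte`, `min_deploy_points`, `min_deployment_gas_price`) no longer travel as separate arguments; the node derives one `ExecutionConfig` per block from its `RuskVmConfig` and hands it to `execute`/`accept`. A minimal sketch of that flow, not part of the patch, with session setup and error handling elided:

```rust
use dusk_vm::{execute, ExecutionConfig, Session};
use node_data::ledger::Transaction;
use rusk::node::RuskVmConfig;

// Illustrative only: mirrors the call shape used in rusk/src/lib/node/rusk.rs.
fn run_block(
    vm_config: &RuskVmConfig,
    session: &mut Session,
    txs: &[Transaction],
    block_height: u64,
) -> Result<(), dusk_vm::Error> {
    // One config per block replaces the three loose u64 parameters.
    let execution_config: ExecutionConfig =
        vm_config.to_execution_config(block_height);

    for unspent_tx in txs {
        // Same call as in the hunk above; deploy charges and the
        // PUBLIC_SENDER feature are resolved inside `execute`.
        let _receipt = execute(session, &unspent_tx.inner, &execution_config)?;
    }
    Ok(())
}
```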
- Ok((spent_txs, verification_output, stake_events)) + Ok((spent_txs, verification_output, contract_events)) } pub fn finalize_state( @@ -551,10 +532,6 @@ impl Rusk { } Ok(()) } - - pub(crate) fn block_gas_limit(&self) -> u64 { - self.block_gas_limit - } } #[allow(clippy::too_many_arguments)] @@ -567,9 +544,7 @@ fn accept( txs: &[Transaction], slashing: Vec, voters: &[Voter], - gas_per_deploy_byte: u64, - min_deploy_points: u64, - min_deployment_gas_price: u64, + execution_config: &ExecutionConfig, ) -> Result<( Vec, VerificationOutput, @@ -589,13 +564,7 @@ fn accept( for unspent_tx in txs { let tx = &unspent_tx.inner; let tx_id = unspent_tx.id(); - let receipt = execute( - &mut session, - tx, - gas_per_deploy_byte, - min_deploy_points, - min_deployment_gas_price, - )?; + let receipt = execute(&mut session, tx, execution_config)?; event_bloom.add_events(&receipt.events); diff --git a/rusk/src/lib/node/vm.rs b/rusk/src/lib/node/vm.rs index a15ddd8142..a208ee1f76 100644 --- a/rusk/src/lib/node/vm.rs +++ b/rusk/src/lib/node/vm.rs @@ -4,10 +4,11 @@ // // Copyright (c) DUSK NETWORK. All rights reserved. +mod config; mod query; use dusk_consensus::errors::VstError; -use node_data::events::contract::ContractEvent; +use node_data::events::contract::ContractTxEvent; use tracing::{debug, info}; use dusk_bytes::DeserializableSlice; @@ -23,6 +24,8 @@ use node_data::bls::PublicKey; use node_data::ledger::{Block, Slash, SpentTransaction, Transaction}; use super::Rusk; +pub use config::feature::*; +pub use config::Config as RuskVmConfig; impl VMExecution for Rusk { fn execute_state_transition>( @@ -88,7 +91,7 @@ impl VMExecution for Rusk { ) -> anyhow::Result<( Vec, VerificationOutput, - Vec, + Vec, )> { debug!("Received accept request"); let generator = blk.header().generator_bls_pubkey; @@ -97,7 +100,7 @@ impl VMExecution for Rusk { let slashing = Slash::from_block(blk)?; - let (txs, verification_output, stake_events) = self + let (txs, verification_output, contract_events) = self .accept_transactions( prev_root, blk.header().height, @@ -114,7 +117,7 @@ impl VMExecution for Rusk { ) .map_err(|inner| anyhow::anyhow!("Cannot accept txs: {inner}!!"))?; - Ok((txs, verification_output, stake_events)) + Ok((txs, verification_output, contract_events)) } fn move_to_commit(&self, commit: [u8; 32]) -> anyhow::Result<()> { @@ -265,15 +268,15 @@ impl VMExecution for Rusk { } fn get_block_gas_limit(&self) -> u64 { - self.block_gas_limit() + self.vm_config.block_gas_limit } fn gas_per_deploy_byte(&self) -> u64 { - self.gas_per_deploy_byte + self.vm_config.gas_per_deploy_byte } fn min_deployment_gas_price(&self) -> u64 { - self.min_deployment_gas_price + self.vm_config.min_deployment_gas_price } fn min_gas_limit(&self) -> u64 { @@ -281,7 +284,7 @@ impl VMExecution for Rusk { } fn min_deploy_points(&self) -> u64 { - self.min_deploy_points + self.vm_config.min_deploy_points } } diff --git a/rusk/src/lib/node/vm/config.rs b/rusk/src/lib/node/vm/config.rs new file mode 100644 index 0000000000..06bd762c3b --- /dev/null +++ b/rusk/src/lib/node/vm/config.rs @@ -0,0 +1,140 @@ +// This Source Code Form is subject to the terms of the Mozilla Public +// License, v. 2.0. If a copy of the MPL was not distributed with this +// file, You can obtain one at http://mozilla.org/MPL/2.0/. +// +// Copyright (c) DUSK NETWORK. All rights reserved. 
+ +use std::collections::HashMap; +use std::time::Duration; + +use dusk_vm::ExecutionConfig; +use serde::{Deserialize, Serialize}; + +const fn default_gas_per_deploy_byte() -> u64 { + 100 +} +const fn default_min_deploy_points() -> u64 { + 5_000_000 +} +const fn default_min_deployment_gas_price() -> u64 { + 2_000 +} +const fn default_block_gas_limit() -> u64 { + 3 * 1_000_000_000 +} + +/// Configuration for the execution of a transaction. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Config { + /// The amount of gas points charged for each byte in a contract-deployment + /// bytecode. + #[serde(default = "default_gas_per_deploy_byte")] + pub gas_per_deploy_byte: u64, + + /// The minimum gas points charged for a contract deployment. + #[serde(default = "default_min_deploy_points")] + pub min_deploy_points: u64, + + /// The minimum gas price set for a contract deployment + #[serde(default = "default_min_deployment_gas_price")] + pub min_deployment_gas_price: u64, + + /// The maximum amount of gas points that can be used in a block. + #[serde(default = "default_block_gas_limit")] + pub block_gas_limit: u64, + + /// The timeout for a candidate block generation. + #[serde(with = "humantime_serde")] + #[serde(default)] + pub generation_timeout: Option, + + /// Set of features to activate + pub features: HashMap, +} + +impl Default for Config { + fn default() -> Self { + Self::new() + } +} + +pub(crate) mod feature { + pub const FEATURE_ABI_PUBLIC_SENDER: &str = "ABI_PUBLIC_SENDER"; +} + +impl Config { + pub fn new() -> Self { + Self { + gas_per_deploy_byte: default_gas_per_deploy_byte(), + min_deployment_gas_price: default_min_deployment_gas_price(), + min_deploy_points: default_min_deploy_points(), + block_gas_limit: default_block_gas_limit(), + generation_timeout: None, + features: HashMap::new(), + } + } + + /// Set the maximum amount of gas points that can be used in a block. + pub const fn with_block_gas_limit(mut self, block_gas_limit: u64) -> Self { + self.block_gas_limit = block_gas_limit; + self + } + + /// Set the amount of gas points charged for each byte in a + /// contract-deployment + pub const fn with_gas_per_deploy_byte( + mut self, + gas_per_deploy_byte: u64, + ) -> Self { + self.gas_per_deploy_byte = gas_per_deploy_byte; + self + } + + /// Set the minimum amount of gas points charged for a contract deployment. + pub const fn with_min_deploy_points( + mut self, + min_deploy_points: u64, + ) -> Self { + self.min_deploy_points = min_deploy_points; + self + } + + /// Set the minimum gas price set for a contract deployment. + pub const fn with_min_deploy_gas_price( + mut self, + min_deploy_gas_price: u64, + ) -> Self { + self.min_deployment_gas_price = min_deploy_gas_price; + self + } + + /// Set the timeout for a candidate block generation. + pub const fn with_generation_timeout( + mut self, + generation_timeout: Option, + ) -> Self { + self.generation_timeout = generation_timeout; + self + } + + /// Create a new `Config` with the given parameters. 
+ pub fn to_execution_config(&self, block_height: u64) -> ExecutionConfig { + let with_public_sender: bool = self + .feature(feature::FEATURE_ABI_PUBLIC_SENDER) + .map(|activation| activation >= block_height) + .unwrap_or_default(); + ExecutionConfig { + gas_per_deploy_byte: self.gas_per_deploy_byte, + min_deploy_points: self.min_deploy_points, + min_deploy_gas_price: self.min_deployment_gas_price, + with_public_sender, + } + } + + pub fn feature(&self, feature: &str) -> Option { + self.features + .iter() + .find(|(k, _)| k.eq_ignore_ascii_case(feature)) + .map(|(_, &v)| v) + } +} diff --git a/rusk/src/lib/node/vm/query.rs b/rusk/src/lib/node/vm/query.rs index 327dd9ae16..6d275e0594 100644 --- a/rusk/src/lib/node/vm/query.rs +++ b/rusk/src/lib/node/vm/query.rs @@ -11,6 +11,7 @@ use std::sync::mpsc; use bytecheck::CheckBytes; use dusk_core::abi::{ContractId, StandardBufSerializer}; +use node::vm::VMExecution; use rkyv::validation::validators::DefaultValidator; use rkyv::{Archive, Deserialize, Infallible, Serialize}; @@ -27,9 +28,13 @@ impl Rusk { { let mut session = self.query_session(None)?; - // For queries we set a point limit of effectively infinite session - .call_raw(contract_id, fn_name.as_ref(), fn_arg, u64::MAX) + .call_raw( + contract_id, + fn_name.as_ref(), + fn_arg, + self.get_block_gas_limit(), + ) .map(|receipt| receipt.data) .map_err(Into::into) } @@ -72,18 +77,27 @@ impl Rusk { { let mut session = self.query_session(None)?; - // For queries we set a point limit of effectively infinite let mut result = session - .call(contract_id, call_name, call_arg, u64::MAX)? + .call(contract_id, call_name, call_arg, self.get_block_gas_limit())? .data; while let Some(call_arg) = closure(result) { result = session - .call(contract_id, call_name, &call_arg, u64::MAX)? + .call( + contract_id, + call_name, + &call_arg, + self.get_block_gas_limit(), + )? 
.data; } - session.call::<_, ()>(contract_id, call_name, call_arg, u64::MAX)?; + session.call::<_, ()>( + contract_id, + call_name, + call_arg, + self.get_block_gas_limit(), + )?; Ok(()) } diff --git a/rusk/tests/common/state.rs b/rusk/tests/common/state.rs index fd5826fa94..de4a753573 100644 --- a/rusk/tests/common/state.rs +++ b/rusk/tests/common/state.rs @@ -8,6 +8,7 @@ use std::{path::Path, usize}; use dusk_bytes::Serializable; use node::vm::VMExecution; +use rusk::node::RuskVmConfig; use rusk::{Result, Rusk, DUSK_CONSENSUS_KEY}; use rusk_recovery_tools::state::{self, Snapshot}; @@ -30,25 +31,22 @@ use tokio::sync::broadcast; use tracing::info; const CHAIN_ID: u8 = 0xFA; -pub const DEFAULT_GAS_PER_DEPLOY_BYTE: u64 = 100; -pub const DEFAULT_MIN_DEPLOYMENT_GAS_PRICE: u64 = 2000; pub const DEFAULT_MIN_GAS_LIMIT: u64 = 75000; -pub const DEFAULT_MIN_DEPLOY_POINTS: u64 = 5000000; // Creates a Rusk initial state in the given directory pub fn new_state>( dir: P, snapshot: &Snapshot, - block_gas_limit: u64, + vm_config: RuskVmConfig, ) -> Result { - new_state_with_chainid(dir, snapshot, block_gas_limit, CHAIN_ID) + new_state_with_chainid(dir, snapshot, vm_config, CHAIN_ID) } // Creates a Rusk initial state in the given directory pub fn new_state_with_chainid>( dir: P, snapshot: &Snapshot, - block_gas_limit: u64, + vm_config: RuskVmConfig, chain_id: u8, ) -> Result { let dir = dir.as_ref(); @@ -62,12 +60,8 @@ pub fn new_state_with_chainid>( let rusk = Rusk::new( dir, chain_id, - None, - DEFAULT_GAS_PER_DEPLOY_BYTE, - DEFAULT_MIN_DEPLOYMENT_GAS_PRICE, + vm_config, DEFAULT_MIN_GAS_LIMIT, - DEFAULT_MIN_DEPLOY_POINTS, - block_gas_limit, u64::MAX, sender, ) diff --git a/rusk/tests/rusk-state.rs b/rusk/tests/rusk-state.rs index 93b72e9406..f55c25a656 100644 --- a/rusk/tests/rusk-state.rs +++ b/rusk/tests/rusk-state.rs @@ -27,7 +27,7 @@ use ff::Field; use parking_lot::RwLockWriteGuard; use rand::prelude::*; use rand::rngs::StdRng; -use rusk::node::{Rusk, RuskTip}; +use rusk::node::{Rusk, RuskTip, RuskVmConfig}; use rusk::Result; use tempfile::tempdir; use tracing::info; @@ -43,8 +43,9 @@ const INITIAL_BALANCE: u64 = 10_000_000_000; fn initial_state>(dir: P) -> Result { let snapshot = toml::from_str(include_str!("./config/rusk-state.toml")) .expect("Cannot deserialize config"); + let vm_config = RuskVmConfig::new().with_block_gas_limit(BLOCK_GAS_LIMIT); - new_state(dir, &snapshot, BLOCK_GAS_LIMIT) + new_state(dir, &snapshot, vm_config) } fn leaves_from_height(rusk: &Rusk, height: u64) -> Result> { @@ -185,7 +186,8 @@ async fn generate_phoenix_txs() -> Result<(), Box> { let snapshot = toml::from_str(include_str!("./config/bench.toml")) .expect("Cannot deserialize config"); - let rusk = new_state(&tmp, &snapshot, 100_000_000_000)?; + let vm_config = RuskVmConfig::new().with_block_gas_limit(100_000_000_000); + let rusk = new_state(&tmp, &snapshot, vm_config)?; let cache = Arc::new(std::sync::RwLock::new(std::collections::HashMap::new())); @@ -247,7 +249,8 @@ async fn generate_moonlight_txs() -> Result<(), Box> { let snapshot = toml::from_str(include_str!("./config/bench.toml")) .expect("Cannot deserialize config"); - let rusk = new_state(&tmp, &snapshot, 100_000_000_000)?; + let vm_config = RuskVmConfig::new().with_block_gas_limit(100_000_000_000); + let rusk = new_state(&tmp, &snapshot, vm_config)?; let cache = Arc::new(std::sync::RwLock::new(std::collections::HashMap::new())); diff --git a/rusk/tests/services/contract_deployment.rs b/rusk/tests/services/contract_deployment.rs index 2a4a510e75..916f2357dd 
100644 --- a/rusk/tests/services/contract_deployment.rs +++ b/rusk/tests/services/contract_deployment.rs @@ -15,6 +15,7 @@ use dusk_core::transfer::data::{ use dusk_vm::{gen_contract_id, ContractData, Error as VMError, VM}; use rand::prelude::*; use rand::rngs::StdRng; +use rusk::node::RuskVmConfig; use rusk::{Result, Rusk, DUSK_CONSENSUS_KEY}; use rusk_recovery_tools::state; use tempfile::tempdir; @@ -22,11 +23,8 @@ use tokio::sync::broadcast; use tracing::info; use crate::common::logger; -use crate::common::state::DEFAULT_MIN_DEPLOYMENT_GAS_PRICE; -use crate::common::state::DEFAULT_MIN_DEPLOY_POINTS; -use crate::common::state::{generator_procedure, ExecuteResult}; use crate::common::state::{ - DEFAULT_GAS_PER_DEPLOY_BYTE, DEFAULT_MIN_GAS_LIMIT, + generator_procedure, ExecuteResult, DEFAULT_MIN_GAS_LIMIT, }; use crate::common::wallet::{ test_wallet as wallet, TestStateClient, TestStore, Wallet, @@ -106,12 +104,8 @@ fn initial_state>(dir: P, deploy_bob: bool) -> Result { let rusk = Rusk::new( dir, CHAIN_ID, - None, - DEFAULT_GAS_PER_DEPLOY_BYTE, - DEFAULT_MIN_DEPLOYMENT_GAS_PRICE, + RuskVmConfig::new(), DEFAULT_MIN_GAS_LIMIT, - DEFAULT_MIN_DEPLOY_POINTS, - BLOCK_GAS_LIMIT, u64::MAX, sender, ) diff --git a/rusk/tests/services/contract_stake.rs b/rusk/tests/services/contract_stake.rs index 2f137fdf56..9f77788983 100644 --- a/rusk/tests/services/contract_stake.rs +++ b/rusk/tests/services/contract_stake.rs @@ -17,6 +17,7 @@ use dusk_vm::gen_contract_id; use node_data::ledger::SpentTransaction; use rand::prelude::*; use rand::rngs::StdRng; +use rusk::node::RuskVmConfig; use rusk::{Result, Rusk}; use std::collections::HashMap; use tempfile::tempdir; @@ -30,6 +31,7 @@ use crate::common::*; const BLOCK_HEIGHT: u64 = 1; const BLOCK_GAS_LIMIT: u64 = 100_000_000_000; + const GAS_LIMIT: u64 = 10_000_000_000; const GAS_PRICE: u64 = 1; @@ -38,8 +40,9 @@ fn stake_state>(dir: P) -> Result { let snapshot = toml::from_str(include_str!("../config/stake_from_contract.toml")) .expect("Cannot deserialize config"); + let vm_config = RuskVmConfig::new().with_block_gas_limit(BLOCK_GAS_LIMIT); - new_state(dir, &snapshot, u64::MAX) + new_state(dir, &snapshot, vm_config) } #[tokio::test(flavor = "multi_thread")] diff --git a/rusk/tests/services/conversion.rs b/rusk/tests/services/conversion.rs index b038cc9400..96399c8a81 100644 --- a/rusk/tests/services/conversion.rs +++ b/rusk/tests/services/conversion.rs @@ -11,6 +11,7 @@ use std::sync::{Arc, RwLock}; use node_data::ledger::SpentTransaction; use rand::prelude::*; use rand::rngs::StdRng; +use rusk::node::RuskVmConfig; use rusk::{Result, Rusk}; use tempfile::tempdir; @@ -29,8 +30,9 @@ const INITIAL_MOONLIGHT_BALANCE: u64 = 10_000_000_000; fn initial_state>(dir: P) -> Result { let snapshot = toml::from_str(include_str!("../config/convert.toml")) .expect("Cannot deserialize config"); + let vm_config = RuskVmConfig::new().with_block_gas_limit(BLOCK_GAS_LIMIT); - new_state(dir, &snapshot, BLOCK_GAS_LIMIT) + new_state(dir, &snapshot, vm_config) } /// Makes a transaction that converts Dusk from Phoenix to Moonlight, and diff --git a/rusk/tests/services/finalization.rs b/rusk/tests/services/finalization.rs index b8d6550921..fdccf37dfb 100644 --- a/rusk/tests/services/finalization.rs +++ b/rusk/tests/services/finalization.rs @@ -6,7 +6,7 @@ use std::path::Path; -use rusk::{Result, Rusk}; +use rusk::{node::RuskVmConfig, Result, Rusk}; use tempfile::tempdir; use crate::common::state::{generator_procedure2, new_state}; @@ -19,8 +19,9 @@ fn initial_state>(dir: P) -> Result { 
let snapshot = toml::from_str(include_str!("../config/multi_transfer.toml")) .expect("Cannot deserialize config"); + let vm_config = RuskVmConfig::new().with_block_gas_limit(BLOCK_GAS_LIMIT); - new_state(dir, &snapshot, BLOCK_GAS_LIMIT) + new_state(dir, &snapshot, vm_config) } #[tokio::test(flavor = "multi_thread")] diff --git a/rusk/tests/services/gas_behavior.rs b/rusk/tests/services/gas_behavior.rs index e552f3d983..cc3ba2f88c 100644 --- a/rusk/tests/services/gas_behavior.rs +++ b/rusk/tests/services/gas_behavior.rs @@ -14,6 +14,7 @@ use dusk_core::transfer::{ }; use rand::prelude::*; use rand::rngs::StdRng; +use rusk::node::RuskVmConfig; use rusk::{Result, Rusk}; use tempfile::tempdir; use tracing::info; @@ -26,6 +27,7 @@ use crate::common::wallet::{ const BLOCK_HEIGHT: u64 = 1; const BLOCK_GAS_LIMIT: u64 = 1_000_000_000_000; + const INITIAL_BALANCE: u64 = 10_000_000_000; const GAS_LIMIT_0: u64 = 100_000_000; @@ -37,8 +39,9 @@ const DEPOSIT: u64 = 0; fn initial_state>(dir: P) -> Result { let snapshot = toml::from_str(include_str!("../config/gas-behavior.toml")) .expect("Cannot deserialize config"); + let vm_config = RuskVmConfig::new().with_block_gas_limit(BLOCK_GAS_LIMIT); - new_state(dir, &snapshot, BLOCK_GAS_LIMIT) + new_state(dir, &snapshot, vm_config) } const SENDER_INDEX_0: u8 = 0; diff --git a/rusk/tests/services/mainnet.rs b/rusk/tests/services/mainnet.rs index 77fdcd8fed..99386b9b79 100644 --- a/rusk/tests/services/mainnet.rs +++ b/rusk/tests/services/mainnet.rs @@ -12,6 +12,7 @@ use dusk_core::stake::{StakeData, StakeKeys, STAKE_CONTRACT}; use dusk_core::transfer::moonlight::AccountData; use dusk_core::transfer::phoenix::NoteLeaf; use dusk_core::transfer::TRANSFER_CONTRACT; +use rusk::node::RuskVmConfig; use rusk::{Result, Rusk}; use tempfile::tempdir; @@ -27,7 +28,7 @@ fn initial_state>(dir: P) -> Result { )) .expect("Cannot deserialize config"); - new_state(dir, &snapshot, u64::MAX) + new_state(dir, &snapshot, RuskVmConfig::default()) } #[tokio::test(flavor = "multi_thread")] diff --git a/rusk/tests/services/moonlight_stake.rs b/rusk/tests/services/moonlight_stake.rs index a4fee5b358..057eac9771 100644 --- a/rusk/tests/services/moonlight_stake.rs +++ b/rusk/tests/services/moonlight_stake.rs @@ -11,6 +11,7 @@ use dusk_core::stake::DEFAULT_MINIMUM_STAKE; use rand::prelude::*; use rand::rngs::StdRng; +use rusk::node::RuskVmConfig; use rusk::{Result, Rusk}; use std::collections::HashMap; use tempfile::tempdir; @@ -31,8 +32,9 @@ const GAS_PRICE: u64 = 1; fn stake_state>(dir: P) -> Result { let snapshot = toml::from_str(include_str!("../config/stake.toml")) .expect("Cannot deserialize config"); + let vm_config = RuskVmConfig::new().with_block_gas_limit(BLOCK_GAS_LIMIT); - new_state(dir, &snapshot, BLOCK_GAS_LIMIT) + new_state(dir, &snapshot, vm_config) } /// Stakes an amount Dusk and produces a block with this single transaction, diff --git a/rusk/tests/services/multi_transfer.rs b/rusk/tests/services/multi_transfer.rs index 689a290a7f..c80970499e 100644 --- a/rusk/tests/services/multi_transfer.rs +++ b/rusk/tests/services/multi_transfer.rs @@ -10,6 +10,7 @@ use std::sync::{Arc, RwLock}; use rand::prelude::*; use rand::rngs::StdRng; +use rusk::node::RuskVmConfig; use rusk::{Result, Rusk}; use tempfile::tempdir; use tracing::info; @@ -24,6 +25,7 @@ const BLOCK_HEIGHT: u64 = 1; // This is purposefully chosen to be low to trigger the discarding of a // perfectly good transaction. 
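The test fixtures in this area all follow the same pattern when switching from a bare `block_gas_limit: u64` to the new `RuskVmConfig`. A consolidated sketch of such a helper follows; the `features` insertion is an assumption layered on top of the builder shown in the diff, and the activation height is a made-up value:

```rust
use std::path::Path;

use rusk::node::{RuskVmConfig, FEATURE_ABI_PUBLIC_SENDER};
use rusk::{Result, Rusk};
use rusk_recovery_tools::state::Snapshot;

// `new_state` is the helper from rusk/tests/common/state.rs.
use crate::common::state::new_state;

// Illustrative variant of the `initial_state` helpers used in these tests.
fn initial_state_with_features<P: AsRef<Path>>(
    dir: P,
    snapshot: &Snapshot,
    block_gas_limit: u64,
) -> Result<Rusk> {
    // Bundle the block gas limit into one config instead of passing a bare u64.
    let mut vm_config =
        RuskVmConfig::new().with_block_gas_limit(block_gas_limit);

    // Assumption for illustration: `features` maps a feature name to an
    // activation height, so a test can also control whether PUBLIC_SENDER is
    // exposed for a given block (see `to_execution_config` in the new module).
    vm_config
        .features
        .insert(FEATURE_ABI_PUBLIC_SENDER.to_string(), 1);

    new_state(dir, snapshot, vm_config)
}
```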
const BLOCK_GAS_LIMIT: u64 = 24_000_000; + const GAS_LIMIT: u64 = 12_000_000; // Lowest value for a transfer const INITIAL_BALANCE: u64 = 10_000_000_000; const INITIAL_BALANCE_DEPLOY: u64 = 1_000_000_000_000; @@ -37,8 +39,9 @@ fn initial_state>(dir: P) -> Result { let snapshot = toml::from_str(include_str!("../config/multi_transfer.toml")) .expect("Cannot deserialize config"); + let vm_config = RuskVmConfig::new().with_block_gas_limit(BLOCK_GAS_LIMIT); - new_state(dir, &snapshot, BLOCK_GAS_LIMIT) + new_state(dir, &snapshot, vm_config) } // Creates the Rusk initial state for the tests below @@ -46,8 +49,9 @@ fn initial_state_deploy>(dir: P) -> Result { let snapshot = toml::from_str(include_str!("../config/multi_transfer_deploy.toml")) .expect("Cannot deserialize config"); + let vm_config = RuskVmConfig::new().with_block_gas_limit(BLOCK_GAS_LIMIT); - new_state(dir, &snapshot, BLOCK_GAS_LIMIT) + new_state(dir, &snapshot, vm_config) } /// Executes three different transactions in the same block, expecting only two diff --git a/rusk/tests/services/owner_calls.rs b/rusk/tests/services/owner_calls.rs index adaac088bb..9415616015 100644 --- a/rusk/tests/services/owner_calls.rs +++ b/rusk/tests/services/owner_calls.rs @@ -10,6 +10,7 @@ use rand::rngs::StdRng; use rand::SeedableRng; use rkyv::validation::validators::DefaultValidator; use rkyv::{Archive, Deserialize, Infallible, Serialize}; +use rusk::node::RuskVmConfig; use std::collections::HashMap; use std::path::{Path, PathBuf}; use std::sync::{Arc, RwLock}; @@ -27,11 +28,7 @@ use tokio::sync::broadcast; use tracing::info; use crate::common::logger; -use crate::common::state::DEFAULT_MIN_DEPLOYMENT_GAS_PRICE; -use crate::common::state::DEFAULT_MIN_DEPLOY_POINTS; -use crate::common::state::{ - DEFAULT_GAS_PER_DEPLOY_BYTE, DEFAULT_MIN_GAS_LIMIT, -}; +use crate::common::state::DEFAULT_MIN_GAS_LIMIT; use crate::common::wallet::{ test_wallet as wallet, test_wallet::Wallet, TestStateClient, TestStore, }; @@ -83,12 +80,8 @@ fn initial_state>( let rusk = Rusk::new( dir, CHAIN_ID, - None, - DEFAULT_GAS_PER_DEPLOY_BYTE, - DEFAULT_MIN_DEPLOYMENT_GAS_PRICE, + RuskVmConfig::new(), DEFAULT_MIN_GAS_LIMIT, - DEFAULT_MIN_DEPLOY_POINTS, - BLOCK_GAS_LIMIT, u64::MAX, sender, ) diff --git a/rusk/tests/services/phoenix_stake.rs b/rusk/tests/services/phoenix_stake.rs index f0c3318e4a..d8b73c282d 100644 --- a/rusk/tests/services/phoenix_stake.rs +++ b/rusk/tests/services/phoenix_stake.rs @@ -17,6 +17,7 @@ use dusk_core::{ use rand::prelude::*; use rand::rngs::StdRng; +use rusk::node::RuskVmConfig; use rusk::{Result, Rusk}; use std::collections::HashMap; use tempfile::tempdir; @@ -38,16 +39,18 @@ const DEPOSIT: u64 = 0; fn stake_state>(dir: P) -> Result { let snapshot = toml::from_str(include_str!("../config/stake.toml")) .expect("Cannot deserialize config"); + let vm_config = RuskVmConfig::new().with_block_gas_limit(BLOCK_GAS_LIMIT); - new_state(dir, &snapshot, BLOCK_GAS_LIMIT) + new_state(dir, &snapshot, vm_config) } // Creates the Rusk initial state for the tests below fn slash_state>(dir: P) -> Result { let snapshot = toml::from_str(include_str!("../config/slash.toml")) .expect("Cannot deserialize config"); + let vm_config = RuskVmConfig::new().with_block_gas_limit(BLOCK_GAS_LIMIT); - new_state(dir, &snapshot, BLOCK_GAS_LIMIT) + new_state(dir, &snapshot, vm_config) } /// Stakes an amount Dusk and produces a block with this single transaction, diff --git a/rusk/tests/services/transfer.rs b/rusk/tests/services/transfer.rs index 8655407301..e71c3d9c43 100644 --- 
a/rusk/tests/services/transfer.rs +++ b/rusk/tests/services/transfer.rs @@ -11,6 +11,7 @@ use std::sync::{Arc, RwLock}; use node_data::ledger::SpentTransaction; use rand::prelude::*; use rand::rngs::StdRng; +use rusk::node::RuskVmConfig; use rusk::{Result, Rusk}; use tempfile::tempdir; use tracing::info; @@ -29,8 +30,9 @@ const MAX_NOTES: u64 = 10; fn initial_state>(dir: P) -> Result { let snapshot = toml::from_str(include_str!("../config/transfer.toml")) .expect("Cannot deserialize config"); + let vm_config = RuskVmConfig::new().with_block_gas_limit(BLOCK_GAS_LIMIT); - new_state(dir, &snapshot, BLOCK_GAS_LIMIT) + new_state(dir, &snapshot, vm_config) } /// Transacts between two accounts on the in the same wallet and produces a diff --git a/rusk/tests/services/unspendable.rs b/rusk/tests/services/unspendable.rs index 0d425c2697..0b57fdaf18 100644 --- a/rusk/tests/services/unspendable.rs +++ b/rusk/tests/services/unspendable.rs @@ -14,6 +14,7 @@ use dusk_core::transfer::{ }; use rand::prelude::*; use rand::rngs::StdRng; +use rusk::node::RuskVmConfig; use rusk::{Result, Rusk}; use tempfile::tempdir; use tracing::info; @@ -38,8 +39,9 @@ const DEPOSIT: u64 = 0; fn initial_state>(dir: P) -> Result { let snapshot = toml::from_str(include_str!("../config/unspendable.toml")) .expect("Cannot deserialize config"); + let vm_config = RuskVmConfig::new().with_block_gas_limit(BLOCK_GAS_LIMIT); - new_state(dir, &snapshot, BLOCK_GAS_LIMIT) + new_state(dir, &snapshot, vm_config) } const SENDER_INDEX_0: u8 = 0; diff --git a/scripts/persistent-docker-setup/detect_ips.sh b/scripts/persistent-docker-setup/detect_ips.sh new file mode 100755 index 0000000000..2420ea4a00 --- /dev/null +++ b/scripts/persistent-docker-setup/detect_ips.sh @@ -0,0 +1,56 @@ +#!/bin/bash + +# Script for detecting IP addresses in the persistent state Docker container. + +# Fetch IPv4 WAN address using ifconfig.me, fallback to ipinfo.io +PUBLIC_IP=$(curl -4 -s https://ifconfig.me) +if [ -z "$PUBLIC_IP" ]; then + PUBLIC_IP=$(curl -4 -s https://ipinfo.io/ip) +fi + +# Validate IPv4 address +if [[ -z "$PUBLIC_IP" || ! 
"$PUBLIC_IP" =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then + echo "Error: Unable to retrieve a valid WAN IPv4 address" + exit 1 +fi + +runOnMac=false +int2ip() { printf ${2+-v} $2 "%d.%d.%d.%d" \ + $(($1>>24)) $(($1>>16&255)) $(($1>>8&255)) $(($1&255)) ;} +ip2int() { local _a=(${1//./ }) ; printf ${2+-v} $2 "%u" $(( _a<<24 | + ${_a[1]} << 16 | ${_a[2]} << 8 | ${_a[3]} )) ;} +while IFS=$' :\t\r\n' read a b c d; do + [ "$a" = "usage" ] && [ "$b" = "route" ] && runOnMac=true + if $runOnMac ;then + case $a in + gateway ) gWay=$b ;; + interface ) iFace=$b ;; + esac + else + [ "$a" = "0.0.0.0" ] && [ "$c" = "$a" ] && iFace=${d##* } gWay=$b + fi +done < <(/sbin/route -n 2>&1 || /sbin/route -n get 0.0.0.0/0) +ip2int $gWay gw +while read lhs rhs; do + [ "$lhs" ] && { + [ -z "${lhs#*:}" ] && iface=${lhs%:} + [ "$lhs" = "inet" ] && [ "$iface" = "$iFace" ] && { + mask=${rhs#*netmask } + mask=${mask%% *} + [ "$mask" ] && [ -z "${mask%0x*}" ] && + printf -v mask %u $mask || + ip2int $mask mask + ip2int ${rhs%% *} ip + (( ( ip & mask ) == ( gw & mask ) )) && + int2ip $ip myIp && int2ip $mask netMask + } + } +done < <(/sbin/ifconfig) + +echo "$PUBLIC_IP" +if [ -z "$myIp" ]; then + echo "$PUBLIC_IP" +else + echo "$myIp" +fi + diff --git a/scripts/persistent-docker-setup/setup.sh b/scripts/persistent-docker-setup/setup.sh new file mode 100644 index 0000000000..746ea987f0 --- /dev/null +++ b/scripts/persistent-docker-setup/setup.sh @@ -0,0 +1,111 @@ +#!/bin/bash +set -e + +# Script for setting up a node in the persistent state Docker container. +# +# It detects the IP addresses for the node, generates a configuration file for +# the node based on the default `rusk.toml` used in the dusk-node-installer (or +# a user-supplied configuration file which can be provided by mounting at +# `/opt/dusk/conf/rusk.template.toml`), and runs the node. + +echo "Starting node environment" + +RUSK_CONFIG_DIR=/opt/dusk/conf +RUSK_TEMPLATE_CONFIG_PATH="$RUSK_CONFIG_DIR/rusk.template.toml" +RUSK_CONFIG_PATH="$RUSK_CONFIG_DIR/rusk.toml" + +detect_ips_output=$(./detect_ips.sh) +PUBLIC_IP=$(echo "$detect_ips_output" | sed -n '1p') +LISTEN_IP=$(echo "$detect_ips_output" | sed -n '2p') + +SELECTED_NETWORK="" +case "$NETWORK" in + mainnet) + SELECTED_NETWORK="mainnet" + ;; + testnet) + SELECTED_NETWORK="testnet" + ;; + *) + echo "Unknown network $NETWORK. 
Defaulting to mainnet" + SELECTED_NETWORK="mainnet" +esac + +toml_set() { + file=$1 + property=$2 + value=$3 + + echo -e "$(toml-cli set $file $property $value)" > $file +} + +# Configure your local installation based on the selected network +configure_network() { + echo "Generating configuration" + + cp "$RUSK_TEMPLATE_CONFIG_PATH" "$RUSK_CONFIG_PATH" + toml_set "$RUSK_CONFIG_PATH" kadcast.public_address "$PUBLIC_IP:9000" + toml_set "$RUSK_CONFIG_PATH" kadcast.listen_address "$LISTEN_IP:9000" + if toml-cli get "$RUSK_CONFIG_PATH" http &> /dev/null; then + toml_set "$RUSK_CONFIG_PATH" http.listen_address "$LISTEN_IP:8080" + fi +} + +download_rusk_config_template() { + echo "Downloading default template rusk config from the dusk node installer" + local remote_location + case "$SELECTED_NETWORK" in + mainnet) + remote_location="https://raw.githubusercontent.com/dusk-network/node-installer/ac1dd78eb31be4dba1c9c0986f6d6a06b5bd4fcc/conf/mainnet.toml" + ;; + testnet) + remote_location="https://raw.githubusercontent.com/dusk-network/node-installer/ac1dd78eb31be4dba1c9c0986f6d6a06b5bd4fcc/conf/testnet.toml" + ;; + esac + mkdir -p "$RUSK_CONFIG_DIR" + curl -o "$RUSK_TEMPLATE_CONFIG_PATH" "$remote_location" + if [ "$(cat $RUSK_TEMPLATE_CONFIG_PATH)" = "404: Not Found" ]; then + echo "Couldn't find the default rusk template config file. This is a bug, please file an issue." + exit 1 + fi +} + +download_genesis_config() { + echo "Downloading the genesis config from the dusk node installer" + local remote_location + case "$SELECTED_NETWORK" in + mainnet) + remote_location="https://raw.githubusercontent.com/dusk-network/node-installer/ac1dd78eb31be4dba1c9c0986f6d6a06b5bd4fcc/conf/mainnet.genesis" + ;; + testnet) + remote_location="https://raw.githubusercontent.com/dusk-network/node-installer/ac1dd78eb31be4dba1c9c0986f6d6a06b5bd4fcc/conf/testnet.genesis" + ;; + esac + mkdir -p "$RUSK_CONFIG_DIR" + curl -o "$RUSK_RECOVERY_INPUT" "$remote_location" + if [ "$(cat $RUSK_RECOVERY_INPUT)" = "404: Not Found" ]; then + echo "Couldn't find the genesis config file. This is a bug, please file an issue." + exit 1 + fi +} + +if [ ! 
-f "$RUSK_TEMPLATE_CONFIG_PATH" ]; then + download_rusk_config_template +fi +download_genesis_config +configure_network + +if [ -z "$DUSK_CONSENSUS_KEYS_PASS" ]; then + echo "DUSK_CONSENSUS_KEYS_PASS is not set" + exit 1 +fi + +echo "Selected network: $NETWORK" + +/opt/dusk/bin/rusk recovery keys +/opt/dusk/bin/rusk recovery state + +echo "Starting rusk" +echo "Rusk config:" +cat "$RUSK_CONFIG_PATH" +/opt/dusk/bin/rusk --config "$RUSK_CONFIG_PATH" diff --git a/vm/CHANGELOG.md b/vm/CHANGELOG.md index 5339786846..38a26f64e9 100644 --- a/vm/CHANGELOG.md +++ b/vm/CHANGELOG.md @@ -7,7 +7,19 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] -[1.0.0] - 2025-01-23 +## [1.1.0] - 2025-02-14 + +### Added + +- Add `PUBLIC_SENDER` available to session [#3341] + +### Changed + +- Change `execution` module to use `execution::Config` [#3437] +- Change `dusk-core` dependency to `1.0.1-alpha` [#3341] +- Change `piecrust` dependency to `0.27.1` [#3341] + +## [1.0.0] - 2025-01-23 ### Changed @@ -21,8 +33,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 [#3235]: https://github.com/dusk-network/rusk/issues/3235 +[#3341]: https://github.com/dusk-network/rusk/issues/3341 [#3405]: https://github.com/dusk-network/rusk/issues/3405 +[#3437]: https://github.com/dusk-network/rusk/issues/3437 -[Unreleased]: https://github.com/dusk-network/rusk/compare/dusk-vm-1.0.0...HEAD +[Unreleased]: https://github.com/dusk-network/rusk/compare/dusk-vm-1.1.0...HEAD +[1.1.0]: https://github.com/dusk-network/rusk/compare/dusk-vm-1.0.0...dusk-vm-1.1.0 [1.0.0]: https://github.com/dusk-network/rusk/compare/dusk-vm-0.1.0...dusk-vm-1.0.0 [0.1.0]: https://github.com/dusk-network/rusk/tree/dusk-vm-0.1.0 diff --git a/vm/Cargo.toml b/vm/Cargo.toml index 1ec2b332c7..d8f6162fd6 100644 --- a/vm/Cargo.toml +++ b/vm/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "dusk-vm" -version = "1.0.1-alpha.1" +version = "1.1.1-alpha.1" edition = "2021" repository = "https://github.com/dusk-network/rusk" diff --git a/vm/src/execute.rs b/vm/src/execute.rs index 0ea09ee9f1..49d5ba11e9 100644 --- a/vm/src/execute.rs +++ b/vm/src/execute.rs @@ -4,13 +4,16 @@ // // Copyright (c) DUSK NETWORK. All rights reserved. +mod config; + use blake2b_simd::Params; -use dusk_core::abi::{ContractError, ContractId, CONTRACT_ID_BYTES}; -use dusk_core::transfer::{ - data::ContractBytecode, Transaction, TRANSFER_CONTRACT, -}; +use dusk_core::abi::{ContractError, ContractId, Metadata, CONTRACT_ID_BYTES}; +use dusk_core::transfer::data::ContractBytecode; +use dusk_core::transfer::{Transaction, TRANSFER_CONTRACT}; use piecrust::{CallReceipt, Error, Session}; +pub use config::Config; + /// Executes a transaction in the provided session. /// /// This function processes the transaction, invoking smart contracts or @@ -53,43 +56,40 @@ use piecrust::{CallReceipt, Error, Session}; /// # Arguments /// * `session` - A mutable reference to the session executing the transaction. /// * `tx` - The transaction to execute. -/// * `gas_per_deploy_byte` - The amount of gas points charged for each byte in -/// a contract-deployment bytecode. -/// * `min_deploy_points` - The minimum gas points charged for a contract -/// deployment. -/// * `min_deploy_gas_price` - The minimum gas price set for a contract -/// deployment +/// * `config` - The configuration for the execution of the transaction. /// /// # Returns /// A result indicating success or failure. 
pub fn execute( session: &mut Session, tx: &Transaction, - gas_per_deploy_byte: u64, - min_deploy_points: u64, - min_deploy_gas_price: u64, + config: &Config, ) -> Result, ContractError>>, Error> { // Transaction will be discarded if it is a deployment transaction // with gas limit smaller than deploy charge. - deploy_check(tx, gas_per_deploy_byte, min_deploy_gas_price)?; + deploy_check(tx, config)?; + + if config.with_public_sender { + let _ = session + .set_meta(Metadata::PUBLIC_SENDER, tx.moonlight_sender().copied()); + } // Spend the inputs and execute the call. If this errors the transaction is // unspendable. - let mut receipt = session.call::<_, Result, ContractError>>( - TRANSFER_CONTRACT, - "spend_and_execute", - tx.strip_off_bytecode().as_ref().unwrap_or(tx), - tx.gas_limit(), - )?; + let mut receipt = session + .call::<_, Result, ContractError>>( + TRANSFER_CONTRACT, + "spend_and_execute", + tx.strip_off_bytecode().as_ref().unwrap_or(tx), + tx.gas_limit(), + ) + .map_err(|e| { + clear_session(session, config); + e + })?; // Deploy if this is a deployment transaction and spend part is successful. - contract_deploy( - session, - tx, - gas_per_deploy_byte, - min_deploy_points, - &mut receipt, - ); + contract_deploy(session, tx, config, &mut receipt); // Ensure all gas is consumed if there's an error in the contract call if receipt.data.is_err() { @@ -110,15 +110,21 @@ pub fn execute( receipt.events.extend(refund_receipt.events); + clear_session(session, config); + Ok(receipt) } -fn deploy_check( - tx: &Transaction, - gas_per_deploy_byte: u64, - min_deploy_gas_price: u64, -) -> Result<(), Error> { +fn clear_session(session: &mut Session, config: &Config) { + if config.with_public_sender { + let _ = session.remove_meta(Metadata::PUBLIC_SENDER); + } +} + +fn deploy_check(tx: &Transaction, config: &Config) -> Result<(), Error> { if tx.deploy().is_some() { + let gas_per_deploy_byte = config.gas_per_deploy_byte; + let min_deploy_gas_price = config.min_deploy_gas_price; let deploy_charge = tx.deploy_charge(gas_per_deploy_byte, min_deploy_gas_price); @@ -145,11 +151,13 @@ fn deploy_check( fn contract_deploy( session: &mut Session, tx: &Transaction, - gas_per_deploy_byte: u64, - min_deploy_points: u64, + config: &Config, receipt: &mut CallReceipt, ContractError>>, ) { if let Some(deploy) = tx.deploy() { + let gas_per_deploy_byte = config.gas_per_deploy_byte; + let min_deploy_points = config.min_deploy_points; + let gas_left = tx.gas_limit() - receipt.gas_spent; if receipt.data.is_ok() { let deploy_charge = diff --git a/vm/src/execute/config.rs b/vm/src/execute/config.rs new file mode 100644 index 0000000000..761f54d402 --- /dev/null +++ b/vm/src/execute/config.rs @@ -0,0 +1,37 @@ +// This Source Code Form is subject to the terms of the Mozilla Public +// License, v. 2.0. If a copy of the MPL was not distributed with this +// file, You can obtain one at http://mozilla.org/MPL/2.0/. +// +// Copyright (c) DUSK NETWORK. All rights reserved. + +/// Configuration for the execution of a transaction. +#[derive(Debug, Clone)] +pub struct Config { + /// The amount of gas points charged for each byte in a contract-deployment + /// bytecode. + pub gas_per_deploy_byte: u64, + /// The minimum gas points charged for a contract deployment. + pub min_deploy_points: u64, + /// The minimum gas price set for a contract deployment + pub min_deploy_gas_price: u64, + /// Enable the public sender metadata in the transaction. + /// + /// This field may be deprecated after the feature rollout. 
+ pub with_public_sender: bool, +} + +impl Default for Config { + fn default() -> Self { + Self::DEFAULT + } +} + +impl Config { + /// Create a config with all values to default + pub const DEFAULT: Config = Config { + gas_per_deploy_byte: 0, + min_deploy_points: 0, + min_deploy_gas_price: 0, + with_public_sender: false, + }; +} diff --git a/vm/src/lib.rs b/vm/src/lib.rs index 85594729a3..ae4f9ccf46 100644 --- a/vm/src/lib.rs +++ b/vm/src/lib.rs @@ -13,7 +13,7 @@ extern crate alloc; -pub use self::execute::{execute, gen_contract_id}; +pub use self::execute::{execute, gen_contract_id, Config as ExecutionConfig}; pub use piecrust::{ CallReceipt, CallTree, CallTreeElem, ContractData, Error, PageOpening, Session, diff --git a/w3sper.js/CHANGELOG.md b/w3sper.js/CHANGELOG.md new file mode 100644 index 0000000000..f5fca3ad6f --- /dev/null +++ b/w3sper.js/CHANGELOG.md @@ -0,0 +1,22 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [Unreleased] + +### Changed + +- Changed AddressTransfer to support memo data [#3460] + +## [1.0.0] - 2025-01-15 + +- First `w3sper.js` release + + +[#3460]: https://github.com/dusk-network/rusk/issues/3460 + +[Unreleased]: https://github.com/dusk-network/rusk/compare/w3sper-v.0.1.0...HEAD +[1.0.0]: https://github.com/dusk-network/rusk/tree/w3sper-v.0.1.0 diff --git a/w3sper.js/deno.json b/w3sper.js/deno.json index 280d85eedb..84dbedd46f 100644 --- a/w3sper.js/deno.json +++ b/w3sper.js/deno.json @@ -9,8 +9,8 @@ "tasks": { "test": "deno test --allow-net --allow-read --allow-write --allow-run --allow-import", "wasm": "cd ../wallet-core && cargo wasm", - "state": "cd ../rusk && cargo r --release -p rusk -- recovery state --init ../w3sper.js/tests/assets/genesis.toml -o /tmp/example.state", - "rusk": "cd ../rusk && RUSK_MINIMUM_BLOCK_TIME=1 DUSK_CONSENSUS_KEYS_PASS=password cargo r --release -p rusk -- -s /tmp/example.state" + "state": "cd ../rusk && cargo r --release -p dusk-rusk -- recovery state --init ../w3sper.js/tests/assets/genesis.toml -o /tmp/example.state", + "rusk": "cd ../rusk && RUSK_MINIMUM_BLOCK_TIME=1 DUSK_CONSENSUS_KEYS_PASS=password cargo r --release -p dusk-rusk -- -s /tmp/example.state" }, "publish": { "include": ["LICENSE", "README.md", "src"], diff --git a/w3sper.js/src/protocol-driver/mod.js b/w3sper.js/src/protocol-driver/mod.js index 248fd51dc3..9b456e1426 100644 --- a/w3sper.js/src/protocol-driver/mod.js +++ b/w3sper.js/src/protocol-driver/mod.js @@ -46,7 +46,7 @@ export function load(source, importsURL) { const minimumStake = await u64(ptr(globals.MINIMUM_STAKE)); return [{ key, item }, minimumStake]; - }), + }) )(); } @@ -79,7 +79,7 @@ export function useAsProtocolDriver(source, importsURL) { export async function opening(bytes) { const task = protocolDriverModule.task(async function ( { malloc, opening }, - { memcpy }, + { memcpy } ) { const buffer = new Uint8Array(DataBuffer.from(bytes)); @@ -98,7 +98,7 @@ export async function opening(bytes) { export async function displayScalar(bytes) { const task = protocolDriverModule.task(async function ( { malloc, display_scalar }, - { memcpy }, + { memcpy } ) { let ptr = await malloc(32); await memcpy(ptr, bytes, 32); @@ -120,7 +120,7 @@ export async function displayScalar(bytes) { export async function bookmarks(notes) { const task = protocolDriverModule.task(async function ( { malloc, 
bookmarks }, - { memcpy }, + { memcpy } ) { if (notes.length === 0) { return []; @@ -140,7 +140,7 @@ export async function bookmarks(notes) { let code = await bookmarks(ptr, bookmarks_ptr); if (code > 0) throw DriverError.from(code); bookmarks_ptr = new DataView( - (await memcpy(null, bookmarks_ptr, 4)).buffer, + (await memcpy(null, bookmarks_ptr, 4)).buffer ).getUint32(0, true); return await memcpy(null, bookmarks_ptr, notes.size * 8); @@ -152,7 +152,7 @@ export async function bookmarks(notes) { export async function pickNotes(owner, notes, value) { const task = protocolDriverModule.task(async function ( { malloc, pick_notes }, - { memcpy }, + { memcpy } ) { if (notes.length === 0) { return new Map(); @@ -184,14 +184,14 @@ export async function pickNotes(owner, notes, value) { let len = new DataView((await memcpy(null, ptr, 4)).buffer).getUint32( 0, - true, + true ); notesBuffer = await memcpy(null, ptr + 4, len); let notesLen = new DataView(notesBuffer.buffer).getUint32( notesBuffer.byteLength - 4, - true, + true ); let itemSize = (notesBuffer.buffer.byteLength - 8) / notesLen; @@ -231,7 +231,7 @@ export const generateProfile = (seed, n) => // Return the content of the `out` boxed value return out.valueOf(); - }), + }) )(); export const mapOwned = (owners, notes) => @@ -245,7 +245,7 @@ export const mapOwned = (owners, notes) => const firstOwner = owners[0]; const sharesSameSource = owners.every((owner) => - firstOwner.sameSourceOf(owner), + firstOwner.sameSourceOf(owner) ); if (!sharesSameSource) { @@ -256,7 +256,7 @@ export const mapOwned = (owners, notes) => let entrySize = keySize + itemSize; let notesBuffer = new Uint8Array( - DataBuffer.from(notes, { size: notes.byteLength / itemSize }), + DataBuffer.from(notes, { size: notes.byteLength / itemSize }) ); // Allocates memory on the WASM heap and then places `seed` into it. 
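Most of the call sites touched in this file read results back from the WASM heap with the same convention: dereference a pointer, read a 4-byte little-endian length prefix, then copy the payload that follows it. A minimal sketch of that pattern, assuming the module's own `memcpy(dest, src, len)` helper is in scope (the `readLengthPrefixed` name is illustrative, not part of this diff); the remaining hunks in this file only drop trailing commas from the last argument of multi-line calls, so behaviour is unchanged:

```js
// Reads a length-prefixed buffer out of WASM memory, mirroring the pattern
// used throughout protocol-driver/mod.js (illustrative helper only).
async function readLengthPrefixed(memcpy, ptr) {
  // The first 4 bytes at `ptr` hold the payload length as a little-endian u32.
  const len = new DataView((await memcpy(null, ptr, 4)).buffer).getUint32(
    0,
    true
  );

  // The payload follows immediately after the 4-byte prefix.
  return memcpy(null, ptr + 4, len);
}
```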
@@ -277,7 +277,7 @@ export const mapOwned = (owners, notes) => indexes[0] = owners.length; indexes.set( owners.map((p) => +p), - 1, + 1 ); let idx_ptr = await box(indexes); @@ -291,7 +291,7 @@ export const mapOwned = (owners, notes) => +idx_ptr, +notes_ptr, +out_ptr, - +info_ptr, + +info_ptr ); if (code > 0) throw DriverError.from(code); @@ -328,13 +328,13 @@ export const mapOwned = (owners, notes) => } return [results, { blockHeight, bookmark }]; - }), + }) )(); export async function balance(seed, n, notes) { const task = await protocolDriverModule.task(async function ( { malloc, balance }, - { memcpy }, + { memcpy } ) { // Copy the seed to avoid invalidating the original buffer seed = new Uint8Array(seed); @@ -363,10 +363,10 @@ export async function balance(seed, n, notes) { export const accountsIntoRaw = async (users) => protocolDriverModule.task(async function ( { malloc, accounts_into_raw }, - { memcpy }, + { memcpy } ) { let buffer = new Uint8Array( - DataBuffer.from(DataBuffer.flatten(users.map((user) => user.valueOf()))), + DataBuffer.from(DataBuffer.flatten(users.map((user) => user.valueOf()))) ); // copy buffer into WASM memory @@ -383,12 +383,12 @@ export const accountsIntoRaw = async (users) => // Copy the result from WASM memory out_ptr = new DataView((await memcpy(null, out_ptr, 4)).buffer).getUint32( 0, - true, + true ); let len = new DataView((await memcpy(null, out_ptr, 4)).buffer).getUint32( 0, - true, + true ); buffer = await memcpy(null, out_ptr + 4, len); @@ -404,7 +404,7 @@ export const accountsIntoRaw = async (users) => export const intoProven = async (tx, proof) => protocolDriverModule.task(async function ( { malloc, into_proven }, - { memcpy }, + { memcpy } ) { let buffer = tx.valueOf(); const tx_ptr = await malloc(buffer.byteLength); @@ -426,11 +426,11 @@ export const intoProven = async (tx, proof) => if (code > 0) throw DriverError.from(code); proved_ptr = new DataView( - (await memcpy(null, proved_ptr, 4)).buffer, + (await memcpy(null, proved_ptr, 4)).buffer ).getUint32(0, true); const len = new DataView( - (await memcpy(null, proved_ptr, 4)).buffer, + (await memcpy(null, proved_ptr, 4)).buffer ).getUint32(0, true); buffer = await memcpy(null, proved_ptr + 4, len); @@ -474,7 +474,7 @@ export const phoenix = async (info) => new DataView(transfer_value.buffer).setBigUint64( 0, info.transfer_value, - true, + true ); ptr.transfer_value = await malloc(8); await memcpy(ptr.transfer_value, transfer_value); @@ -494,6 +494,15 @@ export const phoenix = async (info) => ptr.gas_price = await malloc(8); await memcpy(ptr.gas_price, gas_price); + const data = serializeMemo(info.data); + + if (data) { + ptr.data = await malloc(data.byteLength); + await memcpy(ptr.data, data); + } else { + ptr.data = null; + } + let tx = await malloc(4); let proof = await malloc(4); @@ -512,31 +521,31 @@ export const phoenix = async (info) => ptr.gas_limit, ptr.gas_price, info.chainId, - info.data, + ptr.data, tx, - proof, + proof ); if (code > 0) throw DriverError.from(code); let tx_ptr = new DataView((await memcpy(null, tx, 4)).buffer).getUint32( 0, - true, + true ); let tx_len = new DataView((await memcpy(null, tx_ptr, 4)).buffer).getUint32( 0, - true, + true ); const tx_buffer = await memcpy(null, tx_ptr, tx_len); let proof_ptr = new DataView( - (await memcpy(null, proof, 4)).buffer, + (await memcpy(null, proof, 4)).buffer ).getUint32(0, true); let proof_len = new DataView( - (await memcpy(null, proof_ptr, 4)).buffer, + (await memcpy(null, proof_ptr, 4)).buffer ).getUint32(0, true); const 
proof_buffer = await memcpy(null, proof_ptr + 4, proof_len); @@ -562,7 +571,7 @@ export const moonlight = async (info) => new DataView(transfer_value.buffer).setBigUint64( 0, info.transfer_value, - true, + true ); ptr.transfer_value = await malloc(8); await memcpy(ptr.transfer_value, transfer_value); @@ -612,19 +621,19 @@ export const moonlight = async (info) => info.chainId, ptr.data, tx, - hash, + hash ); if (code > 0) throw DriverError.from(code); let tx_ptr = new DataView((await memcpy(null, tx, 4)).buffer).getUint32( 0, - true, + true ); let tx_len = new DataView((await memcpy(null, tx_ptr, 4)).buffer).getUint32( 0, - true, + true ); const tx_buffer = await memcpy(null, tx_ptr + 4, tx_len); @@ -636,7 +645,7 @@ export const moonlight = async (info) => export const unshield = async (info) => protocolDriverModule.task(async function ( { malloc, phoenix_to_moonlight }, - { memcpy }, + { memcpy } ) { const ptr = Object.create(null); @@ -671,7 +680,7 @@ export const unshield = async (info) => new DataView(allocate_value.buffer).setBigUint64( 0, info.allocate_value, - true, + true ); ptr.allocate_value = await malloc(8); await memcpy(ptr.allocate_value, allocate_value); @@ -703,29 +712,29 @@ export const unshield = async (info) => ptr.gas_price, info.chainId, tx, - proof, + proof ); if (code > 0) throw DriverError.from(code); let tx_ptr = new DataView((await memcpy(null, tx, 4)).buffer).getUint32( 0, - true, + true ); let tx_len = new DataView((await memcpy(null, tx_ptr, 4)).buffer).getUint32( 0, - true, + true ); const tx_buffer = await memcpy(null, tx_ptr, tx_len); let proof_ptr = new DataView( - (await memcpy(null, proof, 4)).buffer, + (await memcpy(null, proof, 4)).buffer ).getUint32(0, true); let proof_len = new DataView( - (await memcpy(null, proof_ptr, 4)).buffer, + (await memcpy(null, proof_ptr, 4)).buffer ).getUint32(0, true); const proof_buffer = await memcpy(null, proof_ptr + 4, proof_len); @@ -736,7 +745,7 @@ export const unshield = async (info) => export const shield = async (info) => protocolDriverModule.task(async function ( { malloc, moonlight_to_phoenix }, - { memcpy }, + { memcpy } ) { const ptr = Object.create(null); @@ -754,7 +763,7 @@ export const shield = async (info) => new DataView(allocate_value.buffer).setBigUint64( 0, info.allocate_value, - true, + true ); ptr.allocate_value = await malloc(8); await memcpy(ptr.allocate_value, allocate_value); @@ -788,19 +797,19 @@ export const shield = async (info) => ptr.nonce, info.chainId, tx, - hash, + hash ); if (code > 0) throw DriverError.from(code); let tx_ptr = new DataView((await memcpy(null, tx, 4)).buffer).getUint32( 0, - true, + true ); let tx_len = new DataView((await memcpy(null, tx_ptr, 4)).buffer).getUint32( 0, - true, + true ); const tx_buffer = await memcpy(null, tx_ptr + 4, tx_len); @@ -812,7 +821,7 @@ export const shield = async (info) => export const stake = async (info) => protocolDriverModule.task(async function ( { malloc, moonlight_stake }, - { memcpy }, + { memcpy } ) { const ptr = Object.create(null); @@ -855,19 +864,19 @@ export const stake = async (info) => ptr.nonce, info.chainId, tx, - hash, + hash ); if (code > 0) throw DriverError.from(code); let tx_ptr = new DataView((await memcpy(null, tx, 4)).buffer).getUint32( 0, - true, + true ); let tx_len = new DataView((await memcpy(null, tx_ptr, 4)).buffer).getUint32( 0, - true, + true ); const tx_buffer = await memcpy(null, tx_ptr + 4, tx_len); @@ -879,7 +888,7 @@ export const stake = async (info) => export const unstake = async (info) => 
protocolDriverModule.task(async function ( { malloc, moonlight_unstake }, - { memcpy }, + { memcpy } ) { const ptr = Object.create(null); @@ -897,7 +906,7 @@ export const unstake = async (info) => new DataView(unstake_value.buffer).setBigUint64( 0, info.unstake_value, - true, + true ); ptr.unstake_value = await malloc(8); await memcpy(ptr.unstake_value, unstake_value); @@ -930,19 +939,19 @@ export const unstake = async (info) => ptr.nonce, info.chainId, tx, - hash, + hash ); if (code > 0) throw DriverError.from(code); let tx_ptr = new DataView((await memcpy(null, tx, 4)).buffer).getUint32( 0, - true, + true ); let tx_len = new DataView((await memcpy(null, tx_ptr, 4)).buffer).getUint32( 0, - true, + true ); const tx_buffer = await memcpy(null, tx_ptr + 4, tx_len); @@ -954,7 +963,7 @@ export const unstake = async (info) => export const withdraw = async (info) => protocolDriverModule.task(async function ( { malloc, moonlight_stake_reward }, - { memcpy }, + { memcpy } ) { const ptr = Object.create(null); @@ -972,7 +981,7 @@ export const withdraw = async (info) => new DataView(reward_amount.buffer).setBigUint64( 0, info.reward_amount, - true, + true ); ptr.reward_amount = await malloc(8); await memcpy(ptr.reward_amount, reward_amount); @@ -1005,19 +1014,19 @@ export const withdraw = async (info) => ptr.nonce, info.chainId, tx, - hash, + hash ); if (code > 0) throw DriverError.from(code); let tx_ptr = new DataView((await memcpy(null, tx, 4)).buffer).getUint32( 0, - true, + true ); let tx_len = new DataView((await memcpy(null, tx_ptr, 4)).buffer).getUint32( 0, - true, + true ); const tx_buffer = await memcpy(null, tx_ptr + 4, tx_len); diff --git a/w3sper.js/src/transaction.js b/w3sper.js/src/transaction.js index 6e98039e6c..c7c6820aa8 100644 --- a/w3sper.js/src/transaction.js +++ b/w3sper.js/src/transaction.js @@ -68,6 +68,11 @@ export class Transfer extends BasicTransfer { return builder; } + + memo(value) { + this[_attributes].memo = value; + return this; + } } class AccountTransfer extends Transfer { @@ -85,11 +90,6 @@ class AccountTransfer extends Transfer { return this; } - memo(value) { - this[_attributes].memo = value; - return this; - } - async build(network) { const sender = this.bookentry.profile; const { attributes } = this; @@ -154,6 +154,7 @@ class AddressTransfer extends Transfer { amount: transfer_value, obfuscated: obfuscated_transaction, gas, + memo: data, } = attributes; const sender = this.bookentry.profile; const receiver = base58.decode(to); @@ -163,7 +164,7 @@ class AddressTransfer extends Transfer { // Pick notes to spend from the treasury const picked = await bookkeeper.pick( sender.address, - transfer_value + gas.total, + transfer_value + gas.total ); const syncer = new AddressSyncer(network); @@ -195,7 +196,7 @@ class AddressTransfer extends Transfer { gas_limit: gas.limit, gas_price: gas.price, chainId, - data: null, + data, }); // Attempt to prove the transaction @@ -225,7 +226,7 @@ export class UnshieldTransfer extends BasicTransfer { // Pick notes to spend from the treasury const picked = await bookkeeper.pick( profile.address, - allocate_value + gas.total, + allocate_value + gas.total ); const syncer = new AddressSyncer(network); @@ -321,7 +322,7 @@ export class StakeTransfer extends BasicTransfer { if (!isTopup && stake_value < minimumStake) { throw new RangeError( - `Stake amount must be greater or equal than ${minimumStake}`, + `Stake amount must be greater or equal than ${minimumStake}` ); } @@ -335,7 +336,7 @@ export class StakeTransfer extends BasicTransfer { 
if (hasStake && !isTopup) { throw new Error( - "Stake already exists. Use `topup` to add to the current stake", + "Stake already exists. Use `topup` to add to the current stake" ); } else if (!hasStake && isTopup) { throw new Error("No stake to topup. Use `stake` to create a new stake"); @@ -392,7 +393,7 @@ export class UnstakeTransfer extends BasicTransfer { if (remainingStake > 0n && remainingStake < minimumStake) { throw new RangeError( - `Remaining stake must be greater or equal than ${minimumStake}`, + `Remaining stake must be greater or equal than ${minimumStake}` ); } @@ -436,11 +437,11 @@ export class WithdrawStakeRewardTransfer extends BasicTransfer { throw new Error(`No stake available to withdraw the reward from`); } else if (reward_amount > reward) { throw new RangeError( - `The withdrawn reward amount must be less or equal to ${reward}`, + `The withdrawn reward amount must be less or equal to ${reward}` ); } else if (!reward_amount) { throw new RangeError( - `Can't withdraw an empty reward amount. I mean, you could, but it would be pointless.`, + `Can't withdraw an empty reward amount. I mean, you could, but it would be pointless.` ); } diff --git a/w3sper.js/tests/transfer_test.js b/w3sper.js/tests/transfer_test.js index 53a3a62480..8e51ccbba3 100644 --- a/w3sper.js/tests/transfer_test.js +++ b/w3sper.js/tests/transfer_test.js @@ -43,7 +43,7 @@ test("Offline account transfers", async () => { // from the network, so it does not need to be connected. // All transactions are signed locally. const offlineOperations = useAsProtocolDriver( - await getLocalWasmBuffer(), + await getLocalWasmBuffer() ).then(async () => { const profiles = new ProfileGenerator(seeder); const to = @@ -59,8 +59,8 @@ test("Offline account transfers", async () => { .nonce(balance.nonce + BigInt(nonce)) .chain(Network.LOCALNET) .gas({ limit: 500_000_000n }) - .build(), - ), + .build() + ) ); assert.equal(transfers[0].nonce, balance.nonce + 1n); @@ -250,13 +250,11 @@ test("shield", async () => { await network.disconnect(); }); -test("memo transfer", async () => { +test("account memo transfer", async () => { const network = await Network.connect("http://localhost:8080/"); const profiles = new ProfileGenerator(seeder); const users = await Promise.all([profiles.default, profiles.next()]); - const accounts = new AccountSyncer(network); - const treasury = new Treasury(users); await treasury.update({ accounts }); @@ -296,13 +294,70 @@ test("memo transfer", async () => { 84, 97, 114, 97, 112, 105, 97, 32, 84, 97, 112, 105, 111, 99, 111, 44, 32, 99, 111, 109, 101, 32, 102, 111, 115, 115, 101, 32, 115, 116, 114, 105, 110, 103, 97, - ], + ] + ); + + assert.equal( + evt.memo({ as: "string" }), + "Tarapia Tapioco, come fosse stringa" + ); + + await network.disconnect(); +}); + +test("address memo transfer", async () => { + const { cleanup } = useAsProtocolDriver(await getLocalWasmBuffer()); // Temporarily needed, while the node doesn't serve the latest WASM. 
+ const network = await Network.connect("http://localhost:8080/"); + const profiles = new ProfileGenerator(seeder); + const users = await Promise.all([profiles.default, profiles.next()]); + const addresses = new AddressSyncer(network); + const treasury = new Treasury(users); + + await treasury.update({ addresses }); + + const bookkeeper = new Bookkeeper(treasury); + + let transfer = bookkeeper + .as(users[1]) + .transfer(1n) + .to(users[0].address) + .memo(new Uint8Array([2, 4, 8, 16])) + .gas({ limit: 500_000_000n }); + + let { hash } = await network.execute(transfer); + + let evt = await network.transactions.withId(hash).once.executed(); + + assert.equal([...evt.memo()], [2, 4, 8, 16]); + + await treasury.update({ addresses }); + + transfer = bookkeeper + .as(users[1]) + .transfer(1n) + .to(users[0].address) + .memo("Tarapia Tapioco, come fosse stringa") + .gas({ limit: 500_000_000n }); + + ({ hash } = await network.execute(transfer)); + + evt = await network.transactions.withId(hash).once.executed(); + + // deno-fmt-ignore + assert.equal( + [...evt.memo()], + [ + 84, 97, 114, 97, 112, 105, 97, 32, 84, 97, 112, 105, 111, 99, 111, 44, 32, + 99, 111, 109, 101, 32, 102, 111, 115, 115, 101, 32, 115, 116, 114, 105, + 110, 103, 97, + ] ); assert.equal( evt.memo({ as: "string" }), - "Tarapia Tapioco, come fosse stringa", + "Tarapia Tapioco, come fosse stringa" ); await network.disconnect(); + await cleanup(); // Remove when useAsProtocolDriver is removed. }); diff --git a/wallet-core/CHANGELOG.md b/wallet-core/CHANGELOG.md index 3b0b53e40b..ea934ea9df 100644 --- a/wallet-core/CHANGELOG.md +++ b/wallet-core/CHANGELOG.md @@ -7,6 +7,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [1.1.0] - 2025-02-14 + +### Changed + +- Changed phoenix function to allow data to be passed to transaction [#3438] + ## [1.0.1] - 2025-01-23 ### Changed @@ -18,8 +24,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - First `dusk-wallet-core` release +[#3438]: https://github.com/dusk-network/rusk/issues/3438 [#3405]: https://github.com/dusk-network/rusk/issues/3405 -[Unreleased]: https://github.com/dusk-network/rusk/compare/dusk-wallet-core-1.0.1...HEAD +[Unreleased]: https://github.com/dusk-network/rusk/compare/dusk-wallet-core-1.1.0...HEAD +[1.1.0]: https://github.com/dusk-network/rusk/compare/dusk-wallet-core-1.0.1...dusk-wallet-core-1.1.0 [1.0.1]: https://github.com/dusk-network/rusk/compare/wallet-core-1.0.0...dusk-wallet-core-1.0.1 [1.0.0]: https://github.com/dusk-network/rusk/tree/wallet-core-1.0.0 diff --git a/wallet-core/Cargo.toml b/wallet-core/Cargo.toml index 151f610bdf..3ceffd3620 100644 --- a/wallet-core/Cargo.toml +++ b/wallet-core/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "dusk-wallet-core" -version = "1.0.2-alpha.1" +version = "1.1.1-alpha.1" edition = "2021" description = "The core functionality of the Dusk wallet" license = "MPL-2.0" diff --git a/wallet-core/src/ffi.rs b/wallet-core/src/ffi.rs index 5bf7e742f5..1e4e38b13d 100644 --- a/wallet-core/src/ffi.rs +++ b/wallet-core/src/ffi.rs @@ -384,8 +384,13 @@ pub unsafe fn phoenix( .filter_map(|(note, opening)| opening.map(|op| (note, op))) .collect(); - let data: Option = - if data.is_null() { None } else { todo!() }; + let data: Option = if data.is_null() { + None + } else { + let buffer = mem::read_buffer(data); + + Some(buffer[1..].to_vec().into()) + }; let prover = NoOpProver::default(); diff --git a/web-wallet/CHANGELOG.md b/web-wallet/CHANGELOG.md 
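Taken together, the wallet-core FFI change and the new "address memo transfer" test above show that address (Phoenix) transfers can now carry a memo, not only account (Moonlight) ones. A usage sketch distilled from that test, assuming its `bookkeeper`, `users` and `network` objects are already set up:

```js
// Build an address transfer with a memo; memo() now lives on the Transfer base
// class, so it is available to AddressTransfer as well as AccountTransfer.
const transfer = bookkeeper
  .as(users[1])
  .transfer(1n)
  .to(users[0].address)
  .memo(new Uint8Array([2, 4, 8, 16])) // a plain string works too
  .gas({ limit: 500_000_000n });

const { hash } = await network.execute(transfer);
const evt = await network.transactions.withId(hash).once.executed();

// The executed event exposes the memo as raw bytes, or as a string via
// evt.memo({ as: "string" }).
console.log([...evt.memo()]); // [2, 4, 8, 16]
```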
index 5846f6ae31..6a72a01b4f 100644 --- a/web-wallet/CHANGELOG.md +++ b/web-wallet/CHANGELOG.md @@ -9,13 +9,31 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added +- Add memo field to send flow [#3112] +- Add "VITE_SYNC_INTERVAL" ENV variable [#3403] + +### Changed + +- Update Transactions list design [#1922] +- Auto-focus text field (Unlock page) [#3420] + +### Removed + +### Fixed + +- Fix width discrepancy in footer icons [#3163] +- Fix network methods not reusing the existing connection [#3486] + +## [1.3.0] - 2025-01-28 + +### Added + - Add support for partial unstake/claim rewards [#3009] - Add "Unstake" flow validation [#3009] - Store the wallet creation block height and show it in settings [#3381] ### Changed -- Update Transactions list design [#1922] - Change Review step label to "Overview" (Send flow) [#3387] ### Removed @@ -525,10 +543,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 [#3097]: https://github.com/dusk-network/rusk/issues/3097 [#3098]: https://github.com/dusk-network/rusk/issues/3098 [#3099]: https://github.com/dusk-network/rusk/issues/3099 +[#3112]: https://github.com/dusk-network/rusk/issues/3112 [#3113]: https://github.com/dusk-network/rusk/issues/3113 [#3129]: https://github.com/dusk-network/rusk/issues/3129 [#3156]: https://github.com/dusk-network/rusk/issues/3156 [#3160]: https://github.com/dusk-network/rusk/issues/3160 +[#3163]: https://github.com/dusk-network/rusk/issues/3163 [#3164]: https://github.com/dusk-network/rusk/issues/3164 [#3178]: https://github.com/dusk-network/rusk/issues/3178 [#3179]: https://github.com/dusk-network/rusk/issues/3179 @@ -550,10 +570,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 [#3362]: https://github.com/dusk-network/rusk/issues/3362 [#3381]: https://github.com/dusk-network/rusk/issues/3381 [#3387]: https://github.com/dusk-network/rusk/issues/3387 +[#3403]: https://github.com/dusk-network/rusk/issues/3403 +[#3420]: https://github.com/dusk-network/rusk/issues/3420 +[#3486]: https://github.com/dusk-network/rusk/issues/3486 [Unreleased]: https://github.com/dusk-network/rusk/tree/master/web-wallet +[1.3.0]: https://github.com/dusk-network/rusk/tree/web-wallet-v1.3.0 [1.2.0]: https://github.com/dusk-network/rusk/tree/web-wallet-v1.2.0 [1.1.0]: https://github.com/dusk-network/rusk/tree/web-wallet-v1.1.0 [1.0.0]: https://github.com/dusk-network/rusk/tree/web-wallet-v1.0.0 diff --git a/web-wallet/README.md b/web-wallet/README.md index 8619b0c401..07df89c53c 100644 --- a/web-wallet/README.md +++ b/web-wallet/README.md @@ -43,6 +43,7 @@ VITE_GAS_LIMIT_LOWER=10000000 VITE_GAS_LIMIT_UPPER=1000000000 VITE_GAS_PRICE_DEFAULT=1 VITE_GAS_PRICE_LOWER=1 +VITE_SYNC_INTERVAL=300000 VITE_MODE_MAINTENANCE=false VITE_REOWN_PROJECT_ID="" # the ID of the Bridge project (as on Reown Cloud) VITE_NODE_URL="" # connect to a specific node diff --git a/web-wallet/package-lock.json b/web-wallet/package-lock.json index c459f45d4e..b31917fe32 100644 --- a/web-wallet/package-lock.json +++ b/web-wallet/package-lock.json @@ -1,12 +1,12 @@ { "name": "web-wallet", - "version": "1.2.0", + "version": "1.3.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "web-wallet", - "version": "1.2.0", + "version": "1.3.0", "license": "MPL-2.0", "dependencies": { "@floating-ui/dom": "1.6.5", diff --git a/web-wallet/package.json b/web-wallet/package.json index 77235e1bb1..b8c967e316 100644 --- a/web-wallet/package.json +++ 
b/web-wallet/package.json @@ -33,7 +33,7 @@ "typecheck:watch": "svelte-kit sync && svelte-check --tsconfig ./jsconfig.json --watch --fail-on-warnings" }, "type": "module", - "version": "1.2.0", + "version": "1.3.0", "dependencies": { "@floating-ui/dom": "1.6.5", "@mdi/js": "7.4.47", diff --git a/web-wallet/src/lib/components/Send/Send.svelte b/web-wallet/src/lib/components/Send/Send.svelte index a9dc97f2a2..cc7873784c 100644 --- a/web-wallet/src/lib/components/Send/Send.svelte +++ b/web-wallet/src/lib/components/Send/Send.svelte @@ -9,8 +9,9 @@ mdiWalletOutline, } from "@mdi/js"; import { areValidGasSettings } from "$lib/contracts"; + import { getAddressInfo } from "$lib/wallet"; import { duskToLux, luxToDusk } from "$lib/dusk/currency"; - import { getAddressInfo } from "$lib/dusk/string"; + import { makeClassName } from "$lib/dusk/string"; import { logo } from "$lib/dusk/icons"; import { AnchorButton, @@ -18,6 +19,7 @@ Button, Icon, Stepper, + Switch, Textbox, Wizard, WizardStep, @@ -32,7 +34,7 @@ } from "$lib/components"; import { MESSAGES } from "$lib/constants"; - /** @type {(to: string, amount: bigint, gasPrice: bigint, gasLimit: bigint) => Promise} */ + /** @type {(to: string, amount: bigint, memo: string, gasPrice: bigint, gasLimit: bigint) => Promise} */ export let execute; /** @type {(amount: number) => string} */ @@ -59,12 +61,18 @@ /** @type {string} */ let sendToAddress = ""; + /** @type {string} */ + let memo = ""; + /** @type {import("qr-scanner").default} */ let scanner; /** @type {import("..").ScanQR} */ let scanQrComponent; + /** @type {boolean} */ + let isMemoShown = false; + /** @type {boolean} */ let isNextButtonDisabled = false; @@ -153,6 +161,11 @@ type: addressInfo.type, }); } + + $: sendToAddressTextboxClasses = makeClassName({ + "operation__send-address": true, + "operation__send-address--invalid": sendToAddress && !addressInfo.isValid, + });
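The component's `execute` prop now takes the memo as a third argument, between the amount and the gas settings (see the updated JSDoc type above). A minimal sketch of a conforming callback, under the assumption that a w3sper.js `network`, `bookkeeper` and sender `profile` exist in the surrounding wallet code; the wiring is hypothetical, not part of this diff:

```js
// Hypothetical wiring for the Send component's updated `execute` prop:
// (to, amount, memo, gasPrice, gasLimit).
const execute = (to, amount, memo, gasPrice, gasLimit) =>
  network.execute(
    bookkeeper
      .as(profile)
      .transfer(amount)
      .to(to)
      .memo(memo) // the component's tests pass "" when the memo switch is off
      .gas({ price: gasPrice, limit: gasLimit })
  );
```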
[Send.svelte template hunks (@@ -176 / @@ -341): the element markup was stripped during extraction. The recoverable changes are: the "Address:", "Amount:" and "To:" labels lose their trailing colons; a "Memo (optional)" row is added with a Switch bound to `isMemoShown` (its handler clears `memo` when the switch is turned off) and, while the switch is on, a memo Textbox is rendered; the review step gains an `operation__review-memo` block, wrapped in `{#if memo} ... {/if}`, that displays the memo when one is set.]
@@ -355,7 +400,13 @@ @@ -382,7 +433,8 @@ flex-direction: column; gap: 1.2em; } - &__review-address { + &__review-address, + &__review-memo { background-color: transparent; border: 1px solid var(--primary-color); border-radius: 1.5em; @@ -436,7 +488,10 @@ font-weight: bold; } - :global(.dusk-textbox.operation__send-address) { + :global( + .dusk-textbox.operation__send-address, + .dusk-textbox.operation__send-memo + ) { resize: vertical; min-height: 5em; max-height: 10em; diff --git a/web-wallet/src/lib/components/__tests__/Send.spec.js b/web-wallet/src/lib/components/__tests__/Send.spec.js index abfb4b722e..ebd3cd462b 100644 --- a/web-wallet/src/lib/components/__tests__/Send.spec.js +++ b/web-wallet/src/lib/components/__tests__/Send.spec.js @@ -226,10 +226,16 @@ describe("Send", () => { target: { value: shieldedAddress }, }); await fireEvent.click(getByRole("button", { name: "Next" })); + await fireEvent.click(getByRole("switch")); const amountInput = getByRole("spinbutton"); + const memoInput = getByRole("textbox"); await fireEvent.input(amountInput, { target: { value: amount } }); + await fireEvent.input(memoInput, { + target: { value: "abc-example-memo" }, + }); + await fireEvent.click(getByRole("button", { name: "Next" })); const value = getAsHTMLElement( @@ -240,9 +246,11 @@ describe("Send", () => { container, ".operation__review-address span" ); + const memo = getAsHTMLElement(container, ".operation__review-memo span"); expect(value.textContent).toBe(baseProps.formatter(amount)); expect(key.textContent).toBe(shieldedAddress); + expect(memo.textContent).toBe("abc-example-memo"); expect(container.firstChild).toMatchSnapshot(); }); }); @@ -278,6 +286,46 @@ describe("Send", () => { expect(baseProps.execute).toHaveBeenCalledWith( shieldedAddress, duskToLux(amount), + "", + baseProps.gasSettings.gasPrice, + baseProps.gasSettings.gasLimit + ); + + const explorerLink = getByRole("link", { name: /explorer/i }); + + expect(getByText("Transaction created")).toBeInTheDocument(); + expect(explorerLink).toHaveAttribute("target", "_blank"); + expect(explorerLink).toHaveAttribute("href", expectedExplorerLink); + }); + + it("should perform a transfer for the desired amount, with a memo, give a success message and supply a link to see the transaction in the explorer", async () => { + const { getByRole, getByText } = render(Send, baseProps); + const addressInput = getByRole("textbox"); + + await fireEvent.input(addressInput, { + target: { value: shieldedAddress }, + }); + await fireEvent.click(getByRole("button", { name: "Next" })); + await fireEvent.click(getByRole("switch")); + + const amountInput = getByRole("spinbutton"); + const memoInput = getByRole("textbox"); + + await fireEvent.input(amountInput, { target: { value: amount } }); + await fireEvent.input(memoInput, { + target: { value: "abc-example-memo" }, + }); + + await fireEvent.click(getByRole("button", { name: "Next" })); + await fireEvent.click(getByRole("button", { name: "SEND" })); + + await vi.advanceTimersToNextTimerAsync(); + + expect(baseProps.execute).toHaveBeenCalledTimes(1); + expect(baseProps.execute).toHaveBeenCalledWith( + shieldedAddress, + duskToLux(amount), + "abc-example-memo", baseProps.gasSettings.gasPrice, baseProps.gasSettings.gasLimit ); @@ -313,6 +361,7 @@ describe("Send", () => { expect(baseProps.execute).toHaveBeenCalledWith( shieldedAddress, duskToLux(amount), + "", baseProps.gasSettings.gasPrice, baseProps.gasSettings.gasLimit ); @@ -342,6 +391,7 @@ describe("Send", () => { 
expect(baseProps.execute).toHaveBeenCalledWith( shieldedAddress, duskToLux(amount), + "", baseProps.gasSettings.gasPrice, baseProps.gasSettings.gasLimit ); diff --git a/web-wallet/src/lib/components/__tests__/__snapshots__/Send.spec.js.snap b/web-wallet/src/lib/components/__tests__/__snapshots__/Send.spec.js.snap index 7f1bdd182b..645f9db99f 100644 --- a/web-wallet/src/lib/components/__tests__/__snapshots__/Send.spec.js.snap +++ b/web-wallet/src/lib/components/__tests__/__snapshots__/Send.spec.js.snap @@ -2,7 +2,7 @@ exports[`Send > Address step > should display a warning if the address input is a public account 1`] = `
[Send.spec.js.snap hunk: the stored snapshot markup was stripped during extraction; the visible change is the "Address:" label becoming "Address" in the rendered output.]