Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
/*
* Copyright contributors to Hyperledger Besu.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.ethereum.trie.pathbased.bonsai.worldview;

import org.hyperledger.besu.ethereum.trie.MerkleTrie;
import org.hyperledger.besu.ethereum.trie.NoOpMerkleTrie;
import org.hyperledger.besu.ethereum.trie.NodeLoader;
import org.hyperledger.besu.ethereum.trie.pathbased.common.worldview.WorldStateConfig;
import org.hyperledger.besu.ethereum.trie.patricia.ParallelStoredMerklePatriciaTrie;
import org.hyperledger.besu.ethereum.trie.patricia.StoredMerklePatriciaTrie;

import java.util.Objects;
import java.util.function.Function;

import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.bytes.Bytes32;

/**
 * Creates Merkle trie instances based on the execution context. This centralizes the decision of
 * which trie implementation to use, keeping it out of the callers.
 *
 * <p>The abstraction is intentionally minimal: the class is public but its constructor and factory
 * method are package-private, so only this package can create or use it. It is introduced to
 * eliminate the overhead of {@link ParallelStoredMerklePatriciaTrie} in latency-sensitive paths
 * (like per-transaction {@code frontierRootHash()} calls) while preserving it for
 * throughput-oriented batch computation. The structure is designed so the policy can be elevated to
 * a broader scope in the future without changing callers.
 */
public class BonsaiTrieFactory {

  /**
   * Describes the execution profile under which a trie is being created. This is not a choice of
   * implementation — it is a declaration of latency/throughput intent that the factory maps to the
   * appropriate trie type.
   */
  enum TrieMode {
    /** May use parallel trie if the global config allows it. */
    PARALLELIZE_ALLOWED,

    /** Always uses sequential trie regardless of config. */
    ALWAYS_SEQUENTIAL
  }

  /** Global world-state configuration consulted on every {@link #create} call. */
  private final WorldStateConfig worldStateConfig;

  /**
   * Creates a factory bound to the given configuration.
   *
   * @param worldStateConfig the configuration driving trie selection; must not be null
   */
  BonsaiTrieFactory(final WorldStateConfig worldStateConfig) {
    this.worldStateConfig = Objects.requireNonNull(worldStateConfig, "worldStateConfig");
  }

  /**
   * Creates a Merkle trie appropriate for the given trieMode context.
   *
   * <p>Selection order: a disabled trie always wins and yields a no-op trie; otherwise the parallel
   * implementation is used only when the caller allows it AND the global config enables parallel
   * state-root computation; everything else falls back to the sequential stored trie.
   *
   * @param nodeLoader loader for trie nodes from storage
   * @param rootHash root hash to start from
   * @param trieMode the execution context declaring latency/throughput intent
   * @return a trie instance; never null
   */
  MerkleTrie<Bytes, Bytes> create(
      final NodeLoader nodeLoader, final Bytes32 rootHash, final TrieMode trieMode) {
    if (worldStateConfig.isTrieDisabled()) {
      return new NoOpMerkleTrie<>();
    }
    if (trieMode == TrieMode.PARALLELIZE_ALLOWED
        && worldStateConfig.isParallelStateRootComputationEnabled()) {
      return new ParallelStoredMerklePatriciaTrie<>(
          nodeLoader, rootHash, Function.identity(), Function.identity());
    }
    return new StoredMerklePatriciaTrie<>(
        nodeLoader, rootHash, Function.identity(), Function.identity());
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,9 @@
import org.hyperledger.besu.datatypes.Hash;
import org.hyperledger.besu.datatypes.StorageSlotKey;
import org.hyperledger.besu.ethereum.core.MutableWorldState;
import org.hyperledger.besu.ethereum.mainnet.staterootcommitter.StateRootCommitter;
import org.hyperledger.besu.ethereum.trie.MerkleTrie;
import org.hyperledger.besu.ethereum.trie.MerkleTrieException;
import org.hyperledger.besu.ethereum.trie.NoOpMerkleTrie;
import org.hyperledger.besu.ethereum.trie.NodeLoader;
import org.hyperledger.besu.ethereum.trie.pathbased.bonsai.BonsaiAccount;
import org.hyperledger.besu.ethereum.trie.pathbased.bonsai.BonsaiWorldStateProvider;
Expand All @@ -39,16 +39,14 @@
import org.hyperledger.besu.ethereum.trie.pathbased.common.worldview.WorldStateConfig;
import org.hyperledger.besu.ethereum.trie.pathbased.common.worldview.accumulator.PathBasedWorldStateUpdateAccumulator;
import org.hyperledger.besu.ethereum.trie.pathbased.common.worldview.accumulator.preload.StorageConsumingMap;
import org.hyperledger.besu.ethereum.trie.patricia.ParallelStoredMerklePatriciaTrie;
import org.hyperledger.besu.ethereum.trie.patricia.StoredMerklePatriciaTrie;
import org.hyperledger.besu.evm.account.Account;
import org.hyperledger.besu.evm.internal.EvmConfiguration;
import org.hyperledger.besu.plugin.data.BlockHeader;

import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Stream;

Expand All @@ -64,6 +62,8 @@ public class BonsaiWorldState extends PathBasedWorldState {

protected BonsaiCachedMerkleTrieLoader bonsaiCachedMerkleTrieLoader;
private final CodeCache codeCache;
private final BonsaiTrieFactory trieFactory;
private final FrontierRootHashTracker frontierRootHashTracker;

public BonsaiWorldState(
final BonsaiWorldStateProvider archive,
Expand Down Expand Up @@ -92,7 +92,7 @@ public BonsaiWorldState(
super(worldStateKeyValueStorage, cachedWorldStorageManager, trieLogManager, worldStateConfig);
this.bonsaiCachedMerkleTrieLoader = bonsaiCachedMerkleTrieLoader;
this.worldStateKeyValueStorage = worldStateKeyValueStorage;
this.setAccumulator(
final BonsaiWorldStateUpdateAccumulator acc =
new BonsaiWorldStateUpdateAccumulator(
this,
(addr, value) ->
Expand All @@ -102,7 +102,20 @@ public BonsaiWorldState(
this.bonsaiCachedMerkleTrieLoader.preLoadStorageSlot(
getWorldStateStorage(), addr, value),
evmConfiguration,
codeCache));
codeCache);
this.setAccumulator(acc);
this.trieFactory = new BonsaiTrieFactory(worldStateConfig);
this.frontierRootHashTracker =
new FrontierRootHashTracker(
acc,
rootHash ->
trieFactory.create(
(location, hash) ->
bonsaiCachedMerkleTrieLoader.getAccountStateTrieNode(
getWorldStateStorage(), location, hash),
rootHash,
BonsaiTrieFactory.TrieMode.ALWAYS_SEQUENTIAL),
(address, storageUpdates) -> updateFrontierStorageState(acc, address, storageUpdates));
this.codeCache = codeCache;
}

Expand Down Expand Up @@ -248,6 +261,19 @@ private void updateAccountStorageState(
final BonsaiWorldStateUpdateAccumulator worldStateUpdater,
final Map.Entry<Address, StorageConsumingMap<StorageSlotKey, PathBasedValue<UInt256>>>
storageAccountUpdate) {
updateAccountStorageState(
maybeStateUpdater,
worldStateUpdater,
storageAccountUpdate,
BonsaiTrieFactory.TrieMode.PARALLELIZE_ALLOWED);
}

private void updateAccountStorageState(
final Optional<BonsaiWorldStateKeyValueStorage.Updater> maybeStateUpdater,
final BonsaiWorldStateUpdateAccumulator worldStateUpdater,
final Map.Entry<Address, StorageConsumingMap<StorageSlotKey, PathBasedValue<UInt256>>>
storageAccountUpdate,
final BonsaiTrieFactory.TrieMode trieMode) {
final Address updatedAddress = storageAccountUpdate.getKey();
final Hash updatedAddressHash = updatedAddress.addressHash();
if (worldStateUpdater.getAccountsToUpdate().containsKey(updatedAddress)) {
Expand All @@ -260,11 +286,12 @@ private void updateAccountStorageState(
? Hash.EMPTY_TRIE_HASH
: accountOriginal.getStorageRoot();
final MerkleTrie<Bytes, Bytes> storageTrie =
createTrie(
trieFactory.create(
(location, key) ->
bonsaiCachedMerkleTrieLoader.getAccountStorageTrieNode(
getWorldStateStorage(), updatedAddressHash, location, key),
Bytes32.wrap(storageRoot.getBytes()));
Bytes32.wrap(storageRoot.getBytes()),
trieMode);

// for manicured tries and composting, collect branches here (not implemented)
for (final Map.Entry<StorageSlotKey, PathBasedValue<UInt256>> storageUpdate :
Expand Down Expand Up @@ -394,16 +421,15 @@ static Optional<Bytes32> incrementBytes32(final Bytes32 value) {
return incremented.isZero() ? Optional.empty() : Optional.of(incremented);
}

/**
 * Persists the world state for the given block, clearing the frontier root-hash tracker first so
 * state accumulated for the current block does not carry over into the next one.
 *
 * @param blockHeader the block whose state is being persisted
 * @param committer the strategy used to commit the state root
 */
@Override
public void persist(final BlockHeader blockHeader, final StateRootCommitter committer) {
// Reset before delegating: the tracker is per-block scoped.
// NOTE(review): exact reset semantics live in FrontierRootHashTracker — confirm there.
frontierRootHashTracker.reset();
super.persist(blockHeader, committer);
}

@Override
public Hash frontierRootHash() {
return calculateRootHash(
Optional.of(
new BonsaiWorldStateKeyValueStorage.Updater(
noOpSegmentedTx,
noOpTx,
worldStateKeyValueStorage.getFlatDbStrategy(),
worldStateKeyValueStorage.getComposedWorldStateStorage())),
accumulator.copy());
return frontierRootHashTracker.frontierRootHash(worldStateRootHash);
}

@Override
Expand All @@ -423,6 +449,17 @@ protected Optional<Bytes> getStorageTrieNode(
return getWorldStateStorage().getAccountStorageTrieNode(accountHash, location, nodeHash);
}

/**
 * Recomputes the storage state for one account on the frontier (latency-sensitive) path. Always
 * uses the sequential trie and no storage updater, delegating to the shared
 * {@code updateAccountStorageState} overload.
 *
 * @param accumulator the accumulator holding the pending account changes
 * @param address the account whose storage changed
 * @param storageUpdates the pending slot updates for that account
 */
private void updateFrontierStorageState(
    final BonsaiWorldStateUpdateAccumulator accumulator,
    final Address address,
    final StorageConsumingMap<StorageSlotKey, PathBasedValue<UInt256>> storageUpdates) {
  final Map.Entry<Address, StorageConsumingMap<StorageSlotKey, PathBasedValue<UInt256>>>
      accountEntry = Map.entry(address, storageUpdates);
  updateAccountStorageState(
      Optional.empty(), accumulator, accountEntry, BonsaiTrieFactory.TrieMode.ALWAYS_SEQUENTIAL);
}

private void writeStorageTrieNode(
final BonsaiWorldStateKeyValueStorage.Updater stateUpdater,
final Hash accountHash,
Expand Down Expand Up @@ -481,15 +518,7 @@ public void disableCacheMerkleTrieLoader() {
}

private MerkleTrie<Bytes, Bytes> createTrie(final NodeLoader nodeLoader, final Bytes32 rootHash) {
if (worldStateConfig.isTrieDisabled()) {
return new NoOpMerkleTrie<>();
}
if (worldStateConfig.isParallelStateRootComputationEnabled()) {
return new ParallelStoredMerklePatriciaTrie<>(
nodeLoader, rootHash, Function.identity(), Function.identity());
}
return new StoredMerklePatriciaTrie<>(
nodeLoader, rootHash, Function.identity(), Function.identity());
return trieFactory.create(nodeLoader, rootHash, BonsaiTrieFactory.TrieMode.PARALLELIZE_ALLOWED);
}

protected Hash hashAndSavePreImage(final Bytes value) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,9 +28,13 @@
import org.hyperledger.besu.evm.internal.EvmConfiguration;
import org.hyperledger.besu.evm.worldstate.UpdateTrackingAccount;

import java.util.HashSet;
import java.util.Set;

public class BonsaiWorldStateUpdateAccumulator
extends PathBasedWorldStateUpdateAccumulator<BonsaiAccount> {
private final CodeCache codeCache;
private final Set<Address> frontierDirtyAddresses = new HashSet<>();

public BonsaiWorldStateUpdateAccumulator(
final PathBasedWorldView world,
Expand All @@ -43,17 +47,16 @@ public BonsaiWorldStateUpdateAccumulator(
this.codeCache = codeCache;
}

/**
 * Copy constructor: copies the base accumulator state via {@code super(source)}, shares the same
 * {@link CodeCache} reference (not a copy), and duplicates the source's set of frontier-dirty
 * addresses.
 *
 * @param source the accumulator to copy
 */
protected BonsaiWorldStateUpdateAccumulator(final BonsaiWorldStateUpdateAccumulator source) {
super(source);
this.codeCache = source.codeCache;
this.frontierDirtyAddresses.addAll(source.frontierDirtyAddresses);
}

@Override
public PathBasedWorldStateUpdateAccumulator<BonsaiAccount> copy() {
final BonsaiWorldStateUpdateAccumulator copy =
new BonsaiWorldStateUpdateAccumulator(
wrappedWorldView(),
getAccountPreloader(),
getStoragePreloader(),
getEvmConfiguration(),
codeCache);
copy.cloneFromUpdater(this);
return copy;
public BonsaiWorldStateUpdateAccumulator copy() {
return new BonsaiWorldStateUpdateAccumulator(this);
}

@Override
Expand Down Expand Up @@ -102,6 +105,27 @@ protected void assertCloseEnoughForDiffing(
BonsaiAccount.assertCloseEnoughForDiffing(source, account, context);
}

/**
 * Commits pending changes, then records the address of every deleted and updated account in the
 * frontier dirty-address set — presumably so the incremental frontier root computation knows which
 * accounts to revisit (consumed via {@code getFrontierDirtyAddresses}; confirm with callers).
 */
@Override
public void commit() {
super.commit();
// Addresses accumulate across commits until clearFrontierDirtyAddresses(...) or reset().
getDeletedAccountAddresses().forEach(frontierDirtyAddresses::add);
getUpdatedAccounts().forEach(account -> frontierDirtyAddresses.add(account.getAddress()));
}

/**
 * Returns a defensive snapshot of the addresses dirtied since the set was last cleared. Mutating
 * the returned set does not affect this accumulator.
 *
 * @return a new mutable set containing the current frontier-dirty addresses
 */
public Set<Address> getFrontierDirtyAddresses() {
  final Set<Address> snapshot = new HashSet<>(frontierDirtyAddresses);
  return snapshot;
}

/**
 * Forgets the given addresses from the frontier dirty-address set, typically after their pending
 * changes have been processed.
 *
 * @param processed the addresses to stop tracking as dirty
 */
public void clearFrontierDirtyAddresses(final Set<Address> processed) {
  frontierDirtyAddresses.removeIf(processed::contains);
}

/** Resets all accumulated state, including the frontier dirty-address set. */
@Override
public void reset() {
super.reset();
frontierDirtyAddresses.clear();
}

@Override
public CodeCache codeCache() {
return codeCache;
Expand Down
Loading
Loading