// SPDX-License-Identifier: Apache-2.0
package com.hedera.node.app.hapi.utils.blocks;

import static java.util.Objects.requireNonNull;

import com.hedera.hapi.node.state.blockstream.MerkleLeaf;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

/**
 * Shared hashing helpers for Merkle path construction and verification.
 *
 * <p>This utility class provides methods for computing cryptographic hashes used in Merkle trees,
 * with domain separation prefixes to prevent collision attacks between different node types:
 * <ul>
 *     <li>Leaf nodes: prefixed with {@code 0x00}</li>
 *     <li>Single-child (internal) nodes: prefixed with {@code 0x01}</li>
 *     <li>Two-child (internal) nodes: prefixed with {@code 0x02}</li>
 * </ul>
 *
 * <p>All hashing uses SHA-384 for security and consistency with the broader Hedera ecosystem.
 */
final class HashUtils {

    private static final String HASH_ALGORITHM = "SHA-384";

    /** Length in bytes of a SHA-384 digest. */
    static final int HASH_SIZE_BYTES = 48;

    private HashUtils() {
        throw new UnsupportedOperationException("Utility class");
    }

    /**
     * Creates a new {@link MessageDigest} for the SHA-384 algorithm.
     *
     * @throws RuntimeException if no registered provider supplies SHA-384
     */
    static MessageDigest newMessageDigest() {
        try {
            return MessageDigest.getInstance(HASH_ALGORITHM);
        } catch (NoSuchAlgorithmException e) {
            throw new RuntimeException(HASH_ALGORITHM + " algorithm not found", e);
        }
    }

    /**
     * Computes a leaf-node hash: SHA-384 over the {@code 0x00} domain prefix followed by the
     * protobuf serialization of the leaf.
     */
    static byte[] computeLeafHash(final MessageDigest digest, final MerkleLeaf leaf) {
        requireNonNull(digest, "digest must not be null");
        requireNonNull(leaf, "leaf must not be null");
        digest.reset();
        digest.update((byte) 0x00);
        digest.update(MerkleLeaf.PROTOBUF.toBytes(leaf).toByteArray());
        return digest.digest();
    }

    /**
     * Computes a single-child internal-node hash: SHA-384 over the {@code 0x01} domain prefix
     * followed by the child hash.
     */
    static byte[] computeSingleChildHash(final MessageDigest digest, final byte[] childHash) {
        requireNonNull(digest, "digest must not be null");
        requireNonNull(childHash, "childHash must not be null");
        digest.reset();
        digest.update((byte) 0x01);
        digest.update(childHash);
        return digest.digest();
    }

    /**
     * Computes a two-child internal-node hash: SHA-384 over the {@code 0x02} domain prefix
     * followed by the left and right child hashes.
     */
    static byte[] joinHashes(final MessageDigest digest, final byte[] left, final byte[] right) {
        requireNonNull(digest, "digest must not be null");
        requireNonNull(left, "left must not be null");
        requireNonNull(right, "right must not be null");
        digest.reset();
        digest.update((byte) 0x02);
        digest.update(left);
        digest.update(right);
        return digest.digest();
    }
}
0 commit comments