diff --git a/src/lean_spec/subspecs/ssz/constants.py b/src/lean_spec/subspecs/ssz/constants.py index 7cdc851fe..0540a0398 100644 --- a/src/lean_spec/subspecs/ssz/constants.py +++ b/src/lean_spec/subspecs/ssz/constants.py @@ -3,7 +3,10 @@ from lean_spec.types.byte_arrays import Bytes32 BYTES_PER_CHUNK: int = 32 -"""The number of bytes in a Merkle tree chunk.""" +"""Number of bytes per Merkle chunk.""" + +BITS_PER_BYTE: int = 8 +"""Number of bits per byte.""" ZERO_HASH: Bytes32 = Bytes32(b"\x00" * BYTES_PER_CHUNK) -"""A zero hash, used for padding in the Merkle tree.""" +"""A zero hash, used for padding in Merkleization.""" diff --git a/src/lean_spec/subspecs/ssz/hash.py b/src/lean_spec/subspecs/ssz/hash.py new file mode 100644 index 000000000..ce56be1ed --- /dev/null +++ b/src/lean_spec/subspecs/ssz/hash.py @@ -0,0 +1,158 @@ +""" +SSZ Merkleization entry point (`hash_tree_root`). + +This module exposes: +- A `hash_tree_root(value: object) -> Bytes32` singledispatch function. +- A tiny facade `HashTreeRoot.compute(value)` if you prefer a class entrypoint. +""" + +from __future__ import annotations + +from functools import singledispatch +from math import ceil +from typing import Final, Type + +from lean_spec.subspecs.ssz.constants import BYTES_PER_CHUNK +from lean_spec.types.bitfields import Bitlist, Bitvector +from lean_spec.types.boolean import Boolean +from lean_spec.types.byte_arrays import ByteListBase, Bytes32, ByteVectorBase +from lean_spec.types.collections import ( + List, + Vector, +) +from lean_spec.types.container import Container +from lean_spec.types.uint import BaseUint +from lean_spec.types.union import Union + +from .merkleization import Merkle +from .pack import Packer + + +@singledispatch +def hash_tree_root(value: object) -> Bytes32: + """ + Compute `hash_tree_root(value)` for SSZ values. + + Concrete specializations are registered below with `@hash_tree_root.register(Type)`. + + Raises: + TypeError: If `value` has no registered specialization. 
+ """ + raise TypeError(f"hash_tree_root: unsupported value type {type(value).__name__}") + + +class HashTreeRoot: + """OO facade around `hash_tree_root`.""" + + @staticmethod + def compute(value: object) -> Bytes32: + """Delegate to the singledispatch implementation.""" + return hash_tree_root(value) + + +@hash_tree_root.register +def _htr_uint(value: BaseUint) -> Bytes32: + """Basic scalars merkleize as `merkleize(pack(bytes))`.""" + return Merkle.merkleize(Packer.pack_bytes(value.encode_bytes())) + + +@hash_tree_root.register +def _htr_boolean(value: Boolean) -> Bytes32: + return Merkle.merkleize(Packer.pack_bytes(value.encode_bytes())) + + +@hash_tree_root.register +def _htr_bytes(value: bytes) -> Bytes32: + """Treat raw bytes like ByteVector[N].""" + return Merkle.merkleize(Packer.pack_bytes(value)) + + +@hash_tree_root.register +def _htr_bytearray(value: bytearray) -> Bytes32: + return Merkle.merkleize(Packer.pack_bytes(bytes(value))) + + +@hash_tree_root.register +def _htr_memoryview(value: memoryview) -> Bytes32: + data: Final[bytes] = value.tobytes() + return Merkle.merkleize(Packer.pack_bytes(data)) + + +@hash_tree_root.register +def _htr_bytevector(value: ByteVectorBase) -> Bytes32: + return Merkle.merkleize(Packer.pack_bytes(value.encode_bytes())) + + +@hash_tree_root.register +def _htr_bytelist(value: ByteListBase) -> Bytes32: + data = value.encode_bytes() + limit_chunks = ceil(type(value).LIMIT / BYTES_PER_CHUNK) + root = Merkle.merkleize(Packer.pack_bytes(data), limit=limit_chunks) + return Merkle.mix_in_length(root, len(data)) + + +@hash_tree_root.register +def _htr_bitvector(value: Bitvector) -> Bytes32: + nbits = type(value).LENGTH + limit = (nbits + 255) // 256 + chunks = Packer.pack_bits(tuple(bool(b) for b in value)) + return Merkle.merkleize(chunks, limit=limit) + + +@hash_tree_root.register +def _htr_bitlist(value: Bitlist) -> Bytes32: + limit = (type(value).LIMIT + 255) // 256 + chunks = Packer.pack_bits(tuple(bool(b) for b in value)) + root 
= Merkle.merkleize(chunks, limit=limit) + return Merkle.mix_in_length(root, len(value)) + + +@hash_tree_root.register +def _htr_vector(value: Vector) -> Bytes32: + elem_t: Type[object] = type(value).ELEMENT_TYPE + length: int = type(value).LENGTH + + # BASIC elements (uint/boolean): pack serialized bytes + if issubclass(elem_t, (BaseUint, Boolean)): + elem_size = elem_t.get_byte_length() if issubclass(elem_t, BaseUint) else 1 + concat = b"".join(e.encode_bytes() for e in value) + limit_chunks = (length * elem_size + (BYTES_PER_CHUNK - 1)) // BYTES_PER_CHUNK + return Merkle.merkleize(Packer.pack_bytes(concat), limit=limit_chunks) + + # COMPOSITE elements: merkleize child roots with limit = length + leaves = [hash_tree_root(e) for e in value] + return Merkle.merkleize(leaves, limit=length) + + +@hash_tree_root.register +def _htr_list(value: List) -> Bytes32: + elem_t: Type[object] = type(value).ELEMENT_TYPE + limit: int = type(value).LIMIT + + # BASIC elements + if issubclass(elem_t, (BaseUint, Boolean)): + elem_size = elem_t.get_byte_length() if issubclass(elem_t, BaseUint) else 1 + concat = b"".join(e.encode_bytes() for e in value) + limit_chunks = (limit * elem_size + (BYTES_PER_CHUNK - 1)) // BYTES_PER_CHUNK + root = Merkle.merkleize(Packer.pack_bytes(concat), limit=limit_chunks) + return Merkle.mix_in_length(root, len(value)) + + # COMPOSITE elements + leaves = [hash_tree_root(e) for e in value] + root = Merkle.merkleize(leaves, limit=limit) + return Merkle.mix_in_length(root, len(value)) + + +@hash_tree_root.register +def _htr_container(value: Container) -> Bytes32: + # Preserve declared field order from the Pydantic model. 
+ leaves = [hash_tree_root(getattr(value, fname)) for fname in type(value).model_fields.keys()] + return Merkle.merkleize(leaves) + + +@hash_tree_root.register +def _htr_union(value: Union) -> Bytes32: + sel = value.selector() + if value.selected_type() is None: + return Merkle.mix_in_selector(Bytes32(b"\x00" * 32), 0) + return Merkle.mix_in_selector(hash_tree_root(value.value()), sel) diff --git a/src/lean_spec/subspecs/ssz/merkle/__init__.py b/src/lean_spec/subspecs/ssz/merkle_proof/__init__.py similarity index 100% rename from src/lean_spec/subspecs/ssz/merkle/__init__.py rename to src/lean_spec/subspecs/ssz/merkle_proof/__init__.py diff --git a/src/lean_spec/subspecs/ssz/gindex.py b/src/lean_spec/subspecs/ssz/merkle_proof/gindex.py similarity index 100% rename from src/lean_spec/subspecs/ssz/gindex.py rename to src/lean_spec/subspecs/ssz/merkle_proof/gindex.py diff --git a/src/lean_spec/subspecs/ssz/merkle/proof.py b/src/lean_spec/subspecs/ssz/merkle_proof/proof.py similarity index 99% rename from src/lean_spec/subspecs/ssz/merkle/proof.py rename to src/lean_spec/subspecs/ssz/merkle_proof/proof.py index 763c4335a..8ba5b628e 100644 --- a/src/lean_spec/subspecs/ssz/merkle/proof.py +++ b/src/lean_spec/subspecs/ssz/merkle_proof/proof.py @@ -10,8 +10,8 @@ from lean_spec.types.byte_arrays import Bytes32 from ..constants import ZERO_HASH -from ..gindex import GeneralizedIndex from ..utils import hash_nodes +from .gindex import GeneralizedIndex Root = Bytes32 """The type of a Merkle tree root.""" diff --git a/src/lean_spec/subspecs/ssz/merkle/tree.py b/src/lean_spec/subspecs/ssz/merkle_proof/tree.py similarity index 100% rename from src/lean_spec/subspecs/ssz/merkle/tree.py rename to src/lean_spec/subspecs/ssz/merkle_proof/tree.py diff --git a/src/lean_spec/subspecs/ssz/merkleization.py b/src/lean_spec/subspecs/ssz/merkleization.py new file mode 100644 index 000000000..58cbe116c --- /dev/null +++ b/src/lean_spec/subspecs/ssz/merkleization.py @@ -0,0 +1,118 @@ 
+"""Merkleization utilities per SSZ.""" + +from __future__ import annotations + +from typing import List, Optional, Sequence + +from lean_spec.subspecs.ssz.constants import ZERO_HASH +from lean_spec.subspecs.ssz.utils import get_power_of_two_ceil, hash_nodes +from lean_spec.types.byte_arrays import Bytes32 + + +class Merkle: + """Static Merkle helpers for SSZ.""" + + @staticmethod + def merkleize(chunks: Sequence[Bytes32], limit: Optional[int] = None) -> Bytes32: + """Compute the Merkle root of `chunks`. + + Behavior + -------- + - If `limit` is None: pad to next power of two of len(chunks). + - If `limit` is provided and >= len(chunks): pad to next power of two of `limit`. + - If `limit` < len(chunks): raise (exceeds limit). + - If no chunks: return ZERO_HASH. + *Exception when `limit` is provided:* return the zero-subtree root for the padded width. + + This matches the SSZ spec's padding/limiting rules. + """ + n = len(chunks) + if n == 0: + # If a limit is provided, the tree width is determined by that limit, + # and the root must be the zero-subtree root of that width. + if limit is not None: + width = get_power_of_two_ceil(limit) + return Merkle._zero_tree_root(width) + return ZERO_HASH + + # Determine the width of the bottom layer after padding/limiting. + if limit is None: + width = get_power_of_two_ceil(n) + else: + if limit < n: + raise ValueError("merkleize: input exceeds limit") + width = get_power_of_two_ceil(limit) + + # Width of 1: the single chunk is the root. + if width == 1: + return chunks[0] + + # Start with the leaf layer: provided chunks + ZERO padding. + level: List[Bytes32] = list(chunks) + [ZERO_HASH] * (width - n) + + # Reduce bottom-up: pairwise hash until a single root remains. 
+ while len(level) > 1: + nxt: List[Bytes32] = [] + it = iter(level) + for a in it: + b = next(it, ZERO_HASH) # Safe: even-length implied by padding + nxt.append(hash_nodes(a, b)) + level = nxt + return level[0] + + @staticmethod + def merkleize_progressive(chunks: Sequence[Bytes32], num_leaves: int = 1) -> Bytes32: + """Progressive Merkleization (per spec). + + Rare in practice; provided for completeness. Splits on `num_leaves`: + - right: merkleize the first up-to-`num_leaves` chunks using a fixed-width tree + - left: recurse on the remaining chunks, quadrupling the right's width at each step + """ + if len(chunks) == 0: + return ZERO_HASH + + # Right branch: fixed-width merkleization of the first `num_leaves` chunks. + right = Merkle.merkleize(chunks[:num_leaves], num_leaves) + + # Left branch: recursively collapse everything beyond `num_leaves`. + left = ( + Merkle.merkleize_progressive(chunks[num_leaves:], num_leaves * 4) + if len(chunks) > num_leaves + else ZERO_HASH + ) + + # Combine branches. + return hash_nodes(left, right) + + @staticmethod + def mix_in_length(root: Bytes32, length: int) -> Bytes32: + """Mix the length (as uint256 little-endian) into a Merkle root.""" + if length < 0: + raise ValueError("length must be non-negative") + # The "mix" is `hash(root + length_uint256_le)`. + le = length.to_bytes(32, "little") + return hash_nodes(root, Bytes32(le)) + + @staticmethod + def mix_in_selector(root: Bytes32, selector: int) -> Bytes32: + """Mix the union selector (as uint256 little-endian) into a Merkle root.""" + if selector < 0: + raise ValueError("selector must be non-negative") + le = selector.to_bytes(32, "little") + return hash_nodes(root, Bytes32(le)) + + @staticmethod + def _zero_tree_root(width_pow2: int) -> Bytes32: + """ + Return the Merkle root of a full zero tree with `width_pow2` leaves. + + Power of two >= 1. 
+ """ + if width_pow2 <= 1: + return ZERO_HASH + h = ZERO_HASH + w = width_pow2 + while w > 1: + h = hash_nodes(h, h) + w //= 2 + return h diff --git a/src/lean_spec/subspecs/ssz/pack.py b/src/lean_spec/subspecs/ssz/pack.py new file mode 100644 index 000000000..f35aea017 --- /dev/null +++ b/src/lean_spec/subspecs/ssz/pack.py @@ -0,0 +1,96 @@ +"""Packing helpers for SSZ Merkleization. + +These helpers convert existing *serialized* data into 32-byte chunks (Bytes32). +They do not serialize objects themselves; they only arrange bytes into chunks +as required by the SSZ Merkleization rules. + +Design notes +------------ +- We keep these helpers in a dedicated class (`Packer`) to make call sites explicit + and discoverable (e.g., `Packer.pack_bytes(...)`), while remaining purely static. +- All functions return `list[Bytes32]`, the canonical chunk form fed into `merkleize`. +""" + +from __future__ import annotations + +from typing import Iterable, List, Sequence + +from lean_spec.subspecs.ssz.constants import BITS_PER_BYTE, BYTES_PER_CHUNK +from lean_spec.types.byte_arrays import Bytes32 + + +class Packer: + """Collection of static helpers to pack byte data into 32-byte chunks.""" + + @staticmethod + def _right_pad_to_chunk(b: bytes) -> bytes: + """Right-pad `b` with zeros up to a multiple of BYTES_PER_CHUNK. + + SSZ Merkleization packs serialized basic values into 32-byte "chunks". + When `b` is not already chunk-aligned, we append zero bytes. + """ + # Already aligned? Return as-is. + if len(b) % BYTES_PER_CHUNK == 0: + return b + # Compute the minimal pad size to reach the next multiple of 32. + pad = BYTES_PER_CHUNK - (len(b) % BYTES_PER_CHUNK) + return b + b"\x00" * pad + + @staticmethod + def _partition_chunks(b: bytes) -> List[Bytes32]: + """Partition an already-aligned byte-string into 32-byte chunks. + + Precondition: `len(b)` must be a multiple of 32. 
+ """ + if len(b) == 0: + return [] + if len(b) % BYTES_PER_CHUNK != 0: + raise ValueError("partition requires a multiple of BYTES_PER_CHUNK") + # Slice in steps of 32 to build Bytes32 chunks. + return [Bytes32(b[i : i + BYTES_PER_CHUNK]) for i in range(0, len(b), BYTES_PER_CHUNK)] + + @staticmethod + def pack_basic_serialized(serialized_basic_values: Iterable[bytes]) -> List[Bytes32]: + """Pack *serialized* basic values (e.g. uintN/boolean/byte) into chunks. + + Parameters + ---------- + serialized_basic_values: + Iterable of bytes objects; each element is already the SSZ-serialized + form of a basic value. + + Returns: + ------- + list[Bytes32] + Concatenated and right-padded chunks ready for Merkleization. + """ + # Concatenate the serialized representations of individual basic values. + joined = b"".join(serialized_basic_values) + # Right-pad, then partition into 32-byte slices. + return Packer._partition_chunks(Packer._right_pad_to_chunk(joined)) + + @staticmethod + def pack_bytes(data: bytes) -> List[Bytes32]: + """Pack raw bytes (e.g. ByteVector/ByteList content) into 32-byte chunks.""" + return Packer._partition_chunks(Packer._right_pad_to_chunk(data)) + + @staticmethod + def pack_bits(bools: Sequence[bool]) -> List[Bytes32]: + """Pack a boolean sequence into a bitfield, then into 32-byte chunks. + + Notes: + ----- + - This does **not** add the Bitlist length-delimiter bit. Callers implementing + Bitlist should add it separately or mix the list length at the Merkle level. + - Bit ordering follows SSZ (little-endian within each byte). + """ + if not bools: + return [] + # Pack 8 bools per byte (round up). + byte_len = (len(bools) + (BITS_PER_BYTE - 1)) // BITS_PER_BYTE + arr = bytearray(byte_len) + for i, bit in enumerate(bools): + if bit: + # Set the (i % 8)-th bit of the (i // 8)-th byte. 
+ arr[i // BITS_PER_BYTE] |= 1 << (i % BITS_PER_BYTE) + return Packer._partition_chunks(Packer._right_pad_to_chunk(bytes(arr))) diff --git a/src/lean_spec/types/byte_arrays.py b/src/lean_spec/types/byte_arrays.py index b2adbf4d9..6dae49e98 100644 --- a/src/lean_spec/types/byte_arrays.py +++ b/src/lean_spec/types/byte_arrays.py @@ -43,7 +43,7 @@ def _coerce_to_bytes(value: Any) -> bytes: return bytes(value) -class _ByteVectorBase(SSZType): +class ByteVectorBase(SSZType): """ Base class for specialized `ByteVector[N]`. @@ -88,7 +88,7 @@ def serialize(self, stream: IO[bytes]) -> int: return len(self._b) @classmethod - def deserialize(cls, stream: IO[bytes], scope: int) -> _ByteVectorBase: + def deserialize(cls, stream: IO[bytes], scope: int) -> ByteVectorBase: """ Read exactly `scope` bytes from `stream` and build an instance. @@ -112,7 +112,7 @@ def encode_bytes(self) -> bytes: return self._b @classmethod - def decode_bytes(cls, data: bytes) -> _ByteVectorBase: + def decode_bytes(cls, data: bytes) -> ByteVectorBase: """ Parse `data` as a value of this type. 
@@ -125,35 +125,44 @@ def decode_bytes(cls, data: bytes) -> _ByteVectorBase: return cls(data) def __len__(self) -> int: + """Return the length of the byte vector.""" return self.LENGTH def __iter__(self) -> Iterator[int]: + """Return an iterator over the byte vector.""" return iter(self._b) def __bytes__(self) -> bytes: + """Return the byte vector as a bytes object.""" return self._b def __add__(self, other: Any) -> bytes: + """Return the concatenation of the byte vector and the argument.""" if isinstance(other, (bytes, bytearray)): return self._b + bytes(other) return self._b + bytes(other) def __radd__(self, other: Any) -> bytes: + """Return the concatenation of the argument and the byte vector.""" if isinstance(other, (bytes, bytearray)): return bytes(other) + self._b return bytes(other) + self._b def __getitem__(self, i: int) -> int: + """Return the i-th byte of the byte vector.""" return self._b[i] def __repr__(self) -> str: + """Return a string representation of the byte vector.""" tname = type(self).__name__ return f"{tname}({self._b.hex()})" def __eq__(self, other: object) -> bool: + """Return whether the two byte vectors are equal.""" return isinstance(other, type(self)) and self._b == other._b def __hash__(self) -> int: + """Return the hash of the byte vector.""" return hash((type(self), self._b)) def hex(self) -> str: @@ -161,6 +170,7 @@ def hex(self) -> str: return self._b.hex() def __lt__(self, other: object) -> bool: + """Return whether the byte vector is less than the other byte vector.""" if not isinstance(other, type(self)): return NotImplemented return self._b < other._b @@ -178,12 +188,12 @@ def __get_pydantic_core_schema__( Serialize to raw `bytes`. 
""" - def validator(v: Any) -> _ByteVectorBase: + def validator(v: Any) -> ByteVectorBase: if isinstance(v, cls): return v return cls(v) - def serializer(x: _ByteVectorBase) -> bytes: + def serializer(x: ByteVectorBase) -> bytes: return x.encode_bytes() return core_schema.union_schema( @@ -203,10 +213,10 @@ class ByteVector(SSZType): Bytes32 = ByteVector[32] """ - _CACHE: ClassVar[dict[int, Type[_ByteVectorBase]]] = {} + _CACHE: ClassVar[dict[int, Type[ByteVectorBase]]] = {} @classmethod - def __class_getitem__(cls, length: int) -> Type[_ByteVectorBase]: + def __class_getitem__(cls, length: int) -> Type[ByteVectorBase]: """ Specialize the factory into a concrete ``ByteVector[length]`` class. @@ -214,7 +224,7 @@ def __class_getitem__(cls, length: int) -> Type[_ByteVectorBase]: length: Exact number of bytes the specialized type must contain (N ≥ 0). Returns: - A new subclass of ``_ByteVectorBase`` whose ``LENGTH`` is ``length``. + A new subclass of ``ByteVectorBase`` whose ``LENGTH`` is ``length``. Raises: TypeError: If ``length`` is not a non-negative integer. 
@@ -225,50 +235,50 @@ def __class_getitem__(cls, length: int) -> Type[_ByteVectorBase]: if cached is not None: return cached name = f"ByteVector[{length}]" - bases = (_ByteVectorBase,) + bases = (ByteVectorBase,) attrs = {"LENGTH": length, "__module__": cls.__module__} typ = type(name, bases, attrs) cls._CACHE[length] = typ return typ -class Bytes1(_ByteVectorBase): +class Bytes1(ByteVectorBase): """Fixed-size byte vector of exactly 1 byte.""" LENGTH = 1 -class Bytes4(_ByteVectorBase): +class Bytes4(ByteVectorBase): """Fixed-size byte vector of exactly 4 bytes.""" LENGTH = 4 -class Bytes8(_ByteVectorBase): +class Bytes8(ByteVectorBase): """Fixed-size byte vector of exactly 8 bytes.""" LENGTH = 8 -class Bytes32(_ByteVectorBase): +class Bytes32(ByteVectorBase): """Fixed-size byte vector of exactly 32 bytes.""" LENGTH = 32 -class Bytes48(_ByteVectorBase): +class Bytes48(ByteVectorBase): """Fixed-size byte vector of exactly 48 bytes.""" LENGTH = 48 -class Bytes96(_ByteVectorBase): +class Bytes96(ByteVectorBase): """Fixed-size byte vector of exactly 96 bytes.""" LENGTH = 96 -class _ByteListBase(SSZType): +class ByteListBase(SSZType): """ Base class for specialized `ByteList[L]`. @@ -312,7 +322,7 @@ def serialize(self, stream: IO[bytes]) -> int: return len(self._b) @classmethod - def deserialize(cls, stream: IO[bytes], scope: int) -> _ByteListBase: + def deserialize(cls, stream: IO[bytes], scope: int) -> ByteListBase: """ Read exactly `scope` bytes from `stream` and build an instance. @@ -337,7 +347,7 @@ def encode_bytes(self) -> bytes: return self._b @classmethod - def decode_bytes(cls, data: bytes) -> _ByteListBase: + def decode_bytes(cls, data: bytes) -> ByteListBase: """ Parse `data` as a value of this type. 
@@ -348,35 +358,44 @@ def decode_bytes(cls, data: bytes) -> _ByteListBase: return cls(data) def __len__(self) -> int: + """Return the length of the byte list.""" return len(self._b) def __iter__(self) -> Iterator[int]: + """Return an iterator over the byte list.""" return iter(self._b) def __bytes__(self) -> bytes: + """Return the byte list as a bytes object.""" return self._b def __add__(self, other: Any) -> bytes: + """Return the concatenation of the byte list and the argument.""" if isinstance(other, (bytes, bytearray)): return self._b + bytes(other) return self._b + bytes(other) def __radd__(self, other: Any) -> bytes: + """Return the concatenation of the argument and the byte list.""" if isinstance(other, (bytes, bytearray)): return bytes(other) + self._b return bytes(other) + self._b def __getitem__(self, i: int) -> int: + """Return the i-th byte of the byte list.""" return self._b[i] def __repr__(self) -> str: + """Return a string representation of the byte list.""" tname = type(self).__name__ return f"{tname}({self._b.hex()})" def __eq__(self, other: object) -> bool: + """Return whether the two byte lists are equal.""" return isinstance(other, type(self)) and self._b == other._b def __hash__(self) -> int: + """Return the hash of the byte list.""" return hash((type(self), self._b)) def hex(self) -> str: @@ -396,12 +415,12 @@ def __get_pydantic_core_schema__( Serialize to raw `bytes`. 
""" - def validator(v: Any) -> _ByteListBase: + def validator(v: Any) -> ByteListBase: if isinstance(v, cls): return v return cls(v) - def serializer(x: _ByteListBase) -> bytes: + def serializer(x: ByteListBase) -> bytes: return x.encode_bytes() return core_schema.union_schema( @@ -421,10 +440,10 @@ class ByteList(SSZType): Payload = ByteList[2048] """ - _CACHE: ClassVar[dict[int, Type[_ByteListBase]]] = {} + _CACHE: ClassVar[dict[int, Type[ByteListBase]]] = {} @classmethod - def __class_getitem__(cls, limit: int) -> Type[_ByteListBase]: + def __class_getitem__(cls, limit: int) -> Type[ByteListBase]: """ Specialize the factory into a concrete ``ByteList[limit]`` class. @@ -432,7 +451,7 @@ def __class_getitem__(cls, limit: int) -> Type[_ByteListBase]: limit: Maximum number of bytes instances may contain (L ≥ 0). Returns: - A new subclass of ``_ByteListBase`` whose ``LIMIT`` is ``limit``. + A new subclass of ``ByteListBase`` whose ``LIMIT`` is ``limit``. Raises: TypeError: If ``limit`` is not a non-negative integer. 
@@ -443,7 +462,7 @@ def __class_getitem__(cls, limit: int) -> Type[_ByteListBase]: if cached is not None: return cached name = f"ByteList[{limit}]" - bases = (_ByteListBase,) + bases = (ByteListBase,) attrs = {"LIMIT": limit, "__module__": cls.__module__} typ = type(name, bases, attrs) cls._CACHE[limit] = typ diff --git a/tests/lean_spec/subspecs/ssz/test_gindex.py b/tests/lean_spec/subspecs/ssz/test_gindex.py index bca1a3f59..a73fa8858 100644 --- a/tests/lean_spec/subspecs/ssz/test_gindex.py +++ b/tests/lean_spec/subspecs/ssz/test_gindex.py @@ -4,7 +4,7 @@ from pydantic import ValidationError from typing_extensions import Any -from lean_spec.subspecs.ssz.gindex import GeneralizedIndex +from lean_spec.subspecs.ssz.merkle_proof.gindex import GeneralizedIndex def test_pydantic_validation_accepts_valid_int() -> None: diff --git a/tests/lean_spec/subspecs/ssz/test_hash.py b/tests/lean_spec/subspecs/ssz/test_hash.py new file mode 100644 index 000000000..cca21ef40 --- /dev/null +++ b/tests/lean_spec/subspecs/ssz/test_hash.py @@ -0,0 +1,701 @@ +""" "SSZ hash tree root tests.""" + +from __future__ import annotations + +from hashlib import sha256 +from typing import Iterable, Tuple, Type + +import pytest + +from lean_spec.subspecs.ssz.hash import HashTreeRoot, hash_tree_root +from lean_spec.types.bitfields import Bitlist, Bitvector +from lean_spec.types.boolean import Boolean +from lean_spec.types.byte import Byte +from lean_spec.types.byte_arrays import ByteList, ByteVector +from lean_spec.types.collections import List, Vector +from lean_spec.types.container import Container +from lean_spec.types.uint import ( + BaseUint, + Uint8, + Uint16, + Uint32, + Uint64, + Uint128, + Uint256, +) +from lean_spec.types.union import Union + + +def _le_hex(value: int, byte_len: int) -> str: + """ + Converts an integer to a little-endian hexadecimal string of a fixed length. + + Args: + value: The integer to convert. 
+ byte_len: The exact number of bytes the output hex string should represent. + + Returns: + A hexadecimal string representation of the integer. + """ + return value.to_bytes(byte_len, "little").hex() + + +def chunk(payload_hex: str) -> str: + """ + Pads or truncates a hexadecimal string to form a 32-byte chunk. + + According to SSZ, data is processed in 32-byte chunks. This function + ensures that any given hex string is correctly formatted as a 32-byte + (64-character) hex string by right-padding with '00' or truncating if necessary. + + Args: + payload_hex: The input hexadecimal string. + + Returns: + A 64-character hexadecimal string representing a 32-byte chunk. + """ + return (payload_hex + ("00" * 32))[:64] + + +def h(a_hex: str, b_hex: str) -> str: + """ + Computes the SHA-256 hash of the concatenation of two 32-byte hex chunks. + + This is the core Merkle tree hashing operation, combining two child nodes + to form a parent node. + + Args: + a_hex: The left 32-byte chunk as a hex string. + b_hex: The right 32-byte chunk as a hex string. + + Returns: + The resulting 32-byte SHA-256 hash as a hex string. + """ + a = bytes.fromhex(a_hex) + b = bytes.fromhex(b_hex) + return sha256(a + b).hexdigest() + + +# Precompute the "zero hashes" used for padding Merkle trees. +# +# ZERO_HASHES[i] is the hash of two ZERO_HASHES[i-1] nodes, forming a +# balanced Merkle subtree of zero chunks at increasing depths. +ZERO_HASHES = [chunk("")] +for _ in range(1, 32): + ZERO_HASHES.append(h(ZERO_HASHES[-1], ZERO_HASHES[-1])) + + +def merge(leaf: str, branch: Iterable[str]) -> str: + """ + Merges a leaf with a branch of nodes in a Merkle tree structure. + + This helper simulates the process of hashing a leaf up a series of parent + nodes in a Merkle proof, combining the current hash with a sibling node at + each level. + + Args: + leaf: The initial leaf hash as a hex string. + branch: An iterable of sibling node hashes to merge with. 
+ + Returns: + The final root hash after merging the leaf all the way up the branch. + """ + out = leaf + for b in branch: + out = h(out, b) + return out + + +def _chunk_hex(payload_hex: str) -> str: + """ + Alias for the `chunk` function for semantic clarity in tests. + """ + return chunk(payload_hex) + + +@pytest.mark.parametrize( + "uint_type,value_int,le_hex", + [ + # uint8 + (Uint8, 0x00, _le_hex(0x00, 1)), + (Uint8, 0x01, _le_hex(0x01, 1)), + (Uint8, 0xAB, _le_hex(0xAB, 1)), + # uint16 + (Uint16, 0x0000, _le_hex(0x0000, 2)), + (Uint16, 0xABCD, _le_hex(0xABCD, 2)), + # uint32 + (Uint32, 0x00000000, _le_hex(0x00000000, 4)), + (Uint32, 0x01234567, _le_hex(0x01234567, 4)), + # uint64 + (Uint64, 0x0000000000000000, _le_hex(0x0000000000000000, 8)), + (Uint64, 0x0123456789ABCDEF, _le_hex(0x0123456789ABCDEF, 8)), + # uint128 + (Uint128, 0x0, _le_hex(0x0, 16)), + ( + Uint128, + 0x11223344556677880123456789ABCDEF, + _le_hex(0x11223344556677880123456789ABCDEF, 16), + ), + # uint256 + (Uint256, 0x0, _le_hex(0x0, 32)), + ], +) +def test_hash_tree_root_uints(uint_type: Type[BaseUint], value_int: int, le_hex: str) -> None: + """ + Tests the hash tree root of various unsigned integer types (Uint). + + For basic types like integers, the hash tree root is simply their + little-endian byte representation, right-padded with zeros to 32 bytes. + + This test covers integers of different bit lengths. + """ + # Instantiate the specific SSZ integer type with the test value. + val = uint_type(value_int) + # Calculate the hash tree root using both the functional and object-oriented approaches. + root_fn = hash_tree_root(val) + root_oo = HashTreeRoot.compute(val) + # The expected root is the little-endian hex string padded to a 32-byte chunk. + expected_chunk = _chunk_hex(le_hex) + # Verify that both calculated roots match the expected chunk. 
+ assert root_fn.hex() == expected_chunk + assert root_oo.hex() == expected_chunk + # Ensure both calculation methods produce the exact same result. + assert root_fn == root_oo + + +@pytest.mark.parametrize( + "val, serialized_hex", + [ + (Boolean(False), "00"), + (Boolean(True), "01"), + ], +) +def test_hash_tree_root_boolean(val: Boolean, serialized_hex: str) -> None: + """ + Tests the hash tree root of the Boolean type. + + A boolean is serialized as `0x01` for True and `0x00` for False. + + Its hash tree root is this single byte, right-padded to 32 bytes. + """ + # The expected root is the boolean's serialized hex byte ('00' or '01') padded to 32 bytes. + expected = chunk(serialized_hex) + # Verify the functional approach gives the correct root. + assert hash_tree_root(val).hex() == expected + # Verify the object-oriented approach gives the correct root. + assert HashTreeRoot.compute(val).hex() == expected + + +@pytest.mark.parametrize( + "payload_hex", + [ + "", # Empty bytes + "00", # Single byte + "01", + "ab", + "00010203", # Multiple bytes + "ff" * 31, # 31 bytes, requires padding + "ff" * 32, # Exactly one chunk + "ff" * 33, # More than one chunk, requires Merklization + ], +) +def test_hash_tree_root_raw_bytes_like(payload_hex: str) -> None: + """Tests that `hash_tree_root` handles various raw byte-like inputs consistently.""" + # Convert the hex payload to a bytes object. + data = bytes.fromhex(payload_hex) + + # Compute the hash tree root for `bytes`. + got_b = hash_tree_root(data).hex() + # Compute the hash tree root for `bytearray`. + got_ba = hash_tree_root(bytearray(data)).hex() + # Compute the hash tree root for `memoryview`. + got_mv = hash_tree_root(memoryview(data)).hex() + + # The core SSZ logic is to pack the data into chunks and then Merkleize. + # - For a single chunk or less, the root is just the padded chunk. + # - For multiple chunks, it's the root of the Merkle tree. 
+ # + # This assertion verifies that all byte-like input types are treated identically. + assert got_b == got_ba == got_mv + + +def test_hash_tree_root_bytevector_48() -> None: + """Tests the hash tree root of a fixed-size `ByteVector` that spans multiple chunks.""" + # Create a ByteVector of 48 bytes with values 0x00 to 0x2F (47). + bv = ByteVector[48](bytes(range(48))) # type: ignore[misc] + # Define the first 32-byte chunk (bytes 0-31). + left = "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f" + # Define the second chunk (bytes 32-47), right-padded with zeros to 32 bytes. + right = "202122232425262728292a2b2c2d2e2f00000000000000000000000000000000" + # The expected root is the hash of the two chunks. + expected = h(left, right) + # Verify the calculated root matches the expected hash. + assert hash_tree_root(bv).hex() == expected + + +def test_hash_tree_root_bytelist_small_empty() -> None: + """ + Tests the hash tree root of an empty, small-capacity `ByteList`. + + For a list, the root is a mix-in of the Merkle root of the data and the list's + length. For an empty list with a capacity of 10 bytes (which fits in one + chunk), the data root is a single zero chunk. This is then hashed with the + list's length (0) to get the final root. + """ + # Create an empty ByteList with a capacity of 10 bytes. + bl = ByteList[10](b"") # type: ignore[misc] + # The data root for an empty list within a single-chunk capacity is the zero chunk. + # - The length (0) is serialized and chunked. + # - The final root is hash(zero_chunk, chunk(length)). + expected = h(chunk(""), chunk("00")) + # Verify the calculated root. + assert hash_tree_root(bl).hex() == expected + + +def test_hash_tree_root_bytelist_big_empty() -> None: + """ + Tests the hash tree root of an empty, large-capacity `ByteList`. + + If the list's capacity (2048 bytes = 64 chunks) requires a non-trivial Merkle + tree, the data root for an empty list is the root of a balanced tree of zero + chunks. 
For 64 chunks, this is `ZERO_HASHES[6]`. This root is then mixed in + with the length (0). + """ + # Create an empty ByteList with a capacity of 2048 bytes. + bl = ByteList[2048](b"") # type: ignore[misc] + # The data root for a 64-chunk capacity is the precomputed zero hash at depth 6. + # This is then hashed with the length (0). + expected = h(ZERO_HASHES[6], chunk("00")) + # Verify the calculated root. + assert hash_tree_root(bl).hex() == expected + + +@pytest.mark.parametrize( + "limit, payload_hex, expected_root_hex", + [ + # Case 1: 7-byte list. Fits in one chunk. Root is hash(chunk(data), chunk(length=7)). + (7, "00010203040506", h(chunk("00010203040506"), chunk("07"))), + # Case 2: 50-byte list. Spans two chunks. Merkleize data chunks, then mix in length. + ( + 50, + "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031", + h( + h( + # Chunk 1 + "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f", + # Chunk 2 (padded) + "202122232425262728292a2b2c2d2e2f30310000000000000000000000000000", + ), + chunk("32"), # Length is 50 (0x32) + ), + ), + # Case 3: 256-byte limit, but only 6 bytes of data. + # Data root requires padding up to the capacity's Merkle tree depth. + # 256 bytes = 8 chunks, depth = 3. + ( + 256, + "000102030405", + h( + # Merkleize the single data chunk with zero hashes to a depth of 3. + h(h(h(chunk("000102030405"), ZERO_HASHES[0]), ZERO_HASHES[1]), ZERO_HASHES[2]), + # Mix in the length (6). + chunk("06"), + ), + ), + ], +) +def test_hash_tree_root_bytelist_various( + limit: int, payload_hex: str, expected_root_hex: str +) -> None: + """ + Tests `ByteList` hash tree root calculation for various sizes and capacities. + """ + # Create the ByteList instance for the current test case. + bl = ByteList[limit](bytes.fromhex(payload_hex)) # type: ignore[misc] + # Verify the calculated root matches the pre-computed expected root. 
+ assert hash_tree_root(bl).hex() == expected_root_hex + + +@pytest.mark.parametrize( + "bits, expect_serial_hex, expect_root_hex", + [ + # Bitvector[8]: 8 bits fit in 1 byte. Root is the chunk of that byte. + ((1, 1, 0, 1, 0, 1, 0, 0), "2b", chunk("2b")), + # Bitvector[4]: 4 bits fit in 1 byte. + ((0, 1, 0, 1), "0a", chunk("0a")), + # Bitvector[3]: 3 bits fit in 1 byte. + ((0, 1, 0), "02", chunk("02")), + ], +) +def test_hash_tree_root_bitvector( + bits: Tuple[int, ...], expect_serial_hex: str, expect_root_hex: str +) -> None: + """ + Tests the hash tree root of `Bitvector` (fixed-size bitfield). + + A `Bitvector` is serialized into the minimum number of bytes required. + Its hash tree root is the Merkle root of these bytes, treated like a `ByteVector`. + """ + # Create the Bitvector instance. + bv = Bitvector[len(bits)](bits) # type: ignore[misc] + # Sanity check: ensure the serialization is correct. + assert bv.encode_bytes().hex() == expect_serial_hex + # Verify the hash tree root. + assert hash_tree_root(bv).hex() == expect_root_hex + + +@pytest.mark.parametrize( + "limit, bits, expect_serial_hex, expect_root_hex", + [ + # Bitlist[8]: 8 bits + length bit serialize to 2 bytes ("2b01"). + # The data root is chunk("2b"), mixed with length 8. + (8, (1, 1, 0, 1, 0, 1, 0, 0), "2b01", h(chunk("2b"), chunk("08"))), + # Bitlist[4]: 4 bits + length bit serialize to 1 byte ("1a"). + # The data part is "0a". The root is hash(chunk("0a"), chunk(length=4)). + (4, (0, 1, 0, 1), "1a", h(chunk("0a"), chunk("04"))), + # Bitlist[3]: 3 bits + length bit serialize to 1 byte ("0a"). + # Data part is "02". Root is hash(chunk("02"), chunk(length=3)). + (3, (0, 1, 0), "0a", h(chunk("02"), chunk("03"))), + ], +) +def test_hash_tree_root_bitlist( + limit: int, bits: Tuple[int, ...], expect_serial_hex: str, expect_root_hex: str +) -> None: + """ + Tests the hash tree root of `Bitlist` (variable-size bitfield). + + A `Bitlist`'s serialization includes a "length bit". 
The hash tree root
+    calculation separates the data bits from the length, Merkleizes the data
+    part, and then mixes in the number of bits.
+    """
+    # Create the Bitlist instance.
+    bl = Bitlist[limit](bits)  # type: ignore[misc]
+    # Sanity check the SSZ serialization.
+    assert bl.encode_bytes().hex() == expect_serial_hex
+    # Verify the hash tree root.
+    assert hash_tree_root(bl).hex() == expect_root_hex
+
+
+def test_hash_tree_root_bitvector_512_all_ones() -> None:
+    """
+    Tests the hash tree root of a large `Bitvector` that spans multiple chunks.
+    """
+    # A 512-bit vector is 64 bytes, which is exactly two 32-byte chunks.
+    bv = Bitvector[512]((1,) * 512)  # type: ignore[misc]
+    # Both chunks will be all `0xff` bytes.
+    left = "ff" * 32
+    right = "ff" * 32
+    # The root is the hash of these two chunks.
+    expected = h(left, right)
+    # Verify the result.
+    assert hash_tree_root(bv).hex() == expected
+
+
+def test_hash_tree_root_bitlist_512_all_ones() -> None:
+    """
+    Tests the hash tree root of a large `Bitlist`.
+    """
+    # Create a Bitlist of 512 bits.
+    bl = Bitlist[512]((1,) * 512)  # type: ignore[misc]
+    # The data part is 512 bits (64 bytes), which forms two full chunks of `0xff`.
+    # The Merkle root of the data is the hash of these two chunks.
+    base = h("ff" * 32, "ff" * 32)
+    # This data root is then hashed with the list's length (512).
+    # 512 is 0x0200; its little-endian byte serialization begins 00 02, i.e. hex "0002".
+    expected = h(base, chunk("0002"))
+    # Verify the result.
+    assert hash_tree_root(bl).hex() == expected
+
+
+def test_hash_tree_root_vector_uint16_2() -> None:
+    """
+    Tests the hash tree root of a `Vector` of basic types.
+
+    If the total serialized size of a Vector is <= 32 bytes, the root is
+    simply the serialized bytes, right-padded to 32 bytes.
+    """
+    # Vector of two Uint16 values.
+    v = Vector[Uint16, 2]((0x4567, 0x0123))  # type: ignore[misc]
+    # Serialization (little-endian): 0x4567 -> "6745", 0x0123 -> "2301".
+    # Concatenated: "67452301". 
This is 4 bytes, which fits in one chunk. + expected = chunk("67452301") + # Verify the root is the padded serialization. + assert hash_tree_root(v).hex() == expected + + +def test_hash_tree_root_list_uint16() -> None: + """ + Tests the hash tree root of a `List` of basic types. + """ + # Create a list of three Uint16 elements. + test_list = List[Uint16, 32]((0xAABB, 0xC0AD, 0xEEFF)) # type: ignore[misc] + # The serialized data is "bbaaadc0ffee" (3 * 2 = 6 bytes). + # The capacity is 32 * 2 = 64 bytes = 2 chunks. + # The data is packed into chunks and Merkleized. Here, it's one data chunk and one zero chunk. + base = h(chunk("bbaaadc0ffee"), chunk("")) + # This data root is mixed in with the element count (3). + expected = h(base, chunk("03")) + # Verify the result. + assert hash_tree_root(test_list).hex() == expected + + +def test_hash_tree_root_list_uint32_large_limit() -> None: + """ + Tests a `List` of basic types with a large capacity, requiring padding. + """ + # List of three Uint32s, capacity 128 elements. + test_list = List[Uint32, 128]((0xAABB, 0xC0AD, 0xEEFF)) # type: ignore[misc] + # Capacity: 128 * 4 = 512 bytes = 16 chunks. Tree depth is 4 (2^4=16). + # Serialized data: "bbaa0000adc00000ffee0000" (3 * 4 = 12 bytes), fits in one chunk. + # This single chunk must be Merkleized with zero hashes up to depth 4. + base = merge(chunk("bbaa0000adc00000ffee0000"), ZERO_HASHES[0:4]) + # Finally, mix in the element count (3). + expected = h(base, chunk("03")) + # Verify the result. + assert hash_tree_root(test_list).hex() == expected + + +def test_hash_tree_root_list_uint256() -> None: + """ + Tests a `List` where each element is itself a 32-byte chunk. + """ + # Create a list of three Uint256 elements. + test_list = List[Uint256, 32]((0xAABB, 0xC0AD, 0xEEFF)) # type: ignore[misc] + # Each Uint256 is a 32-byte leaf. We have 3 leaves. 
+ a = chunk("bbaa") # 0xAABB + b = chunk("adc0") # 0xC0AD + c = chunk("ffee") # 0xEEFF + # Merkleize the three leaves, padding to 4 with a zero chunk. + base = h(h(a, b), h(c, chunk(""))) + # The list capacity is 32 elements, so the tree depth is 5 (2^5=32). + # We already have a root for 4 leaves (depth 2), so merge with zero hashes from depth 2 to 5. + merkle = merge(base, ZERO_HASHES[2:5]) + # Mix in the element count (3). + expected = h(merkle, chunk("03")) + # Verify the result. + assert hash_tree_root(test_list).hex() == expected + + +# Define SSZ Container types for testing. +class SingleField(Container): + A: Byte + + +class Small(Container): + A: Uint16 + B: Uint16 + + +class Fixed(Container): + A: Uint8 + B: Uint64 + C: Uint32 + + +class Var(Container): + A: Uint16 + B: List[Uint16, 1024] # type: ignore + C: Uint8 + + +class Complex(Container): + A: Uint16 + B: List[Uint16, 128] # type: ignore + C: Uint8 + D: ByteList[256] # type: ignore + E: Var + F: Vector[Fixed, 4] # type: ignore + G: Vector[Var, 2] # type: ignore + + +def test_hash_tree_root_container_singlefield() -> None: + """ + Tests the hash tree root of a container with a single basic field. + """ + # For a container with one basic field, the root is just the chunk of that field. + v = SingleField(A=Byte(0xAB)) + expected = chunk("ab") + assert hash_tree_root(v).hex() == expected + + +def test_hash_tree_root_container_small() -> None: + """ + Tests a container with two basic fields that fit within one chunk. + """ + # Create a container with two Uint16 fields. + v = Small(A=Uint16(0x4567), B=Uint16(0x0123)) + # The fields are chunked separately and then hashed. + # Note: SSZ chunks fields, not their concatenated serialization. + expected = h(chunk("6745"), chunk("2301")) + assert hash_tree_root(v).hex() == expected + + +def test_hash_tree_root_container_fixed() -> None: + """ + Tests a container with multiple fixed-size fields, requiring Merklization. + """ + # Create the container instance. 
+ v = Fixed(A=Uint8(0xAB), B=Uint64(0xAABBCCDDEEFF0011), C=Uint32(0x12345678)) + # The fields A, B, C are chunked. Since there are 3 fields, the tree is padded to 4. + # Tree structure: h( h(chunk(A), chunk(B)), h(chunk(C), zero_chunk) ) + expected = h(h(chunk("ab"), chunk("1100ffeeddccbbaa")), h(chunk("78563412"), chunk(""))) + assert hash_tree_root(v).hex() == expected + + +def test_hash_tree_root_container_var_empty() -> None: + """ + Tests a container with a variable-size list that is empty. + """ + # Create a container where field B is an empty List. + v = Var(A=Uint16(0xABCD), B=List[Uint16, 1024](), C=Uint8(0xFF)) # type: ignore[misc] + # The root of the empty list B is calculated first. + # Capacity 1024*2 bytes = 64 chunks, so empty root is ZERO_HASHES[6]. + # This is mixed with length 0. + expected_b = h(ZERO_HASHES[6], chunk("00000000")) # Length for basic lists is uint64 + # The container's fields are then Merkleized. + expected = h(h(chunk("cdab"), expected_b), h(chunk("ff"), chunk(""))) + assert hash_tree_root(v).hex() == expected + + +def test_hash_tree_root_container_var_some() -> None: + """ + Tests a container with a populated variable-size list. + """ + # Create a container with a list containing three elements. + v = Var(A=Uint16(0xABCD), B=List[Uint16, 1024]((1, 2, 3)), C=Uint8(0xFF)) # type: ignore[misc] + # Calculate the root of list B. + # Data "010002000300" is padded to capacity (64 chunks, depth 6). + base = merge(chunk("010002000300"), ZERO_HASHES[0:6]) + # Mix in the length (3). + expected_b = h(base, chunk("03")) + # Merkleize the container fields' roots. + expected = h(h(chunk("cdab"), expected_b), h(chunk("ff"), chunk(""))) + assert hash_tree_root(v).hex() == expected + + +def test_hash_tree_root_container_complex() -> None: + """ + Tests a complex, nested container with all types of fields. + """ + # Instantiate the deeply nested container. 
+ v = Complex( + A=Uint16(0xAABB), + B=List[Uint16, 128]((0x1122, 0x3344)), # type: ignore[misc] + C=Uint8(0xFF), + D=ByteList[256](b"foobar"), # type: ignore[misc] + E=Var(A=Uint16(0xABCD), B=List[Uint16, 1024]((1, 2, 3)), C=Uint8(0xFF)), # type: ignore[misc] + F=Vector[Fixed, 4]( # type: ignore[misc] + ( + Fixed(A=Uint8(0xCC), B=Uint64(0x4242424242424242), C=Uint32(0x13371337)), + Fixed(A=Uint8(0xDD), B=Uint64(0x3333333333333333), C=Uint32(0xABCDABCD)), + Fixed(A=Uint8(0xEE), B=Uint64(0x4444444444444444), C=Uint32(0x00112233)), + Fixed(A=Uint8(0xFF), B=Uint64(0x5555555555555555), C=Uint32(0x44556677)), + ) + ), + G=Vector[Var, 2]( # type: ignore[misc] + ( + Var(A=Uint16(0xDEAD), B=List[Uint16, 1024]((1, 2, 3)), C=Uint8(0x11)), # type: ignore[misc] + Var(A=Uint16(0xBEEF), B=List[Uint16, 1024]((4, 5, 6)), C=Uint8(0x22)), # type: ignore[misc] + ) + ), + ) + + # Manually build the expected root by calculating the root of each field + # and then Merkleizing them together, mirroring the container structure. 
+ + # Root of field B: List[Uint16, 128] + b_base = merge(chunk("22114433"), ZERO_HASHES[0:3]) + b_root = h(b_base, chunk("02")) + + # Root of field D: ByteList[256] + d_base = merge(chunk("666f6f626172"), ZERO_HASHES[0:3]) + d_root = h(d_base, chunk("06")) + + # Root of field E: Var container + e_data_base = merge(chunk("010002000300"), ZERO_HASHES[0:6]) + e_b_root = h(e_data_base, chunk("03")) + e_root = h(h(chunk("cdab"), e_b_root), h(chunk("ff"), chunk(""))) + + # Root of field F: Vector[Fixed, 4] + def fixed_root(a: str, b: str, c: str) -> str: + return h(h(chunk(a), chunk(b)), h(chunk(c), chunk(""))) + + f_roots = [ + fixed_root("cc", "4242424242424242", "37133713"), + fixed_root("dd", "3333333333333333", "cdabcdab"), + fixed_root("ee", "4444444444444444", "33221100"), + fixed_root("ff", "5555555555555555", "77665544"), + ] + f_root = h(h(f_roots[0], f_roots[1]), h(f_roots[2], f_roots[3])) + + # Root of field G: Vector[Var, 2] + def var_root(a_hex: str, payload_hex: str, count_hex: str, c_hex: str) -> str: + b_base_local = merge(chunk(payload_hex), ZERO_HASHES[0:6]) + b_root_local = h(b_base_local, chunk(count_hex)) + return h(h(chunk(a_hex), b_root_local), h(chunk(c_hex), chunk(""))) + + g0 = var_root("adde", "010002000300", "03", "11") + g1 = var_root("efbe", "040005000600", "03", "22") + g_root = h(g0, g1) + + # Final Merklization of all field roots (A, B, C, D, E, F, G), padded to 8 leaves. + left = h(h(chunk("bbaa"), b_root), h(chunk("ff"), d_root)) + right = h(h(e_root, f_root), h(g_root, chunk(""))) + expected = h(left, right) + + # Verify the final calculated root. + assert hash_tree_root(v).hex() == expected + + +def test_hash_tree_root_union_single_type() -> None: + """ + Tests the hash tree root of a Union object. + """ + # Define a Union type with one possible member. + union = Union[Uint16] # type: ignore[type-arg] + # Instantiate the union, selecting the first type (selector=0). 
+ u = union(selector=0, value=Uint16(0xAABB)) + # The root is hash(root(value), chunk(selector)). + # For selector 0, this is hashed with a zero chunk. + expected = h(chunk("bbaa"), chunk("")) + assert hash_tree_root(u).hex() == expected + + +def test_hash_tree_root_union_with_none_arm() -> None: + """ + Tests a Union where the selected type is `None`. + """ + # Define a Union type that includes None. + union = Union[None, Uint16, Uint32] # type: ignore[type-arg] + # Instantiate with the None type (selector=0). + u = union(selector=0, value=None) + # For a `None` value, the value root is a zero chunk. + # This is hashed with the selector (0), which is also a zero chunk. + expected = h(chunk(""), chunk("")) + assert hash_tree_root(u).hex() == expected + + +def test_hash_tree_root_union_other_arm() -> None: + """ + Tests a Union where a non-zero selector is used. + """ + # Define the Union type. + union = Union[None, Uint16, Uint32] # type: ignore[type-arg] + # Instantiate with the second type (selector=1). + u = union(selector=1, value=Uint16(0xAABB)) + # The root is hash(root(value), chunk(selector=1)). + expected = h(chunk("bbaa"), chunk("01")) + assert hash_tree_root(u).hex() == expected + + +def test_hash_tree_root_union_multi_other_arm() -> None: + """ + Tests a Union with multiple non-None types. + """ + # Define a union of two integer types. + union = Union[Uint16, Uint32] # type: ignore[type-arg] + # Instantiate with the second type (selector=1), which is Uint32. + u = union(selector=1, value=Uint32(0xDEADBEEF)) + # The root is hash(root(value), chunk(selector=1)). 
+ expected = h(chunk("efbeadde"), chunk("01")) + assert hash_tree_root(u).hex() == expected diff --git a/tests/lean_spec/subspecs/ssz/test_merkle_proof.py b/tests/lean_spec/subspecs/ssz/test_merkle_proof.py index b3228b3dd..169ab6610 100644 --- a/tests/lean_spec/subspecs/ssz/test_merkle_proof.py +++ b/tests/lean_spec/subspecs/ssz/test_merkle_proof.py @@ -6,9 +6,9 @@ import pytest from typing_extensions import Any -from lean_spec.subspecs.ssz.gindex import GeneralizedIndex -from lean_spec.subspecs.ssz.merkle.proof import MerkleProof, ProofHashes -from lean_spec.subspecs.ssz.merkle.tree import build_merkle_tree +from lean_spec.subspecs.ssz.merkle_proof.gindex import GeneralizedIndex +from lean_spec.subspecs.ssz.merkle_proof.proof import MerkleProof, ProofHashes +from lean_spec.subspecs.ssz.merkle_proof.tree import build_merkle_tree from lean_spec.subspecs.ssz.utils import get_power_of_two_ceil from lean_spec.types.byte_arrays import Bytes32 diff --git a/tests/lean_spec/subspecs/ssz/test_merkleization.py b/tests/lean_spec/subspecs/ssz/test_merkleization.py new file mode 100644 index 000000000..ac2bc171e --- /dev/null +++ b/tests/lean_spec/subspecs/ssz/test_merkleization.py @@ -0,0 +1,186 @@ +"""Unit tests for SSZ Merkleization utilities.""" + +from __future__ import annotations + +import pytest + +from lean_spec.subspecs.ssz.constants import ZERO_HASH +from lean_spec.subspecs.ssz.merkleization import Merkle +from lean_spec.subspecs.ssz.utils import hash_nodes +from lean_spec.types.byte_arrays import Bytes32 + + +def h(a: Bytes32, b: Bytes32) -> Bytes32: + """A concise alias for hash_nodes for building expected roots.""" + return hash_nodes(a, b) + + +# Create some sample chunks for testing, c[i] = bytes32(i) +c = [Bytes32(i.to_bytes(32, "little")) for i in range(16)] + +# Pre-calculate zero-tree roots for assertions +# Z[0] = ZERO_HASH, Z[1] = h(Z[0], Z[0]), Z[2] = h(Z[1], Z[1]), etc. 
+Z = [ZERO_HASH] +for _ in range(10): + Z.append(h(Z[-1], Z[-1])) + + +def test_merkleize_empty_no_limit() -> None: + """Tests that merkleizing an empty list with no limit returns the ZERO_HASH.""" + assert Merkle.merkleize([]) == ZERO_HASH + + +@pytest.mark.parametrize( + "limit, expected_width, expected_zero_root", + [ + (0, 1, Z[0]), # limit=0 -> width=1 -> root is Z[0] + (1, 1, Z[0]), # limit=1 -> width=1 -> root is Z[0] + (2, 2, Z[1]), # limit=2 -> width=2 -> root is Z[1] + (3, 4, Z[2]), # limit=3 -> width=4 -> root is Z[2] + (7, 8, Z[3]), # limit=7 -> width=8 -> root is Z[3] + (8, 8, Z[3]), + ], +) +def test_merkleize_empty_with_limit( + limit: int, expected_width: int, expected_zero_root: Bytes32 +) -> None: + """ + Tests that merkleizing an empty list with a limit returns the correct + pre-computed root for a tree of zero hashes of the specified width. + """ + assert Merkle.merkleize([], limit=limit) == expected_zero_root + + +def test_merkleize_single_chunk() -> None: + """Tests that the root of a single chunk is the chunk itself.""" + assert Merkle.merkleize([c[1]]) == c[1] + + +def test_merkleize_power_of_two_chunks() -> None: + """Tests merkleization with a number of chunks that is a power of two (no padding needed).""" + # Test with 2 chunks + assert Merkle.merkleize([c[0], c[1]]) == h(c[0], c[1]) + # Test with 4 chunks + root_4 = h(h(c[0], c[1]), h(c[2], c[3])) + assert Merkle.merkleize(c[0:4]) == root_4 + + +def test_merkleize_non_power_of_two_chunks() -> None: + """Tests merkleization with a number of chunks that requires padding.""" + # Test with 3 chunks (pads to 4) + expected = h(h(c[0], c[1]), h(c[2], Z[0])) + assert Merkle.merkleize(c[0:3]) == expected + # Test with 5 chunks (pads to 8) + h01 = h(c[0], c[1]) + h23 = h(c[2], c[3]) + h4z = h(c[4], Z[0]) + # The remaining leaves are zero, so their parent is h(Z[0], Z[0]) = Z[1] + expected = h(h(h01, h23), h(h4z, Z[1])) + assert Merkle.merkleize(c[0:5]) == expected + + +def 
test_merkleize_with_limit_padding() -> None: + """Tests that a limit correctly enforces a larger tree width than the number of chunks.""" + # 3 chunks, but limit is 8 (pads to width 8) + h01 = h(c[0], c[1]) + h2z = h(c[2], Z[0]) + # The parent of h01 and h2z + left_branch = h(h01, h2z) + # The right branch is a zero-tree of width 4, so its root is Z[2]. + right_branch = Z[2] + expected = h(left_branch, right_branch) + assert Merkle.merkleize(c[0:3], limit=8) == expected + + +def test_merkleize_error_on_exceeding_limit() -> None: + """Tests that merkleize raises a ValueError if the chunk count exceeds the limit.""" + with pytest.raises(ValueError, match="input exceeds limit"): + Merkle.merkleize(c[0:5], limit=4) + + +def test_mix_in_length() -> None: + """Tests mixing a length into a root.""" + root = c[0] + length = 12345 + length_bytes = Bytes32(length.to_bytes(32, "little")) + expected = h(root, length_bytes) + assert Merkle.mix_in_length(root, length) == expected + + +def test_mix_in_length_zero() -> None: + """Tests mixing a length of 0.""" + root = c[0] + length = 0 + length_bytes = Bytes32(length.to_bytes(32, "little")) + expected = h(root, length_bytes) + assert Merkle.mix_in_length(root, length) == expected + + +def test_mix_in_length_error_on_negative() -> None: + """Tests that mixing in a negative length raises a ValueError.""" + with pytest.raises(ValueError): + Merkle.mix_in_length(c[0], -1) + + +def test_mix_in_selector() -> None: + """Tests mixing a selector into a root.""" + root = c[1] + selector = 42 + selector_bytes = Bytes32(selector.to_bytes(32, "little")) + expected = h(root, selector_bytes) + assert Merkle.mix_in_selector(root, selector) == expected + + +def test_mix_in_selector_error_on_negative() -> None: + """Tests that mixing in a negative selector raises a ValueError.""" + with pytest.raises(ValueError): + Merkle.mix_in_selector(c[1], -1) + + +def test_zero_tree_root_internal() -> None: + """Tests the internal helper for calculating the 
root of an all-zero tree.""" + assert Merkle._zero_tree_root(1) == Z[0] + assert Merkle._zero_tree_root(2) == Z[1] + assert Merkle._zero_tree_root(4) == Z[2] + assert Merkle._zero_tree_root(8) == Z[3] + assert Merkle._zero_tree_root(16) == Z[4] + + +def test_merkleize_progressive_empty() -> None: + """Tests progressive merkleization of an empty list.""" + assert Merkle.merkleize_progressive([]) == ZERO_HASH + + +def test_merkleize_progressive_single_chunk() -> None: + """Tests progressive merkleization of a single chunk.""" + # right = merkleize([c[0]], 1) -> c[0] + # left = ZERO_HASH + expected = h(ZERO_HASH, c[0]) + assert Merkle.merkleize_progressive([c[0]], num_leaves=1) == expected + + +def test_merkleize_progressive_five_chunks() -> None: + """ + Tests progressive merkleization with multiple recursive steps. + Calculates the expected root manually by tracing the spec's logic. + """ + chunks = c[0:5] + + # Manually trace the recursion for `merkleize_progressive(chunks, 1)`: + # Step 1 (num_leaves=1): + # right1 = merkleize([c0], 1) -> c0 + # left1 = merkleize_progressive([c1, c2, c3, c4], 4) + # + # To calculate left1, recurse... 
+    # Step 2 (num_leaves=4):
+    #   right2 = merkleize([c1, c2, c3, c4], 4) -> h(h(c1,c2), h(c3,c4))
+    #   left2 = ZERO_HASH (no more chunks)
+    #   So, left1 = h(left2, right2) = h(Z[0], right2)
+    #
+    # Final result is h(left1, right1)
+    right2 = h(h(c[1], c[2]), h(c[3], c[4]))
+    left1 = h(Z[0], right2)
+    right1 = c[0]
+    expected = h(left1, right1)
+
+    assert Merkle.merkleize_progressive(chunks, num_leaves=1) == expected
diff --git a/tests/lean_spec/subspecs/ssz/test_pack.py b/tests/lean_spec/subspecs/ssz/test_pack.py
new file mode 100644
index 000000000..245198e9a
--- /dev/null
+++ b/tests/lean_spec/subspecs/ssz/test_pack.py
@@ -0,0 +1,193 @@
+"""Unit tests for SSZ packing helpers (`Packer`)."""
+
+from __future__ import annotations
+
+from typing import List as PyList
+
+import pytest
+
+from lean_spec.subspecs.ssz.constants import BITS_PER_BYTE, BYTES_PER_CHUNK
+from lean_spec.subspecs.ssz.pack import Packer
+from lean_spec.types.byte_arrays import Bytes32
+
+
+def _hex_chunks(chunks: PyList[Bytes32]) -> PyList[str]:
+    """Return a list of hex strings for Bytes32 chunks."""
+    # Bytes32 is bytes-like
+    return [bytes(c).hex() for c in chunks]
+
+
+def _pad32_hex(payload_hex: str) -> str:
+    """Right-pad hex to 32 bytes (64 hex chars)."""
+    return (payload_hex + ("00" * 32))[:64]
+
+
+def test_right_pad_to_chunk_empty() -> None:
+    assert Packer._right_pad_to_chunk(b"") == b""
+
+
+def test_right_pad_to_chunk_already_aligned() -> None:
+    data = bytes(range(32))
+    out = Packer._right_pad_to_chunk(data)
+    assert out == data  # unchanged
+
+
+def test_right_pad_to_chunk_partial() -> None:
+    data = b"\x01\x02\x03"  # 3 bytes
+    out = Packer._right_pad_to_chunk(data)
+    assert len(out) % BYTES_PER_CHUNK == 0
+    assert out.startswith(data)
+    assert out[len(data) :] == b"\x00" * (BYTES_PER_CHUNK - len(data))
+
+
+def test_partition_chunks_empty() -> None:
+    assert Packer._partition_chunks(b"") == []
+
+
+def test_partition_chunks_exact_one() -> None:
+    data = bytes(range(32))
+    chunks = 
Packer._partition_chunks(data) + assert len(chunks) == 1 + assert bytes(chunks[0]) == data + + +def test_partition_chunks_multiple() -> None: + data = bytes(range(64)) + chunks = Packer._partition_chunks(data) + assert len(chunks) == 2 + assert bytes(chunks[0]) == data[:32] + assert bytes(chunks[1]) == data[32:] + + +def test_partition_chunks_raises_on_misaligned() -> None: + with pytest.raises(ValueError): + Packer._partition_chunks(b"\x00" * 33) + + +@pytest.mark.parametrize( + "payload_hex, expected_chunks_hex", + [ + ("", []), # no data -> no chunks + ("01", [_pad32_hex("01")]), + # A 32-byte payload should become a single 32-byte (64-char hex) chunk. + ("00" * 32, ["00" * 32]), + ( + # A 33-byte payload is padded to 64 bytes, becoming two 32-byte chunks. + "00" * 33, + ["00" * 32, "00" * 32], + ), + ( + "".join(f"{i:02x}" for i in range(40)), # 40 raw bytes -> 2 chunks + [ + "".join(f"{i:02x}" for i in range(32)), + _pad32_hex("".join(f"{i:02x}" for i in range(32, 40))), + ], + ), + ], +) +def test_pack_bytes(payload_hex: str, expected_chunks_hex: PyList[str]) -> None: + """ + Tests packing of raw bytes into 32-byte chunks for various payload sizes. + """ + # Pack the input bytes into a list of 32-byte SSZ chunks. + out = Packer.pack_bytes(bytes.fromhex(payload_hex)) + # Compare the hex representation of the output chunks with the expected list. 
+ assert _hex_chunks(out) == expected_chunks_hex + + +def test_pack_basic_serialized_empty() -> None: + assert Packer.pack_basic_serialized([]) == [] + + +def test_pack_basic_serialized_small_values() -> None: + # Two serialized Uint16 (little-endian): 0x4567 -> 67 45, 0x0123 -> 23 01 + values = [b"\x67\x45", b"\x23\x01"] + out = Packer.pack_basic_serialized(values) + assert len(out) == 1 + assert out[0].hex() == _pad32_hex("67452301") + + +def test_pack_basic_serialized_multi_chunk() -> None: + # 40 bytes worth of already-serialized basic scalars (e.g., 40 x uint8) + values = [bytes([i]) for i in range(40)] + out = Packer.pack_basic_serialized(values) + assert len(out) == 2 + # first chunk: 0..31 + assert out[0].hex() == "".join(f"{i:02x}" for i in range(32)) + # second chunk: 32..39 then padded + tail_hex = "".join(f"{i:02x}" for i in range(32, 40)) + assert out[1].hex() == _pad32_hex(tail_hex) + + +def test_pack_bits_empty() -> None: + assert Packer.pack_bits(()) == [] + + +@pytest.mark.parametrize( + "bits, expected_first_byte_hex", + [ + # Matches the mapping used in other tests: first tuple item -> bit 0 (LSB) of first byte. + ((True, True, False, True, False, True, False, False), "2b"), # 0b00101011 + ((False, True, False, True), "0a"), # 0b00001010 + ((False, True, False), "02"), # 0b00000010 + ], +) +def test_pack_bits_small(bits: tuple[bool, ...], expected_first_byte_hex: str) -> None: + chunks = Packer.pack_bits(bits) + # Always at least one chunk if there are bits. 
+ assert len(chunks) == 1 + first = bytes(chunks[0]) + assert first[0] == int(expected_first_byte_hex, 16) + # Remaining of the first chunk must be zero-padded + assert first[1:] == b"\x00" * (BYTES_PER_CHUNK - 1) + + +def test_pack_bits_two_full_chunks_all_ones_512() -> None: + # 512 bits -> 64 bytes -> exactly 2 chunks of 0xff + bits = (True,) * 512 + chunks = Packer.pack_bits(bits) + assert len(chunks) == 2 + assert bytes(chunks[0]) == b"\xff" * 32 + assert bytes(chunks[1]) == b"\xff" * 32 + + +def test_pack_bits_cross_chunk_boundary_257_ones() -> None: + # 257 ones -> 33 bytes: 32 bytes of 0xff, then 0x01, then pad to 32 + bits = (True,) * 257 + chunks = Packer.pack_bits(bits) + assert len(chunks) == 2 + assert bytes(chunks[0]) == b"\xff" * 32 + second = bytes(chunks[1]) + assert second[0] == 0x01 + assert second[1:] == b"\x00" * 31 + + +def test_pack_bits_byte_len_rounding() -> None: + # Verify byte length rounding: len= (n + 7)//8 + n = 9 + bits = tuple(True if i < n else False for i in range(n)) + chunks = Packer.pack_bits(bits) + # 9 bits -> 2 bytes -> still 1 chunk after padding + assert len(chunks) == 1 + first = bytes(chunks[0]) + # first two bytes should be: 0xff and 0x01 (lower bit set); rest zeros + assert first[:2] == b"\xff\x01" + assert first[2:] == b"\x00" * 30 + + +def test_pack_bits_bit_ordering_examples() -> None: + # Spot-check the little-endian-in-byte policy. 
+ # Set only bit 7 (MSB) of the first byte: tuple index 7 -> value 1 + bits = tuple(True if i == 7 else False for i in range(8)) + chunks = Packer.pack_bits(bits) + assert len(chunks) == 1 + assert bytes(chunks[0])[0] == 0x80 # MSB set + # Set only bit 0 (LSB) of the second byte: index 8 + bits = tuple(True if i == 8 else False for i in range(16)) + chunks = Packer.pack_bits(bits) + assert len(chunks) == 1 + assert bytes(chunks[0])[0] == 0x00 + assert bytes(chunks[0])[1] == 0x01 + # Sanity about constants used internally + assert BITS_PER_BYTE == 8 + assert BYTES_PER_CHUNK == 32 diff --git a/tests/lean_spec/subspecs/ssz/test_tree.py b/tests/lean_spec/subspecs/ssz/test_tree.py index 3deafc176..8be9f8481 100644 --- a/tests/lean_spec/subspecs/ssz/test_tree.py +++ b/tests/lean_spec/subspecs/ssz/test_tree.py @@ -6,7 +6,7 @@ from typing_extensions import List from lean_spec.subspecs.ssz.constants import ZERO_HASH -from lean_spec.subspecs.ssz.merkle.tree import build_merkle_tree +from lean_spec.subspecs.ssz.merkle_proof.tree import build_merkle_tree from lean_spec.subspecs.ssz.utils import hash_nodes from lean_spec.types.byte_arrays import Bytes32 diff --git a/tests/lean_spec/subspecs/ssz/test_utils_ssz.py b/tests/lean_spec/subspecs/ssz/test_utils_ssz.py new file mode 100644 index 000000000..2f1f647c2 --- /dev/null +++ b/tests/lean_spec/subspecs/ssz/test_utils_ssz.py @@ -0,0 +1,64 @@ +"""Unit tests for SSZ utility functions.""" + +import hashlib + +import pytest + +from lean_spec.subspecs.ssz.utils import get_power_of_two_ceil, hash_nodes +from lean_spec.types.byte_arrays import Bytes32 + + +@pytest.mark.parametrize( + "x, expected", + [ + (0, 1), # Edge case: 0 should result in 1 + (1, 1), # A power of two + (2, 2), # A power of two + (3, 4), # A number between powers of two + (4, 4), # A power of two + (5, 8), + (7, 8), + (8, 8), + (9, 16), + (1023, 1024), + (1024, 1024), # A larger power of two + ], +) +def test_get_power_of_two_ceil(x: int, expected: int) -> None: + 
""" + Tests that get_power_of_two_ceil correctly finds the next highest + power of two for a range of inputs. + """ + assert get_power_of_two_ceil(x) == expected + + +def test_hash_nodes() -> None: + """ + Tests that hash_nodes correctly computes the SHA-256 hash of two concatenated nodes. + """ + # Define two known 32-byte nodes. + node_a = Bytes32((1).to_bytes(32, "little")) + node_b = Bytes32((2).to_bytes(32, "little")) + + # Manually compute the expected hash using the standard library. + expected_digest = hashlib.sha256(node_a + node_b).digest() + expected_hash = Bytes32(expected_digest) + + # Call the function and assert that the result matches the expected hash. + assert hash_nodes(node_a, node_b) == expected_hash + + +def test_hash_nodes_with_zero() -> None: + """ + Tests hashing a node with a zero-hash node to ensure correctness. + """ + # Define a node and a zero node. + node_a = Bytes32((42).to_bytes(32, "little")) + zero_node = Bytes32(b"\x00" * 32) + + # Manually compute the expected hash. + expected_digest = hashlib.sha256(node_a + zero_node).digest() + expected_hash = Bytes32(expected_digest) + + # Assert the function's output is correct. 
+ assert hash_nodes(node_a, zero_node) == expected_hash diff --git a/tests/lean_spec/types/test_byte_arrays.py b/tests/lean_spec/types/test_byte_arrays.py index 3ed8c9e31..979f09d55 100644 --- a/tests/lean_spec/types/test_byte_arrays.py +++ b/tests/lean_spec/types/test_byte_arrays.py @@ -16,8 +16,8 @@ Bytes32, Bytes48, Bytes96, - _ByteVectorBase, - _ByteListBase, + ByteVectorBase, + ByteListBase, ) @@ -27,7 +27,7 @@ def sha256(b: bytes) -> bytes: def test_bytevector_factory_ok() -> None: B7 = ByteVector.__class_getitem__(7) - assert issubclass(B7, _ByteVectorBase) + assert issubclass(B7, ByteVectorBase) assert B7.LENGTH == 7 v = B7(b"\x00" * 7) assert isinstance(v, B7) @@ -47,7 +47,7 @@ def test_bytevector_factory_negative() -> None: def test_bytelist_factory_ok() -> None: L9 = ByteList.__class_getitem__(9) - assert issubclass(L9, _ByteListBase) + assert issubclass(L9, ByteListBase) assert L9.LIMIT == 9 v = L9(b"\x01\x02") assert isinstance(v, L9) @@ -171,7 +171,7 @@ def test_hashlib_accepts_bytes32_via_add() -> None: (Bytes96, bytes(range(96))), ], ) -def test_encode_decode_roundtrip_vector(Typ: Type[_ByteVectorBase], payload: bytes) -> None: +def test_encode_decode_roundtrip_vector(Typ: Type[ByteVectorBase], payload: bytes) -> None: v = Typ(payload) assert v.encode_bytes() == payload assert Typ.decode_bytes(payload) == v