Skip to content

fix: Enforce input length to match the modulus #1905

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 3 additions & 4 deletions program-libs/batched-merkle-tree/tests/merkle_tree.rs
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ use light_compressed_account::{
hash_chain::create_hash_chain_from_slice, instruction_data::compressed_proof::CompressedProof,
pubkey::Pubkey,
};
use light_hasher::{Hasher, Poseidon};
use light_hasher::{to_byte_array::ToByteArray, Hasher, Poseidon};
use light_merkle_tree_reference::MerkleTree;
use light_prover_client::prover::{spawn_prover, ProverConfig};
use light_test_utils::mock_batched_forester::{
Expand All @@ -62,9 +62,8 @@ pub fn assert_nullifier_queue_insert(
) -> Result<(), BatchedMerkleTreeError> {
let mut leaf_hash_chain_insert_values = vec![];
for (insert_value, leaf_index) in bloom_filter_insert_values.iter().zip(leaf_indices.iter()) {
let nullifier =
Poseidon::hashv(&[insert_value.as_slice(), &leaf_index.to_be_bytes(), &tx_hash])
.unwrap();
let leaf_index = leaf_index.to_byte_array().unwrap();
let nullifier = Poseidon::hashv(&[insert_value.as_slice(), &leaf_index, &tx_hash]).unwrap();
leaf_hash_chain_insert_values.push(nullifier);
}
Comment on lines 63 to 68
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

💡 Verification agent

🧩 Analysis chain

Quick repo-wide audit to catch non-32-byte Poseidon inputs.

To reduce the chance of a missed callsite (especially in tests/fixtures), run the search below and adjust any remaining .to_be_bytes() or variable-length to_bytes_be() inputs to 32-byte encodings.


🏁 Script executed:

#!/bin/bash
# Show Poseidon::hashv callsites with context
rg -n -A 3 -B 3 $'Poseidon::hashv\\('

# Identify risky conversions that commonly produce non-32-byte inputs
rg -n '\.to_be_bytes\('
rg -n 'to_bytes_be\('

# Confirm adoption of 32-byte conversion helpers
rg -n 'to_byte_array\('
rg -n 'bigint_to_be_bytes_array::<32>'

Length of output: 75413


Enforce 32-byte encodings for all Poseidon::hashv inputs

Several hash callsites still pass variable-length byte slices. Please update the following to use fixed 32-byte arrays (via to_byte_array() or bigint_to_be_bytes_array::<32>()):

• prover/client/tests/batch_update.rs:36

- let nullifier = Poseidon::hashv(&[&leaf, &(i as usize).to_be_bytes(), &tx_hash]).unwrap();
+ let index_bytes = (i as usize).to_byte_array().unwrap();
+ let nullifier = Poseidon::hashv(&[&leaf, &index_bytes, &tx_hash]).unwrap();

• program-tests/merkle-tree/tests/indexed.rs:40–43 & 48–51

- let leaf_0 = Poseidon::hashv(&[
-     &0_u32.to_biguint().unwrap().to_bytes_be(),
-     &30_u32.to_biguint().unwrap().to_bytes_be(),
- ]).unwrap();
+ let leaf_0 = Poseidon::hashv(&[
+     &bigint_to_be_bytes_array::<32>(&0_u32.to_biguint().unwrap()).unwrap(),
+     &bigint_to_be_bytes_array::<32>(&30_u32.to_biguint().unwrap()).unwrap(),
+ ]).unwrap();

- let leaf_1 = Poseidon::hashv(&[
-     &30_u32.to_biguint().unwrap().to_bytes_be(),
-     &0_u32.to_biguint().unwrap().to_bytes_be(),
- ]).unwrap();
+ let leaf_1 = Poseidon::hashv(&[
+     &bigint_to_be_bytes_array::<32>(&30_u32.to_biguint().unwrap()).unwrap(),
+     &bigint_to_be_bytes_array::<32>(&0_u32.to_biguint().unwrap()).unwrap(),
+ ]).unwrap();

After making these changes, rerun the repo-wide audit to confirm no direct .to_be_bytes() or variable-length .to_bytes_be() slices are passed to Poseidon::hashv.

Committable suggestion skipped: line range outside the PR's diff.

🤖 Prompt for AI Agents
In program-libs/batched-merkle-tree/tests/merkle_tree.rs around lines 63–68, the
Poseidon::hashv call passes variable-length byte slices; convert each input to a
fixed 32-byte array and pass those. Replace the current usage of
insert_value.as_slice(), the local leaf_index Vec, and tx_hash slice with
32-byte arrays (e.g. call insert_value.to_byte_array().unwrap().try_into() or
preferably bigint_to_be_bytes_array::<32>() for numeric types, and use
leaf_index.to_byte_array().unwrap().try_into() or
bigint_to_be_bytes_array::<32>() for the index, and the same for tx_hash) and
then pass their references (as &[u8;32] or .as_slice()) into Poseidon::hashv.

assert_input_queue_insert(
Expand Down
4 changes: 2 additions & 2 deletions program-libs/compressed-account/src/compressed_account.rs
Original file line number Diff line number Diff line change
Expand Up @@ -378,7 +378,7 @@ impl ZCompressedAccount<'_> {
#[cfg(not(feature = "pinocchio"))]
#[cfg(test)]
mod tests {
use light_hasher::Poseidon;
use light_hasher::{to_byte_array::ToByteArray, Poseidon};
use light_zero_copy::traits::ZeroCopyAt;
use num_bigint::BigUint;
use rand::Rng;
Expand Down Expand Up @@ -750,7 +750,7 @@ mod tests {
Some(CompressedAccountData {
discriminator: rng.gen(),
data: Vec::new(), // not used in hash
data_hash: Poseidon::hash(rng.gen::<u64>().to_be_bytes().as_slice())
data_hash: Poseidon::hash(&rng.gen::<u64>().to_byte_array().unwrap())
.unwrap(),
})
} else {
Expand Down
13 changes: 6 additions & 7 deletions program-libs/hasher/src/poseidon.rs
Original file line number Diff line number Diff line change
Expand Up @@ -83,6 +83,12 @@ impl Hasher for Poseidon {
}

fn hashv(vals: &[&[u8]]) -> Result<Hash, HasherError> {
for val in vals {
if val.len() != 32 {
return Err(HasherError::InvalidInputLength(32, val.len()));
}
}

// Perform the calculation inline, calling this from within a program is
// not supported.
#[cfg(not(target_os = "solana"))]
Expand All @@ -99,13 +105,6 @@ impl Hasher for Poseidon {
#[cfg(target_os = "solana")]
{
use crate::HASH_BYTES;
// TODO: reenable once LightHasher refactor is merged
// solana_program::msg!("remove len check onchain.");
// for val in vals {
// if val.len() != 32 {
// return Err(HasherError::InvalidInputLength(val.len()));
// }
// }
let mut hash_result = [0; HASH_BYTES];
let result = unsafe {
crate::syscalls::sol_poseidon(
Expand Down
2 changes: 2 additions & 0 deletions program-libs/hasher/src/to_byte_array.rs
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,8 @@ impl_to_byte_array_for_integer_type!(i32);
impl_to_byte_array_for_integer_type!(u32);
impl_to_byte_array_for_integer_type!(i64);
impl_to_byte_array_for_integer_type!(u64);
impl_to_byte_array_for_integer_type!(isize);
impl_to_byte_array_for_integer_type!(usize);
impl_to_byte_array_for_integer_type!(i128);
impl_to_byte_array_for_integer_type!(u128);

Expand Down
9 changes: 5 additions & 4 deletions program-libs/indexed-merkle-tree/src/array.rs
Original file line number Diff line number Diff line change
Expand Up @@ -463,6 +463,7 @@ where
#[cfg(test)]
mod test {
use light_concurrent_merkle_tree::light_hasher::Poseidon;
use light_hasher::to_byte_array::ToByteArray;
use num_bigint::{RandBigInt, ToBigUint};
use rand::thread_rng;

Expand Down Expand Up @@ -561,7 +562,7 @@ mod test {
bigint_to_be_bytes_array::<32>(&nullifier1)
.unwrap()
.as_ref(),
0_usize.to_be_bytes().as_ref(),
0_usize.to_byte_array().unwrap().as_ref(),
bigint_to_be_bytes_array::<32>(&(0.to_biguint().unwrap()))
.unwrap()
.as_ref(),
Expand Down Expand Up @@ -631,7 +632,7 @@ mod test {
bigint_to_be_bytes_array::<32>(&nullifier2)
.unwrap()
.as_ref(),
1_usize.to_be_bytes().as_ref(),
1_usize.to_byte_array().unwrap().as_ref(),
bigint_to_be_bytes_array::<32>(&(30.to_biguint().unwrap()))
.unwrap()
.as_ref(),
Expand Down Expand Up @@ -711,7 +712,7 @@ mod test {
bigint_to_be_bytes_array::<32>(&nullifier3)
.unwrap()
.as_ref(),
1_usize.to_be_bytes().as_ref(),
1_usize.to_byte_array().unwrap().as_ref(),
bigint_to_be_bytes_array::<32>(&(30.to_biguint().unwrap()))
.unwrap()
.as_ref(),
Expand Down Expand Up @@ -806,7 +807,7 @@ mod test {
bigint_to_be_bytes_array::<32>(&nullifier4)
.unwrap()
.as_ref(),
0_usize.to_be_bytes().as_ref(),
0_usize.to_byte_array().unwrap().as_ref(),
bigint_to_be_bytes_array::<32>(&(0.to_biguint().unwrap()))
.unwrap()
.as_ref(),
Expand Down
12 changes: 6 additions & 6 deletions program-libs/indexed-merkle-tree/tests/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -639,18 +639,18 @@ pub fn functional_non_inclusion_test() {
assert_eq!(
leaf_0,
Poseidon::hashv(&[
&0_u32.to_biguint().unwrap().to_bytes_be(),
&1_u32.to_biguint().unwrap().to_bytes_be(),
&30_u32.to_biguint().unwrap().to_bytes_be()
&bigint_to_be_bytes_array::<32>(&0_u32.to_biguint().unwrap()).unwrap(),
&bigint_to_be_bytes_array::<32>(&1_u32.to_biguint().unwrap()).unwrap(),
&bigint_to_be_bytes_array::<32>(&30_u32.to_biguint().unwrap()).unwrap()
])
.unwrap()
);
assert_eq!(
leaf_1,
Poseidon::hashv(&[
&30_u32.to_biguint().unwrap().to_bytes_be(),
&0_u32.to_biguint().unwrap().to_bytes_be(),
&0_u32.to_biguint().unwrap().to_bytes_be()
&bigint_to_be_bytes_array::<32>(&30_u32.to_biguint().unwrap()).unwrap(),
&bigint_to_be_bytes_array::<32>(&0_u32.to_biguint().unwrap()).unwrap(),
&bigint_to_be_bytes_array::<32>(&0_u32.to_biguint().unwrap()).unwrap()
])
.unwrap()
);
Expand Down
4 changes: 3 additions & 1 deletion program-tests/create-address-test-program/src/create_pda.rs
Original file line number Diff line number Diff line change
Expand Up @@ -133,8 +133,10 @@ pub struct RegisteredUser {
impl light_hasher::DataHasher for RegisteredUser {
fn hash<H: light_hasher::Hasher>(&self) -> std::result::Result<[u8; 32], HasherError> {
let truncated_user_pubkey = hash_to_bn254_field_size_be(&self.user_pubkey.to_bytes());
let mut data = [0u8; 32];
data[1..].copy_from_slice(&self.data);

H::hashv(&[truncated_user_pubkey.as_slice(), self.data.as_slice()])
H::hashv(&[truncated_user_pubkey.as_slice(), data.as_slice()])
Comment on lines +136 to +139
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

are you sure this is necessary?
hash_to_bn254_field_size_be should already truncate to less than field size.

Copy link
Contributor Author

@vadorovsky vadorovsky Aug 7, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The truncated_user_pubkey variable and the result of hash_to_bn254_field_size_be are OK, they are not a problem.

But self.data is of type [u8; 31], so I need to "resize" it to 32 bytes. Another option would be changing it to [u8; 32] in the struct.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

you are right got it

}
}

Expand Down
12 changes: 7 additions & 5 deletions program-tests/merkle-tree/tests/indexed.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
use light_hasher::{bigint::bigint_to_be_bytes_array, Hasher, Poseidon};
use light_hasher::{
bigint::bigint_to_be_bytes_array, to_byte_array::ToByteArray, Hasher, Poseidon,
};
use light_merkle_tree_reference::indexed::IndexedMerkleTree;
use num_bigint::ToBigUint;

Expand Down Expand Up @@ -38,16 +40,16 @@ pub fn functional_non_inclusion_test() {
assert_eq!(
leaf_0,
Poseidon::hashv(&[
&0_u32.to_biguint().unwrap().to_bytes_be(),
&30_u32.to_biguint().unwrap().to_bytes_be()
&0_u32.to_byte_array().unwrap(),
&30_u32.to_byte_array().unwrap(),
])
.unwrap()
);
assert_eq!(
Comment on lines 41 to 47
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

💡 Verification agent

🧩 Analysis chain

Verify leaf hashing arity vs. inline comment (“H(0, 1, 30)” vs. two-field hash calls)

The inline comments say the leaf is H(value, next_index, next_value), but the assertions hash only two inputs. If the leaf really includes next_index, this test may be under-specifying the expected hash. If the design intentionally moved to two inputs, please update the comment to avoid confusion.

Run this script to find Poseidon::hashv call sites and check how many fields are hashed for leaves:

Also applies to: 49-55


🏁 Script executed:

#!/bin/bash
set -euo pipefail

# Show Poseidon::hashv usage with a bit of context
rg -n "Poseidon::hashv\\(&\\[" -A 6 -B 2

# Inspect leaf construction sites in the indexed merkle tree library to confirm arity
rg -n "leaf\\(|leaf_hash|hash_leaf|Hash\\(value, next_index, next_value\\)|H\\(" -S -A 6 -B 2

Length of output: 304588


Update inline comments to match two-input leaf hash

The test’s comments still describe a three-input hash (H(value, next_index, next_value)), but the code now calls Poseidon::hashv(&[value, next_value]). Please update both comment lines so they reflect the two-field hash arity:

• File: program-tests/merkle-tree/tests/indexed.rs
Lines ~27–28

Suggested diff:

- // leaf index: 0 = H(0, 1, 30) // Hash(value, next_index, next_value)
+ // leaf index: 0 = H(0, 30)    // Hash(value, next_value)
- // leaf index: 1 = H(30, 0, 0)
+ // leaf index: 1 = H(30, 0)
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
leaf_0,
Poseidon::hashv(&[
&0_u32.to_biguint().unwrap().to_bytes_be(),
&30_u32.to_biguint().unwrap().to_bytes_be()
&0_u32.to_byte_array().unwrap(),
&30_u32.to_byte_array().unwrap(),
])
.unwrap()
);
// leaf index: 0 = H(0, 30) // Hash(value, next_value)
// leaf index: 1 = H(30, 0) // Hash(value, next_value)
🤖 Prompt for AI Agents
In program-tests/merkle-tree/tests/indexed.rs around lines 27–28 and 41–47, the
inline comment still describes a three-input leaf hash H(value, next_index,
next_value) while the code uses Poseidon::hashv(&[value, next_value]) (two
inputs); update the comment text to reflect the two-field hash arity (e.g.,
change to H(value, next_value) and any explanatory text referencing three fields
to reference two), keeping surrounding wording consistent with the new two-input
hash.

leaf_1,
Poseidon::hashv(&[
&30_u32.to_biguint().unwrap().to_bytes_be(),
&0_u32.to_biguint().unwrap().to_bytes_be()
&30_u32.to_byte_array().unwrap(),
&0_u32.to_byte_array().unwrap(),
])
.unwrap()
);
Expand Down
5 changes: 4 additions & 1 deletion program-tests/system-cpi-test/src/create_pda.rs
Original file line number Diff line number Diff line change
Expand Up @@ -543,7 +543,10 @@ pub struct RegisteredUser {
impl light_hasher::DataHasher for RegisteredUser {
fn hash<H: light_hasher::Hasher>(&self) -> std::result::Result<[u8; 32], HasherError> {
let truncated_user_pubkey = hash_to_bn254_field_size_be(&self.user_pubkey.to_bytes());
H::hashv(&[truncated_user_pubkey.as_slice(), self.data.as_slice()])
let mut data = [0u8; 32];
data[1..].copy_from_slice(&self.data);

H::hashv(&[truncated_user_pubkey.as_slice(), data.as_slice()])
}
}

Expand Down
6 changes: 4 additions & 2 deletions program-tests/utils/src/mock_batched_forester.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
use light_compressed_account::{
hash_chain::create_hash_chain_from_slice, instruction_data::compressed_proof::CompressedProof,
};
use light_hasher::{bigint::bigint_to_be_bytes_array, Hasher, Poseidon};
use light_hasher::{
bigint::bigint_to_be_bytes_array, to_byte_array::ToByteArray, Hasher, Poseidon,
};
use light_merkle_tree_reference::{indexed::IndexedMerkleTree, MerkleTree};
use light_prover_client::{
errors::ProverClientError,
Expand Down Expand Up @@ -186,7 +188,7 @@ impl<const HEIGHT: usize> MockBatchedForester<HEIGHT> {
.iter()
.find(|tx_event| tx_event.inputs.contains(leaf))
.expect("No event for leaf found.");
let index_bytes = index.to_be_bytes();
let index_bytes = index.to_byte_array().unwrap();
let nullifier = Poseidon::hashv(&[leaf, &index_bytes, &event.tx_hash]).unwrap();
tx_hashes.push(event.tx_hash);
nullifiers.push(nullifier);
Expand Down
7 changes: 4 additions & 3 deletions program-tests/utils/src/test_batch_forester.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,9 @@ use light_compressed_account::{
hash_chain::create_hash_chain_from_slice, instruction_data::compressed_proof::CompressedProof,
QueueType,
};
use light_hasher::{bigint::bigint_to_be_bytes_array, Poseidon};
use light_hasher::{
bigint::bigint_to_be_bytes_array, to_byte_array::ToByteArray, Hasher, Poseidon,
};
use light_prover_client::{
proof_client::ProofClient,
proof_types::{
Expand Down Expand Up @@ -269,8 +271,7 @@ pub async fn get_batched_nullify_ix_data<R: Rpc>(
let proof = bundle.merkle_tree.get_proof_of_leaf(index, true).unwrap();
merkle_proofs.push(proof.to_vec());
bundle.input_leaf_indices.remove(0);
let index_bytes = index.to_be_bytes();
use light_hasher::Hasher;
let index_bytes = index.to_byte_array().unwrap();
let nullifier = Poseidon::hashv(&[&leaf, &index_bytes, &leaf_info.tx_hash]).unwrap();

tx_hashes.push(leaf_info.tx_hash);
Expand Down
6 changes: 4 additions & 2 deletions prover/client/src/proof_types/batch_update/proof_inputs.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
use light_hasher::{hash_chain::create_hash_chain_from_array, Hasher, Poseidon};
use light_hasher::{
hash_chain::create_hash_chain_from_array, to_byte_array::ToByteArray, Hasher, Poseidon,
};
use light_sparse_merkle_tree::changelog::ChangelogEntry;
use num_bigint::{BigInt, Sign};

Expand Down Expand Up @@ -91,7 +93,7 @@ pub fn get_batch_update_inputs<const HEIGHT: usize>(
let merkle_proof_array = merkle_proof.try_into().unwrap();

// Use the adjusted index bytes for computing the nullifier.
let index_bytes = (*index).to_be_bytes();
let index_bytes = index.to_byte_array().unwrap();
let nullifier = Poseidon::hashv(&[leaf, &index_bytes, &tx_hashes[i]]).unwrap();
let (root, changelog_entry) =
compute_root_from_merkle_proof(nullifier, &merkle_proof_array, *index);
Expand Down
8 changes: 5 additions & 3 deletions prover/client/tests/batch_update.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
use light_hasher::{hash_chain::create_hash_chain_from_slice, Hasher, Poseidon};
use light_hasher::{
hash_chain::create_hash_chain_from_slice, to_byte_array::ToByteArray, Hasher, Poseidon,
};
use light_merkle_tree_reference::MerkleTree;
use light_prover_client::{
constants::{DEFAULT_BATCH_STATE_TREE_HEIGHT, PROVE_PATH, SERVER_ADDRESS},
Expand Down Expand Up @@ -31,9 +33,9 @@ async fn prove_batch_update() {
old_leaves.push(leaf);
merkle_tree.append(&leaf).unwrap();

let index_bytes = (i as usize).to_byte_array().unwrap();
#[allow(clippy::unnecessary_cast)]
let nullifier =
Poseidon::hashv(&[&leaf, &(i as usize).to_be_bytes(), &tx_hash]).unwrap();
let nullifier = Poseidon::hashv(&[&leaf, &index_bytes, &tx_hash]).unwrap();
nullifiers.push(nullifier);
}

Expand Down
32 changes: 16 additions & 16 deletions sdk-libs/macros/tests/hasher.rs
Original file line number Diff line number Diff line change
Expand Up @@ -141,8 +141,8 @@ mod basic_hashing {
let account = create_account(Some(42));

let manual_nested_bytes: Vec<Vec<u8>> = vec![
nested_struct.a.to_be_bytes().to_vec(),
nested_struct.b.to_be_bytes().to_vec(),
nested_struct.a.to_byte_array().unwrap().to_vec(),
nested_struct.b.to_byte_array().unwrap().to_vec(),
light_compressed_account::hash_to_bn254_field_size_be(
nested_struct.c.try_to_vec().unwrap().as_slice(),
)
Expand All @@ -163,8 +163,8 @@ mod basic_hashing {
assert_eq!(nested_hash_result, manual_nested_hash);

let manual_account_bytes: Vec<Vec<u8>> = vec![
vec![u8::from(account.a)],
account.b.to_be_bytes().to_vec(),
account.a.to_byte_array().unwrap().to_vec(),
account.b.to_byte_array().unwrap().to_vec(),
account.c.hash::<Poseidon>().unwrap().to_vec(),
light_compressed_account::hash_to_bn254_field_size_be(&account.d).to_vec(),
{
Expand Down Expand Up @@ -495,18 +495,18 @@ fn test_poseidon_width_limits() {

assert!(max_fields.hash::<Poseidon>().is_ok());
let expected_hash = Poseidon::hashv(&[
1u64.to_be_bytes().as_ref(),
2u64.to_be_bytes().as_ref(),
3u64.to_be_bytes().as_ref(),
4u64.to_be_bytes().as_ref(),
5u64.to_be_bytes().as_ref(),
6u64.to_be_bytes().as_ref(),
7u64.to_be_bytes().as_ref(),
8u64.to_be_bytes().as_ref(),
9u64.to_be_bytes().as_ref(),
10u64.to_be_bytes().as_ref(),
11u64.to_be_bytes().as_ref(),
12u64.to_be_bytes().as_ref(),
1u64.to_byte_array().unwrap().as_ref(),
2u64.to_byte_array().unwrap().as_ref(),
3u64.to_byte_array().unwrap().as_ref(),
4u64.to_byte_array().unwrap().as_ref(),
5u64.to_byte_array().unwrap().as_ref(),
6u64.to_byte_array().unwrap().as_ref(),
7u64.to_byte_array().unwrap().as_ref(),
8u64.to_byte_array().unwrap().as_ref(),
9u64.to_byte_array().unwrap().as_ref(),
10u64.to_byte_array().unwrap().as_ref(),
11u64.to_byte_array().unwrap().as_ref(),
12u64.to_byte_array().unwrap().as_ref(),
])
.unwrap();
assert_eq!(max_fields.hash::<Poseidon>().unwrap(), expected_hash);
Expand Down
Loading