Commit 2800b7f
refactor: prover logic, dependency updates (#28)
* chore: update dependencies and clean up unused packages
* refactor: consolidate PublicTransactionEvent structs and update references
* refactor: adjust root_seq assignment and add Debug trait to struct definitions
* refactor: proof compression and error handling in prover
* refactor: simplify AddressProofInputs, rootIndex: u16
* refactor: extract proof generation for empty tree into separate function
1 parent d1e34e8 commit 2800b7f

14 files changed: +281 −739 lines

Cargo.lock

Lines changed: 55 additions & 462 deletions
(Generated file; diff not rendered.)

Cargo.toml

Lines changed: 7 additions & 7 deletions
@@ -81,13 +81,13 @@ solana-pubkey = "2.3.0"
 
 solana-transaction-status = "1.18.0"
 
+light-concurrent-merkle-tree = { git = "https://github.com/Lightprotocol/light-protocol", rev = "a16015358aee65da5f67e4ae73197df5e75495d9" }
+light-batched-merkle-tree = { git = "https://github.com/Lightprotocol/light-protocol", rev = "a16015358aee65da5f67e4ae73197df5e75495d9" }
+light-merkle-tree-metadata = { git = "https://github.com/Lightprotocol/light-protocol", rev = "a16015358aee65da5f67e4ae73197df5e75495d9" }
+light-compressed-account = { git = "https://github.com/Lightprotocol/light-protocol", rev = "a16015358aee65da5f67e4ae73197df5e75495d9" }
+light-hasher = { git = "https://github.com/Lightprotocol/light-protocol", rev = "a16015358aee65da5f67e4ae73197df5e75495d9" }
+
 light-poseidon = "0.3.0"
-light-batched-merkle-tree = { git = "https://github.com/Lightprotocol/light-protocol", rev = "368f9f08272db78c74b2ade1a1c2fead27dd0a96" }
-light-compressed-account = { git = "https://github.com/Lightprotocol/light-protocol", rev = "368f9f08272db78c74b2ade1a1c2fead27dd0a96" }
-light-concurrent-merkle-tree = { git = "https://github.com/Lightprotocol/light-protocol", rev = "368f9f08272db78c74b2ade1a1c2fead27dd0a96" }
-light-hasher = { git = "https://github.com/Lightprotocol/light-protocol", rev = "368f9f08272db78c74b2ade1a1c2fead27dd0a96" }
-light-merkle-tree-metadata = { git = "https://github.com/Lightprotocol/light-protocol", rev = "368f9f08272db78c74b2ade1a1c2fead27dd0a96" }
-light-sdk = { git = "https://github.com/Lightprotocol/light-protocol", rev = "368f9f08272db78c74b2ade1a1c2fead27dd0a96" }
 
 sqlx = { version = "0.6.2", features = [
     "macros",

@@ -131,7 +131,7 @@ rust-s3 = "0.34.0"
 [dev-dependencies]
 function_name = "0.3.0"
 serial_test = "2.0.0"
-light-merkle-tree-reference = { git = "https://github.com/Lightprotocol/light-protocol", rev = "368f9f08272db78c74b2ade1a1c2fead27dd0a96" }
+light-merkle-tree-reference = { git = "https://github.com/Lightprotocol/light-protocol", rev = "a16015358aee65da5f67e4ae73197df5e75495d9" }
 
 [profile.dev]
 # Do not produce debug info for ~40% faster incremental compilation.
src/api/method/get_validity_proof/prover/gnark.rs

Lines changed: 35 additions & 92 deletions
@@ -1,103 +1,46 @@
+use crate::api::error::PhotonApiError;
 use crate::api::method::get_validity_proof::prover::structs::{CompressedProof, ProofABC};
 use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress, Validate};
+use solana_program::alt_bn128::compression::prelude::{
+    alt_bn128_g1_compress, alt_bn128_g2_compress, convert_endianness,
+};
 use std::ops::Neg;
-use crate::api::error::PhotonApiError;
 
 type G1 = ark_bn254::g1::G1Affine;
 
-/// Changes the endianness of the given slice of bytes by reversing the order of every 32-byte chunk.
-///
-/// # Arguments
-///
-/// * `bytes` - A reference to a slice of bytes whose endianness will be changed.
-///
-/// # Returns
-///
-/// A `Vec<u8>` containing the bytes with their order reversed in chunks of 32 bytes. If the number
-/// of bytes in the slice is not a multiple of 32, the remaining bytes at the end will also be reversed.
-///
-/// # Examples
-///
-/// ```
-/// let input = vec![0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
-///                  0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10,
-///                  0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18,
-///                  0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F, 0x20];
-/// let output = change_endianness(&input);
-/// assert_eq!(output, vec![0x20, 0x1F, 0x1E, 0x1D, 0x1C, 0x1B, 0x1A, 0x19,
-///                         0x18, 0x17, 0x16, 0x15, 0x14, 0x13, 0x12, 0x11,
-///                         0x10, 0x0F, 0x0E, 0x0D, 0x0C, 0x0B, 0x0A, 0x09,
-///                         0x08, 0x07, 0x06, 0x05, 0x04, 0x03, 0x02, 0x01]);
-///
-/// let input = vec![0x01, 0x02, 0x03];
-/// let output = change_endianness(&input);
-/// assert_eq!(output, vec![0x03, 0x02, 0x01]);
-/// ```
-fn change_endianness(bytes: &[u8]) -> Vec<u8> {
-    let mut vec = Vec::new();
-    for b in bytes.chunks(32) {
-        for byte in b.iter().rev() {
-            vec.push(*byte);
-        }
-    }
-    vec
-}
-
-/// Negates the `a` component of the given proof and compresses the proof into a `CompressedProof`.
-///
-/// # Arguments
-///
-/// * `proof` - A `ProofABC` structure containing three components: `a`, `b`, and `c`.
-///
-///   - `a` is negated and serialized in big-endian format.
-///   - `b` and `c` are trimmed and included as-is in the compressed form.
-///
-/// # Returns
-///
-/// A `CompressedProof` containing:
-///
-/// * The negated and serialized `a` component as a vector of bytes.
-/// * The first 64 bytes of the `b` component.
-/// * The first 32 bytes of the `c` component.
-///
-/// # Panics
-///
-/// This function will panic if:
-///
-/// * The deserialization or serialization of the `G1` point fails.
-/// * The `proof.a` slice length is insufficient to produce a valid G1 when adding padding for deserialization.
-///
-/// # Note
-///
-/// The function assumes that the `ProofABC` structure contains its `a`, `b`, and `c` components in valid formats
-/// necessary for transformation and compression.
-pub fn negate_proof(proof: ProofABC) -> Result<CompressedProof, PhotonApiError> {
-    let mut proof_a_neg = [0u8; 65];
-
-    let proof_a: G1 = G1::deserialize_with_mode(
-        &*[&change_endianness(&proof.a), &[0u8][..]].concat(),
-        Compress::No,
-        Validate::No,
-    ).map_err(|e| PhotonApiError::UnexpectedError(format!("Failed to deserialize G1 point: {}", e)))?;
+pub fn negate_g1(g1_be: &[u8; 64]) -> Result<[u8; 64], PhotonApiError> {
+    let g1_le = convert_endianness::<32, 64>(g1_be);
+    let g1: G1 = G1::deserialize_with_mode(g1_le.as_slice(), Compress::No, Validate::No).unwrap();
 
-    proof_a
-        .neg()
+    let g1_neg = g1.neg();
+    let mut g1_neg_be = [0u8; 64];
+    g1_neg
         .x
-        .serialize_with_mode(&mut proof_a_neg[..32], Compress::No)
-        .map_err(|e| PhotonApiError::UnexpectedError(format!("Failed to serialize x coordinate: {}", e)))?;
-
-    proof_a
-        .neg()
+        .serialize_with_mode(&mut g1_neg_be[..32], Compress::No)
+        .map_err(|_| {
+            PhotonApiError::UnexpectedError("Failed to serialize G1 x coordinate".to_string())
+        })?;
+    g1_neg
         .y
-        .serialize_with_mode(&mut proof_a_neg[32..], Compress::No)
-        .map_err(|e| PhotonApiError::UnexpectedError(format!("Failed to serialize y coordinate: {}", e)))?;
-
-    let compressed_proof = CompressedProof {
-        a: proof_a_neg[0..32].to_vec(),
-        b: proof.b[0..64].to_vec(),
-        c: proof.c[0..32].to_vec(),
-    };
-
-    Ok(compressed_proof)
+        .serialize_with_mode(&mut g1_neg_be[32..], Compress::No)
+        .map_err(|_| {
+            PhotonApiError::UnexpectedError("Failed to serialize G1 y coordinate".to_string())
+        })?;
+    let g1_neg_be: [u8; 64] = convert_endianness::<32, 64>(&g1_neg_be);
+    Ok(g1_neg_be)
 }
 
+pub fn compress_proof(proof: &ProofABC) -> Result<CompressedProof, PhotonApiError> {
+    let proof_a = alt_bn128_g1_compress(&proof.a)
+        .map_err(|_| PhotonApiError::UnexpectedError("Failed to compress G1 proof".to_string()))?;
+    let proof_b = alt_bn128_g2_compress(&proof.b)
+        .map_err(|_| PhotonApiError::UnexpectedError("Failed to compress G2 proof".to_string()))?;
+    let proof_c = alt_bn128_g1_compress(&proof.c)
+        .map_err(|_| PhotonApiError::UnexpectedError("Failed to compress G1 proof".to_string()))?;
+
+    Ok(CompressedProof {
+        a: Vec::from(proof_a),
+        b: Vec::from(proof_b),
+        c: Vec::from(proof_c),
+    })
+}
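Taken together, the old negate_proof is split into two single-purpose helpers: negate_g1 flips proof component `a` (converting the big-endian gnark encoding to little-endian for arkworks, negating the point, and converting back), while compress_proof defers point compression to solana_program's alt_bn128 syscall wrappers, which shrink the G1 points `a` and `c` from 64 to 32 bytes and the G2 point `b` from 128 to 64 bytes. A minimal sketch of how the pieces compose downstream; the wrapper function and the `json` argument are hypothetical, while proof_from_json_struct (see helpers.rs below), compress_proof, and the structs are from this commit:

fn prover_response_to_compressed_proof(
    json: GnarkProofJson,
) -> Result<CompressedProof, PhotonApiError> {
    // Parses the hex-encoded gnark proof; `a` is negated internally via negate_g1.
    let proof: ProofABC = proof_from_json_struct(json)?;
    // Compresses a/c (G1: 64 -> 32 bytes) and b (G2: 128 -> 64 bytes).
    compress_proof(&proof)
}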

src/api/method/get_validity_proof/prover/helpers.rs

Lines changed: 91 additions & 37 deletions
@@ -1,4 +1,6 @@
+use crate::api::error::PhotonApiError;
 use crate::api::method::get_multiple_new_address_proofs::MerkleContextWithNewAddressProof;
+use crate::api::method::get_validity_proof::prover::gnark::negate_g1;
 use crate::api::method::get_validity_proof::prover::structs::{
     GnarkProofJson, InclusionHexInputsForProver, NonInclusionHexInputsForProver, ProofABC,
 };

@@ -64,73 +66,125 @@ pub fn hash_to_hex(hash: &Hash) -> String {
 fn pubkey_to_hex(pubkey: &SerializablePubkey) -> String {
     let bytes = pubkey.to_bytes_vec();
     let hex = hex::encode(bytes);
+
     format!("0x{}", hex)
 }
 
-fn deserialize_hex_string_to_bytes(hex_str: &str) -> Vec<u8> {
+pub fn deserialize_hex_string_to_bytes(hex_str: &str) -> Result<Vec<u8>, PhotonApiError> {
     let hex_str = if hex_str.starts_with("0x") {
         &hex_str[2..]
     } else {
         hex_str
     };
-
-    // Left pad with 0s if the length is not 64
     let hex_str = format!("{:0>64}", hex_str);
 
-    hex::decode(&hex_str).expect("Failed to decode hex string")
+    hex::decode(hex_str)
+        .map_err(|_| PhotonApiError::UnexpectedError("Failed to decode hex string".to_string()))
 }
 
-pub fn proof_from_json_struct(json: GnarkProofJson) -> ProofABC {
-    let proof_ax = deserialize_hex_string_to_bytes(&json.ar[0]);
-    let proof_ay = deserialize_hex_string_to_bytes(&json.ar[1]);
-    let proof_a = [proof_ax, proof_ay].concat();
+pub fn proof_from_json_struct(json: GnarkProofJson) -> Result<ProofABC, PhotonApiError> {
+    let proof_a_x = deserialize_hex_string_to_bytes(&json.ar[0])?;
+    let proof_a_y = deserialize_hex_string_to_bytes(&json.ar[1])?;
+    let proof_a: [u8; 64] = [proof_a_x, proof_a_y].concat().try_into().map_err(|_| {
+        PhotonApiError::UnexpectedError("Failed to convert proof_a to [u8; 64]".to_string())
+    })?;
+    let proof_a = negate_g1(&proof_a)?;
 
-    let proof_bx0 = deserialize_hex_string_to_bytes(&json.bs[0][0]);
-    let proof_bx1 = deserialize_hex_string_to_bytes(&json.bs[0][1]);
-    let proof_by0 = deserialize_hex_string_to_bytes(&json.bs[1][0]);
-    let proof_by1 = deserialize_hex_string_to_bytes(&json.bs[1][1]);
-    let proof_b = [proof_bx0, proof_bx1, proof_by0, proof_by1].concat();
+    let proof_b_x_0 = deserialize_hex_string_to_bytes(&json.bs[0][0])?;
+    let proof_b_x_1 = deserialize_hex_string_to_bytes(&json.bs[0][1])?;
+    let proof_b_y_0 = deserialize_hex_string_to_bytes(&json.bs[1][0])?;
+    let proof_b_y_1 = deserialize_hex_string_to_bytes(&json.bs[1][1])?;
+    let proof_b: [u8; 128] = [proof_b_x_0, proof_b_x_1, proof_b_y_0, proof_b_y_1]
+        .concat()
+        .try_into()
+        .map_err(|_| {
+            PhotonApiError::UnexpectedError("Failed to convert proof_b to [u8; 128]".to_string())
+        })?;
 
-    let proof_cx = deserialize_hex_string_to_bytes(&json.krs[0]);
-    let proof_cy = deserialize_hex_string_to_bytes(&json.krs[1]);
-    let proof_c = [proof_cx, proof_cy].concat();
+    let proof_c_x = deserialize_hex_string_to_bytes(&json.krs[0])?;
+    let proof_c_y = deserialize_hex_string_to_bytes(&json.krs[1])?;
+    let proof_c: [u8; 64] = [proof_c_x, proof_c_y].concat().try_into().map_err(|_| {
+        PhotonApiError::UnexpectedError("Failed to convert proof_c to [u8; 64]".to_string())
+    })?;
 
-    ProofABC {
+    Ok(ProofABC {
         a: proof_a,
         b: proof_b,
         c: proof_c,
-    }
+    })
 }
 
 pub fn get_public_input_hash(
     account_proofs: &[MerkleProofWithContext],
     new_address_proofs: &[MerkleContextWithNewAddressProof],
-) -> [u8; 32] {
-    let account_hashes: Vec<[u8; 32]> = account_proofs
+) -> Result<[u8; 32], PhotonApiError> {
+    let account_hashes: Result<Vec<[u8; 32]>, PhotonApiError> = account_proofs
         .iter()
-        .map(|x| x.hash.to_vec().clone().try_into().unwrap())
-        .collect::<Vec<[u8; 32]>>();
-    let account_roots: Vec<[u8; 32]> = account_proofs
+        .map(|x| {
+            x.hash.to_vec().try_into().map_err(|_| {
+                PhotonApiError::UnexpectedError("Failed to convert hash to [u8; 32]".to_string())
+            })
+        })
+        .collect();
+    let account_hashes = account_hashes?;
+
+    let account_roots: Result<Vec<[u8; 32]>, PhotonApiError> = account_proofs
         .iter()
-        .map(|x| x.root.to_vec().clone().try_into().unwrap())
-        .collect::<Vec<[u8; 32]>>();
-    let inclusion_hash_chain: [u8; 32] =
-        create_two_inputs_hash_chain(&account_roots, &account_hashes).unwrap();
-    let new_address_hashes: Vec<[u8; 32]> = new_address_proofs
+        .map(|x| {
+            x.root.to_vec().try_into().map_err(|_| {
+                PhotonApiError::UnexpectedError("Failed to convert root to [u8; 32]".to_string())
+            })
+        })
+        .collect();
+    let account_roots = account_roots?;
+
+    let inclusion_hash_chain = create_two_inputs_hash_chain(&account_roots, &account_hashes)
+        .map_err(|e| {
+            PhotonApiError::UnexpectedError(format!("Failed to create hash chain: {}", e))
+        })?;
+
+    let new_address_hashes: Result<Vec<[u8; 32]>, PhotonApiError> = new_address_proofs
         .iter()
-        .map(|x| x.address.try_to_vec().unwrap().clone().try_into().unwrap())
-        .collect::<Vec<[u8; 32]>>();
-    let new_address_roots: Vec<[u8; 32]> = new_address_proofs
+        .map(|x| {
+            x.address
+                .try_to_vec()
+                .map_err(|e| {
+                    PhotonApiError::UnexpectedError(format!("Failed to serialize address: {}", e))
+                })?
+                .try_into()
+                .map_err(|_| {
+                    PhotonApiError::UnexpectedError(
+                        "Failed to convert address bytes to [u8; 32]".to_string(),
+                    )
+                })
+        })
+        .collect();
+    let new_address_hashes = new_address_hashes?;
+
+    let new_address_roots: Result<Vec<[u8; 32]>, PhotonApiError> = new_address_proofs
         .iter()
-        .map(|x| x.root.to_vec().clone().try_into().unwrap())
-        .collect::<Vec<[u8; 32]>>();
+        .map(|x| {
+            x.root.to_vec().try_into().map_err(|_| {
+                PhotonApiError::UnexpectedError(
+                    "Failed to convert new address root to [u8; 32]".to_string(),
+                )
+            })
+        })
+        .collect();
+    let new_address_roots = new_address_roots?;
+
     let non_inclusion_hash_chain =
-        create_two_inputs_hash_chain(&new_address_roots, &new_address_hashes).unwrap();
+        create_two_inputs_hash_chain(&new_address_roots, &new_address_hashes).map_err(|e| {
+            PhotonApiError::UnexpectedError(format!("Failed to create hash chain: {}", e))
+        })?;
+
     if non_inclusion_hash_chain != [0u8; 32] {
-        non_inclusion_hash_chain
+        Ok(non_inclusion_hash_chain)
     } else if inclusion_hash_chain != [0u8; 32] {
-        inclusion_hash_chain
+        Ok(inclusion_hash_chain)
    } else {
-        create_two_inputs_hash_chain(&[inclusion_hash_chain], &[non_inclusion_hash_chain]).unwrap()
+        create_two_inputs_hash_chain(&[inclusion_hash_chain], &[non_inclusion_hash_chain]).map_err(
+            |e| PhotonApiError::UnexpectedError(format!("Failed to create hash chain: {}", e)),
+        )
     }
 }
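Two behaviors worth noting in the rewritten helpers. First, deserialize_hex_string_to_bytes left-pads its input to 64 hex characters, so every field element decodes to exactly 32 bytes regardless of leading zeros. Second, get_public_input_hash now threads every conversion failure out as a PhotonApiError instead of panicking. A small illustrative check of the padding behavior (a sketch, not a test from this commit):

#[cfg(test)]
mod padding_sketch {
    use super::deserialize_hex_string_to_bytes;

    #[test]
    fn pads_short_hex_to_32_bytes() {
        // "0x1" is left-padded to 64 nibbles before decoding, so it
        // becomes 31 zero bytes followed by 0x01.
        let bytes = deserialize_hex_string_to_bytes("0x1").unwrap();
        assert_eq!(bytes.len(), 32);
        assert_eq!(bytes[31], 1);
        assert!(bytes[..31].iter().all(|&b| b == 0));
    }
}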
