- Compressed and regular Solana accounts share the same functionality and are fully composable.
- The account state is hashed with Poseidon and stored as a commitment in the state Merkle tree.
- For Solana PDA-like behavior, your compressed account has an address as a persistent identifier.
- To verify ownership, users generate a proof instead of reading account data directly.
Find a full program example at the end, with a Rust test, that creates compressed accounts and verifies them with Groth16 proofs.
Implementation guide
- Program
- Client
- Circuit Example
- Get started with AI
1
Dependencies
Add dependencies to your program.Report incorrect code
Copy
Ask AI
[dependencies]
anchor-lang = "0.31.1"
borsh = "0.10.4"
light-sdk = { version = "0.17.1", features = ["anchor", "poseidon", "merkle-tree", "v2"] }
light-hasher = "5.0.0"
light-sdk-types = { version = "0.17.1", features = ["v2"] }
light-sdk with the poseidon feature enables ZK-friendly hashing. light-hasher provides Poseidon hash functions.
2
Constants
Set program address, CPI signer, and include the verifying key from your circuit setup.Report incorrect code
Copy
Ask AI
// Program ID of this example program.
declare_id!("MPzkYomvQc4VQPwMr6bFduyWRQZVCh5CofgDC4dFqJp");
// CPI signer derived from the program ID; authorizes CPIs into the Light system program.
pub const LIGHT_CPI_SIGNER: CpiSigner =
derive_light_cpi_signer!("MPzkYomvQc4VQPwMr6bFduyWRQZVCh5CofgDC4dFqJp");
// Seed prefix used when deriving compressed-account addresses.
pub const ZK_ACCOUNT: &[u8] = b"zk_account";
// Generated from circom trusted setup (.zkey file)
pub mod verifying_key;
3
Compressed account
Report incorrect code
Copy
Ask AI
// Compressed account state. LightHasher derives Poseidon hashing of the fields;
// LightDiscriminator derives the account-type discriminator.
#[derive(Clone, Debug, Default, BorshSerialize, BorshDeserialize, LightDiscriminator, LightHasher)]
pub struct ZkAccount {
// Poseidon hash of the private data (computed off-chain by the client).
pub data_hash: ZkDataHash,
}
- Derive
LightHasher for Poseidon hashing (instead of the default SHA-256).
ToByteArray:Report incorrect code
Copy
Ask AI
// Newtype wrapper so the hash bytes can implement ToByteArray.
#[derive(Clone, Debug, Default, BorshSerialize, BorshDeserialize)]
pub struct ZkDataHash(pub [u8; 32]);
// ToByteArray lets the LightHasher derive treat this field as a single
// 32-byte Poseidon input.
impl ToByteArray for ZkDataHash {
// Contributes exactly one field element to the account hash.
const NUM_FIELDS: usize = 1;
fn to_byte_array(&self) -> std::result::Result<[u8; 32], HasherError> {
// Bytes are used as-is; assumes the value fits the BN254 field
// (the client test zeroes the first byte for this) — TODO confirm.
Ok(self.0)
}
}
4
Instruction data
Define the instruction parameters.Report incorrect code
Copy
Ask AI
pub fn create_account<'info>(
ctx: Context<'_, '_, '_, 'info, CreateAccountAccounts<'info>>,
proof: ValidityProof,
address_tree_info: PackedAddressTreeInfo,
output_state_tree_index: u8,
data_hash: [u8; 32],
) -> Result<()>
proof: Proves the address does not exist yet. address_tree_info: References the address tree for deriving the address. output_state_tree_index: References the state tree for storing the account hash. data_hash: The Poseidon hash of the data to store.
5
Derive address
Derive the address as a persistent unique identifier.Report incorrect code
Copy
Ask AI
// Resolve the address-tree pubkey referenced by the packed tree info
// (light_cpi_accounts is the CpiAccounts built in step 8).
let address_tree_pubkey = address_tree_info
.get_tree_pubkey(&light_cpi_accounts)
.map_err(|_| ProgramError::InvalidAccountData)?;
// Derive the persistent address from (prefix, data_hash) under this program;
// address_seed is later passed to the Light system program CPI.
let (address, address_seed) = derive_address(
&[ZK_ACCOUNT, &data_hash],
&address_tree_pubkey,
&crate::ID,
);
6
Address tree check
Ensure global uniqueness by verifying the address tree pubkey.Report incorrect code
Copy
Ask AI
// Pin the address tree to the canonical v2 tree so derived addresses are
// globally unique (same seeds in a different tree would yield a different address).
if address_tree_pubkey.to_bytes() != light_sdk::constants::ADDRESS_TREE_V2 {
msg!("Invalid address tree");
return Err(ProgramError::InvalidAccountData.into());
}
7
Initialize compressed account
Report incorrect code
Copy
Ask AI
// Initialize a new Poseidon-hashed compressed account owned by this program,
// at the derived address, in the selected output state tree.
let mut account = LightAccountPoseidon::<ZkAccount>::new_init(
&crate::ID,
Some(address),
output_state_tree_index,
);
// Store the client-supplied Poseidon data hash.
account.data_hash = ZkDataHash(data_hash);
8
Light System Program CPI
Report incorrect code
Copy
Ask AI
// Collect the Light system program accounts from remaining_accounts.
let light_cpi_accounts = CpiAccounts::new(
ctx.accounts.signer.as_ref(),
ctx.remaining_accounts,
crate::LIGHT_CPI_SIGNER,
);
// CPI into the Light system program: prove the address is new (validity proof),
// create it, and append the account hash to the output state tree.
LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof)
.with_light_account_poseidon(account)?
.with_new_addresses(&[address_tree_info.into_new_address_params_packed(address_seed)])
.invoke(light_cpi_accounts)?;
This is an example circuit to include compressed accounts.
Report incorrect code
Copy
Ask AI
pragma circom 2.0.0;
include "../node_modules/circomlib/circuits/poseidon.circom";
include "../node_modules/circomlib/circuits/bitify.circom";
include "../node_modules/circomlib/circuits/switcher.circom";
// Merkle Proof Verification Template
// Verifies that a leaf is in a Merkle tree with a given root
// Recomputes the Merkle root from a leaf, its index, and the sibling path.
template MerkleProof(levels) {
signal input leaf;
signal input pathElements[levels];
signal input leafIndex;
signal output root;
component switcher[levels];
component hasher[levels];
// Decompose the leaf index into bits; bit i selects left/right at level i.
component indexBits = Num2Bits(levels);
indexBits.in <== leafIndex;
for (var i = 0; i < levels; i++) {
switcher[i] = Switcher();
// Level 0 hashes the leaf itself; higher levels hash the previous digest.
// (i is a compile-time loop var, so the ternary is resolved at compile time.)
switcher[i].L <== i == 0 ? leaf : hasher[i - 1].out;
switcher[i].R <== pathElements[i];
// sel = 1 swaps L and R, placing the running digest on the correct side.
switcher[i].sel <== indexBits.out[i];
hasher[i] = Poseidon(2);
hasher[i].inputs[0] <== switcher[i].outL;
hasher[i].inputs[1] <== switcher[i].outR;
}
root <== hasher[levels - 1].out;
}
// Recomputes the compressed-account leaf hash:
// Poseidon(owner, leaf_index, merkle_tree, address, discriminator + 2^65, data_hash).
template CompressedAccountHash() {
signal input owner_hashed;
signal input leaf_index;
signal input merkle_tree_hashed;
signal input address;
signal input discriminator;
signal input data_hash;
signal output hash;
component poseidon = Poseidon(6);
poseidon.inputs[0] <== owner_hashed;
poseidon.inputs[1] <== leaf_index;
poseidon.inputs[2] <== merkle_tree_hashed;
poseidon.inputs[3] <== address;
// 36893488147419103232 = 2^65: domain tag added to the raw discriminator.
// The Rust client mirrors this by setting byte 23 of the 32-byte BE value to 2.
poseidon.inputs[4] <== discriminator + 36893488147419103232;
poseidon.inputs[5] <== data_hash;
hash <== poseidon.out;
}
// Proves that a compressed account with the given fields is included in a
// state Merkle tree whose root equals `expectedRoot`.
// FIX: renamed from `CompressedCompressedAccountMerkleProof` (doubled-word typo);
// the `component main = CompressedAccountMerkleProof(26)` instantiation below
// referenced a template that did not exist, so the circuit failed to compile.
template CompressedAccountMerkleProof(levels) {
signal input owner_hashed;
signal input merkle_tree_hashed;
signal input discriminator;
signal input data_hash;
signal input expectedRoot;
// Position of the leaf in the tree; drives the Merkle path directions.
signal input leaf_index;
// Leaf index as it was hashed into the account leaf (supplied separately
// by the client; see the Rust client's account_leaf_index input).
signal input account_leaf_index;
signal input address;
signal input pathElements[levels];
// Recompute the account leaf hash from its fields.
component accountHasher = CompressedAccountHash();
accountHasher.owner_hashed <== owner_hashed;
accountHasher.leaf_index <== account_leaf_index;
accountHasher.address <== address;
accountHasher.merkle_tree_hashed <== merkle_tree_hashed;
accountHasher.discriminator <== discriminator;
accountHasher.data_hash <== data_hash;
// Verify the recomputed leaf against the expected root.
component merkleProof = MerkleProof(levels);
merkleProof.leaf <== accountHasher.hash;
merkleProof.pathElements <== pathElements;
merkleProof.leafIndex <== leaf_index;
merkleProof.root === expectedRoot;
}
// Depth-26 instance. Only the listed signals are public; the Merkle path
// and both leaf-index inputs remain private witness data.
component main {
public [owner_hashed, merkle_tree_hashed, discriminator, data_hash, expectedRoot]
} = CompressedAccountMerkleProof(26);
For AI assistance with your ZK App, copy this prompt and add your design ideas:
Report incorrect code
Copy
Ask AI
---
argument-hint: <add_your_app_description>
description: Design a ZK App POC with rent-free nullifiers, compressed accounts, and Groth16 circuits
allowed-tools: [Bash, Read, Glob, Grep, Task, WebFetch]
---
Design a Solana program with tests that uses rent-free nullifiers, compressed accounts, and Groth16 circuits.
## Initial App Design
<ADD YOUR IDEA AND DESIGN HERE>
## Goal
Produce a **fully working POC** that builds and tests pass.
## Available commands
Via Bash tool:
- `cargo build-sbf`, `cargo test-sbf`, `cargo fmt`, `cargo clippy`
- `anchor build`, `anchor test`, `anchor deploy`
- `circom`, `snarkjs`, `solana`, `light`
## Documentation
- Nullifiers: https://zkcompression.com/zk/nullifiers
- Compressed Accounts with Poseidon Hashes: https://zkcompression.com/zk/compressed-account-zk
## Reference repos
program-examples/zk/zk-id/
├── programs/zk-id/src/
│ ├── lib.rs # create_issuer, add_credential, zk_verify_credential
│ └── verifying_key.rs # Groth16 key from circom trusted setup
├── circuits/
│ └── compressed_account_merkle_proof.circom # Merkle proof + nullifier circuit
└── tests/
└── zk-id.ts # Proof generation + on-chain verification
## Workflow
### Phase 1: Design application
**1.1 Define private state**
What data stays private? (credentials, balances, votes, etc.)
**1.2 Define public inputs**
What does the circuit prove publicly? (nullifier, merkle root, commitments)
**1.3 Define nullifier scheme**
nullifier = Poseidon(context, secret)
### Phase 2: Index reference implementation
grep -r "LightAccountPoseidon" program-examples/zk/
grep -r "Groth16Verifier" program-examples/zk/
grep -r "derive_address.*nullifier" program-examples/zk/
grep -r "read_state_merkle_tree_root" program-examples/zk/
Read matching files to understand patterns.
### Phase 3: Circuit development
**3.1 Write circom circuit**
Based on compressed_account_merkle_proof.circom:
- Merkle proof verification
- Nullifier computation
- Public input constraints
**3.2 Trusted setup**
circom circuit.circom --r1cs --wasm --sym
snarkjs groth16 setup circuit.r1cs pot_final.ptau circuit_0000.zkey
snarkjs zkey export verificationkey circuit_final.zkey verification_key.json
snarkjs zkey export solidityverifier circuit_final.zkey # adapt for Solana
**3.3 Add sensitive files to .gitignore**
*.zkey
*.ptau
*.r1cs
*_js/
### Phase 4: Program implementation
| Pattern | Function | Reference |
|---------|----------|-----------|
| Poseidon state | `LightAccountPoseidon::new_init()` | zk-id/lib.rs |
| Nullifier address | `derive_address([prefix, nullifier, ctx], tree, program)` | zk-id/lib.rs |
| Read root only | `read_state_merkle_tree_root()` | zk-id/lib.rs |
| Groth16 verify | `Groth16Verifier::new().verify()` | zk-id/lib.rs |
**Dependencies:**
[dependencies]
anchor-lang = "0.31.1"
light-sdk = { version = "0.17.1", features = ["anchor", "poseidon", "merkle-tree", "v2"] }
light-hasher = "5.0.0"
light-sdk-types = { version = "0.17.1", features = ["v2"] }
groth16-solana = { git = "https://github.com/Lightprotocol/groth16-solana", rev = "66c0dc87" }
[dev-dependencies]
light-program-test = "0.17.1"
light-client = "0.17.1"
### Phase 5: Build and test loop
**Required commands (no shortcuts):**
For Anchor programs: `anchor build && anchor test`
For Native programs: `cargo build-sbf && cargo test-sbf`
**NO shortcuts allowed:**
- Do NOT use `cargo build` (must use `cargo build-sbf`)
- Do NOT use `cargo test` (must use `cargo test-sbf`)
- Do NOT skip SBF compilation
- Tests MUST run against real BPF bytecode
**On failure:** Spawn debugger agent with error context.
**Loop rules:**
1. Each debugger gets fresh context + previous debug reports
2. Each attempt tries something DIFFERENT
3. **NEVER GIVE UP** - keep spawning until fixed
Do NOT proceed until all tests pass.
### Phase 6: Cleanup (only after tests pass)
rm -rf target/
## DeepWiki fallback
If no matching pattern in reference repos:
mcp__deepwiki__ask_question("Lightprotocol/light-protocol", "How to {operation}?")
Full Code Example
A minimal Solana program that creates compressed accounts and verifies them with Groth16 proofs. See the full implementation at program-examples/zk/zk-merkle-proof.
- Program
- Rust client
Report incorrect code
Copy
Ask AI
#![allow(unexpected_cfgs)]
#![allow(deprecated)]
use anchor_lang::prelude::*;
use borsh::{BorshDeserialize, BorshSerialize};
use groth16_solana::groth16::Groth16Verifier;
use light_hasher::to_byte_array::ToByteArray;
use light_hasher::HasherError;
use light_sdk::account::poseidon::LightAccount as LightAccountPoseidon;
use light_sdk::address::v2::derive_address;
use light_sdk::cpi::v1::CpiAccounts;
use light_sdk::{
cpi::{v1::LightSystemProgramCpi, InvokeLightSystemProgram, LightCpiInstruction},
derive_light_cpi_signer,
instruction::{CompressedProof, PackedAddressTreeInfo, ValidityProof},
merkle_tree::v1::read_state_merkle_tree_root,
LightDiscriminator, LightHasher,
};
use light_sdk_types::CpiSigner;
// Program ID of this example program.
declare_id!("MPzkYomvQc4VQPwMr6bFduyWRQZVCh5CofgDC4dFqJp");
// CPI signer derived from the program ID; authorizes CPIs into the Light system program.
pub const LIGHT_CPI_SIGNER: CpiSigner =
derive_light_cpi_signer!("MPzkYomvQc4VQPwMr6bFduyWRQZVCh5CofgDC4dFqJp");
// Seed prefix used when deriving compressed-account addresses.
pub const ZK_ACCOUNT: &[u8] = b"zk_account";
// Groth16 verifying key generated from the circom trusted setup (.zkey file).
pub mod verifying_key;
// Anchor program: create a compressed account, then verify its inclusion in a
// state Merkle tree with a Groth16 proof generated off-chain.
#[program]
pub mod zk_merkle_proof {
use groth16_solana::decompression::{decompress_g1, decompress_g2};
use light_hasher::hash_to_field_size::hashv_to_bn254_field_size_be_const_array;
use super::*;
// Creates a compressed account whose address is derived from the Poseidon
// data hash; `proof` attests that the address does not exist yet.
// NOTE(review): unlike step 6 of the guide above, this version does not pin
// the address tree pubkey to ADDRESS_TREE_V2 — confirm that is intentional.
pub fn create_account<'info>(
ctx: Context<'_, '_, '_, 'info, CreateAccountAccounts<'info>>,
proof: ValidityProof,
address_tree_info: PackedAddressTreeInfo,
output_state_tree_index: u8,
data_hash: [u8; 32],
) -> Result<()> {
// Light system program accounts are passed via remaining_accounts.
let light_cpi_accounts = CpiAccounts::new(
ctx.accounts.signer.as_ref(),
ctx.remaining_accounts,
crate::LIGHT_CPI_SIGNER,
);
let address_tree_pubkey = address_tree_info
.get_tree_pubkey(&light_cpi_accounts)
.map_err(|_| ProgramError::InvalidAccountData)?;
// Persistent unique address derived from (prefix, data_hash).
let (address, address_seed) = derive_address(
&[ZK_ACCOUNT, &data_hash],
&address_tree_pubkey,
&crate::ID,
);
// New compressed account hashed with Poseidon (not the default SHA-256).
let mut account = LightAccountPoseidon::<ZkAccount>::new_init(
&crate::ID,
Some(address),
output_state_tree_index,
);
account.data_hash = DataHash(data_hash);
// CPI: create the address and append the account hash to the state tree.
LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof)
.with_light_account_poseidon(account)?
.with_new_addresses(&[address_tree_info.into_new_address_params_packed(address_seed)])
.invoke(light_cpi_accounts)?;
Ok(())
}
// Verifies a Groth16 proof that a compressed account with `data_hash` exists
// under the state-tree root stored at `input_root_index`.
pub fn verify_account<'info>(
ctx: Context<'_, '_, '_, 'info, VerifyAccountAccounts<'info>>,
input_root_index: u16,
zk_proof: CompressedProof,
data_hash: [u8; 32],
) -> Result<()> {
// Read only the root from the tree account; no full deserialization.
let expected_root = read_state_merkle_tree_root(
&ctx.accounts.state_merkle_tree.to_account_info(),
input_root_index,
)
.map_err(ProgramError::from)?;
// Public inputs must match the circuit: pubkeys hashed to BN254 field size.
let owner_hashed =
hashv_to_bn254_field_size_be_const_array::<2>(&[&crate::ID.to_bytes()]).unwrap();
let merkle_tree_pubkey = ctx.accounts.state_merkle_tree.key();
let merkle_tree_hashed =
hashv_to_bn254_field_size_be_const_array::<2>(&[&merkle_tree_pubkey.to_bytes()])
.unwrap();
// Raw 8-byte discriminator in the low bytes; the circuit adds the 2^65
// domain tag in-circuit, so no domain byte is set here.
let mut discriminator = [0u8; 32];
discriminator[24..].copy_from_slice(ZkAccount::LIGHT_DISCRIMINATOR_SLICE);
let public_inputs: [[u8; 32]; 5] = [
owner_hashed,
merkle_tree_hashed,
discriminator,
data_hash,
expected_root,
];
// Proof points arrive compressed (saves transaction bytes); decompress
// before verification, surfacing failures as custom program errors.
let proof_a = decompress_g1(&zk_proof.a).map_err(|e| {
let code: u32 = e.into();
Error::from(ProgramError::Custom(code))
})?;
let proof_b = decompress_g2(&zk_proof.b).map_err(|e| {
let code: u32 = e.into();
Error::from(ProgramError::Custom(code))
})?;
let proof_c = decompress_g1(&zk_proof.c).map_err(|e| {
let code: u32 = e.into();
Error::from(ProgramError::Custom(code))
})?;
let mut verifier = Groth16Verifier::new(
&proof_a,
&proof_b,
&proof_c,
&public_inputs,
&crate::verifying_key::VERIFYINGKEY,
)
.map_err(|e| {
let code: u32 = e.into();
Error::from(ProgramError::Custom(code))
})?;
// Pairing check; any error means the proof is invalid for these inputs.
verifier.verify().map_err(|e| {
let code: u32 = e.into();
Error::from(ProgramError::Custom(code))
})?;
Ok(())
}
}
// Accounts for create_account; Light system accounts arrive via remaining_accounts.
#[derive(Accounts)]
pub struct CreateAccountAccounts<'info> {
#[account(mut)]
pub signer: Signer<'info>,
}
// Accounts for verify_account.
#[derive(Accounts)]
pub struct VerifyAccountAccounts<'info> {
#[account(mut)]
pub signer: Signer<'info>,
/// CHECK: validated by read_state_merkle_tree_root
pub state_merkle_tree: UncheckedAccount<'info>,
}
// Compressed account state; LightHasher derives Poseidon hashing of the fields.
#[derive(Clone, Debug, Default, BorshSerialize, BorshDeserialize, LightDiscriminator, LightHasher)]
pub struct ZkAccount {
// Poseidon hash of the private data (computed off-chain by the client).
pub data_hash: DataHash,
}
// Newtype wrapper so the hash bytes can implement ToByteArray.
#[derive(Clone, Debug, Default, BorshSerialize, BorshDeserialize)]
pub struct DataHash(pub [u8; 32]);
// ToByteArray lets the LightHasher derive treat this field as a single
// 32-byte Poseidon input.
impl ToByteArray for DataHash {
// Contributes exactly one field element to the account hash.
const NUM_FIELDS: usize = 1;
fn to_byte_array(&self) -> std::result::Result<[u8; 32], HasherError> {
// Bytes are used as-is; assumes the value fits the BN254 field
// (the client test zeroes the first byte for this) — TODO confirm.
Ok(self.0)
}
}
Report incorrect code
Copy
Ask AI
use anchor_lang::{InstructionData, ToAccountMetas};
use circom_prover::{prover::ProofLib, witness::WitnessFn, CircomProver};
use groth16_solana::proof_parser::circom_prover::convert_proof;
use light_hasher::{hash_to_field_size::hash_to_bn254_field_size_be, Hasher, Poseidon};
use light_program_test::{
program_test::LightProgramTest, AddressWithTree, Indexer, ProgramTestConfig, Rpc, RpcError,
};
use light_sdk::{
address::v2::derive_address,
instruction::{PackedAccounts, SystemAccountMetaConfig},
};
use num_bigint::BigUint;
use solana_sdk::{
instruction::Instruction,
pubkey::Pubkey,
signature::{Keypair, Signature, Signer},
};
use std::collections::HashMap;
use zk_merkle_proof::ZK_ACCOUNT;
#[link(name = "circuit", kind = "static")]
extern "C" {}
rust_witness::witness!(merkleproof);
// End-to-end test: create a compressed account, fetch it from the indexer,
// then verify it on-chain with a locally generated Groth16 proof.
#[tokio::test]
async fn test_create_and_verify_account() {
let config =
ProgramTestConfig::new(true, Some(vec![("zk_merkle_proof", zk_merkle_proof::ID)]));
let mut rpc = LightProgramTest::new(config).await.unwrap();
let payer = rpc.get_payer().insecure_clone();
// First byte = 0 for BN254 field compatibility
let mut secret_data = [0u8; 32];
for i in 1..32 {
secret_data[i] = (i as u8) + 65;
}
// The private data never leaves the client; only its Poseidon hash is stored.
let data_hash = Poseidon::hashv(&[&secret_data]).unwrap();
let address_tree_info = rpc.get_address_tree_v2();
// Must match the on-chain derivation in create_account.
let (account_address, _) = derive_address(
&[ZK_ACCOUNT, &data_hash],
&address_tree_info.tree,
&zk_merkle_proof::ID,
);
create_account(&mut rpc, &payer, &account_address, address_tree_info.clone(), data_hash)
.await
.unwrap();
// Fetch the newly created account via the indexer.
let accounts = rpc
.get_compressed_accounts_by_owner(&zk_merkle_proof::ID, None, None)
.await
.unwrap();
assert_eq!(accounts.value.items.len(), 1);
let created_account = &accounts.value.items[0];
let account_data_hash = created_account.data.as_ref().unwrap().data_hash;
verify_account(&mut rpc, &payer, created_account, account_data_hash)
.await
.unwrap();
}
// Builds and sends the create_account instruction: fetches a validity proof
// for the new address, packs tree accounts, and submits the transaction.
async fn create_account<R>(
rpc: &mut R,
payer: &Keypair,
address: &[u8; 32],
address_tree_info: light_client::indexer::TreeInfo,
data_hash: [u8; 32],
) -> Result<Signature, RpcError>
where
R: Rpc + Indexer,
{
let mut remaining_accounts = PackedAccounts::default();
let config = SystemAccountMetaConfig::new(zk_merkle_proof::ID);
remaining_accounts.add_system_accounts(config)?;
// Validity proof: proves the address does not yet exist in the address tree.
let rpc_result = rpc
.get_validity_proof(
vec![],
vec![AddressWithTree {
address: *address,
tree: address_tree_info.tree,
}],
None,
)
.await?
.value;
let packed_address_tree_accounts = rpc_result
.pack_tree_infos(&mut remaining_accounts)
.address_trees;
// Pick an output state tree and record its index in remaining_accounts.
let output_state_tree_index = rpc
.get_random_state_tree_info_v1()?
.pack_output_tree_index(&mut remaining_accounts)?;
let instruction_data = zk_merkle_proof::instruction::CreateAccount {
proof: rpc_result.proof,
address_tree_info: packed_address_tree_accounts[0],
output_state_tree_index,
data_hash,
};
let accounts = zk_merkle_proof::accounts::CreateAccountAccounts {
signer: payer.pubkey(),
};
// Named accounts first, then the packed Light system / tree accounts.
let instruction = Instruction {
program_id: zk_merkle_proof::ID,
accounts: [
accounts.to_account_metas(None),
remaining_accounts.to_account_metas().0,
]
.concat(),
data: instruction_data.data(),
};
rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer])
.await
}
// Fetches the account's Merkle proof from the indexer, generates a Groth16
// proof locally, and submits the verify_account instruction.
async fn verify_account<R>(
rpc: &mut R,
payer: &Keypair,
account: &light_client::indexer::CompressedAccount,
data_hash: [u8; 32],
) -> Result<Signature, RpcError>
where
R: Rpc + Indexer,
{
let proofs_result = rpc
.get_multiple_compressed_account_proofs(vec![account.hash], None)
.await?;
let proofs = proofs_result.value.items;
assert!(!proofs.is_empty(), "No proofs returned");
let merkle_proof = &proofs[0];
let leaf_index = merkle_proof.leaf_index as u32;
let merkle_proof_hashes = &merkle_proof.proof;
let merkle_root = merkle_proof.root;
// 2400 is presumably the on-chain root history buffer length — TODO confirm
// against the deployed state-tree configuration.
let root_index = (merkle_proof.root_seq % 2400) as u16;
let state_tree = merkle_proof.merkle_tree;
// Generate the Groth16 proof off-chain (see generate_merkle_proof).
let zk_proof = generate_merkle_proof(
account,
&state_tree,
leaf_index,
merkle_proof_hashes,
&merkle_root,
&data_hash,
);
let mut remaining_accounts = PackedAccounts::default();
let config = SystemAccountMetaConfig::new(zk_merkle_proof::ID);
remaining_accounts.add_system_accounts(config)?;
let instruction_data = zk_merkle_proof::instruction::VerifyAccount {
input_root_index: root_index,
zk_proof,
data_hash,
};
let accounts = zk_merkle_proof::accounts::VerifyAccountAccounts {
signer: payer.pubkey(),
state_merkle_tree: state_tree,
};
let instruction = Instruction {
program_id: zk_merkle_proof::ID,
accounts: [
accounts.to_account_metas(None),
remaining_accounts.to_account_metas().0,
]
.concat(),
data: instruction_data.data(),
};
rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer])
.await
}
// Generates a Groth16 proof that `account` is included in the state tree with
// root `merkle_root`. Recreates the account leaf hash locally first (sanity
// check), builds the circuit inputs, proves with circom-prover, verifies
// locally, then compresses the proof for on-chain submission.
fn generate_merkle_proof(
account: &light_client::indexer::CompressedAccount,
merkle_tree_pubkey: &Pubkey,
leaf_index: u32,
merkle_proof_hashes: &[[u8; 32]],
merkle_root: &[u8; 32],
data_hash: &[u8; 32],
) -> light_compressed_account::instruction_data::compressed_proof::CompressedProof {
let zkey_path = "./build/merkle_proof_final.zkey".to_string();
let mut proof_inputs = HashMap::new();
// Pubkeys hashed to BN254 field size, matching the on-chain public inputs.
let owner_hashed = hash_to_bn254_field_size_be(zk_merkle_proof::ID.as_ref());
let merkle_tree_hashed = hash_to_bn254_field_size_be(merkle_tree_pubkey.as_ref());
let discriminator = if let Some(ref data) = account.data {
data.discriminator
} else {
[0u8; 8]
};
let address = account.address.expect("Account must have an address");
// Verify hash can be recreated
let mut leaf_index_bytes = [0u8; 32];
// NOTE(review): little-endian bytes placed in the tail of a big-endian word —
// appears to match the protocol's leaf-index hashing; confirm against the SDK.
leaf_index_bytes[28..32].copy_from_slice(&(account.leaf_index as u32).to_le_bytes());
let mut discriminator_with_domain = [0u8; 32];
discriminator_with_domain[24..32].copy_from_slice(&discriminator);
// Byte 23 = 2 sets the 2^65 domain tag, mirroring the circuit's
// `discriminator + 36893488147419103232`.
discriminator_with_domain[23] = 2;
let computed_hash = Poseidon::hashv(&[
owner_hashed.as_slice(),
leaf_index_bytes.as_slice(),
merkle_tree_hashed.as_slice(),
address.as_slice(),
discriminator_with_domain.as_slice(),
data_hash.as_slice(),
])
.unwrap();
assert_eq!(computed_hash, account.hash, "Hash mismatch");
// Public inputs
proof_inputs.insert(
"owner_hashed".to_string(),
vec![BigUint::from_bytes_be(&owner_hashed).to_string()],
);
proof_inputs.insert(
"merkle_tree_hashed".to_string(),
vec![BigUint::from_bytes_be(&merkle_tree_hashed).to_string()],
);
// Raw discriminator only — the domain tag is added inside the circuit.
proof_inputs.insert(
"discriminator".to_string(),
vec![BigUint::from_bytes_be(&discriminator).to_string()],
);
proof_inputs.insert(
"data_hash".to_string(),
vec![BigUint::from_bytes_be(data_hash).to_string()],
);
proof_inputs.insert(
"expectedRoot".to_string(),
vec![BigUint::from_bytes_be(merkle_root).to_string()],
);
// Private inputs
proof_inputs.insert("leaf_index".to_string(), vec![leaf_index.to_string()]);
let mut account_leaf_index_bytes = [0u8; 32];
account_leaf_index_bytes[28..32].copy_from_slice(&(account.leaf_index as u32).to_le_bytes());
proof_inputs.insert(
"account_leaf_index".to_string(),
vec![BigUint::from_bytes_be(&account_leaf_index_bytes).to_string()],
);
proof_inputs.insert(
"address".to_string(),
vec![BigUint::from_bytes_be(&address).to_string()],
);
let path_elements: Vec<String> = merkle_proof_hashes
.iter()
.map(|hash| BigUint::from_bytes_be(hash).to_string())
.collect();
proof_inputs.insert("pathElements".to_string(), path_elements);
let circuit_inputs = serde_json::to_string(&proof_inputs).unwrap();
// Prove with the rust_witness-generated witness function and the zkey.
let proof = CircomProver::prove(
ProofLib::Arkworks,
WitnessFn::RustWitness(merkleproof_witness),
circuit_inputs,
zkey_path.clone(),
)
.expect("Proof generation failed");
// Sanity: verify locally with arkworks before touching the chain.
let is_valid = CircomProver::verify(ProofLib::Arkworks, proof.clone(), zkey_path.clone())
.expect("Proof verification failed");
assert!(is_valid, "Local proof verification failed");
// Verify with groth16-solana
{
use groth16_solana::groth16::Groth16Verifier;
use groth16_solana::proof_parser::circom_prover::convert_public_inputs;
let (proof_a, proof_b, proof_c) =
convert_proof(&proof.proof).expect("Failed to convert proof");
let public_inputs_converted: [[u8; 32]; 5] = convert_public_inputs(&proof.pub_inputs);
let mut verifier = Groth16Verifier::new(
&proof_a,
&proof_b,
&proof_c,
&public_inputs_converted,
&zk_merkle_proof::verifying_key::VERIFYINGKEY,
)
.expect("Failed to create verifier");
verifier.verify().expect("groth16-solana verification failed");
}
// Compress the proof points for the on-chain instruction.
let (proof_a_uncompressed, proof_b_uncompressed, proof_c_uncompressed) =
convert_proof(&proof.proof).expect("Failed to convert proof");
use groth16_solana::proof_parser::circom_prover::convert_proof_to_compressed;
let (proof_a, proof_b, proof_c) = convert_proof_to_compressed(
&proof_a_uncompressed,
&proof_b_uncompressed,
&proof_c_uncompressed,
)
.expect("Failed to compress proof");
light_compressed_account::instruction_data::compressed_proof::CompressedProof {
a: proof_a,
b: proof_b,
c: proof_c,
}
}