feat: automate verify_honk_proof input generation (#8092)
This PR replaces gen_inner_proof_inputs_ultra_honk.sh with
regenerate_verify_honk_proof_inputs.sh, which resets test_programs and
copies the inputs automatically, so people no longer need to know the
intricate details of regenerating the inputs for the verify_honk_proof
test program.

As part of this, I created a Python script,
barretenberg/acir_tests/update_verify_honk_proof_inputs.py, which does
the copying and surgery of the proofs. This script should not need to
be run on its own.

It also updates the rebuild.sh script to actually report which
test_programs failed. Before, it always reported that the rebuild
succeeded. It also lets users specify which test_programs they want to
rebuild.

I updated the reset_acir_tests.sh script to be more efficient as well.
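
For context, a minimal sketch of the new workflow, assuming the scripts are
run from barretenberg/acir_tests as in this diff (exact paths and environment
setup may differ):

    # Regenerate the verify_honk_proof inputs end to end: resets test_programs,
    # re-proves assert_statement_recursive, rewrites Prover.toml via
    # update_verify_honk_proof_inputs.py, and rebuilds verify_honk_proof.
    ./regenerate_verify_honk_proof_inputs.sh

    # Or reset/rebuild test programs directly, e.g. only the two programs involved here:
    ./reset_acir_tests.sh --rebuild-nargo --programs assert_statement_recursive verify_honk_proof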
lucasxia01 authored Aug 21, 2024
1 parent 6c5ab2b commit bf38d61
Showing 7 changed files with 173 additions and 54 deletions.
regenerate_verify_honk_proof_inputs.sh (replaces gen_inner_proof_inputs_ultra_honk.sh)
@@ -18,7 +18,7 @@ fi

export BRANCH

./clone_test_vectors.sh
./reset_acir_tests.sh --rebuild-nargo --programs assert_statement_recursive

cd acir_tests/assert_statement_recursive

@@ -39,4 +39,10 @@ echo "Generate proof to file..."
$BIN prove_ultra_honk $VFLAG -c $CRS_PATH -b ./target/program.json -o "./proofs/honk_$PROOF_NAME"

echo "Write proof as fields for recursion..."
$BIN proof_as_fields_honk $VFLAG -c $CRS_PATH -p "./proofs/honk_$PROOF_NAME" -o "./proofs/honk_${PROOF_NAME}_fields.json"

# cd back to barretenberg/acir_tests
cd ../..
python3 update_verify_honk_proof_inputs.py

./reset_acir_tests.sh --programs verify_honk_proof
48 changes: 42 additions & 6 deletions barretenberg/acir_tests/reset_acir_tests.sh
@@ -1,12 +1,48 @@
#!/usr/bin/env bash
set -e

# Run from within barretenberg/acir_tests

# clean and rebuild noir then compile the test programs
# Initialize variables for flags
REBUILD_NARGO_FLAG=""
PROGRAMS=""

# Parse the arguments
while [[ "$#" -gt 0 ]]; do
case $1 in
--rebuild-nargo)
REBUILD_NARGO_FLAG="--rebuild-nargo"
;;
--programs)
shift
PROGRAMS="$@"
break # Exit loop after collecting all programs
;;
*)
echo "Unknown option: $1"
exit 1
;;
esac
shift
done

# Clean and rebuild noir, then compile the test programs if --rebuild-nargo flag is set
cd ../../noir/noir-repo
cargo clean
noirup -p .
cd test_programs && ./rebuild.sh

# remove and repopulate the test artifacts in bberg
if [[ -n "$REBUILD_NARGO_FLAG" ]]; then
cargo clean
noirup -p .
fi

# Rebuild test programs with rebuild.sh
cd test_programs
if [[ -n "$PROGRAMS" ]]; then
./rebuild.sh $PROGRAMS
else
./rebuild.sh
fi

# Remove and repopulate the test artifacts in bberg
cd ../../../barretenberg/acir_tests
rm -rf acir_tests
./clone_test_vectors.sh
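
To illustrate the new flag handling (a sketch; both flags are optional, and
--programs consumes every remaining argument as a program name):

    # Rebuild nargo, then rebuild only the listed test programs:
    ./reset_acir_tests.sh --rebuild-nargo --programs assert_statement_recursive verify_honk_proof

    # Keep the existing nargo and rebuild every test program:
    ./reset_acir_tests.sh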
41 changes: 41 additions & 0 deletions barretenberg/acir_tests/update_verify_honk_proof_inputs.py
@@ -0,0 +1,41 @@
import json

# Paths to the input files
proof_file_path = "acir_tests/assert_statement_recursive/proofs/honk_proof_a_fields.json"
vk_file_path = "acir_tests/assert_statement_recursive/target/honk_vk_fields.json"

# Path to the output TOML file
output_toml_path = "../../noir/noir-repo/test_programs/execution_success/verify_honk_proof/Prover.toml"

# Read the proof from the JSON file
with open(proof_file_path, "r") as proof_file:
    proof_data = json.load(proof_file)

# Read the verification key from the JSON file
with open(vk_file_path, "r") as vk_file:
    vk_data = json.load(vk_file)

# Extract the one public input (4th element in the proof array)
public_inputs = proof_data[3] if len(proof_data) > 3 else None

# Remove the public input from the proof array
proof_data_without_public_input = proof_data[:3] + proof_data[4:]

# Convert each element in the proof and verification key to a hex string with double quotes
proof_data_str = [f'"{item}"' for item in proof_data_without_public_input]
vk_data_str = [f'"{item}"' for item in vk_data]
public_inputs_str = f'"{public_inputs}"'

# Manually create the TOML content with public_inputs as an array
toml_content = (
    f'key_hash = "0x{"0" * 64}"\n'
    f'proof = [{", ".join(proof_data_str)}]\n'
    f'public_inputs = [{public_inputs_str}]\n'
    f'verification_key = [{", ".join(vk_data_str)}]\n'
)

# Write the content to the output TOML file
with open(output_toml_path, "w") as output_toml_file:
    output_toml_file.write(toml_content)

print(f"Prover.toml has been successfully created at {output_toml_path}")
2 changes: 1 addition & 1 deletion noir/Earthfile
@@ -215,7 +215,7 @@ build-acir-tests:
COPY .earthly-staging/noir-repo/test_programs /usr/src/noir-repo/test_programs/
RUN /usr/src/noir-repo/target/release/nargo --version
# TODO(#6225): We have trouble with concurrency and pass 'true' to build in serial, see #6225 for details
RUN ./rebuild.sh true
RUN ./rebuild.sh
SAVE ARTIFACT /usr/src/noir-repo/test_programs/acir_artifacts/*

barretenberg-acir-benches-bb:
1 change: 1 addition & 0 deletions noir/noir-repo/test_programs/.gitignore
@@ -1,3 +1,4 @@
acir_artifacts
execution_success/**/crs
./Nargo.toml
rebuild.log

One changed file is not shown here: large diffs are not rendered by default.

115 changes: 76 additions & 39 deletions noir/noir-repo/test_programs/rebuild.sh
@@ -1,65 +1,102 @@
#!/usr/bin/env bash

# Exit immediately if a command exits with a non-zero status
set -e

process_dir() {
local dir=$1
local current_dir=$2
local dir_name=$(basename "$dir")

if [[ ! -f "$dir/Nargo.toml" ]]; then
# This directory isn't a proper test but just hold some stale build artifacts
# We then delete it and carry on.
rm -rf $dir
return 0
fi
{
echo "Processing $dir"

if [[ ! -f "$dir/Nargo.toml" ]]; then
echo "No Nargo.toml found in $dir. Removing directory."
rm -rf "$dir"
echo "$dir: skipped (no Nargo.toml)"
return 0
fi

if [[ ! -d "$current_dir/acir_artifacts/$dir_name" ]]; then
mkdir -p $current_dir/acir_artifacts/$dir_name
fi
if [[ ! -d "$current_dir/acir_artifacts/$dir_name" ]]; then
mkdir -p "$current_dir/acir_artifacts/$dir_name"
fi

cd $dir
if [ -d ./target/ ]; then
rm -r ./target/
fi
nargo execute witness
cd "$dir"
if [ -d ./target/ ]; then
rm -r ./target/
fi

if [ -d "$current_dir/acir_artifacts/$dir_name/target" ]; then
rm -r "$current_dir/acir_artifacts/$dir_name/target"
fi
mkdir $current_dir/acir_artifacts/$dir_name/target
if ! nargo execute witness; then
echo "$dir: failed"
else
if [ -d "$current_dir/acir_artifacts/$dir_name/target" ]; then
rm -r "$current_dir/acir_artifacts/$dir_name/target"
fi
mkdir "$current_dir/acir_artifacts/$dir_name/target"

mv ./target/$dir_name.json $current_dir/acir_artifacts/$dir_name/target/program.json
mv ./target/*.gz $current_dir/acir_artifacts/$dir_name/target/
mv ./target/$dir_name.json "$current_dir/acir_artifacts/$dir_name/target/program.json"
mv ./target/*.gz "$current_dir/acir_artifacts/$dir_name/target/"
echo "$dir: succeeded"
fi

cd $current_dir
cd "$current_dir"
} >> "$current_dir/rebuild.log" 2>&1
}

export -f process_dir

excluded_dirs=("workspace" "workspace_default_member")
current_dir=$(pwd)
base_path="$current_dir/execution_success"
dirs_to_process=()

# Remove existing artifacts and create a new directory
rm -rf "$current_dir/acir_artifacts"
mkdir -p "$current_dir/acir_artifacts"

# Gather directories to process, either from arguments or by default.
if [ $# -gt 0 ]; then
    for dir in "$@"; do
        dirs_to_process+=("$base_path/$dir")
    done
else
    for dir in $base_path/*; do
        if [[ ! -d $dir ]] || [[ " ${excluded_dirs[@]} " =~ " $(basename "$dir") " ]]; then
            continue
        fi
        dirs_to_process+=("$dir")
    done

rm -rf $current_dir/acir_artifacts
mkdir -p $current_dir/acir_artifacts
    for dir in $current_dir/benchmarks/*; do
        if [[ ! -d $dir ]]; then
            continue
        fi
        dirs_to_process+=("$dir")
    done
fi

# Gather directories to process.
# dirs_to_process=()
for dir in $base_path/*; do
if [[ ! -d $dir ]] || [[ " ${excluded_dirs[@]} " =~ " $(basename "$dir") " ]]; then
continue
fi
dirs_to_process+=("$dir")
done
# Clear any existing rebuild.log
rm -f "$current_dir/rebuild.log"

for dir in $current_dir/benchmarks/*; do
if [[ ! -d $dir ]]; then
continue
fi
dirs_to_process+=("$dir")
done
# Process directories in parallel
parallel -j0 process_dir {} "$current_dir" ::: "${dirs_to_process[@]}"

parallel -j0 process_dir {} "$current_dir" ::: ${dirs_to_process[@]}
# Check rebuild.log for failures
if [ -f "$current_dir/rebuild.log" ]; then
failed_dirs=($(grep -a 'failed' "$current_dir/rebuild.log" | awk '{print $1}'))
else
echo "rebuild.log not found or empty. Check for errors." >&2
exit 1
fi

echo "Rebuild Succeeded!"
# Print final status message after processing all directories
if [ ${#failed_dirs[@]} -ne 0 ]; then
    echo "Rebuild failed for the following directories:"
    for dir in "${failed_dirs[@]}"; do
        echo "- $dir"
    done
    exit 1
else
    echo "Rebuild Succeeded!"
fi
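
A quick sketch of how the reworked rebuild.sh is used (program names are
directory names under execution_success; per-program output and failures are
collected in rebuild.log):

    # From the repository root:
    cd noir/noir-repo/test_programs

    # Rebuild only selected programs:
    ./rebuild.sh assert_statement_recursive verify_honk_proof

    # Rebuild everything (execution_success plus benchmarks), then inspect failures:
    ./rebuild.sh
    grep -a 'failed' rebuild.log || echo "no failures"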
