feat(avm): keccak as blackbox
fcarreiro committed Apr 12, 2024
1 parent 5409493 commit 7459911
Showing 10 changed files with 132 additions and 88 deletions.
27 changes: 26 additions & 1 deletion avm-transpiler/src/transpile.rs
@@ -292,7 +292,7 @@ fn handle_foreign_call(
"avmOpcodeNullifierExists" => handle_nullifier_exists(avm_instrs, destinations, inputs),
"avmOpcodeL1ToL2MsgExists" => handle_l1_to_l2_msg_exists(avm_instrs, destinations, inputs),
"avmOpcodeSendL2ToL1Msg" => handle_send_l2_to_l1_msg(avm_instrs, destinations, inputs),
"avmOpcodeKeccak256" | "avmOpcodeSha256" => {
"avmOpcodeSha256" => {
handle_2_field_hash_instruction(avm_instrs, function, destinations, inputs)
}
"avmOpcodeGetContractInstance" => {
@@ -902,6 +902,31 @@ fn handle_black_box_function(avm_instrs: &mut Vec<AvmInstruction>, operation: &B
..Default::default()
});
}
BlackBoxOp::Keccak256 { message, output } => {
let message_offset = message.pointer.0;
let message_size_offset = message.size.0;
let dest_offset = output.pointer.0;
assert_eq!(output.size, 32, "Keccak256 output size must be 32!");

avm_instrs.push(AvmInstruction {
opcode: AvmOpcode::KECCAK,
indirect: Some(
ZEROTH_OPERAND_INDIRECT | FIRST_OPERAND_INDIRECT | SECOND_OPERAND_INDIRECT,
),
operands: vec![
AvmOperand::U32 {
value: dest_offset as u32,
},
AvmOperand::U32 {
value: message_offset as u32,
},
AvmOperand::U32 {
value: message_size_offset as u32,
},
],
..Default::default()
});
}
_ => panic!("Transpiler doesn't know how to process {:?}", operation),
}
}
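
For orientation (not part of the diff): a minimal TypeScript sketch of the instruction this branch emits, built with the simulator-side classes exercised in hashing.test.ts further down. The Addressing/Keccak usage is taken from that test; the offsets are arbitrary, and the all-indirect wire flag is assumed to correspond to the ZEROTH | FIRST | SECOND flags set above.

import { Addressing, AddressingMode } from './addressing_mode.js'; // import paths as in hashing.test.ts
import { Keccak } from './hashing.js';

// All three operands are indirect, and the third operand is now a memory offset
// holding the message size rather than an immediate size.
const indirect = new Addressing([
  AddressingMode.INDIRECT, // dstOffset
  AddressingMode.INDIRECT, // messageOffset
  AddressingMode.INDIRECT, // messageSizeOffset
]).toWire();

const inst = new Keccak(indirect, /*dstOffset=*/ 0, /*messageOffset=*/ 1, /*messageSizeOffset=*/ 2);
console.log(inst.serialize()); // wire encoding checked against Keccak.deserialize in hashing.test.ts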
@@ -32,7 +32,7 @@ contract AvmTest {
use dep::compressed_string::CompressedString;

// avm lib
use dep::aztec::avm::hash::{keccak256, sha256};
use dep::aztec::avm::hash::sha256;

#[aztec(storage)]
struct Storage {
@@ -145,8 +145,8 @@ contract AvmTest {
* Hashing functions
************************************************************************/
#[aztec(public-vm)]
fn keccak_hash(data: [Field; 10]) -> pub [Field; 2] {
keccak256(data)
fn keccak_hash(data: [u8; 10]) -> pub [u8; 32] {
dep::std::hash::keccak256(data, data.len() as u32)
}

#[aztec(public-vm)]
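
A caller's-eye sketch (TypeScript, not part of the diff), mirroring how the AVM simulator tests below drive the new keccak_hash signature: each input byte travels as its own field element of calldata, and the return value is the full 32-byte digest, one field per byte. The byte values are illustrative only.

import { Fr } from '@aztec/foundation/fields';
import { keccak256 } from '@aztec/foundation/crypto';

// Hypothetical 10-byte input for keccak_hash.
const bytes = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
const calldata = bytes.map(b => new Fr(b)); // one field element per byte, as in input.map(e => e.toFr())
const expected = [...keccak256(Buffer.from(bytes))].map(b => new Fr(b)); // 32 field elements, one per digest byte
console.log(calldata.length, expected.length); // 10, 32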
4 changes: 2 additions & 2 deletions yarn-project/aztec.js/src/utils/cheat_codes.ts
@@ -1,7 +1,7 @@
import { type Note, type PXE } from '@aztec/circuit-types';
import { type AztecAddress, type EthAddress, Fr } from '@aztec/circuits.js';
import { toBigIntBE, toHex } from '@aztec/foundation/bigint-buffer';
import { keccak, pedersenHash } from '@aztec/foundation/crypto';
import { keccak256, pedersenHash } from '@aztec/foundation/crypto';
import { createDebugLogger } from '@aztec/foundation/log';

import fs from 'fs';
@@ -167,7 +167,7 @@ export class EthCheatCodes {
public keccak256(baseSlot: bigint, key: bigint): bigint {
// abi encode (removing the 0x) - concat key and baseSlot (both padded to 32 bytes)
const abiEncoded = toHex(key, true).substring(2) + toHex(baseSlot, true).substring(2);
return toBigIntBE(keccak(Buffer.from(abiEncoded, 'hex')));
return toBigIntBE(keccak256(Buffer.from(abiEncoded, 'hex')));
}

/**
4 changes: 2 additions & 2 deletions yarn-project/foundation/src/abi/event_selector.ts
@@ -1,5 +1,5 @@
import { fromHex, toBigIntBE } from '../bigint-buffer/index.js';
import { keccak, randomBytes } from '../crypto/index.js';
import { keccak256, randomBytes } from '../crypto/index.js';
import { type Fr } from '../fields/fields.js';
import { BufferReader } from '../serialize/buffer_reader.js';
import { Selector } from './selector.js';
@@ -44,7 +44,7 @@ export class EventSelector extends Selector {
if (/\s/.test(signature)) {
throw new Error('Signature cannot contain whitespace');
}
return EventSelector.fromBuffer(keccak(Buffer.from(signature)).subarray(0, Selector.SIZE));
return EventSelector.fromBuffer(keccak256(Buffer.from(signature)).subarray(0, Selector.SIZE));
}

/**
4 changes: 2 additions & 2 deletions yarn-project/foundation/src/abi/function_selector.ts
@@ -1,5 +1,5 @@
import { fromHex, toBigIntBE } from '../bigint-buffer/index.js';
import { keccak, randomBytes } from '../crypto/index.js';
import { keccak256, randomBytes } from '../crypto/index.js';
import { type Fr } from '../fields/fields.js';
import { BufferReader } from '../serialize/buffer_reader.js';
import { FieldReader } from '../serialize/field_reader.js';
@@ -72,7 +72,7 @@ export class FunctionSelector extends Selector {
if (/\s/.test(signature)) {
throw new Error('Signature cannot contain whitespace');
}
return FunctionSelector.fromBuffer(keccak(Buffer.from(signature)).subarray(0, Selector.SIZE));
return FunctionSelector.fromBuffer(keccak256(Buffer.from(signature)).subarray(0, Selector.SIZE));
}

/**
2 changes: 1 addition & 1 deletion yarn-project/foundation/src/crypto/keccak/index.ts
@@ -6,7 +6,7 @@ import { Keccak } from 'sha3';
* @param input - The input buffer to be hashed.
* @returns The computed Keccak-256 hash as a Buffer.
*/
export function keccak(input: Buffer) {
export function keccak256(input: Buffer) {
const hash = new Keccak(256);
return hash.update(input).digest();
}
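
A short usage sketch of the renamed helper (not part of the diff), following the same call pattern as EventSelector.fromSignature and FunctionSelector.fromSignature above. The signature string is purely illustrative, and the 4-byte slice assumes the usual Selector.SIZE.

import { keccak256 } from '@aztec/foundation/crypto';

// First bytes of the Keccak-256 digest of a signature string, as the selectors compute it.
const selector = keccak256(Buffer.from('transfer(Field,Field)')).subarray(0, 4);
console.log(selector.toString('hex'));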
2 changes: 1 addition & 1 deletion yarn-project/pxe/src/kernel_prover/kernel_prover.ts
@@ -214,7 +214,7 @@ export class KernelProver {
await this.oracle.getContractClassIdPreimage(contractClassId);

// TODO(#262): Use real acir hash
// const acirHash = keccak(Buffer.from(bytecode, 'hex'));
// const acirHash = keccak256(Buffer.from(bytecode, 'hex'));
const acirHash = Fr.fromBuffer(Buffer.alloc(32, 0));

// TODO
73 changes: 46 additions & 27 deletions yarn-project/simulator/src/avm/avm_simulator.test.ts
@@ -2,7 +2,7 @@ import { UnencryptedL2Log } from '@aztec/circuit-types';
import { computeVarArgsHash } from '@aztec/circuits.js/hash';
import { EventSelector, FunctionSelector } from '@aztec/foundation/abi';
import { AztecAddress } from '@aztec/foundation/aztec-address';
import { keccak, pedersenHash, poseidon2Hash, sha256 } from '@aztec/foundation/crypto';
import { keccak256, pedersenHash, poseidon2Hash, sha256 } from '@aztec/foundation/crypto';
import { EthAddress } from '@aztec/foundation/eth-address';
import { Fr } from '@aztec/foundation/fields';
import { AvmNestedCallsTestContractArtifact, AvmTestContractArtifact } from '@aztec/noir-contracts.js';
@@ -12,7 +12,7 @@ import { strict as assert } from 'assert';

import { isAvmBytecode } from '../public/transitional_adaptors.js';
import { AvmMachineState } from './avm_machine_state.js';
import { TypeTag } from './avm_memory_types.js';
import { Field, type MemoryValue, TypeTag, Uint8 } from './avm_memory_types.js';
import { AvmSimulator } from './avm_simulator.js';
import {
adjustCalldataIndex,
@@ -121,37 +121,49 @@ describe('AVM simulator: transpiled Noir contracts', () => {
});

describe.each([
['sha256_hash', sha256],
['keccak_hash', keccak],
])('Hashes with 2 fields returned in noir contracts', (name: string, hashFunction: (data: Buffer) => Buffer) => {
it(`Should execute contract function that performs ${name} hash`, async () => {
const calldata = [...Array(10)].map(_ => Fr.random());
const hash = hashFunction(Buffer.concat(calldata.map(f => f.toBuffer())));

const context = initContext({ env: initExecutionEnvironment({ calldata }) });
const bytecode = getAvmTestContractBytecode(name);
const results = await new AvmSimulator(context).executeBytecode(bytecode);

expect(results.reverted).toBe(false);
expect(results.output).toEqual([new Fr(hash.subarray(0, 16)), new Fr(hash.subarray(16, 32))]);
});
});

describe.each([
['poseidon2_hash', poseidon2Hash],
['pedersen_hash', pedersenHash],
['pedersen_hash_with_index', (m: Buffer[]) => pedersenHash(m, 20)],
])('Hashes with field returned in noir contracts', (name: string, hashFunction: (data: Buffer[]) => Fr) => {
it(`Should execute contract function that performs ${name} hash`, async () => {
const calldata = [...Array(10)].map(_ => Fr.random());
const hash = hashFunction(calldata.map(f => f.toBuffer()));
[
'sha256_hash',
/*input=*/ randomBytes(10),
/*output=*/ (fields: Field[]) => {
const resBuffer = sha256(Buffer.concat(fields.map(f => f.toBuffer())));
return [new Fr(resBuffer.subarray(0, 16)), new Fr(resBuffer.subarray(16, 32))];
},
],
[
'keccak_hash',
/*input=*/ randomBytes(10),
/*output=*/ (bytes: Uint8[]) => [...keccak256(Buffer.concat(bytes.map(b => b.toBuffer())))].map(b => new Fr(b)),
],
[
'poseidon2_hash',
/*input=*/ randomFields(10),
/*output=*/ (fields: Field[]) => [poseidon2Hash(fields.map(f => f.toBuffer()))],
],
[
'pedersen_hash',
/*input=*/ randomFields(10),
/*output=*/ (fields: Field[]) => [pedersenHash(fields.map(f => f.toBuffer()))],
],
[
'pedersen_hash_with_index',
/*input=*/ randomFields(10),
/*output=*/ (fields: Field[]) => [
pedersenHash(
fields.map(f => f.toBuffer()),
/*index=*/ 20,
),
],
],
])('Hashes in noir contracts', (name: string, input: MemoryValue[], output: (msg: any[]) => Fr[]) => {
it(`Should execute contract function that performs ${name}`, async () => {
const calldata = input.map(e => e.toFr());

const context = initContext({ env: initExecutionEnvironment({ calldata }) });
const bytecode = getAvmTestContractBytecode(name);
const results = await new AvmSimulator(context).executeBytecode(bytecode);

expect(results.reverted).toBe(false);
expect(results.output).toEqual([new Fr(hash)]);
expect(results.output).toEqual(output(input));
});
});

@@ -923,3 +935,10 @@ function getAvmNestedCallsTestContractBytecode(functionName: string): Buffer {
);
return artifact.bytecode;
}

function randomBytes(length: number): Uint8[] {
return [...Array(length)].map(_ => new Uint8(Math.floor(Math.random() * 255)));
}
function randomFields(length: number): Field[] {
return [...Array(length)].map(_ => new Field(Fr.random()));
}
63 changes: 33 additions & 30 deletions yarn-project/simulator/src/avm/opcodes/hashing.test.ts
@@ -1,8 +1,7 @@
import { Fr } from '@aztec/circuits.js';
import { keccak, pedersenHash, sha256 } from '@aztec/foundation/crypto';
import { keccak256, pedersenHash, sha256 } from '@aztec/foundation/crypto';

import { type AvmContext } from '../avm_context.js';
import { Field, Uint32 } from '../avm_memory_types.js';
import { Field, Uint8, Uint32 } from '../avm_memory_types.js';
import { initContext } from '../fixtures/index.js';
import { Addressing, AddressingMode } from './addressing_mode.js';
import { Keccak, Pedersen, Poseidon2, Sha256 } from './hashing.js';
@@ -75,61 +74,65 @@ describe('Hashing Opcodes', () => {
1, // indirect
...Buffer.from('12345678', 'hex'), // dstOffset
...Buffer.from('23456789', 'hex'), // messageOffset
...Buffer.from('3456789a', 'hex'), // hashSize
...Buffer.from('3456789a', 'hex'), // messageSizeOffset
]);
const inst = new Keccak(
/*indirect=*/ 1,
/*dstOffset=*/ 0x12345678,
/*messageOffset=*/ 0x23456789,
/*hashSize=*/ 0x3456789a,
/*messageSizeOffset=*/ 0x3456789a,
);

expect(Keccak.deserialize(buf)).toEqual(inst);
expect(inst.serialize()).toEqual(buf);
});

it('Should hash correctly - direct', async () => {
const args = [new Field(1n), new Field(2n), new Field(3n)];
const args = [...Array(10)].map(_ => new Uint8(Math.floor(Math.random() * 255)));
const indirect = 0;
const messageOffset = 0;
const messageSizeOffset = 15;
const dstOffset = 20;
context.machineState.memory.set(messageSizeOffset, new Uint32(args.length));
context.machineState.memory.setSlice(messageOffset, args);

const dstOffset = 3;
await new Keccak(indirect, dstOffset, messageOffset, messageSizeOffset).execute(context);

const inputBuffer = Buffer.concat(args.map(field => field.toBuffer()));
const expectedHash = keccak(inputBuffer);
await new Keccak(indirect, dstOffset, messageOffset, args.length).execute(context);

const result = context.machineState.memory.getSliceAs<Field>(dstOffset, 2);
const combined = Buffer.concat([result[0].toBuffer().subarray(16, 32), result[1].toBuffer().subarray(16, 32)]);

expect(combined).toEqual(expectedHash);
const resultBuffer = Buffer.concat(
context.machineState.memory.getSliceAs<Uint8>(dstOffset, 32).map(byte => byte.toBuffer()),
);
const inputBuffer = Buffer.concat(args.map(byte => byte.toBuffer()));
const expectedHash = keccak256(inputBuffer);
expect(resultBuffer).toEqual(expectedHash);
});

it('Should hash correctly - indirect', async () => {
const args = [new Field(1n), new Field(2n), new Field(3n)];
const args = [...Array(10)].map(_ => new Uint8(Math.floor(Math.random() * 255)));
const indirect = new Addressing([
/*dstOffset=*/ AddressingMode.INDIRECT,
/*messageOffset*/ AddressingMode.INDIRECT,
/*messageSizeOffset*/ AddressingMode.INDIRECT,
]).toWire();
const messageOffset = 0;
const argsLocation = 4;

const messageOffsetReal = 10;
const messageSizeOffset = 1;
const messageSizeOffsetReal = 100;
const dstOffset = 2;
const readLocation = 6;
const dstOffsetReal = 30;
context.machineState.memory.set(messageOffset, new Uint32(messageOffsetReal));
context.machineState.memory.set(dstOffset, new Uint32(dstOffsetReal));
context.machineState.memory.set(messageSizeOffset, new Uint32(messageSizeOffsetReal));
context.machineState.memory.set(messageSizeOffsetReal, new Uint32(args.length));
context.machineState.memory.setSlice(messageOffsetReal, args);

context.machineState.memory.set(messageOffset, new Uint32(argsLocation));
context.machineState.memory.set(dstOffset, new Uint32(readLocation));
context.machineState.memory.setSlice(argsLocation, args);
await new Keccak(indirect, dstOffset, messageOffset, messageSizeOffset).execute(context);

const inputBuffer = Buffer.concat(args.map(field => field.toBuffer()));
const expectedHash = keccak(inputBuffer);
await new Keccak(indirect, dstOffset, messageOffset, args.length).execute(context);

const result = context.machineState.memory.getSliceAs<Field>(readLocation, 2);
const combined = Buffer.concat([result[0].toBuffer().subarray(16, 32), result[1].toBuffer().subarray(16, 32)]);

expect(combined).toEqual(expectedHash);
const resultBuffer = Buffer.concat(
context.machineState.memory.getSliceAs<Uint8>(dstOffsetReal, 32).map(byte => byte.toBuffer()),
);
const inputBuffer = Buffer.concat(args.map(byte => byte.toBuffer()));
const expectedHash = keccak256(inputBuffer);
expect(resultBuffer).toEqual(expectedHash);
});
});

35 changes: 16 additions & 19 deletions yarn-project/simulator/src/avm/opcodes/hashing.ts
@@ -1,8 +1,10 @@
import { toBigIntBE } from '@aztec/foundation/bigint-buffer';
import { keccak, pedersenHash, poseidon2Permutation, sha256 } from '@aztec/foundation/crypto';
import { keccak256, pedersenHash, poseidon2Permutation, sha256 } from '@aztec/foundation/crypto';

import { strict as assert } from 'assert';

import { type AvmContext } from '../avm_context.js';
import { Field } from '../avm_memory_types.js';
import { Field, Uint8 } from '../avm_memory_types.js';
import { Opcode, OperandType } from '../serialization/instruction_serialization.js';
import { Addressing } from './addressing_mode.js';
import { Instruction } from './instruction.js';
@@ -63,33 +65,28 @@ export class Keccak extends Instruction {
private indirect: number,
private dstOffset: number,
private messageOffset: number,
private messageSize: number,
private messageSizeOffset: number,
) {
super();
}

// Note hash output is 32 bytes, so takes up two fields
// pub fn keccak256(input: [u8], message_size: u32) -> [u8; 32]
public async execute(context: AvmContext): Promise<void> {
const memoryOperations = { reads: this.messageSize, writes: 2, indirect: this.indirect };
const memory = context.machineState.memory.track(this.type);
context.machineState.consumeGas(this.gasCost(memoryOperations));

// We hash a set of field elements
const [dstOffset, messageOffset] = Addressing.fromWire(this.indirect).resolve(
[this.dstOffset, this.messageOffset],
const [dstOffset, messageOffset, messageSizeOffset] = Addressing.fromWire(this.indirect).resolve(
[this.dstOffset, this.messageOffset, this.messageSizeOffset],
memory,
);
const messageSize = memory.get(messageSizeOffset).toNumber();
const memoryOperations = { reads: messageSize + 1, writes: 32, indirect: this.indirect };
context.machineState.consumeGas(this.gasCost(memoryOperations));

const hashData = memory.getSlice(messageOffset, this.messageSize).map(word => word.toBuffer());

const hash = keccak(Buffer.concat(hashData));
const messageData = Buffer.concat(memory.getSlice(messageOffset, messageSize).map(word => word.toBuffer()));
const hashBuffer = keccak256(messageData);

// Split output into two fields
const high = new Field(toBigIntBE(hash.subarray(0, 16)));
const low = new Field(toBigIntBE(hash.subarray(16, 32)));

memory.set(dstOffset, high);
memory.set(dstOffset + 1, low);
// We need to convert the hashBuffer because map doesn't work as expected on an Uint8Array (Buffer).
const res = [...hashBuffer].map(byte => new Uint8(byte));
memory.setSlice(dstOffset, res);

memory.assert(memoryOperations);
context.machineState.incrementPc();
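
To summarize the new behavior, a minimal end-to-end sketch (not part of the diff) of executing KECCAK with the message size read from memory and the digest written as 32 separate byte cells. It assumes the fixtures used in hashing.test.ts; initContext() is assumed to provide a default machine state with enough gas, and the offsets are arbitrary.

import { keccak256 } from '@aztec/foundation/crypto';
import { Uint8, Uint32 } from '../avm_memory_types.js'; // import paths as in hashing.test.ts
import { initContext } from '../fixtures/index.js';
import { Keccak } from './hashing.js';

async function keccakDemo(): Promise<boolean> {
  const context = initContext(); // assumed default machine state and gas
  const message = [1, 2, 3].map(b => new Uint8(b));
  context.machineState.memory.setSlice(/*messageOffset=*/ 0, message);
  // The message size now lives in memory and is read by the opcode at run time.
  context.machineState.memory.set(/*messageSizeOffset=*/ 10, new Uint32(message.length));

  await new Keccak(/*indirect=*/ 0, /*dstOffset=*/ 20, /*messageOffset=*/ 0, /*messageSizeOffset=*/ 10).execute(context);

  // The digest occupies 32 consecutive Uint8 cells rather than two packed Fields.
  const digest = Buffer.concat(context.machineState.memory.getSliceAs<Uint8>(20, 32).map(b => b.toBuffer()));
  return digest.equals(keccak256(Buffer.from([1, 2, 3])));
}

keccakDemo().then(ok => console.log('keccak output matches:', ok));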
