Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(avm): hashing opcodes #4526

Merged
merged 6 commits into from
Feb 14, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions avm-transpiler/src/opcodes.rs
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,8 @@ pub enum AvmOpcode {
// Gadgets
KECCAK,
POSEIDON,
SHA256,
PEDERSEN,
}

impl AvmOpcode {
Expand Down Expand Up @@ -167,6 +169,8 @@ impl AvmOpcode {
// Gadgets
AvmOpcode::KECCAK => "KECCAK",
AvmOpcode::POSEIDON => "POSEIDON",
AvmOpcode::SHA256 => "SHA256 ",
AvmOpcode::PEDERSEN => "PEDERSEN",
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,8 @@ const std::unordered_map<OpCode, size_t> Bytecode::OPERANDS_NUM = {
//// Gadgets
//{ OpCode::KECCAK, },
//{ OpCode::POSEIDON, },
//{ OpCode::SHA256, },
//{ OpCode::PEDERSEN, },
};

/**
Expand Down
1 change: 1 addition & 0 deletions yarn-project/foundation/src/crypto/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ export * from './keccak/index.js';
export * from './random/index.js';
export * from './sha256/index.js';
export * from './pedersen/index.js';
export * from './poseidon/index.js';

/**
* Init the bb singleton. This constructs (if not already) the barretenberg sync api within bb.js itself.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ export function pedersenCommit(input: Buffer[]) {
* Create a pedersen hash (field) from an array of input fields.
* Left pads any inputs less than 32 bytes.
*/
export function pedersenHash(input: Buffer[], index = 0) {
export function pedersenHash(input: Buffer[], index = 0): Buffer {
if (!input.every(i => i.length <= 32)) {
throw new Error('All Pedersen Hash input buffers must be <= 32 bytes.');
}
Expand Down
13 changes: 13 additions & 0 deletions yarn-project/foundation/src/crypto/poseidon/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
import { BarretenbergSync, Fr } from '@aztec/bb.js';

/**
* Create a poseidon hash (field) from an array of input fields.
* Left pads any inputs less than 32 bytes.
*/
/**
 * Create a poseidon hash (field) from an array of input fields.
 * Left pads any inputs less than 32 bytes.
 * @param input - Array of input buffers, each at most 32 bytes.
 * @returns The 32-byte hash as a Buffer.
 * @throws If any input buffer is longer than 32 bytes.
 */
export function poseidonHash(input: Buffer[]): Buffer {
  // Same input validation as pedersenHash: Fr values are at most 32 bytes.
  if (!input.every(i => i.length <= 32)) {
    throw new Error('All Poseidon Hash input buffers must be <= 32 bytes.');
  }
  return Buffer.from(
    BarretenbergSync.getSingleton()
      .poseidonHash(input.map(i => new Fr(i)))
      .toBuffer(),
  );
}
14 changes: 13 additions & 1 deletion yarn-project/simulator/src/avm/avm_memory_types.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import { toBufferBE } from '@aztec/foundation/bigint-buffer';
import { Fr } from '@aztec/foundation/fields';

import { strict as assert } from 'assert';
Expand All @@ -19,6 +20,9 @@ export abstract class MemoryValue {
// Use sparingly.
public abstract toBigInt(): bigint;

// To Buffer
public abstract toBuffer(): Buffer;

// To field
public toFr(): Fr {
return new Fr(this.toBigInt());
Expand Down Expand Up @@ -111,6 +115,10 @@ function UnsignedIntegerClassFactory(bits: number) {
public toBigInt(): bigint {
return this.n;
}

public toBuffer(): Buffer {
return toBufferBE(this.n, bits / 8);
}
};
}

Expand All @@ -127,7 +135,7 @@ export class Field extends MemoryValue {
public static readonly MODULUS: bigint = Fr.MODULUS;
private readonly rep: Fr;

constructor(v: number | bigint | Fr) {
constructor(v: number | bigint | Fr | Buffer) {
super();
this.rep = new Fr(v);
}
Expand Down Expand Up @@ -159,6 +167,10 @@ export class Field extends MemoryValue {
public toBigInt(): bigint {
return this.rep.toBigInt();
}

public toBuffer(): Buffer {
return this.rep.toBuffer();
}
}

export enum TypeTag {
Expand Down
138 changes: 138 additions & 0 deletions yarn-project/simulator/src/avm/opcodes/hashing.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,138 @@
import { toBigIntBE } from '@aztec/foundation/bigint-buffer';
import { keccak, pedersenHash, poseidonHash, sha256 } from '@aztec/foundation/crypto';

import { AvmContext } from '../avm_context.js';
import { Field } from '../avm_memory_types.js';
import { initContext } from '../fixtures/index.js';
import { Keccak, Pedersen, Poseidon2, Sha256 } from './hashing.js';

describe('Hashing Opcodes', () => {
  let context: AvmContext;

  beforeEach(async () => {
    context = initContext();
  });

  describe('Poseidon2', () => {
    it('Should (de)serialize correctly', () => {
      const buf = Buffer.from([
        Poseidon2.opcode, // opcode
        ...Buffer.from('12345678', 'hex'), // dstOffset
        ...Buffer.from('23456789', 'hex'), // hashOffset
        ...Buffer.from('3456789a', 'hex'), // hashSize
      ]);
      const inst = new Poseidon2(/*dstOffset=*/ 0x12345678, /*hashOffset=*/ 0x23456789, /*hashSize=*/ 0x3456789a);

      expect(Poseidon2.deserialize(buf)).toEqual(inst);
      expect(inst.serialize()).toEqual(buf);
    });

    it('Should hash correctly', async () => {
      const args = [new Field(1n), new Field(2n), new Field(3n)];
      const hashOffset = 0;
      context.machineState.memory.setSlice(hashOffset, args);

      const dstOffset = 3;

      const expectedHash = poseidonHash(args.map(field => field.toBuffer()));
      await new Poseidon2(dstOffset, hashOffset, args.length).execute(context);

      const result = context.machineState.memory.get(dstOffset);
      expect(result).toEqual(new Field(toBigIntBE(expectedHash)));
    });
  });

  describe('Keccak', () => {
    it('Should (de)serialize correctly', () => {
      const buf = Buffer.from([
        Keccak.opcode, // opcode
        ...Buffer.from('12345678', 'hex'), // dstOffset
        ...Buffer.from('23456789', 'hex'), // hashOffset
        ...Buffer.from('3456789a', 'hex'), // hashSize
      ]);
      const inst = new Keccak(/*dstOffset=*/ 0x12345678, /*hashOffset=*/ 0x23456789, /*hashSize=*/ 0x3456789a);

      expect(Keccak.deserialize(buf)).toEqual(inst);
      expect(inst.serialize()).toEqual(buf);
    });

    it('Should hash correctly', async () => {
      const args = [new Field(1n), new Field(2n), new Field(3n)];
      const hashOffset = 0;
      context.machineState.memory.setSlice(hashOffset, args);

      const dstOffset = 3;

      const inputBuffer = Buffer.concat(args.map(field => field.toBuffer()));
      const expectedHash = keccak(inputBuffer);
      await new Keccak(dstOffset, hashOffset, args.length).execute(context);

      // The 32-byte digest spans two fields; reassemble it from the low 16 bytes of each.
      const result = context.machineState.memory.getSliceAs<Field>(dstOffset, 2);
      const combined = Buffer.concat([result[0].toBuffer().subarray(16, 32), result[1].toBuffer().subarray(16, 32)]);

      expect(combined).toEqual(expectedHash);
    });
  });

  describe('Sha256', () => {
    it('Should (de)serialize correctly', () => {
      const buf = Buffer.from([
        Sha256.opcode, // opcode
        ...Buffer.from('12345678', 'hex'), // dstOffset
        ...Buffer.from('23456789', 'hex'), // hashOffset
        ...Buffer.from('3456789a', 'hex'), // hashSize
      ]);
      const inst = new Sha256(/*dstOffset=*/ 0x12345678, /*hashOffset=*/ 0x23456789, /*hashSize=*/ 0x3456789a);

      expect(Sha256.deserialize(buf)).toEqual(inst);
      expect(inst.serialize()).toEqual(buf);
    });

    it('Should hash correctly', async () => {
      const args = [new Field(1n), new Field(2n), new Field(3n)];
      const hashOffset = 0;
      context.machineState.memory.setSlice(hashOffset, args);

      const dstOffset = 3;

      const inputBuffer = Buffer.concat(args.map(field => field.toBuffer()));
      const expectedHash = sha256(inputBuffer);
      await new Sha256(dstOffset, hashOffset, args.length).execute(context);

      // The 32-byte digest spans two fields; reassemble it from the low 16 bytes of each.
      const result = context.machineState.memory.getSliceAs<Field>(dstOffset, 2);
      const combined = Buffer.concat([result[0].toBuffer().subarray(16, 32), result[1].toBuffer().subarray(16, 32)]);

      expect(combined).toEqual(expectedHash);
    });
  });

  describe('Pedersen', () => {
    it('Should (de)serialize correctly', () => {
      const buf = Buffer.from([
        Pedersen.opcode, // opcode
        ...Buffer.from('12345678', 'hex'), // dstOffset
        ...Buffer.from('23456789', 'hex'), // hashOffset
        ...Buffer.from('3456789a', 'hex'), // hashSize
      ]);
      const inst = new Pedersen(/*dstOffset=*/ 0x12345678, /*hashOffset=*/ 0x23456789, /*hashSize=*/ 0x3456789a);

      // Was `Sha256.deserialize(buf)` (copy-paste error): jest's `toEqual` ignores the
      // prototype, so deserializing with the wrong class still matched structurally.
      expect(Pedersen.deserialize(buf)).toEqual(inst);
      expect(inst.serialize()).toEqual(buf);
    });

    it('Should hash correctly', async () => {
      const args = [new Field(1n), new Field(2n), new Field(3n)];
      const hashOffset = 0;
      context.machineState.memory.setSlice(hashOffset, args);

      const dstOffset = 3;

      const inputBuffer = args.map(field => field.toBuffer());
      const expectedHash = pedersenHash(inputBuffer);
      await new Pedersen(dstOffset, hashOffset, args.length).execute(context);

      const result = context.machineState.memory.get(dstOffset);
      expect(result).toEqual(new Field(toBigIntBE(expectedHash)));
    });
  });
});
138 changes: 138 additions & 0 deletions yarn-project/simulator/src/avm/opcodes/hashing.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,138 @@
import { toBigIntBE } from '@aztec/foundation/bigint-buffer';
import { keccak, pedersenHash, poseidonHash, sha256 } from '@aztec/foundation/crypto';

import { AvmContext } from '../avm_context.js';
import { Field } from '../avm_memory_types.js';
import { Opcode, OperandType } from '../serialization/instruction_serialization.js';
import { Instruction } from './instruction.js';

/** AVM Poseidon2 hashing opcode: hashes a slice of memory fields into one field. */
export class Poseidon2 extends Instruction {
  static type: string = 'POSEIDON2';
  static readonly opcode: Opcode = Opcode.POSEIDON;

  // Informs (de)serialization. See Instruction.deserialize.
  static readonly wireFormat: OperandType[] = [
    OperandType.UINT8,
    OperandType.UINT32,
    OperandType.UINT32,
    OperandType.UINT32,
  ];

  constructor(private dstOffset: number, private hashOffset: number, private hashSize: number) {
    super();
  }

  /**
   * Reads `hashSize` field elements starting at `hashOffset`, poseidon-hashes
   * them, and writes the single-field digest to `dstOffset`.
   */
  async execute(context: AvmContext): Promise<void> {
    // We hash a set of field elements.
    // getSlice takes (offset, size); passing `hashOffset + hashSize` as the
    // second argument over-reads whenever hashOffset > 0.
    const hashData = context.machineState.memory
      .getSlice(this.hashOffset, this.hashSize)
      .map(word => word.toBuffer());

    const hash = poseidonHash(hashData);
    context.machineState.memory.set(this.dstOffset, new Field(hash));

    context.machineState.incrementPc();
  }
}

/** AVM Keccak hashing opcode: hashes a slice of memory fields into a 2-field digest. */
export class Keccak extends Instruction {
  static type: string = 'KECCAK';
  static readonly opcode: Opcode = Opcode.KECCAK;

  // Informs (de)serialization. See Instruction.deserialize.
  static readonly wireFormat: OperandType[] = [
    OperandType.UINT8,
    OperandType.UINT32,
    OperandType.UINT32,
    OperandType.UINT32,
  ];

  constructor(private dstOffset: number, private hashOffset: number, private hashSize: number) {
    super();
  }

  /**
   * Reads `hashSize` field elements starting at `hashOffset`, keccak-hashes
   * their concatenated byte representation, and writes the 32-byte digest as
   * two fields: high 16 bytes at `dstOffset`, low 16 bytes at `dstOffset + 1`.
   */
  async execute(context: AvmContext): Promise<void> {
    // We hash a set of field elements.
    // getSlice takes (offset, size); passing `hashOffset + hashSize` as the
    // second argument over-reads whenever hashOffset > 0.
    const hashData = context.machineState.memory
      .getSlice(this.hashOffset, this.hashSize)
      .map(word => word.toBuffer());

    const hash = keccak(Buffer.concat(hashData));

    // Split the 32-byte digest into two 16-byte halves, one field each.
    const high = new Field(toBigIntBE(hash.subarray(0, 16)));
    const low = new Field(toBigIntBE(hash.subarray(16, 32)));

    context.machineState.memory.set(this.dstOffset, high);
    context.machineState.memory.set(this.dstOffset + 1, low);

    context.machineState.incrementPc();
  }
}

/** AVM SHA-256 hashing opcode: hashes a slice of memory fields into a 2-field digest. */
export class Sha256 extends Instruction {
  static type: string = 'SHA256';
  static readonly opcode: Opcode = Opcode.SHA256;

  // Informs (de)serialization. See Instruction.deserialize.
  static readonly wireFormat: OperandType[] = [
    OperandType.UINT8,
    OperandType.UINT32,
    OperandType.UINT32,
    OperandType.UINT32,
  ];

  constructor(private dstOffset: number, private hashOffset: number, private hashSize: number) {
    super();
  }

  /**
   * Reads `hashSize` field elements starting at `hashOffset`, sha256-hashes
   * their concatenated byte representation, and writes the 32-byte digest as
   * two fields: high 16 bytes at `dstOffset`, low 16 bytes at `dstOffset + 1`.
   */
  async execute(context: AvmContext): Promise<void> {
    // We hash a set of field elements.
    // getSlice takes (offset, size); passing `hashOffset + hashSize` as the
    // second argument over-reads whenever hashOffset > 0.
    const hashData = context.machineState.memory
      .getSlice(this.hashOffset, this.hashSize)
      .map(word => word.toBuffer());

    const hash = sha256(Buffer.concat(hashData));

    // Split the 32-byte digest into two 16-byte halves, one field each.
    const high = new Field(toBigIntBE(hash.subarray(0, 16)));
    const low = new Field(toBigIntBE(hash.subarray(16, 32)));

    context.machineState.memory.set(this.dstOffset, high);
    context.machineState.memory.set(this.dstOffset + 1, low);

    context.machineState.incrementPc();
  }
}

/** AVM Pedersen hashing opcode: hashes a slice of memory fields into one field. */
export class Pedersen extends Instruction {
  static type: string = 'PEDERSEN';
  static readonly opcode: Opcode = Opcode.PEDERSEN;

  // Informs (de)serialization. See Instruction.deserialize.
  static readonly wireFormat: OperandType[] = [
    OperandType.UINT8,
    OperandType.UINT32,
    OperandType.UINT32,
    OperandType.UINT32,
  ];

  constructor(private dstOffset: number, private hashOffset: number, private hashSize: number) {
    super();
  }

  /**
   * Reads `hashSize` field elements starting at `hashOffset`, pedersen-hashes
   * them, and writes the single-field digest to `dstOffset`.
   */
  async execute(context: AvmContext): Promise<void> {
    // We hash a set of field elements.
    // getSlice takes (offset, size); passing `hashOffset + hashSize` as the
    // second argument over-reads whenever hashOffset > 0.
    const hashData = context.machineState.memory
      .getSlice(this.hashOffset, this.hashSize)
      .map(word => word.toBuffer());

    // No domain separator for now (pedersenHash defaults to generator index 0).
    const hash = pedersenHash(hashData);
    context.machineState.memory.set(this.dstOffset, new Field(hash));

    context.machineState.incrementPc();
  }
}
Loading
Loading