Skip to content

Commit

Permalink
chore: temp add pedersen and sha256
Browse files Browse the repository at this point in the history
  • Loading branch information
Maddiaa0 committed Feb 13, 2024
1 parent 87f78c0 commit 1716277
Show file tree
Hide file tree
Showing 4 changed files with 141 additions and 4 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ export function pedersenCommit(input: Buffer[]) {
* Create a pedersen hash (field) from an array of input fields.
* Left pads any inputs less than 32 bytes.
*/
export function pedersenHash(input: Buffer[], index = 0) {
export function pedersenHash(input: Buffer[], index = 0): Buffer {
if (!input.every(i => i.length <= 32)) {
throw new Error('All Pedersen Hash input buffers must be <= 32 bytes.');
}
Expand Down
66 changes: 64 additions & 2 deletions yarn-project/simulator/src/avm/opcodes/hashing.test.ts
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
import { toBigIntBE } from '@aztec/foundation/bigint-buffer';
import { keccak, poseidonHash } from '@aztec/foundation/crypto';
import { keccak, pedersenHash, poseidonHash, sha256 } from '@aztec/foundation/crypto';

import { AvmContext } from '../avm_context.js';
import { Field } from '../avm_memory_types.js';
import { initContext } from '../fixtures/index.js';
import { Keccak, Poseidon2 } from './hashing.js';
import { Keccak, Pedersen, Poseidon2, Sha256 } from './hashing.js';

describe('Hashing Opcodes', () => {
let context: AvmContext;
Expand Down Expand Up @@ -73,4 +73,66 @@ describe('Hashing Opcodes', () => {
expect(combined).toEqual(expectedHash);
});
});

describe('Sha256', () => {
  it('Should (de)serialize correctly', () => {
    // Wire format: 1-byte opcode followed by three big-endian u32 operands.
    const bytes = Buffer.from([
      Sha256.opcode, // opcode
      ...Buffer.from('12345678', 'hex'), // destOffset
      ...Buffer.from('23456789', 'hex'), // hashOffset
      ...Buffer.from('3456789a', 'hex'), // hashSize
    ]);
    const instruction = new Sha256(/*destOffset=*/ 0x12345678, /*hashOffset=*/ 0x23456789, /*hashSize=*/ 0x3456789a);

    // Round-trip: decoding the bytes yields the instruction, and encoding it yields the bytes.
    expect(Sha256.deserialize(bytes)).toEqual(instruction);
    expect(instruction.serialize()).toEqual(bytes);
  });

  it('Should hash correctly', async () => {
    const inputs = [new Field(1n), new Field(2n), new Field(3n)];
    const hashOffset = 0;
    const destOffset = 3;
    context.machineState.memory.setSlice(hashOffset, inputs);

    // The opcode packs each field into 32 bytes and hashes the concatenation.
    const expectedHash = sha256(Buffer.concat(inputs.map(field => field.toBuffer())));

    await new Sha256(destOffset, hashOffset, inputs.length).execute(context);

    // The 32-byte digest is stored as two fields (high half, low half); each
    // half occupies the low 16 bytes of its field's 32-byte representation.
    const [high, low] = context.machineState.memory.getSliceAs<Field>(destOffset, 2);
    const reassembled = Buffer.concat([high.toBuffer().subarray(16, 32), low.toBuffer().subarray(16, 32)]);

    expect(reassembled).toEqual(expectedHash);
  });
});

describe('Pedersen', () => {
  it('Should (de)serialize correctly', () => {
    // Wire format: 1-byte opcode followed by three big-endian u32 operands.
    const buf = Buffer.from([
      Pedersen.opcode, // opcode
      ...Buffer.from('12345678', 'hex'), // destOffset
      ...Buffer.from('23456789', 'hex'), // hashOffset
      ...Buffer.from('3456789a', 'hex'), // hashSize
    ]);
    const inst = new Pedersen(/*destOffset=*/ 0x12345678, /*hashOffset=*/ 0x23456789, /*hashSize=*/ 0x3456789a);

    // BUG FIX: this previously asserted Sha256.deserialize(buf); the Pedersen
    // round-trip must be checked against Pedersen's own deserializer.
    expect(Pedersen.deserialize(buf)).toEqual(inst);
    expect(inst.serialize()).toEqual(buf);
  });

  it('Should hash correctly', async () => {
    const args = [new Field(1n), new Field(2n), new Field(3n)];
    const hashOffset = 0;
    context.machineState.memory.setSlice(hashOffset, args);

    const destOffset = 3;

    // pedersenHash takes an array of <=32-byte buffers, one per field.
    const inputBuffer = args.map(field => field.toBuffer());
    const expectedHash = pedersenHash(inputBuffer);
    await new Pedersen(destOffset, hashOffset, args.length).execute(context);

    // A pedersen hash is a single field element, written to one memory cell.
    const result = context.machineState.memory.get(destOffset);
    expect(result).toEqual(new Field(toBigIntBE(expectedHash)));
  });
});
});
75 changes: 74 additions & 1 deletion yarn-project/simulator/src/avm/opcodes/hashing.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { toBigIntBE, toBufferBE } from '@aztec/foundation/bigint-buffer';
import { keccak, poseidonHash } from '@aztec/foundation/crypto';
import { keccak, pedersenHash, poseidonHash, sha256 } from '@aztec/foundation/crypto';

import { AvmContext } from '../avm_context.js';
import { Field } from '../avm_memory_types.js';
Expand Down Expand Up @@ -77,3 +77,76 @@ export class Keccak extends Instruction {
context.machineState.incrementPc();
}
}

/**
 * AVM SHA256 opcode: hashes a slice of memory fields and writes the 32-byte
 * digest back to memory as two consecutive fields (high half, then low half).
 */
export class Sha256 extends Instruction {
  static type: string = 'SHA256';
  static readonly opcode: Opcode = Opcode.SHA256;

  // Informs (de)serialization. See Instruction.deserialize.
  static readonly wireFormat: OperandType[] = [
    OperandType.UINT8,
    OperandType.UINT32,
    OperandType.UINT32,
    OperandType.UINT32,
  ];

  constructor(private destOffset: number, private hashOffset: number, private hashSize: number) {
    super();
  }

  // Note hash output is 32 bytes, so takes up two fields
  async execute(context: AvmContext): Promise<void> {
    // Read the input fields from memory and pack each into 32 big-endian bytes.
    const inputFields = context.machineState.memory.getSlice(this.hashOffset, this.hashOffset + this.hashSize);
    // TODO: one hashing api takes an array of buffers, the other one big buffer wtf?
    const packedInput = Buffer.concat(inputFields.map(word => toBufferBE(word.toBigInt(), 32)));

    const digest = sha256(packedInput);

    // The 256-bit digest does not fit in one field: store the high 16 bytes
    // at destOffset and the low 16 bytes at destOffset + 1.
    context.machineState.memory.set(this.destOffset, new Field(toBigIntBE(digest.subarray(0, 16))));
    context.machineState.memory.set(this.destOffset + 1, new Field(toBigIntBE(digest.subarray(16, 32))));

    context.machineState.incrementPc();
  }
}

/**
 * AVM PEDERSEN opcode: pedersen-hashes a slice of memory fields and writes the
 * resulting single field element to memory at destOffset.
 */
export class Pedersen extends Instruction {
  static type: string = 'PEDERSEN';
  static readonly opcode: Opcode = Opcode.PEDERSEN;

  // Informs (de)serialization. See Instruction.deserialize.
  static readonly wireFormat: OperandType[] = [
    OperandType.UINT8,
    OperandType.UINT32,
    OperandType.UINT32,
    OperandType.UINT32,
  ];

  constructor(private destOffset: number, private hashOffset: number, private hashSize: number) {
    super();
  }

  async execute(context: AvmContext): Promise<void> {
    // TODO: types are going through buffers to deal with two different Fr types in bb.js and foundation
    // Read the input slice and give each field element 32 big-endian bytes,
    // since pedersenHash expects an array of <=32-byte buffers.
    const inputBuffers = context.machineState.memory
      .getSlice(this.hashOffset, this.hashOffset + this.hashSize)
      .map(word => toBufferBE(word.toBigInt(), 32));

    // No domain separator for now (index defaults to 0).
    const digest = pedersenHash(inputBuffers);

    // A pedersen hash fits in a single field, so only one cell is written.
    context.machineState.memory.set(this.destOffset, new Field(toBigIntBE(digest)));

    context.machineState.incrementPc();
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,8 @@ export enum Opcode {
KECCAK,
POSEIDON,
// Add new opcodes before this
SHA256, // temp - may be removed, but a lot of contracts rely on it
PEDERSEN, // temp - may be removed, but a lot of contracts rely on it
TOTAL_OPCODES_NUMBER,
}

Expand Down

0 comments on commit 1716277

Please sign in to comment.