Skip to content

Commit

Permalink
making test less disgusting with chatgpt
Browse files Browse the repository at this point in the history
  • Loading branch information
benesjan committed May 30, 2024
1 parent 34904d6 commit bab8b61
Showing 1 changed file with 47 additions and 37 deletions.
84 changes: 47 additions & 37 deletions yarn-project/pxe/src/note_processor/note_processor.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ describe('Note Processor', () => {
// ownedData: [tx1, tx2, ...], the numbers in each tx represents the indices of the note hashes the account owns.
const createEncryptedLogsAndOwnedL1NotePayloads = (ownedData: number[][], ownedNotes: TaggedNote[]) => {
const newNotes: TaggedNote[] = [];
const ownedL1NotePayloads: L1NotePayload[] = [];
const payloads: L1NotePayload[] = [];
const txLogs: EncryptedNoteTxL2Logs[] = [];
let usedOwnedNote = 0;
for (let i = 0; i < TXS_PER_BLOCK; ++i) {
Expand All @@ -76,7 +76,7 @@ describe('Note Processor', () => {
usedOwnedNote += note === ownedNotes[usedOwnedNote] ? 1 : 0;
newNotes.push(note);
if (isOwner) {
ownedL1NotePayloads.push(note.notePayload);
payloads.push(note.notePayload);
}

const ephSk = GrumpkinScalar.random();
Expand All @@ -89,7 +89,7 @@ describe('Note Processor', () => {
}

const encryptedLogs = new EncryptedNoteL2BlockL2Logs(txLogs);
return { newNotes, ownedL1NotePayloads, encryptedLogs };
return { newNotes, payloads, encryptedLogs };
};

const mockData = (
Expand All @@ -98,35 +98,45 @@ describe('Note Processor', () => {
appendedBlocks = 0,
ownedNotes: TaggedNote[] = [], // L1NotePayload[] = [],
) => {
if (ownedData.length > TXS_PER_BLOCK) {
throw new Error(`Tx size should be less than ${TXS_PER_BLOCK}.`);
const transactionLimit = TXS_PER_BLOCK;

if (ownedData.length > transactionLimit) {
throw new Error(`Transaction size must be less than ${transactionLimit}.`);
}

const blocks: L2Block[] = [];
const encryptedLogsArr: EncryptedNoteL2BlockL2Logs[] = [];
const ownedL1NotePayloads: L1NotePayload[] = [];
const numberOfBlocks = prependedBlocks + appendedBlocks + 1;
for (let i = 0; i < numberOfBlocks; ++i) {
const block = L2Block.random(firstBlockNum + i, TXS_PER_BLOCK);
block.header.state.partial.noteHashTree.nextAvailableLeafIndex =
firstBlockDataEndIndex + i * numCommitmentsPerBlock;

const isTargetBlock = i === prependedBlocks;
const {
newNotes,
encryptedLogs,
ownedL1NotePayloads: payloads,
} = createEncryptedLogsAndOwnedL1NotePayloads(isTargetBlock ? ownedData : [], isTargetBlock ? ownedNotes : []);
encryptedLogsArr.push(encryptedLogs);
const totalBlocks = prependedBlocks + appendedBlocks + 1;
const blocks = [];
const encryptedLogEntries = [];
const ownedL1NotePayloads = [];

for (let index = 0; index < totalBlocks; index++) {
const blockNumber = firstBlockNum + index;
const block = L2Block.random(blockNumber, transactionLimit);
const nextLeafIndex = firstBlockDataEndIndex + index * numCommitmentsPerBlock;

block.header.state.partial.noteHashTree.nextAvailableLeafIndex = nextLeafIndex;

const isTargetBlock = index === prependedBlocks;
const { newNotes, encryptedLogs, payloads } = createEncryptedLogsAndOwnedL1NotePayloads(
isTargetBlock ? ownedData : [],
isTargetBlock ? ownedNotes : [],
);

encryptedLogEntries.push(encryptedLogs);
ownedL1NotePayloads.push(...payloads);
for (let i = 0; i < TXS_PER_BLOCK; i++) {
const txEffectNotes = newNotes.slice(i * MAX_NEW_NOTE_HASHES_PER_TX, (i + 1) * MAX_NEW_NOTE_HASHES_PER_TX);
block.body.txEffects[i].noteHashes = txEffectNotes.map(n => pedersenHash(n.notePayload.note.items));

for (let txIndex = 0; txIndex < transactionLimit; txIndex++) {
const start = txIndex * MAX_NEW_NOTE_HASHES_PER_TX;
const end = (txIndex + 1) * MAX_NEW_NOTE_HASHES_PER_TX;
const txNotes = newNotes.slice(start, end);

block.body.txEffects[txIndex].noteHashes = txNotes.map(note => pedersenHash(note.notePayload.note.items));
}

blocks.push(block);
}
return { blocks, encryptedLogsArr, ownedL1NotePayloads };

return { blocks, encryptedLogEntries, ownedL1NotePayloads };
};

beforeAll(() => {
Expand Down Expand Up @@ -179,8 +189,8 @@ describe('Note Processor', () => {
});

it('should store a note that belongs to us', async () => {
const { blocks, encryptedLogsArr, ownedL1NotePayloads } = mockData([[2]]);
await noteProcessor.process(blocks, encryptedLogsArr);
const { blocks, encryptedLogEntries, ownedL1NotePayloads } = mockData([[2]]);
await noteProcessor.process(blocks, encryptedLogEntries);

expect(addNotesSpy).toHaveBeenCalledTimes(1);
expect(addNotesSpy).toHaveBeenCalledWith(
Expand All @@ -199,12 +209,12 @@ describe('Note Processor', () => {
const appendedBlocks = 1;
const thisBlockDataStartIndex = firstBlockDataStartIndex + prependedBlocks * numCommitmentsPerBlock;

const { blocks, encryptedLogsArr, ownedL1NotePayloads } = mockData(
const { blocks, encryptedLogEntries, ownedL1NotePayloads } = mockData(
[[], [1], [], [0, 2]],
prependedBlocks,
appendedBlocks,
);
await noteProcessor.process(blocks, encryptedLogsArr);
await noteProcessor.process(blocks, encryptedLogEntries);

expect(addNotesSpy).toHaveBeenCalledTimes(1);
expect(addNotesSpy).toHaveBeenCalledWith(
Expand All @@ -230,17 +240,17 @@ describe('Note Processor', () => {
}, 30_000);

it('should not store notes that do not belong to us', async () => {
const { blocks, encryptedLogsArr } = mockData([]);
await noteProcessor.process(blocks, encryptedLogsArr);
const { blocks, encryptedLogEntries } = mockData([]);
await noteProcessor.process(blocks, encryptedLogEntries);
});

it('should be able to recover two note payloads containing the same note', async () => {
const note = TaggedNote.random(); // L1NotePayload.random();
const note2 = TaggedNote.random(); // L1NotePayload.random();
// All note payloads except one have the same contract address, storage slot, and the actual note.
const notes = [note, note, note, note2, note];
const { blocks, encryptedLogsArr, ownedL1NotePayloads } = mockData([[0, 2], [], [0, 1, 3]], 0, 0, notes);
await noteProcessor.process(blocks, encryptedLogsArr);
const { blocks, encryptedLogEntries, ownedL1NotePayloads } = mockData([[0, 2], [], [0, 1, 3]], 0, 0, notes);
await noteProcessor.process(blocks, encryptedLogEntries);

const addedNoteDaos: IncomingNoteDao[] = addNotesSpy.mock.calls[0][0];
expect(addedNoteDaos.map(dao => dao)).toEqual([
Expand All @@ -262,14 +272,14 @@ describe('Note Processor', () => {
});

it('advances the block number', async () => {
const { blocks, encryptedLogsArr } = mockData([[2]]);
await noteProcessor.process(blocks, encryptedLogsArr);
const { blocks, encryptedLogEntries } = mockData([[2]]);
await noteProcessor.process(blocks, encryptedLogEntries);
expect(noteProcessor.status.syncedToBlock).toEqual(blocks.at(-1)?.number);
});

it('should restore the last block number processed and ignore the starting block', async () => {
const { blocks, encryptedLogsArr } = mockData([[2]]);
await noteProcessor.process(blocks, encryptedLogsArr);
const { blocks, encryptedLogEntries } = mockData([[2]]);
await noteProcessor.process(blocks, encryptedLogEntries);

const newNoteProcessor = await NoteProcessor.create(
account.address,
Expand Down

0 comments on commit bab8b61

Please sign in to comment.