feat!: update to 0.34.0 #6

Merged · 3 commits · Sep 27, 2024
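Summary (inferred from the diff; the PR body itself is not shown): this breaking-change release bumps the supported Noir toolchain from 0.32.0 to 0.34.0. The CI test matrix, the nargo install step, and the `compiler_version` constraint in Nargo.toml all move to 0.34.0, and every `to_be_bytes` call site is migrated to the 0.34.0 API, where the output length is a type-level array size instead of a runtime argument. A minimal sketch of the call-site migration, using a hypothetical `demo` function (the call patterns themselves match the hunks below):

```rust
// Hypothetical example of the 0.32.0 -> 0.34.0 to_be_bytes migration.
fn demo(f: Field) -> [u8; 32] {
    // 0.32.0 style: the length was a runtime argument and the result was a
    // slice, so fixed-size arrays needed an extra .as_array():
    //     let bytes: [u8; 32] = f.to_be_bytes(32).as_array();

    // 0.34.0 style: the length is a generic parameter inferred from the type
    // annotation, and the result is already a fixed-size array:
    let bytes: [u8; 32] = f.to_be_bytes();
    bytes
}
```

The remaining hunks are formatting-only (old and new lines differ just in whitespace or line breaks, most likely from the formatter), plus one cleanup that drops an unused `MaxKeyBytes` generic from `__get_keys_at_root`.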
4 changes: 2 additions & 2 deletions .github/workflows/test.yml
@@ -16,7 +16,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- toolchain: [nightly, 0.32.0]
+ toolchain: [nightly, 0.34.0]
steps:
- name: Checkout sources
uses: actions/checkout@v4
@@ -38,7 +38,7 @@ jobs:
- name: Install Nargo
uses: noir-lang/[email protected]
with:
- toolchain: 0.32.0
+ toolchain: 0.34.0

- name: Run formatter
run: nargo fmt --check
2 changes: 1 addition & 1 deletion Nargo.toml
@@ -2,7 +2,7 @@
name = "json_parser"
type = "lib"
authors = [""]
- compiler_version = ">=0.32.0"
+ compiler_version = ">=0.34.0"

[dependencies]
noir_sort = {tag = "v0.1.0", git = "https://github.com/noir-lang/noir_sort"}
5 changes: 2 additions & 3 deletions src/_string_tools/slice_field.nr
@@ -8,7 +8,7 @@ struct Slice200 {
hihi: u64, // 7 bytes
hilo: u64, // 7 bytes
lohi: u64, // 7 bytes
- lolo: u32 // 4 bytes
+ lolo: u32 // 4 bytes
}
global PHI_54: u64 = 0x30644E72E131A0;
global PLO_200: Slice200 = Slice200 {
@@ -19,7 +19,7 @@ global PLO_200: Slice200 = Slice200 {
};

unconstrained fn __slice_200_bits_from_field(f: Field) -> (Field, Field, bool) {
- let b = f.to_be_bytes(32);
+ let b: [u8; 32] = f.to_be_bytes();

let mut res200: Slice200 = Slice200 { hihi: 0, hilo: 0, lohi: 0, lolo: 0 };
let mut res54: u64 = 0;
@@ -74,4 +74,3 @@ pub fn slice_200_bits_from_field(f: Field) -> Field {
hi_diff.assert_max_bit_size(56);
lo
}

14 changes: 7 additions & 7 deletions src/_string_tools/slice_packed_field.nr
@@ -371,7 +371,7 @@ global PATH_LOOKUP: [[bool; 5]; 32] = [
**/
unconstrained fn __slice_field(f: Field, num_bytes: Field) -> [Field; 5] {
let head_path = PATH_LOOKUP[num_bytes];
- let bytes = f.to_be_bytes(32);
+ let bytes: [u8; 32] = f.to_be_bytes();
let bytes = bytes.map(|b: u8| b as Field);

let mut chunks: [Field; 5] = [0; 5];
@@ -401,22 +401,22 @@ unconstrained fn __slice_field(f: Field, num_bytes: Field) -> [Field; 5] {
tail_ptr += 4;
}
if head_path[3] {
- chunks[3] =
+ chunks[3] =
bytes[head_ptr] * 0x100000000000000 + bytes[head_ptr + 1] * 0x1000000000000
+ bytes[head_ptr + 2] * 0x10000000000 + bytes[head_ptr + 3] * 0x100000000
+ bytes[head_ptr + 4] * 0x1000000 + bytes[head_ptr + 5] * 0x10000
+ bytes[head_ptr + 6] * 0x100 + bytes[head_ptr + 7];
head_ptr += 8;
} else {
- chunks[3] =
+ chunks[3] =
bytes[tail_ptr] * 0x100000000000000 + bytes[tail_ptr + 1] * 0x1000000000000
+ bytes[tail_ptr + 2] * 0x10000000000 + bytes[tail_ptr + 3] * 0x100000000
+ bytes[tail_ptr + 4] * 0x1000000 + bytes[tail_ptr + 5] * 0x10000
+ bytes[tail_ptr + 6] * 0x100 + bytes[tail_ptr + 7];
tail_ptr += 8;
}
if head_path[4] {
- chunks[4] =
+ chunks[4] =
bytes[head_ptr] * 0x1000000000000000000000000000000 + bytes[head_ptr + 1] * 0x10000000000000000000000000000
+ bytes[head_ptr + 2] * 0x100000000000000000000000000 + bytes[head_ptr + 3] * 0x1000000000000000000000000
+ bytes[head_ptr + 4] * 0x10000000000000000000000 + bytes[head_ptr + 5] * 0x100000000000000000000
@@ -426,7 +426,7 @@ unconstrained fn __slice_field(f: Field, num_bytes: Field) -> [Field; 5] {
+ bytes[head_ptr + 12] * 0x1000000 + bytes[head_ptr + 13] * 0x10000
+ bytes[head_ptr + 14] * 0x100 + bytes[head_ptr + 15];
} else {
- chunks[4] =
+ chunks[4] =
bytes[tail_ptr] * 0x1000000000000000000000000000000 + bytes[tail_ptr + 1] * 0x10000000000000000000000000000
+ bytes[tail_ptr + 2] * 0x100000000000000000000000000 + bytes[tail_ptr + 3] * 0x1000000000000000000000000
+ bytes[tail_ptr + 4] * 0x10000000000000000000000 + bytes[tail_ptr + 5] * 0x100000000000000000000
@@ -548,7 +548,7 @@ pub fn slice_field(f: Field, num_bytes: Field) -> (Field, Field) {

/**
* @brief Given an array of fields that pack 31 bytes, return an array that slices the packed byte array at a given index for a given number of bytes
- * @description Some serious dark black magic nonsense going on here. TODO: document
+ * @description Some serious dark black magic nonsense going on here. TODO: document
**/
pub fn slice_fields<let InputFields: u16, let OutputFields: u16>(
data: [Field; InputFields],
@@ -783,7 +783,7 @@ fn test_slice_fields() {
fn test_slice_field() {
let input = 0xffeebbccbbaa99887766554433221100112233445566778899aabbccddeeff;

- let input_bytes: [u8; 32] = input.to_be_bytes(32).as_array();
+ let input_bytes: [u8; 32] = input.to_be_bytes();

for i in 0..32 {
println(f"i = {i}");
3 changes: 1 addition & 2 deletions src/_string_tools/string_chopper.nr
@@ -13,7 +13,7 @@ impl<let NeedlePackedFields: u16> StringChopper<NeedlePackedFields> {

let sliced: [Field; NeedlePackedFields] = slice_fields(haystack, start_bytes, num_bytes);

- let sliced_bytes = sliced.map(|x: Field| { let r: [u8; 31] = x.to_be_bytes(31).as_array(); r });
+ let sliced_bytes = sliced.map(|x: Field| { let r: [u8; 31] = x.to_be_bytes(); r });

let num_slices = StringBytes / 31;
let overflow = StringBytes % 31;
@@ -28,4 +28,3 @@ impl<let NeedlePackedFields: u16> StringChopper<NeedlePackedFields> {
parsed_string
}
}

4 changes: 1 addition & 3 deletions src/_table_generation/make_tables.nr
@@ -1,6 +1,4 @@
- /**
- * @file Contains methods used to generate tables in `json_tables.nr`. These table generation methods shouldn't be used inside of actual circuits.
- **/
+ //! Contains methods used to generate tables in `json_tables.nr`. These table generation methods shouldn't be used inside of actual circuits.

mod CaptureMode {
global GRAMMAR_CAPTURE = 0;
10 changes: 5 additions & 5 deletions src/getters.nr
@@ -27,7 +27,7 @@ impl<let NumBytes: u32, let NumPackedFields: u16, let MaxNumTokens: u16, let Max

/**
* @brief If the root JSON is an object, extract a JSONEntry that describes an array, object or value that maps to a given key
- * @description returns an Option<JSONEntry> which will be null if the entry does not exist
+ * @description returns an Option<JSONEntry> which will be null if the entry does not exist
**/
fn get_json_entry<let KeyBytes: u16>(self, key: [u8; KeyBytes]) -> (bool, JSONEntry) {
// let key_index = self.find_key_in_map(keyhash);
@@ -251,7 +251,7 @@ impl<let NumBytes: u32, let NumPackedFields: u16, let MaxNumTokens: u16, let Max
* @brief figures out if `target` exists as a key in `self.key_hashes`
* @details if `target` does not exist, we return the two indicies of adjacent
* entries in `self.key_hashes`, lhs_index, rhs_index, where
- lhs_index < key_hash < rhs_index
+ lhs_index < key_hash < rhs_index
**/
unconstrained fn search_for_key_in_map(self, target: Field) -> KeySearchResult {
let mut found_index: Field = 0;
@@ -322,7 +322,7 @@ impl<let NumBytes: u32, let NumPackedFields: u16, let MaxNumTokens: u16, let Max
If key does NOT exist. 3 cases
case 1: keyhash < first entry
case 2: keyhash > last entry
- case 3: entry A > keyhash > entryB
+ case 3: entry A > keyhash > entryB

*/
let hasher: ByteHasher<MaxKeyFields> = ByteHasher {};
@@ -380,7 +380,7 @@ impl<let NumBytes: u32, let NumPackedFields: u16, let MaxNumTokens: u16, let Max
If key does NOT exist. 3 cases
case 1: keyhash < first entry
case 2: keyhash > last entry
- case 3: entry A > keyhash > entryB
+ case 3: entry A > keyhash > entryB

*/
let hasher: ByteHasher<MaxKeyFields> = ByteHasher {};
@@ -424,7 +424,7 @@ impl<let NumBytes: u32, let NumPackedFields: u16, let MaxNumTokens: u16, let Max
(search_result.found, search_result.lhs_index)
}

- unconstrained fn __get_keys_at_root<let MaxNumKeys: u16, let MaxKeyBytes: u16>(self) -> BoundedVec<Field, MaxNumKeys> {
+ unconstrained fn __get_keys_at_root<let MaxNumKeys: u16>(self) -> BoundedVec<Field, MaxNumKeys> {
let mut result: BoundedVec<Field, MaxNumKeys> = BoundedVec { len: 0, storage: [0; MaxNumKeys] };

let root_object: JSONEntry = JSONEntry::from(self.json_entries_packed[self.root_index_in_transcript]);
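For context on the `search_for_key_in_map` hunks above: the doc comments describe a lookup over the sorted `key_hashes` array that, when the target key is absent, returns the two adjacent indices that bracket it (lhs_index < key_hash < rhs_index). A minimal standalone sketch of that idea, assuming `u64` hashes (this helper is hypothetical; the real code works over `Field` hashes and returns a richer `KeySearchResult`):

```rust
// Sketch: find `target` in a sorted array, or report the neighbouring
// indices that bracket it. Edge cases (target before the first entry or
// after the last) are handled separately in the real implementation.
unconstrained fn neighbour_search<let N: u16>(hashes: [u64; N], target: u64) -> (bool, Field, Field) {
    let mut found = false;
    let mut lhs: Field = 0;
    let mut rhs: Field = 0;
    for i in 0..N {
        if hashes[i] == target {
            found = true;
            lhs = i as Field;
            rhs = i as Field;
        }
    }
    if !found {
        // hashes is sorted ascending, so the last index whose entry is
        // below target is the left neighbour; its successor is the right.
        for i in 0..N {
            if hashes[i] < target {
                lhs = i as Field;
            }
        }
        rhs = lhs + 1;
    }
    (found, lhs, rhs)
}
```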
22 changes: 12 additions & 10 deletions src/json.nr
@@ -61,16 +61,18 @@ struct JSON<let NumBytes: u32, let NumPackedFields: u16, let MaxNumTokens: u16,
**/
impl<let NumBytes: u16, let NumPackedFields: u16, let MaxNumTokens: u16, let MaxNumValues: u16, let MaxKeyFields: u16> std::cmp::Eq for JSON<NumBytes, NumPackedFields, MaxNumTokens, MaxNumValues, MaxKeyFields> {
fn eq(self, other: Self) -> bool {
- (self.json == other.json) & (self.raw_transcript == other.raw_transcript)
- & (self.transcript == other.transcript)
- & (self.transcript_length == other.transcript_length)
- & (self.key_data == other.key_data)
- & (self.key_hashes == other.key_hashes)
- & (self.layer_type_of_root == other.layer_type_of_root)
- & (self.root_id == other.root_id)
- & (self.root_index_in_transcript == other.root_index_in_transcript)
- & (self.json_entries_packed == other.json_entries_packed)
- & (self.json_packed == other.json_packed)
+ (self.json == other.json)
+ & (self.raw_transcript == other.raw_transcript)
+ & (self.transcript == other.transcript)
+ & (self.transcript_length == other.transcript_length)
+ & (self.key_data == other.key_data)
+ & (self.key_hashes == other.key_hashes)
+ & (self.layer_type_of_root == other.layer_type_of_root)
+ & (self.root_id == other.root_id)
+ & (self.root_index_in_transcript
+ == other.root_index_in_transcript)
+ & (self.json_entries_packed == other.json_entries_packed)
+ & (self.json_packed == other.json_packed)
}
}

11 changes: 6 additions & 5 deletions src/json_entry.nr
@@ -10,7 +10,7 @@ struct JSONContextStackEntry {
}
impl JSONContextStackEntry {
unconstrained fn __from_field(f: Field) -> Self {
- let bytes = f.to_be_bytes(11);
+ let bytes: [u8; 11] = f.to_be_bytes();
let context = bytes[0] as Field;
let num_entries = bytes[1] as Field * 0x100 + bytes[2] as Field;
let current_key_length = bytes[3] as Field * 0x100 + bytes[4] as Field;
@@ -59,7 +59,7 @@ struct JSONEntry {
entry_type: Field, // is this an OBJECT_TOKEN, ARRAY_TOKEN, STRING_TOKEN, NUMERIC_TOKEN or LITERAL_TOKEN?
id: Field, // if this is an object or array, describes the unique identifier assigned to this item
parent_index: Field, // if parent is an object or array, describes the unique identifier assigned to our parent
- array_pointer: Field, // if parent is an array, where in the array are we?
+ array_pointer: Field, // if parent is an array, where in the array are we?
child_pointer: Field, // if this is an object or array, points to the location in `json_entries_packed` of this item's first child
num_children: Field, // if this is an object or array, how many child elements do we contain?
json_pointer: Field, // points to the json that describes the first byte of this entry
@@ -145,7 +145,7 @@ impl JSONEntry {
parent_index
}
unconstrained fn __from_field(f: Field) -> Self {
- let bytes: [u8; 20] = f.to_be_bytes(20).as_array(); // 10.5 gates
+ let bytes: [u8; 20] = f.to_be_bytes(); // 10.5 gates

let entry_type = bytes[0] as Field;

@@ -230,6 +230,7 @@ impl std::cmp::Eq for JSONEntryPacked {
}

impl std::default::Default for JSONEntryPacked {
- fn default() -> Self { JSONEntryPacked{ value: 0 }}
+ fn default() -> Self {
+ JSONEntryPacked { value: 0 }
+ }
}
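The `__from_field` hunks in this file (and the matching one in src/keymap.nr below) all decode the same convention: a struct packed big-endian into a single `Field`, read back byte by byte. A round-trip sketch for the first three `JSONContextStackEntry` slots (the `pack_context` helper is hypothetical; the decode lines mirror the diff above):

```rust
// Hypothetical packing helper matching the 11-byte big-endian layout that
// JSONContextStackEntry::__from_field decodes:
// byte 0 = context, bytes 1-2 = num_entries, bytes 3-4 = current_key_length.
fn pack_context(context: Field, num_entries: Field, current_key_length: Field) -> Field {
    context * 0x100000000000000000000 // byte 0: weight 256^10
        + num_entries * 0x10000000000000000 // bytes 1-2: weight 256^8
        + current_key_length * 0x1000000000000 // bytes 3-4: weight 256^6
}

// Decode side, as in the diff: the type annotation picks N = 11 for to_be_bytes.
unconstrained fn unpack_context(f: Field) -> (Field, Field, Field) {
    let bytes: [u8; 11] = f.to_be_bytes();
    let context = bytes[0] as Field;
    let num_entries = bytes[1] as Field * 0x100 + bytes[2] as Field;
    let current_key_length = bytes[3] as Field * 0x100 + bytes[4] as Field;
    (context, num_entries, current_key_length)
}
```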

2 changes: 1 addition & 1 deletion src/keymap.nr
@@ -26,7 +26,7 @@ impl KeyIndexData {
}

unconstrained fn __from_field(packed: Field) -> Self {
- let unpacked = packed.to_be_bytes(8);
+ let unpacked: [u8; 8] = packed.to_be_bytes();
let array_index: Field = unpacked[1] as Field + unpacked[0] as Field * 0x100;
let json_length: Field = unpacked[3] as Field + unpacked[2] as Field * 0x100;
let json_index: Field = unpacked[5] as Field + unpacked[4] as Field * 0x100;