diff --git a/.noir-sync-commit b/.noir-sync-commit index e94abeae658..c253290bf18 100644 --- a/.noir-sync-commit +++ b/.noir-sync-commit @@ -1 +1 @@ -f0c268606a71381ab4504396695a0adb9b3258b6 +45344bfe1148a2f592c2e432744d3fb3d46340cc diff --git a/noir-projects/aztec-nr/authwit/src/entrypoint/app.nr b/noir-projects/aztec-nr/authwit/src/entrypoint/app.nr index 4c2aaf8fb39..663ae8044a6 100644 --- a/noir-projects/aztec-nr/authwit/src/entrypoint/app.nr +++ b/noir-projects/aztec-nr/authwit/src/entrypoint/app.nr @@ -50,7 +50,7 @@ impl AppPayload { for i in 0..ACCOUNT_MAX_CALLS { bytes.extend_from_array(self.function_calls[i].to_be_bytes()); } - bytes.extend_from_slice(self.nonce.to_be_bytes(32)); + bytes.extend_from_array(self.nonce.to_be_bytes::<32>()); bytes.storage } diff --git a/noir-projects/aztec-nr/authwit/src/entrypoint/fee.nr b/noir-projects/aztec-nr/authwit/src/entrypoint/fee.nr index 0d302d0fcbc..f7d0ea258fc 100644 --- a/noir-projects/aztec-nr/authwit/src/entrypoint/fee.nr +++ b/noir-projects/aztec-nr/authwit/src/entrypoint/fee.nr @@ -50,7 +50,7 @@ impl FeePayload { for i in 0..MAX_FEE_FUNCTION_CALLS { bytes.extend_from_array(self.function_calls[i].to_be_bytes()); } - bytes.extend_from_slice(self.nonce.to_be_bytes(32)); + bytes.extend_from_array(self.nonce.to_be_bytes::<32>()); bytes.push(self.is_fee_payer as u8); bytes.storage diff --git a/noir-projects/aztec-nr/authwit/src/entrypoint/function_call.nr b/noir-projects/aztec-nr/authwit/src/entrypoint/function_call.nr index 10102e9a240..18ed6acb11a 100644 --- a/noir-projects/aztec-nr/authwit/src/entrypoint/function_call.nr +++ b/noir-projects/aztec-nr/authwit/src/entrypoint/function_call.nr @@ -22,15 +22,15 @@ impl Serialize for FunctionCall { impl FunctionCall { fn to_be_bytes(self) -> [u8; FUNCTION_CALL_SIZE_IN_BYTES] { let mut bytes: [u8; FUNCTION_CALL_SIZE_IN_BYTES] = [0; FUNCTION_CALL_SIZE_IN_BYTES]; - let args_hash_bytes = self.args_hash.to_be_bytes(32); + let args_hash_bytes: [u8; 32] = 
self.args_hash.to_be_bytes(); for i in 0..32 { bytes[i] = args_hash_bytes[i]; } - let function_selector_bytes = self.function_selector.to_field().to_be_bytes(32); + let function_selector_bytes: [u8; 32] = self.function_selector.to_field().to_be_bytes(); for i in 0..32 { bytes[i + 32] = function_selector_bytes[i]; } - let target_address_bytes = self.target_address.to_field().to_be_bytes(32); + let target_address_bytes: [u8; 32] = self.target_address.to_field().to_be_bytes(); for i in 0..32 { bytes[i + 64] = target_address_bytes[i]; } diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/header.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/header.nr index f43c60b6b60..c4962380f92 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/header.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/header.nr @@ -23,7 +23,7 @@ impl EncryptedLogHeader { iv[i] = full_key[i + 16]; } - let input: [u8; 32] = self.address.to_field().to_be_bytes(32).as_array(); + let input: [u8; 32] = self.address.to_field().to_be_bytes(); aes128_encrypt(input, iv, sym_key).as_array() } } diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/incoming_body.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/incoming_body.nr index fd2cab4422e..e3d2ea44262 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/incoming_body.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/incoming_body.nr @@ -90,8 +90,8 @@ mod test { let mut buffer: [u8; ADDRESS_NOTE_BYTES_LEN] = [0; ADDRESS_NOTE_BYTES_LEN]; - let storage_slot_bytes = storage_slot.to_be_bytes(32); - let note_type_id_bytes = AddressNote::get_note_type_id().to_be_bytes(32); + let storage_slot_bytes: [u8; 32] = storage_slot.to_be_bytes(); + let note_type_id_bytes: [u8; 32] = AddressNote::get_note_type_id().to_be_bytes(); for i in 0..32 { buffer[i] = storage_slot_bytes[i]; @@ -99,7 +99,7 @@ mod test { } for i in 0..serialized_note.len() { - let bytes = serialized_note[i].to_be_bytes(32); + let bytes: [u8; 32] 
= serialized_note[i].to_be_bytes(); for j in 0..32 { buffer[64 + i * 32 + j] = bytes[j]; } @@ -183,8 +183,8 @@ mod test { fn private_to_be_bytes(self, randomness: Field) -> [u8; TEST_EVENT_BYTES_LEN] { let mut buffer: [u8; TEST_EVENT_BYTES_LEN] = [0; TEST_EVENT_BYTES_LEN]; - let randomness_bytes = randomness.to_be_bytes(32); - let event_type_id_bytes = TestEvent::get_event_type_id().to_field().to_be_bytes(32); + let randomness_bytes: [u8; 32] = randomness.to_be_bytes(); + let event_type_id_bytes: [u8; 32] = TestEvent::get_event_type_id().to_field().to_be_bytes(); for i in 0..32 { buffer[i] = randomness_bytes[i]; @@ -194,7 +194,7 @@ mod test { let serialized_event = self.serialize(); for i in 0..serialized_event.len() { - let bytes = serialized_event[i].to_be_bytes(32); + let bytes: [u8; 32] = serialized_event[i].to_be_bytes(); for j in 0..32 { buffer[64 + i * 32 + j] = bytes[j]; } @@ -206,7 +206,7 @@ mod test { fn to_be_bytes(self) -> [u8; TEST_EVENT_BYTES_LEN_WITHOUT_RANDOMNESS] { let mut buffer: [u8; TEST_EVENT_BYTES_LEN_WITHOUT_RANDOMNESS] = [0; TEST_EVENT_BYTES_LEN_WITHOUT_RANDOMNESS]; - let event_type_id_bytes = TestEvent::get_event_type_id().to_field().to_be_bytes(32); + let event_type_id_bytes: [u8; 32] = TestEvent::get_event_type_id().to_field().to_be_bytes(); for i in 0..32 { buffer[i] = event_type_id_bytes[i]; @@ -215,7 +215,7 @@ mod test { let serialized_event = self.serialize(); for i in 0..serialized_event.len() { - let bytes = serialized_event[i].to_be_bytes(32); + let bytes: [u8; 32] = serialized_event[i].to_be_bytes(); for j in 0..32 { buffer[32 + i * 32 + j] = bytes[j]; } diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/outgoing_body.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/outgoing_body.nr index 96c35c68a75..6149584933c 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/outgoing_body.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/outgoing_body.nr @@ -25,10 +25,10 @@ impl EncryptedLogOutgoingBody { let 
mut buffer = [0 as u8; 128]; - let serialized_eph_sk_high = self.eph_sk.hi.to_be_bytes(32); - let serialized_eph_sk_low = self.eph_sk.lo.to_be_bytes(32); + let serialized_eph_sk_high: [u8; 32] = self.eph_sk.hi.to_be_bytes(); + let serialized_eph_sk_low: [u8; 32] = self.eph_sk.lo.to_be_bytes(); - let address_bytes = self.recipient.to_field().to_be_bytes(32); + let address_bytes: [u8; 32] = self.recipient.to_field().to_be_bytes(); let serialized_recipient_ivpk = point_to_bytes(self.recipient_ivpk.to_point()); for i in 0..32 { @@ -44,7 +44,7 @@ impl EncryptedLogOutgoingBody { let full_key: [u8; 32] = poseidon2_hash_with_separator( [ovsk_app.hi, ovsk_app.lo, eph_pk.x, eph_pk.y], GENERATOR_INDEX__SYMMETRIC_KEY as Field - ).to_be_bytes(32).as_array(); + ).to_be_bytes(); let mut sym_key = [0; 16]; let mut iv = [0; 16]; diff --git a/noir-projects/aztec-nr/aztec/src/hash.nr b/noir-projects/aztec-nr/aztec/src/hash.nr index 64254a6a855..6cca38d1422 100644 --- a/noir-projects/aztec-nr/aztec/src/hash.nr +++ b/noir-projects/aztec-nr/aztec/src/hash.nr @@ -30,7 +30,7 @@ pub fn compute_unencrypted_log_hash( for i in 0..32 { hash_bytes[i] = address_bytes[i]; } - let len_bytes = (n as Field).to_be_bytes(4); + let len_bytes: [u8; 4] = (n as Field).to_be_bytes(); for i in 0..4 { hash_bytes[32 + i] = len_bytes[i]; } @@ -50,12 +50,12 @@ pub fn compute_message_hash( secret_hash: Field ) -> Field { let mut hash_bytes = [0 as u8; 192]; - let sender_bytes = sender.to_field().to_be_bytes(32); - let chain_id_bytes = chain_id.to_be_bytes(32); - let recipient_bytes = recipient.to_field().to_be_bytes(32); - let version_bytes = version.to_be_bytes(32); - let content_bytes = content.to_be_bytes(32); - let secret_hash_bytes = secret_hash.to_be_bytes(32); + let sender_bytes: [u8; 32] = sender.to_field().to_be_bytes(); + let chain_id_bytes: [u8; 32] = chain_id.to_be_bytes(); + let recipient_bytes: [u8; 32] = recipient.to_field().to_be_bytes(); + let version_bytes: [u8; 32] = version.to_be_bytes(); + 
let content_bytes: [u8; 32] = content.to_be_bytes(); + let secret_hash_bytes: [u8; 32] = secret_hash.to_be_bytes(); for i in 0..32 { hash_bytes[i] = sender_bytes[i]; diff --git a/noir-projects/aztec-nr/aztec/src/note/note_getter/mod.nr b/noir-projects/aztec-nr/aztec/src/note/note_getter/mod.nr index ab9d0edef96..8170ddf8f61 100644 --- a/noir-projects/aztec-nr/aztec/src/note/note_getter/mod.nr +++ b/noir-projects/aztec-nr/aztec/src/note/note_getter/mod.nr @@ -17,7 +17,7 @@ fn extract_property_value_from_selector( // Selectors use PropertySelectors in order to locate note properties inside the serialized note. // This allows easier packing and custom (de)serialization schemas. A note property is located // inside the serialized note using the index inside the array, a byte offset and a length. - let value = serialized_note[selector.index].to_be_bytes(32); + let value: [u8; 32] = serialized_note[selector.index].to_be_bytes(); let offset = selector.offset; let length = selector.length; let mut value_field = 0 as Field; diff --git a/noir-projects/aztec-nr/aztec/src/oracle/logs_traits.nr b/noir-projects/aztec-nr/aztec/src/oracle/logs_traits.nr index cd27ead6498..d9112a21324 100644 --- a/noir-projects/aztec-nr/aztec/src/oracle/logs_traits.nr +++ b/noir-projects/aztec-nr/aztec/src/oracle/logs_traits.nr @@ -102,14 +102,14 @@ trait ToBytesForUnencryptedLog { impl ToBytesForUnencryptedLog<32, 68> for Field { fn to_be_bytes_arr(self) -> [u8; 32] { - self.to_be_bytes(32).as_array() + self.to_be_bytes() } fn output_bytes(self) -> [u8; 68] {[self as u8; 68]} } impl ToBytesForUnencryptedLog<32, 68> for AztecAddress { fn to_be_bytes_arr(self) -> [u8; 32] { - self.to_field().to_be_bytes(32).as_array() + self.to_field().to_be_bytes() } fn output_bytes(self) -> [u8; 68] {[self.to_field() as u8; 68]} } @@ -118,7 +118,7 @@ fn arr_to_be_bytes_arr(fields: [Field; L]) -> [u8; N] { let mut bytes: [u8] = &[]; for i in 0..L { // Note that bytes.append() results in bound error - let to_add = 
fields[i].to_be_bytes(32); + let to_add: [u8; 32] = fields[i].to_be_bytes(); for j in 0..32 { bytes = bytes.push_back(to_add[j]); } @@ -132,7 +132,7 @@ fn str_to_be_bytes_arr(string: str) -> [u8; N] { let chars_bytes = string.as_bytes(); let mut bytes: [u8] = &[]; for i in 0..L { - let to_add = (chars_bytes[i] as Field).to_be_bytes(32); + let to_add: [u8; 32] = (chars_bytes[i] as Field).to_be_bytes(); for j in 0..32 { bytes = bytes.push_back(to_add[j]); } diff --git a/noir-projects/aztec-nr/aztec/src/test/mocks/mock_note.nr b/noir-projects/aztec-nr/aztec/src/test/mocks/mock_note.nr index c23a3ac80cb..f40f02b71e6 100644 --- a/noir-projects/aztec-nr/aztec/src/test/mocks/mock_note.nr +++ b/noir-projects/aztec-nr/aztec/src/test/mocks/mock_note.nr @@ -65,8 +65,8 @@ impl NoteInterface for MockNote { let mut buffer: [u8; MOCK_NOTE_BYTES_LENGTH] = [0; MOCK_NOTE_BYTES_LENGTH]; - let storage_slot_bytes = storage_slot.to_be_bytes(32); - let note_type_id_bytes = MockNote::get_note_type_id().to_be_bytes(32); + let storage_slot_bytes: [u8; 32] = storage_slot.to_be_bytes(); + let note_type_id_bytes: [u8; 32] = MockNote::get_note_type_id().to_be_bytes(); for i in 0..32 { buffer[i] = storage_slot_bytes[i]; @@ -74,7 +74,7 @@ impl NoteInterface for MockNote { } for i in 0..serialized_note.len() { - let bytes = serialized_note[i].to_be_bytes(32); + let bytes: [u8; 32] = serialized_note[i].to_be_bytes(); for j in 0..32 { buffer[64 + i * 32 + j] = bytes[j]; } @@ -85,7 +85,7 @@ impl NoteInterface for MockNote { impl Eq for MockNote { fn eq(self, other: Self) -> bool { - (self.header == other.header) & + (self.header == other.header) & (self.value == other.value) } } diff --git a/noir-projects/aztec-nr/aztec/src/utils/point.nr b/noir-projects/aztec-nr/aztec/src/utils/point.nr index 249df9e2b0b..633bd837ebb 100644 --- a/noir-projects/aztec-nr/aztec/src/utils/point.nr +++ b/noir-projects/aztec-nr/aztec/src/utils/point.nr @@ -13,7 +13,7 @@ pub fn point_to_bytes(pk: Point) -> [u8; 32] { // 
the "sign") so it's possible to use that last bit as an "is_infinite" flag if desired in the future. assert(!pk.is_infinite, "Cannot serialize point at infinity as bytes."); - let mut result = pk.x.to_be_bytes(32); + let mut result: [u8; 32] = pk.x.to_be_bytes(); // We store only a "sign" of the y coordinate because the rest can be derived from the x coordinate. To get // the sign we check if the y coordinate is less or equal than the curve's order minus 1 divided by 2. @@ -27,7 +27,7 @@ pub fn point_to_bytes(pk: Point) -> [u8; 32] { result[0] += 128; } - result.as_array() + result } mod test { diff --git a/noir-projects/aztec-nr/compressed-string/src/compressed_string.nr b/noir-projects/aztec-nr/compressed-string/src/compressed_string.nr index d73d024a8b1..134753e125b 100644 --- a/noir-projects/aztec-nr/compressed-string/src/compressed_string.nr +++ b/noir-projects/aztec-nr/compressed-string/src/compressed_string.nr @@ -34,7 +34,7 @@ impl CompressedString { let mut result = [0; M]; let mut w_index = 0 as u32; for i in 0..N { - let bytes = self.value[i].to_be_bytes(31); + let bytes: [u8; 31] = self.value[i].to_be_bytes(); for j in 0..31 { if w_index < M { result[w_index] = bytes[j]; @@ -55,7 +55,7 @@ impl Eq for CompressedString { impl Serialize for CompressedString { fn serialize(self) -> [Field; N] { self.value - + } } diff --git a/noir-projects/aztec-nr/compressed-string/src/field_compressed_string.nr b/noir-projects/aztec-nr/compressed-string/src/field_compressed_string.nr index cba01277dfd..0207d1c0313 100644 --- a/noir-projects/aztec-nr/compressed-string/src/field_compressed_string.nr +++ b/noir-projects/aztec-nr/compressed-string/src/field_compressed_string.nr @@ -32,11 +32,6 @@ impl FieldCompressedString { } pub fn to_bytes(self) -> [u8; 31] { - let mut result = [0; 31]; - let bytes = self.value.to_be_bytes(31); - for i in 0..31 { - result[i] = bytes[i]; - } - result + self.value.to_be_bytes() } } diff --git 
a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/dapp_payload.nr b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/dapp_payload.nr index 538ea7df988..1c49b912cb2 100644 --- a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/dapp_payload.nr +++ b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/dapp_payload.nr @@ -49,7 +49,7 @@ impl DAppPayload { for i in 0..DAPP_MAX_CALLS { bytes.extend_from_array(self.function_calls[i].to_be_bytes()); } - bytes.extend_from_slice(self.nonce.to_be_bytes(32)); + bytes.extend_from_array(self.nonce.to_be_bytes::<32>()); bytes.storage } diff --git a/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr index 235d83bfddb..e69c10ce92f 100644 --- a/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr @@ -160,7 +160,7 @@ contract AvmTest { } /************************************************************************ - * Misc + * Misc ************************************************************************/ #[aztec(public)] @@ -178,8 +178,7 @@ contract AvmTest { #[aztec(public)] fn to_radix_le(input: Field) -> [u8; 10] { - let result: [u8] = input.to_le_radix(/*base=*/ 2, /*limbs=*/ 10); - result.as_array() + input.to_le_radix(/*base=*/ 2) } // Helper functions to demonstrate an internal call stack in error messages diff --git a/noir-projects/noir-contracts/contracts/card_game_contract/src/cards.nr b/noir-projects/noir-contracts/contracts/card_game_contract/src/cards.nr index 69e87d9cdaf..0517eee97df 100644 --- a/noir-projects/noir-contracts/contracts/card_game_contract/src/cards.nr +++ b/noir-projects/noir-contracts/contracts/card_game_contract/src/cards.nr @@ -17,7 +17,7 @@ struct Card { impl FromField for Card { fn from_field(field: Field) -> Card { - let value_bytes = 
field.to_le_bytes(32); + let value_bytes: [u8; 32] = field.to_le_bytes(); let strength = (value_bytes[0] as u32) + (value_bytes[1] as u32) * 256; let points = (value_bytes[2] as u32) + (value_bytes[3] as u32) * 256; Card { strength, points } @@ -157,7 +157,7 @@ pub fn get_pack_cards( // generate pseudo randomness deterministically from 'seed' and user secret let secret = context.request_nsk_app(owner_npk_m_hash); let mix = secret + seed; - let mix_bytes: [u8; 32] = mix.to_le_bytes(32).as_array(); + let mix_bytes: [u8; 32] = mix.to_le_bytes(); let random_bytes = std::hash::sha256(mix_bytes); let mut cards = [Card::from_field(0); PACK_CARDS]; diff --git a/noir-projects/noir-contracts/contracts/ecdsa_k_account_contract/src/main.nr b/noir-projects/noir-contracts/contracts/ecdsa_k_account_contract/src/main.nr index b125e6bbb15..98f3c9a65a4 100644 --- a/noir-projects/noir-contracts/contracts/ecdsa_k_account_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/ecdsa_k_account_contract/src/main.nr @@ -66,7 +66,7 @@ contract EcdsaKAccount { // Verify payload signature using Ethereum's signing scheme // Note that noir expects the hash of the message/challenge as input to the ECDSA verification. 
- let outer_hash_bytes: [u8; 32] = outer_hash.to_be_bytes(32).as_array(); + let outer_hash_bytes: [u8; 32] = outer_hash.to_be_bytes(); let hashed_message: [u8; 32] = std::hash::sha256(outer_hash_bytes); let verification = std::ecdsa_secp256k1::verify_signature(public_key.x, public_key.y, signature, hashed_message); assert(verification == true); diff --git a/noir-projects/noir-contracts/contracts/ecdsa_public_key_note/src/lib.nr b/noir-projects/noir-contracts/contracts/ecdsa_public_key_note/src/lib.nr index 174627342d8..511e046c7ce 100644 --- a/noir-projects/noir-contracts/contracts/ecdsa_public_key_note/src/lib.nr +++ b/noir-projects/noir-contracts/contracts/ecdsa_public_key_note/src/lib.nr @@ -41,7 +41,7 @@ impl NoteInterface f let last_x = self.x[31] as Field; let last_y = self.y[31] as Field; - + [x, last_x, y, last_y, self.npk_m_hash] } @@ -50,17 +50,17 @@ impl NoteInterface f let mut x: [u8; 32] = [0; 32]; let mut y: [u8; 32] = [0; 32]; - let part_x = serialized_note[0].to_be_bytes(32); + let part_x:[u8; 32] = serialized_note[0].to_be_bytes(); for i in 0..31 { x[i] = part_x[i + 1]; } - x[31] = serialized_note[1].to_be_bytes(32)[31]; + x[31] = serialized_note[1].to_be_bytes::<32>()[31]; - let part_y = serialized_note[2].to_be_bytes(32); + let part_y:[u8; 32] = serialized_note[2].to_be_bytes(); for i in 0..31 { y[i] = part_y[i + 1]; } - y[31] = serialized_note[3].to_be_bytes(32)[31]; + y[31] = serialized_note[3].to_be_bytes::<32>()[31]; EcdsaPublicKeyNote { x, y, npk_m_hash: serialized_note[4], header: NoteHeader::empty() } } diff --git a/noir-projects/noir-contracts/contracts/ecdsa_r_account_contract/src/main.nr b/noir-projects/noir-contracts/contracts/ecdsa_r_account_contract/src/main.nr index 600184a9904..6cc1444e8a6 100644 --- a/noir-projects/noir-contracts/contracts/ecdsa_r_account_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/ecdsa_r_account_contract/src/main.nr @@ -65,7 +65,7 @@ contract EcdsaRAccount { // Verify payload signature 
using Ethereum's signing scheme // Note that noir expects the hash of the message/challenge as input to the ECDSA verification. - let outer_hash_bytes: [u8; 32] = outer_hash.to_be_bytes(32).as_array(); + let outer_hash_bytes: [u8; 32] = outer_hash.to_be_bytes(); let hashed_message: [u8; 32] = std::hash::sha256(outer_hash_bytes); let verification = std::ecdsa_secp256r1::verify_signature(public_key.x, public_key.y, signature, hashed_message); assert(verification == true); diff --git a/noir-projects/noir-contracts/contracts/fee_juice_contract/src/lib.nr b/noir-projects/noir-contracts/contracts/fee_juice_contract/src/lib.nr index d8b059b4755..2c13f967ac5 100644 --- a/noir-projects/noir-contracts/contracts/fee_juice_contract/src/lib.nr +++ b/noir-projects/noir-contracts/contracts/fee_juice_contract/src/lib.nr @@ -8,8 +8,8 @@ pub fn calculate_fee(context: PublicContext) -> Field { pub fn get_bridge_gas_msg_hash(owner: AztecAddress, amount: Field) -> Field { let mut hash_bytes = [0; 68]; - let recipient_bytes = owner.to_field().to_be_bytes(32); - let amount_bytes = amount.to_be_bytes(32); + let recipient_bytes: [u8; 32] = owner.to_field().to_be_bytes(); + let amount_bytes: [u8; 32] = amount.to_be_bytes(); for i in 0..32 { hash_bytes[i + 4] = recipient_bytes[i]; diff --git a/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr b/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr index 6535db06dc8..f64996579fe 100644 --- a/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr @@ -71,11 +71,11 @@ contract SchnorrAccount { } // Verify signature of the payload bytes - let verification = std::schnorr::verify_signature_slice( + let verification = std::schnorr::verify_signature( public_key.x, public_key.y, signature, - outer_hash.to_be_bytes(32) + outer_hash.to_be_bytes::<32>() ); assert(verification == true); // 
docs:end:entrypoint @@ -98,11 +98,11 @@ contract SchnorrAccount { for i in 0..64 { signature[i] = witness[i] as u8; } - let valid_in_private = std::schnorr::verify_signature_slice( + let valid_in_private = std::schnorr::verify_signature( public_key.x, public_key.y, signature, - message_hash.to_be_bytes(32) + message_hash.to_be_bytes::<32>() ); // Compute the nullifier and check if it is spent diff --git a/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/src/main.nr b/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/src/main.nr index 5268b67fea6..12ef42757a2 100644 --- a/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/src/main.nr @@ -37,11 +37,11 @@ contract SchnorrHardcodedAccount { } // Verify signature using hardcoded public key - let verification = std::schnorr::verify_signature_slice( + let verification = std::schnorr::verify_signature( public_key_x, public_key_y, signature, - outer_hash.to_be_bytes(32) + outer_hash.to_be_bytes::<32>() ); assert(verification == true); true diff --git a/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/util.nr b/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/util.nr index 4f98c0183b1..824847e2c0c 100644 --- a/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/util.nr +++ b/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/util.nr @@ -1,11 +1,11 @@ -use std::{schnorr::verify_signature_slice}; +use std::schnorr::verify_signature; use dep::aztec::prelude::AztecAddress; use crate::auth_oracle::AuthWitness; pub fn recover_address(message_hash: Field, witness: AuthWitness) -> AztecAddress { - let message_bytes = message_hash.to_be_bytes(32); + let message_bytes: [u8; 32] = message_hash.to_be_bytes(); // In a single key account contract we re-used ivpk_m 
as signing key - let verification = verify_signature_slice( + let verification = verify_signature( witness.keys.ivpk_m.inner.x, witness.keys.ivpk_m.inner.y, witness.signature, diff --git a/noir-projects/noir-contracts/contracts/token_portal_content_hash_lib/src/lib.nr b/noir-projects/noir-contracts/contracts/token_portal_content_hash_lib/src/lib.nr index cba27bb6ef9..f7789aefcd5 100644 --- a/noir-projects/noir-contracts/contracts/token_portal_content_hash_lib/src/lib.nr +++ b/noir-projects/noir-contracts/contracts/token_portal_content_hash_lib/src/lib.nr @@ -6,8 +6,8 @@ use dep::aztec::protocol_types::hash::sha256_to_field; // Refer TokenPortal.sol for reference on L1. pub fn get_mint_public_content_hash(owner: AztecAddress, amount: Field) -> Field { let mut hash_bytes = [0; 68]; - let recipient_bytes = owner.to_field().to_be_bytes(32); - let amount_bytes = amount.to_be_bytes(32); + let recipient_bytes:[u8; 32] = owner.to_field().to_be_bytes(); + let amount_bytes:[u8; 32] = amount.to_be_bytes(); for i in 0..32 { hash_bytes[i + 4] = recipient_bytes[i]; @@ -33,8 +33,8 @@ pub fn get_mint_private_content_hash( amount: Field ) -> Field { let mut hash_bytes = [0; 68]; - let secret_hash_bytes = secret_hash_for_redeeming_minted_notes.to_be_bytes(32); - let amount_bytes = amount.to_be_bytes(32); + let secret_hash_bytes:[u8; 32] = secret_hash_for_redeeming_minted_notes.to_be_bytes(); + let amount_bytes:[u8; 32] = amount.to_be_bytes(); for i in 0..32 { hash_bytes[i + 4] = secret_hash_bytes[i]; @@ -60,9 +60,9 @@ pub fn get_withdraw_content_hash(recipient: EthAddress, amount: Field, caller_on // then convert to a single field element // add that to the l2 to l1 messages let mut hash_bytes: [u8; 100] = [0; 100]; - let recipient_bytes = recipient.to_field().to_be_bytes(32); - let amount_bytes = amount.to_be_bytes(32); - let caller_on_l1_bytes = caller_on_l1.to_field().to_be_bytes(32); + let recipient_bytes: [u8; 32] = recipient.to_field().to_be_bytes(); + let amount_bytes: [u8; 
32] = amount.to_be_bytes(); + let caller_on_l1_bytes: [u8; 32] = caller_on_l1.to_field().to_be_bytes(); // 0x69328dec, selector for "withdraw(address,uint256,address)" hash_bytes[0] = 0x69; diff --git a/noir-projects/noir-contracts/contracts/uniswap_contract/src/util.nr b/noir-projects/noir-contracts/contracts/uniswap_contract/src/util.nr index a2f08dea4cf..afc689c2f0c 100644 --- a/noir-projects/noir-contracts/contracts/uniswap_contract/src/util.nr +++ b/noir-projects/noir-contracts/contracts/uniswap_contract/src/util.nr @@ -16,14 +16,14 @@ pub fn compute_swap_public_content_hash( ) -> Field { let mut hash_bytes = [0; 260]; // 8 fields of 32 bytes each + 4 bytes fn selector - let input_token_portal_bytes = input_asset_bridge_portal_address.to_field().to_be_bytes(32); - let in_amount_bytes = input_amount.to_be_bytes(32); - let uniswap_fee_tier_bytes = uniswap_fee_tier.to_be_bytes(32); - let output_token_portal_bytes = output_asset_bridge_portal_address.to_field().to_be_bytes(32); - let amount_out_min_bytes = minimum_output_amount.to_be_bytes(32); - let aztec_recipient_bytes = aztec_recipient.to_field().to_be_bytes(32); - let secret_hash_for_L1_to_l2_message_bytes = secret_hash_for_L1_to_l2_message.to_be_bytes(32); - let caller_on_L1_bytes = caller_on_L1.to_field().to_be_bytes(32); + let input_token_portal_bytes: [u8; 32] = input_asset_bridge_portal_address.to_field().to_be_bytes(); + let in_amount_bytes: [u8; 32] = input_amount.to_be_bytes(); + let uniswap_fee_tier_bytes: [u8; 32] = uniswap_fee_tier.to_be_bytes(); + let output_token_portal_bytes: [u8; 32] = output_asset_bridge_portal_address.to_field().to_be_bytes(); + let amount_out_min_bytes: [u8; 32] = minimum_output_amount.to_be_bytes(); + let aztec_recipient_bytes: [u8; 32] = aztec_recipient.to_field().to_be_bytes(); + let secret_hash_for_L1_to_l2_message_bytes: [u8; 32] = secret_hash_for_L1_to_l2_message.to_be_bytes(); + let caller_on_L1_bytes: [u8; 32] = caller_on_L1.to_field().to_be_bytes(); // function 
selector: 0xf18186d8 keccak256("swap_public(address,uint256,uint24,address,uint256,bytes32,bytes32,address)") hash_bytes[0] = 0xf1; @@ -62,14 +62,14 @@ pub fn compute_swap_private_content_hash( ) -> Field { let mut hash_bytes = [0; 260]; // 8 fields of 32 bytes each + 4 bytes fn selector - let input_token_portal_bytes = input_asset_bridge_portal_address.to_field().to_be_bytes(32); - let in_amount_bytes = input_amount.to_be_bytes(32); - let uniswap_fee_tier_bytes = uniswap_fee_tier.to_be_bytes(32); - let output_token_portal_bytes = output_asset_bridge_portal_address.to_field().to_be_bytes(32); - let amount_out_min_bytes = minimum_output_amount.to_be_bytes(32); - let secret_hash_for_redeeming_minted_notes_bytes = secret_hash_for_redeeming_minted_notes.to_be_bytes(32); - let secret_hash_for_L1_to_l2_message_bytes = secret_hash_for_L1_to_l2_message.to_be_bytes(32); - let caller_on_L1_bytes = caller_on_L1.to_field().to_be_bytes(32); + let input_token_portal_bytes: [u8; 32] = input_asset_bridge_portal_address.to_field().to_be_bytes(); + let in_amount_bytes: [u8; 32] = input_amount.to_be_bytes(); + let uniswap_fee_tier_bytes: [u8; 32] = uniswap_fee_tier.to_be_bytes(); + let output_token_portal_bytes: [u8; 32] = output_asset_bridge_portal_address.to_field().to_be_bytes(); + let amount_out_min_bytes: [u8; 32] = minimum_output_amount.to_be_bytes(); + let secret_hash_for_redeeming_minted_notes_bytes: [u8; 32] = secret_hash_for_redeeming_minted_notes.to_be_bytes(); + let secret_hash_for_L1_to_l2_message_bytes: [u8; 32] = secret_hash_for_L1_to_l2_message.to_be_bytes(); + let caller_on_L1_bytes: [u8; 32] = caller_on_L1.to_field().to_be_bytes(); // function selector: 0x16f416eb keccak256("swap_private(address,uint256,uint24,address,uint256,bytes32,bytes32,address)") hash_bytes[0] = 0x16; diff --git a/noir-projects/noir-protocol-circuits/crates/blob/Nargo.toml b/noir-projects/noir-protocol-circuits/crates/blob/Nargo.toml index fc1b7508f71..356e55ea849 100644 --- 
a/noir-projects/noir-protocol-circuits/crates/blob/Nargo.toml +++ b/noir-projects/noir-protocol-circuits/crates/blob/Nargo.toml @@ -5,4 +5,4 @@ authors = [""] compiler_version = ">=0.30.0" [dependencies] -bigint = {tag = "v0.3.2", git = "https://github.com/noir-lang/noir-bignum" } \ No newline at end of file +bigint = {tag = "tf/update-to-le-bytes", git = "https://github.com/noir-lang/noir-bignum" } diff --git a/noir-projects/noir-protocol-circuits/crates/blob/src/main.nr b/noir-projects/noir-protocol-circuits/crates/blob/src/main.nr index 316db6a464a..2570920b235 100644 --- a/noir-projects/noir-protocol-circuits/crates/blob/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/blob/src/main.nr @@ -72,12 +72,7 @@ unconstrained fn __field_to_bytes(x: Field) -> [u8; 32] { - let x_bytes_slice = x.to_be_bytes(32); - let mut x_bytes = [0; 32]; - for i in 0..32 { - x_bytes[i] = x_bytes_slice[i]; - } - x_bytes + x.to_be_bytes() } unconstrained fn __field_to_bignum(x: Field) -> F { @@ -120,17 +115,24 @@ fn unsafe_bignum_to_field(x: F) -> Field { } fn bignum_to_bytes(x: F) -> [u8] { - let limb_0_bytes: [u8] = x.limbs[0].to_be_bytes(15); - let limb_1_bytes: [u8] = x.limbs[1].to_be_bytes(15); - let limb_2_bytes: [u8] = x.limbs[2].to_be_bytes(2); - let out = limb_2_bytes.append(limb_1_bytes).append(limb_0_bytes); + let limb_0_bytes: [u8; 15] = x.limbs[0].to_be_bytes(); + let limb_1_bytes: [u8; 15] = x.limbs[1].to_be_bytes(); + let limb_2_bytes: [u8; 2] = x.limbs[2].to_be_bytes(); + let mut out: [u8; 32] = [0; 32]; + for i in 0..15 { + out[2 + i] = limb_1_bytes[i]; + out[17 + i] = limb_0_bytes[i]; + } + for i in 0..2 { + out[i] = limb_2_bytes[i]; + } std::static_assert(out.len() == 32, "bad byte decomposition of bignum"); out } // fn kzg_commitment_to_bytes(c: [Field; 2]) -> [u8] { -// let limb_0_bytes: [u8] = x.limbs[0].to_be_bytes(32); -// let limb_1_bytes: [u8] = 
x.limbs[1].to_be_bytes(16); +// let limb_0_bytes: [u8; 32] = x.limbs[0].to_be_bytes(); +// let limb_1_bytes: [u8; 16] = x.limbs[1].to_be_bytes(); // let out = limb_2_bytes.append(limb_1_bytes).append(limb_0_bytes); // std::static_assert(out.len() == 32, "bad byte decomposition of bignum"); @@ -247,7 +249,7 @@ fn main(blob: [F; FIELDS_PER_BLOB], kzg_commitment: [Field; 2]) -> pub (Field, F let y: F = barycentric_evaluate_blob_at_z(challenge_z_as_bignum, blob); // let y = challenge_z_as_bignum; // dummy constraint for when we want to skip the barycentric stuff in testing. - // let challenge_z_as_bytes: [u8] = challenge_z.to_be_bytes(32); + // let challenge_z_as_bytes: [u8; 32] = challenge_z.to_be_bytes(); // let y_as_bytes: [u8] = bignum_to_bytes(y); // let kzg_commitment_as_bytes: [u8] = () diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/components.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/components.nr index 324d8fa40c3..44012b0ed79 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/components.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/components.nr @@ -288,7 +288,7 @@ pub fn compute_tx_effects_hash( let mut hash_input_flattened = [0; TX_EFFECTS_HASH_INPUT_FIELDS * 32]; for offset in 0..TX_EFFECTS_HASH_INPUT_FIELDS { - let input_as_bytes = tx_effects_hash_input[offset].to_be_bytes(32); + let input_as_bytes: [u8; 32] = tx_effects_hash_input[offset].to_be_bytes(); for byte_index in 0..32 { hash_input_flattened[offset * 32 + byte_index] = input_as_bytes[byte_index]; } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr b/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr index a9afe4dc9b9..67a5eb1d8d5 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/hash.nr @@ -150,7 +150,7 @@ pub fn compute_l2_to_l1_hash( let inputs = [contract_address.to_field(), 
rollup_version_id, recipient.to_field(), chain_id, content]; for i in 0..inputs.len() { // TODO are bytes be in fr.to_buffer() ? - let item_bytes = inputs[i].to_be_bytes(32); + let item_bytes: [u8; 32] = inputs[i].to_be_bytes(); for j in 0..32 { bytes.push(item_bytes[j]); } @@ -195,7 +195,7 @@ pub fn accumulate_sha256(input: [Field; 2]) -> Field { // accumulate_sha256 assumes that the inputs are pre-truncated 31 byte numbers let mut hash_input_flattened = [0; 64]; for offset in 0..input.len() { - let input_as_bytes = input[offset].to_be_bytes(32); + let input_as_bytes: [u8; 32] = input[offset].to_be_bytes(); for byte_index in 0..32 { hash_input_flattened[offset * 32 + byte_index] = input_as_bytes[byte_index]; } @@ -211,7 +211,7 @@ pub fn compute_tx_logs_hash(logs: [LogHash; MAX_ENCRYPTED_LOGS_PER_TX]) -> Field // Convert each field element into a byte array and append the bytes to `hash_input_flattened` let mut hash_input_flattened = [0; MAX_ENCRYPTED_LOGS_PER_TX * 32]; for offset in 0..MAX_ENCRYPTED_LOGS_PER_TX { - let input_as_bytes = logs[offset].value.to_be_bytes(32); + let input_as_bytes: [u8; 32] = logs[offset].value.to_be_bytes(); for byte_index in 0..32 { hash_input_flattened[offset * 32 + byte_index] = input_as_bytes[byte_index]; } @@ -232,7 +232,7 @@ pub fn compute_tx_note_logs_hash(logs: [LogHash; MAX_NOTE_ENCRYPTED_LOGS_PER_TX] // Convert each field element into a byte array and append the bytes to `hash_input_flattened` let mut hash_input_flattened = [0; MAX_NOTE_ENCRYPTED_LOGS_PER_TX * 32]; for offset in 0..MAX_NOTE_ENCRYPTED_LOGS_PER_TX { - let input_as_bytes = logs[offset].value.to_be_bytes(32); + let input_as_bytes: [u8; 32] = logs[offset].value.to_be_bytes(); for byte_index in 0..32 { hash_input_flattened[offset * 32 + byte_index] = input_as_bytes[byte_index]; } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/root.nr b/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/root.nr index 
dbf616cfd2e..b7c01cb8533 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/root.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/root.nr @@ -7,8 +7,8 @@ use crate::{hash::merkle_hash, merkle_tree::merkle_tree::MerkleTree}; // // TODO(David/Someone): The cpp code is using a uint256, whereas its // TODO a bit simpler in Noir to just have a bit array. -// TODO: I'd generally like to avoid u256 for algorithms like -// this because it means we never even need to consider cases where +// TODO: I'd generally like to avoid u256 for algorithms like +// this because it means we never even need to consider cases where // the index is greater than p. pub fn root_from_sibling_path( leaf: Field, @@ -16,7 +16,7 @@ pub fn root_from_sibling_path( sibling_path: [Field; N] ) -> Field { let mut node = leaf; - let indices = leaf_index.to_le_bits(N); + let indices: [u1; N] = leaf_index.to_le_bits(); for i in 0..N { let (hash_left, hash_right) = if indices[i] == 1 { diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/utils/field.nr b/noir-projects/noir-protocol-circuits/crates/types/src/utils/field.nr index c9b4125018c..954ef602f82 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/utils/field.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/utils/field.nr @@ -51,7 +51,7 @@ unconstrained fn bytes_field_test() { 84, 62, 10, 102, 66, 255, 235, 128, 57, 41, 104, 97, 118, 90, 83, 64, 123, 186, 98, 189, 28, 151, 202, 67, 55, 77, 233, 80, 187, 224, 167 ]; let field = field_from_bytes(inputs, true); - let return_bytes = field.to_be_bytes(31); + let return_bytes: [u8; 31] = field.to_be_bytes(); for i in 0..31 { assert_eq(inputs[i], return_bytes[i]); } @@ -60,7 +60,7 @@ unconstrained fn bytes_field_test() { 84, 62, 10, 102, 66, 255, 235, 128, 57, 41, 104, 97, 118, 90, 83, 64, 123, 186, 98, 189, 28, 151, 202, 67, 55, 77, 233, 80, 187, 224, 167, 158 ]; let field2 = field_from_bytes_32_trunc(inputs2); 
- let return_bytes2 = field.to_be_bytes(31); + let return_bytes2: [u8; 31] = field.to_be_bytes(); for i in 0..31 { assert_eq(return_bytes2[i], return_bytes[i]); @@ -75,7 +75,7 @@ unconstrained fn max_field_test() { let max_value = crate::constants::MAX_FIELD_VALUE; assert_eq(max_value, 0 - 1); // modulus == 0 is tested elsewhere, so below is more of a sanity check - let max_bytes = max_value.to_be_bytes(32); + let max_bytes: [u8; 32] = max_value.to_be_bytes(); let mod_bytes = std::field::modulus_be_bytes(); for i in 0..31 { assert_eq(max_bytes[i], mod_bytes[i]); diff --git a/noir/noir-repo/.github/workflows/test-js-packages.yml b/noir/noir-repo/.github/workflows/test-js-packages.yml index 5a09a0cf444..9f46e6f98e8 100644 --- a/noir/noir-repo/.github/workflows/test-js-packages.yml +++ b/noir/noir-repo/.github/workflows/test-js-packages.yml @@ -509,59 +509,6 @@ jobs: working-directory: ./examples/codegen_verifier run: ./test.sh - external-repo-checks: - needs: [build-nargo] - runs-on: ubuntu-22.04 - timeout-minutes: 30 - strategy: - fail-fast: false - matrix: - project: - # - { repo: AztecProtocol/aztec-nr, path: ./ } - # - { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-contracts } - # Disabled as aztec-packages requires a setup-step in order to generate a `Nargo.toml` - #- { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits } - - { repo: zac-williamson/noir-edwards, path: ./, ref: 0016ce82cd58b6ebb0c43c271725590bcff4e755 } - # TODO: Enable these once they're passing against master again. 
- # - { repo: zac-williamson/noir-bignum, path: ./, ref: 030c2acce1e6b97c44a3bbbf3429ed96f20d72d3 } - # - { repo: vlayer-xyz/monorepo, path: ./, ref: ee46af88c025863872234eb05d890e1e447907cb } - # - { repo: hashcloak/noir-bigint, path: ./, ref: 940ddba3a5201b508e7b37a2ef643551afcf5ed8 } - - name: Check external repo - ${{ matrix.project.repo }} - - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - repository: ${{ matrix.project.repo }} - path: test-repo - ref: ${{ matrix.project.ref }} - - - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: ./nargo - - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" - nargo -V - - - name: Remove requirements on compiler version - working-directory: ./test-repo - run: | - # Github actions seems to not expand "**" in globs by default. - shopt -s globstar - sed -i '/^compiler_version/d' ./**/Nargo.toml - - - name: Run nargo check - working-directory: ./test-repo/${{ matrix.project.path }} - run: nargo check - # This is a job which depends on all test jobs and reports the overall status. # This allows us to add/remove test jobs without having to update the required workflows. 
tests-end: diff --git a/noir/noir-repo/aztec_macros/src/transforms/events.rs b/noir/noir-repo/aztec_macros/src/transforms/events.rs index 8b71bd77ae6..b02cf1cc622 100644 --- a/noir/noir-repo/aztec_macros/src/transforms/events.rs +++ b/noir/noir-repo/aztec_macros/src/transforms/events.rs @@ -260,8 +260,8 @@ fn generate_fn_private_to_be_bytes( fn private_to_be_bytes(self: {event_type}, randomness: Field) -> [u8; {byte_length}] {{ let mut buffer: [u8; {byte_length}] = [0; {byte_length}]; - let randomness_bytes = randomness.to_be_bytes(32); - let event_type_id_bytes = {event_type}::get_event_type_id().to_field().to_be_bytes(32); + let randomness_bytes: [u8; 32] = randomness.to_be_bytes(); + let event_type_id_bytes: [u8; 32] = {event_type}::get_event_type_id().to_field().to_be_bytes(); for i in 0..32 {{ buffer[i] = randomness_bytes[i]; @@ -271,7 +271,7 @@ fn generate_fn_private_to_be_bytes( let serialized_event = self.serialize(); for i in 0..serialized_event.len() {{ - let bytes = serialized_event[i].to_be_bytes(32); + let bytes: [u8; 32] = serialized_event[i].to_be_bytes(); for j in 0..32 {{ buffer[64 + i * 32 + j] = bytes[j]; }} @@ -308,7 +308,7 @@ fn generate_fn_to_be_bytes( fn to_be_bytes(self: {event_type}) -> [u8; {byte_length_without_randomness}] {{ let mut buffer: [u8; {byte_length_without_randomness}] = [0; {byte_length_without_randomness}]; - let event_type_id_bytes = {event_type}::get_event_type_id().to_field().to_be_bytes(32); + let event_type_id_bytes: [u8; 32] = {event_type}::get_event_type_id().to_field().to_be_bytes(); for i in 0..32 {{ buffer[i] = event_type_id_bytes[i]; @@ -317,7 +317,7 @@ fn generate_fn_to_be_bytes( let serialized_event = self.serialize(); for i in 0..serialized_event.len() {{ - let bytes = serialized_event[i].to_be_bytes(32); + let bytes: [u8; 32] = serialized_event[i].to_be_bytes(); for j in 0..32 {{ buffer[32 + i * 32 + j] = bytes[j]; }} diff --git a/noir/noir-repo/aztec_macros/src/transforms/note_interface.rs 
b/noir/noir-repo/aztec_macros/src/transforms/note_interface.rs index 8df1d128c6f..df237926486 100644 --- a/noir/noir-repo/aztec_macros/src/transforms/note_interface.rs +++ b/noir/noir-repo/aztec_macros/src/transforms/note_interface.rs @@ -244,8 +244,8 @@ fn generate_note_to_be_bytes( let mut buffer: [u8; {0}] = [0; {0}]; - let storage_slot_bytes = storage_slot.to_be_bytes(32); - let note_type_id_bytes = {1}::get_note_type_id().to_be_bytes(32); + let storage_slot_bytes: [u8; 32] = storage_slot.to_be_bytes(); + let note_type_id_bytes: [u8; 32] = {1}::get_note_type_id().to_be_bytes(); for i in 0..32 {{ buffer[i] = storage_slot_bytes[i]; @@ -253,7 +253,7 @@ fn generate_note_to_be_bytes( }} for i in 0..serialized_note.len() {{ - let bytes = serialized_note[i].to_be_bytes(32); + let bytes: [u8; 32] = serialized_note[i].to_be_bytes(); for j in 0..32 {{ buffer[64 + i * 32 + j] = bytes[j]; }} diff --git a/noir/noir-repo/aztec_macros/src/utils/ast_utils.rs b/noir/noir-repo/aztec_macros/src/utils/ast_utils.rs index 955e4111bb3..19372fa5cb5 100644 --- a/noir/noir-repo/aztec_macros/src/utils/ast_utils.rs +++ b/noir/noir-repo/aztec_macros/src/utils/ast_utils.rs @@ -1,9 +1,9 @@ use noirc_errors::{Span, Spanned}; use noirc_frontend::ast::{ BinaryOpKind, CallExpression, CastExpression, Expression, ExpressionKind, FunctionReturnType, - Ident, IndexExpression, InfixExpression, Lambda, LetStatement, MemberAccessExpression, - MethodCallExpression, NoirTraitImpl, Path, PathSegment, Pattern, PrefixExpression, Statement, - StatementKind, TraitImplItem, UnaryOp, UnresolvedType, UnresolvedTypeData, + Ident, IndexExpression, InfixExpression, Lambda, MemberAccessExpression, MethodCallExpression, + NoirTraitImpl, Path, PathSegment, Pattern, PrefixExpression, Statement, StatementKind, + TraitImplItem, UnaryOp, UnresolvedType, UnresolvedTypeData, }; use noirc_frontend::token::SecondaryAttribute; @@ -73,13 +73,11 @@ pub fn mutable(name: &str) -> Pattern { } pub fn mutable_assignment(name: &str, 
assigned_to: Expression) -> Statement { - make_statement(StatementKind::Let(LetStatement { - pattern: mutable(name), - r#type: make_type(UnresolvedTypeData::Unspecified), - expression: assigned_to, - comptime: false, - attributes: vec![], - })) + make_statement(StatementKind::new_let( + mutable(name), + make_type(UnresolvedTypeData::Unspecified), + assigned_to, + )) } pub fn mutable_reference(variable_name: &str) -> Expression { @@ -98,13 +96,7 @@ pub fn assignment_with_type( typ: UnresolvedTypeData, assigned_to: Expression, ) -> Statement { - make_statement(StatementKind::Let(LetStatement { - pattern: pattern(name), - r#type: make_type(typ), - expression: assigned_to, - comptime: false, - attributes: vec![], - })) + make_statement(StatementKind::new_let(pattern(name), make_type(typ), assigned_to)) } pub fn return_type(path: Path) -> FunctionReturnType { diff --git a/noir/noir-repo/aztec_macros/src/utils/parse_utils.rs b/noir/noir-repo/aztec_macros/src/utils/parse_utils.rs index f2998fbaafc..6a2a876e682 100644 --- a/noir/noir-repo/aztec_macros/src/utils/parse_utils.rs +++ b/noir/noir-repo/aztec_macros/src/utils/parse_utils.rs @@ -50,7 +50,7 @@ fn empty_item(item: &mut Item) { empty_parsed_submodule(parsed_submodule); } ItemKind::ModuleDecl(module_declaration) => empty_module_declaration(module_declaration), - ItemKind::Import(use_tree) => empty_use_tree(use_tree), + ItemKind::Import(use_tree, _) => empty_use_tree(use_tree), ItemKind::Struct(noir_struct) => empty_noir_struct(noir_struct), ItemKind::TypeAlias(noir_type_alias) => empty_noir_type_alias(noir_type_alias), } @@ -404,9 +404,9 @@ fn empty_pattern(pattern: &mut Pattern) { } fn empty_unresolved_trait_constraints( - unresolved_trait_constriants: &mut [UnresolvedTraitConstraint], + unresolved_trait_constraints: &mut [UnresolvedTraitConstraint], ) { - for trait_constraint in unresolved_trait_constriants.iter_mut() { + for trait_constraint in unresolved_trait_constraints.iter_mut() { 
empty_unresolved_trait_constraint(trait_constraint); } } diff --git a/noir/noir-repo/compiler/noirc_driver/src/lib.rs b/noir/noir-repo/compiler/noirc_driver/src/lib.rs index b7bb07ad64a..88918151366 100644 --- a/noir/noir-repo/compiler/noirc_driver/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_driver/src/lib.rs @@ -131,18 +131,6 @@ pub struct CompileOptions { pub skip_underconstrained_check: bool, } -#[derive(Clone, Debug, Default)] -pub struct CheckOptions { - pub compile_options: CompileOptions, - pub error_on_unused_imports: bool, -} - -impl CheckOptions { - pub fn new(compile_options: &CompileOptions, error_on_unused_imports: bool) -> Self { - Self { compile_options: compile_options.clone(), error_on_unused_imports } - } -} - pub fn parse_expression_width(input: &str) -> Result { use std::io::{Error, ErrorKind}; let width = input @@ -290,20 +278,19 @@ pub fn add_dep( pub fn check_crate( context: &mut Context, crate_id: CrateId, - check_options: &CheckOptions, + options: &CompileOptions, ) -> CompilationResult<()> { - let options = &check_options.compile_options; - let macros: &[&dyn MacroProcessor] = if options.disable_macros { &[] } else { &[&aztec_macros::AztecMacro] }; let mut errors = vec![]; + let error_on_unused_imports = true; let diagnostics = CrateDefMap::collect_defs( crate_id, context, options.debug_comptime_in_file.as_deref(), options.arithmetic_generics, - check_options.error_on_unused_imports, + error_on_unused_imports, macros, ); errors.extend(diagnostics.into_iter().map(|(error, file_id)| { @@ -337,10 +324,7 @@ pub fn compile_main( options: &CompileOptions, cached_program: Option, ) -> CompilationResult { - let error_on_unused_imports = true; - let check_options = CheckOptions::new(options, error_on_unused_imports); - - let (_, mut warnings) = check_crate(context, crate_id, &check_options)?; + let (_, mut warnings) = check_crate(context, crate_id, options)?; let main = context.get_main_function(&crate_id).ok_or_else(|| { // TODO(#2155): This 
error might be a better to exist in Nargo @@ -375,9 +359,7 @@ pub fn compile_contract( crate_id: CrateId, options: &CompileOptions, ) -> CompilationResult { - let error_on_unused_imports = true; - let check_options = CheckOptions::new(options, error_on_unused_imports); - let (_, warnings) = check_crate(context, crate_id, &check_options)?; + let (_, warnings) = check_crate(context, crate_id, options)?; // TODO: We probably want to error if contracts is empty let contracts = context.get_all_contracts(&crate_id); diff --git a/noir/noir-repo/compiler/noirc_errors/src/reporter.rs b/noir/noir-repo/compiler/noirc_errors/src/reporter.rs index b21dc759f14..0886f83af9d 100644 --- a/noir/noir-repo/compiler/noirc_errors/src/reporter.rs +++ b/noir/noir-repo/compiler/noirc_errors/src/reporter.rs @@ -12,6 +12,8 @@ pub struct CustomDiagnostic { pub secondaries: Vec, notes: Vec, pub kind: DiagnosticKind, + pub deprecated: bool, + pub unnecessary: bool, } #[derive(Debug, Copy, Clone, PartialEq, Eq)] @@ -35,6 +37,8 @@ impl CustomDiagnostic { secondaries: Vec::new(), notes: Vec::new(), kind: DiagnosticKind::Error, + deprecated: false, + unnecessary: false, } } @@ -49,6 +53,8 @@ impl CustomDiagnostic { secondaries: vec![CustomLabel::new(secondary_message, secondary_span, None)], notes: Vec::new(), kind, + deprecated: false, + unnecessary: false, } } @@ -101,6 +107,8 @@ impl CustomDiagnostic { secondaries: vec![CustomLabel::new(secondary_message, secondary_span, None)], notes: Vec::new(), kind: DiagnosticKind::Bug, + deprecated: false, + unnecessary: false, } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index a7cb1571e34..d3d0e2231ad 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -502,6 +502,8 @@ impl<'block> 
BrilligBlock<'block> { ); } Value::Intrinsic(Intrinsic::ToRadix(endianness)) => { + let results = dfg.instruction_results(instruction_id); + let source = self.convert_ssa_single_addr_value(arguments[0], dfg); let radix: u32 = dfg @@ -512,88 +514,43 @@ impl<'block> BrilligBlock<'block> { .try_into() .expect("Radix should be u32"); - let limb_count: usize = dfg - .get_numeric_constant(arguments[2]) - .expect("Limb count should be known") - .try_to_u64() - .expect("Limb count should fit in u64") - .try_into() - .expect("Limb count should fit in usize"); - - let results = dfg.instruction_results(instruction_id); - - let target_len = self.variables.define_single_addr_variable( - self.function_context, - self.brillig_context, - results[0], - dfg, - ); - - let target_vector = self + let target_array = self .variables .define_variable( self.function_context, self.brillig_context, - results[1], + results[0], dfg, ) - .extract_vector(); - - // Update the user-facing slice length - self.brillig_context - .usize_const_instruction(target_len.address, limb_count.into()); + .extract_array(); self.brillig_context.codegen_to_radix( source, - target_vector, + target_array, radix, - limb_count, matches!(endianness, Endian::Big), false, ); } Value::Intrinsic(Intrinsic::ToBits(endianness)) => { - let source = self.convert_ssa_single_addr_value(arguments[0], dfg); - let limb_count: usize = dfg - .get_numeric_constant(arguments[1]) - .expect("Limb count should be known") - .try_to_u64() - .expect("Limb count should fit in u64") - .try_into() - .expect("Limb count should fit in usize"); - let results = dfg.instruction_results(instruction_id); - let target_len_variable = self.variables.define_variable( - self.function_context, - self.brillig_context, - results[0], - dfg, - ); - let target_len = target_len_variable.extract_single_addr(); - - let target_vector = match self.variables.define_variable( - self.function_context, - self.brillig_context, - results[1], - dfg, - ) { - 
BrilligVariable::BrilligArray(array) => { - self.brillig_context.array_to_vector_instruction(&array) - } - BrilligVariable::BrilligVector(vector) => vector, - BrilligVariable::SingleAddr(..) => unreachable!("ICE: ToBits on non-array"), - }; + let source = self.convert_ssa_single_addr_value(arguments[0], dfg); - // Update the user-facing slice length - self.brillig_context - .usize_const_instruction(target_len.address, limb_count.into()); + let target_array = self + .variables + .define_variable( + self.function_context, + self.brillig_context, + results[0], + dfg, + ) + .extract_array(); self.brillig_context.codegen_to_radix( source, - target_vector, + target_array, 2, - limb_count, matches!(endianness, Endian::Big), true, ); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs index 5c171f167fc..21f68daab64 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs @@ -1,12 +1,12 @@ use acvm::acir::{ - brillig::{BlackBoxOp, HeapArray, IntegerBitSize}, + brillig::{BlackBoxOp, IntegerBitSize}, AcirField, }; use crate::brillig::brillig_ir::BrilligBinaryOp; use super::{ - brillig_variable::{BrilligVector, SingleAddrVariable}, + brillig_variable::{BrilligArray, SingleAddrVariable}, debug_show::DebugToString, registers::RegisterAllocator, BrilligContext, @@ -67,27 +67,27 @@ impl BrilligContext< pub(crate) fn codegen_to_radix( &mut self, source_field: SingleAddrVariable, - target_vector: BrilligVector, + target_array: BrilligArray, radix: u32, - limb_count: usize, big_endian: bool, output_bits: bool, // If true will generate bit limbs, if false will generate byte limbs ) { assert!(source_field.bit_size == F::max_num_bits()); - self.usize_const_instruction(target_vector.size, limb_count.into()); - 
self.usize_const_instruction(target_vector.rc, 1_usize.into()); - self.codegen_allocate_array(target_vector.pointer, target_vector.size); + let size = SingleAddrVariable::new_usize(self.allocate_register()); + self.usize_const_instruction(size.address, target_array.size.into()); + self.usize_const_instruction(target_array.rc, 1_usize.into()); + self.codegen_allocate_array(target_array.pointer, size.address); self.black_box_op_instruction(BlackBoxOp::ToRadix { input: source_field.address, radix, - output: HeapArray { pointer: target_vector.pointer, size: limb_count }, + output: target_array.to_heap_array(), output_bits, }); if big_endian { - self.codegen_array_reverse(target_vector.pointer, target_vector.size); + self.codegen_array_reverse(target_array.pointer, size.address); } } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_memory.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_memory.rs index 69e7ff5383d..a33024dc382 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_memory.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_memory.rs @@ -269,34 +269,28 @@ impl BrilligContext< self.call_array_reverse_procedure(pointer, size); return; } + let iteration_count = self.allocate_register(); self.codegen_usize_op(size, iteration_count, BrilligBinaryOp::UnsignedDiv, 2); let start_value_register = self.allocate_register(); - let index_at_end_of_array = self.allocate_register(); let end_value_register = self.allocate_register(); + let index_at_end_of_array = self.allocate_register(); self.mov_instruction(index_at_end_of_array, size); self.codegen_loop(iteration_count, |ctx, iterator_register| { // The index at the end of array is size - 1 - iterator ctx.codegen_usize_op_in_place(index_at_end_of_array, BrilligBinaryOp::Sub, 1); + let index_at_end_of_array_var = SingleAddrVariable::new_usize(index_at_end_of_array); // Load both values 
ctx.codegen_array_get(pointer, iterator_register, start_value_register); - ctx.codegen_array_get( - pointer, - SingleAddrVariable::new_usize(index_at_end_of_array), - end_value_register, - ); + ctx.codegen_array_get(pointer, index_at_end_of_array_var, end_value_register); // Write both values ctx.codegen_array_set(pointer, iterator_register, end_value_register); - ctx.codegen_array_set( - pointer, - SingleAddrVariable::new_usize(index_at_end_of_array), - start_value_register, - ); + ctx.codegen_array_set(pointer, index_at_end_of_array_var, start_value_register); }); self.deallocate_register(iteration_count); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/mod.rs index 9a628d601df..45b84f5311e 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/mod.rs @@ -58,6 +58,7 @@ impl std::ops::Index for Brillig { impl Ssa { /// Compile to brillig brillig functions and ACIR functions reachable from them + #[tracing::instrument(level = "trace", skip_all)] pub(crate) fn to_brillig(&self, enable_debug_trace: bool) -> Brillig { // Collect all the function ids that are reachable from brillig // That means all the functions marked as brillig and ACIR functions called by them diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs index 2d138c13f7f..57bd76d4f78 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs @@ -127,12 +127,13 @@ pub(crate) fn optimize_into_acir( ssa.check_for_underconstrained_values() }) }; + + drop(ssa_gen_span_guard); + let brillig = time("SSA to Brillig", options.print_codegen_timings, || { ssa.to_brillig(options.enable_brillig_logging) }); - drop(ssa_gen_span_guard); - let artifacts = time("SSA to ACIR", options.print_codegen_timings, || { ssa.into_acir(&brillig, 
options.expression_width) })?; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs index d3d936385b0..a0be1ee19cf 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs @@ -1527,9 +1527,9 @@ impl AcirContext { endian: Endian, input_var: AcirVar, radix_var: AcirVar, - limb_count_var: AcirVar, + limb_count: u32, result_element_type: AcirType, - ) -> Result, RuntimeError> { + ) -> Result { let radix = match self.vars[&radix_var].as_constant() { Some(radix) => radix.to_u128() as u32, None => { @@ -1540,16 +1540,6 @@ impl AcirContext { } }; - let limb_count = match self.vars[&limb_count_var].as_constant() { - Some(limb_count) => limb_count.to_u128() as u32, - None => { - return Err(RuntimeError::InternalError(InternalError::NotAConstant { - name: "limb_size".to_string(), - call_stack: self.get_call_stack(), - })); - } - }; - let input_expr = self.var_to_expression(input_var)?; let bit_size = u32::BITS - (radix - 1).leading_zeros(); @@ -1566,10 +1556,7 @@ impl AcirContext { // `Intrinsic::ToRadix` returns slices which are represented // by tuples with the structure (length, slice contents) - Ok(vec![ - AcirValue::Var(self.add_constant(limb_vars.len()), AcirType::field()), - AcirValue::Array(limb_vars.into()), - ]) + Ok(AcirValue::Array(limb_vars.into())) } /// Returns `AcirVar`s constrained to be the bit decomposition of the provided input @@ -1577,11 +1564,11 @@ impl AcirContext { &mut self, endian: Endian, input_var: AcirVar, - limb_count_var: AcirVar, + limb_count: u32, result_element_type: AcirType, - ) -> Result, RuntimeError> { + ) -> Result { let two_var = self.add_constant(2_u128); - self.radix_decompose(endian, input_var, two_var, limb_count_var, result_element_type) + self.radix_decompose(endian, input_var, 
two_var, limb_count, result_element_type) } /// Recursive helper to flatten a single AcirValue into the result vector. diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index 768cbe27a28..a2b9e46a15a 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -2177,19 +2177,38 @@ impl<'a> Context<'a> { Intrinsic::ToRadix(endian) => { let field = self.convert_value(arguments[0], dfg).into_var()?; let radix = self.convert_value(arguments[1], dfg).into_var()?; - let limb_size = self.convert_value(arguments[2], dfg).into_var()?; - let result_type = Self::array_element_type(dfg, result_ids[1]); + let Type::Array(result_type, array_length) = dfg.type_of_value(result_ids[0]) + else { + unreachable!("ICE: ToRadix result must be an array"); + }; - self.acir_context.radix_decompose(endian, field, radix, limb_size, result_type) + self.acir_context + .radix_decompose( + endian, + field, + radix, + array_length as u32, + result_type[0].clone().into(), + ) + .map(|array| vec![array]) } Intrinsic::ToBits(endian) => { let field = self.convert_value(arguments[0], dfg).into_var()?; - let bit_size = self.convert_value(arguments[1], dfg).into_var()?; - let result_type = Self::array_element_type(dfg, result_ids[1]); + let Type::Array(result_type, array_length) = dfg.type_of_value(result_ids[0]) + else { + unreachable!("ICE: ToRadix result must be an array"); + }; - self.acir_context.bit_decompose(endian, field, bit_size, result_type) + self.acir_context + .bit_decompose( + endian, + field, + array_length as u32, + result_type[0].clone().into(), + ) + .map(|array| vec![array]) } Intrinsic::ArrayLen => { let len = match self.convert_value(arguments[0], dfg) { diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs 
b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs index bf6430c36d7..0edb7daf530 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs @@ -533,7 +533,7 @@ mod tests { fn insert_constant_call() { // `bits` should be an array of constants [1, 1, 1, 0...] of length 8: // let x = 7; - // let bits = x.to_le_bits(8); + // let bits: [u1; 8] = x.to_le_bits(); let func_id = Id::test_new(0); let mut builder = FunctionBuilder::new("func".into(), func_id); let one = builder.numeric_constant(FieldElement::one(), Type::bool()); @@ -546,13 +546,7 @@ mod tests { let call_results = builder.insert_call(to_bits_id, vec![input, length], result_types).into_owned(); - let slice_len = match &builder.current_function.dfg[call_results[0]] { - Value::NumericConstant { constant, .. } => *constant, - _ => panic!(), - }; - assert_eq!(slice_len, FieldElement::from(8_u128)); - - let slice = match &builder.current_function.dfg[call_results[1]] { + let slice = match &builder.current_function.dfg[call_results[0]] { Value::Array { array, .. 
} => array, _ => panic!(), }; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index 2c6aedeca35..d3e5acb467b 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -50,32 +50,39 @@ pub(super) fn simplify_call( match intrinsic { Intrinsic::ToBits(endian) => { - if let Some(constant_args) = constant_args { + // TODO: simplify to a range constraint if `limb_count == 1` + if let (Some(constant_args), Some(return_type)) = + (constant_args, ctrl_typevars.map(|return_types| return_types.first().cloned())) + { let field = constant_args[0]; - let limb_count = constant_args[1].to_u128() as u32; - - let (len_value, result_slice) = - constant_to_radix(endian, field, 2, limb_count, dfg); + let limb_count = if let Some(Type::Array(_, array_len)) = return_type { + array_len as u32 + } else { + unreachable!("ICE: Intrinsic::ToRadix return type must be array") + }; + let result_array = constant_to_radix(endian, field, 2, limb_count, dfg); - // `Intrinsic::ToBits` returns slices which are represented - // by tuples with the structure (length, slice contents) - SimplifyResult::SimplifiedToMultiple(vec![len_value, result_slice]) + SimplifyResult::SimplifiedTo(result_array) } else { SimplifyResult::None } } Intrinsic::ToRadix(endian) => { - if let Some(constant_args) = constant_args { + // TODO: simplify to a range constraint if `limb_count == 1` + if let (Some(constant_args), Some(return_type)) = + (constant_args, ctrl_typevars.map(|return_types| return_types.first().cloned())) + { let field = constant_args[0]; let radix = constant_args[1].to_u128() as u32; - let limb_count = constant_args[2].to_u128() as u32; + let limb_count = if let Some(Type::Array(_, array_len)) = return_type { + array_len as u32 + } else { + unreachable!("ICE: Intrinsic::ToRadix return type must be 
array") + }; - let (len_value, result_slice) = - constant_to_radix(endian, field, radix, limb_count, dfg); + let result_array = constant_to_radix(endian, field, radix, limb_count, dfg); - // `Intrinsic::ToRadix` returns slices which are represented - // by tuples with the structure (length, slice contents) - SimplifyResult::SimplifiedToMultiple(vec![len_value, result_slice]) + SimplifyResult::SimplifiedTo(result_array) } else { SimplifyResult::None } @@ -584,7 +591,7 @@ fn constant_to_radix( radix: u32, limb_count: u32, dfg: &mut DataFlowGraph, -) -> (ValueId, ValueId) { +) -> ValueId { let bit_size = u32::BITS - (radix - 1).leading_zeros(); let radix_big = BigUint::from(radix); assert_eq!(BigUint::from(2u128).pow(bit_size), radix_big, "ICE: Radix must be a power of 2"); @@ -599,7 +606,7 @@ fn constant_to_radix( if endian == Endian::Big { limbs.reverse(); } - make_constant_slice(dfg, limbs, Type::unsigned(bit_size)) + make_constant_array(dfg, limbs, Type::unsigned(bit_size)) } fn to_u8_vec(dfg: &DataFlowGraph, values: im::Vector>) -> Vec { diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs index f0760f29006..836c812843e 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs @@ -63,17 +63,15 @@ impl<'a> SliceCapacityTracker<'a> { | Intrinsic::SlicePushFront | Intrinsic::SlicePopBack | Intrinsic::SliceInsert - | Intrinsic::SliceRemove => (Some(1), 1), + | Intrinsic::SliceRemove => (1, 1), // `pop_front` returns the popped element, and then the respective slice. // This means in the case of a slice with structs, the result index of the popped slice // will change depending on the number of elements in the struct. 
// For example, a slice with four elements will look as such in SSA: // v3, v4, v5, v6, v7, v8 = call slice_pop_front(v1, v2) // where v7 is the slice length and v8 is the popped slice itself. - Intrinsic::SlicePopFront => (Some(1), results.len() - 1), - // The slice capacity of these intrinsics is not determined by the arguments of the function. - Intrinsic::ToBits(_) | Intrinsic::ToRadix(_) => (None, 1), - Intrinsic::AsSlice => (Some(0), 1), + Intrinsic::SlicePopFront => (1, results.len() - 1), + Intrinsic::AsSlice => (0, 1), _ => return, }; let result_slice = results[result_index]; @@ -81,8 +79,6 @@ impl<'a> SliceCapacityTracker<'a> { Intrinsic::SlicePushBack | Intrinsic::SlicePushFront | Intrinsic::SliceInsert => { - let argument_index = argument_index - .expect("ICE: Should have an argument index for slice intrinsics"); let slice_contents = arguments[argument_index]; for arg in &arguments[(argument_index + 1)..] { @@ -100,8 +96,6 @@ impl<'a> SliceCapacityTracker<'a> { Intrinsic::SlicePopBack | Intrinsic::SliceRemove | Intrinsic::SlicePopFront => { - let argument_index = argument_index - .expect("ICE: Should have an argument index for slice intrinsics"); let slice_contents = arguments[argument_index]; if let Some(contents_capacity) = slice_sizes.get(&slice_contents) { @@ -121,8 +115,6 @@ impl<'a> SliceCapacityTracker<'a> { .insert(result_slice, FieldElement::max_num_bytes() as usize); } Intrinsic::AsSlice => { - let argument_index = argument_index - .expect("ICE: Should have an argument index for AsSlice builtin"); let array_size = self .dfg .try_get_array_length(arguments[argument_index]) diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs index 6ca7a76d740..984c0de04ca 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs @@ -172,12 +172,10 @@ 
impl Context<'_> { let typ = self.function.dfg.type_of_value(rhs); if let Type::Numeric(NumericType::Unsigned { bit_size }) = typ { let to_bits = self.function.dfg.import_intrinsic(Intrinsic::ToBits(Endian::Little)); - let length = self.field_constant(FieldElement::from(bit_size as i128)); - let result_types = - vec![Type::field(), Type::Array(Arc::new(vec![Type::bool()]), bit_size as usize)]; - let rhs_bits = self.insert_call(to_bits, vec![rhs, length], result_types); + let result_types = vec![Type::Array(Arc::new(vec![Type::bool()]), bit_size as usize)]; + let rhs_bits = self.insert_call(to_bits, vec![rhs], result_types); - let rhs_bits = rhs_bits[1]; + let rhs_bits = rhs_bits[0]; let one = self.field_constant(FieldElement::one()); let mut r = one; for i in 1..bit_size + 1 { diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs index cc02faeb3df..8d6225afabf 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs @@ -203,18 +203,6 @@ fn slice_capacity_change( SizeChange::Dec { old, new } } - Intrinsic::ToBits(_) => { - assert_eq!(results.len(), 2); - // Some tests fail this check, returning an array instead somehow: - // assert!(matches!(dfg.type_of_value(results[1]), Type::Slice(_))); - SizeChange::SetTo(results[1], FieldElement::max_num_bits() as usize) - } - // ToRadix seems to assume it is to bytes - Intrinsic::ToRadix(_) => { - assert_eq!(results.len(), 2); - assert!(matches!(dfg.type_of_value(results[1]), Type::Slice(_))); - SizeChange::SetTo(results[1], FieldElement::max_num_bytes() as usize) - } Intrinsic::AsSlice => { assert_eq!(arguments.len(), 1); assert_eq!(results.len(), 2); @@ -238,6 +226,8 @@ fn slice_capacity_change( | Intrinsic::AsField | Intrinsic::AsWitness | Intrinsic::IsUnconstrained - | Intrinsic::DerivePedersenGenerators => 
SizeChange::None, + | Intrinsic::DerivePedersenGenerators + | Intrinsic::ToBits(_) + | Intrinsic::ToRadix(_) => SizeChange::None, } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs index e63222bfc87..3fd63249201 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs @@ -460,12 +460,22 @@ impl UnresolvedTypeExpression { } } -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] /// Represents whether the definition can be referenced outside its module/crate pub enum ItemVisibility { - Public, Private, PublicCrate, + Public, +} + +impl std::fmt::Display for ItemVisibility { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ItemVisibility::Public => write!(f, "pub"), + ItemVisibility::Private => Ok(()), + ItemVisibility::PublicCrate => write!(f, "pub(crate)"), + } + } } #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/statement.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/statement.rs index c88fcba749b..2e14761a1cc 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/statement.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/statement.rs @@ -7,7 +7,7 @@ use iter_extended::vecmap; use noirc_errors::{Span, Spanned}; use super::{ - BlockExpression, Expression, ExpressionKind, GenericTypeArgs, IndexExpression, + BlockExpression, Expression, ExpressionKind, GenericTypeArgs, IndexExpression, ItemVisibility, MemberAccessExpression, MethodCallExpression, UnresolvedType, }; use crate::elaborator::types::SELF_TYPE_NAME; @@ -136,7 +136,9 @@ impl Recoverable for StatementKind { impl StatementKind { pub fn new_let( - ((pattern, r#type), expression): ((Pattern, UnresolvedType), Expression), + pattern: Pattern, + r#type: 
UnresolvedType, + expression: Expression, ) -> StatementKind { StatementKind::Let(LetStatement { pattern, @@ -300,6 +302,7 @@ impl std::fmt::Display for ModuleDeclaration { #[derive(Debug, PartialEq, Eq, Clone)] pub struct ImportStatement { + pub visibility: ItemVisibility, pub path: Path, pub alias: Option, } @@ -348,7 +351,7 @@ pub enum UseTreeKind { } impl UseTree { - pub fn desugar(self, root: Option) -> Vec { + pub fn desugar(self, root: Option, visibility: ItemVisibility) -> Vec { let prefix = if let Some(mut root) = root { root.segments.extend(self.prefix.segments); root @@ -358,10 +361,11 @@ impl UseTree { match self.kind { UseTreeKind::Path(name, alias) => { - vec![ImportStatement { path: prefix.join(name), alias }] + vec![ImportStatement { visibility, path: prefix.join(name), alias }] } UseTreeKind::List(trees) => { - trees.into_iter().flat_map(|tree| tree.desugar(Some(prefix.clone()))).collect() + let trees = trees.into_iter(); + trees.flat_map(|tree| tree.desugar(Some(prefix.clone()), visibility)).collect() } } } @@ -715,13 +719,11 @@ impl ForRange { // let fresh1 = array; let let_array = Statement { - kind: StatementKind::Let(LetStatement { - pattern: Pattern::Identifier(array_ident.clone()), - r#type: UnresolvedTypeData::Unspecified.with_span(Default::default()), - expression: array, - comptime: false, - attributes: vec![], - }), + kind: StatementKind::new_let( + Pattern::Identifier(array_ident.clone()), + UnresolvedTypeData::Unspecified.with_span(Default::default()), + array, + ), span: array_span, }; @@ -761,13 +763,11 @@ impl ForRange { // let elem = array[i]; let let_elem = Statement { - kind: StatementKind::Let(LetStatement { - pattern: Pattern::Identifier(identifier), - r#type: UnresolvedTypeData::Unspecified.with_span(Default::default()), - expression: Expression::new(loop_element, array_span), - comptime: false, - attributes: vec![], - }), + kind: StatementKind::new_let( + Pattern::Identifier(identifier), + 
UnresolvedTypeData::Unspecified.with_span(Default::default()), + Expression::new(loop_element, array_span), + ), span: array_span, }; diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs index 96183d3322f..3955e50b03e 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs @@ -21,8 +21,9 @@ use crate::{ }; use super::{ - FunctionReturnType, GenericTypeArgs, IntegerBitSize, Pattern, Signedness, UnresolvedGenerics, - UnresolvedTraitConstraint, UnresolvedType, UnresolvedTypeData, UnresolvedTypeExpression, + FunctionReturnType, GenericTypeArgs, IntegerBitSize, ItemVisibility, Pattern, Signedness, + UnresolvedGenerics, UnresolvedTraitConstraint, UnresolvedType, UnresolvedTypeData, + UnresolvedTypeExpression, }; /// Implements the [Visitor pattern](https://en.wikipedia.org/wiki/Visitor_pattern) for Noir's AST. @@ -252,7 +253,7 @@ pub trait Visitor { true } - fn visit_import(&mut self, _: &UseTree) -> bool { + fn visit_import(&mut self, _: &UseTree, _visibility: ItemVisibility) -> bool { true } @@ -470,8 +471,8 @@ impl Item { } } ItemKind::Trait(noir_trait) => noir_trait.accept(self.span, visitor), - ItemKind::Import(use_tree) => { - if visitor.visit_import(use_tree) { + ItemKind::Import(use_tree, visibility) => { + if visitor.visit_import(use_tree, *visibility) { use_tree.accept(visitor); } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/debug/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/debug/mod.rs index fe027969473..951c5220707 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/debug/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/debug/mod.rs @@ -142,13 +142,11 @@ impl DebugInstrumenter { None => false, Some(ast::Statement { kind: ast::StatementKind::Expression(ret_expr), .. 
}) => { let save_ret_expr = ast::Statement { - kind: ast::StatementKind::Let(ast::LetStatement { - pattern: ast::Pattern::Identifier(ident("__debug_expr", ret_expr.span)), - r#type: ast::UnresolvedTypeData::Unspecified.with_span(Default::default()), - expression: ret_expr.clone(), - comptime: false, - attributes: vec![], - }), + kind: ast::StatementKind::new_let( + ast::Pattern::Identifier(ident("__debug_expr", ret_expr.span)), + ast::UnresolvedTypeData::Unspecified.with_span(Default::default()), + ret_expr.clone(), + ), span: ret_expr.span, }; statements.push(save_ret_expr); @@ -242,18 +240,16 @@ impl DebugInstrumenter { }); ast::Statement { - kind: ast::StatementKind::Let(ast::LetStatement { - pattern: ast::Pattern::Tuple(vars_pattern, let_stmt.pattern.span()), - r#type: ast::UnresolvedTypeData::Unspecified.with_span(Default::default()), - comptime: false, - expression: ast::Expression { + kind: ast::StatementKind::new_let( + ast::Pattern::Tuple(vars_pattern, let_stmt.pattern.span()), + ast::UnresolvedTypeData::Unspecified.with_span(Default::default()), + ast::Expression { kind: ast::ExpressionKind::Block(ast::BlockExpression { statements: block_stmts, }), span: let_stmt.expression.span, }, - attributes: vec![], - }), + ), span: *span, } } @@ -274,13 +270,11 @@ impl DebugInstrumenter { // __debug_expr // }; - let let_kind = ast::StatementKind::Let(ast::LetStatement { - pattern: ast::Pattern::Identifier(ident("__debug_expr", assign_stmt.expression.span)), - r#type: ast::UnresolvedTypeData::Unspecified.with_span(Default::default()), - expression: assign_stmt.expression.clone(), - comptime: false, - attributes: vec![], - }); + let let_kind = ast::StatementKind::new_let( + ast::Pattern::Identifier(ident("__debug_expr", assign_stmt.expression.span)), + ast::UnresolvedTypeData::Unspecified.with_span(Default::default()), + assign_stmt.expression.clone(), + ); let expression_span = assign_stmt.expression.span; let new_assign_stmt = match &assign_stmt.lvalue { 
ast::LValue::Ident(id) => { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs index 3e71f167802..baa9c0ab371 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs @@ -333,7 +333,7 @@ impl<'context> Elaborator<'context> { TopLevelStatement::Error => (), TopLevelStatement::Module(_) - | TopLevelStatement::Import(_) + | TopLevelStatement::Import(..) | TopLevelStatement::Struct(_) | TopLevelStatement::Trait(_) | TopLevelStatement::Impl(_) diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/scope.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/scope.rs index a51fd737f74..7a98e1856b3 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/scope.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/scope.rs @@ -60,12 +60,6 @@ impl<'context> Elaborator<'context> { let mut module_id = self.module_id(); let mut path = path; - if path.kind == PathKind::Plain { - let def_map = self.def_maps.get_mut(&self.crate_id).unwrap(); - let module_data = &mut def_map.modules[module_id.local_id.0]; - module_data.use_import(&path.segments[0].ident); - } - if path.kind == PathKind::Plain && path.first_name() == SELF_TYPE_NAME { if let Some(Type::Struct(struct_type, _)) = &self.self_type { let struct_type = struct_type.borrow(); @@ -90,34 +84,47 @@ impl<'context> Elaborator<'context> { fn resolve_path_in_module(&mut self, path: Path, module_id: ModuleId) -> PathResolutionResult { let resolver = StandardPathResolver::new(module_id); - let path_resolution; - - if self.interner.lsp_mode { - let last_segment = path.last_ident(); - let location = Location::new(last_segment.span(), self.file); - let is_self_type_name = last_segment.is_self_type_name(); - - let mut references: Vec<_> = Vec::new(); - path_resolution = - resolver.resolve(self.def_maps, 
path.clone(), &mut Some(&mut references))?; - - for (referenced, segment) in references.iter().zip(path.segments) { - self.interner.add_reference( - *referenced, - Location::new(segment.ident.span(), self.file), - segment.ident.is_self_type_name(), - ); - } - self.interner.add_module_def_id_reference( - path_resolution.module_def_id, - location, - is_self_type_name, + if !self.interner.lsp_mode { + return resolver.resolve( + self.def_maps, + path, + &mut self.interner.usage_tracker, + &mut None, + ); + } + + let last_segment = path.last_ident(); + let location = Location::new(last_segment.span(), self.file); + let is_self_type_name = last_segment.is_self_type_name(); + + let mut references: Vec<_> = Vec::new(); + let path_resolution = resolver.resolve( + self.def_maps, + path.clone(), + &mut self.interner.usage_tracker, + &mut Some(&mut references), + ); + + for (referenced, segment) in references.iter().zip(path.segments) { + self.interner.add_reference( + *referenced, + Location::new(segment.ident.span(), self.file), + segment.ident.is_self_type_name(), ); - } else { - path_resolution = resolver.resolve(self.def_maps, path, &mut None)?; } + let path_resolution = match path_resolution { + Ok(path_resolution) => path_resolution, + Err(err) => return Err(err), + }; + + self.interner.add_module_def_id_reference( + path_resolution.module_def_id, + location, + is_self_type_name, + ); + Ok(path_resolution) } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs index 1c03184a8f5..c404b95d4b2 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs @@ -4,7 +4,7 @@ use noirc_errors::{Span, Spanned}; use crate::ast::{ ArrayLiteral, AssignStatement, BlockExpression, CallExpression, CastExpression, ConstrainKind, ConstructorExpression, 
ExpressionKind, ForLoopStatement, ForRange, GenericTypeArgs, Ident, - IfExpression, IndexExpression, InfixExpression, LValue, Lambda, LetStatement, Literal, + IfExpression, IndexExpression, InfixExpression, LValue, Lambda, Literal, MemberAccessExpression, MethodCallExpression, Path, PathSegment, Pattern, PrefixExpression, UnresolvedType, UnresolvedTypeData, UnresolvedTypeExpression, }; @@ -29,13 +29,7 @@ impl HirStatement { let pattern = let_stmt.pattern.to_display_ast(interner); let r#type = interner.id_type(let_stmt.expression).to_display_ast(); let expression = let_stmt.expression.to_display_ast(interner); - StatementKind::Let(LetStatement { - pattern, - r#type, - expression, - comptime: false, - attributes: Vec::new(), - }) + StatementKind::new_let(pattern, r#type, expression) } HirStatement::Constrain(constrain) => { let expr = constrain.0.to_display_ast(interner); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs index d52d4ca8c71..e5b098b41ed 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs @@ -61,6 +61,7 @@ impl<'local, 'context> Interpreter<'local, 'context> { "as_slice" => as_slice(interner, arguments, location), "expr_as_array" => expr_as_array(interner, arguments, return_type, location), "expr_as_assert" => expr_as_assert(interner, arguments, return_type, location), + "expr_as_assert_eq" => expr_as_assert_eq(interner, arguments, return_type, location), "expr_as_assign" => expr_as_assign(interner, arguments, return_type, location), "expr_as_binary_op" => expr_as_binary_op(interner, arguments, return_type, location), "expr_as_block" => expr_as_block(interner, arguments, return_type, location), @@ -125,10 +126,11 @@ impl<'local, 'context> Interpreter<'local, 'context> { "slice_push_back" => 
slice_push_back(interner, arguments, location), "slice_push_front" => slice_push_front(interner, arguments, location), "slice_remove" => slice_remove(interner, arguments, location, call_stack), + "str_as_bytes" => str_as_bytes(interner, arguments, location), "struct_def_as_type" => struct_def_as_type(interner, arguments, location), "struct_def_fields" => struct_def_fields(interner, arguments, location), "struct_def_generics" => struct_def_generics(interner, arguments, location), - "to_le_radix" => to_le_radix(arguments, location), + "to_le_radix" => to_le_radix(arguments, return_type, location), "trait_constraint_eq" => trait_constraint_eq(interner, arguments, location), "trait_constraint_hash" => trait_constraint_hash(interner, arguments, location), "trait_def_as_trait_constraint" => { @@ -241,6 +243,32 @@ fn slice_push_back( Ok(Value::Slice(values, typ)) } +fn str_as_bytes( + interner: &NodeInterner, + arguments: Vec<(Value, Location)>, + location: Location, +) -> IResult { + let (string, string_location) = check_one_argument(arguments, location)?; + + match string { + Value::String(string) => { + let string_as_bytes = string.as_bytes(); + let bytes_vector: Vec = string_as_bytes.iter().cloned().map(Value::U8).collect(); + let byte_array_type = Type::Array( + Box::new(Type::Constant(string_as_bytes.len() as u32)), + Box::new(Type::Integer(Signedness::Unsigned, IntegerBitSize::Eight)), + ); + Ok(Value::Array(bytes_vector.into(), byte_array_type)) + } + value => { + let type_var = Box::new(interner.next_type_variable()); + let expected = Type::Array(type_var.clone(), type_var); + let actual = value.get_type().into_owned(); + Err(InterpreterError::TypeMismatch { expected, actual, location: string_location }) + } + } +} + /// fn as_type(self) -> Type fn struct_def_as_type( interner: &NodeInterner, @@ -455,12 +483,24 @@ fn quoted_as_type( Ok(Value::Type(typ)) } -fn to_le_radix(arguments: Vec<(Value, Location)>, location: Location) -> IResult { - let (value, radix, 
limb_count) = check_three_arguments(arguments, location)?; +fn to_le_radix( + arguments: Vec<(Value, Location)>, + return_type: Type, + location: Location, +) -> IResult { + let (value, radix) = check_two_arguments(arguments, location)?; let value = get_field(value)?; let radix = get_u32(radix)?; - let limb_count = get_u32(limb_count)?; + let limb_count = if let Type::Array(length, _) = return_type { + if let Type::Constant(limb_count) = *length { + limb_count + } else { + return Err(InterpreterError::TypeAnnotationsNeededForMethodCall { location }); + } + } else { + return Err(InterpreterError::TypeAnnotationsNeededForMethodCall { location }); + }; // Decompose the integer into its radix digits in little endian form. let decomposed_integer = compute_to_radix(value, radix); @@ -953,6 +993,43 @@ fn expr_as_assert( }) } +// fn as_assert_eq(self) -> Option<(Expr, Expr, Option)> +fn expr_as_assert_eq( + interner: &NodeInterner, + arguments: Vec<(Value, Location)>, + return_type: Type, + location: Location, +) -> IResult { + expr_as(interner, arguments, return_type.clone(), location, |expr| { + if let ExprValue::Statement(StatementKind::Constrain(constrain)) = expr { + if constrain.2 == ConstrainKind::AssertEq { + let ExpressionKind::Infix(infix) = constrain.0.kind else { + panic!("Expected AssertEq constrain statement to have an infix expression"); + }; + + let lhs = Value::expression(infix.lhs.kind); + let rhs = Value::expression(infix.rhs.kind); + + let option_type = extract_option_generic_type(return_type); + let Type::Tuple(mut tuple_types) = option_type else { + panic!("Expected the return type option generic arg to be a tuple"); + }; + assert_eq!(tuple_types.len(), 3); + + let option_type = tuple_types.pop().unwrap(); + let message = constrain.1.map(|message| Value::expression(message.kind)); + let message = option(option_type, message).ok()?; + + Some(Value::Tuple(vec![lhs, rhs, message])) + } else { + None + } + } else { + None + } + }) +} + // fn 
as_assign(self) -> Option<(Expr, Expr)> fn expr_as_assign( interner: &NodeInterner, diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index 30c91b42b2e..6a6cabe593d 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -19,8 +19,8 @@ use crate::node_interner::{ }; use crate::ast::{ - ExpressionKind, GenericTypeArgs, Ident, LetStatement, Literal, NoirFunction, NoirStruct, - NoirTrait, NoirTypeAlias, Path, PathKind, PathSegment, UnresolvedGenerics, + ExpressionKind, GenericTypeArgs, Ident, ItemVisibility, LetStatement, Literal, NoirFunction, + NoirStruct, NoirTrait, NoirTypeAlias, Path, PathKind, PathSegment, UnresolvedGenerics, UnresolvedTraitConstraint, UnresolvedType, }; @@ -253,7 +253,7 @@ impl DefCollector { root_file_id: FileId, debug_comptime_in_file: Option<&str>, enable_arithmetic_generics: bool, - error_on_unused_imports: bool, + error_on_usage_tracker: bool, macro_processors: &[&dyn MacroProcessor], ) -> Vec<(CompilationError, FileId)> { let mut errors: Vec<(CompilationError, FileId)> = vec![]; @@ -267,13 +267,13 @@ impl DefCollector { let crate_graph = &context.crate_graph[crate_id]; for dep in crate_graph.dependencies.clone() { - let error_on_unused_imports = false; + let error_on_usage_tracker = false; errors.extend(CrateDefMap::collect_defs( dep.crate_id, context, debug_comptime_in_file, enable_arithmetic_generics, - error_on_unused_imports, + error_on_usage_tracker, macro_processors, )); @@ -286,8 +286,8 @@ impl DefCollector { def_map.extern_prelude.insert(dep.as_name(), module_id); let location = dep_def_map[dep_def_root].location; - let attriutes = ModuleAttributes { name: dep.as_name(), location, parent: None }; - context.def_interner.add_module_attributes(module_id, attriutes); + let attributes = ModuleAttributes { name: 
dep.as_name(), location, parent: None }; + context.def_interner.add_module_attributes(module_id, attributes); } // At this point, all dependencies are resolved and type checked. @@ -328,6 +328,7 @@ impl DefCollector { crate_id, &collected_import, &context.def_maps, + &mut context.def_interner.usage_tracker, &mut Some(&mut references), ); @@ -345,31 +346,89 @@ impl DefCollector { resolved_import } else { - resolve_import(crate_id, &collected_import, &context.def_maps, &mut None) + resolve_import( + crate_id, + &collected_import, + &context.def_maps, + &mut context.def_interner.usage_tracker, + &mut None, + ) }; match resolved_import { Ok(resolved_import) => { + let current_def_map = context.def_maps.get_mut(&crate_id).unwrap(); + let file_id = current_def_map.file_id(module_id); + if let Some(error) = resolved_import.error { errors.push(( DefCollectorErrorKind::PathResolutionError(error).into(), - root_file_id, + file_id, )); } // Populate module namespaces according to the imports used - let current_def_map = context.def_maps.get_mut(&crate_id).unwrap(); - let name = resolved_import.name; - for ns in resolved_import.resolved_namespace.iter_defs() { + let visibility = collected_import.visibility; + let is_prelude = resolved_import.is_prelude; + for (module_def_id, item_visibility, _) in + resolved_import.resolved_namespace.iter_items() + { + if item_visibility < visibility { + errors.push(( + DefCollectorErrorKind::CannotReexportItemWithLessVisibility { + item_name: name.clone(), + desired_visibility: visibility, + } + .into(), + file_id, + )); + } + let visibility = visibility.min(item_visibility); + let result = current_def_map.modules[resolved_import.module_scope.0] - .import(name.clone(), ns, resolved_import.is_prelude); + .import(name.clone(), visibility, module_def_id, is_prelude); + + // Empty spans could come from implicitly injected imports, and we don't want to track those + if visibility != ItemVisibility::Public + && name.span().start() < 
name.span().end() + { + let module_id = ModuleId { + krate: crate_id, + local_id: resolved_import.module_scope, + }; + + context + .def_interner + .usage_tracker + .add_unused_import(module_id, name.clone()); + } + + if visibility != ItemVisibility::Private { + let local_id = resolved_import.module_scope; + let defining_module = ModuleId { krate: crate_id, local_id }; + context.def_interner.register_name_for_auto_import( + name.to_string(), + module_def_id, + visibility, + Some(defining_module), + ); + } - let file_id = current_def_map.file_id(module_id); let last_segment = collected_import.path.last_ident(); - add_import_reference(ns, &last_segment, &mut context.def_interner, file_id); + add_import_reference( + module_def_id, + &last_segment, + &mut context.def_interner, + file_id, + ); if let Some(ref alias) = collected_import.alias { - add_import_reference(ns, alias, &mut context.def_interner, file_id); + add_import_reference( + module_def_id, + alias, + &mut context.def_interner, + file_id, + ); } if let Err((first_def, second_def)) = result { @@ -417,20 +476,24 @@ impl DefCollector { ); } - if error_on_unused_imports { - Self::check_unused_imports(context, crate_id, &mut errors); + if error_on_usage_tracker { + Self::check_usage_tracker(context, crate_id, &mut errors); } errors } - fn check_unused_imports( + fn check_usage_tracker( context: &Context, crate_id: CrateId, errors: &mut Vec<(CompilationError, FileId)>, ) { - errors.extend(context.def_maps[&crate_id].modules().iter().flat_map(|(_, module)| { - module.unused_imports().iter().map(|ident| { + let unused_imports = context.def_interner.usage_tracker.unused_imports().iter(); + let unused_imports = unused_imports.filter(|(module_id, _)| module_id.krate == crate_id); + + errors.extend(unused_imports.flat_map(|(module_id, usage_tracker)| { + let module = &context.def_maps[&crate_id].modules()[module_id.local_id.0]; + usage_tracker.iter().map(|ident| { let ident = ident.clone(); let error = 
CompilationError::ResolverError(ResolverError::UnusedImport { ident }); (error, module.location.file) @@ -456,7 +519,7 @@ fn add_import_reference( fn inject_prelude( crate_id: CrateId, - context: &Context, + context: &mut Context, crate_root: LocalModuleId, collected_imports: &mut Vec, ) { @@ -481,6 +544,7 @@ fn inject_prelude( &context.def_maps, ModuleId { krate: crate_id, local_id: crate_root }, path, + &mut context.def_interner.usage_tracker, &mut None, ) { assert!(error.is_none(), "Tried to add private item to prelude"); @@ -494,6 +558,7 @@ fn inject_prelude( collected_imports.insert( 0, ImportDirective { + visibility: ItemVisibility::Private, module_id: crate_root, path: Path { segments, kind: PathKind::Plain, span: Span::default() }, alias: None, diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs index 459c4869379..590cdc541ce 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -80,6 +80,7 @@ pub fn collect_defs( // Then add the imports to defCollector to resolve once all modules in the hierarchy have been resolved for import in ast.imports { collector.def_collector.imports.push(ImportDirective { + visibility: import.visibility, module_id: collector.module_id, path: import.path, alias: import.alias, diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/errors.rs index e705d7b6fad..f931a7cdf41 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/errors.rs @@ -1,4 +1,4 @@ -use crate::ast::{Ident, Path, UnresolvedTypeData}; +use crate::ast::{Ident, ItemVisibility, Path, UnresolvedTypeData}; use crate::hir::resolution::import::PathResolutionError; use 
crate::hir::type_check::generics::TraitGenerics; @@ -35,6 +35,8 @@ pub enum DefCollectorErrorKind { OverlappingModuleDecls { mod_name: Ident, expected_path: String, alternative_path: String }, #[error("path resolution error")] PathResolutionError(PathResolutionError), + #[error("cannot re-export {item_name} because it has less visibility than this use statement")] + CannotReexportItemWithLessVisibility { item_name: Ident, desired_visibility: ItemVisibility }, #[error("Non-struct type used in impl")] NonStructTypeInImpl { span: Span }, #[error("Cannot implement trait on a mutable reference type")] @@ -173,6 +175,12 @@ impl<'a> From<&'a DefCollectorErrorKind> for Diagnostic { ) } DefCollectorErrorKind::PathResolutionError(error) => error.into(), + DefCollectorErrorKind::CannotReexportItemWithLessVisibility{item_name, desired_visibility} => { + Diagnostic::simple_warning( + format!("cannot re-export {item_name} because it has less visibility than this use statement"), + format!("consider marking {item_name} as {desired_visibility}"), + item_name.span()) + } DefCollectorErrorKind::NonStructTypeInImpl { span } => Diagnostic::simple_error( "Non-struct type used in impl".into(), "Only struct types may have implementation methods".into(), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/module_data.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/module_data.rs index 7b14db8be77..f9542094be7 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/module_data.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/module_data.rs @@ -1,4 +1,4 @@ -use std::collections::{HashMap, HashSet}; +use std::collections::HashMap; use noirc_errors::Location; @@ -24,10 +24,6 @@ pub struct ModuleData { /// True if this module is a `contract Foo { ... }` module containing contract functions pub is_contract: bool, - - /// List of all unused imports. Each time something is imported into this module it's added - /// to this set. 
When it's used, it's removed. At the end of the program only unused imports remain. - unused_imports: HashSet, } impl ModuleData { @@ -39,7 +35,6 @@ impl ModuleData { definitions: ItemScope::default(), location, is_contract, - unused_imports: HashSet::new(), } } @@ -123,15 +118,11 @@ impl ModuleData { pub fn import( &mut self, name: Ident, + visibility: ItemVisibility, id: ModuleDefId, is_prelude: bool, ) -> Result<(), (Ident, Ident)> { - // Empty spans could come from implicitly injected imports, and we don't want to track those - if name.span().start() < name.span().end() { - self.unused_imports.insert(name.clone()); - } - - self.scope.add_item_to_namespace(name, ItemVisibility::Public, id, None, is_prelude) + self.scope.add_item_to_namespace(name, visibility, id, None, is_prelude) } pub fn find_name(&self, name: &Ident) -> PerNs { @@ -147,14 +138,4 @@ impl ModuleData { pub fn value_definitions(&self) -> impl Iterator + '_ { self.definitions.values().values().flat_map(|a| a.values().map(|(id, _, _)| *id)) } - - /// Marks an ident as being used by an import. - pub fn use_import(&mut self, ident: &Ident) { - self.unused_imports.remove(ident); - } - - /// Returns the list of all unused imports at this moment. 
- pub fn unused_imports(&self) -> &HashSet { - &self.unused_imports - } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs index 0b0d8d735eb..cede04dd582 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs @@ -150,20 +150,24 @@ impl<'a> From<&'a ResolverError> for Diagnostic { ResolverError::UnusedVariable { ident } => { let name = &ident.0.contents; - Diagnostic::simple_warning( + let mut diagnostic = Diagnostic::simple_warning( format!("unused variable {name}"), "unused variable ".to_string(), ident.span(), - ) + ); + diagnostic.unnecessary = true; + diagnostic } ResolverError::UnusedImport { ident } => { let name = &ident.0.contents; - Diagnostic::simple_warning( + let mut diagnostic = Diagnostic::simple_warning( format!("unused import {name}"), "unused import ".to_string(), ident.span(), - ) + ); + diagnostic.unnecessary = true; + diagnostic } ResolverError::VariableNotDeclared { name, span } => Diagnostic::simple_error( format!("cannot find `{name}` in this scope "), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/import.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/import.rs index b820e4664e3..938da0a879f 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/import.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/import.rs @@ -4,6 +4,7 @@ use thiserror::Error; use crate::graph::CrateId; use crate::hir::def_collector::dc_crate::CompilationError; use crate::node_interner::ReferenceId; +use crate::usage_tracker::UsageTracker; use std::collections::BTreeMap; use crate::ast::{Ident, ItemVisibility, Path, PathKind, PathSegment}; @@ -13,6 +14,7 @@ use super::errors::ResolverError; #[derive(Debug, Clone)] pub struct ImportDirective { + pub visibility: ItemVisibility, pub module_id: 
LocalModuleId, pub path: Path, pub alias: Option, @@ -86,6 +88,7 @@ pub fn resolve_import( crate_id: CrateId, import_directive: &ImportDirective, def_maps: &BTreeMap, + usage_tracker: &mut UsageTracker, path_references: &mut Option<&mut Vec>, ) -> Result { let module_scope = import_directive.module_id; @@ -93,7 +96,14 @@ pub fn resolve_import( module_id: resolved_module, namespace: resolved_namespace, mut error, - } = resolve_path_to_ns(import_directive, crate_id, crate_id, def_maps, path_references)?; + } = resolve_path_to_ns( + import_directive, + crate_id, + crate_id, + def_maps, + usage_tracker, + path_references, + )?; let name = resolve_path_name(import_directive); @@ -131,10 +141,10 @@ fn resolve_path_to_ns( crate_id: CrateId, importing_crate: CrateId, def_maps: &BTreeMap, + usage_tracker: &mut UsageTracker, path_references: &mut Option<&mut Vec>, ) -> NamespaceResolutionResult { let import_path = &import_directive.path.segments; - let def_map = &def_maps[&crate_id]; match import_directive.path.kind { crate::ast::PathKind::Crate => { @@ -144,6 +154,7 @@ fn resolve_path_to_ns( importing_crate, import_path, def_maps, + usage_tracker, path_references, ) } @@ -157,10 +168,13 @@ fn resolve_path_to_ns( import_path, import_directive.module_id, def_maps, + true, + usage_tracker, path_references, ); } + let def_map = &def_maps[&crate_id]; let current_mod_id = ModuleId { krate: crate_id, local_id: import_directive.module_id }; let current_mod = &def_map.modules[current_mod_id.local_id.0]; let first_segment = @@ -168,9 +182,11 @@ fn resolve_path_to_ns( if current_mod.find_name(first_segment).is_none() { // Resolve externally when first segment is unresolved return resolve_external_dep( - def_map, + crate_id, + // def_map, import_directive, def_maps, + usage_tracker, path_references, importing_crate, ); @@ -182,14 +198,17 @@ fn resolve_path_to_ns( import_path, import_directive.module_id, def_maps, + true, + usage_tracker, path_references, ) } crate::ast::PathKind::Dep 
=> resolve_external_dep( - def_map, + crate_id, import_directive, def_maps, + usage_tracker, path_references, importing_crate, ), @@ -204,6 +223,8 @@ fn resolve_path_to_ns( import_path, parent_module_id, def_maps, + false, + usage_tracker, path_references, ) } else { @@ -221,24 +242,31 @@ fn resolve_path_from_crate_root( import_path: &[PathSegment], def_maps: &BTreeMap, + usage_tracker: &mut UsageTracker, path_references: &mut Option<&mut Vec>, ) -> NamespaceResolutionResult { + let starting_mod = def_maps[&crate_id].root; resolve_name_in_module( crate_id, importing_crate, import_path, - def_maps[&crate_id].root, + starting_mod, def_maps, + false, + usage_tracker, path_references, ) } +#[allow(clippy::too_many_arguments)] fn resolve_name_in_module( krate: CrateId, importing_crate: CrateId, import_path: &[PathSegment], starting_mod: LocalModuleId, def_maps: &BTreeMap, + plain: bool, + usage_tracker: &mut UsageTracker, path_references: &mut Option<&mut Vec>, ) -> NamespaceResolutionResult { let def_map = &def_maps[&krate]; @@ -261,8 +289,12 @@ fn resolve_name_in_module( return Err(PathResolutionError::Unresolved(first_segment.clone())); } + usage_tracker.mark_as_used(current_mod_id, first_segment); + let mut warning: Option = None; - for (last_segment, current_segment) in import_path.iter().zip(import_path.iter().skip(1)) { + for (index, (last_segment, current_segment)) in + import_path.iter().zip(import_path.iter().skip(1)).enumerate() + { let last_segment = &last_segment.ident; let current_segment = ¤t_segment.ident; @@ -298,13 +330,17 @@ fn resolve_name_in_module( }; warning = warning.or_else(|| { - if can_reference_module_id( - def_maps, - importing_crate, - starting_mod, - current_mod_id, - visibility, - ) { + // If the path is plain, the first segment will always refer to + // something that's visible from the current module. 
+ if (plain && index == 0) + || can_reference_module_id( + def_maps, + importing_crate, + starting_mod, + current_mod_id, + visibility, + ) + { None } else { Some(PathResolutionError::Private(last_segment.clone())) @@ -320,6 +356,8 @@ fn resolve_name_in_module( return Err(PathResolutionError::Unresolved(current_segment.clone())); } + usage_tracker.mark_as_used(current_mod_id, current_segment); + current_ns = found_ns; } @@ -334,15 +372,18 @@ fn resolve_path_name(import_directive: &ImportDirective) -> Ident { } fn resolve_external_dep( - current_def_map: &CrateDefMap, + crate_id: CrateId, directive: &ImportDirective, def_maps: &BTreeMap, + usage_tracker: &mut UsageTracker, path_references: &mut Option<&mut Vec>, importing_crate: CrateId, ) -> NamespaceResolutionResult { // Use extern_prelude to get the dep let path = &directive.path.segments; + let current_def_map = &def_maps[&crate_id]; + // Fetch the root module from the prelude let crate_name = &path.first().unwrap().ident; let dep_module = current_def_map @@ -365,13 +406,21 @@ fn resolve_external_dep( span: Span::default(), }; let dep_directive = ImportDirective { + visibility: ItemVisibility::Private, module_id: dep_module.local_id, path, alias: directive.alias.clone(), is_prelude: false, }; - resolve_path_to_ns(&dep_directive, dep_module.krate, importing_crate, def_maps, path_references) + resolve_path_to_ns( + &dep_directive, + dep_module.krate, + importing_crate, + def_maps, + usage_tracker, + path_references, + ) } // Returns false if the given private function is being called from a non-child module, or diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/path_resolver.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/path_resolver.rs index 712951ad6cb..50089d849ae 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/path_resolver.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/path_resolver.rs @@ -1,6 +1,7 @@ use 
super::import::{resolve_import, ImportDirective, PathResolution, PathResolutionResult}; -use crate::ast::Path; +use crate::ast::{ItemVisibility, Path}; use crate::node_interner::ReferenceId; +use crate::usage_tracker::UsageTracker; use std::collections::BTreeMap; use crate::graph::CrateId; @@ -15,6 +16,7 @@ pub trait PathResolver { &self, def_maps: &BTreeMap, path: Path, + usage_tracker: &mut UsageTracker, path_references: &mut Option<&mut Vec>, ) -> PathResolutionResult; @@ -39,9 +41,10 @@ impl PathResolver for StandardPathResolver { &self, def_maps: &BTreeMap, path: Path, + usage_tracker: &mut UsageTracker, path_references: &mut Option<&mut Vec>, ) -> PathResolutionResult { - resolve_path(def_maps, self.module_id, path, path_references) + resolve_path(def_maps, self.module_id, path, usage_tracker, path_references) } fn local_module_id(&self) -> LocalModuleId { @@ -59,12 +62,19 @@ pub fn resolve_path( def_maps: &BTreeMap, module_id: ModuleId, path: Path, + usage_tracker: &mut UsageTracker, path_references: &mut Option<&mut Vec>, ) -> PathResolutionResult { // lets package up the path into an ImportDirective and resolve it using that - let import = - ImportDirective { module_id: module_id.local_id, path, alias: None, is_prelude: false }; - let resolved_import = resolve_import(module_id.krate, &import, def_maps, path_references)?; + let import = ImportDirective { + visibility: ItemVisibility::Private, + module_id: module_id.local_id, + path, + alias: None, + is_prelude: false, + }; + let resolved_import = + resolve_import(module_id.krate, &import, def_maps, usage_tracker, path_references)?; let namespace = resolved_import.resolved_namespace; let id = diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs index 17642843757..5eb7a40dcd7 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs +++ 
b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs @@ -357,7 +357,9 @@ impl<'a> From<&'a TypeCheckError> for Diagnostic { let primary_message = error.to_string(); let secondary_message = note.clone().unwrap_or_default(); - Diagnostic::simple_warning(primary_message, secondary_message, *span) + let mut diagnostic = Diagnostic::simple_warning(primary_message, secondary_message, *span); + diagnostic.deprecated = true; + diagnostic } TypeCheckError::UnusedResultError { expr_type, expr_span } => { let msg = format!("Unused expression result of type {expr_type}"); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/lib.rs b/noir/noir-repo/compiler/noirc_frontend/src/lib.rs index b14f65a3e35..ec09f680bc2 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/lib.rs @@ -20,6 +20,7 @@ pub mod monomorphization; pub mod node_interner; pub mod parser; pub mod resolve_locations; +pub mod usage_tracker; pub mod hir; pub mod hir_def; diff --git a/noir/noir-repo/compiler/noirc_frontend/src/locations.rs b/noir/noir-repo/compiler/noirc_frontend/src/locations.rs index 0ac13a58ecf..58de235455c 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/locations.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/locations.rs @@ -278,7 +278,8 @@ impl NodeInterner { } pub(crate) fn register_module(&mut self, id: ModuleId, name: String) { - self.register_name_for_auto_import(name, ModuleDefId::ModuleId(id), ItemVisibility::Public); + let visibility = ItemVisibility::Public; + self.register_name_for_auto_import(name, ModuleDefId::ModuleId(id), visibility, None); } pub(crate) fn register_global( @@ -290,7 +291,7 @@ impl NodeInterner { self.add_definition_location(ReferenceId::Global(id), Some(parent_module_id)); let visibility = ItemVisibility::Public; - self.register_name_for_auto_import(name, ModuleDefId::GlobalId(id), visibility); + self.register_name_for_auto_import(name, ModuleDefId::GlobalId(id), visibility, 
None); } pub(crate) fn register_struct( @@ -302,13 +303,14 @@ impl NodeInterner { self.add_definition_location(ReferenceId::Struct(id), Some(parent_module_id)); let visibility = ItemVisibility::Public; - self.register_name_for_auto_import(name, ModuleDefId::TypeId(id), visibility); + self.register_name_for_auto_import(name, ModuleDefId::TypeId(id), visibility, None); } pub(crate) fn register_trait(&mut self, id: TraitId, name: String, parent_module_id: ModuleId) { self.add_definition_location(ReferenceId::Trait(id), Some(parent_module_id)); - self.register_name_for_auto_import(name, ModuleDefId::TraitId(id), ItemVisibility::Public); + let visibility = ItemVisibility::Public; + self.register_name_for_auto_import(name, ModuleDefId::TraitId(id), visibility, None); } pub(crate) fn register_type_alias( @@ -320,31 +322,34 @@ impl NodeInterner { self.add_definition_location(ReferenceId::Alias(id), Some(parent_module_id)); let visibility = ItemVisibility::Public; - self.register_name_for_auto_import(name, ModuleDefId::TypeAliasId(id), visibility); + self.register_name_for_auto_import(name, ModuleDefId::TypeAliasId(id), visibility, None); } pub(crate) fn register_function(&mut self, id: FuncId, func_def: &FunctionDefinition) { - self.register_name_for_auto_import( - func_def.name.0.contents.clone(), - ModuleDefId::FunctionId(id), - func_def.visibility, - ); + let name = func_def.name.0.contents.clone(); + let id = ModuleDefId::FunctionId(id); + self.register_name_for_auto_import(name, id, func_def.visibility, None); } - fn register_name_for_auto_import( + pub fn register_name_for_auto_import( &mut self, name: String, module_def_id: ModuleDefId, visibility: ItemVisibility, + defining_module: Option, ) { if !self.lsp_mode { return; } - self.auto_import_names.entry(name).or_default().push((module_def_id, visibility)); + let entry = self.auto_import_names.entry(name).or_default(); + entry.push((module_def_id, visibility, defining_module)); } - pub fn 
get_auto_import_names(&self) -> &HashMap> { + #[allow(clippy::type_complexity)] + pub fn get_auto_import_names( + &self, + ) -> &HashMap)>> { &self.auto_import_names } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs b/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs index 32f25790e12..4a73df6a15f 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs @@ -27,6 +27,7 @@ use crate::hir::type_check::generics::TraitGenerics; use crate::hir_def::traits::NamedType; use crate::macros_api::ModuleDefId; use crate::macros_api::UnaryOp; +use crate::usage_tracker::UsageTracker; use crate::QuotedType; use crate::ast::{BinaryOpKind, FunctionDefinition, ItemVisibility}; @@ -253,9 +254,11 @@ pub struct NodeInterner { pub(crate) reference_modules: HashMap, // All names (and their definitions) that can be offered for auto_import. + // The third value in the tuple is the module where the definition is (only for pub use). // These include top-level functions, global variables and types, but excludes // impl and trait-impl methods. - pub(crate) auto_import_names: HashMap>, + pub(crate) auto_import_names: + HashMap)>>, /// Each value currently in scope in the comptime interpreter. /// Each element of the Vec represents a scope with every scope together making @@ -264,6 +267,8 @@ pub struct NodeInterner { /// This is stored in the NodeInterner so that the Elaborator from each crate can /// share the same global values. pub(crate) comptime_scopes: Vec>, + + pub(crate) usage_tracker: UsageTracker, } /// A dependency in the dependency graph may be a type or a definition. 
@@ -650,6 +655,7 @@ impl Default for NodeInterner { auto_import_names: HashMap::default(), comptime_scopes: vec![HashMap::default()], trait_impl_associated_types: HashMap::default(), + usage_tracker: UsageTracker::default(), } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/noir_parser.lalrpop b/noir/noir-repo/compiler/noirc_frontend/src/noir_parser.lalrpop index 1488a53183e..01b8be8f721 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/noir_parser.lalrpop +++ b/noir/noir-repo/compiler/noirc_frontend/src/noir_parser.lalrpop @@ -103,7 +103,7 @@ extern { pub(crate) TopLevelStatement: TopLevelStatement = { "use" r"[\t\r\n ]+" ";" EOF => { - TopLevelStatement::Import(use_tree) + TopLevelStatement::Import(use_tree, crate::ast::ItemVisibility::Private) } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/errors.rs index 2e38d7ae83b..ad6f6b928ab 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/errors.rs @@ -165,42 +165,51 @@ impl std::fmt::Display for ParserError { impl<'a> From<&'a ParserError> for Diagnostic { fn from(error: &'a ParserError) -> Diagnostic { match &error.reason { - Some(reason) => { - match reason { - ParserErrorReason::ConstrainDeprecated => Diagnostic::simple_error( + Some(reason) => match reason { + ParserErrorReason::ConstrainDeprecated => { + let mut diagnostic = Diagnostic::simple_error( "Use of deprecated keyword 'constrain'".into(), "The 'constrain' keyword is deprecated. Please use the 'assert' function instead.".into(), error.span, - ), - ParserErrorReason::ComptimeDeprecated => Diagnostic::simple_warning( + ); + diagnostic.deprecated = true; + diagnostic + } + ParserErrorReason::ComptimeDeprecated => { + let mut diagnostic = Diagnostic::simple_warning( "Use of deprecated keyword 'comptime'".into(), "The 'comptime' keyword has been deprecated. 
It can be removed without affecting your program".into(), error.span, + ) ; + diagnostic.deprecated = true; + diagnostic + } + ParserErrorReason::InvalidBitSize(bit_size) => Diagnostic::simple_error( + format!("Use of invalid bit size {}", bit_size), + format!( + "Allowed bit sizes for integers are {}", + IntegerBitSize::allowed_sizes() + .iter() + .map(|n| n.to_string()) + .collect::>() + .join(", ") ), - ParserErrorReason::InvalidBitSize(bit_size) => Diagnostic::simple_error( - format!("Use of invalid bit size {}", bit_size), - format!("Allowed bit sizes for integers are {}", IntegerBitSize::allowed_sizes().iter().map(|n| n.to_string()).collect::>().join(", ")), - error.span, - ), - ParserErrorReason::ExperimentalFeature(_) => Diagnostic::simple_warning( - reason.to_string(), - "".into(), - error.span, - ), - ParserErrorReason::TraitImplFunctionModifiers => Diagnostic::simple_warning( - reason.to_string(), - "".into(), - error.span, - ), - ParserErrorReason::ExpectedPatternButFoundType(ty) => { - Diagnostic::simple_error("Expected a ; separating these two statements".into(), format!("{ty} is a type and cannot be used as a variable name"), error.span) - } - ParserErrorReason::Lexer(error) => error.into(), - other => { - Diagnostic::simple_error(format!("{other}"), String::new(), error.span) - } + error.span, + ), + ParserErrorReason::ExperimentalFeature(_) => { + Diagnostic::simple_warning(reason.to_string(), "".into(), error.span) } - } + ParserErrorReason::TraitImplFunctionModifiers => { + Diagnostic::simple_warning(reason.to_string(), "".into(), error.span) + } + ParserErrorReason::ExpectedPatternButFoundType(ty) => Diagnostic::simple_error( + "Expected a ; separating these two statements".into(), + format!("{ty} is a type and cannot be used as a variable name"), + error.span, + ), + ParserErrorReason::Lexer(error) => error.into(), + other => Diagnostic::simple_error(format!("{other}"), String::new(), error.span), + }, None => { let primary = error.to_string(); 
Diagnostic::simple_error(primary, String::new(), error.span) diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/mod.rs index 11944cd3304..c82906b69a2 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/mod.rs @@ -12,8 +12,9 @@ mod labels; mod parser; use crate::ast::{ - Expression, Ident, ImportStatement, LetStatement, ModuleDeclaration, NoirFunction, NoirStruct, - NoirTrait, NoirTraitImpl, NoirTypeAlias, Recoverable, StatementKind, TypeImpl, UseTree, + Expression, Ident, ImportStatement, ItemVisibility, LetStatement, ModuleDeclaration, + NoirFunction, NoirStruct, NoirTrait, NoirTraitImpl, NoirTypeAlias, Recoverable, StatementKind, + TypeImpl, UseTree, }; use crate::token::{Keyword, Token}; @@ -32,7 +33,7 @@ pub use parser::{ pub enum TopLevelStatement { Function(NoirFunction), Module(ModuleDeclaration), - Import(UseTree), + Import(UseTree, ItemVisibility), Struct(NoirStruct), Trait(NoirTrait), TraitImpl(NoirTraitImpl), @@ -48,7 +49,7 @@ impl TopLevelStatement { match self { TopLevelStatement::Function(f) => Some(ItemKind::Function(f)), TopLevelStatement::Module(m) => Some(ItemKind::ModuleDecl(m)), - TopLevelStatement::Import(i) => Some(ItemKind::Import(i)), + TopLevelStatement::Import(i, visibility) => Some(ItemKind::Import(i, visibility)), TopLevelStatement::Struct(s) => Some(ItemKind::Struct(s)), TopLevelStatement::Trait(t) => Some(ItemKind::Trait(t)), TopLevelStatement::TraitImpl(t) => Some(ItemKind::TraitImpl(t)), @@ -298,7 +299,7 @@ impl ParsedModule { for item in self.items { match item.kind { - ItemKind::Import(import) => module.push_import(import), + ItemKind::Import(import, visibility) => module.push_import(import, visibility), ItemKind::Function(func) => module.push_function(func), ItemKind::Struct(typ) => module.push_type(typ), ItemKind::Trait(noir_trait) => module.push_trait(noir_trait), @@ -323,7 +324,7 @@ 
pub struct Item { #[derive(Clone, Debug)] pub enum ItemKind { - Import(UseTree), + Import(UseTree, ItemVisibility), Function(NoirFunction), Struct(NoirStruct), Trait(NoirTrait), @@ -398,8 +399,8 @@ impl SortedModule { self.type_aliases.push(type_alias); } - fn push_import(&mut self, import_stmt: UseTree) { - self.imports.extend(import_stmt.desugar(None)); + fn push_import(&mut self, import_stmt: UseTree, visibility: ItemVisibility) { + self.imports.extend(import_stmt.desugar(None, visibility)); } fn push_module_decl(&mut self, mod_decl: ModuleDeclaration) { @@ -497,7 +498,13 @@ impl std::fmt::Display for TopLevelStatement { match self { TopLevelStatement::Function(fun) => fun.fmt(f), TopLevelStatement::Module(m) => m.fmt(f), - TopLevelStatement::Import(tree) => write!(f, "use {tree}"), + TopLevelStatement::Import(tree, visibility) => { + if visibility == &ItemVisibility::Private { + write!(f, "use {tree}") + } else { + write!(f, "{visibility} use {tree}") + } + } TopLevelStatement::Trait(t) => t.fmt(f), TopLevelStatement::TraitImpl(i) => i.fmt(f), TopLevelStatement::Struct(s) => s.fmt(f), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs index 8a894ec2b83..bead1e69006 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs @@ -27,6 +27,7 @@ use self::path::as_trait_path; use self::primitives::{keyword, macro_quote_marker, mutable_reference, variable}; use self::types::{generic_type_args, maybe_comp_time}; pub use types::parse_type; +use visibility::visibility_modifier; use super::{ foldl_with_span, labels::ParsingRuleLabel, parameter_name_recovery, parameter_recovery, @@ -64,6 +65,7 @@ mod primitives; mod structs; pub(super) mod traits; mod types; +mod visibility; // synthesized by LALRPOP lalrpop_mod!(pub noir_parser); @@ -95,7 +97,7 @@ pub fn parse_program(source_program: &str) -> (ParsedModule, 
Vec) { for parsed_item in &parsed_module.items { if lalrpop_parser_supports_kind(&parsed_item.kind) { match &parsed_item.kind { - ItemKind::Import(parsed_use_tree) => { + ItemKind::Import(parsed_use_tree, _visibility) => { prototype_parse_use_tree(Some(parsed_use_tree), source_program); } // other kinds prevented by lalrpop_parser_supports_kind @@ -139,7 +141,7 @@ fn prototype_parse_use_tree(expected_use_tree_opt: Option<&UseTree>, input: &str ); match calculated.unwrap() { - TopLevelStatement::Import(parsed_use_tree) => { + TopLevelStatement::Import(parsed_use_tree, _visibility) => { assert_eq!(expected_use_tree, &parsed_use_tree); } unexpected_calculated => { @@ -161,7 +163,7 @@ fn prototype_parse_use_tree(expected_use_tree_opt: Option<&UseTree>, input: &str } fn lalrpop_parser_supports_kind(kind: &ItemKind) -> bool { - matches!(kind, ItemKind::Import(_)) + matches!(kind, ItemKind::Import(..)) } /// program: module EOF @@ -438,7 +440,10 @@ fn module_declaration() -> impl NoirParser { } fn use_statement() -> impl NoirParser { - keyword(Keyword::Use).ignore_then(use_tree()).map(TopLevelStatement::Import) + visibility_modifier() + .then_ignore(keyword(Keyword::Use)) + .then(use_tree()) + .map(|(visibility, use_tree)| TopLevelStatement::Import(use_tree, visibility)) } fn rename() -> impl NoirParser> { @@ -574,7 +579,8 @@ fn declaration<'a, P>(expr_parser: P) -> impl NoirParser + 'a where P: ExprParser + 'a, { - let_statement(expr_parser).map(StatementKind::new_let) + let_statement(expr_parser) + .map(|((pattern, typ), expr)| StatementKind::new_let(pattern, typ, expr)) } pub fn pattern() -> impl NoirParser { @@ -1555,7 +1561,7 @@ mod test { parse_recover(&use_statement(), &use_statement_str); use_statement_str.push(';'); match result_opt.unwrap() { - TopLevelStatement::Import(expected_use_statement) => { + TopLevelStatement::Import(expected_use_statement, _visibility) => { Some(expected_use_statement) } _ => unreachable!(), diff --git 
a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/function.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/function.rs index 56760898374..9328c882e54 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/function.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/function.rs @@ -3,7 +3,9 @@ use super::{ block, fresh_statement, ident, keyword, maybe_comp_time, nothing, optional_visibility, parameter_name_recovery, parameter_recovery, parenthesized, parse_type, pattern, primitives::token_kind, - self_parameter, where_clause, NoirParser, + self_parameter, + visibility::visibility_modifier, + where_clause, NoirParser, }; use crate::token::{Keyword, Token, TokenKind}; use crate::{ @@ -73,21 +75,6 @@ pub(super) fn function_definition(allow_self: bool) -> impl NoirParser impl NoirParser { - let is_pub_crate = (keyword(Keyword::Pub) - .then_ignore(just(Token::LeftParen)) - .then_ignore(keyword(Keyword::Crate)) - .then_ignore(just(Token::RightParen))) - .map(|_| ItemVisibility::PublicCrate); - - let is_pub = keyword(Keyword::Pub).map(|_| ItemVisibility::Public); - - let is_private = empty().map(|_| ItemVisibility::Private); - - choice((is_pub_crate, is_pub, is_private)) -} - /// function_modifiers: 'unconstrained'? (visibility)? /// /// returns (is_unconstrained, visibility) for whether each keyword was present diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/visibility.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/visibility.rs new file mode 100644 index 00000000000..d9c1abf2123 --- /dev/null +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/visibility.rs @@ -0,0 +1,27 @@ +use chumsky::{ + prelude::{choice, empty, just}, + Parser, +}; + +use crate::{ + ast::ItemVisibility, + parser::NoirParser, + token::{Keyword, Token}, +}; + +use super::primitives::keyword; + +/// visibility_modifier: 'pub(crate)'? 'pub'? 
'' +pub(crate) fn visibility_modifier() -> impl NoirParser { + let is_pub_crate = (keyword(Keyword::Pub) + .then_ignore(just(Token::LeftParen)) + .then_ignore(keyword(Keyword::Crate)) + .then_ignore(just(Token::RightParen))) + .map(|_| ItemVisibility::PublicCrate); + + let is_pub = keyword(Keyword::Pub).map(|_| ItemVisibility::Public); + + let is_private = empty().map(|_| ItemVisibility::Private); + + choice((is_pub_crate, is_pub, is_private)) +} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/resolve_locations.rs b/noir/noir-repo/compiler/noirc_frontend/src/resolve_locations.rs index 61ff6baf919..b9e86bf0ef7 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/resolve_locations.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/resolve_locations.rs @@ -4,7 +4,7 @@ use noirc_errors::Location; use crate::hir_def::expr::HirExpression; use crate::hir_def::types::Type; -use crate::node_interner::{DefinitionKind, Node, NodeInterner}; +use crate::node_interner::{DefinitionId, DefinitionKind, Node, NodeInterner}; impl NodeInterner { /// Scans the interner for the item which is located at that [Location] @@ -108,14 +108,18 @@ impl NodeInterner { ) -> Option { match expression { HirExpression::Ident(ident, _) => { - let definition_info = self.definition(ident.id); - match definition_info.kind { - DefinitionKind::Function(func_id) => { - Some(self.function_meta(&func_id).location) + if ident.id != DefinitionId::dummy_id() { + let definition_info = self.definition(ident.id); + match definition_info.kind { + DefinitionKind::Function(func_id) => { + Some(self.function_meta(&func_id).location) + } + DefinitionKind::Local(_local_id) => Some(definition_info.location), + DefinitionKind::Global(_global_id) => Some(definition_info.location), + _ => None, } - DefinitionKind::Local(_local_id) => Some(definition_info.location), - DefinitionKind::Global(_global_id) => Some(definition_info.location), - _ => None, + } else { + None } } HirExpression::Constructor(expr) => { diff 
--git a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs index 870c781b89d..a30907211a3 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs @@ -3199,7 +3199,7 @@ fn as_trait_path_syntax_no_impl() { } #[test] -fn errors_on_unused_import() { +fn errors_on_unused_private_import() { let src = r#" mod foo { pub fn bar() {} @@ -3231,3 +3231,113 @@ fn errors_on_unused_import() { assert_eq!(ident.to_string(), "bar"); } + +#[test] +fn errors_on_unused_pub_crate_import() { + let src = r#" + mod foo { + pub fn bar() {} + pub fn baz() {} + + trait Foo { + } + } + + pub(crate) use foo::bar; + use foo::baz; + use foo::Foo; + + impl Foo for Field { + } + + fn main() { + baz(); + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::ResolverError(ResolverError::UnusedImport { ident }) = &errors[0].0 + else { + panic!("Expected an unused import error"); + }; + + assert_eq!(ident.to_string(), "bar"); +} + +#[test] +fn warns_on_use_of_private_exported_item() { + let src = r#" + mod foo { + mod bar { + pub fn baz() {} + } + + use bar::baz; + + fn qux() { + baz(); + } + } + + fn main() { + foo::baz(); + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 2); // An existing bug causes this error to be duplicated + + assert!(matches!( + &errors[0].0, + CompilationError::ResolverError(ResolverError::PathResolutionError( + PathResolutionError::Private(..), + )) + )); +} + +#[test] +fn can_use_pub_use_item() { + let src = r#" + mod foo { + mod bar { + pub fn baz() {} + } + + pub use bar::baz; + } + + fn main() { + foo::baz(); + } + "#; + assert_no_errors(src); +} + +#[test] +fn warns_on_re_export_of_item_with_less_visibility() { + let src = r#" + mod foo { + mod bar { + pub(crate) fn baz() {} + } + + pub use bar::baz; + } + + fn main() { + foo::baz(); + } + "#; + + let errors = 
get_program_errors(src); + assert_eq!(errors.len(), 1); + + assert!(matches!( + &errors[0].0, + CompilationError::DefinitionError( + DefCollectorErrorKind::CannotReexportItemWithLessVisibility { .. } + ) + )); +} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/usage_tracker.rs b/noir/noir-repo/compiler/noirc_frontend/src/usage_tracker.rs new file mode 100644 index 00000000000..d8b7b271734 --- /dev/null +++ b/noir/noir-repo/compiler/noirc_frontend/src/usage_tracker.rs @@ -0,0 +1,26 @@ +use std::collections::HashSet; + +use rustc_hash::FxHashMap as HashMap; + +use crate::{ast::Ident, hir::def_map::ModuleId}; + +#[derive(Debug, Default)] +pub struct UsageTracker { + /// List of all unused imports in each module. Each time something is imported it's added + /// to the module's set. When it's used, it's removed. At the end of the program only unused imports remain. + unused_imports: HashMap>, +} + +impl UsageTracker { + pub(crate) fn add_unused_import(&mut self, module_id: ModuleId, name: Ident) { + self.unused_imports.entry(module_id).or_default().insert(name); + } + + pub(crate) fn mark_as_used(&mut self, current_mod_id: ModuleId, name: &Ident) { + self.unused_imports.entry(current_mod_id).or_default().remove(name); + } + + pub(crate) fn unused_imports(&self) -> &HashMap> { + &self.unused_imports + } +} diff --git a/noir/noir-repo/docs/docs/noir/concepts/data_types/fields.md b/noir/noir-repo/docs/docs/noir/concepts/data_types/fields.md index a10a4810788..b5af20f7d7e 100644 --- a/noir/noir-repo/docs/docs/noir/concepts/data_types/fields.md +++ b/noir/noir-repo/docs/docs/noir/concepts/data_types/fields.md @@ -41,103 +41,67 @@ After declaring a Field, you can use these common methods on it: Transforms the field into an array of bits, Little Endian. 
-```rust -fn to_le_bits(_x : Field, _bit_size: u32) -> [u1] -``` +#include_code to_le_bits noir_stdlib/src/field/mod.nr rust example: -```rust -fn main() { - let field = 2; - let bits = field.to_le_bits(32); -} -``` +#include_code to_le_bits_example noir_stdlib/src/field/mod.nr rust + ### to_be_bits Transforms the field into an array of bits, Big Endian. -```rust -fn to_be_bits(_x : Field, _bit_size: u32) -> [u1] -``` +#include_code to_be_bits noir_stdlib/src/field/mod.nr rust example: -```rust -fn main() { - let field = 2; - let bits = field.to_be_bits(32); -} -``` +#include_code to_be_bits_example noir_stdlib/src/field/mod.nr rust + ### to_le_bytes Transforms into an array of bytes, Little Endian -```rust -fn to_le_bytes(_x : Field, byte_size: u32) -> [u8] -``` +#include_code to_le_bytes noir_stdlib/src/field/mod.nr rust example: -```rust -fn main() { - let field = 2; - let bytes = field.to_le_bytes(4); -} -``` +#include_code to_le_bytes_example noir_stdlib/src/field/mod.nr rust ### to_be_bytes Transforms into an array of bytes, Big Endian -```rust -fn to_be_bytes(_x : Field, byte_size: u32) -> [u8] -``` +#include_code to_be_bytes noir_stdlib/src/field/mod.nr rust example: -```rust -fn main() { - let field = 2; - let bytes = field.to_be_bytes(4); -} -``` +#include_code to_be_bytes_example noir_stdlib/src/field/mod.nr rust + ### to_le_radix -Decomposes into a vector over the specified base, Little Endian +Decomposes into an array over the specified base, Little Endian + +#include_code to_le_radix noir_stdlib/src/field/mod.nr rust -```rust -fn to_le_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] -``` example: -```rust -fn main() { - let field = 2; - let radix = field.to_le_radix(256, 4); -} -``` +#include_code to_le_radix_example noir_stdlib/src/field/mod.nr rust + ### to_be_radix -Decomposes into a vector over the specified base, Big Endian +Decomposes into an array over the specified base, Big Endian -```rust -fn to_be_radix(_x : Field, _radix: u32, 
_result_len: u32) -> [u8] -``` +#include_code to_be_radix noir_stdlib/src/field/mod.nr rust example: -```rust -fn main() { - let field = 2; - let radix = field.to_be_radix(256, 4); -} -``` +#include_code to_be_radix_example noir_stdlib/src/field/mod.nr rust + ### pow_32 @@ -161,9 +125,7 @@ fn main() { Adds a constraint to specify that the field can be represented with `bit_size` number of bits -```rust -fn assert_max_bit_size(self, bit_size: u32) -``` +#include_code assert_max_bit_size noir_stdlib/src/field/mod.nr rust example: diff --git a/noir/noir-repo/docs/docs/noir/modules_packages_crates/modules.md b/noir/noir-repo/docs/docs/noir/modules_packages_crates/modules.md index 16b6307d2fd..d21b009be3b 100644 --- a/noir/noir-repo/docs/docs/noir/modules_packages_crates/modules.md +++ b/noir/noir-repo/docs/docs/noir/modules_packages_crates/modules.md @@ -182,4 +182,30 @@ fn from_bar() { from_foo(); // invokes super::from_foo(), which is bar::from_foo() super::from_foo(); // also invokes bar::from_foo() } -``` \ No newline at end of file +``` + +### `use` visibility + +`use` declarations are private to the containing module, by default. However, like functions, +they can be marked as `pub` or `pub(crate)`. Such a use declaration serves to _re-export_ a name. +A public `use` declaration can therefore redirect some public name to a different target definition: +even a definition with a private canonical path, inside a different module. + +An example of re-exporting: + +```rust +mod some_module { + pub use foo::{bar, baz}; + mod foo { + pub fn bar() {} + pub fn baz() {} + } +} + +fn main() { + some_module::bar(); + some_module::baz(); +} +``` + +In this example, the module `some_module` re-exports two public names defined in `foo`. 
\ No newline at end of file diff --git a/noir/noir-repo/docs/docs/noir/standard_library/meta/expr.md b/noir/noir-repo/docs/docs/noir/standard_library/meta/expr.md index 57f0fce24c1..3a3c61b41f5 100644 --- a/noir/noir-repo/docs/docs/noir/standard_library/meta/expr.md +++ b/noir/noir-repo/docs/docs/noir/standard_library/meta/expr.md @@ -18,6 +18,13 @@ If this expression is an array, this returns a slice of each element in the arra If this expression is an assert, this returns the assert expression and the optional message. +### as_assert_eq + +#include_code as_assert_eq noir_stdlib/src/meta/expr.nr rust + +If this expression is an assert_eq, this returns the left-hand-side and right-hand-side +expressions, together with the optional message. + ### as_assign #include_code as_assign noir_stdlib/src/meta/expr.nr rust diff --git a/noir/noir-repo/noir_stdlib/src/array.nr b/noir/noir-repo/noir_stdlib/src/array.nr index 23683a54e45..68e134b56fa 100644 --- a/noir/noir-repo/noir_stdlib/src/array.nr +++ b/noir/noir-repo/noir_stdlib/src/array.nr @@ -1,5 +1,4 @@ use crate::cmp::Ord; -use crate::option::Option; use crate::convert::From; impl [T; N] { diff --git a/noir/noir-repo/noir_stdlib/src/collections/map.nr b/noir/noir-repo/noir_stdlib/src/collections/map.nr index 4607b06d667..27a7d0d3550 100644 --- a/noir/noir-repo/noir_stdlib/src/collections/map.nr +++ b/noir/noir-repo/noir_stdlib/src/collections/map.nr @@ -1,5 +1,4 @@ use crate::cmp::Eq; -use crate::collections::vec::Vec; use crate::option::Option; use crate::default::Default; use crate::hash::{Hash, Hasher, BuildHasher}; diff --git a/noir/noir-repo/noir_stdlib/src/collections/umap.nr b/noir/noir-repo/noir_stdlib/src/collections/umap.nr index c552c053a92..c71905e63b3 100644 --- a/noir/noir-repo/noir_stdlib/src/collections/umap.nr +++ b/noir/noir-repo/noir_stdlib/src/collections/umap.nr @@ -1,10 +1,7 @@ use crate::cmp::Eq; -use crate::collections::vec::Vec; use crate::option::Option; use crate::default::Default; -use 
crate::hash::{Hash, Hasher, BuildHasher, BuildHasherDefault}; -use crate::hash::poseidon2::Poseidon2; -use crate::collections::bounded_vec::BoundedVec; +use crate::hash::{Hash, Hasher, BuildHasher}; // An unconstrained hash table with open addressing and quadratic probing. // Note that "unconstrained" here means that almost all operations on this diff --git a/noir/noir-repo/noir_stdlib/src/ec/consts/te.nr b/noir/noir-repo/noir_stdlib/src/ec/consts/te.nr index e25f373593a..8cea7654e39 100644 --- a/noir/noir-repo/noir_stdlib/src/ec/consts/te.nr +++ b/noir/noir-repo/noir_stdlib/src/ec/consts/te.nr @@ -1,4 +1,3 @@ -use crate::compat; use crate::ec::tecurve::affine::Point as TEPoint; use crate::ec::tecurve::affine::Curve as TECurve; diff --git a/noir/noir-repo/noir_stdlib/src/ec/mod.nr b/noir/noir-repo/noir_stdlib/src/ec/mod.nr index 86fb201408f..093852acc79 100644 --- a/noir/noir-repo/noir_stdlib/src/ec/mod.nr +++ b/noir/noir-repo/noir_stdlib/src/ec/mod.nr @@ -155,15 +155,12 @@ pub fn is_square(x: Field) -> bool { // Power function of two Field arguments of arbitrary size. // Adapted from std::field::pow_32. pub fn pow(x: Field, y: Field) -> Field { - // As in tests with minor modifications - let N_BITS = crate::field::modulus_num_bits(); - let mut r = 1 as Field; - let b = y.to_le_bits(N_BITS as u32); + let b: [u1; 254] = y.to_le_bits(); - for i in 0..N_BITS { + for i in 0..254 { r *= r; - r *= (b[N_BITS - 1 - i] as Field)*x + (1-b[N_BITS - 1 - i] as Field); + r *= (b[254 - 1 - i] as Field)*x + (1-b[254 - 1 - i] as Field); } r diff --git a/noir/noir-repo/noir_stdlib/src/ec/swcurve.nr b/noir/noir-repo/noir_stdlib/src/ec/swcurve.nr index 3ad3af41cff..4139492af63 100644 --- a/noir/noir-repo/noir_stdlib/src/ec/swcurve.nr +++ b/noir/noir-repo/noir_stdlib/src/ec/swcurve.nr @@ -350,11 +350,9 @@ mod curvegroup { // Scalar multiplication (p + ... 
+ p n times) pub fn mul(self, n: Field, p: Point) -> Point { - let N_BITS = crate::field::modulus_num_bits(); - // TODO: temporary workaround until issue 1354 is solved let mut n_as_bits: [u1; 254] = [0; 254]; - let tmp = n.to_le_bits(N_BITS as u32); + let tmp: [u1; 254] = n.to_le_bits(); for i in 0..254 { n_as_bits[i] = tmp[i]; } diff --git a/noir/noir-repo/noir_stdlib/src/ec/tecurve.nr b/noir/noir-repo/noir_stdlib/src/ec/tecurve.nr index aaf66f903cc..2ba3fd4401c 100644 --- a/noir/noir-repo/noir_stdlib/src/ec/tecurve.nr +++ b/noir/noir-repo/noir_stdlib/src/ec/tecurve.nr @@ -354,11 +354,9 @@ mod curvegroup { // Scalar multiplication (p + ... + p n times) pub fn mul(self, n: Field, p: Point) -> Point { - let N_BITS = crate::field::modulus_num_bits(); - // TODO: temporary workaround until issue 1354 is solved let mut n_as_bits: [u1; 254] = [0; 254]; - let tmp = n.to_le_bits(N_BITS as u32); + let tmp: [u1; 254] = n.to_le_bits(); for i in 0..254 { n_as_bits[i] = tmp[i]; } diff --git a/noir/noir-repo/noir_stdlib/src/eddsa.nr b/noir/noir-repo/noir_stdlib/src/eddsa.nr index 337969be90e..cfdbbe9c3d0 100644 --- a/noir/noir-repo/noir_stdlib/src/eddsa.nr +++ b/noir/noir-repo/noir_stdlib/src/eddsa.nr @@ -1,7 +1,6 @@ -use crate::hash::poseidon; use crate::ec::consts::te::baby_jubjub; use crate::ec::tecurve::affine::Point as TEPoint; -use crate::hash::{Hash, Hasher, BuildHasher, BuildHasherDefault}; +use crate::hash::Hasher; use crate::hash::poseidon::PoseidonHasher; use crate::default::Default; diff --git a/noir/noir-repo/noir_stdlib/src/field/bn254.nr b/noir/noir-repo/noir_stdlib/src/field/bn254.nr index ed0053694c7..0aa5ca0717b 100644 --- a/noir/noir-repo/noir_stdlib/src/field/bn254.nr +++ b/noir/noir-repo/noir_stdlib/src/field/bn254.nr @@ -8,7 +8,7 @@ global TWO_POW_128: Field = 0x100000000000000000000000000000000; // Decomposes a single field into two 16 byte fields. 
fn compute_decomposition(x: Field) -> (Field, Field) { - let x_bytes = x.to_le_bytes(32); + let x_bytes: [u8; 32] = x.to_le_bytes(); let mut low: Field = 0; let mut high: Field = 0; @@ -28,8 +28,8 @@ unconstrained pub(crate) fn decompose_hint(x: Field) -> (Field, Field) { } fn compute_lt(x: Field, y: Field, num_bytes: u32) -> bool { - let x_bytes = x.to_le_radix(256, num_bytes); - let y_bytes = y.to_le_radix(256, num_bytes); + let x_bytes: [u8; 32] = x.to_le_bytes(); + let y_bytes: [u8; 32] = y.to_le_bytes(); let mut x_is_lt = false; let mut done = false; for i in 0..num_bytes { @@ -142,7 +142,7 @@ pub fn lt(a: Field, b: Field) -> bool { mod tests { // TODO: Allow imports from "super" - use crate::field::bn254::{decompose_hint, decompose, compute_lt, assert_gt, gt, lt, TWO_POW_128, compute_lte, PLO, PHI}; + use crate::field::bn254::{decompose, compute_lt, assert_gt, gt, TWO_POW_128, compute_lte, PLO, PHI}; #[test] fn check_decompose() { diff --git a/noir/noir-repo/noir_stdlib/src/field/mod.nr b/noir/noir-repo/noir_stdlib/src/field/mod.nr index 534ac012beb..176f102321d 100644 --- a/noir/noir-repo/noir_stdlib/src/field/mod.nr +++ b/noir/noir-repo/noir_stdlib/src/field/mod.nr @@ -6,7 +6,9 @@ impl Field { /// /// # Failures /// Causes a constraint failure for `Field` values exceeding `2^{bit_size}`. + // docs:start:assert_max_bit_size pub fn assert_max_bit_size(self, bit_size: u32) { + // docs:end:assert_max_bit_size crate::assert_constant(bit_size); assert(bit_size < modulus_num_bits() as u32); self.__assert_max_bit_size(bit_size); @@ -15,45 +17,37 @@ impl Field { #[builtin(apply_range_constraint)] fn __assert_max_bit_size(self, bit_size: u32) {} - /// Decomposes `self` into its little endian bit decomposition as a `[u1]` slice of length `bit_size`. + /// Decomposes `self` into its little endian bit decomposition as a `[u1; N]` array. /// This slice will be zero padded should not all bits be necessary to represent `self`. 
/// /// # Failures - /// Causes a constraint failure for `Field` values exceeding `2^{bit_size}` as the resulting slice will not + /// Causes a constraint failure for `Field` values exceeding `2^N` as the resulting slice will not /// be able to represent the original `Field`. /// /// # Safety - /// Values of `bit_size` equal to or greater than the number of bits necessary to represent the `Field` modulus + /// Values of `N` equal to or greater than the number of bits necessary to represent the `Field` modulus /// (e.g. 254 for the BN254 field) allow for multiple bit decompositions. This is due to how the `Field` will /// wrap around due to overflow when verifying the decomposition. - pub fn to_le_bits(self: Self, bit_size: u32) -> [u1] { - crate::assert_constant(bit_size); - self.__to_le_bits(bit_size) - } + #[builtin(to_le_bits)] + // docs:start:to_le_bits + pub fn to_le_bits(self: Self) -> [u1; N] {} + // docs:end:to_le_bits - /// Decomposes `self` into its big endian bit decomposition as a `[u1]` slice of length `bit_size`. - /// This slice will be zero padded should not all bits be necessary to represent `self`. + /// Decomposes `self` into its big endian bit decomposition as a `[u1; N]` array. + /// This array will be zero padded should not all bits be necessary to represent `self`. /// /// # Failures - /// Causes a constraint failure for `Field` values exceeding `2^{bit_size}` as the resulting slice will not + /// Causes a constraint failure for `Field` values exceeding `2^N` as the resulting slice will not /// be able to represent the original `Field`. /// /// # Safety - /// Values of `bit_size` equal to or greater than the number of bits necessary to represent the `Field` modulus + /// Values of `N` equal to or greater than the number of bits necessary to represent the `Field` modulus /// (e.g. 254 for the BN254 field) allow for multiple bit decompositions. This is due to how the `Field` will /// wrap around due to overflow when verifying the decomposition. 
- pub fn to_be_bits(self: Self, bit_size: u32) -> [u1] { - crate::assert_constant(bit_size); - self.__to_be_bits(bit_size) - } - - /// See `Field.to_be_bits` - #[builtin(to_le_bits)] - fn __to_le_bits(self, _bit_size: u32) -> [u1] {} - - /// See `Field.to_le_bits` #[builtin(to_be_bits)] - fn __to_be_bits(self, bit_size: u32) -> [u1] {} + // docs:start:to_be_bits + pub fn to_be_bits(self: Self) -> [u1; N] {} + // docs:end:to_be_bits /// Decomposes `self` into its little endian byte decomposition as a `[u8]` slice of length `byte_size`. /// This slice will be zero padded should not all bytes be necessary to represent `self`. @@ -66,9 +60,11 @@ impl Field { /// Values of `byte_size` equal to or greater than the number of bytes necessary to represent the `Field` modulus /// (e.g. 32 for the BN254 field) allow for multiple byte decompositions. This is due to how the `Field` will /// wrap around due to overflow when verifying the decomposition. - pub fn to_le_bytes(self: Self, byte_size: u32) -> [u8] { - self.to_le_radix(256, byte_size) + // docs:start:to_le_bytes + pub fn to_le_bytes(self: Self) -> [u8; N] { + self.to_le_radix(256) } + // docs:end:to_le_bytes /// Decomposes `self` into its big endian byte decomposition as a `[u8]` slice of length `byte_size`. /// This slice will be zero padded should not all bytes be necessary to represent `self`. @@ -81,35 +77,39 @@ impl Field { /// Values of `byte_size` equal to or greater than the number of bytes necessary to represent the `Field` modulus /// (e.g. 32 for the BN254 field) allow for multiple byte decompositions. This is due to how the `Field` will /// wrap around due to overflow when verifying the decomposition. 
- pub fn to_be_bytes(self: Self, byte_size: u32) -> [u8] { - self.to_be_radix(256, byte_size) + // docs:start:to_be_bytes + pub fn to_be_bytes(self: Self) -> [u8; N] { + self.to_be_radix(256) } + // docs:end:to_be_bytes - pub fn to_le_radix(self: Self, radix: u32, result_len: u32) -> [u8] { + // docs:start:to_le_radix + pub fn to_le_radix(self: Self, radix: u32) -> [u8; N] { crate::assert_constant(radix); - crate::assert_constant(result_len); - self.__to_le_radix(radix, result_len) + self.__to_le_radix(radix) } + // docs:end:to_le_radix - pub fn to_be_radix(self: Self, radix: u32, result_len: u32) -> [u8] { + // docs:start:to_be_radix + pub fn to_be_radix(self: Self, radix: u32) -> [u8; N] { crate::assert_constant(radix); - crate::assert_constant(result_len); - self.__to_be_radix(radix, result_len) + self.__to_be_radix(radix) } + // docs:end:to_be_radix // `_radix` must be less than 256 #[builtin(to_le_radix)] - fn __to_le_radix(self, radix: u32, result_len: u32) -> [u8] {} + fn __to_le_radix(self, radix: u32) -> [u8; N] {} #[builtin(to_be_radix)] - fn __to_be_radix(self, radix: u32, result_len: u32) -> [u8] {} + fn __to_be_radix(self, radix: u32) -> [u8; N] {} // Returns self to the power of the given exponent value. 
// Caution: we assume the exponent fits into 32 bits // using a bigger bit size impacts negatively the performance and should be done only if the exponent does not fit in 32 bits pub fn pow_32(self, exponent: Field) -> Field { let mut r: Field = 1; - let b = exponent.to_le_bits(32); + let b: [u1; 32] = exponent.to_le_bits(); for i in 1..33 { r *= r; @@ -164,15 +164,14 @@ pub fn bytes32_to_field(bytes32: [u8; 32]) -> Field { } fn lt_fallback(x: Field, y: Field) -> bool { - let num_bytes = (modulus_num_bits() as u32 + 7) / 8; - let x_bytes = x.to_le_bytes(num_bytes); - let y_bytes = y.to_le_bytes(num_bytes); + let x_bytes: [u8; 32] = x.to_le_bytes(); + let y_bytes: [u8; 32] = y.to_le_bytes(); let mut x_is_lt = false; let mut done = false; - for i in 0..num_bytes { + for i in 0..32 { if (!done) { - let x_byte = x_bytes[num_bytes - 1 - i] as u8; - let y_byte = y_bytes[num_bytes - 1 - i] as u8; + let x_byte = x_bytes[32 - 1 - i] as u8; + let y_byte = y_bytes[32 - 1 - i] as u8; let bytes_match = x_byte == y_byte; if !bytes_match { x_is_lt = x_byte < y_byte; @@ -183,3 +182,58 @@ fn lt_fallback(x: Field, y: Field) -> bool { x_is_lt } +mod tests { + #[test] + // docs:start:to_be_bits_example + fn test_to_be_bits() { + let field = 2; + let bits: [u1; 8] = field.to_be_bits(); + assert_eq(bits, [0, 0, 0, 0, 0, 0, 1, 0]); + } + // docs:end:to_be_bits_example + + #[test] + // docs:start:to_le_bits_example + fn test_to_le_bits() { + let field = 2; + let bits: [u1; 8] = field.to_le_bits(); + assert_eq(bits, [0, 1, 0, 0, 0, 0, 0, 0]); + } + // docs:end:to_le_bits_example + + #[test] + // docs:start:to_be_bytes_example + fn test_to_be_bytes() { + let field = 2; + let bits: [u8; 8] = field.to_be_bytes(); + assert_eq(bits, [0, 0, 0, 0, 0, 0, 0, 2]); + } + // docs:end:to_be_bytes_example + + #[test] + // docs:start:to_le_bytes_example + fn test_to_le_bytes() { + let field = 2; + let bits: [u8; 8] = field.to_le_bytes(); + assert_eq(bits, [2, 0, 0, 0, 0, 0, 0, 0]); + } + // 
docs:end:to_le_bytes_example + + #[test] + // docs:start:to_be_radix_example + fn test_to_be_radix() { + let field = 2; + let bits: [u8; 8] = field.to_be_radix(256); + assert_eq(bits, [0, 0, 0, 0, 0, 0, 0, 2]); + } + // docs:end:to_be_radix_example + + #[test] + // docs:start:to_le_radix_example + fn test_to_le_radix() { + let field = 2; + let bits: [u8; 8] = field.to_le_radix(256); + assert_eq(bits, [2, 0, 0, 0, 0, 0, 0, 0]); + } + // docs:end:to_le_radix_example +} diff --git a/noir/noir-repo/noir_stdlib/src/hash/keccak.nr b/noir/noir-repo/noir_stdlib/src/hash/keccak.nr index 0c31d238f66..36787c7b4af 100644 --- a/noir/noir-repo/noir_stdlib/src/hash/keccak.nr +++ b/noir/noir-repo/noir_stdlib/src/hash/keccak.nr @@ -115,7 +115,7 @@ pub(crate) fn keccak256(input: [u8; N], message_size: u32) -> [u8; 3 let mut result = [0; 32]; for i in 0..4 { let lane = state[i] as Field; - let lane_le = lane.to_le_bytes(8); + let lane_le: [u8; 8] = lane.to_le_bytes(); for j in 0..8 { result[8*i+j] = lane_le[j]; } diff --git a/noir/noir-repo/noir_stdlib/src/hash/mod.nr b/noir/noir-repo/noir_stdlib/src/hash/mod.nr index 657e1cd8309..0e15595ff40 100644 --- a/noir/noir-repo/noir_stdlib/src/hash/mod.nr +++ b/noir/noir-repo/noir_stdlib/src/hash/mod.nr @@ -12,7 +12,7 @@ use crate::embedded_curve_ops::{EmbeddedCurvePoint, EmbeddedCurveScalar, multi_s use crate::meta::derive_via; // Kept for backwards compatibility -use sha256::{digest, sha256, sha256_compression, sha256_var}; +pub use sha256::{digest, sha256, sha256_compression, sha256_var}; #[foreign(blake2s)] // docs:start:blake2s @@ -118,7 +118,7 @@ pub fn hash_to_field(inputs: [Field]) -> Field { let mut sum = 0; for input in inputs { - let input_bytes: [u8; 32] = input.to_le_bytes(32).as_array(); + let input_bytes: [u8; 32] = input.to_le_bytes(); sum += crate::field::bytes32_to_field(blake2s(input_bytes)); } diff --git a/noir/noir-repo/noir_stdlib/src/hash/poseidon/bn254.nr b/noir/noir-repo/noir_stdlib/src/hash/poseidon/bn254.nr index 
103ab3d166a..848d561f755 100644 --- a/noir/noir-repo/noir_stdlib/src/hash/poseidon/bn254.nr +++ b/noir/noir-repo/noir_stdlib/src/hash/poseidon/bn254.nr @@ -2,7 +2,7 @@ mod perm; mod consts; -use crate::hash::poseidon::{PoseidonConfig, absorb}; +use crate::hash::poseidon::absorb; // Variable-length Poseidon-128 sponge as suggested in second bullet point of §3 of https://eprint.iacr.org/2019/458.pdf #[field(bn254)] diff --git a/noir/noir-repo/noir_stdlib/src/hash/poseidon/bn254/perm.nr b/noir/noir-repo/noir_stdlib/src/hash/poseidon/bn254/perm.nr index b7c022c5a00..b7dc05ef9de 100644 --- a/noir/noir-repo/noir_stdlib/src/hash/poseidon/bn254/perm.nr +++ b/noir/noir-repo/noir_stdlib/src/hash/poseidon/bn254/perm.nr @@ -1,7 +1,6 @@ // Instantiations of Poseidon permutation for the prime field of the same order as BN254 use crate::hash::poseidon::bn254::consts; use crate::hash::poseidon::permute; -use crate::hash::poseidon::PoseidonConfig; #[field(bn254)] pub fn x5_2(mut state: [Field; 2]) -> [Field; 2] { diff --git a/noir/noir-repo/noir_stdlib/src/hash/sha256.nr b/noir/noir-repo/noir_stdlib/src/hash/sha256.nr index 50cf2f965f9..7f255fe5586 100644 --- a/noir/noir-repo/noir_stdlib/src/hash/sha256.nr +++ b/noir/noir-repo/noir_stdlib/src/hash/sha256.nr @@ -194,7 +194,7 @@ pub fn sha256_var(msg: [u8; N], message_size: u64) -> [u8; 32] { } let len = 8 * message_size; - let len_bytes = (len as Field).to_be_bytes(8); + let len_bytes: [u8; 8] = (len as Field).to_be_bytes(); for i in 56..64 { assert_eq(msg_block[i], len_bytes[i - 56]); } @@ -222,7 +222,7 @@ unconstrained fn pad_msg_block(mut msg_block: [u8; 64], mut msg_byte_ptr: u64) - unconstrained fn attach_len_to_msg_block(mut msg_block: [u8; 64], mut msg_byte_ptr: u64, message_size: u64) -> [u8; 64] { let len = 8 * message_size; - let len_bytes = (len as Field).to_be_bytes(8); + let len_bytes: [u8; 8] = (len as Field).to_be_bytes(); for _i in 0..64 { // In any case, fill blocks up with zeros until the last 64 (i.e. 
until msg_byte_ptr = 56). if msg_byte_ptr < 56 { @@ -246,7 +246,7 @@ fn hash_final_block(msg_block: [u8; 64], mut state: [u32; 8]) -> [u8; 32] { // Return final hash as byte array for j in 0..8 { - let h_bytes = (state[7 - j] as Field).to_le_bytes(4); + let h_bytes: [u8; 4] = (state[7 - j] as Field).to_le_bytes(); for k in 0..4 { out_h[31 - 4*j - k] = h_bytes[k]; } diff --git a/noir/noir-repo/noir_stdlib/src/hash/sha512.nr b/noir/noir-repo/noir_stdlib/src/hash/sha512.nr index be255a594af..09de174103f 100644 --- a/noir/noir-repo/noir_stdlib/src/hash/sha512.nr +++ b/noir/noir-repo/noir_stdlib/src/hash/sha512.nr @@ -135,7 +135,7 @@ pub fn digest(msg: [u8; N]) -> [u8; 64] { } let len = 8 * msg.len(); - let len_bytes = (len as Field).to_le_bytes(16); + let len_bytes: [u8; 16] = (len as Field).to_le_bytes(); for _i in 0..128 { // In any case, fill blocks up with zeros until the last 128 (i.e. until i = 112). if i < 112 { @@ -155,7 +155,7 @@ pub fn digest(msg: [u8; N]) -> [u8; 64] { } // Return final hash as byte array for j in 0..8 { - let h_bytes = (h[7 - j] as Field).to_le_bytes(8); + let h_bytes: [u8; 8] = (h[7 - j] as Field).to_le_bytes(); for k in 0..8 { out_h[63 - 8*j - k] = h_bytes[k]; } diff --git a/noir/noir-repo/noir_stdlib/src/merkle.nr b/noir/noir-repo/noir_stdlib/src/merkle.nr index 17e539ab9b7..f6806874444 100644 --- a/noir/noir-repo/noir_stdlib/src/merkle.nr +++ b/noir/noir-repo/noir_stdlib/src/merkle.nr @@ -3,10 +3,9 @@ // XXX: In the future we can add an arity parameter // Returns the merkle root of the tree from the provided leaf, its hashpath, using a pedersen hash function. 
pub fn compute_merkle_root(leaf: Field, index: Field, hash_path: [Field; N]) -> Field { - let n = hash_path.len(); - let index_bits = index.to_le_bits(n as u32); + let index_bits: [u1; N] = index.to_le_bits(); let mut current = leaf; - for i in 0..n { + for i in 0..N { let path_bit = index_bits[i] as bool; let (hash_left, hash_right) = if path_bit { (hash_path[i], current) diff --git a/noir/noir-repo/noir_stdlib/src/meta/expr.nr b/noir/noir-repo/noir_stdlib/src/meta/expr.nr index 9b5eee03229..43638ad791b 100644 --- a/noir/noir-repo/noir_stdlib/src/meta/expr.nr +++ b/noir/noir-repo/noir_stdlib/src/meta/expr.nr @@ -13,6 +13,11 @@ impl Expr { fn as_assert(self) -> Option<(Expr, Option)> {} // docs:end:as_assert + #[builtin(expr_as_assert_eq)] + // docs:start:as_assert_eq + fn as_assert_eq(self) -> Option<(Expr, Expr, Option)> {} + // docs:end:as_assert_eq + #[builtin(expr_as_assign)] // docs:start:as_assign fn as_assign(self) -> Option<(Expr, Expr)> {} @@ -121,6 +126,7 @@ impl Expr { // docs:end:modify let result = modify_array(self, f); let result = result.or_else(|| modify_assert(self, f)); + let result = result.or_else(|| modify_assert_eq(self, f)); let result = result.or_else(|| modify_assign(self, f)); let result = result.or_else(|| modify_binary_op(self, f)); let result = result.or_else(|| modify_block(self, f)); @@ -178,6 +184,18 @@ fn modify_assert(expr: Expr, f: fn[Env](Expr) -> Option) -> Option(expr: Expr, f: fn[Env](Expr) -> Option) -> Option { + expr.as_assert_eq().map( + |expr: (Expr, Expr, Option)| { + let (lhs, rhs, msg) = expr; + let lhs = lhs.modify(f); + let rhs = rhs.modify(f); + let msg = msg.map(|msg: Expr| msg.modify(f)); + new_assert_eq(lhs, rhs, msg) + } + ) +} + fn modify_assign(expr: Expr, f: fn[Env](Expr) -> Option) -> Option { expr.as_assign().map( |expr: (Expr, Expr)| { @@ -360,6 +378,15 @@ fn new_assert(predicate: Expr, msg: Option) -> Expr { } } +fn new_assert_eq(lhs: Expr, rhs: Expr, msg: Option) -> Expr { + if msg.is_some() { + let 
msg = msg.unwrap(); + quote { assert_eq($lhs, $rhs, $msg) }.as_expr().unwrap() + } else { + quote { assert_eq($lhs, $rhs) }.as_expr().unwrap() + } +} + fn new_assign(lhs: Expr, rhs: Expr) -> Expr { quote { $lhs = $rhs }.as_expr().unwrap() } diff --git a/noir/noir-repo/noir_stdlib/src/ops/mod.nr b/noir/noir-repo/noir_stdlib/src/ops/mod.nr index 8b1903cff0b..6cf20432468 100644 --- a/noir/noir-repo/noir_stdlib/src/ops/mod.nr +++ b/noir/noir-repo/noir_stdlib/src/ops/mod.nr @@ -1,5 +1,5 @@ mod arith; mod bit; -use arith::{Add, Sub, Mul, Div, Rem, Neg}; -use bit::{Not, BitOr, BitAnd, BitXor, Shl, Shr}; +pub use arith::{Add, Sub, Mul, Div, Rem, Neg}; +pub use bit::{Not, BitOr, BitAnd, BitXor, Shl, Shr}; diff --git a/noir/noir-repo/noir_stdlib/src/prelude.nr b/noir/noir-repo/noir_stdlib/src/prelude.nr index 0d423e3556d..b14f38bdf55 100644 --- a/noir/noir-repo/noir_stdlib/src/prelude.nr +++ b/noir/noir-repo/noir_stdlib/src/prelude.nr @@ -1,9 +1,9 @@ -use crate::collections::vec::Vec; -use crate::collections::bounded_vec::BoundedVec; -use crate::option::Option; -use crate::{print, println, assert_constant}; -use crate::uint128::U128; -use crate::cmp::{Eq, Ord}; -use crate::default::Default; -use crate::convert::{From, Into}; -use crate::meta::{derive, derive_via}; +pub use crate::collections::vec::Vec; +pub use crate::collections::bounded_vec::BoundedVec; +pub use crate::option::Option; +pub use crate::{print, println, assert_constant}; +pub use crate::uint128::U128; +pub use crate::cmp::{Eq, Ord}; +pub use crate::default::Default; +pub use crate::convert::{From, Into}; +pub use crate::meta::{derive, derive_via}; diff --git a/noir/noir-repo/noir_stdlib/src/sha256.nr b/noir/noir-repo/noir_stdlib/src/sha256.nr index c3e18b13e91..ce217f7a689 100644 --- a/noir/noir-repo/noir_stdlib/src/sha256.nr +++ b/noir/noir-repo/noir_stdlib/src/sha256.nr @@ -1,2 +1,2 @@ // This file is kept for backwards compatibility. 
-use crate::hash::sha256::{digest, sha256_var}; +pub use crate::hash::sha256::{digest, sha256_var}; diff --git a/noir/noir-repo/noir_stdlib/src/sha512.nr b/noir/noir-repo/noir_stdlib/src/sha512.nr index 1ddbf6e0ae1..b474e27b416 100644 --- a/noir/noir-repo/noir_stdlib/src/sha512.nr +++ b/noir/noir-repo/noir_stdlib/src/sha512.nr @@ -1,2 +1,2 @@ // This file is kept for backwards compatibility. -use crate::hash::sha512::digest; +pub use crate::hash::sha512::digest; diff --git a/noir/noir-repo/noir_stdlib/src/uint128.nr b/noir/noir-repo/noir_stdlib/src/uint128.nr index a6d980c7f33..ac7f744cb3b 100644 --- a/noir/noir-repo/noir_stdlib/src/uint128.nr +++ b/noir/noir-repo/noir_stdlib/src/uint128.nr @@ -1,6 +1,5 @@ use crate::ops::{Add, Sub, Mul, Div, Rem, Not, BitOr, BitAnd, BitXor, Shl, Shr}; use crate::cmp::{Eq, Ord, Ordering}; -use crate::println; global pow64 : Field = 18446744073709551616; //2^64; global pow63 : Field = 9223372036854775808; // 2^63; @@ -45,8 +44,8 @@ impl U128 { } pub fn to_be_bytes(self: Self) -> [u8; 16] { - let lo = self.lo.to_be_bytes(8); - let hi = self.hi.to_be_bytes(8); + let lo: [u8; 8] = self.lo.to_be_bytes(); + let hi: [u8; 8] = self.hi.to_be_bytes(); let mut bytes = [0; 16]; for i in 0..8 { bytes[i] = hi[i]; @@ -56,8 +55,8 @@ impl U128 { } pub fn to_le_bytes(self: Self) -> [u8; 16] { - let lo = self.lo.to_le_bytes(8); - let hi = self.hi.to_le_bytes(8); + let lo: [u8; 8] = self.lo.to_le_bytes(); + let hi: [u8; 8] = self.hi.to_le_bytes(); let mut bytes = [0; 16]; for i in 0..8 { bytes[i] = lo[i]; @@ -295,12 +294,13 @@ impl BitXor for U128 { impl Shl for U128 { fn shl(self, other: u8) -> U128 { assert(other < 128, "attempt to shift left with overflow"); - let exp_bits = (other as Field).to_be_bits(7); + let exp_bits: [u1; 7] = (other as Field).to_be_bits(); let mut r: Field = 2; let mut y: Field = 1; for i in 1..8 { - y = (exp_bits[7-i] as Field) * (r * y) + (1 - exp_bits[7-i] as Field) * y; + let bit = exp_bits[7-i] as Field; + y = bit * (r * 
y) + (1 - bit) * y; r *= r; } self.wrapping_mul(U128::from_integer(y)) @@ -310,12 +310,13 @@ impl Shl for U128 { impl Shr for U128 { fn shr(self, other: u8) -> U128 { assert(other < 128, "attempt to shift right with overflow"); - let exp_bits = (other as Field).to_be_bits(7); + let exp_bits: [u1; 7] = (other as Field).to_be_bits(); let mut r: Field = 2; let mut y: Field = 1; for i in 1..8 { - y = (exp_bits[7-i] as Field) * (r * y) + (1 - exp_bits[7-i] as Field) * y; + let bit = exp_bits[7-i] as Field; + y = bit * (r * y) + (1 - bit) * y; r *= r; } self / U128::from_integer(y) @@ -435,7 +436,7 @@ mod tests { // but in the past it was possible to create a value > (1<<128) let a = U128::from_hex("0x~fffffffffffffffffffffffffffffff"); let b:Field= a.to_integer(); - let c= b.to_le_bytes(17); + let c: [u8; 17]= b.to_le_bytes(); assert(c[16] != 0); } diff --git a/noir/noir-repo/test_programs/compile_failure/builtin_function_declaration/src/main.nr b/noir/noir-repo/test_programs/compile_failure/builtin_function_declaration/src/main.nr index 473b5405691..1f0af24a243 100644 --- a/noir/noir-repo/test_programs/compile_failure/builtin_function_declaration/src/main.nr +++ b/noir/noir-repo/test_programs/compile_failure/builtin_function_declaration/src/main.nr @@ -2,9 +2,9 @@ // This would otherwise be a perfectly valid declaration of the `to_le_bits` builtin function #[builtin(to_le_bits)] -fn to_le_bits(_x: Field, _bit_size: u32) -> [u1] {} +fn to_le_bits(_x: Field) -> [u1: N] {} fn main(x: Field) -> pub u1 { - let bits = to_le_bits(x, 100); + let bits: [u1; 100] = to_le_bits(x); bits[0] -} \ No newline at end of file +} diff --git a/noir/noir-repo/test_programs/compile_failure/radix_non_constant_length/Nargo.toml b/noir/noir-repo/test_programs/compile_failure/radix_non_constant_length/Nargo.toml deleted file mode 100644 index 349698cc32d..00000000000 --- a/noir/noir-repo/test_programs/compile_failure/radix_non_constant_length/Nargo.toml +++ /dev/null @@ -1,6 +0,0 @@ -[package] 
-name = "radix_non_constant_length" -type = "bin" -authors = [""] - -[dependencies] diff --git a/noir/noir-repo/test_programs/compile_failure/radix_non_constant_length/src/main.nr b/noir/noir-repo/test_programs/compile_failure/radix_non_constant_length/src/main.nr deleted file mode 100644 index c6dd68d925c..00000000000 --- a/noir/noir-repo/test_programs/compile_failure/radix_non_constant_length/src/main.nr +++ /dev/null @@ -1,4 +0,0 @@ -fn main(x: Field, y: pub u32) { - let bytes = x.to_be_bytes(y); - assert(bytes[0] == 0); -} diff --git a/noir/noir-repo/test_programs/compile_success_empty/comptime_str_as_bytes/Nargo.toml b/noir/noir-repo/test_programs/compile_success_empty/comptime_str_as_bytes/Nargo.toml new file mode 100644 index 00000000000..f387e0e393a --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/comptime_str_as_bytes/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "comptime_str_as_bytes" +type = "bin" +authors = [""] +compiler_version = ">=0.24.0" + +[dependencies] diff --git a/noir/noir-repo/test_programs/compile_success_empty/comptime_str_as_bytes/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/comptime_str_as_bytes/src/main.nr new file mode 100644 index 00000000000..eefea67100f --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/comptime_str_as_bytes/src/main.nr @@ -0,0 +1,9 @@ +fn main() { + comptime + { + let hello_world_string = "hello world"; + let hello_world_bytes: [u8; 11] = [0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x20, 0x77, 0x6f, 0x72, 0x6c, 0x64]; + + assert_eq(hello_world_string.as_bytes(), hello_world_bytes); + } +} diff --git a/noir/noir-repo/test_programs/compile_success_empty/conditional_regression_to_bits/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/conditional_regression_to_bits/src/main.nr index 9b5d95c11bc..5205e2bff73 100644 --- a/noir/noir-repo/test_programs/compile_success_empty/conditional_regression_to_bits/src/main.nr +++ 
b/noir/noir-repo/test_programs/compile_success_empty/conditional_regression_to_bits/src/main.nr @@ -5,7 +5,7 @@ fn main() { let as_bits_hardcode_1 = [1, 0]; let mut c1 = 0; for i in 0..2 { - let mut as_bits = (arr[i] as Field).to_le_bits(2); + let mut as_bits: [u1; 2] = (arr[i] as Field).to_le_bits(); c1 = c1 + as_bits[0] as Field; if i == 0 { diff --git a/noir/noir-repo/test_programs/compile_success_empty/to_bits/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/to_bits/src/main.nr index 84ace83903a..918a2fad036 100644 --- a/noir/noir-repo/test_programs/compile_success_empty/to_bits/src/main.nr +++ b/noir/noir-repo/test_programs/compile_success_empty/to_bits/src/main.nr @@ -1,7 +1,7 @@ fn main() { let field = 1000; - let be_bits = field.to_be_bits(16); - let le_bits = field.to_le_bits(16); + let be_bits:[u1; 16] = field.to_be_bits(); + let le_bits:[u1; 16] = field.to_le_bits(); for i in 0..16 { let x = be_bits[i]; @@ -10,8 +10,8 @@ fn main() { } let x = 3; - let be_bits_x = x.to_be_bits(4); - let le_bits_x = x.to_le_bits(4); + let be_bits_x: [u1; 4] = x.to_be_bits(); + let le_bits_x: [u1; 4] = x.to_le_bits(); for i in 0..4 { let be_bit = be_bits_x[i]; diff --git a/noir/noir-repo/test_programs/execution_success/7_function/src/main.nr b/noir/noir-repo/test_programs/execution_success/7_function/src/main.nr index 95568dd4ccd..dc56f2bea4f 100644 --- a/noir/noir-repo/test_programs/execution_success/7_function/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/7_function/src/main.nr @@ -29,7 +29,7 @@ fn test2(z: Field, t: u32) { fn pow(base: Field, exponent: Field) -> Field { let mut r = 1 as Field; - let b = exponent.to_le_bits(32 as u32); + let b: [u1; 32] = exponent.to_le_bits(); for i in 1..33 { r = r*r; r = (b[32-i] as Field) * (r * base) + (1 - b[32-i] as Field) * r; diff --git a/noir/noir-repo/test_programs/execution_success/array_len/src/main.nr b/noir/noir-repo/test_programs/execution_success/array_len/src/main.nr index 
45c09b8a282..126fcd18d3f 100644 --- a/noir/noir-repo/test_programs/execution_success/array_len/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/array_len/src/main.nr @@ -20,5 +20,5 @@ fn main(x: Field, len3: [u8; 3], len4: [Field; 4]) { // Regression for #1023, ensure .len still works after calling to_le_bytes on a witness. // This was needed because normally .len is evaluated before acir-gen where to_le_bytes // on a witness is only evaluated during/after acir-gen. - assert(x.to_le_bytes(8).len() != 0); + assert(x.to_le_bytes::<8>().len() != 0); } diff --git a/noir/noir-repo/test_programs/execution_success/bigint/src/main.nr b/noir/noir-repo/test_programs/execution_success/bigint/src/main.nr index 188ccd01131..8592e6fb230 100644 --- a/noir/noir-repo/test_programs/execution_success/bigint/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/bigint/src/main.nr @@ -35,7 +35,7 @@ fn main(mut x: [u8; 5], y: [u8; 5]) { let d = d - b; let mut result = [0; 32]; - let result_slice = (a_field * b_field - b_field).to_le_bytes(32); + let result_slice: [u8; 32] = (a_field * b_field - b_field).to_le_bytes(); for i in 0..32 { result[i] = result_slice[i]; } diff --git a/noir/noir-repo/test_programs/execution_success/brillig_cow_regression/src/main.nr b/noir/noir-repo/test_programs/execution_success/brillig_cow_regression/src/main.nr index ae3adf6425c..adeadfc9f20 100644 --- a/noir/noir-repo/test_programs/execution_success/brillig_cow_regression/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/brillig_cow_regression/src/main.nr @@ -160,14 +160,14 @@ unconstrained fn main(kernel_data: DataToHash) -> pub [Field; NUM_FIELDS_PER_SHA let mut hash_input_flattened = [0; TX_EFFECT_HASH_FULL_FIELDS * 32 + TX_EFFECT_HASH_LOG_FIELDS * 16]; for offset in 0..TX_EFFECT_HASH_FULL_FIELDS { - let input_as_bytes = tx_effects_hash_inputs[offset].to_be_bytes(32); + let input_as_bytes: [u8; 32] = tx_effects_hash_inputs[offset].to_be_bytes(); for byte_index 
in 0..32 { hash_input_flattened[offset * 32 + byte_index] = input_as_bytes[byte_index]; } } for log_field_index in 0..TX_EFFECT_HASH_LOG_FIELDS { - let input_as_bytes = tx_effects_hash_inputs[TX_EFFECT_HASH_FULL_FIELDS + log_field_index].to_be_bytes(16); + let input_as_bytes: [u8; 16] = tx_effects_hash_inputs[TX_EFFECT_HASH_FULL_FIELDS + log_field_index].to_be_bytes(); for byte_index in 0..16 { hash_input_flattened[TX_EFFECT_HASH_FULL_FIELDS * 32 + log_field_index * 16 + byte_index] = input_as_bytes[byte_index]; } diff --git a/noir/noir-repo/test_programs/execution_success/schnorr/src/main.nr b/noir/noir-repo/test_programs/execution_success/schnorr/src/main.nr index cf22fd371d1..8b3b4735145 100644 --- a/noir/noir-repo/test_programs/execution_success/schnorr/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/schnorr/src/main.nr @@ -11,7 +11,7 @@ fn main( ) { // Regression for issue #2421 // We want to make sure that we can accurately verify a signature whose message is a slice vs. 
an array - let message_field_bytes = message_field.to_be_bytes(10); + let message_field_bytes: [u8; 10] = message_field.to_be_bytes(); let mut message2 = [0; 42]; for i in 0..10 { assert(message[i] == message_field_bytes[i]); @@ -52,7 +52,7 @@ pub fn verify_signature_noir(public_key: embedded_curve_ops::EmbeddedCurvePoi // compare the _hashes_ rather than field elements modulo r let pedersen_hash = std::hash::pedersen_hash([r.x, public_key.x, public_key.y]); let mut hash_input = [0; M]; - let pde = pedersen_hash.to_be_bytes(32); + let pde: [u8; 32] = pedersen_hash.to_be_bytes(); for i in 0..32 { hash_input[i] = pde[i]; @@ -103,7 +103,7 @@ pub fn assert_valid_signature(public_key: embedded_curve_ops::EmbeddedCurvePo // compare the _hashes_ rather than field elements modulo r let pedersen_hash = std::hash::pedersen_hash([r.x, public_key.x, public_key.y]); let mut hash_input = [0; M]; - let pde = pedersen_hash.to_be_bytes(32); + let pde: [u8; 32] = pedersen_hash.to_be_bytes(); for i in 0..32 { hash_input[i] = pde[i]; diff --git a/noir/noir-repo/test_programs/execution_success/simple_radix/src/main.nr b/noir/noir-repo/test_programs/execution_success/simple_radix/src/main.nr index 4a335e1bade..b905d2e533e 100644 --- a/noir/noir-repo/test_programs/execution_success/simple_radix/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/simple_radix/src/main.nr @@ -1,6 +1,6 @@ // Simple program to test to_radix fn main(x: Field) { - let bits = x.to_le_bits(3); + let bits: [u1; 3] = x.to_le_bits(); assert(bits[0] == 0); assert(bits[1] == 1); assert(bits[2] == 0); diff --git a/noir/noir-repo/test_programs/execution_success/slices/src/main.nr b/noir/noir-repo/test_programs/execution_success/slices/src/main.nr index 2bd4dbd97b0..47272869b04 100644 --- a/noir/noir-repo/test_programs/execution_success/slices/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/slices/src/main.nr @@ -315,7 +315,7 @@ fn regression_2370() { } fn regression_4418(x: Field) { - let 
mut crash = x.to_be_bytes(32); + let mut crash: [u8; 32] = x.to_be_bytes(); if x != 0 { crash[0] = 10; diff --git a/noir/noir-repo/test_programs/execution_success/to_be_bytes/src/main.nr b/noir/noir-repo/test_programs/execution_success/to_be_bytes/src/main.nr index 64d54ddff66..809d8ad4563 100644 --- a/noir/noir-repo/test_programs/execution_success/to_be_bytes/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/to_be_bytes/src/main.nr @@ -1,6 +1,6 @@ fn main(x: Field) -> pub [u8; 31] { // The result of this byte array will be big-endian - let byte_array = x.to_be_bytes(31); + let byte_array: [u8; 31] = x.to_be_bytes(); let mut bytes = [0; 31]; for i in 0..31 { bytes[i] = byte_array[i]; diff --git a/noir/noir-repo/test_programs/execution_success/to_bits/src/main.nr b/noir/noir-repo/test_programs/execution_success/to_bits/src/main.nr index 84ace83903a..dc2ff4be394 100644 --- a/noir/noir-repo/test_programs/execution_success/to_bits/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/to_bits/src/main.nr @@ -1,7 +1,7 @@ fn main() { let field = 1000; - let be_bits = field.to_be_bits(16); - let le_bits = field.to_le_bits(16); + let be_bits: [u1; 16] = field.to_be_bits(); + let le_bits: [u1; 16] = field.to_le_bits(); for i in 0..16 { let x = be_bits[i]; @@ -10,8 +10,8 @@ fn main() { } let x = 3; - let be_bits_x = x.to_be_bits(4); - let le_bits_x = x.to_le_bits(4); + let be_bits_x: [u1; 4] = x.to_be_bits(); + let le_bits_x: [u1; 4] = x.to_le_bits(); for i in 0..4 { let be_bit = be_bits_x[i]; diff --git a/noir/noir-repo/test_programs/execution_success/to_bytes_consistent/src/main.nr b/noir/noir-repo/test_programs/execution_success/to_bytes_consistent/src/main.nr index 638b34c9bab..a51d52da855 100644 --- a/noir/noir-repo/test_programs/execution_success/to_bytes_consistent/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/to_bytes_consistent/src/main.nr @@ -3,9 +3,9 @@ // with constant inputs or with witness inputs. 
// x = 2040124 fn main(x: Field) { - let byte_array = x.to_be_bytes(31); + let byte_array: [u8; 31] = x.to_be_bytes(); let x_as_constant = 2040124; - let constant_byte_array = x_as_constant.to_be_bytes(31); + let constant_byte_array: [u8; 31] = x_as_constant.to_be_bytes(); assert(constant_byte_array.len() == byte_array.len()); for i in 0..constant_byte_array.len() { assert(constant_byte_array[i] == byte_array[i]); diff --git a/noir/noir-repo/test_programs/execution_success/to_bytes_integration/src/main.nr b/noir/noir-repo/test_programs/execution_success/to_bytes_integration/src/main.nr index 21c4ad90bfe..21245378971 100644 --- a/noir/noir-repo/test_programs/execution_success/to_bytes_integration/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/to_bytes_integration/src/main.nr @@ -1,7 +1,7 @@ fn main(x: Field, a: Field) { let y: Field = 2040124; - let be_byte_array = y.to_be_bytes(31); - let le_byte_array = x.to_le_bytes(31); + let be_byte_array: [u8; 31] = y.to_be_bytes(); + let le_byte_array: [u8; 31] = x.to_le_bytes(); assert(le_byte_array[0] == 60); assert(le_byte_array[0] == be_byte_array[30]); @@ -10,14 +10,14 @@ fn main(x: Field, a: Field) { let z = 0 - 1; let p_bytes = std::field::modulus_le_bytes(); - let z_bytes = z.to_le_bytes(32); + let z_bytes: [u8; 32] = z.to_le_bytes(); assert(p_bytes[10] == z_bytes[10]); assert(p_bytes[0] == z_bytes[0] as u8 + 1 as u8); let p_bits = std::field::modulus_le_bits(); - let z_bits = z.to_le_bits(std::field::modulus_num_bits() as u32); + let z_bits: [u1; 254] = z.to_le_bits(); assert(z_bits[0] == 0); assert(p_bits[100] == z_bits[100]); - a.to_le_bits(std::field::modulus_num_bits() as u32); + let _: [u1; 254] = a.to_le_bits(); } diff --git a/noir/noir-repo/test_programs/execution_success/to_le_bytes/src/main.nr b/noir/noir-repo/test_programs/execution_success/to_le_bytes/src/main.nr index a0b48efe528..4e232b025aa 100644 --- a/noir/noir-repo/test_programs/execution_success/to_le_bytes/src/main.nr +++ 
b/noir/noir-repo/test_programs/execution_success/to_le_bytes/src/main.nr @@ -1,6 +1,6 @@ fn main(x: Field, cond: bool) -> pub [u8; 31] { // The result of this byte array will be little-endian - let byte_array = x.to_le_bytes(31); + let byte_array: [u8; 31] = x.to_le_bytes(); assert(byte_array.len() == 31); let mut bytes = [0; 31]; @@ -10,7 +10,7 @@ fn main(x: Field, cond: bool) -> pub [u8; 31] { if cond { // We've set x = "2040124" so we shouldn't be able to represent this as a single byte. - let bad_byte_array = x.to_le_bytes(1); + let bad_byte_array: [u8; 1] = x.to_le_bytes(); assert_eq(bad_byte_array.len(), 1); } diff --git a/noir/noir-repo/test_programs/noir_test_success/comptime_expr/src/main.nr b/noir/noir-repo/test_programs/noir_test_success/comptime_expr/src/main.nr index 1488783c72c..c082c1dde33 100644 --- a/noir/noir-repo/test_programs/noir_test_success/comptime_expr/src/main.nr +++ b/noir/noir-repo/test_programs/noir_test_success/comptime_expr/src/main.nr @@ -63,6 +63,44 @@ mod tests { } } + #[test] + fn test_expr_as_assert_eq() { + comptime + { + let expr = quote { assert_eq(true, false) }.as_expr().unwrap(); + let (lhs, rhs, msg) = expr.as_assert_eq().unwrap(); + assert_eq(lhs.as_bool().unwrap(), true); + assert_eq(rhs.as_bool().unwrap(), false); + assert(msg.is_none()); + + let expr = quote { assert_eq(false, true, "oops") }.as_expr().unwrap(); + let (lhs, rhs, msg) = expr.as_assert_eq().unwrap(); + assert_eq(lhs.as_bool().unwrap(), false); + assert_eq(rhs.as_bool().unwrap(), true); + assert(msg.is_some()); + } + } + + #[test] + fn test_expr_mutate_for_assert_eq() { + comptime + { + let expr = quote { assert_eq(1, 2) }.as_expr().unwrap(); + let expr = expr.modify(times_two); + let (lhs, rhs, msg) = expr.as_assert_eq().unwrap(); + assert_eq(lhs.as_integer().unwrap(), (2, false)); + assert_eq(rhs.as_integer().unwrap(), (4, false)); + assert(msg.is_none()); + + let expr = quote { assert_eq(1, 2, 3) }.as_expr().unwrap(); + let expr = 
expr.modify(times_two); + let (lhs, rhs, msg) = expr.as_assert_eq().unwrap(); + assert_eq(lhs.as_integer().unwrap(), (2, false)); + assert_eq(rhs.as_integer().unwrap(), (4, false)); + assert_eq(msg.unwrap().as_integer().unwrap(), (6, false)); + } + } + #[test] fn test_expr_as_assign() { comptime diff --git a/noir/noir-repo/tooling/lsp/src/lib.rs b/noir/noir-repo/tooling/lsp/src/lib.rs index c2885543844..4a764f4268b 100644 --- a/noir/noir-repo/tooling/lsp/src/lib.rs +++ b/noir/noir-repo/tooling/lsp/src/lib.rs @@ -22,8 +22,8 @@ use fm::{codespan_files as files, FileManager}; use fxhash::FxHashSet; use lsp_types::{ request::{ - Completion, DocumentSymbolRequest, HoverRequest, InlayHintRequest, PrepareRenameRequest, - References, Rename, SignatureHelpRequest, + CodeActionRequest, Completion, DocumentSymbolRequest, HoverRequest, InlayHintRequest, + PrepareRenameRequest, References, Rename, SignatureHelpRequest, }, CodeLens, }; @@ -51,21 +51,24 @@ use notifications::{ on_did_open_text_document, on_did_save_text_document, on_exit, on_initialized, }; use requests::{ - on_code_lens_request, on_completion_request, on_document_symbol_request, on_formatting, - on_goto_declaration_request, on_goto_definition_request, on_goto_type_definition_request, - on_hover_request, on_initialize, on_inlay_hint_request, on_prepare_rename_request, - on_profile_run_request, on_references_request, on_rename_request, on_shutdown, - on_signature_help_request, on_test_run_request, on_tests_request, LspInitializationOptions, + on_code_action_request, on_code_lens_request, on_completion_request, + on_document_symbol_request, on_formatting, on_goto_declaration_request, + on_goto_definition_request, on_goto_type_definition_request, on_hover_request, on_initialize, + on_inlay_hint_request, on_prepare_rename_request, on_profile_run_request, + on_references_request, on_rename_request, on_shutdown, on_signature_help_request, + on_test_run_request, on_tests_request, LspInitializationOptions, }; use 
serde_json::Value as JsonValue; use thiserror::Error; use tower::Service; +mod modules; mod notifications; mod requests; mod solver; mod types; mod utils; +mod visibility; #[cfg(test)] mod test_utils; @@ -144,6 +147,7 @@ impl NargoLspService { .request::(on_inlay_hint_request) .request::(on_completion_request) .request::(on_signature_help_request) + .request::(on_code_action_request) .notification::(on_initialized) .notification::(on_did_change_configuration) .notification::(on_did_open_text_document) diff --git a/noir/noir-repo/tooling/lsp/src/modules.rs b/noir/noir-repo/tooling/lsp/src/modules.rs new file mode 100644 index 00000000000..54074dbd94c --- /dev/null +++ b/noir/noir-repo/tooling/lsp/src/modules.rs @@ -0,0 +1,131 @@ +use std::collections::BTreeMap; + +use noirc_frontend::{ + ast::ItemVisibility, + graph::CrateId, + hir::def_map::{CrateDefMap, ModuleId}, + macros_api::{ModuleDefId, NodeInterner}, + node_interner::ReferenceId, +}; + +use crate::visibility::is_visible; + +pub(crate) fn get_parent_module( + interner: &NodeInterner, + module_def_id: ModuleDefId, +) -> Option { + let reference_id = module_def_id_to_reference_id(module_def_id); + interner.reference_module(reference_id).copied() +} + +pub(crate) fn get_parent_module_id( + def_maps: &BTreeMap, + module_id: ModuleId, +) -> Option { + let crate_def_map = &def_maps[&module_id.krate]; + let module_data = &crate_def_map.modules()[module_id.local_id.0]; + module_data.parent.map(|parent| ModuleId { krate: module_id.krate, local_id: parent }) +} + +pub(crate) fn module_def_id_to_reference_id(module_def_id: ModuleDefId) -> ReferenceId { + match module_def_id { + ModuleDefId::ModuleId(id) => ReferenceId::Module(id), + ModuleDefId::FunctionId(id) => ReferenceId::Function(id), + ModuleDefId::TypeId(id) => ReferenceId::Struct(id), + ModuleDefId::TypeAliasId(id) => ReferenceId::Alias(id), + ModuleDefId::TraitId(id) => ReferenceId::Trait(id), + ModuleDefId::GlobalId(id) => ReferenceId::Global(id), + } +} + 
+/// Returns the fully-qualified path of the given `ModuleDefId` relative to `current_module_id`: +/// - If `ModuleDefId` is a module, that module's path is returned +/// - Otherwise, that item's parent module's path is returned +pub(crate) fn module_full_path( + module_def_id: ModuleDefId, + visibility: ItemVisibility, + current_module_id: ModuleId, + current_module_parent_id: Option, + interner: &NodeInterner, +) -> Option { + let full_path; + if let ModuleDefId::ModuleId(module_id) = module_def_id { + if !is_visible(visibility, current_module_id, module_id) { + return None; + } + + full_path = + module_id_path(module_id, ¤t_module_id, current_module_parent_id, interner); + } else { + let Some(parent_module) = get_parent_module(interner, module_def_id) else { + return None; + }; + + if !is_visible(visibility, current_module_id, parent_module) { + return None; + } + + full_path = + module_id_path(parent_module, ¤t_module_id, current_module_parent_id, interner); + } + Some(full_path) +} + +/// Returns the path to reach an item inside `target_module_id` from inside `current_module_id`. +/// Returns a relative path if possible. 
+pub(crate) fn module_id_path( + target_module_id: ModuleId, + current_module_id: &ModuleId, + current_module_parent_id: Option, + interner: &NodeInterner, +) -> String { + if Some(target_module_id) == current_module_parent_id { + return "super".to_string(); + } + + let mut segments: Vec<&str> = Vec::new(); + let mut is_relative = false; + + if let Some(module_attributes) = interner.try_module_attributes(&target_module_id) { + segments.push(&module_attributes.name); + + let mut current_attributes = module_attributes; + loop { + let Some(parent_local_id) = current_attributes.parent else { + break; + }; + + let parent_module_id = + &ModuleId { krate: target_module_id.krate, local_id: parent_local_id }; + + if current_module_id == parent_module_id { + is_relative = true; + break; + } + + if current_module_parent_id == Some(*parent_module_id) { + segments.push("super"); + is_relative = true; + break; + } + + let Some(parent_attributes) = interner.try_module_attributes(parent_module_id) else { + break; + }; + + segments.push(&parent_attributes.name); + current_attributes = parent_attributes; + } + } + + if !is_relative { + // We don't record module attributes for the root module, + // so we handle that case separately + if let CrateId::Root(_) = target_module_id.krate { + segments.push("crate"); + } + } + + segments.reverse(); + segments.join("::") +} diff --git a/noir/noir-repo/tooling/lsp/src/notifications/mod.rs b/noir/noir-repo/tooling/lsp/src/notifications/mod.rs index 8b030c9e0aa..d1ffdb55066 100644 --- a/noir/noir-repo/tooling/lsp/src/notifications/mod.rs +++ b/noir/noir-repo/tooling/lsp/src/notifications/mod.rs @@ -2,7 +2,8 @@ use std::ops::ControlFlow; use crate::insert_all_files_for_workspace_into_file_manager; use async_lsp::{ErrorCode, LanguageClient, ResponseError}; -use noirc_driver::{check_crate, file_manager_with_stdlib, CheckOptions}; +use lsp_types::DiagnosticTag; +use noirc_driver::{check_crate, file_manager_with_stdlib}; use 
noirc_errors::{DiagnosticKind, FileDiagnostic}; use crate::types::{ @@ -132,11 +133,7 @@ pub(crate) fn process_workspace_for_noir_document( let (mut context, crate_id) = crate::prepare_package(&workspace_file_manager, &parsed_files, package); - let options = CheckOptions { - error_on_unused_imports: package.error_on_unused_imports(), - ..Default::default() - }; - let file_diagnostics = match check_crate(&mut context, crate_id, &options) { + let file_diagnostics = match check_crate(&mut context, crate_id, &Default::default()) { Ok(((), warnings)) => warnings, Err(errors_and_warnings) => errors_and_warnings, }; @@ -189,10 +186,20 @@ pub(crate) fn process_workspace_for_noir_document( DiagnosticKind::Info => DiagnosticSeverity::INFORMATION, DiagnosticKind::Bug => DiagnosticSeverity::WARNING, }; + + let mut tags = Vec::new(); + if diagnostic.unnecessary { + tags.push(DiagnosticTag::UNNECESSARY); + } + if diagnostic.deprecated { + tags.push(DiagnosticTag::DEPRECATED); + } + Some(Diagnostic { range, severity: Some(severity), message: diagnostic.message, + tags: if tags.is_empty() { None } else { Some(tags) }, ..Default::default() }) }) diff --git a/noir/noir-repo/tooling/lsp/src/requests/code_action.rs b/noir/noir-repo/tooling/lsp/src/requests/code_action.rs new file mode 100644 index 00000000000..8e153bb0b46 --- /dev/null +++ b/noir/noir-repo/tooling/lsp/src/requests/code_action.rs @@ -0,0 +1,312 @@ +use std::{ + collections::{BTreeMap, HashMap}, + future::{self, Future}, +}; + +use async_lsp::ResponseError; +use fm::{FileId, FileMap, PathString}; +use lsp_types::{ + CodeAction, CodeActionKind, CodeActionOrCommand, CodeActionParams, CodeActionResponse, + Position, Range, TextDocumentPositionParams, TextEdit, Url, WorkspaceEdit, +}; +use noirc_errors::{Location, Span}; +use noirc_frontend::{ + ast::{Ident, Path, Visitor}, + graph::CrateId, + hir::def_map::{CrateDefMap, LocalModuleId, ModuleId}, + macros_api::{ModuleDefId, NodeInterner}, + parser::{Item, ItemKind, 
ParsedSubModule}, + ParsedModule, +}; + +use crate::{ + byte_span_to_range, + modules::{get_parent_module_id, module_full_path, module_id_path}, + utils, LspState, +}; + +use super::{process_request, to_lsp_location}; + +#[cfg(test)] +mod tests; + +pub(crate) fn on_code_action_request( + state: &mut LspState, + params: CodeActionParams, +) -> impl Future, ResponseError>> { + let uri = params.text_document.clone().uri; + let position = params.range.start; + let text_document_position_params = + TextDocumentPositionParams { text_document: params.text_document, position }; + + let result = process_request(state, text_document_position_params, |args| { + let path = PathString::from_path(uri.to_file_path().unwrap()); + args.files.get_file_id(&path).and_then(|file_id| { + utils::position_to_byte_index(args.files, file_id, &position).and_then(|byte_index| { + let file = args.files.get_file(file_id).unwrap(); + let source = file.source(); + let (parsed_module, _errors) = noirc_frontend::parse_program(source); + + let mut finder = CodeActionFinder::new( + uri, + args.files, + file_id, + source, + byte_index, + args.crate_id, + args.def_maps, + args.interner, + ); + finder.find(&parsed_module) + }) + }) + }); + future::ready(result) +} + +struct CodeActionFinder<'a> { + uri: Url, + files: &'a FileMap, + file: FileId, + lines: Vec<&'a str>, + byte_index: usize, + /// The module ID in scope. This might change as we traverse the AST + /// if we are analyzing something inside an inline module declaration. 
+ module_id: ModuleId, + def_maps: &'a BTreeMap, + interner: &'a NodeInterner, + /// How many nested `mod` we are in deep + nesting: usize, + /// The line where an auto_import must be inserted + auto_import_line: usize, + code_actions: Vec, +} + +impl<'a> CodeActionFinder<'a> { + #[allow(clippy::too_many_arguments)] + fn new( + uri: Url, + files: &'a FileMap, + file: FileId, + source: &'a str, + byte_index: usize, + krate: CrateId, + def_maps: &'a BTreeMap, + interner: &'a NodeInterner, + ) -> Self { + // Find the module the current file belongs to + let def_map = &def_maps[&krate]; + let local_id = if let Some((module_index, _)) = + def_map.modules().iter().find(|(_, module_data)| module_data.location.file == file) + { + LocalModuleId(module_index) + } else { + def_map.root() + }; + let module_id = ModuleId { krate, local_id }; + Self { + uri, + files, + file, + lines: source.lines().collect(), + byte_index, + module_id, + def_maps, + interner, + nesting: 0, + auto_import_line: 0, + code_actions: vec![], + } + } + + fn find(&mut self, parsed_module: &ParsedModule) -> Option { + parsed_module.accept(self); + + if self.code_actions.is_empty() { + return None; + } + + let mut code_actions = std::mem::take(&mut self.code_actions); + code_actions.sort_by_key(|code_action| { + let CodeActionOrCommand::CodeAction(code_action) = code_action else { + panic!("We only gather code actions, never commands"); + }; + code_action.title.clone() + }); + + Some(code_actions) + } + + fn push_import_code_action(&mut self, full_path: &str) { + let line = self.auto_import_line as u32; + let character = (self.nesting * 4) as u32; + let indent = " ".repeat(self.nesting * 4); + let mut newlines = "\n"; + + // If the line we are inserting into is not an empty line, insert an extra line to make some room + if let Some(line_text) = self.lines.get(line as usize) { + if !line_text.trim().is_empty() { + newlines = "\n\n"; + } + } + + let title = format!("Import {}", full_path); + let text_edit = 
TextEdit { + range: Range { start: Position { line, character }, end: Position { line, character } }, + new_text: format!("use {};{}{}", full_path, newlines, indent), + }; + + let code_action = self.new_quick_fix(title, text_edit); + self.code_actions.push(CodeActionOrCommand::CodeAction(code_action)); + } + + fn push_qualify_code_action(&mut self, ident: &Ident, prefix: &str, full_path: &str) { + let Some(range) = byte_span_to_range( + self.files, + self.file, + ident.span().start() as usize..ident.span().start() as usize, + ) else { + return; + }; + + let title = format!("Qualify as {}", full_path); + let text_edit = TextEdit { range, new_text: format!("{}::", prefix) }; + + let code_action = self.new_quick_fix(title, text_edit); + self.code_actions.push(CodeActionOrCommand::CodeAction(code_action)); + } + + fn new_quick_fix(&self, title: String, text_edit: TextEdit) -> CodeAction { + let mut changes = HashMap::new(); + changes.insert(self.uri.clone(), vec![text_edit]); + + let workspace_edit = WorkspaceEdit { + changes: Some(changes), + document_changes: None, + change_annotations: None, + }; + + CodeAction { + title, + kind: Some(CodeActionKind::QUICKFIX), + diagnostics: None, + edit: Some(workspace_edit), + command: None, + is_preferred: None, + disabled: None, + data: None, + } + } + + fn includes_span(&self, span: Span) -> bool { + span.start() as usize <= self.byte_index && self.byte_index <= span.end() as usize + } +} + +impl<'a> Visitor for CodeActionFinder<'a> { + fn visit_item(&mut self, item: &Item) -> bool { + if let ItemKind::Import(..) 
= &item.kind { + if let Some(lsp_location) = to_lsp_location(self.files, self.file, item.span) { + self.auto_import_line = (lsp_location.range.end.line + 1) as usize; + } + } + + self.includes_span(item.span) + } + + fn visit_parsed_submodule(&mut self, parsed_sub_module: &ParsedSubModule, span: Span) -> bool { + // Switch `self.module_id` to the submodule + let previous_module_id = self.module_id; + + let def_map = &self.def_maps[&self.module_id.krate]; + let Some(module_data) = def_map.modules().get(self.module_id.local_id.0) else { + return false; + }; + if let Some(child_module) = module_data.children.get(&parsed_sub_module.name) { + self.module_id = ModuleId { krate: self.module_id.krate, local_id: *child_module }; + } + + let old_auto_import_line = self.auto_import_line; + self.nesting += 1; + + if let Some(lsp_location) = to_lsp_location(self.files, self.file, span) { + self.auto_import_line = (lsp_location.range.start.line + 1) as usize; + } + + parsed_sub_module.contents.accept(self); + + // Restore the old module before continuing + self.module_id = previous_module_id; + self.nesting -= 1; + self.auto_import_line = old_auto_import_line; + + false + } + + fn visit_path(&mut self, path: &Path) { + if path.segments.len() != 1 { + return; + } + + let ident = &path.segments[0].ident; + if !self.includes_span(ident.span()) { + return; + } + + let location = Location::new(ident.span(), self.file); + if self.interner.find_referenced(location).is_some() { + return; + } + + let current_module_parent_id = get_parent_module_id(self.def_maps, self.module_id); + + // The Path doesn't resolve to anything so it means it's an error and maybe we + // can suggest an import or to fully-qualify the path. 
+ for (name, entries) in self.interner.get_auto_import_names() { + if name != &ident.0.contents { + continue; + } + + for (module_def_id, visibility, defining_module) in entries { + let module_full_path = if let Some(defining_module) = defining_module { + module_id_path( + *defining_module, + &self.module_id, + current_module_parent_id, + self.interner, + ) + } else { + let Some(module_full_path) = module_full_path( + *module_def_id, + *visibility, + self.module_id, + current_module_parent_id, + self.interner, + ) else { + continue; + }; + module_full_path + }; + + let full_path = if defining_module.is_some() + || !matches!(module_def_id, ModuleDefId::ModuleId(..)) + { + format!("{}::{}", module_full_path, name) + } else { + module_full_path.clone() + }; + + let qualify_prefix = if let ModuleDefId::ModuleId(..) = module_def_id { + let mut segments: Vec<_> = module_full_path.split("::").collect(); + segments.pop(); + segments.join("::") + } else { + module_full_path + }; + + self.push_import_code_action(&full_path); + self.push_qualify_code_action(ident, &qualify_prefix, &full_path); + } + } + } +} diff --git a/noir/noir-repo/tooling/lsp/src/requests/code_action/tests.rs b/noir/noir-repo/tooling/lsp/src/requests/code_action/tests.rs new file mode 100644 index 00000000000..a5a19049141 --- /dev/null +++ b/noir/noir-repo/tooling/lsp/src/requests/code_action/tests.rs @@ -0,0 +1,236 @@ +use crate::{notifications::on_did_open_text_document, test_utils}; + +use lsp_types::{ + CodeActionContext, CodeActionOrCommand, CodeActionParams, CodeActionResponse, + DidOpenTextDocumentParams, PartialResultParams, Position, Range, TextDocumentIdentifier, + TextDocumentItem, TextEdit, WorkDoneProgressParams, +}; +use tokio::test; + +use super::on_code_action_request; + +async fn get_code_action(src: &str) -> CodeActionResponse { + let (mut state, noir_text_document) = test_utils::init_lsp_server("document_symbol").await; + + let (line, column) = src + .lines() + .enumerate() + 
.find_map(|(line_index, line)| line.find(">|<").map(|char_index| (line_index, char_index))) + .expect("Expected to find one >|< in the source code"); + + let src = src.replace(">|<", ""); + + on_did_open_text_document( + &mut state, + DidOpenTextDocumentParams { + text_document: TextDocumentItem { + uri: noir_text_document.clone(), + language_id: "noir".to_string(), + version: 0, + text: src.to_string(), + }, + }, + ); + + let position = Position { line: line as u32, character: column as u32 }; + + on_code_action_request( + &mut state, + CodeActionParams { + text_document: TextDocumentIdentifier { uri: noir_text_document }, + range: Range { start: position, end: position }, + context: CodeActionContext { diagnostics: Vec::new(), only: None, trigger_kind: None }, + work_done_progress_params: WorkDoneProgressParams { work_done_token: None }, + partial_result_params: PartialResultParams { partial_result_token: None }, + }, + ) + .await + .expect("Could not execute on_code_action_request") + .unwrap() +} + +async fn assert_code_action(title: &str, src: &str, expected: &str) { + let actions = get_code_action(src).await; + let action = actions + .iter() + .filter_map(|action| { + if let CodeActionOrCommand::CodeAction(action) = action { + if action.title == title { + Some(action) + } else { + None + } + } else { + None + } + }) + .next() + .expect("Couldn't find an action with the given title"); + + let workspace_edit = action.edit.as_ref().unwrap(); + let text_edits = workspace_edit.changes.as_ref().unwrap().iter().next().unwrap().1; + assert_eq!(text_edits.len(), 1); + + let result = apply_text_edit(&src.replace(">|<", ""), &text_edits[0]); + assert_eq!(result, expected); +} + +fn apply_text_edit(src: &str, text_edit: &TextEdit) -> String { + let mut lines: Vec<_> = src.lines().collect(); + assert_eq!(text_edit.range.start.line, text_edit.range.end.line); + + let mut line = lines[text_edit.range.start.line as usize].to_string(); + line.replace_range( + 
text_edit.range.start.character as usize..text_edit.range.end.character as usize, + &text_edit.new_text, + ); + lines[text_edit.range.start.line as usize] = &line; + lines.join("\n") +} + +#[test] +async fn test_qualify_code_action_for_struct() { + let title = "Qualify as foo::bar::SomeTypeInBar"; + + let src = r#" + mod foo { + mod bar { + struct SomeTypeInBar {} + } + } + + fn foo(x: SomeType>||||| { lines: Vec<&'a str>, byte_index: usize, byte: Option, - /// The module ID of the current file. - root_module_id: ModuleId, /// The module ID in scope. This might change as we traverse the AST /// if we are analyzing something inside an inline module declaration. module_id: ModuleId, @@ -131,7 +126,6 @@ impl<'a> NodeFinder<'a> { ) -> Self { // Find the module the current file belongs to let def_map = &def_maps[&krate]; - let root_module_id = ModuleId { krate, local_id: def_map.root() }; let local_id = if let Some((module_index, _)) = def_map.modules().iter().find(|(_, module_data)| module_data.location.file == file) { @@ -146,7 +140,6 @@ impl<'a> NodeFinder<'a> { lines: source.lines().collect(), byte_index, byte, - root_module_id, module_id, def_maps, dependencies, @@ -278,12 +271,6 @@ impl<'a> NodeFinder<'a> { let is_single_segment = !after_colons && idents.is_empty() && path.kind == PathKind::Plain; let module_id; - let module_completion_kind = if after_colons || !idents.is_empty() { - ModuleCompletionKind::DirectChildren - } else { - ModuleCompletionKind::AllVisibleItems - }; - // When completing in the middle of an ident, we don't want to complete // with function parameters because there might already be function parameters, // and in the middle of a path it leads to code that won't compile @@ -346,7 +333,6 @@ impl<'a> NodeFinder<'a> { &prefix, path.kind, at_root, - module_completion_kind, function_completion_kind, requested_items, ); @@ -447,7 +433,6 @@ impl<'a> NodeFinder<'a> { } } - let module_completion_kind = ModuleCompletionKind::DirectChildren; let 
function_completion_kind = FunctionCompletionKind::Name; let requested_items = RequestedItems::AnyItems; @@ -463,7 +448,6 @@ impl<'a> NodeFinder<'a> { prefix, path_kind, at_root, - module_completion_kind, function_completion_kind, requested_items, ); @@ -478,7 +462,6 @@ impl<'a> NodeFinder<'a> { &prefix, path_kind, at_root, - module_completion_kind, function_completion_kind, requested_items, ); @@ -489,7 +472,6 @@ impl<'a> NodeFinder<'a> { &prefix, path_kind, at_root, - module_completion_kind, function_completion_kind, requested_items, ); @@ -604,6 +586,7 @@ impl<'a> NodeFinder<'a> { for func_id in methods.iter() { if name_matches(name, prefix) { if let Some(completion_item) = self.function_completion_item( + name, func_id, function_completion_kind, function_kind, @@ -625,9 +608,12 @@ impl<'a> NodeFinder<'a> { ) { for (name, func_id) in &trait_.method_ids { if name_matches(name, prefix) { - if let Some(completion_item) = - self.function_completion_item(*func_id, function_completion_kind, function_kind) - { + if let Some(completion_item) = self.function_completion_item( + name, + *func_id, + function_completion_kind, + function_kind, + ) { self.completion_items.push(completion_item); self.suggested_module_def_ids.insert(ModuleDefId::FunctionId(*func_id)); } @@ -661,7 +647,6 @@ impl<'a> NodeFinder<'a> { prefix: &str, path_kind: PathKind, at_root: bool, - module_completion_kind: ModuleCompletionKind, function_completion_kind: FunctionCompletionKind, requested_items: RequestedItems, ) { @@ -694,12 +679,7 @@ impl<'a> NodeFinder<'a> { let function_kind = FunctionKind::Any; - let items = match module_completion_kind { - ModuleCompletionKind::DirectChildren => module_data.definitions(), - ModuleCompletionKind::AllVisibleItems => module_data.scope(), - }; - - for ident in items.names() { + for ident in module_data.scope().names() { let name = &ident.0.contents; if name_matches(name, prefix) { @@ -773,14 +753,6 @@ impl<'a> NodeFinder<'a> { fn resolve_path(&self, segments: 
Vec) -> Option { let last_segment = segments.last().unwrap().clone(); - let path_segments = segments.into_iter().map(PathSegment::from).collect(); - let path = Path { segments: path_segments, kind: PathKind::Plain, span: Span::default() }; - - let path_resolver = StandardPathResolver::new(self.root_module_id); - if let Ok(path_resolution) = path_resolver.resolve(self.def_maps, path, &mut None) { - return Some(path_resolution.module_def_id); - } - // If we can't resolve a path trough lookup, let's see if the last segment is bound to a type let location = Location::new(last_segment.span(), self.file); if let Some(reference_id) = self.interner.find_referenced(location) { @@ -808,7 +780,7 @@ impl<'a> Visitor for NodeFinder<'a> { self.includes_span(item.span) } - fn visit_import(&mut self, use_tree: &UseTree) -> bool { + fn visit_import(&mut self, use_tree: &UseTree, _visibility: ItemVisibility) -> bool { let mut prefixes = Vec::new(); self.find_in_use_tree(use_tree, &mut prefixes); false diff --git a/noir/noir-repo/tooling/lsp/src/requests/completion/auto_import.rs b/noir/noir-repo/tooling/lsp/src/requests/completion/auto_import.rs index 32f777dec34..bbd471dfea1 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/completion/auto_import.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/completion/auto_import.rs @@ -1,13 +1,7 @@ -use std::collections::BTreeMap; - use lsp_types::{Position, Range, TextEdit}; -use noirc_frontend::{ - ast::ItemVisibility, - graph::CrateId, - hir::def_map::{CrateDefMap, ModuleId}, - macros_api::{ModuleDefId, NodeInterner}, - node_interner::ReferenceId, -}; +use noirc_frontend::macros_api::ModuleDefId; + +use crate::modules::{get_parent_module_id, module_full_path, module_id_path}; use super::{ kinds::{FunctionCompletionKind, FunctionKind, RequestedItems}, @@ -30,7 +24,7 @@ impl<'a> NodeFinder<'a> { continue; } - for (module_def_id, visibility) in entries { + for (module_def_id, visibility, defining_module) in entries { if 
self.suggested_module_def_ids.contains(module_def_id) { continue; } @@ -45,46 +39,32 @@ impl<'a> NodeFinder<'a> { continue; }; - let module_full_path; - if let ModuleDefId::ModuleId(module_id) = module_def_id { - module_full_path = module_id_path( - *module_id, + let module_full_path = if let Some(defining_module) = defining_module { + module_id_path( + *defining_module, &self.module_id, current_module_parent_id, self.interner, - ); + ) } else { - let Some(parent_module) = get_parent_module(self.interner, *module_def_id) - else { - continue; - }; - - match *visibility { - ItemVisibility::Public => (), - ItemVisibility::Private => { - // Technically this can't be reached because we don't record private items for auto-import, - // but this is here for completeness. - continue; - } - ItemVisibility::PublicCrate => { - if self.module_id.krate != parent_module.krate { - continue; - } - } - } - - module_full_path = module_id_path( - parent_module, - &self.module_id, + let Some(module_full_path) = module_full_path( + *module_def_id, + *visibility, + self.module_id, current_module_parent_id, self.interner, - ); - } - - let full_path = if let ModuleDefId::ModuleId(..) 
= module_def_id { + ) else { + continue; + }; module_full_path - } else { + }; + + let full_path = if defining_module.is_some() + || !matches!(module_def_id, ModuleDefId::ModuleId(..)) + { format!("{}::{}", module_full_path, name) + } else { + module_full_path }; let mut label_details = completion_item.label_details.unwrap(); @@ -119,88 +99,3 @@ impl<'a> NodeFinder<'a> { } } } - -fn get_parent_module(interner: &NodeInterner, module_def_id: ModuleDefId) -> Option { - let reference_id = module_def_id_to_reference_id(module_def_id); - interner.reference_module(reference_id).copied() -} - -fn get_parent_module_id( - def_maps: &BTreeMap, - module_id: ModuleId, -) -> Option { - let crate_def_map = &def_maps[&module_id.krate]; - let module_data = &crate_def_map.modules()[module_id.local_id.0]; - module_data.parent.map(|parent| ModuleId { krate: module_id.krate, local_id: parent }) -} - -fn module_def_id_to_reference_id(module_def_id: ModuleDefId) -> ReferenceId { - match module_def_id { - ModuleDefId::ModuleId(id) => ReferenceId::Module(id), - ModuleDefId::FunctionId(id) => ReferenceId::Function(id), - ModuleDefId::TypeId(id) => ReferenceId::Struct(id), - ModuleDefId::TypeAliasId(id) => ReferenceId::Alias(id), - ModuleDefId::TraitId(id) => ReferenceId::Trait(id), - ModuleDefId::GlobalId(id) => ReferenceId::Global(id), - } -} - -/// Returns the path to reach an item inside `target_module_id` from inside `current_module_id`. -/// Returns a relative path if possible. 
-fn module_id_path( - target_module_id: ModuleId, - current_module_id: &ModuleId, - current_module_parent_id: Option, - interner: &NodeInterner, -) -> String { - if Some(target_module_id) == current_module_parent_id { - return "super".to_string(); - } - - let mut segments: Vec<&str> = Vec::new(); - let mut is_relative = false; - - if let Some(module_attributes) = interner.try_module_attributes(&target_module_id) { - segments.push(&module_attributes.name); - - let mut current_attributes = module_attributes; - - loop { - let Some(parent_local_id) = current_attributes.parent else { - break; - }; - - let parent_module_id = - &ModuleId { krate: target_module_id.krate, local_id: parent_local_id }; - - if current_module_id == parent_module_id { - is_relative = true; - break; - } - - if current_module_parent_id == Some(*parent_module_id) { - segments.push("super"); - is_relative = true; - break; - } - - let Some(parent_attributes) = interner.try_module_attributes(parent_module_id) else { - break; - }; - - segments.push(&parent_attributes.name); - current_attributes = parent_attributes; - } - } - - if !is_relative { - // We don't record module attributes for the root module, - // so we handle that case separately - if let CrateId::Root(_) = target_module_id.krate { - segments.push("crate"); - } - } - - segments.reverse(); - segments.join("::") -} diff --git a/noir/noir-repo/tooling/lsp/src/requests/completion/completion_items.rs b/noir/noir-repo/tooling/lsp/src/requests/completion/completion_items.rs index d4190f5241c..21c3a607b18 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/completion/completion_items.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/completion/completion_items.rs @@ -3,8 +3,8 @@ use lsp_types::{ }; use noirc_frontend::{ hir_def::{function::FuncMeta, stmt::HirPattern}, - macros_api::{ModuleDefId, StructId}, - node_interner::{FuncId, GlobalId, TraitId, TypeAliasId}, + macros_api::ModuleDefId, + node_interner::{FuncId, GlobalId}, Type, }; @@ -38,45 
+38,32 @@ impl<'a> NodeFinder<'a> { match module_def_id { ModuleDefId::ModuleId(_) => Some(module_completion_item(name)), - ModuleDefId::FunctionId(func_id) => { - self.function_completion_item(func_id, function_completion_kind, function_kind) - } - ModuleDefId::TypeId(struct_id) => Some(self.struct_completion_item(struct_id)), - ModuleDefId::TypeAliasId(type_alias_id) => { - Some(self.type_alias_completion_item(type_alias_id)) - } - ModuleDefId::TraitId(trait_id) => Some(self.trait_completion_item(trait_id)), - ModuleDefId::GlobalId(global_id) => Some(self.global_completion_item(global_id)), + ModuleDefId::FunctionId(func_id) => self.function_completion_item( + &name, + func_id, + function_completion_kind, + function_kind, + ), + ModuleDefId::TypeId(..) => Some(self.struct_completion_item(name)), + ModuleDefId::TypeAliasId(..) => Some(self.type_alias_completion_item(name)), + ModuleDefId::TraitId(..) => Some(self.trait_completion_item(name)), + ModuleDefId::GlobalId(global_id) => Some(self.global_completion_item(name, global_id)), } } - fn struct_completion_item(&self, struct_id: StructId) -> CompletionItem { - let struct_type = self.interner.get_struct(struct_id); - let struct_type = struct_type.borrow(); - let name = struct_type.name.to_string(); - + fn struct_completion_item(&self, name: String) -> CompletionItem { simple_completion_item(name.clone(), CompletionItemKind::STRUCT, Some(name)) } - fn type_alias_completion_item(&self, type_alias_id: TypeAliasId) -> CompletionItem { - let type_alias = self.interner.get_type_alias(type_alias_id); - let type_alias = type_alias.borrow(); - let name = type_alias.name.to_string(); - + fn type_alias_completion_item(&self, name: String) -> CompletionItem { simple_completion_item(name.clone(), CompletionItemKind::STRUCT, Some(name)) } - fn trait_completion_item(&self, trait_id: TraitId) -> CompletionItem { - let trait_ = self.interner.get_trait(trait_id); - let name = trait_.name.to_string(); - + fn 
trait_completion_item(&self, name: String) -> CompletionItem { simple_completion_item(name.clone(), CompletionItemKind::INTERFACE, Some(name)) } - fn global_completion_item(&self, global_id: GlobalId) -> CompletionItem { - let global_definition = self.interner.get_global_definition(global_id); - let name = global_definition.name.clone(); - + fn global_completion_item(&self, name: String, global_id: GlobalId) -> CompletionItem { let global = self.interner.get_global(global_id); let typ = self.interner.definition_type(global.definition_id); let description = typ.to_string(); @@ -86,12 +73,12 @@ impl<'a> NodeFinder<'a> { pub(super) fn function_completion_item( &self, + name: &String, func_id: FuncId, function_completion_kind: FunctionCompletionKind, function_kind: FunctionKind, ) -> Option { let func_meta = self.interner.function_meta(&func_id); - let name = &self.interner.function_name(&func_id).to_string(); let func_self_type = if let Some((pattern, typ, _)) = func_meta.parameters.0.first() { if self.hir_pattern_is_self_type(pattern) { diff --git a/noir/noir-repo/tooling/lsp/src/requests/completion/kinds.rs b/noir/noir-repo/tooling/lsp/src/requests/completion/kinds.rs index e01fcfc8c56..2fe039ba331 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/completion/kinds.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/completion/kinds.rs @@ -1,17 +1,5 @@ use noirc_frontend::Type; -/// When finding items in a module, whether to show only direct children or all visible items. -#[derive(Clone, Copy, PartialEq, Eq, Debug)] -pub(super) enum ModuleCompletionKind { - // Only show a module's direct children. This is used when completing a use statement - // or a path after the first segment. - DirectChildren, - // Show all of a module's visible items. This is used when completing a path outside - // of a use statement (in regular code) when the path is just a single segment: - // we want to find items exposed in the current module. 
- AllVisibleItems, -} - /// When suggest a function as a result of completion, whether to autocomplete its name or its name and parameters. #[derive(Clone, Copy, PartialEq, Eq, Debug)] pub(super) enum FunctionCompletionKind { diff --git a/noir/noir-repo/tooling/lsp/src/requests/completion/tests.rs b/noir/noir-repo/tooling/lsp/src/requests/completion/tests.rs index 2d667ead6bf..d621ca21bb8 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/completion/tests.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/completion/tests.rs @@ -119,14 +119,14 @@ mod completion_tests { #[test] async fn test_use_first_segment() { let src = r#" - mod foo {} + mod foobaz {} mod foobar {} - use f>|< + use foob>|< "#; assert_completion( src, - vec![module_completion_item("foo"), module_completion_item("foobar")], + vec![module_completion_item("foobaz"), module_completion_item("foobar")], ) .await; } @@ -218,7 +218,7 @@ mod completion_tests { #[test] async fn test_use_suggests_hardcoded_crate() { let src = r#" - use c>|< + use cr>|< "#; assert_completion( @@ -291,16 +291,16 @@ mod completion_tests { #[test] async fn test_use_after_super() { let src = r#" - mod foo {} + mod foobar {} mod bar { mod something {} - use super::f>|< + use super::foob>|< } "#; - assert_completion(src, vec![module_completion_item("foo")]).await; + assert_completion(src, vec![module_completion_item("foobar")]).await; } #[test] @@ -1791,4 +1791,76 @@ mod completion_tests { ) .await; } + + #[test] + async fn test_suggests_pub_use() { + let src = r#" + mod bar { + mod baz { + mod coco {} + } + + pub use baz::coco; + } + + fn main() { + bar::c>|< + } + "#; + assert_completion(src, vec![module_completion_item("coco")]).await; + } + + #[test] + async fn test_auto_import_suggests_pub_use_for_module() { + let src = r#" + mod bar { + mod baz { + mod coco {} + } + + pub use baz::coco as foobar; + } + + fn main() { + foob>|< + } + "#; + + let items = get_completions(src).await; + assert_eq!(items.len(), 1); + + let item = 
&items[0]; + assert_eq!(item.label, "foobar"); + assert_eq!( + item.label_details.as_ref().unwrap().detail, + Some("(use bar::foobar)".to_string()), + ); + } + + #[test] + async fn test_auto_import_suggests_pub_use_for_function() { + let src = r#" + mod bar { + mod baz { + pub fn coco() {} + } + + pub use baz::coco as foobar; + } + + fn main() { + foob>|< + } + "#; + + let items = get_completions(src).await; + assert_eq!(items.len(), 1); + + let item = &items[0]; + assert_eq!(item.label, "foobar()"); + assert_eq!( + item.label_details.as_ref().unwrap().detail, + Some("(use bar::foobar)".to_string()), + ); + } } diff --git a/noir/noir-repo/tooling/lsp/src/requests/mod.rs b/noir/noir-repo/tooling/lsp/src/requests/mod.rs index e88c7f11465..5bd9959fd63 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/mod.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/mod.rs @@ -10,7 +10,7 @@ use crate::{ use async_lsp::{ErrorCode, ResponseError}; use fm::{codespan_files::Error, FileMap, PathString}; use lsp_types::{ - DeclarationCapability, Location, Position, TextDocumentPositionParams, + CodeActionKind, DeclarationCapability, Location, Position, TextDocumentPositionParams, TextDocumentSyncCapability, TextDocumentSyncKind, TypeDefinitionProviderCapability, Url, WorkDoneProgressOptions, }; @@ -36,6 +36,7 @@ use crate::{ // They are not attached to the `NargoLspService` struct so they can be unit tested with only `LspState` // and params passed in. 
+mod code_action; mod code_lens_request; mod completion; mod document_symbol; @@ -51,14 +52,15 @@ mod test_run; mod tests; pub(crate) use { - code_lens_request::collect_lenses_for_package, code_lens_request::on_code_lens_request, - completion::on_completion_request, document_symbol::on_document_symbol_request, - goto_declaration::on_goto_declaration_request, goto_definition::on_goto_definition_request, - goto_definition::on_goto_type_definition_request, hover::on_hover_request, - inlay_hint::on_inlay_hint_request, profile_run::on_profile_run_request, - references::on_references_request, rename::on_prepare_rename_request, - rename::on_rename_request, signature_help::on_signature_help_request, - test_run::on_test_run_request, tests::on_tests_request, + code_action::on_code_action_request, code_lens_request::collect_lenses_for_package, + code_lens_request::on_code_lens_request, completion::on_completion_request, + document_symbol::on_document_symbol_request, goto_declaration::on_goto_declaration_request, + goto_definition::on_goto_definition_request, goto_definition::on_goto_type_definition_request, + hover::on_hover_request, inlay_hint::on_inlay_hint_request, + profile_run::on_profile_run_request, references::on_references_request, + rename::on_prepare_rename_request, rename::on_rename_request, + signature_help::on_signature_help_request, test_run::on_test_run_request, + tests::on_tests_request, }; /// LSP client will send initialization request after the server has started. 
@@ -252,6 +254,13 @@ pub(crate) fn on_initialize( }, }, )), + code_action_provider: Some(lsp_types::OneOf::Right(lsp_types::CodeActionOptions { + code_action_kinds: Some(vec![CodeActionKind::QUICKFIX]), + work_done_progress_options: WorkDoneProgressOptions { + work_done_progress: None, + }, + resolve_provider: None, + })), }, server_info: None, }) diff --git a/noir/noir-repo/tooling/lsp/src/types.rs b/noir/noir-repo/tooling/lsp/src/types.rs index 3ac1f35e18e..043c50a87fd 100644 --- a/noir/noir-repo/tooling/lsp/src/types.rs +++ b/noir/noir-repo/tooling/lsp/src/types.rs @@ -1,8 +1,8 @@ use fm::FileId; use lsp_types::{ - CompletionOptions, DeclarationCapability, DefinitionOptions, DocumentSymbolOptions, - HoverOptions, InlayHintOptions, OneOf, ReferencesOptions, RenameOptions, SignatureHelpOptions, - TypeDefinitionProviderCapability, + CodeActionOptions, CompletionOptions, DeclarationCapability, DefinitionOptions, + DocumentSymbolOptions, HoverOptions, InlayHintOptions, OneOf, ReferencesOptions, RenameOptions, + SignatureHelpOptions, TypeDefinitionProviderCapability, }; use noirc_driver::DebugFile; use noirc_errors::{debug_info::OpCodesCount, Location}; @@ -165,6 +165,10 @@ pub(crate) struct ServerCapabilities { /// The server provides signature help support. #[serde(skip_serializing_if = "Option::is_none")] pub(crate) signature_help_provider: Option>, + + /// The server provides code action support. 
+ #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) code_action_provider: Option>, } #[derive(Debug, PartialEq, Clone, Default, Deserialize, Serialize)] diff --git a/noir/noir-repo/tooling/lsp/src/visibility.rs b/noir/noir-repo/tooling/lsp/src/visibility.rs new file mode 100644 index 00000000000..aad8b47fbbe --- /dev/null +++ b/noir/noir-repo/tooling/lsp/src/visibility.rs @@ -0,0 +1,13 @@ +use noirc_frontend::{ast::ItemVisibility, hir::def_map::ModuleId}; + +pub(super) fn is_visible( + visibility: ItemVisibility, + current_module: ModuleId, + target_module: ModuleId, +) -> bool { + match visibility { + ItemVisibility::Public => true, + ItemVisibility::Private => false, + ItemVisibility::PublicCrate => current_module.krate == target_module.krate, + } +} diff --git a/noir/noir-repo/tooling/nargo/src/package.rs b/noir/noir-repo/tooling/nargo/src/package.rs index cde616a9e32..f55ca5550a3 100644 --- a/noir/noir-repo/tooling/nargo/src/package.rs +++ b/noir/noir-repo/tooling/nargo/src/package.rs @@ -73,11 +73,4 @@ impl Package { pub fn is_library(&self) -> bool { self.package_type == PackageType::Library } - - pub fn error_on_unused_imports(&self) -> bool { - match self.package_type { - PackageType::Library => false, - PackageType::Binary | PackageType::Contract => true, - } - } } diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs index 1130a82fdfc..5239070b4d2 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs @@ -10,7 +10,7 @@ use nargo::{ use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::{AbiParameter, AbiType, MAIN_RETURN_NAME}; use noirc_driver::{ - check_crate, compute_function_abi, file_manager_with_stdlib, CheckOptions, CompileOptions, + check_crate, compute_function_abi, file_manager_with_stdlib, CompileOptions, NOIR_ARTIFACT_VERSION_STRING, }; use 
noirc_frontend::{ @@ -81,9 +81,7 @@ fn check_package( allow_overwrite: bool, ) -> Result { let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package); - let error_on_unused_imports = package.error_on_unused_imports(); - let check_options = CheckOptions::new(compile_options, error_on_unused_imports); - check_crate_and_report_errors(&mut context, crate_id, &check_options)?; + check_crate_and_report_errors(&mut context, crate_id, compile_options)?; if package.is_library() || package.is_contract() { // Libraries do not have ABIs while contracts have many, so we cannot generate a `Prover.toml` file. @@ -152,10 +150,9 @@ fn create_input_toml_template( pub(crate) fn check_crate_and_report_errors( context: &mut Context, crate_id: CrateId, - check_options: &CheckOptions, + options: &CompileOptions, ) -> Result<(), CompileError> { - let options = &check_options.compile_options; - let result = check_crate(context, crate_id, check_options); + let result = check_crate(context, crate_id, options); report_errors(result, &context.file_manager, options.deny_warnings, options.silence_warnings) } diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs index 5721dd33e27..19add7f30dc 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs @@ -12,7 +12,7 @@ use nargo::workspace::Workspace; use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_driver::{ - compile_no_check, file_manager_with_stdlib, CheckOptions, CompileOptions, CompiledProgram, + compile_no_check, file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, }; @@ -83,9 +83,7 @@ fn compile_exported_functions( compile_options: &CompileOptions, ) -> Result<(), CliError> { let (mut context, crate_id) = 
prepare_package(file_manager, parsed_files, package); - let error_on_unused_imports = package.error_on_unused_imports(); - let check_options = CheckOptions::new(compile_options, error_on_unused_imports); - check_crate_and_report_errors(&mut context, crate_id, &check_options)?; + check_crate_and_report_errors(&mut context, crate_id, compile_options)?; let exported_functions = context.get_all_exported_functions_in_crate(&crate_id); diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs index 751d49e6427..2f9390d72e0 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs @@ -10,8 +10,7 @@ use nargo::{ }; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_driver::{ - check_crate, file_manager_with_stdlib, CheckOptions, CompileOptions, - NOIR_ARTIFACT_VERSION_STRING, + check_crate, file_manager_with_stdlib, CompileOptions, NOIR_ARTIFACT_VERSION_STRING, }; use noirc_frontend::{ graph::CrateName, @@ -186,9 +185,7 @@ fn run_test + Default>( // We then need to construct a separate copy for each test. 
let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package); - let error_on_unused_imports = package.error_on_unused_imports(); - let check_options = CheckOptions::new(compile_options, error_on_unused_imports); - check_crate(&mut context, crate_id, &check_options) + check_crate(&mut context, crate_id, compile_options) .expect("Any errors should have occurred when collecting test functions"); let test_functions = context @@ -217,9 +214,7 @@ fn get_tests_in_package( options: &CompileOptions, ) -> Result, CliError> { let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package); - let error_on_unused_imports = package.error_on_unused_imports(); - let check_options = CheckOptions::new(options, error_on_unused_imports); - check_crate_and_report_errors(&mut context, crate_id, &check_options)?; + check_crate_and_report_errors(&mut context, crate_id, options)?; Ok(context .get_all_test_functions_in_crate_matching(&crate_id, fn_name) diff --git a/noir/noir-repo/tooling/nargo_fmt/src/rewrite/imports.rs b/noir/noir-repo/tooling/nargo_fmt/src/rewrite/imports.rs index 025d354259e..6c63c551f7d 100644 --- a/noir/noir-repo/tooling/nargo_fmt/src/rewrite/imports.rs +++ b/noir/noir-repo/tooling/nargo_fmt/src/rewrite/imports.rs @@ -1,4 +1,4 @@ -use noirc_frontend::ast; +use noirc_frontend::ast::{self, ItemVisibility}; use crate::{ items::Item, @@ -96,8 +96,18 @@ impl UseTree { result } - pub(crate) fn rewrite_top_level(&self, visitor: &FmtVisitor, shape: Shape) -> String { - format!("use {};", self.rewrite(visitor, shape)) + pub(crate) fn rewrite_top_level( + &self, + visitor: &FmtVisitor, + shape: Shape, + visibility: ItemVisibility, + ) -> String { + let rewrite = self.rewrite(visitor, shape); + if visibility == ItemVisibility::Private { + format!("use {};", rewrite) + } else { + format!("{} use {};", visibility, rewrite) + } } fn rewrite(&self, visitor: &FmtVisitor, shape: Shape) -> String { diff --git 
a/noir/noir-repo/tooling/nargo_fmt/src/visitor/item.rs b/noir/noir-repo/tooling/nargo_fmt/src/visitor/item.rs index 94a32449ebe..0e2d07f13d0 100644 --- a/noir/noir-repo/tooling/nargo_fmt/src/visitor/item.rs +++ b/noir/noir-repo/tooling/nargo_fmt/src/visitor/item.rs @@ -216,9 +216,9 @@ impl super::FmtVisitor<'_> { self.last_position = span.end(); } } - ItemKind::Import(use_tree) => { - let use_tree = - UseTree::from_ast(use_tree).rewrite_top_level(self, self.shape()); + ItemKind::Import(use_tree, visibility) => { + let use_tree = UseTree::from_ast(use_tree); + let use_tree = use_tree.rewrite_top_level(self, self.shape(), visibility); self.push_rewrite(use_tree, span); self.last_position = span.end(); }