Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: allow unconstrained after visibility #6246

Merged
merged 10 commits
Oct 8, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 16 additions & 5 deletions compiler/noirc_frontend/src/lexer/token.rs
Original file line number Diff line number Diff line change
Expand Up @@ -378,8 +378,16 @@ impl fmt::Display for Token {
Token::Keyword(k) => write!(f, "{k}"),
Token::Attribute(ref a) => write!(f, "{a}"),
Token::InnerAttribute(ref a) => write!(f, "#![{a}]"),
Token::LineComment(ref s, _style) => write!(f, "//{s}"),
Token::BlockComment(ref s, _style) => write!(f, "/*{s}*/"),
Token::LineComment(ref s, style) => match style {
Some(DocStyle::Inner) => write!(f, "//!{s}"),
Some(DocStyle::Outer) => write!(f, "///{s}"),
None => write!(f, "//{s}"),
},
Token::BlockComment(ref s, style) => match style {
Some(DocStyle::Inner) => write!(f, "/*!{s}*/"),
Some(DocStyle::Outer) => write!(f, "/**{s}*/"),
None => write!(f, "/*{s}*/"),
},
Token::Quote(ref stream) => {
write!(f, "quote {{")?;
for token in stream.0.iter() {
Expand Down Expand Up @@ -456,6 +464,7 @@ pub enum TokenKind {
InternedUnresolvedTypeData,
InternedPattern,
UnquoteMarker,
Comment,
OuterDocComment,
InnerDocComment,
}
Expand All @@ -477,6 +486,7 @@ impl fmt::Display for TokenKind {
TokenKind::InternedUnresolvedTypeData => write!(f, "interned unresolved type"),
TokenKind::InternedPattern => write!(f, "interned pattern"),
TokenKind::UnquoteMarker => write!(f, "macro result"),
TokenKind::Comment => write!(f, "comment"),
TokenKind::OuterDocComment => write!(f, "outer doc comment"),
TokenKind::InnerDocComment => write!(f, "inner doc comment"),
}
Expand All @@ -503,6 +513,7 @@ impl Token {
Token::InternedLValue(_) => TokenKind::InternedLValue,
Token::InternedUnresolvedTypeData(_) => TokenKind::InternedUnresolvedTypeData,
Token::InternedPattern(_) => TokenKind::InternedPattern,
Token::LineComment(_, None) | Token::BlockComment(_, None) => TokenKind::Comment,
Token::LineComment(_, Some(DocStyle::Outer))
| Token::BlockComment(_, Some(DocStyle::Outer)) => TokenKind::OuterDocComment,
Token::LineComment(_, Some(DocStyle::Inner))
Expand Down Expand Up @@ -635,8 +646,8 @@ impl fmt::Display for TestScope {
match self {
TestScope::None => write!(f, ""),
TestScope::ShouldFailWith { reason } => match reason {
Some(failure_reason) => write!(f, "(should_fail_with = ({failure_reason}))"),
None => write!(f, "should_fail"),
Some(failure_reason) => write!(f, "(should_fail_with = {failure_reason:?})"),
None => write!(f, "(should_fail)"),
},
}
}
Expand Down Expand Up @@ -991,7 +1002,7 @@ impl fmt::Display for SecondaryAttribute {
match self {
SecondaryAttribute::Deprecated(None) => write!(f, "#[deprecated]"),
SecondaryAttribute::Deprecated(Some(ref note)) => {
write!(f, r#"#[deprecated("{note}")]"#)
write!(f, r#"#[deprecated({note:?})]"#)
}
SecondaryAttribute::Tag(ref attribute) => write!(f, "#['{}]", attribute.contents),
SecondaryAttribute::Meta(ref attribute) => write!(f, "#[{}]", attribute.contents),
Expand Down
11 changes: 10 additions & 1 deletion compiler/noirc_frontend/src/parser/parser/function.rs
Original file line number Diff line number Diff line change
Expand Up @@ -285,7 +285,7 @@ fn empty_body() -> BlockExpression {
#[cfg(test)]
mod tests {
use crate::{
ast::{NoirFunction, UnresolvedTypeData, Visibility},
ast::{ItemVisibility, NoirFunction, UnresolvedTypeData, Visibility},
parser::{
parser::{
parse_program,
Expand Down Expand Up @@ -480,4 +480,13 @@ mod tests {
let error = get_single_error(&errors, span);
assert_eq!(error.to_string(), "Expected a type but found ,");
}

#[test]
fn parse_function_with_unconstrained_after_visibility() {
    // `unconstrained` is also accepted *after* the visibility modifier
    // (the canonical order), so `pub unconstrained fn` must parse cleanly
    // and record both the visibility and the unconstrained flag.
    let source = "pub unconstrained fn foo() {}";
    let function = parse_function_no_error(source);
    assert_eq!(function.def.name.to_string(), "foo");
    assert!(function.def.is_unconstrained);
    assert_eq!(function.def.visibility, ItemVisibility::Public);
}
}
15 changes: 14 additions & 1 deletion compiler/noirc_frontend/src/parser/parser/modifiers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,10 @@ pub(crate) struct Modifiers {
}

impl<'a> Parser<'a> {
/// Modifiers = 'unconstrained'? ItemVisibility 'comptime'? 'mut'?
/// Modifiers = ItemVisibility 'unconstrained'? 'comptime'? 'mut'?
///
/// NOTE: we also allow `unconstrained` before the visibility for backwards compatibility.
/// The formatter will put it after the visibility.
pub(crate) fn parse_modifiers(&mut self, allow_mutable: bool) -> Modifiers {
let unconstrained = if self.eat_keyword(Keyword::Unconstrained) {
Some(self.previous_token_span)
Expand All @@ -26,6 +29,16 @@ impl<'a> Parser<'a> {
let visibility = self.parse_item_visibility();
let visibility_span = self.span_since(start_span);

let unconstrained = if unconstrained.is_none() {
if self.eat_keyword(Keyword::Unconstrained) {
Some(self.previous_token_span)
} else {
None
}
} else {
unconstrained
};

let comptime =
if self.eat_keyword(Keyword::Comptime) { Some(self.previous_token_span) } else { None };
let mutable = if allow_mutable && self.eat_keyword(Keyword::Mut) {
Expand Down
2 changes: 1 addition & 1 deletion noir_stdlib/src/array/quicksort.nr
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ unconstrained fn quicksort_recursive<T, Env, let N: u32>(arr: &mut [T; N], low:
}
}

unconstrained pub(crate) fn quicksort<T, Env, let N: u32>(_arr: [T; N], sortfn: fn[Env](T, T) -> bool) -> [T; N] {
pub(crate) unconstrained fn quicksort<T, Env, let N: u32>(_arr: [T; N], sortfn: fn[Env](T, T) -> bool) -> [T; N] {
let mut arr: [T; N] = _arr;
if arr.len() <= 1 {} else {
quicksort_recursive(&mut arr, 0, arr.len() - 1, sortfn);
Expand Down
2 changes: 1 addition & 1 deletion noir_stdlib/src/collections/bounded_vec.nr
Original file line number Diff line number Diff line change
Expand Up @@ -554,7 +554,7 @@ mod bounded_vec_tests {
assert_eq(bounded_vec.storage()[2], 3);
}

#[test(should_fail_with="from array out of bounds")]
// Building a BoundedVec whose max length (2) is smaller than the source
// array's length (3) must abort; the test only passes if execution fails
// with the exact "from array out of bounds" message.
#[test(should_fail_with = "from array out of bounds")]
fn max_len_lower_then_array_len() {
    let _: BoundedVec<Field, 2> = BoundedVec::from_array([0; 3]);
}
Expand Down
10 changes: 5 additions & 5 deletions noir_stdlib/src/collections/umap.nr
Original file line number Diff line number Diff line change
Expand Up @@ -188,7 +188,7 @@ impl<K, V, B> UHashMap<K, V, B> {

// For each key-value entry applies mutator function.
// docs:start:iter_mut
unconstrained pub fn iter_mut<H>(
pub unconstrained fn iter_mut<H>(
&mut self,
f: fn(K, V) -> (K, V)
)
Expand All @@ -210,7 +210,7 @@ impl<K, V, B> UHashMap<K, V, B> {

// For each key applies mutator function.
// docs:start:iter_keys_mut
unconstrained pub fn iter_keys_mut<H>(
pub unconstrained fn iter_keys_mut<H>(
&mut self,
f: fn(K) -> K
)
Expand Down Expand Up @@ -277,7 +277,7 @@ impl<K, V, B> UHashMap<K, V, B> {

// Get the value by key. If it does not exist, returns none().
// docs:start:get
unconstrained pub fn get<H>(
pub unconstrained fn get<H>(
self,
key: K
) -> Option<V>
Expand Down Expand Up @@ -309,7 +309,7 @@ impl<K, V, B> UHashMap<K, V, B> {

// Insert key-value entry. In case key was already present, value is overridden.
// docs:start:insert
unconstrained pub fn insert<H>(
pub unconstrained fn insert<H>(
&mut self,
key: K,
value: V
Expand Down Expand Up @@ -362,7 +362,7 @@ impl<K, V, B> UHashMap<K, V, B> {

// Removes a key-value entry. If key is not present, UHashMap remains unchanged.
// docs:start:remove
unconstrained pub fn remove<H>(
pub unconstrained fn remove<H>(
&mut self,
key: K
)
Expand Down
2 changes: 1 addition & 1 deletion noir_stdlib/src/field/bn254.nr
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ fn compute_decomposition(x: Field) -> (Field, Field) {
(low, high)
}

unconstrained pub(crate) fn decompose_hint(x: Field) -> (Field, Field) {
// Unconstrained hint that delegates to `compute_decomposition` to split `x`
// into its (low, high) limbs.
// NOTE(review): since this runs unconstrained, the result is presumably
// re-constrained by the (constrained) caller — confirm at call sites.
pub(crate) unconstrained fn decompose_hint(x: Field) -> (Field, Field) {
    compute_decomposition(x)
}

Expand Down
2 changes: 1 addition & 1 deletion noir_stdlib/src/field/mod.nr
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ impl Field {
/// (e.g. 254 for the BN254 field) allow for multiple bit decompositions. This is due to how the `Field` will
/// wrap around due to overflow when verifying the decomposition.
#[builtin(to_be_bits)]
// docs:start:to_be_bits
// docs:start:to_be_bits
pub fn to_be_bits<let N: u32>(self: Self) -> [u1; N] {}
// docs:end:to_be_bits

Expand Down
2 changes: 1 addition & 1 deletion noir_stdlib/src/meta/trait_def.nr
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ use crate::cmp::Eq;

impl TraitDefinition {
#[builtin(trait_def_as_trait_constraint)]
// docs:start:as_trait_constraint
// docs:start:as_trait_constraint
pub comptime fn as_trait_constraint(_self: Self) -> TraitConstraint {}
// docs:end:as_trait_constraint
}
Expand Down
12 changes: 6 additions & 6 deletions noir_stdlib/src/test.nr
Original file line number Diff line number Diff line change
Expand Up @@ -21,30 +21,30 @@ pub struct OracleMock {
}

impl OracleMock {
unconstrained pub fn mock<let N: u32>(name: str<N>) -> Self {
pub unconstrained fn mock<let N: u32>(name: str<N>) -> Self {
Self { id: create_mock_oracle(name) }
}

unconstrained pub fn with_params<P>(self, params: P) -> Self {
pub unconstrained fn with_params<P>(self, params: P) -> Self {
set_mock_params_oracle(self.id, params);
self
}

unconstrained pub fn get_last_params<P>(self) -> P {
pub unconstrained fn get_last_params<P>(self) -> P {
get_mock_last_params_oracle(self.id)
}

unconstrained pub fn returns<R>(self, returns: R) -> Self {
pub unconstrained fn returns<R>(self, returns: R) -> Self {
set_mock_returns_oracle(self.id, returns);
self
}

unconstrained pub fn times(self, times: u64) -> Self {
pub unconstrained fn times(self, times: u64) -> Self {
set_mock_times_oracle(self.id, times);
self
}

unconstrained pub fn clear(self) {
pub unconstrained fn clear(self) {
clear_mock_oracle(self.id);
}
}
134 changes: 132 additions & 2 deletions tooling/nargo_fmt/src/visitor/item.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,8 @@ use crate::{
};
use noirc_frontend::{
ast::{ItemVisibility, NoirFunction, TraitImplItemKind, UnresolvedTypeData, Visibility},
token::SecondaryAttribute,
lexer::Lexer,
token::{SecondaryAttribute, TokenKind},
};
use noirc_frontend::{
hir::resolution::errors::Span,
Expand All @@ -27,7 +28,8 @@ impl super::FmtVisitor<'_> {
let name_span = func.name_ident().span();
let func_span = func.span();

let mut result = self.slice(start..name_span.end()).to_owned();
let fn_header = self.slice(start..name_span.end());
let mut result = self.format_fn_header(fn_header, &func);

let params_open =
self.span_before(name_span.end()..func_span.start(), Token::LeftParen).start();
Expand Down Expand Up @@ -109,6 +111,134 @@ impl super::FmtVisitor<'_> {
(result.trim_end().to_string(), last_line_contains_single_line_comment(maybe_comment))
}

/// Formats everything from the function's outer doc comments up to and
/// including `fn name`: doc comments, attributes, modifiers and the name.
///
/// The token stream is consumed in source order, but the modifiers are
/// re-emitted in canonical order (`pub` before `unconstrained` before
/// `comptime`), using the flags recorded on the parsed `NoirFunction`.
fn format_fn_header(&self, src: &str, func: &NoirFunction) -> String {
    let mut header = String::new();
    let mut tokens = Lexer::new(src).skip_comments(false).peekable();

    // Outer doc comments come first, each on its own line.
    while let Some(Ok(token)) = tokens.peek() {
        if token.kind() != TokenKind::OuterDocComment {
            break;
        }
        header.push_str(&token.to_string());
        header.push('\n');
        header.push_str(&self.indent.to_string());
        tokens.next();

        self.append_comments_if_any(&mut tokens, &mut header);
    }

    // Then any attributes, also one per line.
    while let Some(Ok(token)) = tokens.peek() {
        if token.kind() != TokenKind::Attribute {
            break;
        }
        header.push_str(&token.to_string());
        header.push('\n');
        header.push_str(&self.indent.to_string());
        tokens.next();

        self.append_comments_if_any(&mut tokens, &mut header);
    }

    self.append_comments_if_any(&mut tokens, &mut header);

    // A leading `unconstrained` (before the visibility) is accepted for
    // backwards compatibility: consume the token here; it is re-emitted
    // in the canonical position (after the visibility) further down.
    if matches!(tokens.peek(), Some(Ok(token)) if matches!(token.token(), Token::Keyword(Keyword::Unconstrained)))
    {
        tokens.next();
    }

    self.append_comments_if_any(&mut tokens, &mut header);

    // Visibility: skip the source tokens (`pub` or `pub(crate)`) and print
    // the resolved visibility from the parsed function definition instead.
    let mut has_visibility = false;
    if matches!(tokens.peek(), Some(Ok(token)) if matches!(token.token(), Token::Keyword(Keyword::Pub)))
    {
        has_visibility = true;
        tokens.next();
        if matches!(tokens.peek(), Some(Ok(token)) if matches!(token.token(), Token::LeftParen)) {
            // `pub(crate)`: consume `(`, `crate` and `)`.
            tokens.next();
            tokens.next();
            tokens.next();
        }
    }

    if has_visibility {
        header.push_str(&func.def.visibility.to_string());
        header.push(' ');
    }

    self.append_comments_if_any(&mut tokens, &mut header);

    // `unconstrained` in its canonical position, after the visibility.
    if matches!(tokens.peek(), Some(Ok(token)) if matches!(token.token(), Token::Keyword(Keyword::Unconstrained)))
    {
        tokens.next();
    }

    if func.def.is_unconstrained {
        header.push_str("unconstrained ");
    }

    self.append_comments_if_any(&mut tokens, &mut header);

    // The optional `comptime` keyword.
    if matches!(tokens.peek(), Some(Ok(token)) if matches!(token.token(), Token::Keyword(Keyword::Comptime)))
    {
        tokens.next();
    }

    if func.def.is_comptime {
        header.push_str("comptime ");
    }

    self.append_comments_if_any(&mut tokens, &mut header);

    // The `fn` keyword.
    tokens.next();
    header.push_str("fn ");

    self.append_comments_if_any(&mut tokens, &mut header);

    // Finally, the function name.
    header.push_str(&func.def.name.0.contents);

    header
}

/// Copies any comment tokens sitting at the lexer cursor into `result`.
/// Line comments are followed by a newline plus the current indentation;
/// block comments are appended inline. Stops at the first non-comment
/// token (or at a lexer error).
fn append_comments_if_any(
    &self,
    lexer: &mut std::iter::Peekable<Lexer<'_>>,
    result: &mut String,
) {
    loop {
        let Some(Ok(token)) = lexer.peek() else { break };
        let is_line_comment = matches!(token.token(), Token::LineComment(..));
        let is_block_comment = matches!(token.token(), Token::BlockComment(..));
        if !is_line_comment && !is_block_comment {
            break;
        }
        result.push_str(&token.to_string());
        if is_line_comment {
            result.push('\n');
            result.push_str(&self.indent.to_string());
        }
        lexer.next();
    }
}

fn format_return_type(
&self,
span: Span,
Expand Down
Loading
Loading