Skip to content

Commit

Permalink
Support new WIT syntax in wit-parser
Browse files Browse the repository at this point in the history
This commit implements the changes outlined in
WebAssembly/component-model#193 for the `wit-parser` crate. Namely this
updates all parsing, lexing, and resolution of a WIT package. The
largest change is that the concept of a "document" has been removed.
Additionally most tests needed an update to have a `package foo` header.

Intra-package resolution is also a bit trickier now and required a
restructuring of the AST resolution pass, but nothing too radical
for what it's doing.
  • Loading branch information
alexcrichton committed May 13, 2023
1 parent 48110d4 commit e46b179
Show file tree
Hide file tree
Showing 220 changed files with 1,592 additions and 1,593 deletions.
333 changes: 201 additions & 132 deletions crates/wit-parser/src/ast.rs

Large diffs are not rendered by default.

47 changes: 32 additions & 15 deletions crates/wit-parser/src/ast/lex.rs
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,8 @@ pub enum Token {
GreaterThan,
RArrow,
Star,
At,
Slash,

Use,
Type,
Expand Down Expand Up @@ -79,16 +81,15 @@ pub enum Token {
Static,
Interface,
Tuple,
Implements,
Import,
Export,
World,
Default,
Pkg,
Self_,
Package,

Id,
ExplicitId,

Integer,
}

#[derive(Eq, PartialEq, Debug)]
Expand Down Expand Up @@ -145,6 +146,11 @@ impl<'a> Tokenizer<'a> {
Ok(id_part)
}

/// Interprets the source text covered by `span` as a base-10 `u32`.
///
/// # Errors
///
/// Fails if the spanned text is not a valid `u32` (non-digit
/// characters or a value out of range); the parse error is converted
/// into this crate's error type via `?`.
pub fn parse_u32(&self, span: Span) -> Result<u32> {
    Ok(self.get_span(span).parse()?)
}

pub fn next(&mut self) -> Result<Option<(Span, Token)>, Error> {
loop {
match self.next_raw()? {
Expand Down Expand Up @@ -174,6 +180,7 @@ impl<'a> Tokenizer<'a> {
break;
}
}
Comment
// eat a block comment if it's `/*...`
} else if self.eatc('*') {
let mut depth = 1;
Expand All @@ -188,11 +195,10 @@ impl<'a> Tokenizer<'a> {
_ => {}
}
}
Comment
} else {
return Err(Error::Unexpected(start, ch));
Slash
}

Comment
}
'=' => Equals,
',' => Comma,
Expand All @@ -206,6 +212,7 @@ impl<'a> Tokenizer<'a> {
'<' => LessThan,
'>' => GreaterThan,
'*' => Star,
'@' => At,
'-' => {
if self.eatc('>') {
RArrow
Expand Down Expand Up @@ -272,16 +279,26 @@ impl<'a> Tokenizer<'a> {
"static" => Static,
"interface" => Interface,
"tuple" => Tuple,
"implements" => Implements,
"world" => World,
"import" => Import,
"export" => Export,
"default" => Default,
"pkg" => Pkg,
"self" => Self_,
"package" => Package,
_ => Id,
}
}

ch if ch.is_ascii_digit() => {
let mut iter = self.chars.clone();
while let Some((_, ch)) = iter.next() {
if !ch.is_ascii_digit() {
break;
}
self.chars = iter.clone();
}

Integer
}

ch => return Err(Error::Unexpected(start, ch)),
};
let end = match self.chars.clone().next() {
Expand Down Expand Up @@ -504,18 +521,18 @@ impl Token {
ExplicitId => "an '%' identifier",
RArrow => "`->`",
Star => "`*`",
At => "`@`",
Slash => "`/`",
As => "keyword `as`",
From_ => "keyword `from`",
Static => "keyword `static`",
Interface => "keyword `interface`",
Tuple => "keyword `tuple`",
Implements => "keyword `implements`",
Import => "keyword `import`",
Export => "keyword `export`",
World => "keyword `world`",
Default => "keyword `default`",
Self_ => "keyword `self`",
Pkg => "keyword `pkg`",
Package => "keyword `package`",
Integer => "an integer",
}
}
}
Expand Down
Loading

0 comments on commit e46b179

Please sign in to comment.