Skip to content

Commit

Permalink
Merge pull request sass#1515 from mgreter/bugfix/ruleset-line-reporting
Browse files Browse the repository at this point in the history
Add option to lexer to force parserState update
  • Loading branch information
mgreter committed Sep 3, 2015
2 parents e12f974 + bd8b50a commit 412826c
Show file tree
Hide file tree
Showing 2 changed files with 10 additions and 5 deletions.
2 changes: 2 additions & 0 deletions src/parser.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -561,6 +561,8 @@ namespace Sass {
// a ruleset connects a selector and a block
Ruleset* Parser::parse_ruleset(Lookahead lookahead)
{
// make sure to move up to the last position
lex < optional_css_whitespace >(false, true);
// create the connector object (add parts later)
Ruleset* ruleset = SASS_MEMORY_NEW(ctx.mem, Ruleset, pstate);
// parse selector static or as schema to be evaluated later
Expand Down
13 changes: 8 additions & 5 deletions src/parser.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,7 @@ namespace Sass {
// sourcemap offset and we modify the position pointer!
// lex will only skip over space, tabs and line comment
template <Prelexer::prelexer mx>
const char* lex(bool lazy = true)
const char* lex(bool lazy = true, bool force = false)
{

// position considered before lexed token
Expand All @@ -136,10 +136,13 @@ namespace Sass {
// now call matcher to get position after token
const char* it_after_token = mx(it_before_token);

// assertion that we got a valid match
if (it_after_token == 0) return 0;
// assertion that we actually lexed something
if (it_after_token == it_before_token) return 0;
// maybe we want to update the parser state anyway?
if (force == false) {
// assertion that we got a valid match
if (it_after_token == 0) return 0;
// assertion that we actually lexed something
if (it_after_token == it_before_token) return 0;
}

// create new lexed token object (holds the parse results)
lexed = Token(position, it_before_token, it_after_token);
Expand Down

0 comments on commit 412826c

Please sign in to comment.