fix bug with determining type of inline token
- *} was determined to be an open-type token because it matched the single-symbol pattern '*' (see the sketch below)
- also, the rules for ~/^ inline tokens were adjusted to comply with the rules for emphasis/strong markup (see the sketch after the diff)
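
As a rough illustration of the first point, here is a minimal, self-contained Go sketch (hypothetical names, not the actual djot_tokenizer API) of the opener check before and after the fix: the old rule accepted any '*' whose next byte was not a space or newline, so the explicit closer '*}' was also classified as an opener; the new rule additionally rejects a '*' that is immediately followed by '}'.

package main

import (
	"fmt"
	"unicode"
)

// Old rule (sketch): '*' opens emphasis whenever the next byte is not a space/newline.
func isOpenerOld(text []byte, i int) bool {
	if text[i] != '*' || i+1 >= len(text) {
		return false
	}
	return !unicode.IsSpace(rune(text[i+1]))
}

// New rule (sketch): additionally reject '*}', which must only act as an explicit closer.
func isOpenerNew(text []byte, i int) bool {
	return isOpenerOld(text, i) && text[i+1] != '}'
}

func main() {
	s := []byte("*}hi*")
	fmt.Println(isOpenerOld(s, 0)) // true: '*}' was wrongly accepted as an opener
	fmt.Println(isOpenerNew(s, 0)) // false: '*}' is left to the closing rule, so the text renders literally
}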
sivukhin committed Mar 26, 2024
1 parent cf23eab commit 8e9588a
Showing 3 changed files with 34 additions and 6 deletions.
8 changes: 8 additions & 0 deletions djot_parser/examples/inline-type-force.djot
@@ -0,0 +1,8 @@
*}hi*
*hi{*
_}hi_
_hi{_
~}hi~
~hi{~
^}hi^
^hi{^
8 changes: 8 additions & 0 deletions djot_parser/examples/inline-type-force.html
@@ -0,0 +1,8 @@
<p>*}hi*
*hi{*
_}hi_
_hi{_
~}hi~
~hi{~
^}hi^
^hi{^</p>
24 changes: 18 additions & 6 deletions djot_tokenizer/djot_inline_token.go
@@ -89,7 +89,7 @@ func matchInlineToken(
 		if next, ok := r.Token2(s, [...]byte{'{', '_'}); ok {
 			return next, ok
 		}
-		if next, ok := r.Token1(s, [...]byte{'_'}); ok && !r.HasMask(next, tokenizer.SpaceNewLineByteMask) {
+		if next, ok := r.Token1(s, [...]byte{'_'}); ok && !r.HasMask(next, tokenizer.SpaceNewLineByteMask) && !r.HasToken1(next, [...]byte{'}'}) {
 			return next, ok
 		}
 		return fail()
@@ -105,7 +105,7 @@ func matchInlineToken(
 		if next, ok := r.Token2(s, [...]byte{'{', '*'}); ok {
 			return next, ok
 		}
-		if next, ok := r.Token1(s, [...]byte{'*'}); ok && !r.HasMask(next, tokenizer.SpaceNewLineByteMask) {
+		if next, ok := r.Token1(s, [...]byte{'*'}); ok && !r.HasMask(next, tokenizer.SpaceNewLineByteMask) && !r.HasToken1(next, [...]byte{'}'}) {
 			return next, ok
 		}
 		return fail()
@@ -125,22 +125,34 @@ func matchInlineToken(
 		if next, ok := r.Token2(s, [...]byte{'{', '~'}); ok {
 			return next, ok
 		}
-		return r.Token1(s, [...]byte{'~'})
+		if next, ok := r.Token1(s, [...]byte{'~'}); ok && !r.HasMask(next, tokenizer.SpaceNewLineByteMask) && !r.HasToken1(next, [...]byte{'}'}) {
+			return next, ok
+		}
+		return fail()
 	case SubscriptInline ^ tokenizer.Open:
 		if next, ok := r.Token2(s, [...]byte{'~', '}'}); ok {
 			return next, ok
 		}
-		return r.Token1(s, [...]byte{'~'})
+		if next, ok := r.Token1(s, [...]byte{'~'}); ok && s > 0 && !r.HasMask(s-1, tokenizer.SpaceNewLineByteMask) {
+			return next, ok
+		}
+		return fail()
 	case SuperscriptInline:
 		if next, ok := r.Token2(s, [...]byte{'{', '^'}); ok {
 			return next, ok
 		}
-		return r.Token1(s, [...]byte{'^'})
+		if next, ok := r.Token1(s, [...]byte{'^'}); ok && !r.HasMask(next, tokenizer.SpaceNewLineByteMask) && !r.HasToken1(next, [...]byte{'}'}) {
+			return next, ok
+		}
+		return fail()
 	case SuperscriptInline ^ tokenizer.Open:
 		if next, ok := r.Token2(s, [...]byte{'^', '}'}); ok {
 			return next, ok
 		}
-		return r.Token1(s, [...]byte{'^'})
+		if next, ok := r.Token1(s, [...]byte{'^'}); ok && s > 0 && !r.HasMask(s-1, tokenizer.SpaceNewLineByteMask) {
+			return next, ok
+		}
+		return fail()
 	case InsertInline:
 		return r.Token2(s, [...]byte{'{', '+'})
 	case InsertInline ^ tokenizer.Open:
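
For the second point, a sketch of the tightened closing rule that the bare '~' and '^' cases now share with emphasis/strong markup (canCloseBare is a hypothetical helper; in the diff the actual check is r.HasMask(s-1, tokenizer.SpaceNewLineByteMask)): a bare marker may close a span only if it is preceded by a non-space, non-newline byte, while the explicit '~}' / '^}' forms are still matched first.

package main

import "fmt"

// canCloseBare reports whether a bare marker byte (such as '~' or '^') at
// position i may act as a closing delimiter under the adjusted rule: there
// must be a preceding byte and it must not be whitespace. Hypothetical helper
// for illustration only.
func canCloseBare(text []byte, i int) bool {
	if i == 0 {
		return false // nothing precedes the marker, so it cannot close a span
	}
	prev := text[i-1]
	return prev != ' ' && prev != '\t' && prev != '\n' && prev != '\r'
}

func main() {
	fmt.Println(canCloseBare([]byte("~hi ~"), 4)) // false: preceded by a space
	fmt.Println(canCloseBare([]byte("~hi~"), 3))  // true: preceded by 'i'
}

The new example files above pin down the expected behaviour for these force-open/force-close corner cases: each of the inputs should pass through as literal text in the HTML output.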
