From 3c83a054a9f2a785ef691391ae90d5b102802ddc Mon Sep 17 00:00:00 2001 From: Tony Brix Date: Tue, 8 Oct 2024 23:13:23 -0600 Subject: [PATCH 1/7] fix: escape codespan in renderer --- src/Renderer.ts | 4 ++-- src/Tokenizer.ts | 2 +- src/Tokens.ts | 1 + test/unit/Lexer.test.js | 20 ++++++++++---------- 4 files changed, 14 insertions(+), 13 deletions(-) diff --git a/src/Renderer.ts b/src/Renderer.ts index c2a2645ced..641edc1067 100644 --- a/src/Renderer.ts +++ b/src/Renderer.ts @@ -163,8 +163,8 @@ export class _Renderer { return `<em>${this.parser.parseInline(tokens)}</em>`; } - codespan({ text }: Tokens.Codespan): string { - return `<code>${text}</code>`; + codespan({ text, escaped }: Tokens.Codespan): string { + return `<code>${escaped ? text : escape(text, true)}</code>`; } br(token: Tokens.Br): string { diff --git a/src/Tokenizer.ts b/src/Tokenizer.ts index f29795e704..fc1519b70a 100644 --- a/src/Tokenizer.ts +++ b/src/Tokenizer.ts @@ -766,11 +766,11 @@ export class _Tokenizer { if (hasNonSpaceChars && hasSpaceCharsOnBothEnds) { text = text.substring(1, text.length - 1); } - text = escape(text, true); return { type: 'codespan', raw: cap[0], text, + escaped: false, }; } } diff --git a/src/Tokens.ts b/src/Tokens.ts index e5f8402e6a..e8e30ba560 100644 --- a/src/Tokens.ts +++ b/src/Tokens.ts @@ -185,6 +185,7 @@ export namespace Tokens { type: 'codespan'; raw: string; text: string; + escaped: boolean; } export interface Br { diff --git a/test/unit/Lexer.test.js b/test/unit/Lexer.test.js index 82a2d6df65..5dbb852afe 100644 --- a/test/unit/Lexer.test.js +++ b/test/unit/Lexer.test.js @@ -1425,7 +1425,7 @@ paragraph expectInlineTokens({ md: '`code`', tokens: [ - { type: 'codespan', raw: '`code`', text: 'code' }, + { type: 'codespan', raw: '`code`', text: 'code', escaped: false }, ], }); }); @@ -1434,7 +1434,7 @@ paragraph expectInlineTokens({ md: '` `', tokens: [ - { type: 'codespan', raw: '` `', text: ' ' }, + { type: 'codespan', raw: '` `', text: ' ', escaped: false }, ], }); }); @@ -1443,7 +1443,7 @@ paragraph expectInlineTokens({ md: '` a`', tokens: [ - { type: 'codespan', raw: '` a`', text: ' a' }, + { type: 'codespan', raw: '` a`', text: ' a', escaped: false }, ], }); }); @@ -1452,7 +1452,7 @@ paragraph expectInlineTokens({ md: '`a `', tokens: [ - { type: 'codespan', raw: '`a `', text: 'a ' }, + { type: 'codespan', raw: '`a `', text: 'a ', escaped: false }, ], }); }); @@ -1461,7 +1461,7 @@ paragraph expectInlineTokens({ md: '` a `', tokens: [ - { type: 'codespan', raw: '` a `', text: 'a' }, + { type: 'codespan', raw: '` a `', text: 'a', escaped: false }, ], }); }); @@ -1470,7 +1470,7 @@ paragraph expectInlineTokens({ md: '`\na\n`', tokens: [ - { type: 'codespan', raw: '`\na\n`', text: 'a' }, + { type: 'codespan', raw: '`\na\n`', text: 'a', escaped: false }, ], }); }); @@ -1479,7 +1479,7 @@ paragraph expectInlineTokens({ md: '`\ta\t`', tokens: [ - { type: 'codespan', raw: '`\ta\t`', text: '\ta\t' }, + { type: 'codespan', raw: '`\ta\t`', text: '\ta\t', escaped: false }, ], }); }); @@ -1488,7 +1488,7 @@ paragraph expectInlineTokens({ md: '`\na\n`', tokens: [ - { type: 'codespan', raw: '`\na\n`', text: 'a' }, + { type: 'codespan', raw: '`\na\n`', text: 'a', escaped: false }, ], }); }); @@ -1497,7 +1497,7 @@ paragraph expectInlineTokens({ md: '` a `', tokens: [ - { type: 'codespan', raw: '` a `', text: ' a ' }, + { type: 'codespan', raw: '` a `', text: ' a ', escaped: false }, ], }); }); @@ -1506,7 +1506,7 @@ paragraph expectInlineTokens({ md: '`a\nb`', tokens: [ - { type: 'codespan', raw: '`a\nb`', text: 'a b' }, + { type:
'codespan', raw: '`a\nb`', text: 'a b', escaped: false }, ], }); }); From ab673613d3a3e537e4c9b121297fc29662cb05b9 Mon Sep 17 00:00:00 2001 From: Tony Brix Date: Tue, 8 Oct 2024 23:31:23 -0600 Subject: [PATCH 2/7] fix: escape escape in renderer --- src/Renderer.ts | 4 +++- src/Tokenizer.ts | 3 ++- src/Tokens.ts | 1 + test/unit/Lexer.test.js | 10 +++++----- 4 files changed, 11 insertions(+), 7 deletions(-) diff --git a/src/Renderer.ts b/src/Renderer.ts index 641edc1067..28a7bd4750 100644 --- a/src/Renderer.ts +++ b/src/Renderer.ts @@ -206,6 +206,8 @@ export class _Renderer { } text(token: Tokens.Text | Tokens.Escape | Tokens.Tag) : string { - return 'tokens' in token && token.tokens ? this.parser.parseInline(token.tokens) : token.text; + return 'tokens' in token && token.tokens + ? this.parser.parseInline(token.tokens) + : ('escaped' in token && !token.escaped ? escape(token.text) : token.text); } } diff --git a/src/Tokenizer.ts b/src/Tokenizer.ts index fc1519b70a..35a57d93da 100644 --- a/src/Tokenizer.ts +++ b/src/Tokenizer.ts @@ -583,7 +583,8 @@ export class _Tokenizer { return { type: 'escape', raw: cap[0], - text: escape(cap[1]), + text: cap[1], + escaped: false, }; } } diff --git a/src/Tokens.ts b/src/Tokens.ts index e8e30ba560..1e724e7b0e 100644 --- a/src/Tokens.ts +++ b/src/Tokens.ts @@ -139,6 +139,7 @@ export namespace Tokens { type: 'escape'; raw: string; text: string; + escaped: boolean; } export interface Tag { diff --git a/test/unit/Lexer.test.js b/test/unit/Lexer.test.js index 5dbb852afe..f7d8502446 100644 --- a/test/unit/Lexer.test.js +++ b/test/unit/Lexer.test.js @@ -1214,7 +1214,7 @@ paragraph expectInlineTokens({ md: '\\>', tokens: [ - { type: 'escape', raw: '\\>', text: '>' }, + { type: 'escape', raw: '\\>', text: '>', escaped: false }, ], }); }); @@ -1229,10 +1229,10 @@ paragraph text: 'strong text\\[', tokens: [ { type: 'text', raw: 'strong text', text: 'strong text' }, - { type: 'escape', raw: '\\[', text: '[' }, + { type: 'escape', raw: '\\[', text: '[', escaped: false }, ], }, - { type: 'escape', raw: '\\]', text: ']' }, + { type: 'escape', raw: '\\]', text: ']', escaped: false }, ], }); expectInlineTokens({ md: '_em\\<pha\\>sis_', tokens: [ { type: 'em', raw: '_em\\<pha\\>sis_', text: 'em\\<pha\\>sis', tokens: [ { type: 'text', raw: 'em', text: 'em' }, - { type: 'escape', raw: '\\<', text: '<' }, + { type: 'escape', raw: '\\<', text: '<', escaped: false }, { type: 'text', raw: 'pha', text: 'pha' }, - { type: 'escape', raw: '\\>', text: '>' }, + { type: 'escape', raw: '\\>', text: '>', escaped: false }, { type: 'text', raw: 'sis', text: 'sis' }, ], }, From a2bd2c754e4e7f4171d1098ab5674949ddfa940b Mon Sep 17 00:00:00 2001 From: Tony Brix Date: Wed, 9 Oct 2024 00:40:14 -0600 Subject: [PATCH 3/7] fix: escape link and image in renderer --- src/Renderer.ts | 12 ++++++++---- src/Tokenizer.ts | 28 ++++++++++++++++------------ src/Tokens.ts | 2 ++ test/unit/Lexer.test.js | 10 ++++++++++ 4 files changed, 36 insertions(+), 16 deletions(-) diff --git a/src/Renderer.ts b/src/Renderer.ts index 28a7bd4750..ae35caccee 100644 --- a/src/Renderer.ts +++ b/src/Renderer.ts @@ -175,7 +175,7 @@ export class _Renderer { return `<del>${this.parser.parseInline(tokens)}</del>`; } - link({ href, title, tokens }: Tokens.Link): string { + link({ href, title, tokens, escaped }: Tokens.Link): string { const text = this.parser.parseInline(tokens); const cleanHref = cleanUrl(href); if (cleanHref === null) { @@ -184,13 +184,17 @@ href = cleanHref; let out = '<a href="' + href + '"'; if (title) { - out += ' title="' + title + '"'; + out += ' title="' + escape(title) + '"'; } out += '>' + text + '</a>'; return out; } - image({ href, title, text }: Tokens.Image): string { +
image({ href, title, text, escaped }: Tokens.Image): string { + if (escaped === false) { + text = escape(text); + } + const cleanHref = cleanUrl(href); if (cleanHref === null) { return text; @@ -199,7 +203,7 @@ let out = `<img src="${href}" alt="${text}"`; if (title) { - out += ` title="${title}"`; + out += ` title="${escape(title)}"`; } out += '>'; return out; } } diff --git a/src/Tokenizer.ts b/src/Tokenizer.ts index 35a57d93da..5a3f85c6ae 100644 --- a/src/Tokenizer.ts +++ b/src/Tokenizer.ts @@ -10,7 +10,7 @@ function outputLink(cap: string[], link: Pick<Tokens.Link, 'href' | 'title'>, raw: string, lexer: _Lexer): Tokens.Link | Tokens.Image { const href = link.href; - const title = link.title ? escape(link.title) : null; + const title = link.title || null; const text = cap[1].replace(/\\([\[\]])/g, '$1'); if (cap[0].charAt(0) !== '!') { @@ -23,6 +23,7 @@ function outputLink(cap: string[], link: Pick<Tokens.Link, 'href' | 'title'>, ra href, title, text, + escaped: false, tokens: lexer.inlineTokens(text), }; lexer.state.inLink = false; @@ -33,7 +34,8 @@ function outputLink(cap: string[], link: Pick<Tokens.Link, 'href' | 'title'>, ra raw, href, title, - text: escape(text), + text, + escaped: false, }; } @@ -803,23 +805,24 @@ export class _Tokenizer { if (cap) { let text, href; if (cap[2] === '@') { - text = escape(cap[1]); - href = 'mailto:' + text; + text = cap[1]; + href = 'mailto:' + escape(text); } else { - text = escape(cap[1]); - href = text; + text = cap[1]; + href = escape(text); } return { type: 'link', raw: cap[0], text, + escaped: false, href, tokens: [ { type: 'text', raw: text, - text, + text: escape(text), }, ], }; } } @@ -831,8 +834,8 @@ if (cap = this.rules.inline.url.exec(src)) { let text, href; if (cap[2] === '@') { - text = escape(cap[0]); - href = 'mailto:' + text; + text = cap[0]; + href = 'mailto:' + escape(text); } else { // do extended autolink path validation let prevCapZero; @@ -840,7 +843,7 @@ prevCapZero = cap[0]; cap[0] = this.rules.inline._backpedal.exec(cap[0])?.[0] ?? ''; } while (prevCapZero !== cap[0]); - text = escape(cap[0]); + text = cap[0]; if (cap[1] === 'www.') { href = 'http://' + cap[0]; } else { @@ -851,12 +854,13 @@ type: 'link', raw: cap[0], text, + escaped: false, href, tokens: [ { type: 'text', - raw: text, - text, + raw: escape(text), + text: escape(text), }, ], }; diff --git a/src/Tokens.ts b/src/Tokens.ts index 1e724e7b0e..8f7daf41aa 100644 --- a/src/Tokens.ts +++ b/src/Tokens.ts @@ -158,6 +158,7 @@ export namespace Tokens { title?: string | null; text: string; tokens: Token[]; + escaped: boolean; } export interface Image { @@ -166,6 +167,7 @@ export namespace Tokens { href: string; title: string | null; text: string; + escaped: boolean; } export interface Strong { diff --git a/test/unit/Lexer.test.js b/test/unit/Lexer.test.js index f7d8502446..27b29b18a4 100644 --- a/test/unit/Lexer.test.js +++ b/test/unit/Lexer.test.js @@ -1275,6 +1275,7 @@ paragraph href: 'https://example.com', title: null, text: 'link', + escaped: false, tokens: [ { type: 'text', raw: 'link', text: 'link' }, ], @@ -1293,6 +1294,7 @@ paragraph href: 'https://example.com', title: 'title', text: 'link', + escaped: false, tokens: [ { type: 'text', raw: 'link', text: 'link' }, ], @@ -1311,6 +1313,7 @@ paragraph text: 'image', href: 'https://example.com/image.png', title: null, + escaped: false, }, ], }); @@ -1326,6 +1329,7 @@ paragraph text: 'image', href: 'https://example.com/image.png', title: 'title', + escaped: false, }, ], }); @@ -1345,6 +1349,7 @@ paragraph href: 'https://example.com', title: 'title', text: 'link', + escaped: false, tokens: [{ type: 'text', raw: 'link', @@ -1368,6 +1373,7 @@ paragraph href: 'https://example.com', title: 'title', text: 'link', + escaped: false, tokens: [{ type: 'text', raw: 'link', @@ -1561,6 +1567,7 @@ paragraph raw: '<https://example.com>', text: 'https://example.com', href:
'https://example.com', + escaped: false, tokens: [ { type: 'text', raw: 'https://example.com', text: 'https://example.com' }, ], @@ -1579,6 +1586,7 @@ paragraph raw: '<test@example.com>', text: 'test@example.com', href: 'mailto:test@example.com', + escaped: false, tokens: [ { type: 'text', raw: 'test@example.com', text: 'test@example.com' }, ], @@ -1596,6 +1604,7 @@ paragraph raw: 'https://example.com', text: 'https://example.com', href: 'https://example.com', + escaped: false, tokens: [ { type: 'text', raw: 'https://example.com', text: 'https://example.com' }, ], @@ -1614,6 +1623,7 @@ paragraph raw: 'test@example.com', text: 'test@example.com', href: 'mailto:test@example.com', + escaped: false, tokens: [ { type: 'text', raw: 'test@example.com', text: 'test@example.com' }, ], From c99905f24b2551fdaa2c9173457ad0ab013238c1 Mon Sep 17 00:00:00 2001 From: Tony Brix Date: Wed, 9 Oct 2024 01:39:22 -0600 Subject: [PATCH 4/7] fix: escape text in renderer --- src/Lexer.ts | 4 + src/Tokenizer.ts | 20 ++--- src/Tokens.ts | 1 + test/unit/Lexer.test.js | 194 +++++++++++++++++++++++++++++----------- 4 files changed, 159 insertions(+), 60 deletions(-) diff --git a/src/Lexer.ts b/src/Lexer.ts index 9aa79cdbf9..43d061fc17 100644 --- a/src/Lexer.ts +++ b/src/Lexer.ts @@ -451,6 +451,10 @@ export class _Lexer { if (lastToken && lastToken.type === 'text') { lastToken.raw += token.raw; lastToken.text += token.text; + + if (!token.escaped && 'escaped' in lastToken && lastToken.escaped) { + lastToken.escaped = false; + } } else { tokens.push(token); } diff --git a/src/Tokenizer.ts b/src/Tokenizer.ts index 5a3f85c6ae..f56678d95a 100644 --- a/src/Tokenizer.ts +++ b/src/Tokenizer.ts @@ -575,6 +575,7 @@ export class _Tokenizer { raw: cap[0], text: cap[0], tokens: this.lexer.inline(cap[0]), + escaped: true, }; } } @@ -684,6 +685,7 @@ export class _Tokenizer { type: 'text', raw: text, text, + escaped: true, }; } return outputLink(cap, link, cap[0], this.lexer); @@ -822,7 +824,8 @@ export class _Tokenizer { { type: 'text', raw: text, - text: escape(text), + text, + escaped: false, }, ], }; @@ -859,8 +862,9 @@ export class _Tokenizer { tokens: [ { type: 'text', - raw: escape(text), - text: escape(text), + raw: text, + text, + escaped: false, }, ], }; @@ -870,16 +874,12 @@ export class _Tokenizer { inlineText(src: string): Tokens.Text | undefined { const cap = this.rules.inline.text.exec(src); if (cap) { - let text; - if (this.lexer.state.inRawBlock) { - text = cap[0]; - } else { - text = escape(cap[0]); - } + const escaped = this.lexer.state.inRawBlock; return { type: 'text', raw: cap[0], - text, + text: cap[0], + escaped, }; } } diff --git a/src/Tokens.ts b/src/Tokens.ts index 8f7daf41aa..538c6ac604 100644 --- a/src/Tokens.ts +++ b/src/Tokens.ts @@ -125,6 +125,7 @@ export namespace Tokens { raw: string; text: string; tokens?: Token[]; + escaped: boolean; } export interface Def { diff --git a/test/unit/Lexer.test.js b/test/unit/Lexer.test.js index 27b29b18a4..2e1c8f698e 100644 --- a/test/unit/Lexer.test.js +++ b/test/unit/Lexer.test.js @@ -35,14 +35,14 @@ describe('Lexer', () => { type: 'paragraph', raw: 'paragraph 1', text: 'paragraph 1', - tokens: [{ type: 'text', raw: 'paragraph 1', text: 'paragraph 1' }], + tokens: [{ type: 'text', raw: 'paragraph 1', text: 'paragraph 1', escaped: false }], }, { type: 'space', raw: '\n\n' }, { type: 'paragraph', raw: 'paragraph 2', text: 'paragraph 2', - tokens: [{ type: 'text', raw: 'paragraph 2', text: 'paragraph 2' }], + tokens: [{ type: 'text', raw: 'paragraph 2', text: 'paragraph 2', escaped:
false }], }, ], }); @@ -110,56 +110,56 @@ lheading 2 raw: '# heading 1\n\n', depth: 1, text: 'heading 1', - tokens: [{ type: 'text', raw: 'heading 1', text: 'heading 1' }], + tokens: [{ type: 'text', raw: 'heading 1', text: 'heading 1', escaped: false }], }, { type: 'heading', raw: '## heading 2\n\n', depth: 2, text: 'heading 2', - tokens: [{ type: 'text', raw: 'heading 2', text: 'heading 2' }], + tokens: [{ type: 'text', raw: 'heading 2', text: 'heading 2', escaped: false }], }, { type: 'heading', raw: '### heading 3\n\n', depth: 3, text: 'heading 3', - tokens: [{ type: 'text', raw: 'heading 3', text: 'heading 3' }], + tokens: [{ type: 'text', raw: 'heading 3', text: 'heading 3', escaped: false }], }, { type: 'heading', raw: '#### heading 4\n\n', depth: 4, text: 'heading 4', - tokens: [{ type: 'text', raw: 'heading 4', text: 'heading 4' }], + tokens: [{ type: 'text', raw: 'heading 4', text: 'heading 4', escaped: false }], }, { type: 'heading', raw: '##### heading 5\n\n', depth: 5, text: 'heading 5', - tokens: [{ type: 'text', raw: 'heading 5', text: 'heading 5' }], + tokens: [{ type: 'text', raw: 'heading 5', text: 'heading 5', escaped: false }], }, { type: 'heading', raw: '###### heading 6\n\n', depth: 6, text: 'heading 6', - tokens: [{ type: 'text', raw: 'heading 6', text: 'heading 6' }], + tokens: [{ type: 'text', raw: 'heading 6', text: 'heading 6', escaped: false }], }, { type: 'heading', raw: 'lheading 1\n==========\n\n', depth: 1, text: 'lheading 1', - tokens: [{ type: 'text', raw: 'lheading 1', text: 'lheading 1' }], + tokens: [{ type: 'text', raw: 'lheading 1', text: 'lheading 1', escaped: false }], }, { type: 'heading', raw: 'lheading 2\n----------\n', depth: 2, text: 'lheading 2', - tokens: [{ type: 'text', raw: 'lheading 2', text: 'lheading 2' }], + tokens: [{ type: 'text', raw: 'lheading 2', text: 'lheading 2', escaped: false }], }, ], }); @@ -172,7 +172,7 @@ lheading 2 type: 'paragraph', raw: '####### heading 7', text: '####### heading 7', - tokens: [{ type: 'text', raw: '####### heading 7', text: '####### heading 7' }], + tokens: [{ type: 'text', raw: '####### heading 7', text: '####### heading 7', escaped: false }], }], }); }); @@ -196,13 +196,13 @@ lheading 2 header: [ { text: 'a', - tokens: [{ type: 'text', raw: 'a', text: 'a' }], + tokens: [{ type: 'text', raw: 'a', text: 'a', escaped: false }], header: true, align: null, }, { text: 'b', - tokens: [{ type: 'text', raw: 'b', text: 'b' }], + tokens: [{ type: 'text', raw: 'b', text: 'b', escaped: false }], header: true, align: null, }, @@ -211,13 +211,13 @@ lheading 2 [ { text: '1', - tokens: [{ type: 'text', raw: '1', text: '1' }], + tokens: [{ type: 'text', raw: '1', text: '1', escaped: false }], header: false, align: null, }, { text: '2', - tokens: [{ type: 'text', raw: '2', text: '2' }], + tokens: [{ type: 'text', raw: '2', text: '2', escaped: false }], header: false, align: null, }, @@ -242,7 +242,7 @@ paragraph 1 type: 'paragraph', raw: 'paragraph 1\n', text: 'paragraph 1', - tokens: [{ type: 'text', raw: 'paragraph 1', text: 'paragraph 1' }], + tokens: [{ type: 'text', raw: 'paragraph 1', text: 'paragraph 1', escaped: false }], }, { type: 'table', @@ -251,13 +251,13 @@ paragraph 1 header: [ { text: 'a', - tokens: [{ type: 'text', raw: 'a', text: 'a' }], + tokens: [{ type: 'text', raw: 'a', text: 'a', escaped: false }], header: true, align: null, }, { text: 'b', - tokens: [{ type: 'text', raw: 'b', text: 'b' }], + tokens: [{ type: 'text', raw: 'b', text: 'b', escaped: false }], header: true, align: null, }, @@ 
-266,13 +266,13 @@ paragraph 1 [ { text: '1', - tokens: [{ type: 'text', raw: '1', text: '1' }], + tokens: [{ type: 'text', raw: '1', text: '1', escaped: false }], header: false, align: null, }, { text: '2', - tokens: [{ type: 'text', raw: '2', text: '2' }], + tokens: [{ type: 'text', raw: '2', text: '2', escaped: false }], header: false, align: null, }, @@ -300,19 +300,19 @@ paragraph 1 header: [ { text: 'a', - tokens: [{ type: 'text', raw: 'a', text: 'a' }], + tokens: [{ type: 'text', raw: 'a', text: 'a', escaped: false }], header: true, align: 'left', }, { text: 'b', - tokens: [{ type: 'text', raw: 'b', text: 'b' }], + tokens: [{ type: 'text', raw: 'b', text: 'b', escaped: false }], header: true, align: 'center', }, { text: 'c', - tokens: [{ type: 'text', raw: 'c', text: 'c' }], + tokens: [{ type: 'text', raw: 'c', text: 'c', escaped: false }], header: true, align: 'right', }, @@ -321,19 +321,19 @@ paragraph 1 [ { text: '1', - tokens: [{ type: 'text', raw: '1', text: '1' }], + tokens: [{ type: 'text', raw: '1', text: '1', escaped: false }], header: false, align: 'left', }, { text: '2', - tokens: [{ type: 'text', raw: '2', text: '2' }], + tokens: [{ type: 'text', raw: '2', text: '2', escaped: false }], header: false, align: 'center', }, { text: '3', - tokens: [{ type: 'text', raw: '3', text: '3' }], + tokens: [{ type: 'text', raw: '3', text: '3', escaped: false }], header: false, align: 'right', }, @@ -361,13 +361,13 @@ a | b header: [ { text: 'a', - tokens: [{ type: 'text', raw: 'a', text: 'a' }], + tokens: [{ type: 'text', raw: 'a', text: 'a', escaped: false }], header: true, align: null, }, { text: 'b', - tokens: [{ type: 'text', raw: 'b', text: 'b' }], + tokens: [{ type: 'text', raw: 'b', text: 'b', escaped: false }], header: true, align: null, }, @@ -376,13 +376,13 @@ a | b [ { text: '1', - tokens: [{ type: 'text', raw: '1', text: '1' }], + tokens: [{ type: 'text', raw: '1', text: '1', escaped: false }], header: false, align: null, }, { text: '2', - tokens: [{ type: 'text', raw: '2', text: '2' }], + tokens: [{ type: 'text', raw: '2', text: '2', escaped: false }], header: false, align: null, }, @@ -418,7 +418,7 @@ a | b raw: 'blockquote', text: 'blockquote', tokens: [ - { type: 'text', raw: 'blockquote', text: 'blockquote' }, + { type: 'text', raw: 'blockquote', text: 'blockquote', escaped: false }, ], }], }, @@ -439,7 +439,7 @@ a | b raw: 'blockquote', text: 'blockquote', tokens: [ - { type: 'text', raw: 'blockquote', text: 'blockquote' }, + { type: 'text', raw: 'blockquote', text: 'blockquote', escaped: false }, ], }], }, @@ -475,7 +475,7 @@ a | b raw: 'blockquote', text: 'blockquote', tokens: [ - { type: 'text', raw: 'blockquote', text: 'blockquote' }, + { type: 'text', raw: 'blockquote', text: 'blockquote', escaped: false }, ], }, ], @@ -519,7 +519,8 @@ a | b type: 'text', raw: 'item 1', text: 'item 1', - tokens: [{ type: 'text', raw: 'item 1', text: 'item 1' }], + escaped: true, + tokens: [{ type: 'text', raw: 'item 1', text: 'item 1', escaped: false }], }], }, { @@ -533,7 +534,8 @@ a | b type: 'text', raw: 'item 2', text: 'item 2', - tokens: [{ type: 'text', raw: 'item 2', text: 'item 2' }], + escaped: true, + tokens: [{ type: 'text', raw: 'item 2', text: 'item 2', escaped: false }], }], }, ], @@ -572,11 +574,13 @@ a | b type: 'text', raw: 'item 1', text: 'item 1', + escaped: true, tokens: [ { type: 'text', raw: 'item 1', text: 'item 1', + escaped: false, }, ], }, @@ -594,11 +598,13 @@ a | b type: 'text', raw: 'item 2', text: 'item 2', + escaped: true, tokens: [ { type: 'text', 
raw: 'item 2', text: 'item 2', + escaped: false, }, ], }, @@ -640,11 +646,13 @@ a | b type: 'text', raw: 'item 1', text: 'item 1', + escaped: true, tokens: [ { type: 'text', raw: 'item 1', text: 'item 1', + escaped: false, }, ], }, @@ -662,11 +670,13 @@ a | b type: 'text', raw: 'item 2', text: 'item 2', + escaped: true, tokens: [ { type: 'text', raw: 'item 2', text: 'item 2', + escaped: false, }, ], }, @@ -710,11 +720,13 @@ paragraph type: 'text', raw: 'item 1', text: 'item 1', + escaped: true, tokens: [ { type: 'text', raw: 'item 1', text: 'item 1', + escaped: false, }, ], }, @@ -732,11 +744,13 @@ paragraph type: 'text', raw: 'item 2', text: 'item 2', + escaped: true, tokens: [ { type: 'text', raw: 'item 2', text: 'item 2', + escaped: false, }, ], }, @@ -757,6 +771,7 @@ paragraph type: 'text', raw: 'paragraph', text: 'paragraph', + escaped: false, }, ], }, @@ -794,11 +809,13 @@ paragraph type: 'text', raw: 'item 1', text: 'item 1', + escaped: true, tokens: [ { type: 'text', raw: 'item 1', text: 'item 1', + escaped: false, }, ], }, @@ -816,11 +833,13 @@ paragraph type: 'text', raw: 'item 2', text: 'item 2', + escaped: true, tokens: [ { type: 'text', raw: 'item 2', text: 'item 2', + escaped: false, }, ], }, @@ -863,11 +882,13 @@ paragraph type: 'text', raw: 'item 1\n', text: 'item 1', + escaped: true, tokens: [ { type: 'text', raw: 'item 1', text: 'item 1', + escaped: false, }, ], }, @@ -885,11 +906,13 @@ paragraph type: 'text', raw: 'item 2', text: 'item 2', + escaped: true, tokens: [ { type: 'text', raw: 'item 2', text: 'item 2', + escaped: false, }, ], }, @@ -934,11 +957,13 @@ paragraph type: 'text', raw: 'item 1', text: 'item 1', + escaped: true, tokens: [ { type: 'text', raw: 'item 1', text: 'item 1', + escaped: false, }, ], }, @@ -956,11 +981,13 @@ paragraph type: 'text', raw: 'item 2', text: 'item 2', + escaped: true, tokens: [ { type: 'text', raw: 'item 2', text: 'item 2', + escaped: false, }, ], }, @@ -972,11 +999,13 @@ paragraph type: 'text', raw: 'item 2a', text: 'item 2a', + escaped: true, tokens: [ { type: 'text', raw: 'item 2a', text: 'item 2a', + escaped: false, }, ], }, @@ -994,11 +1023,13 @@ paragraph type: 'text', raw: 'item 3', text: 'item 3', + escaped: true, tokens: [ { type: 'text', raw: 'item 3', text: 'item 3', + escaped: false, }, ], }, @@ -1040,11 +1071,13 @@ paragraph type: 'text', raw: 'item 1\n', text: 'item 1', + escaped: true, tokens: [ { type: 'text', raw: 'item 1', text: 'item 1', + escaped: false, }, ], }, @@ -1067,11 +1100,13 @@ paragraph type: 'text', raw: 'item 2', text: 'item 2', + escaped: true, tokens: [ { type: 'text', raw: 'item 2', text: 'item 2', + escaped: false, }, ], }, @@ -1117,11 +1152,13 @@ paragraph type: 'text', raw: 'item 1', text: 'item 1', + escaped: true, tokens: [ { type: 'text', raw: 'item 1', text: 'item 1', + escaped: false, }, ], }, @@ -1139,11 +1176,13 @@ paragraph type: 'text', raw: 'item 2', text: 'item 2', + escaped: true, tokens: [ { type: 'text', raw: 'item 2', text: 'item 2', + escaped: false, }, ], }, @@ -1228,7 +1267,7 @@ paragraph raw: '**strong text\\[**', text: 'strong text\\[', tokens: [ - { type: 'text', raw: 'strong text', text: 'strong text' }, + { type: 'text', raw: 'strong text', text: 'strong text', escaped: false }, { type: 'escape', raw: '\\[', text: '[', escaped: false }, ], }, @@ -1243,11 +1282,11 @@ paragraph raw: '_em\\sis_', text: 'em\\sis', tokens: [ - { type: 'text', raw: 'em', text: 'em' }, + { type: 'text', raw: 'em', text: 'em', escaped: false }, { type: 'escape', raw: '\\<', text: '<', escaped: 
false }, - { type: 'text', raw: 'pha', text: 'pha' }, + { type: 'text', raw: 'pha', text: 'pha', escaped: false }, { type: 'escape', raw: '\\>', text: '>', escaped: false }, - { type: 'text', raw: 'sis', text: 'sis' }, + { type: 'text', raw: 'sis', text: 'sis', escaped: false }, ], }, ], @@ -1259,7 +1298,7 @@ paragraph md: '
<div>html</div>', tokens: [ { type: 'html', raw: '<div>', inLink: false, inRawBlock: false, block: false, text: '<div>' }, - { type: 'text', raw: 'html', text: 'html' }, + { type: 'text', raw: 'html', text: 'html', escaped: false }, { type: 'html', raw: '</div>', inLink: false, inRawBlock: false, block: false, text: '</div>
' }, ], }); @@ -1277,7 +1316,12 @@ paragraph text: 'link', escaped: false, tokens: [ - { type: 'text', raw: 'link', text: 'link' }, + { + type: 'text', + raw: 'link', + text: 'link', + escaped: false, + }, ], }, ], @@ -1296,7 +1340,12 @@ paragraph text: 'link', escaped: false, tokens: [ - { type: 'text', raw: 'link', text: 'link' }, + { + type: 'text', + raw: 'link', + text: 'link', + escaped: false, + }, ], }, ], @@ -1354,6 +1403,7 @@ paragraph type: 'text', raw: 'link', text: 'link', + escaped: false, }], }, ], @@ -1378,6 +1428,7 @@ paragraph type: 'text', raw: 'link', text: 'link', + escaped: false, }], }, ], @@ -1388,7 +1439,12 @@ paragraph expectInlineTokens({ md: '[link]', tokens: [ - { type: 'text', raw: '[link]', text: '[link]' }, + { + type: 'text', + raw: '[link]', + text: '[link]', + escaped: false, + }, ], }); }); @@ -1403,7 +1459,12 @@ paragraph raw: '**strong**', text: 'strong', tokens: [ - { type: 'text', raw: 'strong', text: 'strong' }, + { + type: 'text', + raw: 'strong', + text: 'strong', + escaped: false, + }, ], }, ], @@ -1419,7 +1480,12 @@ paragraph raw: '*em*', text: 'em', tokens: [ - { type: 'text', raw: 'em', text: 'em' }, + { + type: 'text', + raw: 'em', + text: 'em', + escaped: false, + }, ], }, ], @@ -1527,6 +1593,7 @@ paragraph raw: 'a', text: 'a', type: 'text', + escaped: false, }, { raw: '\n', @@ -1536,6 +1603,7 @@ paragraph raw: 'b', text: 'b', type: 'text', + escaped: false, }, ], }); @@ -1550,7 +1618,12 @@ paragraph raw: '~~del~~', text: 'del', tokens: [ - { type: 'text', raw: 'del', text: 'del' }, + { + type: 'text', + raw: 'del', + text: 'del', + escaped: false, + }, ], }, ], @@ -1569,7 +1642,12 @@ paragraph href: 'https://example.com', escaped: false, tokens: [ - { type: 'text', raw: 'https://example.com', text: 'https://example.com' }, + { + type: 'text', + raw: 'https://example.com', + text: 'https://example.com', + escaped: false, + }, ], }, ], @@ -1588,7 +1666,12 @@ paragraph href: 'mailto:test@example.com', escaped: false, tokens: [ - { type: 'text', raw: 'test@example.com', text: 'test@example.com' }, + { + type: 'text', + raw: 'test@example.com', + text: 'test@example.com', + escaped: false, + }, ], }, ], @@ -1606,7 +1689,12 @@ paragraph href: 'https://example.com', escaped: false, tokens: [ - { type: 'text', raw: 'https://example.com', text: 'https://example.com' }, + { + type: 'text', + raw: 'https://example.com', + text: 'https://example.com', + escaped: false, + }, ], }, ], @@ -1625,7 +1713,12 @@ paragraph href: 'mailto:test@example.com', escaped: false, tokens: [ - { type: 'text', raw: 'test@example.com', text: 'test@example.com' }, + { + type: 'text', + raw: 'test@example.com', + text: 'test@example.com', + escaped: false, + }, ], }, ], @@ -1641,6 +1734,7 @@ paragraph type: 'text', raw: 'text', text: 'text', + escaped: false, }, ], }); From 7a9df5a34a6562f7526f0ce5d09efb9958a38231 Mon Sep 17 00:00:00 2001 From: Tony Brix Date: Wed, 9 Oct 2024 01:48:41 -0600 Subject: [PATCH 5/7] remove escape from tokenizer --- src/Tokenizer.ts | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/Tokenizer.ts b/src/Tokenizer.ts index f56678d95a..b0459fe665 100644 --- a/src/Tokenizer.ts +++ b/src/Tokenizer.ts @@ -2,7 +2,6 @@ import { _defaults } from './defaults.ts'; import { rtrim, splitCells, - escape, findClosingBracket, } from './helpers.ts'; import type { Rules } from './rules.ts'; @@ -808,10 +807,10 @@ export class _Tokenizer { let text, href; if (cap[2] === '@') { text = cap[1]; - href = 'mailto:' + escape(text); + href = 
'mailto:' + text; } else { text = cap[1]; - href = escape(text); + href = text; } return { @@ -838,7 +837,7 @@ export class _Tokenizer { let text, href; if (cap[2] === '@') { text = cap[0]; - href = 'mailto:' + escape(text); + href = 'mailto:' + text; } else { // do extended autolink path validation From 8b3ebfa87c58cbaa2c457f7967ed7e3e8ae8f7a3 Mon Sep 17 00:00:00 2001 From: Tony Brix Date: Sun, 20 Oct 2024 20:31:02 -0600 Subject: [PATCH 6/7] remove unnecessary escaped properties --- src/Lexer.ts | 7 +---- src/Parser.ts | 4 +-- src/Renderer.ts | 26 ++++++++--------- src/Tokenizer.ts | 10 ------- src/Tokens.ts | 8 ++--- test/unit/Lexer.test.js | 65 ++++++++++------------------------------- 6 files changed, 32 insertions(+), 88 deletions(-) diff --git a/src/Lexer.ts b/src/Lexer.ts index 43d061fc17..62ff2237c4 100644 --- a/src/Lexer.ts +++ b/src/Lexer.ts @@ -355,12 +355,7 @@ export class _Lexer { if (token = this.tokenizer.tag(src)) { src = src.substring(token.raw.length); lastToken = tokens[tokens.length - 1]; - if (lastToken && token.type === 'text' && lastToken.type === 'text') { - lastToken.raw += token.raw; - lastToken.text += token.text; - } else { - tokens.push(token); - } + tokens.push(token); continue; } diff --git a/src/Parser.ts b/src/Parser.ts index 8a6716196d..ebaaa2113c 100644 --- a/src/Parser.ts +++ b/src/Parser.ts @@ -98,7 +98,7 @@ export class _Parser { let textToken = token; let body = this.renderer.text(textToken); while (i + 1 < tokens.length && tokens[i + 1].type === 'text') { - textToken = tokens[++i] as Tokens.Text | Tokens.Tag; + textToken = tokens[++i] as Tokens.Text; body += '\n' + this.renderer.text(textToken); } if (top) { @@ -106,7 +106,7 @@ export class _Parser { type: 'paragraph', raw: body, text: body, - tokens: [{ type: 'text', raw: body, text: body }], + tokens: [{ type: 'text', raw: body, text: body, escaped: true }], }); } else { out += body; } diff --git a/src/Renderer.ts b/src/Renderer.ts index ae35caccee..8d11f19c42 100644 --- a/src/Renderer.ts +++ b/src/Renderer.ts @@ -79,13 +79,15 @@ export class _Renderer { if (item.tokens.length > 0 && item.tokens[0].type === 'paragraph') { item.tokens[0].text = checkbox + ' ' + item.tokens[0].text; if (item.tokens[0].tokens && item.tokens[0].tokens.length > 0 && item.tokens[0].tokens[0].type === 'text') { - item.tokens[0].tokens[0].text = checkbox + ' ' + item.tokens[0].tokens[0].text; + item.tokens[0].tokens[0].text = checkbox + ' ' + escape(item.tokens[0].tokens[0].text); + item.tokens[0].tokens[0].escaped = true; } } else { item.tokens.unshift({ type: 'text', raw: checkbox + ' ', text: checkbox + ' ', + escaped: true, }); } } else { @@ -163,8 +165,8 @@ export class _Renderer { return `<em>${this.parser.parseInline(tokens)}</em>`; } - codespan({ text, escaped }: Tokens.Codespan): string { - return `<code>${escaped ? text : escape(text, true)}</code>`; + codespan({ text }: Tokens.Codespan): string { + return `<code>${escape(text, true)}</code>`; } br(token: Tokens.Br): string { @@ -175,7 +177,7 @@ return `<del>${this.parser.parseInline(tokens)}</del>`; } - link({ href, title, tokens, escaped }: Tokens.Link): string { + link({ href, title, tokens }: Tokens.Link): string { const text = this.parser.parseInline(tokens); const cleanHref = cleanUrl(href); if (cleanHref === null) { @@ -184,34 +186,30 @@ href = cleanHref; let out = '<a href="' + href + '"'; if (title) { out += ' title="' + escape(title) + '"'; } out += '>' + text + '</a>
'; return out; } - image({ href, title, text, escaped }: Tokens.Image): string { - if (escaped === false) { - text = escape(text); - } - + image({ href, title, text }: Tokens.Image): string { const cleanHref = cleanUrl(href); if (cleanHref === null) { - return text; + return escape(text); } href = cleanHref; let out = `<img src="${href}" alt="${text}"`; if (title) { out += ` title="${escape(title)}"`; } out += '>'; return out; } } diff --git a/src/Tokenizer.ts b/src/Tokenizer.ts --- a/src/Tokenizer.ts +++ b/src/Tokenizer.ts @@ -22,7 +21,6 @@ function outputLink(cap: string[], link: Pick<Tokens.Link, 'href' | 'title'>, ra href, title, text, - escaped: false, tokens: lexer.inlineTokens(text), }; lexer.state.inLink = false; @@ -34,7 +33,6 @@ function outputLink(cap: string[], link: Pick<Tokens.Link, 'href' | 'title'>, ra raw, href, title, - text, + text: escape(text), - escaped: false, }; } @@ -574,7 +572,6 @@ export class _Tokenizer { raw: cap[0], text: cap[0], tokens: this.lexer.inline(cap[0]), - escaped: true, }; } } @@ -586,7 +583,6 @@ type: 'escape', raw: cap[0], text: cap[1], - escaped: false, }; } } @@ -684,7 +680,6 @@ type: 'text', raw: text, text, - escaped: true, }; } return outputLink(cap, link, cap[0], this.lexer); @@ -774,7 +769,6 @@ type: 'codespan', raw: cap[0], text, - escaped: false, }; } } @@ -817,14 +811,12 @@ type: 'link', raw: cap[0], text, - escaped: false, href, tokens: [ { type: 'text', raw: text, text, - escaped: false, }, ], }; @@ -856,14 +848,12 @@ type: 'link', raw: cap[0], text, - escaped: false, href, tokens: [ { type: 'text', raw: text, text, - escaped: false, }, ], }; diff --git a/src/Tokens.ts b/src/Tokens.ts index 538c6ac604..0e8bf346f3 100644 --- a/src/Tokens.ts +++ b/src/Tokens.ts @@ -125,7 +125,7 @@ export namespace Tokens { raw: string; text: string; tokens?: Token[]; - escaped: boolean; + escaped?: boolean; } export interface Def { @@ -140,11 +140,10 @@ export namespace Tokens { type: 'escape'; raw: string; text: string; - escaped: boolean; } export interface Tag { - type: 'text' | 'html'; + type: 'html'; raw: string; inLink: boolean; inRawBlock: boolean; @@ -159,7 +158,6 @@ export namespace Tokens { title?: string | null; text: string; tokens: Token[]; - escaped: boolean; } export interface Image { @@ -168,7 +166,6 @@ export namespace Tokens { href: string; title: string | null; text: string; - escaped: boolean; } export interface Strong { @@ -189,7 +186,6 @@ export namespace Tokens { type: 'codespan'; raw: string; text: string; - escaped: boolean; } export interface Br { diff --git a/test/unit/Lexer.test.js b/test/unit/Lexer.test.js index 2e1c8f698e..a1dee78081 100644 --- a/test/unit/Lexer.test.js +++ b/test/unit/Lexer.test.js @@ -519,7 +519,6 @@ a | b type: 'text', raw: 'item 1', text: 'item 1', - escaped: true, tokens: [{ type: 'text', raw: 'item 1', text: 'item 1', escaped: false }], }], }, @@ -534,7 +533,6 @@ a | b type: 'text', raw: 'item 2', text: 'item 2', - escaped: true, tokens: [{ type: 'text', raw: 'item 2', text: 'item 2', escaped: false }], }], }, @@ -574,7 +572,6 @@ a | b type: 'text', raw: 'item 1', text: 'item 1', - escaped: true, tokens: [ { type:
'text', raw: 'item 1', text: 'item 1', - escaped: true, tokens: [ { type: 'text', @@ -833,7 +824,6 @@ paragraph type: 'text', raw: 'item 2', text: 'item 2', - escaped: true, tokens: [ { type: 'text', @@ -882,7 +872,6 @@ paragraph type: 'text', raw: 'item 1\n', text: 'item 1', - escaped: true, tokens: [ { type: 'text', @@ -906,7 +895,6 @@ paragraph type: 'text', raw: 'item 2', text: 'item 2', - escaped: true, tokens: [ { type: 'text', @@ -957,7 +945,6 @@ paragraph type: 'text', raw: 'item 1', text: 'item 1', - escaped: true, tokens: [ { type: 'text', @@ -981,7 +968,6 @@ paragraph type: 'text', raw: 'item 2', text: 'item 2', - escaped: true, tokens: [ { type: 'text', @@ -999,7 +985,6 @@ paragraph type: 'text', raw: 'item 2a', text: 'item 2a', - escaped: true, tokens: [ { type: 'text', @@ -1023,7 +1008,6 @@ paragraph type: 'text', raw: 'item 3', text: 'item 3', - escaped: true, tokens: [ { type: 'text', @@ -1071,7 +1055,6 @@ paragraph type: 'text', raw: 'item 1\n', text: 'item 1', - escaped: true, tokens: [ { type: 'text', @@ -1100,7 +1083,6 @@ paragraph type: 'text', raw: 'item 2', text: 'item 2', - escaped: true, tokens: [ { type: 'text', @@ -1152,7 +1134,6 @@ paragraph type: 'text', raw: 'item 1', text: 'item 1', - escaped: true, tokens: [ { type: 'text', @@ -1176,7 +1157,6 @@ paragraph type: 'text', raw: 'item 2', text: 'item 2', - escaped: true, tokens: [ { type: 'text', @@ -1253,7 +1233,7 @@ paragraph expectInlineTokens({ md: '\\>', tokens: [ - { type: 'escape', raw: '\\>', text: '>', escaped: false }, + { type: 'escape', raw: '\\>', text: '>' }, ], }); }); @@ -1268,10 +1248,10 @@ paragraph text: 'strong text\\[', tokens: [ { type: 'text', raw: 'strong text', text: 'strong text', escaped: false }, - { type: 'escape', raw: '\\[', text: '[', escaped: false }, + { type: 'escape', raw: '\\[', text: '[' }, ], }, - { type: 'escape', raw: '\\]', text: ']', escaped: false }, + { type: 'escape', raw: '\\]', text: ']' }, ], }); expectInlineTokens({ @@ -1283,9 +1263,9 @@ paragraph text: 'em\\sis', tokens: [ { type: 'text', raw: 'em', text: 'em', escaped: false }, - { type: 'escape', raw: '\\<', text: '<', escaped: false }, + { type: 'escape', raw: '\\<', text: '<' }, { type: 'text', raw: 'pha', text: 'pha', escaped: false }, - { type: 'escape', raw: '\\>', text: '>', escaped: false }, + { type: 'escape', raw: '\\>', text: '>' }, { type: 'text', raw: 'sis', text: 'sis', escaped: false }, ], }, @@ -1314,7 +1294,6 @@ paragraph href: 'https://example.com', title: null, text: 'link', - escaped: false, tokens: [ { type: 'text', @@ -1338,7 +1317,6 @@ paragraph href: 'https://example.com', title: 'title', text: 'link', - escaped: false, tokens: [ { type: 'text', @@ -1362,7 +1340,6 @@ paragraph text: 'image', href: 'https://example.com/image.png', title: null, - escaped: false, }, ], }); @@ -1378,7 +1355,6 @@ paragraph text: 'image', href: 'https://example.com/image.png', title: 'title', - escaped: false, }, ], }); @@ -1398,7 +1374,6 @@ paragraph href: 'https://example.com', title: 'title', text: 'link', - escaped: false, tokens: [{ type: 'text', raw: 'link', @@ -1423,7 +1398,6 @@ paragraph href: 'https://example.com', title: 'title', text: 'link', - escaped: false, tokens: [{ type: 'text', raw: 'link', @@ -1443,7 +1417,6 @@ paragraph type: 'text', raw: '[link]', text: '[link]', - escaped: false, }, ], }); @@ -1497,7 +1470,7 @@ paragraph expectInlineTokens({ md: '`code`', tokens: [ - { type: 'codespan', raw: '`code`', text: 'code', escaped: false }, + { type: 'codespan', raw: '`code`', text: 'code' }, ], 
}); }); @@ -1506,7 +1479,7 @@ paragraph expectInlineTokens({ md: '` `', tokens: [ - { type: 'codespan', raw: '` `', text: ' ', escaped: false }, + { type: 'codespan', raw: '` `', text: ' ' }, ], }); }); @@ -1515,7 +1488,7 @@ paragraph expectInlineTokens({ md: '` a`', tokens: [ - { type: 'codespan', raw: '` a`', text: ' a', escaped: false }, + { type: 'codespan', raw: '` a`', text: ' a' }, ], }); }); @@ -1524,7 +1497,7 @@ paragraph expectInlineTokens({ md: '`a `', tokens: [ - { type: 'codespan', raw: '`a `', text: 'a ', escaped: false }, + { type: 'codespan', raw: '`a `', text: 'a ' }, ], }); }); @@ -1533,7 +1506,7 @@ paragraph expectInlineTokens({ md: '` a `', tokens: [ - { type: 'codespan', raw: '` a `', text: 'a', escaped: false }, + { type: 'codespan', raw: '` a `', text: 'a' }, ], }); }); @@ -1542,7 +1515,7 @@ paragraph expectInlineTokens({ md: '`\na\n`', tokens: [ - { type: 'codespan', raw: '`\na\n`', text: 'a', escaped: false }, + { type: 'codespan', raw: '`\na\n`', text: 'a' }, ], }); }); @@ -1551,7 +1524,7 @@ paragraph expectInlineTokens({ md: '`\ta\t`', tokens: [ - { type: 'codespan', raw: '`\ta\t`', text: '\ta\t', escaped: false }, + { type: 'codespan', raw: '`\ta\t`', text: '\ta\t' }, ], }); }); @@ -1560,7 +1533,7 @@ paragraph expectInlineTokens({ md: '`\na\n`', tokens: [ - { type: 'codespan', raw: '`\na\n`', text: 'a', escaped: false }, + { type: 'codespan', raw: '`\na\n`', text: 'a' }, ], }); }); @@ -1569,7 +1542,7 @@ paragraph expectInlineTokens({ md: '` a `', tokens: [ - { type: 'codespan', raw: '` a `', text: ' a ', escaped: false }, + { type: 'codespan', raw: '` a `', text: ' a ' }, ], }); }); @@ -1578,7 +1551,7 @@ paragraph expectInlineTokens({ md: '`a\nb`', tokens: [ - { type: 'codespan', raw: '`a\nb`', text: 'a b', escaped: false }, + { type: 'codespan', raw: '`a\nb`', text: 'a b' }, ], }); }); @@ -1640,13 +1613,11 @@ paragraph raw: '<https://example.com>', text: 'https://example.com', href: 'https://example.com', - escaped: false, tokens: [ { type: 'text', raw: 'https://example.com', text: 'https://example.com', - escaped: false, }, ], }, @@ -1664,13 +1635,11 @@ paragraph raw: '<test@example.com>', text: 'test@example.com', href: 'mailto:test@example.com', - escaped: false, tokens: [ { type: 'text', raw: 'test@example.com', text: 'test@example.com', - escaped: false, }, ], }, @@ -1687,13 +1656,11 @@ paragraph raw: 'https://example.com', text: 'https://example.com', href: 'https://example.com', - escaped: false, tokens: [ { type: 'text', raw: 'https://example.com', text: 'https://example.com', - escaped: false, }, ], }, @@ -1711,13 +1678,11 @@ paragraph raw: 'test@example.com', text: 'test@example.com', href: 'mailto:test@example.com', - escaped: false, tokens: [ { type: 'text', raw: 'test@example.com', text: 'test@example.com', - escaped: false, }, ], }, From 9e565dc2f3a73c2cc5edcb4842cc313a96fa94a2 Mon Sep 17 00:00:00 2001 From: Tony Brix Date: Sun, 20 Oct 2024 23:41:30 -0600 Subject: [PATCH 7/7] remove lasttoken.escaped in lexer --- src/Lexer.ts | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/Lexer.ts b/src/Lexer.ts index 62ff2237c4..bed5886c6b 100644 --- a/src/Lexer.ts +++ b/src/Lexer.ts @@ -446,10 +446,6 @@ export class _Lexer { if (lastToken && lastToken.type === 'text') { lastToken.raw += token.raw; lastToken.text += token.text; - - if (!token.escaped && 'escaped' in lastToken && lastToken.escaped) { - lastToken.escaped = false; - } } else { tokens.push(token); }
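
Editorial note: taken together, the series moves HTML escaping out of the tokenizer and into the renderer. After PATCH 1 a codespan token's text field carries raw span source rather than pre-escaped HTML; the same then happens for escape, link, image, and plain text tokens. A minimal sketch of the observable token-shape change, assuming marked's exported Lexer.lexInline helper (the before/after strings below are illustrative, not taken from the patches):

import { Lexer } from 'marked';

// Inspect the inline tokens for a code span containing an HTML-significant
// character. With this series applied, `text` is the raw span source.
const [codespan] = Lexer.lexInline('`a < b`');

console.log(codespan);
// before the series: { type: 'codespan', raw: '`a < b`', text: 'a &lt; b' }
// after the series:  { type: 'codespan', raw: '`a < b`', text: 'a < b' }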
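One practical consequence, sketched here rather than quoted from the library's documented contract: an application that overrides renderer.text now receives unescaped text plus the escaped flag (kept only on text tokens after PATCH 6), so the override must escape on its own. escapeHtml below is a hypothetical local stand-in for marked's unexported escape() helper in src/helpers.ts:

import { marked, type Tokens } from 'marked';

// Hypothetical local helper; marked's own escape() is not part of the
// public API, so a custom renderer has to bring its own.
const escapeHtml = (s: string): string =>
  s.replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;');

marked.use({
  renderer: {
    text(token: Tokens.Text | Tokens.Escape): string {
      // Mirror the upstream logic: tokens flagged as already escaped
      // (e.g. inside raw HTML blocks) pass through untouched; everything
      // else is escaped here, not in the lexer.
      if ('tokens' in token && token.tokens) {
        return this.parser.parseInline(token.tokens);
      }
      return 'escaped' in token && token.escaped ? token.text : escapeHtml(token.text);
    },
  },
});

console.log(marked.parse('1 < 2')); // expected output: <p>1 &lt; 2</p>

The same caution applies to codespan, link, and image overrides while the intermediate patches are in flight; by PATCH 6 those token types drop the flag again and the default renderer escapes unconditionally.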