feat: better matching tolerance
antfu committed Feb 23, 2024
1 parent 160b766 commit 7e9868d
Showing 2 changed files with 175 additions and 54 deletions.
src/core.ts (36 changes: 27 additions & 9 deletions)
@@ -119,15 +119,33 @@ export function syncTokenKeys(
   // In the matched parts, we override the keys with the same key so that the transition group can know they are the same element
   const matches = findTextMatches(from.code, to.code)
   matches.forEach((match) => {
-    const rangeFrom = from.tokens.filter(t => t.offset >= match.from[0] && t.offset + t.content.length <= match.from[1] && !isWhitespace(t.content))
-    const rangeTo = to.tokens.filter(t => t.offset >= match.to[0] && t.offset + t.content.length <= match.to[1] && !isWhitespace(t.content))
-
-    rangeTo.forEach((token, i) => {
-      if (token.content === rangeFrom[i]?.content)
-        token.key = rangeFrom[i].key
-      else
-        console.warn('[shiki-magic-move] Mismatched token content', rangeFrom[i], token)
-    })
+    const tokensF = from.tokens.filter(t => t.offset >= match.from[0] && t.offset + t.content.length <= match.from[1] && !isWhitespace(t.content))
+    const tokensT = to.tokens.filter(t => t.offset >= match.to[0] && t.offset + t.content.length <= match.to[1] && !isWhitespace(t.content))
+
+    let idxF = 0
+    let idxT = 0
+    while (idxF < tokensF.length && idxT < tokensT.length) {
+      if (!tokensF[idxF] || !tokensT[idxT])
+        break
+      if (tokensF[idxF].content === tokensT[idxT].content) {
+        tokensT[idxT].key = tokensF[idxF].key
+        idxF++
+        idxT++
+      }
+      else if (tokensF[idxF + 1]?.content === tokensT[idxT].content) {
+        // console.log('Token missing match', tokensF[idxF], undefined)
+        idxF++
+      }
+      else if (tokensF[idxF].content === tokensT[idxT + 1]?.content) {
+        // console.log('Token missing match', undefined, tokensT[idxT])
+        idxT++
+      }
+      else {
+        // console.log('Token missing match', tokensF[idxF], tokensT[idxT])
+        idxF++
+        idxT++
+      }
+    }
   })

   return to
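
In plain terms, the old pairing above was strictly index-by-index, so a single token inserted or removed on one side threw every later pair off and produced the mismatch warning; the new loop walks both filtered token lists with two cursors and tolerates one extra token on either side before giving up on a pair. Below is a simplified standalone sketch of that alignment; the Token shape is pared down for illustration and is not the library's real token type.

// Pared-down stand-in for the library's token type: only the fields used here.
interface Token {
  key: string
  content: string
}

// Greedy two-cursor alignment with one token of tolerance on each side,
// mirroring the new while-loop in syncTokenKeys above.
function alignTokenKeys(tokensF: Token[], tokensT: Token[]): void {
  let idxF = 0
  let idxT = 0
  while (idxF < tokensF.length && idxT < tokensT.length) {
    if (tokensF[idxF].content === tokensT[idxT].content) {
      // Exact match: carry the old key over so the transition treats both as the same element.
      tokensT[idxT].key = tokensF[idxF].key
      idxF++
      idxT++
    }
    else if (tokensF[idxF + 1]?.content === tokensT[idxT].content) {
      // The "from" side has one extra token here: skip it and retry.
      idxF++
    }
    else if (tokensF[idxF].content === tokensT[idxT + 1]?.content) {
      // The "to" side has one extra token here: skip it and retry.
      idxT++
    }
    else {
      // No match within one token of tolerance: move past both and continue.
      idxF++
      idxT++
    }
  }
}

// Example: `ref(` is new on the "to" side, yet `1` still inherits its old key.
const fromTokens: Token[] = [{ key: 'a-0', content: 'const' }, { key: 'a-1', content: '1' }]
const toTokens: Token[] = [{ key: 'b-0', content: 'const' }, { key: 'b-1', content: 'ref(' }, { key: 'b-2', content: '1' }]
alignTokenKeys(fromTokens, toTokens)
// toTokens now has keys ['a-0', 'b-1', 'a-1']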
test/diff.test.ts (193 changes: 148 additions & 45 deletions)
@@ -3,7 +3,7 @@ import { getHighlighter } from 'shiki/bundle/web'
 import type { KeyedTokensInfo } from '../src/core'
 import { codeToKeyedTokens, syncTokenKeys } from '../src/core'

-it('exported', async () => {
+it('diff1', async () => {
   const code1 = `const a = 1`
   const code2 = `const a = ref(1 + 1)`
   const code3 = `const b = ref(2)`
@@ -38,55 +38,158 @@ it('exported', async () => {
expect(syncedKeys2).not.toEqual(originKeys2)
expect(syncedKeys3).not.toEqual(originKeys3)

function printDiff(info: KeyedTokensInfo, keys: string[]) {
return info.tokens.map((t, i) => {
if (t.key === keys[i])
return `+ ${t.key} | ${t.content}`
return `${keys[i]} -> ${t.key} | ${t.content}`
})
}
expect(printDiff(tokens2, originKeys2)).toMatchInlineSnapshot(`
"
1-0 const
2-1
1-2 a
2-3
1-4 =
2-5
2-6 ref
2-7 (
1-6 1
2-9
2-10 +
2-11
2-12 1
2-13 )
2-14 ⏎
"
`)

function normalizeKeys(info: KeyedTokensInfo, name: string) {
info.tokens.forEach((t) => {
t.key = t.key.replace(info.hash, name)
})
}
expect(printDiff(tokens3, originKeys3)).toMatchInlineSnapshot(`
"
1-0 const
3-1
3-2 b
3-3
1-4 =
3-5
2-6 ref
2-7 (
3-8 2
2-13 )
3-10 ⏎
"
`)
})

it('diff2', async () => {
const code1 = `
<template>
<p class="">{{ greeting }}</p>
</template>
`.trim()
const code2 = `
<template>
<p class="a">{{ greeting }}</p>
</template>
`.trim()
const code3 = code1

const theme = 'vitesse-light'
const lang = 'vue'
const highlighter = await getHighlighter({
themes: [theme],
langs: [lang],
})

const tokens1 = codeToKeyedTokens(highlighter, code1, { lang, theme })
const tokens2 = codeToKeyedTokens(highlighter, code2, { lang, theme })
const tokens3 = codeToKeyedTokens(highlighter, code3, { lang, theme })

normalizeKeys(tokens1, '1')
normalizeKeys(tokens2, '2')
normalizeKeys(tokens3, '3')

const originKeys1 = tokens1.tokens.map(t => t.key)
const originKeys2 = tokens2.tokens.map(t => t.key)
const originKeys3 = tokens3.tokens.map(t => t.key)

syncTokenKeys(tokens1, tokens2)
syncTokenKeys(tokens2, tokens3)

const syncedKeys1 = tokens1.tokens.map(t => t.key)
const syncedKeys2 = tokens2.tokens.map(t => t.key)
const syncedKeys3 = tokens3.tokens.map(t => t.key)

expect(syncedKeys1).toEqual(originKeys1)
expect(syncedKeys2).not.toEqual(originKeys2)
expect(syncedKeys3).not.toEqual(originKeys3)

expect(printDiff(tokens2, originKeys2)).toMatchInlineSnapshot(`
[
"2-0 -> 1-0 | const",
"+ 2-1 | ",
"2-2 -> 1-2 | a",
"+ 2-3 | ",
"2-4 -> 1-4 | =",
"+ 2-5 | ",
"+ 2-6 | ref",
"+ 2-7 | (",
"2-8 -> 1-6 | 1",
"+ 2-9 | ",
"+ 2-10 | +",
"+ 2-11 | ",
"+ 2-12 | 1",
"+ 2-13 | )",
"+ 2-14 |
",
]
"
1-0 <
1-1 template
1-2 >
2-3 ⏎
2-4
1-5 <
1-6 p
2-7
1-8 class
1-9 =
2-10 "
2-11 a
2-12 "
1-11 >{{
2-14
1-13 greeting
2-16
1-15 }}</
1-16 p
1-17 >
2-20 ⏎
1-19 </
1-20 template
1-21 >
2-24 ⏎
"
`)

expect(printDiff(tokens3, originKeys3)).toMatchInlineSnapshot(`
[
"3-0 -> 1-0 | const",
"+ 3-1 | ",
"+ 3-2 | b",
"+ 3-3 | ",
"3-4 -> 1-4 | =",
"+ 3-5 | ",
"3-6 -> 2-6 | ref",
"3-7 -> 2-7 | (",
"+ 3-8 | 2",
"3-9 -> 2-13 | )",
"+ 3-10 |
",
]
"
1-0 <
1-1 template
1-2 >
3-3 ⏎
3-4
1-5 <
1-6 p
3-7
1-8 class
1-9 =
3-10 ""
1-11 >{{
3-12
1-13 greeting
3-14
1-15 }}</
1-16 p
1-17 >
3-18 ⏎
1-19 </
1-20 template
1-21 >
3-22 ⏎
"
`)
})

function printDiff(info: KeyedTokensInfo, keys: string[]) {
const text = info.tokens
.map((t, i) => {
if (t.key === keys[i])
return `${` ${t.key}`.padEnd(20, ' ')} ${t.content.replace(/\n/g, '⏎')}`
return `${`${t.key}`.padEnd(20, ' ')} ${t.content.replace(/\n/g, '⏎')}`
})
.join('\n')
return `\n${text}\n`
}

function normalizeKeys(info: KeyedTokensInfo, name: string) {
info.tokens.forEach((t) => {
t.key = t.key.replace(info.hash, name)
})
}
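
To make the snapshots above easier to read: printDiff prints one row per token, padded key first, then the token content with newlines shown as ⏎. A key that kept its own snippet's prefix (for example 2-1) was left untouched, while a key showing the other snippet's prefix (for example 1-0) was inherited through syncTokenKeys. A small illustrative sketch with made-up keys, assuming it runs in this test module so printDiff, normalizeKeys, and KeyedTokensInfo are in scope:

// Pared-down stand-in with only the fields these helpers touch; the real
// KeyedTokensInfo has more properties, hence the cast for this sketch.
const fake = {
  hash: 'deadbeef',
  tokens: [
    { key: 'deadbeef-0', content: 'const' },
    { key: 'deadbeef-1', content: ' a' },
  ],
} as unknown as KeyedTokensInfo

normalizeKeys(fake, '2') // keys become '2-0' and '2-1'
const originKeys = fake.tokens.map(t => t.key)
fake.tokens[0].key = '1-0' // as if syncTokenKeys had reused a key from snippet 1

// Rows whose key was replaced ('1-0') start flush left; untouched keys keep a leading space.
console.log(printDiff(fake, originKeys))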
