@@ -80,93 +80,97 @@ var NoDuplicateTypeConstituentsRule = rule.Rule{
 				ctx.ReportNode(constituentNode, message)
 				return
 			}
-			kind := ast.KindUnionType
-			if unionOrIntersection == unionOrIntersection_Intersection {
-				kind = ast.KindIntersectionType
-			}
-
-			parent := unwindedParentType(constituentNode, kind)
-			s := scanner.GetScannerForSourceFile(ctx.SourceFile, parent.Pos())
-			foundBefore := false
-			prevStart := 0
-			bracketBeforeTokens := []core.TextRange{}
-
-			for {
-				if s.TokenStart() >= constituentNode.Pos() {
-					break
-				}
-				if s.Token() == ast.KindAmpersandToken || s.Token() == ast.KindBarToken {
-					foundBefore = true
-					prevStart = s.TokenStart()
-					bracketBeforeTokens = bracketBeforeTokens[:0]
-				} else if s.Token() == ast.KindOpenParenToken {
-					bracketBeforeTokens = append(bracketBeforeTokens, s.TokenRange())
-				}
-				s.Scan()
-			}
-			fixes := []rule.RuleFix{
-				rule.RuleFixRemoveRange(utils.TrimNodeTextRange(ctx.SourceFile, constituentNode)),
-			}
-			if foundBefore {
-				fixes = append(fixes, rule.RuleFixRemoveRange(core.NewTextRange(prevStart, prevStart+1)))
-				for _, before := range bracketBeforeTokens {
-					fixes = append(fixes, rule.RuleFixRemoveRange(before))
-				}
-				s.ResetPos(constituentNode.End())
-				for range bracketBeforeTokens {
-					s.Scan()
-					if s.Token() != ast.KindCloseParenToken {
-						panic(fmt.Sprintf("expected next scanned token to be ')', got '%v'", s.Token()))
-					}
-					fixes = append(fixes, rule.RuleFixRemoveRange(s.TokenRange()))
-				}
-			} else {
-				s.ResetPos(constituentNode.End())
-
-				closingParensCount := 0
-				for {
-					s.Scan()
-
-					if s.TokenStart() >= parent.End() {
-						panic("couldn't find '&' or '|' token")
-					}
-
-					if s.Token() == ast.KindAmpersandToken || s.Token() == ast.KindBarToken {
-						fixes = append(fixes, rule.RuleFixRemoveRange(s.TokenRange()))
-						break
-					}
-					if s.Token() != ast.KindCloseParenToken {
-						panic(fmt.Sprintf("expected next scanned token to be ')', got '%v'", s.Token()))
-					}
-					closingParensCount++
-					fixes = append(fixes, rule.RuleFixRemoveRange(s.TokenRange()))
-				}
-
-				openingParens := make([]core.TextRange, 0, closingParensCount)
-				s.ResetPos(parent.Pos())
-				for range closingParensCount {
-					s.Scan()
-					if s.Token() == ast.KindOpenParenToken {
-						if len(openingParens) < closingParensCount {
-							openingParens = append(openingParens, s.TokenRange())
-						}
-					} else {
-						openingParens = openingParens[:0]
-					}
-
-					if s.TokenStart() == constituentNode.Pos() {
-						if len(openingParens) != closingParensCount {
-							panic(fmt.Sprintf("expected to find %v opening parens, found only %v", closingParensCount, len(openingParens)))
-						}
-						break
-					}
-				}
-
-				for _, openingParenRange := range openingParens {
-					fixes = append(fixes, rule.RuleFixRemoveRange(openingParenRange))
-				}
-			}
-			ctx.ReportNodeWithFixes(constituentNode, message, func() []rule.RuleFix { return fixes })
+			ctx.ReportNodeWithFixes(constituentNode, message, func() []rule.RuleFix {
+				kind := ast.KindUnionType
+				if unionOrIntersection == unionOrIntersection_Intersection {
+					kind = ast.KindIntersectionType
+				}
+
+				parent := unwindedParentType(constituentNode, kind)
+				s := scanner.GetScannerForSourceFile(ctx.SourceFile, parent.Pos())
+				foundBefore := false
+				prevStart := 0
+				bracketBeforeTokens := []core.TextRange{}
+
+				for {
+					if s.TokenStart() >= constituentNode.Pos() {
+						break
+					}
+					if s.Token() == ast.KindAmpersandToken || s.Token() == ast.KindBarToken {
+						foundBefore = true
+						prevStart = s.TokenStart()
+						bracketBeforeTokens = bracketBeforeTokens[:0]
+					} else if s.Token() == ast.KindOpenParenToken {
+						bracketBeforeTokens = append(bracketBeforeTokens, s.TokenRange())
+					}
+					s.Scan()
+				}
+				fixes := []rule.RuleFix{
+					rule.RuleFixRemoveRange(utils.TrimNodeTextRange(ctx.SourceFile, constituentNode)),
+				}
+				if foundBefore {
+					fixes = append(fixes, rule.RuleFixRemoveRange(core.NewTextRange(prevStart, prevStart+1)))
+					for _, before := range bracketBeforeTokens {
+						fixes = append(fixes, rule.RuleFixRemoveRange(before))
+					}
+					s.ResetPos(constituentNode.End())
+					for range bracketBeforeTokens {
+						s.Scan()
+						if s.Token() != ast.KindCloseParenToken {
+							panic(fmt.Sprintf("expected next scanned token to be ')', got '%v'", s.Token()))
+						}
+						fixes = append(fixes, rule.RuleFixRemoveRange(s.TokenRange()))
+					}
+				} else {
+					s.ResetPos(constituentNode.End())
+
+					closingParensCount := 0
+					for {
+						s.Scan()
+
+						if s.TokenStart() >= parent.End() {
+							panic("couldn't find '&' or '|' token")
+						}
+
+						if s.Token() == ast.KindAmpersandToken || s.Token() == ast.KindBarToken {
+							fixes = append(fixes, rule.RuleFixRemoveRange(s.TokenRange()))
+							break
+						}
+						if s.Token() != ast.KindCloseParenToken {
+							panic(fmt.Sprintf("expected next scanned token to be ')', got '%v'", s.Token()))
+						}
+						closingParensCount++
+						fixes = append(fixes, rule.RuleFixRemoveRange(s.TokenRange()))
+					}
+
+					openingParens := make([]core.TextRange, 0, closingParensCount)
+					s.ResetPos(parent.Pos())
+					for range closingParensCount {
+						s.Scan()
+						if s.Token() == ast.KindOpenParenToken {
+							if len(openingParens) < closingParensCount {
+								openingParens = append(openingParens, s.TokenRange())
+							}
+						} else {
+							openingParens = openingParens[:0]
+						}
+
+						if s.TokenStart() == constituentNode.Pos() {
+							if len(openingParens) != closingParensCount {
+								panic(fmt.Sprintf("expected to find %v opening parens, found only %v", closingParensCount, len(openingParens)))
+							}
+							break
+						}
+					}
+
+					for _, openingParenRange := range openingParens {
+						fixes = append(fixes, rule.RuleFixRemoveRange(openingParenRange))
+					}
+				}
+
+				return fixes
+			})
 		}
 
 		var checkDuplicateRecursively func(
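The net effect of the hunk is that the fix list is now built inside the callback passed to ctx.ReportNodeWithFixes instead of eagerly before reporting, presumably so the token scanning only runs when fixes are actually requested. Below is a minimal, self-contained sketch of that lazy-fix shape; RuleFix, reportContext, and the example range are stand-ins for illustration, not the PR's real rule package.

package main

import "fmt"

// RuleFix is a stand-in for the rule package's fix type: a half-open text range to delete.
type RuleFix struct{ Start, End int }

// reportContext is a stand-in for the rule context seen in the hunk above.
type reportContext struct {
	fixing bool // true when a fixer (e.g. --fix) is running
}

// ReportNodeWithFixes mirrors the shape used in the diff: the diagnostic is
// emitted immediately, while buildFixes is only invoked when fixes are needed,
// keeping the scanning work off the non-fixing path.
func (c reportContext) ReportNodeWithFixes(node, message string, buildFixes func() []RuleFix) {
	fmt.Printf("%s: %s\n", node, message)
	if c.fixing {
		for _, fix := range buildFixes() {
			fmt.Printf("  remove [%d, %d)\n", fix.Start, fix.End)
		}
	}
}

func main() {
	ctx := reportContext{fixing: true}
	src := "A | A"
	// Report the duplicated constituent; the fix range is computed lazily in the callback.
	ctx.ReportNodeWithFixes(src, "duplicate union constituent", func() []RuleFix {
		// The real rule walks the parent type with a scanner to find the
		// surrounding '|' / '&' tokens and redundant parentheses; here we
		// simply delete " | A" (bytes 1..5 of the example source).
		return []RuleFix{{Start: 1, End: 5}}
	})
}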