diff --git a/go.mod b/go.mod index f3778f6060c..119c51440ca 100644 --- a/go.mod +++ b/go.mod @@ -9,7 +9,7 @@ require ( github.com/PuerkitoBio/goquery v1.9.2 github.com/Soontao/goHttpDigestClient v0.0.0-20170320082612-6d28bb1415c5 github.com/andybalholm/brotli v1.1.1 - github.com/evanw/esbuild v0.21.2 + github.com/evanw/esbuild v0.24.2 github.com/fatih/color v1.18.0 github.com/go-sourcemap/sourcemap v2.1.4+incompatible github.com/golang/protobuf v1.5.4 diff --git a/go.sum b/go.sum index 4e9a3c72dbf..9ac0829f167 100644 --- a/go.sum +++ b/go.sum @@ -48,8 +48,8 @@ github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymF github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/evanw/esbuild v0.21.2 h1:CLplcGi794CfHLVmUbvVfTMKkykm+nyIHU8SU60KUTA= -github.com/evanw/esbuild v0.21.2/go.mod h1:D2vIQZqV/vIf/VRHtViaUtViZmG7o+kKmlBfVQuRi48= +github.com/evanw/esbuild v0.24.2 h1:PQExybVBrjHjN6/JJiShRGIXh1hWVm6NepVnhZhrt0A= +github.com/evanw/esbuild v0.24.2/go.mod h1:D2vIQZqV/vIf/VRHtViaUtViZmG7o+kKmlBfVQuRi48= github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= diff --git a/vendor/github.com/evanw/esbuild/internal/bundler/bundler.go b/vendor/github.com/evanw/esbuild/internal/bundler/bundler.go index e267f1ee5d1..3b398ec18c2 100644 --- a/vendor/github.com/evanw/esbuild/internal/bundler/bundler.go +++ b/vendor/github.com/evanw/esbuild/internal/bundler/bundler.go @@ -120,11 +120,29 @@ type tlaCheck struct { } func parseFile(args parseArgs) { + pathForIdentifierName := args.keyPath.Text + + // Identifier name generation may use the name of the parent folder if the + // file name starts with "index". However, this is problematic when the + // parent folder includes the parent directory of what the developer + // considers to be the root of the source tree. If that happens, strip the + // parent folder to avoid including it in the generated name. + if relative, ok := args.fs.Rel(args.options.AbsOutputBase, pathForIdentifierName); ok { + for { + next := strings.TrimPrefix(strings.TrimPrefix(relative, "../"), "..\\") + if relative == next { + break + } + relative = next + } + pathForIdentifierName = relative + } + source := logger.Source{ Index: args.sourceIndex, KeyPath: args.keyPath, PrettyPath: args.prettyPath, - IdentifierName: js_ast.GenerateNonUniqueNameFromPath(args.keyPath.Text), + IdentifierName: js_ast.GenerateNonUniqueNameFromPath(pathForIdentifierName), } var loader config.Loader @@ -149,7 +167,6 @@ func parseFile(args parseArgs) { &source, args.importSource, args.importPathRange, - args.importWith, args.pluginData, args.options.WatchMode, ) @@ -175,6 +192,44 @@ func parseFile(args parseArgs) { loader = loaderFromFileExtension(args.options.ExtensionToLoader, base+ext) } + // Reject unsupported import attributes when the loader isn't "copy" (since + // "copy" is kind of like "external"). But only do this if this file was not + // loaded by a plugin. Plugins are allowed to assign whatever semantics they + // want to import attributes. 
+ if loader != config.LoaderCopy && pluginName == "" { + for _, attr := range source.KeyPath.ImportAttributes.DecodeIntoArray() { + var errorText string + var errorRange js_lexer.KeyOrValue + + // We only currently handle "type: json" + if attr.Key != "type" { + errorText = fmt.Sprintf("Importing with the %q attribute is not supported", attr.Key) + errorRange = js_lexer.KeyRange + } else if attr.Value == "json" { + loader = config.LoaderWithTypeJSON + continue + } else { + errorText = fmt.Sprintf("Importing with a type attribute of %q is not supported", attr.Value) + errorRange = js_lexer.ValueRange + } + + // Everything else is an error + r := args.importPathRange + if args.importWith != nil { + r = js_lexer.RangeOfImportAssertOrWith(*args.importSource, *ast.FindAssertOrWithEntry(args.importWith.Entries, attr.Key), errorRange) + } + tracker := logger.MakeLineColumnTracker(args.importSource) + args.log.AddError(&tracker, r, errorText) + if args.inject != nil { + args.inject <- config.InjectedFile{ + Source: source, + } + } + args.results <- parseResult{} + return + } + } + if loader == config.LoaderEmpty { source.Contents = "" } @@ -398,6 +453,16 @@ func parseFile(args parseArgs) { continue } + // Encode the import attributes + var attrs logger.ImportAttributes + if record.AssertOrWith != nil && record.AssertOrWith.Keyword == ast.WithKeyword { + data := make(map[string]string, len(record.AssertOrWith.Entries)) + for _, entry := range record.AssertOrWith.Entries { + data[helpers.UTF16ToString(entry.Key)] = helpers.UTF16ToString(entry.Value) + } + attrs = logger.EncodeImportAttributes(data) + } + // Special-case glob pattern imports if record.GlobPattern != nil { prettyPath := helpers.GlobPatternToString(record.GlobPattern.Parts) @@ -414,6 +479,13 @@ func parseFile(args parseArgs) { if result.globResolveResults == nil { result.globResolveResults = make(map[uint32]globResolveResult) } + for key, result := range results { + result.PathPair.Primary.ImportAttributes = attrs + if result.PathPair.HasSecondary() { + result.PathPair.Secondary.ImportAttributes = attrs + } + results[key] = result + } result.globResolveResults[uint32(importRecordIndex)] = globResolveResult{ resolveResults: results, absPath: args.fs.Join(absResolveDir, "(glob)"), @@ -432,16 +504,6 @@ func parseFile(args parseArgs) { continue } - // Encode the import attributes - var attrs logger.ImportAttributes - if record.AssertOrWith != nil && record.AssertOrWith.Keyword == ast.WithKeyword { - data := make(map[string]string, len(record.AssertOrWith.Entries)) - for _, entry := range record.AssertOrWith.Entries { - data[helpers.UTF16ToString(entry.Key)] = helpers.UTF16ToString(entry.Value) - } - attrs = logger.EncodeImportAttributes(data) - } - // Cache the path in case it's imported multiple times in this file cacheKey := cacheKey{ kind: record.Kind, @@ -463,6 +525,7 @@ func parseFile(args parseArgs) { record.Range, source.KeyPath, record.Path.Text, + attrs, record.Kind, absResolveDir, pluginData, @@ -865,6 +928,7 @@ func RunOnResolvePlugins( importPathRange logger.Range, importer logger.Path, path string, + importAttributes logger.ImportAttributes, kind ast.ImportKind, absResolveDir string, pluginData interface{}, @@ -875,6 +939,7 @@ func RunOnResolvePlugins( Kind: kind, PluginData: pluginData, Importer: importer, + With: importAttributes, } applyPath := logger.Path{ Text: path, @@ -988,7 +1053,6 @@ func runOnLoadPlugins( source *logger.Source, importSource *logger.Source, importPathRange logger.Range, - importWith 
*ast.ImportAssertOrWith, pluginData interface{}, isWatchMode bool, ) (loaderPluginResult, bool) { @@ -1055,30 +1119,6 @@ func runOnLoadPlugins( } } - // Reject unsupported import attributes - loader := config.LoaderDefault - for _, attr := range source.KeyPath.ImportAttributes.Decode() { - if attr.Key == "type" { - if attr.Value == "json" { - loader = config.LoaderWithTypeJSON - } else { - r := importPathRange - if importWith != nil { - r = js_lexer.RangeOfImportAssertOrWith(*importSource, *ast.FindAssertOrWithEntry(importWith.Entries, attr.Key), js_lexer.ValueRange) - } - log.AddError(&tracker, r, fmt.Sprintf("Importing with a type attribute of %q is not supported", attr.Value)) - return loaderPluginResult{}, false - } - } else { - r := importPathRange - if importWith != nil { - r = js_lexer.RangeOfImportAssertOrWith(*importSource, *ast.FindAssertOrWithEntry(importWith.Entries, attr.Key), js_lexer.KeyRange) - } - log.AddError(&tracker, r, fmt.Sprintf("Importing with the %q attribute is not supported", attr.Key)) - return loaderPluginResult{}, false - } - } - // Force disabled modules to be empty if source.KeyPath.IsDisabled() { return loaderPluginResult{loader: config.LoaderEmpty}, true @@ -1089,7 +1129,7 @@ func runOnLoadPlugins( if contents, err, originalError := fsCache.ReadFile(fs, source.KeyPath.Text); err == nil { source.Contents = contents return loaderPluginResult{ - loader: loader, + loader: config.LoaderDefault, absResolveDir: fs.Dir(source.KeyPath.Text), }, true } else { @@ -1118,9 +1158,6 @@ func runOnLoadPlugins( return loaderPluginResult{loader: config.LoaderNone}, true } else { source.Contents = contents - if loader != config.LoaderDefault { - return loaderPluginResult{loader: loader}, true - } if mimeType := parsed.DecodeMIMEType(); mimeType != resolver.MIMETypeUnsupported { switch mimeType { case resolver.MIMETypeTextCSS: @@ -1625,6 +1662,7 @@ func (s *scanner) preprocessInjectedFiles() { logger.Range{}, importer, importPath, + logger.ImportAttributes{}, ast.ImportEntryPoint, injectAbsResolveDir, nil, @@ -1804,6 +1842,7 @@ func (s *scanner) addEntryPoints(entryPoints []EntryPoint) []graph.EntryPoint { logger.Range{}, importer, entryPoint.InputPath, + logger.ImportAttributes{}, ast.ImportEntryPoint, entryPointAbsResolveDir, nil, @@ -1836,15 +1875,20 @@ func (s *scanner) addEntryPoints(entryPoints []EntryPoint) []graph.EntryPoint { return nil } - // Parse all entry points that were resolved successfully + // Determine output paths for all entry points that were resolved successfully + type entryPointToParse struct { + index int + parse func() uint32 + } + var entryPointsToParse []entryPointToParse for i, info := range entryPointInfos { if info.results == nil { continue } for _, resolveResult := range info.results { + resolveResult := resolveResult prettyPath := resolver.PrettyPath(s.fs, resolveResult.PathPair.Primary) - sourceIndex := s.maybeParseFile(resolveResult, prettyPath, nil, logger.Range{}, nil, inputKindEntryPoint, nil) outputPath := entryPoints[i].OutputPath outputPathWasAutoGenerated := false @@ -1879,9 +1923,17 @@ func (s *scanner) addEntryPoints(entryPoints []EntryPoint) []graph.EntryPoint { outputPathWasAutoGenerated = true } + // Defer parsing for this entry point until later + entryPointsToParse = append(entryPointsToParse, entryPointToParse{ + index: len(entryMetas), + parse: func() uint32 { + return s.maybeParseFile(resolveResult, prettyPath, nil, logger.Range{}, nil, inputKindEntryPoint, nil) + }, + }) + entryMetas = append(entryMetas, graph.EntryPoint{ 
OutputPath: outputPath, - SourceIndex: sourceIndex, + SourceIndex: ast.InvalidRef.SourceIndex, OutputPathWasAutoGenerated: outputPathWasAutoGenerated, }) } @@ -1903,6 +1955,11 @@ func (s *scanner) addEntryPoints(entryPoints []EntryPoint) []graph.EntryPoint { } } + // Only parse entry points after "AbsOutputBase" has been determined + for _, toParse := range entryPointsToParse { + entryMetas[toParse.index].SourceIndex = toParse.parse() + } + // Turn all output paths back into relative paths, but this time relative to // the "outbase" value we computed above for i := range entryMetas { @@ -2203,7 +2260,7 @@ func (s *scanner) processScannedFiles(entryPointMeta []graph.EntryPoint) []scann for _, sourceIndex := range sourceIndices { source := &s.results[sourceIndex].file.inputFile.Source - attrs := source.KeyPath.ImportAttributes.Decode() + attrs := source.KeyPath.ImportAttributes.DecodeIntoArray() if len(attrs) == 0 { continue } @@ -2491,7 +2548,7 @@ func (s *scanner) processScannedFiles(entryPointMeta []graph.EntryPoint) []scann } else { sb.WriteString("]") } - if attrs := result.file.inputFile.Source.KeyPath.ImportAttributes.Decode(); len(attrs) > 0 { + if attrs := result.file.inputFile.Source.KeyPath.ImportAttributes.DecodeIntoArray(); len(attrs) > 0 { sb.WriteString(",\n \"with\": {") for i, attr := range attrs { if i > 0 { @@ -2520,11 +2577,13 @@ func (s *scanner) processScannedFiles(entryPointMeta []graph.EntryPoint) []scann // the entry point itself. customFilePath := "" useOutputFile := false + isEntryPoint := false if result.file.inputFile.Loader == config.LoaderCopy { if metaIndex, ok := entryPointSourceIndexToMetaIndex[uint32(sourceIndex)]; ok { template = s.options.EntryPathTemplate customFilePath = entryPointMeta[metaIndex].OutputPath useOutputFile = s.options.AbsOutputFile != "" + isEntryPoint = true } } @@ -2575,8 +2634,14 @@ func (s *scanner) processScannedFiles(entryPointMeta []graph.EntryPoint) []scann helpers.QuoteForJSON(result.file.inputFile.Source.PrettyPath, s.options.ASCIIOnly), len(bytes), ) + entryPointJSON := "" + if isEntryPoint { + entryPointJSON = fmt.Sprintf("\"entryPoint\": %s,\n ", + helpers.QuoteForJSON(result.file.inputFile.Source.PrettyPath, s.options.ASCIIOnly)) + } jsonMetadataChunk = fmt.Sprintf( - "{\n \"imports\": [],\n \"exports\": [],\n \"inputs\": %s,\n \"bytes\": %d\n }", + "{\n \"imports\": [],\n \"exports\": [],\n %s\"inputs\": %s,\n \"bytes\": %d\n }", + entryPointJSON, inputs, len(bytes), ) diff --git a/vendor/github.com/evanw/esbuild/internal/compat/css_table.go b/vendor/github.com/evanw/esbuild/internal/compat/css_table.go index 5ceb52efae2..0ee1001ac0e 100644 --- a/vendor/github.com/evanw/esbuild/internal/compat/css_table.go +++ b/vendor/github.com/evanw/esbuild/internal/compat/css_table.go @@ -191,20 +191,19 @@ var cssPrefixTable = map[css_ast.D][]prefixData{ {engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}}, }, css_ast.DBackdropFilter: { - {engine: IOS, prefix: WebkitPrefix}, - {engine: Safari, prefix: WebkitPrefix}, + {engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{18, 0, 0}}, + {engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{18, 0, 0}}, }, css_ast.DBackgroundClip: { {engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}}, {engine: Edge, prefix: MsPrefix, withoutPrefix: v{15, 0, 0}}, {engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}}, - {engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{14, 0, 0}}, {engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{106, 0, 0}}, - {engine: 
Safari, prefix: WebkitPrefix, withoutPrefix: v{14, 0, 0}}, + {engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{5, 0, 0}}, }, css_ast.DBoxDecorationBreak: { - {engine: Chrome, prefix: WebkitPrefix}, - {engine: Edge, prefix: WebkitPrefix}, + {engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{130, 0, 0}}, + {engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{130, 0, 0}}, {engine: IOS, prefix: WebkitPrefix}, {engine: Opera, prefix: WebkitPrefix}, {engine: Safari, prefix: WebkitPrefix}, @@ -241,7 +240,7 @@ var cssPrefixTable = map[css_ast.D][]prefixData{ }, css_ast.DMaskImage: { {engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}}, - {engine: Edge, prefix: WebkitPrefix}, + {engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}}, {engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}}, {engine: Opera, prefix: WebkitPrefix}, {engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}}, @@ -255,21 +254,21 @@ var cssPrefixTable = map[css_ast.D][]prefixData{ }, css_ast.DMaskPosition: { {engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}}, - {engine: Edge, prefix: WebkitPrefix}, + {engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}}, {engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}}, {engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{106, 0, 0}}, {engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}}, }, css_ast.DMaskRepeat: { {engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}}, - {engine: Edge, prefix: WebkitPrefix}, + {engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}}, {engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}}, {engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{106, 0, 0}}, {engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}}, }, css_ast.DMaskSize: { {engine: Chrome, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}}, - {engine: Edge, prefix: WebkitPrefix}, + {engine: Edge, prefix: WebkitPrefix, withoutPrefix: v{120, 0, 0}}, {engine: IOS, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}}, {engine: Opera, prefix: WebkitPrefix, withoutPrefix: v{106, 0, 0}}, {engine: Safari, prefix: WebkitPrefix, withoutPrefix: v{15, 4, 0}}, diff --git a/vendor/github.com/evanw/esbuild/internal/compat/js_table.go b/vendor/github.com/evanw/esbuild/internal/compat/js_table.go index 808eca3c69b..18f2bbe1c6d 100644 --- a/vendor/github.com/evanw/esbuild/internal/compat/js_table.go +++ b/vendor/github.com/evanw/esbuild/internal/compat/js_table.go @@ -513,7 +513,7 @@ var jsTable = map[JSFeature]map[Engine][]versionRange{ }, ForOf: { // Note: The latest version of "IE" failed 9 tests including: for..of loops: iterator closing, break - // Note: The latest version of "Rhino" failed 4 tests including: for..of loops: iterator closing, break + // Note: The latest version of "Rhino" failed 2 tests including: for..of loops: iterator closing, break Chrome: {{start: v{51, 0, 0}}}, Deno: {{start: v{1, 0, 0}}}, Edge: {{start: v{15, 0, 0}}}, @@ -527,7 +527,6 @@ var jsTable = map[JSFeature]map[Engine][]versionRange{ }, FunctionNameConfigurable: { // Note: The latest version of "IE" failed this test: function "name" property: isn't writable, is configurable - // Note: The latest version of "Rhino" failed this test: function "name" property: isn't writable, is configurable Chrome: {{start: v{43, 0, 0}}}, Deno: {{start: v{1, 0, 0}}}, Edge: {{start: v{12, 0, 0}}}, @@ -537,6 +536,7 @@ var jsTable = map[JSFeature]map[Engine][]versionRange{ IOS: {{start: v{10, 0, 
0}}}, Node: {{start: v{4, 0, 0}}}, Opera: {{start: v{30, 0, 0}}}, + Rhino: {{start: v{1, 7, 15}}}, Safari: {{start: v{10, 0, 0}}}, }, FunctionOrClassPropertyAccess: { @@ -556,7 +556,7 @@ var jsTable = map[JSFeature]map[Engine][]versionRange{ Generator: { // Note: The latest version of "Hermes" failed 3 tests including: generators: computed shorthand generators, classes // Note: The latest version of "IE" failed 27 tests including: generators: %GeneratorPrototype% - // Note: The latest version of "Rhino" failed 15 tests including: generators: %GeneratorPrototype% + // Note: The latest version of "Rhino" failed 11 tests including: generators: %GeneratorPrototype% Chrome: {{start: v{50, 0, 0}}}, Deno: {{start: v{1, 0, 0}}}, Edge: {{start: v{13, 0, 0}}}, @@ -569,29 +569,30 @@ var jsTable = map[JSFeature]map[Engine][]versionRange{ }, Hashbang: { // Note: The latest version of "IE" failed this test: Hashbang Grammar - // Note: The latest version of "Rhino" failed this test: Hashbang Grammar Chrome: {{start: v{74, 0, 0}}}, Deno: {{start: v{1, 0, 0}}}, Edge: {{start: v{79, 0, 0}}}, + ES: {{start: v{2023, 0, 0}}}, Firefox: {{start: v{67, 0, 0}}}, Hermes: {{start: v{0, 7, 0}}}, IOS: {{start: v{13, 4, 0}}}, Node: {{start: v{12, 5, 0}}}, Opera: {{start: v{62, 0, 0}}}, + Rhino: {{start: v{1, 7, 15}}}, Safari: {{start: v{13, 1, 0}}}, }, ImportAssertions: { Chrome: {{start: v{91, 0, 0}}}, Deno: {{start: v{1, 17, 0}}}, Edge: {{start: v{91, 0, 0}}}, - Node: {{start: v{16, 14, 0}}}, + Node: {{start: v{16, 14, 0}, end: v{22, 0, 0}}}, }, ImportAttributes: { Chrome: {{start: v{123, 0, 0}}}, Deno: {{start: v{1, 37, 0}}}, Edge: {{start: v{123, 0, 0}}}, IOS: {{start: v{17, 2, 0}}}, - Node: {{start: v{20, 10, 0}}}, + Node: {{start: v{18, 20, 0}, end: v{19, 0, 0}}, {start: v{20, 10, 0}}}, Opera: {{start: v{109, 0, 0}}}, Safari: {{start: v{17, 2, 0}}}, }, @@ -650,9 +651,11 @@ var jsTable = map[JSFeature]map[Engine][]versionRange{ Safari: {{start: v{10, 0, 0}}}, }, NodeColonPrefixImport: { + ES: {{start: v{0, 0, 0}}}, Node: {{start: v{12, 20, 0}, end: v{13, 0, 0}}, {start: v{14, 13, 1}}}, }, NodeColonPrefixRequire: { + ES: {{start: v{0, 0, 0}}}, Node: {{start: v{14, 18, 0}, end: v{15, 0, 0}}, {start: v{16, 0, 0}}}, }, NullishCoalescing: { @@ -713,7 +716,7 @@ var jsTable = map[JSFeature]map[Engine][]versionRange{ }, OptionalCatchBinding: { // Note: The latest version of "IE" failed 3 tests including: optional catch binding: await - // Note: The latest version of "Rhino" failed 3 tests including: optional catch binding: await + // Note: The latest version of "Rhino" failed this test: optional catch binding: await Chrome: {{start: v{66, 0, 0}}}, Deno: {{start: v{1, 0, 0}}}, Edge: {{start: v{79, 0, 0}}}, @@ -741,7 +744,6 @@ var jsTable = map[JSFeature]map[Engine][]versionRange{ }, RegexpDotAllFlag: { // Note: The latest version of "IE" failed this test: s (dotAll) flag for regular expressions - // Note: The latest version of "Rhino" failed this test: s (dotAll) flag for regular expressions Chrome: {{start: v{62, 0, 0}}}, Deno: {{start: v{1, 0, 0}}}, Edge: {{start: v{79, 0, 0}}}, @@ -751,6 +753,7 @@ var jsTable = map[JSFeature]map[Engine][]versionRange{ IOS: {{start: v{11, 3, 0}}}, Node: {{start: v{8, 10, 0}}}, Opera: {{start: v{49, 0, 0}}}, + Rhino: {{start: v{1, 7, 15}}}, Safari: {{start: v{11, 1, 0}}}, }, RegexpLookbehindAssertions: { @@ -792,10 +795,12 @@ var jsTable = map[JSFeature]map[Engine][]versionRange{ Opera: {{start: v{51, 0, 0}}}, Safari: {{start: v{11, 1, 0}}}, }, - RegexpSetNotation: {}, + 
RegexpSetNotation: { + ES: {{start: v{2024, 0, 0}}}, + }, RegexpStickyAndUnicodeFlags: { // Note: The latest version of "IE" failed 6 tests including: RegExp "y" and "u" flags: "u" flag - // Note: The latest version of "Rhino" failed 6 tests including: RegExp "y" and "u" flags: "u" flag + // Note: The latest version of "Rhino" failed 4 tests including: RegExp "y" and "u" flags: "u" flag Chrome: {{start: v{50, 0, 0}}}, Deno: {{start: v{1, 0, 0}}}, Edge: {{start: v{13, 0, 0}}}, @@ -808,20 +813,21 @@ var jsTable = map[JSFeature]map[Engine][]versionRange{ Safari: {{start: v{12, 0, 0}}}, }, RegexpUnicodePropertyEscapes: { - // Note: The latest version of "Chrome" failed this test: RegExp Unicode Property Escapes: Unicode 15.1 - // Note: The latest version of "Firefox" failed this test: RegExp Unicode Property Escapes: Unicode 15.1 + // Note: The latest version of "Chrome" failed this test: RegExp Unicode Property Escapes: Unicode 16.0 + // Note: The latest version of "Edge" failed this test: RegExp Unicode Property Escapes: Unicode 16.0 // Note: The latest version of "Hermes" failed 8 tests including: RegExp Unicode Property Escapes: Unicode 11 // Note: The latest version of "IE" failed 8 tests including: RegExp Unicode Property Escapes: Unicode 11 - // Note: The latest version of "IOS" failed this test: RegExp Unicode Property Escapes: Unicode 15.1 + // Note: The latest version of "IOS" failed this test: RegExp Unicode Property Escapes: Unicode 16.0 // Note: The latest version of "Rhino" failed 8 tests including: RegExp Unicode Property Escapes: Unicode 11 - // Note: The latest version of "Safari" failed this test: RegExp Unicode Property Escapes: Unicode 15.1 - ES: {{start: v{2018, 0, 0}}}, - Node: {{start: v{18, 20, 0}, end: v{19, 0, 0}}, {start: v{20, 12, 0}, end: v{21, 0, 0}}, {start: v{21, 3, 0}}}, + // Note: The latest version of "Safari" failed this test: RegExp Unicode Property Escapes: Unicode 16.0 + ES: {{start: v{2018, 0, 0}}}, + Firefox: {{start: v{134, 0, 0}}}, + Node: {{start: v{22, 12, 0}, end: v{23, 0, 0}}, {start: v{23, 3, 0}}}, }, RestArgument: { // Note: The latest version of "Hermes" failed this test: rest parameters: function 'length' property // Note: The latest version of "IE" failed 5 tests including: rest parameters: arguments object interaction - // Note: The latest version of "Rhino" failed 5 tests including: rest parameters: arguments object interaction + // Note: The latest version of "Rhino" failed 2 tests including: rest parameters: arguments object interaction Chrome: {{start: v{47, 0, 0}}}, Deno: {{start: v{1, 0, 0}}}, Edge: {{start: v{12, 0, 0}}}, @@ -835,7 +841,7 @@ var jsTable = map[JSFeature]map[Engine][]versionRange{ TemplateLiteral: { // Note: The latest version of "Hermes" failed this test: template literals: TemplateStrings call site caching // Note: The latest version of "IE" failed 7 tests including: template literals: TemplateStrings call site caching - // Note: The latest version of "Rhino" failed 2 tests including: template literals: basic functionality + // Note: The latest version of "Rhino" failed this test: template literals: toString conversion Chrome: {{start: v{41, 0, 0}}}, Deno: {{start: v{1, 0, 0}}}, Edge: {{start: v{13, 0, 0}}}, @@ -872,7 +878,6 @@ var jsTable = map[JSFeature]map[Engine][]versionRange{ }, UnicodeEscapes: { // Note: The latest version of "IE" failed 2 tests including: Unicode code point escapes: in identifiers - // Note: The latest version of "Rhino" failed 4 tests including: Unicode code point escapes: in identifiers 
Chrome: {{start: v{44, 0, 0}}}, Deno: {{start: v{1, 0, 0}}}, Edge: {{start: v{12, 0, 0}}}, @@ -882,6 +887,7 @@ var jsTable = map[JSFeature]map[Engine][]versionRange{ IOS: {{start: v{9, 0, 0}}}, Node: {{start: v{4, 0, 0}}}, Opera: {{start: v{31, 0, 0}}}, + Rhino: {{start: v{1, 7, 15}}}, Safari: {{start: v{9, 0, 0}}}, }, Using: {}, diff --git a/vendor/github.com/evanw/esbuild/internal/config/config.go b/vendor/github.com/evanw/esbuild/internal/config/config.go index 3dd2e553100..615d6882eaa 100644 --- a/vendor/github.com/evanw/esbuild/internal/config/config.go +++ b/vendor/github.com/evanw/esbuild/internal/config/config.go @@ -771,6 +771,7 @@ type OnResolveArgs struct { PluginData interface{} Importer logger.Path Kind ast.ImportKind + With logger.ImportAttributes } type OnResolveResult struct { diff --git a/vendor/github.com/evanw/esbuild/internal/config/globals.go b/vendor/github.com/evanw/esbuild/internal/config/globals.go index 4a77c0267c0..1bbc9786f8c 100644 --- a/vendor/github.com/evanw/esbuild/internal/config/globals.go +++ b/vendor/github.com/evanw/esbuild/internal/config/globals.go @@ -2,7 +2,6 @@ package config import ( "math" - "strings" "sync" "github.com/evanw/esbuild/internal/ast" @@ -868,6 +867,7 @@ type DefineExpr struct { } type DefineData struct { + KeyParts []string DefineExpr *DefineExpr Flags DefineFlags } @@ -905,14 +905,9 @@ func mergeDefineData(old DefineData, new DefineData) DefineData { return new } -type DotDefine struct { - Data DefineData - Parts []string -} - type ProcessedDefines struct { IdentifierDefines map[string]DefineData - DotDefines map[string][]DotDefine + DotDefines map[string][]DefineData } // This transformation is expensive, so we only want to do it once. Make sure @@ -920,7 +915,7 @@ type ProcessedDefines struct { // doesn't have an efficient way to copy a map and the overhead of copying // all of the properties into a new map once for every new parser noticeably // slows down our benchmarks. -func ProcessDefines(userDefines map[string]DefineData) ProcessedDefines { +func ProcessDefines(userDefines []DefineData) ProcessedDefines { // Optimization: reuse known globals if there are no user-specified defines hasUserDefines := len(userDefines) != 0 if !hasUserDefines { @@ -934,7 +929,7 @@ func ProcessDefines(userDefines map[string]DefineData) ProcessedDefines { result := ProcessedDefines{ IdentifierDefines: make(map[string]DefineData), - DotDefines: make(map[string][]DotDefine), + DotDefines: make(map[string][]DefineData), } // Mark these property accesses as free of side effects. That means they can @@ -956,7 +951,7 @@ func ProcessDefines(userDefines map[string]DefineData) ProcessedDefines { flags |= IsSymbolInstance } - result.DotDefines[tail] = append(result.DotDefines[tail], DotDefine{Parts: parts, Data: DefineData{Flags: flags}}) + result.DotDefines[tail] = append(result.DotDefines[tail], DefineData{KeyParts: parts, Flags: flags}) } } @@ -973,31 +968,29 @@ func ProcessDefines(userDefines map[string]DefineData) ProcessedDefines { // Then copy the user-specified defines in afterwards, which will overwrite // any known globals above. 
- for key, data := range userDefines { - parts := strings.Split(key, ".") - + for _, data := range userDefines { // Identifier defines are special-cased - if len(parts) == 1 { - result.IdentifierDefines[key] = mergeDefineData(result.IdentifierDefines[key], data) + if len(data.KeyParts) == 1 { + name := data.KeyParts[0] + result.IdentifierDefines[name] = mergeDefineData(result.IdentifierDefines[name], data) continue } - tail := parts[len(parts)-1] + tail := data.KeyParts[len(data.KeyParts)-1] dotDefines := result.DotDefines[tail] found := false // Try to merge with existing dot defines first for i, define := range dotDefines { - if helpers.StringArraysEqual(parts, define.Parts) { - define := &dotDefines[i] - define.Data = mergeDefineData(define.Data, data) + if helpers.StringArraysEqual(data.KeyParts, define.KeyParts) { + dotDefines[i] = mergeDefineData(dotDefines[i], data) found = true break } } if !found { - dotDefines = append(dotDefines, DotDefine{Parts: parts, Data: data}) + dotDefines = append(dotDefines, data) } result.DotDefines[tail] = dotDefines } diff --git a/vendor/github.com/evanw/esbuild/internal/css_parser/css_parser.go b/vendor/github.com/evanw/esbuild/internal/css_parser/css_parser.go index 6fbcbeea011..131ec5edfcd 100644 --- a/vendor/github.com/evanw/esbuild/internal/css_parser/css_parser.go +++ b/vendor/github.com/evanw/esbuild/internal/css_parser/css_parser.go @@ -1102,6 +1102,10 @@ var specialAtRules = map[string]atRuleKind{ // Defining before-change style: the @starting-style rule // Reference: https://drafts.csswg.org/css-transitions-2/#defining-before-change-style-the-starting-style-rule "starting-style": atRuleInheritContext, + + // Anchor Positioning + // Reference: https://drafts.csswg.org/css-anchor-position-1/#at-ruledef-position-try + "position-try": atRuleDeclarations, } var atKnownRuleCanBeRemovedIfEmpty = map[string]bool{ diff --git a/vendor/github.com/evanw/esbuild/internal/fs/fs.go b/vendor/github.com/evanw/esbuild/internal/fs/fs.go index b68da2c20d2..ccfcc6cded8 100644 --- a/vendor/github.com/evanw/esbuild/internal/fs/fs.go +++ b/vendor/github.com/evanw/esbuild/internal/fs/fs.go @@ -201,6 +201,7 @@ type FS interface { Join(parts ...string) string Cwd() string Rel(base string, target string) (string, bool) + EvalSymlinks(path string) (string, bool) // This is used in the implementation of "Entry" kind(dir string, base string) (symlink string, kind EntryKind) diff --git a/vendor/github.com/evanw/esbuild/internal/fs/fs_mock.go b/vendor/github.com/evanw/esbuild/internal/fs/fs_mock.go index 1bb62b97fd2..8626b59796d 100644 --- a/vendor/github.com/evanw/esbuild/internal/fs/fs_mock.go +++ b/vendor/github.com/evanw/esbuild/internal/fs/fs_mock.go @@ -281,6 +281,10 @@ func (fs *mockFS) Rel(base string, target string) (string, bool) { return target, true } +func (fs *mockFS) EvalSymlinks(path string) (string, bool) { + return "", false +} + func (fs *mockFS) kind(dir string, base string) (symlink string, kind EntryKind) { panic("This should never be called") } diff --git a/vendor/github.com/evanw/esbuild/internal/fs/fs_real.go b/vendor/github.com/evanw/esbuild/internal/fs/fs_real.go index 5b0ef3b4c55..412eb687d85 100644 --- a/vendor/github.com/evanw/esbuild/internal/fs/fs_real.go +++ b/vendor/github.com/evanw/esbuild/internal/fs/fs_real.go @@ -340,6 +340,13 @@ func (fs *realFS) Rel(base string, target string) (string, bool) { return "", false } +func (fs *realFS) EvalSymlinks(path string) (string, bool) { + if path, err := fs.fp.evalSymlinks(path); err == nil { + 
return path, true + } + return "", false +} + func (fs *realFS) readdir(dirname string) (entries []string, canonicalError error, originalError error) { BeforeFileOpen() defer AfterFileClose() diff --git a/vendor/github.com/evanw/esbuild/internal/fs/fs_zip.go b/vendor/github.com/evanw/esbuild/internal/fs/fs_zip.go index 4f168f25fad..58a7b8ba494 100644 --- a/vendor/github.com/evanw/esbuild/internal/fs/fs_zip.go +++ b/vendor/github.com/evanw/esbuild/internal/fs/fs_zip.go @@ -322,6 +322,10 @@ func (fs *zipFS) Rel(base string, target string) (string, bool) { return fs.inner.Rel(base, target) } +func (fs *zipFS) EvalSymlinks(path string) (string, bool) { + return fs.inner.EvalSymlinks(path) +} + func (fs *zipFS) kind(dir string, base string) (symlink string, kind EntryKind) { return fs.inner.kind(dir, base) } diff --git a/vendor/github.com/evanw/esbuild/internal/js_ast/js_ast.go b/vendor/github.com/evanw/esbuild/internal/js_ast/js_ast.go index f44f1f83b2f..f8d3fe32f5b 100644 --- a/vendor/github.com/evanw/esbuild/internal/js_ast/js_ast.go +++ b/vendor/github.com/evanw/esbuild/internal/js_ast/js_ast.go @@ -374,6 +374,15 @@ type Class struct { BodyLoc logger.Loc CloseBraceLoc logger.Loc + // If true, JavaScript decorators (i.e. not TypeScript experimental + // decorators) should be lowered. This is the case either if JavaScript + // decorators are not supported in the configured target environment, or + // if "useDefineForClassFields" is set to false and this class has + // decorators on it. Note that this flag is not necessarily set to true if + // "useDefineForClassFields" is false and a class has an "accessor" even + // though the accessor feature comes from the decorator specification. + ShouldLowerStandardDecorators bool + // If true, property field initializers cannot be assumed to have no side // effects. For example: // diff --git a/vendor/github.com/evanw/esbuild/internal/js_ast/js_ast_helpers.go b/vendor/github.com/evanw/esbuild/internal/js_ast/js_ast_helpers.go index c4c65ca0b3b..da78ea76919 100644 --- a/vendor/github.com/evanw/esbuild/internal/js_ast/js_ast_helpers.go +++ b/vendor/github.com/evanw/esbuild/internal/js_ast/js_ast_helpers.go @@ -1133,9 +1133,15 @@ func approximatePrintedIntCharCount(intValue float64) int { return count } -func ShouldFoldBinaryArithmeticWhenMinifying(binary *EBinary) bool { +func ShouldFoldBinaryOperatorWhenMinifying(binary *EBinary) bool { switch binary.Op { case + // Equality tests should always result in smaller code when folded + BinOpLooseEq, + BinOpLooseNe, + BinOpStrictEq, + BinOpStrictNe, + // Minification always folds right signed shift operations since they are // unlikely to result in larger output. Note: ">>>" could result in // bigger output such as "-1 >>> 0" becoming "4294967295". 
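The hunk above widens esbuild's minify-time constant folding from arithmetic to general binary operators (equality, string concatenation, and logical/nullish short-circuits). A minimal sketch of the effect, driven through the public Transform API in pkg/api with MinifySyntax enabled; the printed result is indicative only and depends on the other minify settings:

package main

import (
	"fmt"

	"github.com/evanw/esbuild/pkg/api"
)

func main() {
	// Equality tests, string addition, and "??" with a primitive literal on the
	// left are now folding candidates (see ShouldFoldBinaryOperatorWhenMinifying).
	result := api.Transform(`console.log(1 === 1, "a" + "b", null ?? "x")`, api.TransformOptions{
		MinifySyntax: true,
	})
	for _, msg := range result.Errors {
		fmt.Println("error:", msg.Text)
	}
	// Expected output is roughly: console.log(!0, "ab", "x");
	fmt.Println(string(result.Code))
}
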
@@ -1161,6 +1167,11 @@ func ShouldFoldBinaryArithmeticWhenMinifying(binary *EBinary) bool { return true } + // String addition should pretty much always be more compact when folded + if _, _, ok := extractStringValues(binary.Left, binary.Right); ok { + return true + } + case BinOpSub: // Subtraction of small-ish integers can definitely be folded without issues // "3 - 1" => "2" @@ -1197,18 +1208,26 @@ func ShouldFoldBinaryArithmeticWhenMinifying(binary *EBinary) bool { resultLen := approximatePrintedIntCharCount(float64(ToUint32(left) >> (ToUint32(right) & 31))) return resultLen <= leftLen+3+rightLen } + + case BinOpLogicalAnd, BinOpLogicalOr, BinOpNullishCoalescing: + if IsPrimitiveLiteral(binary.Left.Data) { + return true + } } return false } // This function intentionally avoids mutating the input AST so it can be // called after the AST has been frozen (i.e. after parsing ends). -func FoldBinaryArithmetic(loc logger.Loc, e *EBinary) Expr { +func FoldBinaryOperator(loc logger.Loc, e *EBinary) Expr { switch e.Op { case BinOpAdd: if left, right, ok := extractNumericValues(e.Left, e.Right); ok { return Expr{Loc: loc, Data: &ENumber{Value: left + right}} } + if left, right, ok := extractStringValues(e.Left, e.Right); ok { + return Expr{Loc: loc, Data: &EString{Value: joinStrings(left, right)}} + } case BinOpSub: if left, right, ok := extractNumericValues(e.Left, e.Right); ok { @@ -1296,6 +1315,49 @@ func FoldBinaryArithmetic(loc logger.Loc, e *EBinary) Expr { if left, right, ok := extractStringValues(e.Left, e.Right); ok { return Expr{Loc: loc, Data: &EBoolean{Value: stringCompareUCS2(left, right) >= 0}} } + + case BinOpLooseEq, BinOpStrictEq: + if left, right, ok := extractNumericValues(e.Left, e.Right); ok { + return Expr{Loc: loc, Data: &EBoolean{Value: left == right}} + } + if left, right, ok := extractStringValues(e.Left, e.Right); ok { + return Expr{Loc: loc, Data: &EBoolean{Value: stringCompareUCS2(left, right) == 0}} + } + + case BinOpLooseNe, BinOpStrictNe: + if left, right, ok := extractNumericValues(e.Left, e.Right); ok { + return Expr{Loc: loc, Data: &EBoolean{Value: left != right}} + } + if left, right, ok := extractStringValues(e.Left, e.Right); ok { + return Expr{Loc: loc, Data: &EBoolean{Value: stringCompareUCS2(left, right) != 0}} + } + + case BinOpLogicalAnd: + if boolean, sideEffects, ok := ToBooleanWithSideEffects(e.Left.Data); ok { + if !boolean { + return e.Left + } else if sideEffects == NoSideEffects { + return e.Right + } + } + + case BinOpLogicalOr: + if boolean, sideEffects, ok := ToBooleanWithSideEffects(e.Left.Data); ok { + if boolean { + return e.Left + } else if sideEffects == NoSideEffects { + return e.Right + } + } + + case BinOpNullishCoalescing: + if isNullOrUndefined, sideEffects, ok := ToNullOrUndefinedWithSideEffects(e.Left.Data); ok { + if !isNullOrUndefined { + return e.Left + } else if sideEffects == NoSideEffects { + return e.Right + } + } } return Expr{} diff --git a/vendor/github.com/evanw/esbuild/internal/js_lexer/js_lexer.go b/vendor/github.com/evanw/esbuild/internal/js_lexer/js_lexer.go index 8bac729c71e..3776f9727dd 100644 --- a/vendor/github.com/evanw/esbuild/internal/js_lexer/js_lexer.go +++ b/vendor/github.com/evanw/esbuild/internal/js_lexer/js_lexer.go @@ -794,12 +794,6 @@ func (lexer *Lexer) NextJSXElementChild() { if needsFixing { // Slow path lexer.decodedStringLiteralOrNil = fixWhitespaceAndDecodeJSXEntities(text) - - // Skip this token if it turned out to be empty after trimming - if len(lexer.decodedStringLiteralOrNil) == 0 { - 
lexer.HasNewlineBefore = true - continue - } } else { // Fast path n := len(text) diff --git a/vendor/github.com/evanw/esbuild/internal/js_parser/global_name_parser.go b/vendor/github.com/evanw/esbuild/internal/js_parser/global_name_parser.go index 0064990b75a..78649ead8aa 100644 --- a/vendor/github.com/evanw/esbuild/internal/js_parser/global_name_parser.go +++ b/vendor/github.com/evanw/esbuild/internal/js_parser/global_name_parser.go @@ -19,9 +19,22 @@ func ParseGlobalName(log logger.Log, source logger.Source) (result []string, ok lexer := js_lexer.NewLexerGlobalName(log, source) - // Start off with an identifier + // Start off with an identifier or a keyword that results in an object result = append(result, lexer.Identifier.String) - lexer.Expect(js_lexer.TIdentifier) + switch lexer.Token { + case js_lexer.TThis: + lexer.Next() + + case js_lexer.TImport: + // Handle "import.meta" + lexer.Next() + lexer.Expect(js_lexer.TDot) + result = append(result, lexer.Identifier.String) + lexer.ExpectContextualKeyword("meta") + + default: + lexer.Expect(js_lexer.TIdentifier) + } // Follow with dot or index expressions for lexer.Token != js_lexer.TEndOfFile { diff --git a/vendor/github.com/evanw/esbuild/internal/js_parser/js_parser.go b/vendor/github.com/evanw/esbuild/internal/js_parser/js_parser.go index 626cf0ba430..01c3fde9fa6 100644 --- a/vendor/github.com/evanw/esbuild/internal/js_parser/js_parser.go +++ b/vendor/github.com/evanw/esbuild/internal/js_parser/js_parser.go @@ -1605,7 +1605,7 @@ func (p *parser) hoistSymbols(scope *js_ast.Scope) { func (p *parser) declareBinding(kind ast.SymbolKind, binding js_ast.Binding, opts parseStmtOpts) { js_ast.ForEachIdentifierBinding(binding, func(loc logger.Loc, b *js_ast.BIdentifier) { if !opts.isTypeScriptDeclare || (opts.isNamespaceScope && opts.isExport) { - b.Ref = p.declareSymbol(kind, binding.Loc, p.loadNameFromRef(b.Ref)) + b.Ref = p.declareSymbol(kind, loc, p.loadNameFromRef(b.Ref)) } }) } @@ -2136,47 +2136,50 @@ func (p *parser) parseProperty(startLoc logger.Loc, kind js_ast.PropertyKind, op couldBeModifierKeyword := p.lexer.IsIdentifierOrKeyword() if !couldBeModifierKeyword { switch p.lexer.Token { - case js_lexer.TOpenBracket, js_lexer.TNumericLiteral, js_lexer.TStringLiteral, - js_lexer.TAsterisk, js_lexer.TPrivateIdentifier: + case js_lexer.TOpenBracket, js_lexer.TNumericLiteral, js_lexer.TStringLiteral, js_lexer.TPrivateIdentifier: couldBeModifierKeyword = true + case js_lexer.TAsterisk: + if opts.isAsync || (raw != "get" && raw != "set") { + couldBeModifierKeyword = true + } } } // If so, check for a modifier keyword if couldBeModifierKeyword { - switch name.String { + switch raw { case "get": - if !opts.isAsync && raw == name.String { + if !opts.isAsync { p.markSyntaxFeature(compat.ObjectAccessors, nameRange) return p.parseProperty(startLoc, js_ast.PropertyGetter, opts, nil) } case "set": - if !opts.isAsync && raw == name.String { + if !opts.isAsync { p.markSyntaxFeature(compat.ObjectAccessors, nameRange) return p.parseProperty(startLoc, js_ast.PropertySetter, opts, nil) } case "accessor": - if !p.lexer.HasNewlineBefore && !opts.isAsync && opts.isClass && raw == name.String { + if !p.lexer.HasNewlineBefore && !opts.isAsync && opts.isClass { return p.parseProperty(startLoc, js_ast.PropertyAutoAccessor, opts, nil) } case "async": - if !p.lexer.HasNewlineBefore && !opts.isAsync && raw == name.String { + if !p.lexer.HasNewlineBefore && !opts.isAsync { opts.isAsync = true opts.asyncRange = nameRange return p.parseProperty(startLoc, 
js_ast.PropertyMethod, opts, nil) } case "static": - if !opts.isStatic && !opts.isAsync && opts.isClass && raw == name.String { + if !opts.isStatic && !opts.isAsync && opts.isClass { opts.isStatic = true return p.parseProperty(startLoc, kind, opts, nil) } case "declare": - if !p.lexer.HasNewlineBefore && opts.isClass && p.options.ts.Parse && opts.tsDeclareRange.Len == 0 && raw == name.String { + if !p.lexer.HasNewlineBefore && opts.isClass && p.options.ts.Parse && opts.tsDeclareRange.Len == 0 { opts.tsDeclareRange = nameRange scopeIndex := len(p.scopesInOrder) @@ -2213,7 +2216,7 @@ func (p *parser) parseProperty(startLoc logger.Loc, kind js_ast.PropertyKind, op } case "abstract": - if !p.lexer.HasNewlineBefore && opts.isClass && p.options.ts.Parse && !opts.isTSAbstract && raw == name.String { + if !p.lexer.HasNewlineBefore && opts.isClass && p.options.ts.Parse && !opts.isTSAbstract { opts.isTSAbstract = true scopeIndex := len(p.scopesInOrder) @@ -2249,7 +2252,7 @@ func (p *parser) parseProperty(startLoc logger.Loc, kind js_ast.PropertyKind, op case "private", "protected", "public", "readonly", "override": // Skip over TypeScript keywords - if opts.isClass && p.options.ts.Parse && raw == name.String { + if opts.isClass && p.options.ts.Parse { return p.parseProperty(startLoc, kind, opts, nil) } } @@ -5234,8 +5237,10 @@ func (p *parser) parseJSXElement(loc logger.Loc) js_ast.Expr { case js_lexer.TStringLiteral: if p.options.jsx.Preserve { nullableChildren = append(nullableChildren, js_ast.Expr{Loc: p.lexer.Loc(), Data: &js_ast.EJSXText{Raw: p.lexer.Raw()}}) + } else if str := p.lexer.StringLiteral(); len(str) > 0 { + nullableChildren = append(nullableChildren, js_ast.Expr{Loc: p.lexer.Loc(), Data: &js_ast.EString{Value: str}}) } else { - nullableChildren = append(nullableChildren, js_ast.Expr{Loc: p.lexer.Loc(), Data: &js_ast.EString{Value: p.lexer.StringLiteral()}}) + // Skip this token if it turned out to be empty after trimming } p.lexer.NextJSXElementChild() @@ -5484,8 +5489,6 @@ func (p *parser) parseClauseAlias(kind string) js_lexer.MaybeSubstring { if !ok { p.log.AddError(&p.tracker, r, fmt.Sprintf("This %s alias is invalid because it contains the unpaired Unicode surrogate U+%X", kind, problem)) - } else { - p.markSyntaxFeature(compat.ArbitraryModuleNamespaceNames, r) } return js_lexer.MaybeSubstring{String: alias} } @@ -6287,6 +6290,7 @@ func (p *parser) parseClass(classKeyword logger.Range, name *ast.LocRef, classOp bodyLoc := p.lexer.Loc() p.lexer.Expect(js_lexer.TOpenBrace) properties := []js_ast.Property{} + hasPropertyDecorator := false // Allow "in" and private fields inside class bodies oldAllowIn := p.allowIn @@ -6316,6 +6320,9 @@ func (p *parser) parseClass(classKeyword logger.Range, name *ast.LocRef, classOp firstDecoratorLoc := p.lexer.Loc() scopeIndex := len(p.scopesInOrder) opts.decorators = p.parseDecorators(p.currentScope, classKeyword, opts.decoratorContext) + if len(opts.decorators) > 0 { + hasPropertyDecorator = true + } // This property may turn out to be a type in TypeScript, which should be ignored if property, ok := p.parseProperty(p.saveExprCommentsHere(), js_ast.PropertyField, opts, nil); ok { @@ -6353,6 +6360,33 @@ func (p *parser) parseClass(classKeyword logger.Range, name *ast.LocRef, classOp closeBraceLoc := p.saveExprCommentsHere() p.lexer.Expect(js_lexer.TCloseBrace) + + // TypeScript has legacy behavior that uses assignment semantics instead of + // define semantics for class fields when "useDefineForClassFields" is enabled + // (in which case 
TypeScript behaves differently than JavaScript, which is + // arguably "wrong"). + // + // This legacy behavior exists because TypeScript added class fields to + // TypeScript before they were added to JavaScript. They decided to go with + // assignment semantics for whatever reason. Later on TC39 decided to go with + // define semantics for class fields instead. This behaves differently if the + // base class has a setter with the same name. + // + // The value of "useDefineForClassFields" defaults to false when it's not + // specified and the target is earlier than "ES2022" since the class field + // language feature was added in ES2022. However, TypeScript's "target" + // setting currently defaults to "ES3" which unfortunately means that the + // "useDefineForClassFields" setting defaults to false (i.e. to "wrong"). + // + // We default "useDefineForClassFields" to true (i.e. to "correct") instead. + // This is partially because our target defaults to "esnext", and partially + // because this is a legacy behavior that no one should be using anymore. + // Users that want the wrong behavior can either set "useDefineForClassFields" + // to false in "tsconfig.json" explicitly, or set TypeScript's "target" to + // "ES2021" or earlier in their in "tsconfig.json" file. + useDefineForClassFields := !p.options.ts.Parse || p.options.ts.Config.UseDefineForClassFields == config.True || + (p.options.ts.Config.UseDefineForClassFields == config.Unspecified && p.options.ts.Config.Target != config.TSTargetBelowES2022) + return js_ast.Class{ ClassKeyword: classKeyword, Decorators: classOpts.decorators, @@ -6362,31 +6396,16 @@ func (p *parser) parseClass(classKeyword logger.Range, name *ast.LocRef, classOp Properties: properties, CloseBraceLoc: closeBraceLoc, - // TypeScript has legacy behavior that uses assignment semantics instead of - // define semantics for class fields when "useDefineForClassFields" is enabled - // (in which case TypeScript behaves differently than JavaScript, which is - // arguably "wrong"). - // - // This legacy behavior exists because TypeScript added class fields to - // TypeScript before they were added to JavaScript. They decided to go with - // assignment semantics for whatever reason. Later on TC39 decided to go with - // define semantics for class fields instead. This behaves differently if the - // base class has a setter with the same name. - // - // The value of "useDefineForClassFields" defaults to false when it's not - // specified and the target is earlier than "ES2022" since the class field - // language feature was added in ES2022. However, TypeScript's "target" - // setting currently defaults to "ES3" which unfortunately means that the - // "useDefineForClassFields" setting defaults to false (i.e. to "wrong"). - // - // We default "useDefineForClassFields" to true (i.e. to "correct") instead. - // This is partially because our target defaults to "esnext", and partially - // because this is a legacy behavior that no one should be using anymore. - // Users that want the wrong behavior can either set "useDefineForClassFields" - // to false in "tsconfig.json" explicitly, or set TypeScript's "target" to - // "ES2021" or earlier in their in "tsconfig.json" file. 
- UseDefineForClassFields: !p.options.ts.Parse || p.options.ts.Config.UseDefineForClassFields == config.True || - (p.options.ts.Config.UseDefineForClassFields == config.Unspecified && p.options.ts.Config.Target != config.TSTargetBelowES2022), + // Always lower standard decorators if they are present and TypeScript's + // "useDefineForClassFields" setting is false even if the configured target + // environment supports decorators. This setting changes the behavior of + // class fields, and so we must lower decorators so they behave correctly. + ShouldLowerStandardDecorators: (len(classOpts.decorators) > 0 || hasPropertyDecorator) && + ((!p.options.ts.Parse && p.options.unsupportedJSFeatures.Has(compat.Decorators)) || + (p.options.ts.Parse && p.options.ts.Config.ExperimentalDecorators != config.True && + (p.options.unsupportedJSFeatures.Has(compat.Decorators) || !useDefineForClassFields))), + + UseDefineForClassFields: useDefineForClassFields, } } @@ -6480,6 +6499,9 @@ func (p *parser) parsePath() (logger.Range, string, *ast.ImportAssertOrWith, ast closeBraceLoc := p.saveExprCommentsHere() p.lexer.Expect(js_lexer.TCloseBrace) + if keyword == ast.AssertKeyword { + p.maybeWarnAboutAssertKeyword(keywordLoc) + } assertOrWith = &ast.ImportAssertOrWith{ Entries: entries, Keyword: keyword, @@ -6492,6 +6514,20 @@ func (p *parser) parsePath() (logger.Range, string, *ast.ImportAssertOrWith, ast return pathRange, pathText, assertOrWith, flags } +// Let people know if they probably should be using "with" instead of "assert" +func (p *parser) maybeWarnAboutAssertKeyword(loc logger.Loc) { + if p.options.unsupportedJSFeatures.Has(compat.ImportAssertions) && !p.options.unsupportedJSFeatures.Has(compat.ImportAttributes) { + where := config.PrettyPrintTargetEnvironment(p.options.originalTargetEnv, p.options.unsupportedJSFeatureOverridesMask) + msg := logger.Msg{ + Kind: logger.Warning, + Data: p.tracker.MsgData(js_lexer.RangeOfIdentifier(p.source, loc), "The \"assert\" keyword is not supported in "+where), + Notes: []logger.MsgData{{Text: "Did you mean to use \"with\" instead of \"assert\"?"}}, + } + msg.Data.Location.Suggestion = "with" + p.log.AddMsgID(logger.MsgID_JS_AssertToWith, msg) + } +} + // This assumes the "function" token has already been parsed func (p *parser) parseFnStmt(loc logger.Loc, opts parseStmtOpts, isAsync bool, asyncRange logger.Range) js_ast.Stmt { isGenerator := p.lexer.Token == js_lexer.TAsterisk @@ -7329,7 +7365,7 @@ func (p *parser) parseStmt(opts parseStmtOpts) js_ast.Stmt { for p.lexer.Token != js_lexer.TCloseBrace { var value js_ast.Expr body := []js_ast.Stmt{} - caseLoc := p.lexer.Loc() + caseLoc := p.saveExprCommentsHere() if p.lexer.Token == js_lexer.TDefault { if foundDefault { @@ -10158,7 +10194,7 @@ func (p *parser) visitAndAppendStmt(stmts []js_ast.Stmt, stmt js_ast.Stmt) []js_ result := p.visitClass(s.Value.Loc, &s2.Class, s.DefaultName.Ref, "default") // Lower class field syntax for browsers that don't support it - classStmts, _ := p.lowerClass(stmt, js_ast.Expr{}, result) + classStmts, _ := p.lowerClass(stmt, js_ast.Expr{}, result, "") // Remember if the class was side-effect free before lowering if result.canBeRemovedIfUnused { @@ -10284,6 +10320,18 @@ func (p *parser) visitAndAppendStmt(stmts []js_ast.Stmt, stmt js_ast.Stmt) []js_ } } + // Handle "for await" that has been lowered by moving this label inside the "try" + if try, ok := s.Stmt.Data.(*js_ast.STry); ok && len(try.Block.Stmts) > 0 { + if _, ok := try.Block.Stmts[0].Data.(*js_ast.SFor); ok { + 
try.Block.Stmts[0] = js_ast.Stmt{Loc: stmt.Loc, Data: &js_ast.SLabel{ + Stmt: try.Block.Stmts[0], + Name: s.Name, + IsSingleLineStmt: s.IsSingleLineStmt, + }} + return append(stmts, s.Stmt) + } + } + case *js_ast.SLocal: // Silently remove unsupported top-level "await" in dead code branches if s.Kind == js_ast.LocalAwaitUsing && p.fnOrArrowDataVisit.isOutsideFnOrArrow { @@ -10726,6 +10774,13 @@ func (p *parser) visitAndAppendStmt(stmts []js_ast.Stmt, stmt js_ast.Stmt) []js_ p.popScope() if s.Catch != nil { + old := p.isControlFlowDead + + // If the try body is empty, then the catch body is dead + if len(s.Block.Stmts) == 0 { + p.isControlFlowDead = true + } + p.pushScopeForVisitPass(js_ast.ScopeCatchBinding, s.Catch.Loc) if s.Catch.BindingOrNil.Data != nil { p.visitBinding(s.Catch.BindingOrNil, bindingOpts{}) @@ -10737,6 +10792,8 @@ func (p *parser) visitAndAppendStmt(stmts []js_ast.Stmt, stmt js_ast.Stmt) []js_ p.lowerObjectRestInCatchBinding(s.Catch) p.popScope() + + p.isControlFlowDead = old } if s.Finally != nil { @@ -10745,6 +10802,44 @@ func (p *parser) visitAndAppendStmt(stmts []js_ast.Stmt, stmt js_ast.Stmt) []js_ p.popScope() } + // Drop the whole thing if the try body is empty + if p.options.minifySyntax && len(s.Block.Stmts) == 0 { + keepCatch := false + + // Certain "catch" blocks need to be preserved: + // + // try {} catch { let foo } // Can be removed + // try {} catch { var foo } // Must be kept + // + if s.Catch != nil { + for _, stmt2 := range s.Catch.Block.Stmts { + if shouldKeepStmtInDeadControlFlow(stmt2) { + keepCatch = true + break + } + } + } + + // Make sure to preserve the "finally" block if present + if !keepCatch { + if s.Finally == nil { + return stmts + } + finallyNeedsBlock := false + for _, stmt2 := range s.Finally.Block.Stmts { + if statementCaresAboutScope(stmt2) { + finallyNeedsBlock = true + break + } + } + if !finallyNeedsBlock { + return append(stmts, s.Finally.Block.Stmts...) 
+ } + block := s.Finally.Block + stmt = js_ast.Stmt{Loc: s.Finally.Loc, Data: &block} + } + } + case *js_ast.SSwitch: s.Test = p.visitExpr(s.Test) p.pushScopeForVisitPass(js_ast.ScopeBlock, s.BodyLoc) @@ -10856,7 +10951,7 @@ func (p *parser) visitAndAppendStmt(stmts []js_ast.Stmt, stmt js_ast.Stmt) []js_ } // Lower class field syntax for browsers that don't support it - classStmts, _ := p.lowerClass(stmt, js_ast.Expr{}, result) + classStmts, _ := p.lowerClass(stmt, js_ast.Expr{}, result, "") // Remember if the class was side-effect free before lowering if result.canBeRemovedIfUnused { @@ -11927,8 +12022,8 @@ func (p *parser) instantiateDefineExpr(loc logger.Loc, expr config.DefineExpr, o // Substitute user-specified defines if defines, ok := p.options.defines.DotDefines[parts[len(parts)-1]]; ok { for _, define := range defines { - if define.Data.DefineExpr != nil && helpers.StringArraysEqual(define.Parts, parts) { - return p.instantiateDefineExpr(loc, *define.Data.DefineExpr, opts) + if define.DefineExpr != nil && helpers.StringArraysEqual(define.KeyParts, parts) { + return p.instantiateDefineExpr(loc, *define.DefineExpr, opts) } } } @@ -12861,10 +12956,10 @@ func (p *parser) visitExprInOut(expr js_ast.Expr, in exprIn) (js_ast.Expr, exprO // Check both user-specified defines and known globals if defines, ok := p.options.defines.DotDefines["meta"]; ok { for _, define := range defines { - if p.isDotOrIndexDefineMatch(expr, define.Parts) { + if p.isDotOrIndexDefineMatch(expr, define.KeyParts) { // Substitute user-specified defines - if define.Data.DefineExpr != nil { - return p.instantiateDefineExpr(expr.Loc, *define.Data.DefineExpr, identifierOpts{ + if define.DefineExpr != nil { + return p.instantiateDefineExpr(expr.Loc, *define.DefineExpr, identifierOpts{ assignTarget: in.assignTarget, isCallTarget: isCallTarget, isDeleteTarget: isDeleteTarget, @@ -13492,10 +13587,10 @@ func (p *parser) visitExprInOut(expr js_ast.Expr, in exprIn) (js_ast.Expr, exprO // Check both user-specified defines and known globals if defines, ok := p.options.defines.DotDefines[e.Name]; ok { for _, define := range defines { - if p.isDotOrIndexDefineMatch(expr, define.Parts) { + if p.isDotOrIndexDefineMatch(expr, define.KeyParts) { // Substitute user-specified defines - if define.Data.DefineExpr != nil { - new := p.instantiateDefineExpr(expr.Loc, *define.Data.DefineExpr, identifierOpts{ + if define.DefineExpr != nil { + new := p.instantiateDefineExpr(expr.Loc, *define.DefineExpr, identifierOpts{ assignTarget: in.assignTarget, isCallTarget: isCallTarget, isDeleteTarget: isDeleteTarget, @@ -13511,13 +13606,13 @@ func (p *parser) visitExprInOut(expr js_ast.Expr, in exprIn) (js_ast.Expr, exprO } // Copy the side effect flags over in case this expression is unused - if define.Data.Flags.Has(config.CanBeRemovedIfUnused) { + if define.Flags.Has(config.CanBeRemovedIfUnused) { e.CanBeRemovedIfUnused = true } - if define.Data.Flags.Has(config.CallCanBeUnwrappedIfUnused) && !p.options.ignoreDCEAnnotations { + if define.Flags.Has(config.CallCanBeUnwrappedIfUnused) && !p.options.ignoreDCEAnnotations { e.CallCanBeUnwrappedIfUnused = true } - if define.Data.Flags.Has(config.IsSymbolInstance) { + if define.Flags.Has(config.IsSymbolInstance) { e.IsSymbolInstance = true } break @@ -13614,10 +13709,10 @@ func (p *parser) visitExprInOut(expr js_ast.Expr, in exprIn) (js_ast.Expr, exprO if str, ok := e.Index.Data.(*js_ast.EString); ok { if defines, ok := p.options.defines.DotDefines[helpers.UTF16ToString(str.Value)]; ok { for _, define := 
range defines { - if p.isDotOrIndexDefineMatch(expr, define.Parts) { + if p.isDotOrIndexDefineMatch(expr, define.KeyParts) { // Substitute user-specified defines - if define.Data.DefineExpr != nil { - new := p.instantiateDefineExpr(expr.Loc, *define.Data.DefineExpr, identifierOpts{ + if define.DefineExpr != nil { + new := p.instantiateDefineExpr(expr.Loc, *define.DefineExpr, identifierOpts{ assignTarget: in.assignTarget, isCallTarget: isCallTarget, isDeleteTarget: isDeleteTarget, @@ -13637,13 +13732,13 @@ func (p *parser) visitExprInOut(expr js_ast.Expr, in exprIn) (js_ast.Expr, exprO } // Copy the side effect flags over in case this expression is unused - if define.Data.Flags.Has(config.CanBeRemovedIfUnused) { + if define.Flags.Has(config.CanBeRemovedIfUnused) { e.CanBeRemovedIfUnused = true } - if define.Data.Flags.Has(config.CallCanBeUnwrappedIfUnused) && !p.options.ignoreDCEAnnotations { + if define.Flags.Has(config.CallCanBeUnwrappedIfUnused) && !p.options.ignoreDCEAnnotations { e.CallCanBeUnwrappedIfUnused = true } - if define.Data.Flags.Has(config.IsSymbolInstance) { + if define.Flags.Has(config.IsSymbolInstance) { e.IsSymbolInstance = true } break @@ -14265,6 +14360,9 @@ func (p *parser) visitExprInOut(expr js_ast.Expr, in exprIn) (js_ast.Expr, exprO break } if entries != nil { + if keyword == ast.AssertKeyword { + p.maybeWarnAboutAssertKeyword(prop.Key.Loc) + } assertOrWith = &ast.ImportAssertOrWith{ Entries: entries, Keyword: keyword, @@ -15099,7 +15197,7 @@ func (p *parser) visitExprInOut(expr js_ast.Expr, in exprIn) (js_ast.Expr, exprO result := p.visitClass(expr.Loc, &e.Class, ast.InvalidRef, nameToKeep) // Lower class field syntax for browsers that don't support it - _, expr = p.lowerClass(js_ast.Stmt{}, expr, result) + _, expr = p.lowerClass(js_ast.Stmt{}, expr, result, nameToKeep) // We may be able to determine that a class is side-effect before lowering // but not after lowering (e.g. due to "--keep-names" mutating the object). 
@@ -15287,8 +15385,8 @@ func (v *binaryExprVisitor) visitRightAndFinish(p *parser) js_ast.Expr { } } - if p.shouldFoldTypeScriptConstantExpressions || (p.options.minifySyntax && js_ast.ShouldFoldBinaryArithmeticWhenMinifying(e)) { - if result := js_ast.FoldBinaryArithmetic(v.loc, e); result.Data != nil { + if p.shouldFoldTypeScriptConstantExpressions || (p.options.minifySyntax && js_ast.ShouldFoldBinaryOperatorWhenMinifying(e)) { + if result := js_ast.FoldBinaryOperator(v.loc, e); result.Data != nil { return result } } diff --git a/vendor/github.com/evanw/esbuild/internal/js_parser/js_parser_lower.go b/vendor/github.com/evanw/esbuild/internal/js_parser/js_parser_lower.go index f9f9958a781..3991058e178 100644 --- a/vendor/github.com/evanw/esbuild/internal/js_parser/js_parser_lower.go +++ b/vendor/github.com/evanw/esbuild/internal/js_parser/js_parser_lower.go @@ -88,11 +88,6 @@ func (p *parser) markSyntaxFeature(feature compat.JSFeature, r logger.Range) (di "Top-level await is not available in %s", where)) return - case compat.ArbitraryModuleNamespaceNames: - p.log.AddError(&p.tracker, r, fmt.Sprintf( - "Using a string as a module namespace identifier name is not supported in %s", where)) - return - case compat.Bigint: // Transforming these will never be supported p.log.AddError(&p.tracker, r, fmt.Sprintf( diff --git a/vendor/github.com/evanw/esbuild/internal/js_parser/js_parser_lower_class.go b/vendor/github.com/evanw/esbuild/internal/js_parser/js_parser_lower_class.go index f92ec631b88..01458368cf1 100644 --- a/vendor/github.com/evanw/esbuild/internal/js_parser/js_parser_lower_class.go +++ b/vendor/github.com/evanw/esbuild/internal/js_parser/js_parser_lower_class.go @@ -416,8 +416,7 @@ func (p *parser) computeClassLoweringInfo(class *js_ast.Class) (result classLowe // due to the complexity of the decorator specification. The specification is // also still evolving so trying to optimize it now is also potentially // premature. - if p.options.unsupportedJSFeatures.Has(compat.Decorators) && - (!p.options.ts.Parse || p.options.ts.Config.ExperimentalDecorators != config.True) { + if class.ShouldLowerStandardDecorators { for _, prop := range class.Properties { if len(prop.Decorators) > 0 { for _, prop := range class.Properties { @@ -623,14 +622,15 @@ const ( ) type lowerClassContext struct { - optionalNameHint string - kind classKind - class *js_ast.Class - classLoc logger.Loc - classExpr js_ast.Expr // Only for "kind == classKindExpr", may be replaced by "nameFunc()" - defaultName ast.LocRef + nameToKeep string + kind classKind + class *js_ast.Class + classLoc logger.Loc + classExpr js_ast.Expr // Only for "kind == classKindExpr", may be replaced by "nameFunc()" + defaultName ast.LocRef ctor *js_ast.EFunction + extendsRef ast.Ref parameterFields []js_ast.Stmt instanceMembers []js_ast.Stmt instancePrivateMethods []js_ast.Stmt @@ -679,8 +679,10 @@ type lowerClassContext struct { // body (e.g. the contents of initializers, methods, and static blocks). Those // have already been transformed by "visitClass" by this point. It's done that // way for performance so that we don't need to do another AST pass. 
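The rename of ShouldFoldBinaryArithmeticWhenMinifying/FoldBinaryArithmetic to ShouldFoldBinaryOperatorWhenMinifying/FoldBinaryOperator in the hunk above suggests the constant folder now covers more than arithmetic; the same helpers are also called from the printer's late folding pass further down in this diff, which additionally folds "?:" expressions whose test becomes constant. A sketch of how the folding surfaces through the public API; which operators actually fold is decided by ShouldFoldBinaryOperatorWhenMinifying in the vendored version, so treat the output as illustrative:

package main

import (
	"fmt"

	"github.com/evanw/esbuild/pkg/api"
)

func main() {
	// With syntax minification enabled, constant sub-expressions such as the
	// bitwise arithmetic below can be folded to a single literal.
	result := api.Transform(`console.log((1 << 10) | 3, "a" + "b")`, api.TransformOptions{
		MinifySyntax: true,
	})
	fmt.Println(string(result.Code)) // output is version-dependent
}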
-func (p *parser) lowerClass(stmt js_ast.Stmt, expr js_ast.Expr, result visitClassResult) ([]js_ast.Stmt, js_ast.Expr) { +func (p *parser) lowerClass(stmt js_ast.Stmt, expr js_ast.Expr, result visitClassResult, nameToKeep string) ([]js_ast.Stmt, js_ast.Expr) { ctx := lowerClassContext{ + nameToKeep: nameToKeep, + extendsRef: ast.InvalidRef, decoratorContextRef: ast.InvalidRef, privateInstanceMethodRef: ast.InvalidRef, privateStaticMethodRef: ast.InvalidRef, @@ -694,7 +696,7 @@ func (p *parser) lowerClass(stmt js_ast.Stmt, expr js_ast.Expr, result visitClas ctx.kind = classKindExpr if ctx.class.Name != nil { symbol := &p.symbols[ctx.class.Name.Ref.InnerIndex] - ctx.optionalNameHint = symbol.OriginalName + ctx.nameToKeep = symbol.OriginalName // The inner class name inside the class expression should be the same as // the class expression name itself @@ -708,13 +710,10 @@ func (p *parser) lowerClass(stmt js_ast.Stmt, expr js_ast.Expr, result visitClas ctx.class.Name = nil } } - if p.nameToKeepIsFor == e { - ctx.optionalNameHint = p.nameToKeep - } } else if s, ok := stmt.Data.(*js_ast.SClass); ok { ctx.class = &s.Class if ctx.class.Name != nil { - ctx.optionalNameHint = p.symbols[ctx.class.Name.Ref.InnerIndex].OriginalName + ctx.nameToKeep = p.symbols[ctx.class.Name.Ref.InnerIndex].OriginalName } if s.IsExport { ctx.kind = classKindExportStmt @@ -726,7 +725,7 @@ func (p *parser) lowerClass(stmt js_ast.Stmt, expr js_ast.Expr, result visitClas s2, _ := s.Value.Data.(*js_ast.SClass) ctx.class = &s2.Class if ctx.class.Name != nil { - ctx.optionalNameHint = p.symbols[ctx.class.Name.Ref.InnerIndex].OriginalName + ctx.nameToKeep = p.symbols[ctx.class.Name.Ref.InnerIndex].OriginalName } ctx.defaultName = s.DefaultName ctx.kind = classKindExportDefaultStmt @@ -823,8 +822,9 @@ func (ctx *lowerClassContext) lowerField( shouldOmitFieldInitializer bool, staticFieldToBlockAssign bool, initializerIndex int, -) (js_ast.Property, bool) { +) (js_ast.Property, ast.Ref, bool) { mustLowerPrivate := private != nil && p.privateSymbolNeedsToBeLowered(private) + ref := ast.InvalidRef // The TypeScript compiler doesn't follow the JavaScript spec for // uninitialized fields. 
They are supposed to be set to undefined but the @@ -858,7 +858,7 @@ func (ctx *lowerClassContext) lowerField( } args := []js_ast.Expr{ {Loc: loc, Data: &js_ast.EIdentifier{Ref: ctx.decoratorContextRef}}, - {Loc: loc, Data: &js_ast.ENumber{Value: float64((3 + 2*initializerIndex) << 1)}}, + {Loc: loc, Data: &js_ast.ENumber{Value: float64((4 + 2*initializerIndex) << 1)}}, value, } if _, ok := init.Data.(*js_ast.EUndefined); !ok { @@ -872,7 +872,7 @@ func (ctx *lowerClassContext) lowerField( var memberExpr js_ast.Expr if mustLowerPrivate { // Generate a new symbol for this private field - ref := p.generateTempRef(tempRefNeedsDeclare, "_"+p.symbols[private.Ref.InnerIndex].OriginalName[1:]) + ref = p.generateTempRef(tempRefNeedsDeclare, "_"+p.symbols[private.Ref.InnerIndex].OriginalName[1:]) p.symbols[private.Ref.InnerIndex].Link = ref // Initialize the private field to a new WeakMap @@ -930,7 +930,7 @@ func (ctx *lowerClassContext) lowerField( } memberExpr = js_ast.JoinWithComma(memberExpr, p.callRuntime(loc, "__runInitializers", []js_ast.Expr{ {Loc: loc, Data: &js_ast.EIdentifier{Ref: ctx.decoratorContextRef}}, - {Loc: loc, Data: &js_ast.ENumber{Value: float64(((4 + 2*initializerIndex) << 1) | 1)}}, + {Loc: loc, Data: &js_ast.ENumber{Value: float64(((5 + 2*initializerIndex) << 1) | 1)}}, value, })) p.recordUsage(ctx.decoratorContextRef) @@ -949,7 +949,7 @@ func (ctx *lowerClassContext) lowerField( {Loc: loc, Data: &js_ast.SExpr{Value: memberExpr}}}, }, }, - }, true + }, ref, true } else { // Move this property to an assignment after the class ends ctx.staticMembers = append(ctx.staticMembers, memberExpr) @@ -962,12 +962,12 @@ func (ctx *lowerClassContext) lowerField( if private == nil || mustLowerPrivate { // Remove the field from the class body - return js_ast.Property{}, false + return js_ast.Property{}, ref, false } // Keep the private field but remove the initializer prop.InitializerOrNil = js_ast.Expr{} - return prop, true + return prop, ref, true } func (ctx *lowerClassContext) lowerPrivateMethod(p *parser, prop js_ast.Property, private *js_ast.EPrivateIdentifier) { @@ -986,8 +986,8 @@ func (ctx *lowerClassContext) lowerPrivateMethod(p *parser, prop js_ast.Property } else { name = "_instances" } - if ctx.optionalNameHint != "" { - name = fmt.Sprintf("_%s%s", ctx.optionalNameHint, name) + if ctx.nameToKeep != "" { + name = fmt.Sprintf("_%s%s", ctx.nameToKeep, name) } *ref = p.generateTempRef(tempRefNeedsDeclare, name) @@ -1107,7 +1107,7 @@ func (ctx *lowerClassContext) analyzeProperty(p *parser, prop js_ast.Property, c analysis.private, _ = prop.Key.Data.(*js_ast.EPrivateIdentifier) mustLowerPrivate := analysis.private != nil && p.privateSymbolNeedsToBeLowered(analysis.private) analysis.shouldOmitFieldInitializer = p.options.ts.Parse && !prop.Kind.IsMethodDefinition() && prop.InitializerOrNil.Data == nil && - !ctx.class.UseDefineForClassFields && !mustLowerPrivate + !ctx.class.UseDefineForClassFields && !mustLowerPrivate && !ctx.class.ShouldLowerStandardDecorators // Class fields must be lowered if the environment doesn't support them if !prop.Kind.IsMethodDefinition() { @@ -1139,7 +1139,7 @@ func (ctx *lowerClassContext) analyzeProperty(p *parser, prop js_ast.Property, c // they will end up being lowered (if they are even being lowered at all) if p.options.ts.Parse && p.options.ts.Config.ExperimentalDecorators == config.True { analysis.propExperimentalDecorators = prop.Decorators - } else if p.options.unsupportedJSFeatures.Has(compat.Decorators) { + } else if 
ctx.class.ShouldLowerStandardDecorators { analysis.propDecorators = prop.Decorators } @@ -1180,19 +1180,17 @@ func (ctx *lowerClassContext) analyzeProperty(p *parser, prop js_ast.Property, c return } -func (p *parser) propertyNameHint(key js_ast.Expr, suffix string) string { - var text string +func (p *parser) propertyNameHint(key js_ast.Expr) string { switch k := key.Data.(type) { case *js_ast.EString: - text = helpers.UTF16ToString(k.Value) + return helpers.UTF16ToString(k.Value) case *js_ast.EIdentifier: - text = p.symbols[k.Ref.InnerIndex].OriginalName + return p.symbols[k.Ref.InnerIndex].OriginalName case *js_ast.EPrivateIdentifier: - text = p.symbols[k.Ref.InnerIndex].OriginalName[1:] + return p.symbols[k.Ref.InnerIndex].OriginalName[1:] default: - return suffix + return "" } - return fmt.Sprintf("_%s%s", text, suffix) } func (ctx *lowerClassContext) hoistComputedProperties(p *parser, classLoweringInfo classLoweringInfo) ( @@ -1234,7 +1232,12 @@ func (ctx *lowerClassContext) hoistComputedProperties(p *parser, classLoweringIn // Evaluate the decorator expressions inline before computed property keys var decorators js_ast.Expr if len(analysis.propDecorators) > 0 { - ref := p.generateTempRef(tempRefNeedsDeclare, p.propertyNameHint(prop.Key, "_dec")) + name := p.propertyNameHint(prop.Key) + if name != "" { + name = "_" + name + } + name += "_dec" + ref := p.generateTempRef(tempRefNeedsDeclare, name) values := make([]js_ast.Expr, len(analysis.propDecorators)) for i, decorator := range analysis.propDecorators { values[i] = decorator.Value @@ -1400,13 +1403,13 @@ func (ctx *lowerClassContext) hoistComputedProperties(p *parser, classLoweringIn // __publicField(Foo, _a); // if ctx.computedPropertyChain.Data != nil && ctx.class.ExtendsOrNil.Data != nil { - ref := p.generateTempRef(tempRefNeedsDeclare, "") + ctx.extendsRef = p.generateTempRef(tempRefNeedsDeclare, "") ctx.class.ExtendsOrNil = js_ast.JoinWithComma(js_ast.JoinWithComma( - js_ast.Assign(js_ast.Expr{Loc: ctx.class.ExtendsOrNil.Loc, Data: &js_ast.EIdentifier{Ref: ref}}, ctx.class.ExtendsOrNil), + js_ast.Assign(js_ast.Expr{Loc: ctx.class.ExtendsOrNil.Loc, Data: &js_ast.EIdentifier{Ref: ctx.extendsRef}}, ctx.class.ExtendsOrNil), ctx.computedPropertyChain), - js_ast.Expr{Loc: ctx.class.ExtendsOrNil.Loc, Data: &js_ast.EIdentifier{Ref: ref}}) - p.recordUsage(ref) - p.recordUsage(ref) + js_ast.Expr{Loc: ctx.class.ExtendsOrNil.Loc, Data: &js_ast.EIdentifier{Ref: ctx.extendsRef}}) + p.recordUsage(ctx.extendsRef) + p.recordUsage(ctx.extendsRef) ctx.computedPropertyChain = js_ast.Expr{} } return @@ -1447,7 +1450,7 @@ func (ctx *lowerClassContext) processProperties(p *parser, classLoweringInfo cla propertyKeyTempRefs, decoratorTempRefs := ctx.hoistComputedProperties(p, classLoweringInfo) // Save the initializer index for each field and accessor element - if p.options.unsupportedJSFeatures.Has(compat.Decorators) && (!p.options.ts.Parse || p.options.ts.Config.ExperimentalDecorators != config.True) { + if ctx.class.ShouldLowerStandardDecorators { var counts [4]int // Count how many initializers there are in each section @@ -1480,9 +1483,8 @@ func (ctx *lowerClassContext) processProperties(p *parser, classLoweringInfo cla } // Evaluate the decorator expressions inline - if p.options.unsupportedJSFeatures.Has(compat.Decorators) && len(ctx.class.Decorators) > 0 && - (!p.options.ts.Parse || p.options.ts.Config.ExperimentalDecorators != config.True) { - name := ctx.optionalNameHint + if ctx.class.ShouldLowerStandardDecorators && 
len(ctx.class.Decorators) > 0 { + name := ctx.nameToKeep if name == "" { name = "class" } @@ -1676,16 +1678,20 @@ func (ctx *lowerClassContext) processProperties(p *parser, classLoweringInfo cla args = append(args, ctx.nameFunc()) } - autoAccessorWeakMapRef := ast.InvalidRef + // Auto-accessors will generate a private field for storage. Lower this + // field, which will generate a WeakMap instance, and then pass the + // WeakMap instance into the decorator helper so the lowered getter and + // setter can use it. if prop.Kind == js_ast.PropertyAutoAccessor { - // Initialize the private field to a new WeakMap - if p.weakMapRef == ast.InvalidRef { - p.weakMapRef = p.newSymbol(ast.SymbolUnbound, "WeakMap") - p.moduleScope.Generated = append(p.moduleScope.Generated, p.weakMapRef) + var kind ast.SymbolKind + if prop.Flags.Has(js_ast.PropertyIsStatic) { + kind = ast.SymbolPrivateStaticField + } else { + kind = ast.SymbolPrivateField } - - // Pass the WeakMap instance into the decorator helper - autoAccessorWeakMapRef = p.generateTempRef(tempRefNeedsDeclare, p.propertyNameHint(prop.Key, "")) + ref := p.newSymbol(kind, "#"+p.propertyNameHint(prop.Key)) + p.symbols[ref.InnerIndex].Flags |= ast.PrivateSymbolMustBeLowered + _, autoAccessorWeakMapRef, _ := ctx.lowerField(p, prop, &js_ast.EPrivateIdentifier{Ref: ref}, false, false, initializerIndex) args = append(args, js_ast.Expr{Loc: keyLoc, Data: &js_ast.EIdentifier{Ref: autoAccessorWeakMapRef}}) p.recordUsage(autoAccessorWeakMapRef) } @@ -1739,80 +1745,6 @@ func (ctx *lowerClassContext) processProperties(p *parser, classLoweringInfo cla // Omit decorated auto-accessors as they will be now generated at run-time instead if prop.Kind == js_ast.PropertyAutoAccessor { - // Determine where to store the field - var target js_ast.Expr - if prop.Flags.Has(js_ast.PropertyIsStatic) && !analysis.staticFieldToBlockAssign { - target = ctx.nameFunc() - } else { - target = js_ast.Expr{Loc: loc, Data: js_ast.EThisShared} - } - - // Generate the assignment initializer - var init js_ast.Expr - if prop.InitializerOrNil.Data != nil { - init = prop.InitializerOrNil - } else { - init = js_ast.Expr{Loc: loc, Data: js_ast.EUndefinedShared} - } - - // Optionally call registered decorator initializers - if initializerIndex != -1 { - var value js_ast.Expr - if prop.Flags.Has(js_ast.PropertyIsStatic) { - value = ctx.nameFunc() - } else { - value = js_ast.Expr{Loc: loc, Data: js_ast.EThisShared} - } - args := []js_ast.Expr{ - {Loc: loc, Data: &js_ast.EIdentifier{Ref: ctx.decoratorContextRef}}, - {Loc: loc, Data: &js_ast.ENumber{Value: float64((3 + 2*initializerIndex) << 1)}}, - value, - } - if _, ok := init.Data.(*js_ast.EUndefined); !ok { - args = append(args, init) - } - init = p.callRuntime(init.Loc, "__runInitializers", args) - p.recordUsage(ctx.decoratorContextRef) - } - - // Initialize the private field to a new WeakMap - ctx.privateMembers = append(ctx.privateMembers, js_ast.Assign( - js_ast.Expr{Loc: prop.Key.Loc, Data: &js_ast.EIdentifier{Ref: autoAccessorWeakMapRef}}, - js_ast.Expr{Loc: prop.Key.Loc, Data: &js_ast.ENew{Target: js_ast.Expr{Loc: prop.Key.Loc, Data: &js_ast.EIdentifier{Ref: p.weakMapRef}}}}, - )) - p.recordUsage(autoAccessorWeakMapRef) - - // Add every newly-constructed instance into this map - key := js_ast.Expr{Loc: prop.Key.Loc, Data: &js_ast.EIdentifier{Ref: autoAccessorWeakMapRef}} - args := []js_ast.Expr{target, key} - if _, ok := init.Data.(*js_ast.EUndefined); !ok { - args = append(args, init) - } - memberExpr := p.callRuntime(loc, "__privateAdd", 
args) - p.recordUsage(autoAccessorWeakMapRef) - - // Run extra initializers - if initializerIndex != -1 { - var value js_ast.Expr - if prop.Flags.Has(js_ast.PropertyIsStatic) { - value = ctx.nameFunc() - } else { - value = js_ast.Expr{Loc: loc, Data: js_ast.EThisShared} - } - memberExpr = js_ast.JoinWithComma(memberExpr, p.callRuntime(loc, "__runInitializers", []js_ast.Expr{ - {Loc: loc, Data: &js_ast.EIdentifier{Ref: ctx.decoratorContextRef}}, - {Loc: loc, Data: &js_ast.ENumber{Value: float64(((4 + 2*initializerIndex) << 1) | 1)}}, - value, - })) - p.recordUsage(ctx.decoratorContextRef) - } - - if prop.Flags.Has(js_ast.PropertyIsStatic) { - ctx.staticMembers = append(ctx.staticMembers, memberExpr) - } else { - ctx.instanceMembers = append(ctx.instanceMembers, js_ast.Stmt{Loc: loc, Data: &js_ast.SExpr{Value: memberExpr}}) - } - if analysis.private != nil { ctx.lowerPrivateMethod(p, prop, analysis.private) } @@ -1829,7 +1761,7 @@ func (ctx *lowerClassContext) processProperties(p *parser, classLoweringInfo cla // Lower fields if (!prop.Kind.IsMethodDefinition() && analysis.mustLowerField) || analysis.staticFieldToBlockAssign { var keep bool - prop, keep = ctx.lowerField(p, prop, analysis.private, analysis.shouldOmitFieldInitializer, analysis.staticFieldToBlockAssign, initializerIndex) + prop, _, keep = ctx.lowerField(p, prop, analysis.private, analysis.shouldOmitFieldInitializer, analysis.staticFieldToBlockAssign, initializerIndex) if !keep { continue } @@ -1936,7 +1868,7 @@ func (ctx *lowerClassContext) rewriteAutoAccessorToGetSet( } if !mustLowerField { properties = append(properties, storageProp) - } else if prop, ok := ctx.lowerField(p, storageProp, storagePrivate, false, false, -1); ok { + } else if prop, _, ok := ctx.lowerField(p, storageProp, storagePrivate, false, false, -1); ok { properties = append(properties, prop) } @@ -2059,7 +1991,7 @@ func (ctx *lowerClassContext) insertInitializersIntoConstructor(p *parser, class if ctx.decoratorCallInstanceMethodExtraInitializers { decoratorInstanceMethodExtraInitializers = p.callRuntime(ctx.classLoc, "__runInitializers", []js_ast.Expr{ {Loc: ctx.classLoc, Data: &js_ast.EIdentifier{Ref: ctx.decoratorContextRef}}, - {Loc: ctx.classLoc, Data: &js_ast.ENumber{Value: 5}}, + {Loc: ctx.classLoc, Data: &js_ast.ENumber{Value: (2 << 1) | 1}}, {Loc: ctx.classLoc, Data: js_ast.EThisShared}, }) p.recordUsage(ctx.decoratorContextRef) @@ -2145,25 +2077,31 @@ func (ctx *lowerClassContext) finishAndGenerateCode(p *parser, result visitClass if p.options.ts.Parse && p.options.ts.Config.ExperimentalDecorators == config.True { classExperimentalDecorators = ctx.class.Decorators ctx.class.Decorators = nil - } else if p.options.unsupportedJSFeatures.Has(compat.Decorators) { + } else if ctx.class.ShouldLowerStandardDecorators { classDecorators = ctx.decoratorClassDecorators } - // Handle JavaScript decorators on the class itself var decorateClassExpr js_ast.Expr if classDecorators.Data != nil { + // Handle JavaScript decorators on the class itself if ctx.decoratorContextRef == ast.InvalidRef { ctx.decoratorContextRef = p.generateTempRef(tempRefNeedsDeclare, "_init") } decorateClassExpr = p.callRuntime(ctx.classLoc, "__decorateElement", []js_ast.Expr{ {Loc: ctx.classLoc, Data: &js_ast.EIdentifier{Ref: ctx.decoratorContextRef}}, {Loc: ctx.classLoc, Data: &js_ast.ENumber{Value: 0}}, - {Loc: ctx.classLoc, Data: &js_ast.EString{Value: helpers.StringToUTF16(ctx.optionalNameHint)}}, + {Loc: ctx.classLoc, Data: &js_ast.EString{Value: 
helpers.StringToUTF16(ctx.nameToKeep)}}, classDecorators, ctx.nameFunc(), }) p.recordUsage(ctx.decoratorContextRef) decorateClassExpr = js_ast.Assign(ctx.nameFunc(), decorateClassExpr) + } else if ctx.decoratorContextRef != ast.InvalidRef { + // Decorator metadata is present if there are any decorators on the class at all + decorateClassExpr = p.callRuntime(ctx.classLoc, "__decoratorMetadata", []js_ast.Expr{ + {Loc: ctx.classLoc, Data: &js_ast.EIdentifier{Ref: ctx.decoratorContextRef}}, + ctx.nameFunc(), + }) } // If this is true, we have removed some code from the class body that could @@ -2176,9 +2114,14 @@ func (ctx *lowerClassContext) finishAndGenerateCode(p *parser, result visitClass len(ctx.privateMembers) > 0 || len(ctx.staticPrivateMethods) > 0 || len(ctx.staticMembers) > 0 || + + // TypeScript experimental decorators len(ctx.instanceExperimentalDecorators) > 0 || len(ctx.staticExperimentalDecorators) > 0 || - len(classExperimentalDecorators) > 0) + len(classExperimentalDecorators) > 0 || + + // JavaScript decorators + ctx.decoratorContextRef != ast.InvalidRef) // If we need to represent the class as an expression (even if it's a // statement), then generate another symbol to use as the class name @@ -2206,13 +2149,18 @@ func (ctx *lowerClassContext) finishAndGenerateCode(p *parser, result visitClass // If there are JavaScript decorators, start by allocating a context object if ctx.decoratorContextRef != ast.InvalidRef { - prefixExprs = append(prefixExprs, js_ast.Assign( + base := js_ast.Expr{Loc: ctx.classLoc, Data: js_ast.ENullShared} + if ctx.class.ExtendsOrNil.Data != nil { + if ctx.extendsRef == ast.InvalidRef { + ctx.extendsRef = p.generateTempRef(tempRefNeedsDeclare, "") + ctx.class.ExtendsOrNil = js_ast.Assign(js_ast.Expr{Loc: ctx.class.ExtendsOrNil.Loc, Data: &js_ast.EIdentifier{Ref: ctx.extendsRef}}, ctx.class.ExtendsOrNil) + p.recordUsage(ctx.extendsRef) + } + base.Data = &js_ast.EIdentifier{Ref: ctx.extendsRef} + } + suffixExprs = append(suffixExprs, js_ast.Assign( js_ast.Expr{Loc: ctx.classLoc, Data: &js_ast.EIdentifier{Ref: ctx.decoratorContextRef}}, - js_ast.Expr{Loc: ctx.classLoc, Data: &js_ast.EArray{IsSingleLine: true, Items: []js_ast.Expr{ - {Loc: ctx.classLoc, Data: js_ast.EMissingShared}, // classExtraInitializers - {Loc: ctx.classLoc, Data: js_ast.EMissingShared}, // staticMethodExtraInitializers - {Loc: ctx.classLoc, Data: js_ast.EMissingShared}, // instanceMethodExtraInitializers - }}}, + p.callRuntime(ctx.classLoc, "__decoratorStart", []js_ast.Expr{base}), )) p.recordUsage(ctx.decoratorContextRef) } @@ -2244,7 +2192,7 @@ func (ctx *lowerClassContext) finishAndGenerateCode(p *parser, result visitClass if ctx.decoratorCallStaticMethodExtraInitializers { suffixExprs = append(suffixExprs, p.callRuntime(ctx.classLoc, "__runInitializers", []js_ast.Expr{ {Loc: ctx.classLoc, Data: &js_ast.EIdentifier{Ref: ctx.decoratorContextRef}}, - {Loc: ctx.classLoc, Data: &js_ast.ENumber{Value: 3}}, + {Loc: ctx.classLoc, Data: &js_ast.ENumber{Value: (1 << 1) | 1}}, ctx.nameFunc(), })) p.recordUsage(ctx.decoratorContextRef) @@ -2259,10 +2207,10 @@ func (ctx *lowerClassContext) finishAndGenerateCode(p *parser, result visitClass suffixExprs = append(suffixExprs, ctx.staticExperimentalDecorators...) 
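The decorator hunks above and below switch the lowering trigger from a target-feature check to the per-class ShouldLowerStandardDecorators flag, allocate the decorator context with the new __decoratorStart helper (seeded with the extends base so decorator metadata can be derived from it), and route auto-accessor storage through lowerField so it reuses the lowered private-field WeakMap. A sketch of exercising that lowering path from the outside, assuming a target without native decorator support still forces the lowering; the input class is made up and the emitted helpers are version-dependent:

package main

import (
	"fmt"

	"github.com/evanw/esbuild/pkg/api"
)

func main() {
	// A decorated class plus an auto-accessor. Targeting ES2022 forces the
	// standard-decorator lowering, which emits calls to runtime helpers such
	// as __decorateElement and the reworked decorator context setup.
	src := `
		const log = (value, ctx) => { console.log(ctx.kind, ctx.name) }
		@log class Foo {
			@log accessor count = 0
			@log method() {}
		}
	`
	result := api.Transform(src, api.TransformOptions{
		Loader: api.LoaderJS,
		Target: api.ES2022,
	})
	for _, msg := range result.Errors {
		fmt.Println("error:", msg.Text)
	}
	fmt.Println(string(result.Code))
}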
// For each element initializer of classExtraInitializers - if decorateClassExpr.Data != nil { + if classDecorators.Data != nil { suffixExprs = append(suffixExprs, p.callRuntime(ctx.classLoc, "__runInitializers", []js_ast.Expr{ {Loc: ctx.classLoc, Data: &js_ast.EIdentifier{Ref: ctx.decoratorContextRef}}, - {Loc: ctx.classLoc, Data: &js_ast.ENumber{Value: 1}}, + {Loc: ctx.classLoc, Data: &js_ast.ENumber{Value: (0 << 1) | 1}}, ctx.nameFunc(), })) p.recordUsage(ctx.decoratorContextRef) diff --git a/vendor/github.com/evanw/esbuild/internal/js_printer/js_printer.go b/vendor/github.com/evanw/esbuild/internal/js_printer/js_printer.go index d97f0827faa..3c5cab04571 100644 --- a/vendor/github.com/evanw/esbuild/internal/js_printer/js_printer.go +++ b/vendor/github.com/evanw/esbuild/internal/js_printer/js_printer.go @@ -1749,7 +1749,7 @@ func (p *printer) guardAgainstBehaviorChangeDueToSubstitution(expr js_ast.Expr, // module numeric constants and bitwise operations. This is not an general- // purpose/optimal approach and never will be. For example, we can't affect // tree shaking at this stage because it has already happened. -func (p *printer) lateConstantFoldUnaryOrBinaryExpr(expr js_ast.Expr) js_ast.Expr { +func (p *printer) lateConstantFoldUnaryOrBinaryOrIfExpr(expr js_ast.Expr) js_ast.Expr { switch e := expr.Data.(type) { case *js_ast.EImportIdentifier: ref := ast.FollowSymbols(p.symbols, e.Ref) @@ -1758,23 +1758,28 @@ func (p *printer) lateConstantFoldUnaryOrBinaryExpr(expr js_ast.Expr) js_ast.Exp } case *js_ast.EDot: - if value, ok := p.tryToGetImportedEnumValue(e.Target, e.Name); ok && value.String == nil { - value := js_ast.Expr{Loc: expr.Loc, Data: &js_ast.ENumber{Value: value.Number}} + if value, ok := p.tryToGetImportedEnumValue(e.Target, e.Name); ok { + var inlinedValue js_ast.Expr + if value.String != nil { + inlinedValue = js_ast.Expr{Loc: expr.Loc, Data: &js_ast.EString{Value: value.String}} + } else { + inlinedValue = js_ast.Expr{Loc: expr.Loc, Data: &js_ast.ENumber{Value: value.Number}} + } if strings.Contains(e.Name, "*/") { // Don't wrap with a comment - return value + return inlinedValue } // Wrap with a comment - return js_ast.Expr{Loc: value.Loc, Data: &js_ast.EInlinedEnum{ - Value: value, + return js_ast.Expr{Loc: inlinedValue.Loc, Data: &js_ast.EInlinedEnum{ + Value: inlinedValue, Comment: e.Name, }} } case *js_ast.EUnary: - value := p.lateConstantFoldUnaryOrBinaryExpr(e.Value) + value := p.lateConstantFoldUnaryOrBinaryOrIfExpr(e.Value) // Only fold again if something chained if value.Data != e.Value.Data { @@ -1797,16 +1802,16 @@ func (p *printer) lateConstantFoldUnaryOrBinaryExpr(expr js_ast.Expr) js_ast.Exp } case *js_ast.EBinary: - left := p.lateConstantFoldUnaryOrBinaryExpr(e.Left) - right := p.lateConstantFoldUnaryOrBinaryExpr(e.Right) + left := p.lateConstantFoldUnaryOrBinaryOrIfExpr(e.Left) + right := p.lateConstantFoldUnaryOrBinaryOrIfExpr(e.Right) // Only fold again if something changed if left.Data != e.Left.Data || right.Data != e.Right.Data { binary := &js_ast.EBinary{Op: e.Op, Left: left, Right: right} // Only fold certain operations (just like the parser) - if js_ast.ShouldFoldBinaryArithmeticWhenMinifying(binary) { - if result := js_ast.FoldBinaryArithmetic(expr.Loc, binary); result.Data != nil { + if js_ast.ShouldFoldBinaryOperatorWhenMinifying(binary) { + if result := js_ast.FoldBinaryOperator(expr.Loc, binary); result.Data != nil { return result } } @@ -1814,6 +1819,23 @@ func (p *printer) lateConstantFoldUnaryOrBinaryExpr(expr js_ast.Expr) js_ast.Exp // 
Don't mutate the original AST expr.Data = binary } + + case *js_ast.EIf: + test := p.lateConstantFoldUnaryOrBinaryOrIfExpr(e.Test) + + // Only fold again if something changed + if test.Data != e.Test.Data { + if boolean, sideEffects, ok := js_ast.ToBooleanWithSideEffects(test.Data); ok && sideEffects == js_ast.NoSideEffects { + if boolean { + return p.lateConstantFoldUnaryOrBinaryOrIfExpr(e.Yes) + } else { + return p.lateConstantFoldUnaryOrBinaryOrIfExpr(e.No) + } + } + + // Don't mutate the original AST + expr.Data = &js_ast.EIf{Test: test, Yes: e.Yes, No: e.No} + } } return expr @@ -1964,7 +1986,7 @@ const ( isDeleteTarget isCallTargetOrTemplateTag isPropertyAccessTarget - parentWasUnaryOrBinary + parentWasUnaryOrBinaryOrIfTest ) func (p *printer) printExpr(expr js_ast.Expr, level js_ast.L, flags printExprFlags) { @@ -1978,10 +2000,10 @@ func (p *printer) printExpr(expr js_ast.Expr, level js_ast.L, flags printExprFla // This sets a flag to avoid doing this when the parent is a unary or binary // operator so that we don't trigger O(n^2) behavior when traversing over a // large expression tree. - if p.options.MinifySyntax && (flags&parentWasUnaryOrBinary) == 0 { + if p.options.MinifySyntax && (flags&parentWasUnaryOrBinaryOrIfTest) == 0 { switch expr.Data.(type) { - case *js_ast.EUnary, *js_ast.EBinary: - expr = p.lateConstantFoldUnaryOrBinaryExpr(expr) + case *js_ast.EUnary, *js_ast.EBinary, *js_ast.EIf: + expr = p.lateConstantFoldUnaryOrBinaryOrIfExpr(expr) } } @@ -2650,7 +2672,7 @@ func (p *printer) printExpr(expr js_ast.Expr, level js_ast.L, flags printExprFla p.print("(") flags &= ^forbidIn } - p.printExpr(e.Test, js_ast.LConditional, flags&forbidIn) + p.printExpr(e.Test, js_ast.LConditional, (flags&forbidIn)|parentWasUnaryOrBinaryOrIfTest) p.printSpace() p.print("?") if p.options.LineLimit <= 0 || !p.printNewlinePastLineLimit() { @@ -3136,7 +3158,7 @@ func (p *printer) printExpr(expr js_ast.Expr, level js_ast.L, flags printExprFla } if !e.Op.IsPrefix() { - p.printExpr(e.Value, js_ast.LPostfix-1, parentWasUnaryOrBinary) + p.printExpr(e.Value, js_ast.LPostfix-1, parentWasUnaryOrBinaryOrIfTest) } if entry.IsKeyword { @@ -3157,7 +3179,7 @@ func (p *printer) printExpr(expr js_ast.Expr, level js_ast.L, flags printExprFla } if e.Op.IsPrefix() { - valueFlags := parentWasUnaryOrBinary + valueFlags := parentWasUnaryOrBinaryOrIfTest if e.Op == js_ast.UnOpDelete { valueFlags |= isDeleteTarget } @@ -3347,9 +3369,9 @@ func (v *binaryExprVisitor) checkAndPrepare(p *printer) bool { if e.Op == js_ast.BinOpComma { // The result of the left operand of the comma operator is unused - v.leftFlags = (v.flags & forbidIn) | exprResultIsUnused | parentWasUnaryOrBinary + v.leftFlags = (v.flags & forbidIn) | exprResultIsUnused | parentWasUnaryOrBinaryOrIfTest } else { - v.leftFlags = (v.flags & forbidIn) | parentWasUnaryOrBinary + v.leftFlags = (v.flags & forbidIn) | parentWasUnaryOrBinaryOrIfTest } return true } @@ -3377,9 +3399,9 @@ func (v *binaryExprVisitor) visitRightAndFinish(p *printer) { if e.Op == js_ast.BinOpComma { // The result of the right operand of the comma operator is unused if the caller doesn't use it - p.printExpr(e.Right, v.rightLevel, (v.flags&(forbidIn|exprResultIsUnused))|parentWasUnaryOrBinary) + p.printExpr(e.Right, v.rightLevel, (v.flags&(forbidIn|exprResultIsUnused))|parentWasUnaryOrBinaryOrIfTest) } else { - p.printExpr(e.Right, v.rightLevel, (v.flags&forbidIn)|parentWasUnaryOrBinary) + p.printExpr(e.Right, v.rightLevel, (v.flags&forbidIn)|parentWasUnaryOrBinaryOrIfTest) } if v.wrap 
{ @@ -4555,6 +4577,7 @@ func (p *printer) printStmt(stmt js_ast.Stmt, flags printStmtFlags) { for _, c := range s.Cases { p.printSemicolonIfNeeded() p.printIndent() + p.printExprCommentsAtLoc(c.Loc) p.addSourceMapping(c.Loc) if c.ValueOrNil.Data != nil { diff --git a/vendor/github.com/evanw/esbuild/internal/linker/linker.go b/vendor/github.com/evanw/esbuild/internal/linker/linker.go index 8985541da16..f1d6c83d5a0 100644 --- a/vendor/github.com/evanw/esbuild/internal/linker/linker.go +++ b/vendor/github.com/evanw/esbuild/internal/linker/linker.go @@ -1623,6 +1623,10 @@ func (c *linkerContext) scanImportsAndExports() { continue } + if c.options.OutputFormat == config.FormatESModule && c.options.UnsupportedJSFeatures.Has(compat.ArbitraryModuleNamespaceNames) && c.graph.Files[sourceIndex].IsEntryPoint() { + c.maybeForbidArbitraryModuleNamespaceIdentifier("export", export.SourceIndex, export.NameLoc, alias) + } + aliases = append(aliases, alias) } sort.Strings(aliases) @@ -2837,6 +2841,15 @@ loop: return } +func (c *linkerContext) maybeForbidArbitraryModuleNamespaceIdentifier(kind string, sourceIndex uint32, loc logger.Loc, alias string) { + if !js_ast.IsIdentifier(alias) { + file := &c.graph.Files[sourceIndex] + where := config.PrettyPrintTargetEnvironment(c.options.OriginalTargetEnv, c.options.UnsupportedJSFeatureOverridesMask) + c.log.AddError(file.LineColumnTracker(), file.InputFile.Source.RangeOfString(loc), fmt.Sprintf( + "Using the string %q as an %s name is not supported in %s", alias, kind, where)) + } +} + // Attempt to correct an import name with a typo func (c *linkerContext) maybeCorrectObviousTypo(repr *graph.JSRepr, name string, msg *logger.Msg) { if repr.Meta.ResolvedExportTypos == nil { @@ -4307,6 +4320,12 @@ func (c *linkerContext) convertStmtsForChunk(sourceIndex uint32, stmtList *stmtL continue } + if c.options.UnsupportedJSFeatures.Has(compat.ArbitraryModuleNamespaceNames) && s.Items != nil { + for _, item := range *s.Items { + c.maybeForbidArbitraryModuleNamespaceIdentifier("import", sourceIndex, item.AliasLoc, item.Alias) + } + } + // Make sure these don't end up in the wrapper closure if shouldExtractESMStmtsForWrap { stmtList.outsideWrapperPrefix = append(stmtList.outsideWrapperPrefix, stmt) @@ -4320,6 +4339,10 @@ func (c *linkerContext) convertStmtsForChunk(sourceIndex uint32, stmtList *stmtL continue } + if c.options.UnsupportedJSFeatures.Has(compat.ArbitraryModuleNamespaceNames) { + c.maybeForbidArbitraryModuleNamespaceIdentifier("export", sourceIndex, s.Alias.Loc, s.Alias.OriginalName) + } + if shouldStripExports { // Turn this statement into "import * as ns from 'path'" stmt.Data = &js_ast.SImport{ @@ -4426,6 +4449,15 @@ func (c *linkerContext) convertStmtsForChunk(sourceIndex uint32, stmtList *stmtL continue } + if c.options.UnsupportedJSFeatures.Has(compat.ArbitraryModuleNamespaceNames) { + for _, item := range s.Items { + c.maybeForbidArbitraryModuleNamespaceIdentifier("export", sourceIndex, item.AliasLoc, item.Alias) + if item.AliasLoc != item.Name.Loc { + c.maybeForbidArbitraryModuleNamespaceIdentifier("import", sourceIndex, item.Name.Loc, item.OriginalName) + } + } + } + if shouldStripExports { // Turn this statement into "import {foo} from 'path'" for i, item := range s.Items { @@ -4451,6 +4483,12 @@ func (c *linkerContext) convertStmtsForChunk(sourceIndex uint32, stmtList *stmtL continue } + if c.options.UnsupportedJSFeatures.Has(compat.ArbitraryModuleNamespaceNames) { + for _, item := range s.Items { + 
c.maybeForbidArbitraryModuleNamespaceIdentifier("export", sourceIndex, item.AliasLoc, item.Alias) + } + } + // Make sure these don't end up in the wrapper closure if shouldExtractESMStmtsForWrap { stmtList.outsideWrapperPrefix = append(stmtList.outsideWrapperPrefix, stmt) @@ -5811,6 +5849,16 @@ func (c *linkerContext) generateChunkJS(chunkIndex int, chunkWaitGroup *sync.Wai // Ignore empty source map chunks if compileResult.SourceMapChunk.ShouldIgnore { prevOffset.AdvanceBytes(compileResult.JS) + + // Include a null entry in the source map + if len(compileResult.JS) > 0 && c.options.SourceMap != config.SourceMapNone { + if n := len(compileResultsForSourceMap); n > 0 && !compileResultsForSourceMap[n-1].isNullEntry { + compileResultsForSourceMap = append(compileResultsForSourceMap, compileResultForSourceMap{ + sourceIndex: compileResult.sourceIndex, + isNullEntry: true, + }) + } + } } else { prevOffset = sourcemap.LineColumnOffset{} @@ -5925,7 +5973,7 @@ func (c *linkerContext) generateChunkJS(chunkIndex int, chunkWaitGroup *sync.Wai func (c *linkerContext) generateGlobalNamePrefix() string { var text string globalName := c.options.GlobalName - prefix := globalName[0] + prefix, globalName := globalName[0], globalName[1:] space := " " join := ";\n" @@ -5934,9 +5982,18 @@ func (c *linkerContext) generateGlobalNamePrefix() string { join = ";" } + // Assume the "this" and "import.meta" objects always exist + isExistingObject := prefix == "this" + if prefix == "import" && len(globalName) > 0 && globalName[0] == "meta" { + prefix, globalName = "import.meta", globalName[1:] + isExistingObject = true + } + // Use "||=" to make the code more compact when it's supported - if len(globalName) > 1 && !c.options.UnsupportedJSFeatures.Has(compat.LogicalAssignment) { - if js_printer.CanEscapeIdentifier(prefix, c.options.UnsupportedJSFeatures, c.options.ASCIIOnly) { + if len(globalName) > 0 && !c.options.UnsupportedJSFeatures.Has(compat.LogicalAssignment) { + if isExistingObject { + // Keep the prefix as it is + } else if js_printer.CanEscapeIdentifier(prefix, c.options.UnsupportedJSFeatures, c.options.ASCIIOnly) { if c.options.ASCIIOnly { prefix = string(js_printer.QuoteIdentifier(nil, prefix, c.options.UnsupportedJSFeatures)) } @@ -5944,7 +6001,7 @@ func (c *linkerContext) generateGlobalNamePrefix() string { } else { prefix = fmt.Sprintf("this[%s]", helpers.QuoteForJSON(prefix, c.options.ASCIIOnly)) } - for _, name := range globalName[1:] { + for _, name := range globalName { var dotOrIndex string if js_printer.CanEscapeIdentifier(name, c.options.UnsupportedJSFeatures, c.options.ASCIIOnly) { if c.options.ASCIIOnly { @@ -5954,12 +6011,19 @@ func (c *linkerContext) generateGlobalNamePrefix() string { } else { dotOrIndex = fmt.Sprintf("[%s]", helpers.QuoteForJSON(name, c.options.ASCIIOnly)) } - prefix = fmt.Sprintf("(%s%s||=%s{})%s", prefix, space, space, dotOrIndex) + if isExistingObject { + prefix = fmt.Sprintf("%s%s", prefix, dotOrIndex) + isExistingObject = false + } else { + prefix = fmt.Sprintf("(%s%s||=%s{})%s", prefix, space, space, dotOrIndex) + } } return fmt.Sprintf("%s%s%s=%s", text, prefix, space, space) } - if js_printer.CanEscapeIdentifier(prefix, c.options.UnsupportedJSFeatures, c.options.ASCIIOnly) { + if isExistingObject { + text = fmt.Sprintf("%s%s=%s", prefix, space, space) + } else if js_printer.CanEscapeIdentifier(prefix, c.options.UnsupportedJSFeatures, c.options.ASCIIOnly) { if c.options.ASCIIOnly { prefix = string(js_printer.QuoteIdentifier(nil, prefix, 
c.options.UnsupportedJSFeatures)) } @@ -5969,7 +6033,7 @@ func (c *linkerContext) generateGlobalNamePrefix() string { text = fmt.Sprintf("%s%s=%s", prefix, space, space) } - for _, name := range globalName[1:] { + for _, name := range globalName { oldPrefix := prefix if js_printer.CanEscapeIdentifier(name, c.options.UnsupportedJSFeatures, c.options.ASCIIOnly) { if c.options.ASCIIOnly { @@ -6292,6 +6356,16 @@ func (c *linkerContext) generateChunkCSS(chunkIndex int, chunkWaitGroup *sync.Wa // Ignore empty source map chunks if compileResult.SourceMapChunk.ShouldIgnore { prevOffset.AdvanceBytes(compileResult.CSS) + + // Include a null entry in the source map + if len(compileResult.CSS) > 0 && c.options.SourceMap != config.SourceMapNone && compileResult.sourceIndex.IsValid() { + if n := len(compileResultsForSourceMap); n > 0 && !compileResultsForSourceMap[n-1].isNullEntry { + compileResultsForSourceMap = append(compileResultsForSourceMap, compileResultForSourceMap{ + sourceIndex: compileResult.sourceIndex.GetIndex(), + isNullEntry: true, + }) + } + } } else { prevOffset = sourcemap.LineColumnOffset{} @@ -6864,6 +6938,7 @@ type compileResultForSourceMap struct { sourceMapChunk sourcemap.Chunk generatedOffset sourcemap.LineColumnOffset sourceIndex uint32 + isNullEntry bool } func (c *linkerContext) generateSourceMapForChunk( @@ -6891,6 +6966,9 @@ func (c *linkerContext) generateSourceMapForChunk( continue } sourceIndexToSourcesIndex[result.sourceIndex] = nextSourcesIndex + if result.isNullEntry { + continue + } file := &c.graph.Files[result.sourceIndex] // Simple case: no nested source map @@ -7006,28 +7084,38 @@ func (c *linkerContext) generateSourceMapForChunk( startState.GeneratedColumn += prevColumnOffset } - // Append the precomputed source map chunk - sourcemap.AppendSourceMapChunk(&j, prevEndState, startState, chunk.Buffer) + if result.isNullEntry { + // Emit a "null" mapping + chunk.Buffer.Data = []byte("A") + sourcemap.AppendSourceMapChunk(&j, prevEndState, startState, chunk.Buffer) - // Generate the relative offset to start from next time - prevOriginalName := prevEndState.OriginalName - prevEndState = chunk.EndState - prevEndState.SourceIndex += sourcesIndex - if chunk.Buffer.FirstNameOffset.IsValid() { - prevEndState.OriginalName += totalQuotedNameLen + // Only the generated position was advanced + prevEndState.GeneratedLine = startState.GeneratedLine + prevEndState.GeneratedColumn = startState.GeneratedColumn } else { - // It's possible for a chunk to have mappings but for none of those - // mappings to have an associated name. The name is optional and is - // omitted when the mapping is for a non-name token or if the final - // and original names are the same. In that case we need to restore - // the previous original name end state since it wasn't modified after - // all. If we don't do this, then files after this will adjust their - // name offsets assuming that the previous generated mapping has this - // file's offset, which is wrong. 
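generateGlobalNamePrefix, changed a few hunks above, now peels a leading "this" or "import.meta" off the --global-name path and treats that prefix as an object that already exists, so only the nested properties receive the "(x ||= {})" guard. A sketch with a made-up compound global name, assuming the behavior is observable through the public GlobalName option with the IIFE format:

package main

import (
	"fmt"

	"github.com/evanw/esbuild/pkg/api"
)

func main() {
	// Previously a global name of "this.acme.sdk" would also defensively
	// initialize "this" itself; now only "acme" and "sdk" are guarded.
	result := api.Transform(`export const answer = 42`, api.TransformOptions{
		Format:     api.FormatIIFE,
		GlobalName: "this.acme.sdk",
	})
	fmt.Println(string(result.Code), result.Errors)
}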
- prevEndState.OriginalName = prevOriginalName - } - prevColumnOffset = chunk.FinalGeneratedColumn - totalQuotedNameLen += len(chunk.QuotedNames) + // Append the precomputed source map chunk + sourcemap.AppendSourceMapChunk(&j, prevEndState, startState, chunk.Buffer) + + // Generate the relative offset to start from next time + prevOriginalName := prevEndState.OriginalName + prevEndState = chunk.EndState + prevEndState.SourceIndex += sourcesIndex + if chunk.Buffer.FirstNameOffset.IsValid() { + prevEndState.OriginalName += totalQuotedNameLen + } else { + // It's possible for a chunk to have mappings but for none of those + // mappings to have an associated name. The name is optional and is + // omitted when the mapping is for a non-name token or if the final + // and original names are the same. In that case we need to restore + // the previous original name end state since it wasn't modified after + // all. If we don't do this, then files after this will adjust their + // name offsets assuming that the previous generated mapping has this + // file's offset, which is wrong. + prevEndState.OriginalName = prevOriginalName + } + prevColumnOffset = chunk.FinalGeneratedColumn + totalQuotedNameLen += len(chunk.QuotedNames) + } // If this was all one line, include the column offset from the start if prevEndState.GeneratedLine == 0 { diff --git a/vendor/github.com/evanw/esbuild/internal/logger/logger.go b/vendor/github.com/evanw/esbuild/internal/logger/logger.go index 29d1fe9dcc2..8acb9048add 100644 --- a/vendor/github.com/evanw/esbuild/internal/logger/logger.go +++ b/vendor/github.com/evanw/esbuild/internal/logger/logger.go @@ -272,7 +272,7 @@ type ImportAttribute struct { } // This returns a sorted array instead of a map to make determinism easier -func (attrs ImportAttributes) Decode() (result []ImportAttribute) { +func (attrs ImportAttributes) DecodeIntoArray() (result []ImportAttribute) { if attrs.packedData == "" { return nil } @@ -289,7 +289,20 @@ func (attrs ImportAttributes) Decode() (result []ImportAttribute) { return result } +func (attrs ImportAttributes) DecodeIntoMap() (result map[string]string) { + if array := attrs.DecodeIntoArray(); len(array) > 0 { + result = make(map[string]string, len(array)) + for _, attr := range array { + result[attr.Key] = attr.Value + } + } + return +} + func EncodeImportAttributes(value map[string]string) ImportAttributes { + if len(value) == 0 { + return ImportAttributes{} + } keys := make([]string, 0, len(value)) for k := range value { keys = append(keys, k) diff --git a/vendor/github.com/evanw/esbuild/internal/logger/msg_ids.go b/vendor/github.com/evanw/esbuild/internal/logger/msg_ids.go index 3f2773da08b..2e1e305ca4b 100644 --- a/vendor/github.com/evanw/esbuild/internal/logger/msg_ids.go +++ b/vendor/github.com/evanw/esbuild/internal/logger/msg_ids.go @@ -12,6 +12,7 @@ const ( MsgID_None MsgID = iota // JavaScript + MsgID_JS_AssertToWith MsgID_JS_AssertTypeJSON MsgID_JS_AssignToConstant MsgID_JS_AssignToDefine @@ -73,6 +74,7 @@ const ( // package.json MsgID_PackageJSON_FIRST // Keep this first + MsgID_PackageJSON_DeadCondition MsgID_PackageJSON_InvalidBrowser MsgID_PackageJSON_InvalidImportsOrExports MsgID_PackageJSON_InvalidSideEffects @@ -96,6 +98,8 @@ const ( func StringToMsgIDs(str string, logLevel LogLevel, overrides map[MsgID]LogLevel) { switch str { // JS + case "assert-to-with": + overrides[MsgID_JS_AssertToWith] = logLevel case "assert-type-json": overrides[MsgID_JS_AssertTypeJSON] = logLevel case "assign-to-constant": @@ -226,6 +230,8 @@ func 
StringToMsgIDs(str string, logLevel LogLevel, overrides map[MsgID]LogLevel) func MsgIDToString(id MsgID) string { switch id { // JS + case MsgID_JS_AssertToWith: + return "assert-to-with" case MsgID_JS_AssertTypeJSON: return "assert-type-json" case MsgID_JS_AssignToConstant: diff --git a/vendor/github.com/evanw/esbuild/internal/resolver/package_json.go b/vendor/github.com/evanw/esbuild/internal/resolver/package_json.go index 409349bbe3b..068acdea0ef 100644 --- a/vendor/github.com/evanw/esbuild/internal/resolver/package_json.go +++ b/vendor/github.com/evanw/esbuild/internal/resolver/package_json.go @@ -675,6 +675,16 @@ func parseImportsExportsMap(source logger.Source, log logger.Log, json js_ast.Ex firstToken := logger.Range{Loc: expr.Loc, Len: 1} isConditionalSugar := false + type DeadCondition struct { + reason string + ranges []logger.Range + notes []logger.MsgData + } + var foundDefault logger.Range + var foundImport logger.Range + var foundRequire logger.Range + var deadCondition DeadCondition + for i, property := range e.Properties { keyStr, _ := property.Key.Data.(*js_ast.EString) key := helpers.UTF16ToString(keyStr.Value) @@ -697,6 +707,35 @@ func parseImportsExportsMap(source logger.Source, log logger.Log, json js_ast.Ex } } + // Track "dead" conditional branches that can never be reached + if foundDefault.Len != 0 || (foundImport.Len != 0 && foundRequire.Len != 0) { + deadCondition.ranges = append(deadCondition.ranges, keyRange) + // Note: Don't warn about the "default" condition as it's supposed to be a catch-all condition + if deadCondition.reason == "" && key != "default" { + if foundDefault.Len != 0 { + deadCondition.reason = "\"default\"" + deadCondition.notes = []logger.MsgData{ + tracker.MsgData(foundDefault, "The \"default\" condition comes earlier and will always be chosen:"), + } + } else { + deadCondition.reason = "both \"import\" and \"require\"" + deadCondition.notes = []logger.MsgData{ + tracker.MsgData(foundImport, "The \"import\" condition comes earlier and will be used for all \"import\" statements:"), + tracker.MsgData(foundRequire, "The \"require\" condition comes earlier and will be used for all \"require\" calls:"), + } + } + } + } else { + switch key { + case "default": + foundDefault = keyRange + case "import": + foundImport = keyRange + case "require": + foundRequire = keyRange + } + } + entry := pjMapEntry{ key: key, keyRange: keyRange, @@ -715,6 +754,30 @@ func parseImportsExportsMap(source logger.Source, log logger.Log, json js_ast.Ex // PATTERN_KEY_COMPARE which orders in descending order of specificity. 
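The package_json.go hunk above records which conditions in an "exports" or "imports" map can never be selected, either because a "default" condition appears earlier or because both "import" and "require" have already appeared; the diagnostic itself is emitted just below. A sketch of a layout that should trigger it, assuming the message surfaces in the build result's warnings when the offending package.json is not inside node_modules; all file names and contents here are made up:

package main

import (
	"fmt"
	"os"
	"path/filepath"

	"github.com/evanw/esbuild/pkg/api"
)

func main() {
	// Hypothetical project whose "imports" map lists "import" after "default";
	// the "default" branch shadows it completely, so "import" is dead.
	dir, _ := os.MkdirTemp("", "deadcond")
	defer os.RemoveAll(dir)

	os.WriteFile(filepath.Join(dir, "package.json"), []byte(`{
		"name": "app",
		"imports": { "#util": { "default": "./util.js", "import": "./util.mjs" } }
	}`), 0o644)
	os.WriteFile(filepath.Join(dir, "util.js"), []byte(`module.exports = "util"`), 0o644)
	os.WriteFile(filepath.Join(dir, "app.js"), []byte(`import u from "#util"; console.log(u)`), 0o644)

	result := api.Build(api.BuildOptions{
		EntryPoints: []string{filepath.Join(dir, "app.js")},
		Bundle:      true,
		Write:       false,
		LogLevel:    api.LogLevelSilent,
	})
	for _, w := range result.Warnings {
		fmt.Println("warning:", w.Text)
	}
	for _, e := range result.Errors {
		fmt.Println("error:", e.Text)
	}
}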
sort.Stable(expansionKeys) + // Warn about "dead" conditional branches that can never be reached + if deadCondition.reason != "" { + kind := logger.Warning + if helpers.IsInsideNodeModules(source.KeyPath.Text) { + kind = logger.Debug + } + var conditions string + conditionWord := "condition" + itComesWord := "it comes" + if len(deadCondition.ranges) > 1 { + conditionWord = "conditions" + itComesWord = "they come" + } + for i, r := range deadCondition.ranges { + if i > 0 { + conditions += " and " + } + conditions += source.TextForRange(r) + } + log.AddIDWithNotes(logger.MsgID_PackageJSON_DeadCondition, kind, &tracker, deadCondition.ranges[0], + fmt.Sprintf("The %s %s here will never be used as %s after %s", conditionWord, conditions, itComesWord, deadCondition.reason), + deadCondition.notes) + } + return pjEntry{ kind: pjObject, firstToken: firstToken, diff --git a/vendor/github.com/evanw/esbuild/internal/resolver/resolver.go b/vendor/github.com/evanw/esbuild/internal/resolver/resolver.go index 6e50acd9e1d..b3f6c8b565d 100644 --- a/vendor/github.com/evanw/esbuild/internal/resolver/resolver.go +++ b/vendor/github.com/evanw/esbuild/internal/resolver/resolver.go @@ -331,14 +331,14 @@ func NewResolver(call config.APICall, fs fs.FS, log logger.Log, caches *cache.Ca if r.log.Level <= logger.LevelDebug { r.debugLogs = &debugLogs{what: fmt.Sprintf("Resolving tsconfig file %q", options.TSConfigPath)} } - res.tsConfigOverride, err = r.parseTSConfig(options.TSConfigPath, visited) + res.tsConfigOverride, err = r.parseTSConfig(options.TSConfigPath, visited, fs.Dir(options.TSConfigPath)) } else { source := logger.Source{ KeyPath: logger.Path{Text: fs.Join(fs.Cwd(), ""), Namespace: "file"}, PrettyPath: "", Contents: options.TSConfigRaw, } - res.tsConfigOverride, err = r.parseTSConfigFromSource(source, visited) + res.tsConfigOverride, err = r.parseTSConfigFromSource(source, visited, fs.Cwd()) } if err != nil { if err == syscall.ENOENT { @@ -1132,12 +1132,19 @@ func (r resolverQuery) dirInfoCached(path string) *dirInfo { // Cache hit: stop now if !ok { + // Update the cache to indicate failure. Even if the read failed, we don't + // want to retry again later. The directory is inaccessible so trying again + // is wasted. Doing this before calling "dirInfoUncached" prevents stack + // overflow in case this directory is recursively encountered again. + r.dirCache[path] = nil + // Cache miss: read the info cached = r.dirInfoUncached(path) - // Update the cache unconditionally. Even if the read failed, we don't want to - // retry again later. The directory is inaccessible so trying again is wasted. - r.dirCache[path] = cached + // Only update the cache again on success + if cached != nil { + r.dirCache[path] = cached + } } if r.debugLogs != nil { @@ -1164,16 +1171,18 @@ var errParseErrorAlreadyLogged = errors.New("(error already logged)") // // Nested calls may also return "parseErrorImportCycle". In that case the // caller is responsible for logging an appropriate error message. 
-func (r resolverQuery) parseTSConfig(file string, visited map[string]bool) (*TSConfigJSON, error) { +func (r resolverQuery) parseTSConfig(file string, visited map[string]bool, configDir string) (*TSConfigJSON, error) { + // Resolve any symlinks first before parsing the file + if !r.options.PreserveSymlinks { + if real, ok := r.fs.EvalSymlinks(file); ok { + file = real + } + } + // Don't infinite loop if a series of "extends" links forms a cycle if visited[file] { return nil, errParseErrorImportCycle } - if visited != nil { - // This is only non-nil for "build" API calls. This is nil for "transform" - // API calls, which tells us to not process "extends" fields. - visited[file] = true - } contents, err, originalError := r.caches.FSCache.ReadFile(r.fs, file) if r.debugLogs != nil && originalError != nil { @@ -1192,15 +1201,28 @@ func (r resolverQuery) parseTSConfig(file string, visited map[string]bool) (*TSC PrettyPath: PrettyPath(r.fs, keyPath), Contents: contents, } - return r.parseTSConfigFromSource(source, visited) + if visited != nil { + // This is only non-nil for "build" API calls. This is nil for "transform" + // API calls, which tells us to not process "extends" fields. + visited[file] = true + } + result, err := r.parseTSConfigFromSource(source, visited, configDir) + if visited != nil { + // Reset this to back false in case something uses TypeScript 5.0's multiple + // inheritance feature for "tsconfig.json" files. It should be valid to visit + // the same base "tsconfig.json" file multiple times from different multiple + // inheritance subtrees. + visited[file] = false + } + return result, err } -func (r resolverQuery) parseTSConfigFromSource(source logger.Source, visited map[string]bool) (*TSConfigJSON, error) { +func (r resolverQuery) parseTSConfigFromSource(source logger.Source, visited map[string]bool, configDir string) (*TSConfigJSON, error) { tracker := logger.MakeLineColumnTracker(&source) fileDir := r.fs.Dir(source.KeyPath.Text) isExtends := len(visited) > 1 - result := ParseTSConfigJSON(r.log, source, &r.caches.JSONCache, func(extends string, extendsRange logger.Range) *TSConfigJSON { + result := ParseTSConfigJSON(r.log, source, &r.caches.JSONCache, r.fs, fileDir, configDir, func(extends string, extendsRange logger.Range) *TSConfigJSON { if visited == nil { // If this is nil, then we're in a "transform" API call. In that case we // deliberately skip processing "extends" fields. 
This is because the @@ -1287,8 +1309,9 @@ func (r resolverQuery) parseTSConfigFromSource(source logger.Source, visited map if entry, _ := entries.Get("package.json"); entry != nil && entry.Kind(r.fs) == fs.FileEntry { // Check the "exports" map if packageJSON := r.parsePackageJSON(result.pkgDirPath); packageJSON != nil && packageJSON.exportsMap != nil { - if absolute, ok, _ := r.esmResolveAlgorithm(result.pkgIdent, "."+result.pkgSubpath, packageJSON, result.pkgDirPath, source.KeyPath.Text); ok { - base, err := r.parseTSConfig(absolute.Primary.Text, visited) + if absolute, ok, _ := r.esmResolveAlgorithm(finalizeImportsExportsYarnPnPTSConfigExtends, + result.pkgIdent, "."+result.pkgSubpath, packageJSON, result.pkgDirPath, source.KeyPath.Text); ok { + base, err := r.parseTSConfig(absolute.Primary.Text, visited, configDir) if result, shouldReturn := maybeFinishOurSearch(base, err, absolute.Primary.Text); shouldReturn { return result } @@ -1348,7 +1371,7 @@ func (r resolverQuery) parseTSConfigFromSource(source logger.Source, visited map // This is a very abbreviated version of our ESM resolution if status == pjStatusExact || status == pjStatusExactEndsWithStar { fileToCheck := r.fs.Join(pkgDir, resolvedPath) - base, err := r.parseTSConfig(fileToCheck, visited) + base, err := r.parseTSConfig(fileToCheck, visited, configDir) if result, shouldReturn := maybeFinishOurSearch(base, err, fileToCheck); shouldReturn { return result @@ -1362,7 +1385,7 @@ func (r resolverQuery) parseTSConfigFromSource(source logger.Source, visited map filesToCheck := []string{r.fs.Join(join, "tsconfig.json"), join, join + ".json"} for _, fileToCheck := range filesToCheck { - base, err := r.parseTSConfig(fileToCheck, visited) + base, err := r.parseTSConfig(fileToCheck, visited, configDir) // Explicitly ignore matches if they are directories instead of files if err != nil && err != syscall.ENOENT { @@ -1410,7 +1433,7 @@ func (r resolverQuery) parseTSConfigFromSource(source logger.Source, visited map if !r.fs.IsAbs(extendsFile) { extendsFile = r.fs.Join(fileDir, extendsFile) } - base, err := r.parseTSConfig(extendsFile, visited) + base, err := r.parseTSConfig(extendsFile, visited, configDir) // TypeScript's handling of "extends" has some specific edge cases. We // must only try adding ".json" if it's not already present, which is @@ -1422,7 +1445,7 @@ func (r resolverQuery) parseTSConfigFromSource(source logger.Source, visited map extendsBase := r.fs.Base(extendsFile) if entry, _ := entries.Get(extendsBase); entry == nil || entry.Kind(r.fs) != fs.FileEntry { if entry, _ := entries.Get(extendsBase + ".json"); entry != nil && entry.Kind(r.fs) == fs.FileEntry { - base, err = r.parseTSConfig(extendsFile+".json", visited) + base, err = r.parseTSConfig(extendsFile+".json", visited, configDir) } } } @@ -1451,14 +1474,6 @@ func (r resolverQuery) parseTSConfigFromSource(source logger.Source, visited map return nil, errParseErrorAlreadyLogged } - if result.BaseURL != nil && !r.fs.IsAbs(*result.BaseURL) { - *result.BaseURL = r.fs.Join(fileDir, *result.BaseURL) - } - - if result.Paths != nil && !r.fs.IsAbs(result.BaseURLForPaths) { - result.BaseURLForPaths = r.fs.Join(fileDir, result.BaseURLForPaths) - } - // Now that we have parsed the entire "tsconfig.json" file, filter out any // paths that are invalid due to being a package-style path without a base // URL specified. 
This must be done here instead of when we're parsing the @@ -1605,7 +1620,7 @@ func (r resolverQuery) dirInfoUncached(path string) *dirInfo { // many other tools anyway. So now these files are ignored. if tsConfigPath != "" && !info.isInsideNodeModules { var err error - info.enclosingTSConfigJSON, err = r.parseTSConfig(tsConfigPath, make(map[string]bool)) + info.enclosingTSConfigJSON, err = r.parseTSConfig(tsConfigPath, make(map[string]bool), r.fs.Dir(tsConfigPath)) if err != nil { if err == syscall.ENOENT { r.log.AddError(nil, logger.Range{}, fmt.Sprintf("Cannot find tsconfig file %q", @@ -2087,7 +2102,7 @@ func (r resolverQuery) matchTSConfigPaths(tsConfigJSON *TSConfigJSON, path strin } if r.debugLogs != nil { - r.debugLogs.addNote(fmt.Sprintf("Using %q as \"baseURL\"", absBaseURL)) + r.debugLogs.addNote(fmt.Sprintf("Using %q as \"baseUrl\"", absBaseURL)) } // Check for exact matches first @@ -2237,6 +2252,7 @@ func (r resolverQuery) loadPackageImports(importPath string, dirInfoPackageJSON } absolute, ok, diffCase := r.finalizeImportsExportsResult( + finalizeImportsExportsNormal, dirInfoPackageJSON.absPath, conditions, *packageJSON.importsMap, packageJSON, resolvedPath, status, debug, "", "", "", @@ -2244,7 +2260,14 @@ func (r resolverQuery) loadPackageImports(importPath string, dirInfoPackageJSON return absolute, ok, diffCase, nil } -func (r resolverQuery) esmResolveAlgorithm(esmPackageName string, esmPackageSubpath string, packageJSON *packageJSON, absPkgPath string, absPath string) (PathPair, bool, *fs.DifferentCase) { +func (r resolverQuery) esmResolveAlgorithm( + kind finalizeImportsExportsKind, + esmPackageName string, + esmPackageSubpath string, + packageJSON *packageJSON, + absPkgPath string, + absPath string, +) (PathPair, bool, *fs.DifferentCase) { if r.debugLogs != nil { r.debugLogs.addNote(fmt.Sprintf("Looking for %q in \"exports\" map in %q", esmPackageSubpath, packageJSON.source.KeyPath.Text)) r.debugLogs.increaseIndent() @@ -2279,6 +2302,7 @@ func (r resolverQuery) esmResolveAlgorithm(esmPackageName string, esmPackageSubp resolvedPath, status, debug = r.esmHandlePostConditions(resolvedPath, status, debug) return r.finalizeImportsExportsResult( + kind, absPkgPath, conditions, *packageJSON.exportsMap, packageJSON, resolvedPath, status, debug, esmPackageName, esmPackageSubpath, absPath, @@ -2359,7 +2383,7 @@ func (r resolverQuery) loadNodeModules(importPath string, dirInfo *dirInfo, forb if pkgDirInfo := r.dirInfoCached(result.pkgDirPath); pkgDirInfo != nil { // Check the "exports" map if packageJSON := pkgDirInfo.packageJSON; packageJSON != nil && packageJSON.exportsMap != nil { - absolute, ok, diffCase := r.esmResolveAlgorithm(result.pkgIdent, "."+result.pkgSubpath, packageJSON, pkgDirInfo.absPath, absPath) + absolute, ok, diffCase := r.esmResolveAlgorithm(finalizeImportsExportsNormal, result.pkgIdent, "."+result.pkgSubpath, packageJSON, pkgDirInfo.absPath, absPath) return absolute, ok, diffCase, nil } @@ -2394,7 +2418,7 @@ func (r resolverQuery) loadNodeModules(importPath string, dirInfo *dirInfo, forb // Check for self-references if dirInfoPackageJSON != nil { if packageJSON := dirInfoPackageJSON.packageJSON; packageJSON.name == esmPackageName && packageJSON.exportsMap != nil { - absolute, ok, diffCase := r.esmResolveAlgorithm(esmPackageName, esmPackageSubpath, packageJSON, + absolute, ok, diffCase := r.esmResolveAlgorithm(finalizeImportsExportsNormal, esmPackageName, esmPackageSubpath, packageJSON, dirInfoPackageJSON.absPath, r.fs.Join(dirInfoPackageJSON.absPath, 
esmPackageSubpath)) return absolute, ok, diffCase, nil } @@ -2413,7 +2437,7 @@ func (r resolverQuery) loadNodeModules(importPath string, dirInfo *dirInfo, forb if pkgDirInfo := r.dirInfoCached(absPkgPath); pkgDirInfo != nil { // Check the "exports" map if packageJSON := pkgDirInfo.packageJSON; packageJSON != nil && packageJSON.exportsMap != nil { - absolute, ok, diffCase := r.esmResolveAlgorithm(esmPackageName, esmPackageSubpath, packageJSON, absPkgPath, absPath) + absolute, ok, diffCase := r.esmResolveAlgorithm(finalizeImportsExportsNormal, esmPackageName, esmPackageSubpath, packageJSON, absPkgPath, absPath) return absolute, ok, diffCase, nil, true } @@ -2525,7 +2549,15 @@ func (r resolverQuery) checkForBuiltInNodeModules(importPath string) (PathPair, return PathPair{}, false, nil } +type finalizeImportsExportsKind uint8 + +const ( + finalizeImportsExportsNormal finalizeImportsExportsKind = iota + finalizeImportsExportsYarnPnPTSConfigExtends +) + func (r resolverQuery) finalizeImportsExportsResult( + kind finalizeImportsExportsKind, absDirPath string, conditions map[string]bool, importExportMap pjMap, @@ -2552,6 +2584,14 @@ func (r resolverQuery) finalizeImportsExportsResult( r.debugLogs.addNote(fmt.Sprintf("The resolved path %q is exact", absResolvedPath)) } + // Avoid calling "dirInfoCached" recursively for "tsconfig.json" extends with Yarn PnP + if kind == finalizeImportsExportsYarnPnPTSConfigExtends { + if r.debugLogs != nil { + r.debugLogs.addNote(fmt.Sprintf("Resolved to %q", absResolvedPath)) + } + return PathPair{Primary: logger.Path{Text: absResolvedPath, Namespace: "file"}}, true, nil + } + resolvedDirInfo := r.dirInfoCached(r.fs.Dir(absResolvedPath)) base := r.fs.Base(absResolvedPath) extensionOrder := r.options.ExtensionOrder diff --git a/vendor/github.com/evanw/esbuild/internal/resolver/tsconfig_json.go b/vendor/github.com/evanw/esbuild/internal/resolver/tsconfig_json.go index ffeb75dbb93..1f9063e58a9 100644 --- a/vendor/github.com/evanw/esbuild/internal/resolver/tsconfig_json.go +++ b/vendor/github.com/evanw/esbuild/internal/resolver/tsconfig_json.go @@ -6,6 +6,7 @@ import ( "github.com/evanw/esbuild/internal/cache" "github.com/evanw/esbuild/internal/config" + "github.com/evanw/esbuild/internal/fs" "github.com/evanw/esbuild/internal/helpers" "github.com/evanw/esbuild/internal/js_ast" "github.com/evanw/esbuild/internal/js_lexer" @@ -95,6 +96,9 @@ func ParseTSConfigJSON( log logger.Log, source logger.Source, jsonCache *cache.JSONCache, + fs fs.FS, + fileDir string, + configDir string, extends func(string, logger.Range) *TSConfigJSON, ) *TSConfigJSON { // Unfortunately "tsconfig.json" isn't actually JSON. 
It's some other @@ -138,6 +142,10 @@ func ParseTSConfigJSON( // Parse "baseUrl" if valueJSON, _, ok := getProperty(compilerOptionsJSON, "baseUrl"); ok { if value, ok := getString(valueJSON); ok { + value = getSubstitutedPathWithConfigDirTemplate(fs, value, configDir) + if !fs.IsAbs(value) { + value = fs.Join(fileDir, value) + } result.BaseURL = &value } } @@ -213,7 +221,7 @@ func ParseTSConfigJSON( switch lowerValue { case "es3", "es5", "es6", "es2015", "es2016", "es2017", "es2018", "es2019", "es2020", "es2021": result.Settings.Target = config.TSTargetBelowES2022 - case "es2022", "es2023", "esnext": + case "es2022", "es2023", "es2024", "esnext": result.Settings.Target = config.TSTargetAtOrAboveES2022 default: ok = false @@ -301,12 +309,7 @@ func ParseTSConfigJSON( // Parse "paths" if valueJSON, _, ok := getProperty(compilerOptionsJSON, "paths"); ok { if paths, ok := valueJSON.Data.(*js_ast.EObject); ok { - hasBaseURL := result.BaseURL != nil - if hasBaseURL { - result.BaseURLForPaths = *result.BaseURL - } else { - result.BaseURLForPaths = "." - } + result.BaseURLForPaths = fileDir result.Paths = &TSConfigPaths{Source: source, Map: make(map[string][]TSConfigPath)} for _, prop := range paths.Properties { if key, ok := getString(prop.Key); ok { @@ -339,6 +342,7 @@ func ParseTSConfigJSON( for _, item := range array.Items { if str, ok := getString(item); ok { if isValidTSConfigPathPattern(str, log, &source, &tracker, item.Loc) { + str = getSubstitutedPathWithConfigDirTemplate(fs, str, configDir) result.Paths.Map[key] = append(result.Paths.Map[key], TSConfigPath{Text: str, Loc: item.Loc}) } } @@ -387,6 +391,14 @@ func ParseTSConfigJSON( return &result } +// See: https://github.com/microsoft/TypeScript/pull/58042 +func getSubstitutedPathWithConfigDirTemplate(fs fs.FS, value string, basePath string) string { + if strings.HasPrefix(value, "${configDir}") { + return fs.Join(basePath, "./"+value[12:]) + } + return value +} + func parseMemberExpressionForJSX(log logger.Log, source *logger.Source, tracker *logger.LineColumnTracker, loc logger.Loc, text string) []string { if text == "" { return nil diff --git a/vendor/github.com/evanw/esbuild/internal/runtime/runtime.go b/vendor/github.com/evanw/esbuild/internal/runtime/runtime.go index b99b5751455..42ccf39f538 100644 --- a/vendor/github.com/evanw/esbuild/internal/runtime/runtime.go +++ b/vendor/github.com/evanw/esbuild/internal/runtime/runtime.go @@ -261,10 +261,12 @@ func Source(unsupportedJSFeatures compat.JSFeature) logger.Source { export var __decorateParam = (index, decorator) => (target, key) => decorator(target, key, index) // For JavaScript decorators + export var __decoratorStart = base => [, , , __create(base?.[__knownSymbol('metadata')] ?? null)] var __decoratorStrings = ['class', 'method', 'getter', 'setter', 'accessor', 'field', 'value', 'get', 'set'] var __expectFn = fn => fn !== void 0 && typeof fn !== 'function' ? __typeError('Function expected') : fn - var __decoratorContext = (kind, name, done, fns) => ({ kind: __decoratorStrings[kind], name, addInitializer: fn => - done._ ? __typeError('Already initialized') : fns.push(__expectFn(fn || null)), }) + var __decoratorContext = (kind, name, done, metadata, fns) => ({ kind: __decoratorStrings[kind], name, metadata, addInitializer: fn => + done._ ? 
__typeError('Already initialized') : fns.push(__expectFn(fn || null)) }) + export var __decoratorMetadata = (array, target) => __defNormalProp(target, __knownSymbol('metadata'), array[3]) export var __runInitializers = (array, flags, self, value) => { for (var i = 0, fns = array[flags >> 1], n = fns && fns.length; i < n; i++) flags & 1 ? fns[i].call(self) : value = fns[i].call(self, value) return value @@ -279,7 +281,7 @@ func Source(unsupportedJSFeatures compat.JSFeature) logger.Source { ` // Avoid object extensions when not using ES6 - if !unsupportedJSFeatures.Has(compat.ObjectExtensions) { + if !unsupportedJSFeatures.Has(compat.ObjectExtensions) && !unsupportedJSFeatures.Has(compat.ObjectAccessors) { text += `__getOwnPropDesc(k < 4 ? target : { get [name]() { return __privateGet(this, extra) }, set [name](x) { return __privateSet(this, extra, x) } }, name)` } else { text += `(k < 4 ? __getOwnPropDesc(target, name) : { get: () => __privateGet(this, extra), set: x => __privateSet(this, extra, x) })` @@ -290,7 +292,7 @@ func Source(unsupportedJSFeatures compat.JSFeature) logger.Source { k ? p && k < 4 && __name(extra, (k > 2 ? 'set ' : k > 1 ? 'get ' : '') + name) : __name(target, name) for (var i = decorators.length - 1; i >= 0; i--) { - ctx = __decoratorContext(k, name, done = {}, extraInitializers) + ctx = __decoratorContext(k, name, done = {}, array[3], extraInitializers) if (k) { ctx.static = s, ctx.private = p, access = ctx.access = { has: p ? x => __privateIn(target, x) : x => name in x } @@ -305,7 +307,9 @@ func Source(unsupportedJSFeatures compat.JSFeature) logger.Source { else __expectFn(fn = it.get) && (desc.get = fn), __expectFn(fn = it.set) && (desc.set = fn), __expectFn(fn = it.init) && initializers.unshift(fn) } - return desc && __defProp(target, name, desc), p ? k ^ 4 ? extra : desc : target + return k || __decoratorMetadata(array, target), + desc && __defProp(target, name, desc), + p ? k ^ 4 ? 
extra : desc : target } // For class members @@ -501,10 +505,14 @@ func Source(unsupportedJSFeatures compat.JSFeature) logger.Source { export var __using = (stack, value, async) => { if (value != null) { if (typeof value !== 'object' && typeof value !== 'function') __typeError('Object expected') - var dispose + var dispose, inner if (async) dispose = value[__knownSymbol('asyncDispose')] - if (dispose === void 0) dispose = value[__knownSymbol('dispose')] + if (dispose === void 0) { + dispose = value[__knownSymbol('dispose')] + if (async) inner = dispose + } if (typeof dispose !== 'function') __typeError('Object not disposable') + if (inner) dispose = function() { try { inner.call(this) } catch (e) { return Promise.reject(e) } } stack.push([async, dispose, value]) } else if (async) { stack.push([async]) diff --git a/vendor/github.com/evanw/esbuild/internal/sourcemap/sourcemap.go b/vendor/github.com/evanw/esbuild/internal/sourcemap/sourcemap.go index 93effc2102e..4d759dbbaec 100644 --- a/vendor/github.com/evanw/esbuild/internal/sourcemap/sourcemap.go +++ b/vendor/github.com/evanw/esbuild/internal/sourcemap/sourcemap.go @@ -2,6 +2,7 @@ package sourcemap import ( "bytes" + "strings" "unicode/utf8" "github.com/evanw/esbuild/internal/ast" @@ -315,14 +316,14 @@ func (pieces SourceMapPieces) Finalize(shifts []SourceMapShift) []byte { potentialStartOfRun := current - // Skip over the original position information - _, current = DecodeVLQ(pieces.Mappings, current) // The original source - _, current = DecodeVLQ(pieces.Mappings, current) // The original line - _, current = DecodeVLQ(pieces.Mappings, current) // The original column - - // Skip over the original name + // Skip over the original position information if present if current < len(pieces.Mappings) { - if c := pieces.Mappings[current]; c != ',' && c != ';' { + _, current = DecodeVLQ(pieces.Mappings, current) // The original source + _, current = DecodeVLQ(pieces.Mappings, current) // The original line + _, current = DecodeVLQ(pieces.Mappings, current) // The original column + + // Skip over the original name if present + if current < len(pieces.Mappings) { _, current = DecodeVLQ(pieces.Mappings, current) } } @@ -426,20 +427,28 @@ func AppendSourceMapChunk(j *helpers.Joiner, prevEndState SourceMapState, startS // case below instead. Original names are optional and are often omitted, so // we handle it uniformly by saving an index to the first original name, // which may or may not be a part of the first mapping. + var sourceIndex int + var originalLine int + var originalColumn int + omitSource := false generatedColumn, i := DecodeVLQ(buffer.Data, semicolons) - sourceIndex, i := DecodeVLQ(buffer.Data, i) - originalLine, i := DecodeVLQ(buffer.Data, i) - originalColumn, i := DecodeVLQ(buffer.Data, i) + if i == len(buffer.Data) || strings.IndexByte(",;", buffer.Data[i]) != -1 { + omitSource = true + } else { + sourceIndex, i = DecodeVLQ(buffer.Data, i) + originalLine, i = DecodeVLQ(buffer.Data, i) + originalColumn, i = DecodeVLQ(buffer.Data, i) + } // Rewrite the first mapping to be relative to the end state of the previous // chunk. We now know what the end state is because we're in the second pass // where all chunks have already been generated. 
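// A minimal standalone sketch (not the vendored code; helper names are
// illustrative) of why the omitSource handling in this hunk is needed: a
// source-map "mappings" segment may carry 1, 4, or 5 VLQ fields, so the
// original source/line/column deltas are optional and a
// generated-column-only segment must stay a single field when chunks are
// stitched back together.
package main

import "fmt"

const base64Chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"

// encodeVLQ appends one base64 VLQ value: the sign goes in the lowest bit,
// then 5-bit groups are emitted with a continuation bit.
func encodeVLQ(buf []byte, value int) []byte {
	v := value << 1
	if value < 0 {
		v = (-value << 1) | 1
	}
	for {
		digit := v & 31
		v >>= 5
		if v != 0 {
			digit |= 32 // continuation bit: more groups follow
		}
		buf = append(buf, base64Chars[digit])
		if v == 0 {
			return buf
		}
	}
}

// appendSegment writes one mapping segment as deltas from the previous state.
// With omitSource set, only the generated column is written, mirroring the
// one-field case that AppendSourceMapChunk now preserves.
func appendSegment(buf []byte, genCol, srcIndex, origLine, origCol int, omitSource bool) []byte {
	buf = encodeVLQ(buf, genCol)
	if !omitSource {
		buf = encodeVLQ(buf, srcIndex)
		buf = encodeVLQ(buf, origLine)
		buf = encodeVLQ(buf, origCol)
	}
	return buf
}

func main() {
	var buf []byte
	buf = appendSegment(buf, 0, 0, 0, 0, false) // full four-field segment
	buf = append(buf, ',')
	buf = appendSegment(buf, 6, 0, 0, 0, true) // generated-column-only segment
	fmt.Println(string(buf))                   // prints "AAAA,M"
}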
- startState.SourceIndex += sourceIndex startState.GeneratedColumn += generatedColumn + startState.SourceIndex += sourceIndex startState.OriginalLine += originalLine startState.OriginalColumn += originalColumn prevEndState.HasOriginalName = false // This is handled separately below - rewritten, _ := appendMappingToBuffer(nil, j.LastByte(), prevEndState, startState) + rewritten, _ := appendMappingToBuffer(nil, j.LastByte(), prevEndState, startState, omitSource) j.AddBytes(rewritten) // Next, if there's an original name, we need to rewrite that as well to be @@ -458,7 +467,9 @@ func AppendSourceMapChunk(j *helpers.Joiner, prevEndState SourceMapState, startS j.AddBytes(buffer.Data[i:]) } -func appendMappingToBuffer(buffer []byte, lastByte byte, prevState SourceMapState, currentState SourceMapState) ([]byte, ast.Index32) { +func appendMappingToBuffer( + buffer []byte, lastByte byte, prevState SourceMapState, currentState SourceMapState, omitSource bool, +) ([]byte, ast.Index32) { // Put commas in between mappings if lastByte != 0 && lastByte != ';' && lastByte != '"' { buffer = append(buffer, ',') @@ -466,9 +477,11 @@ func appendMappingToBuffer(buffer []byte, lastByte byte, prevState SourceMapStat // Record the mapping (note that the generated line is recorded using ';' elsewhere) buffer = encodeVLQ(buffer, currentState.GeneratedColumn-prevState.GeneratedColumn) - buffer = encodeVLQ(buffer, currentState.SourceIndex-prevState.SourceIndex) - buffer = encodeVLQ(buffer, currentState.OriginalLine-prevState.OriginalLine) - buffer = encodeVLQ(buffer, currentState.OriginalColumn-prevState.OriginalColumn) + if !omitSource { + buffer = encodeVLQ(buffer, currentState.SourceIndex-prevState.SourceIndex) + buffer = encodeVLQ(buffer, currentState.OriginalLine-prevState.OriginalLine) + buffer = encodeVLQ(buffer, currentState.OriginalColumn-prevState.OriginalColumn) + } // Record the optional original name var nameOffset ast.Index32 @@ -820,7 +833,7 @@ func (b *ChunkBuilder) appendMappingWithoutRemapping(currentState SourceMapState } var nameOffset ast.Index32 - b.sourceMap, nameOffset = appendMappingToBuffer(b.sourceMap, lastByte, b.prevState, currentState) + b.sourceMap, nameOffset = appendMappingToBuffer(b.sourceMap, lastByte, b.prevState, currentState, false) prevOriginalName := b.prevState.OriginalName b.prevState = currentState if !currentState.HasOriginalName { diff --git a/vendor/github.com/evanw/esbuild/pkg/api/api.go b/vendor/github.com/evanw/esbuild/pkg/api/api.go index 017ed8905c6..08a597ec2a0 100644 --- a/vendor/github.com/evanw/esbuild/pkg/api/api.go +++ b/vendor/github.com/evanw/esbuild/pkg/api/api.go @@ -133,6 +133,7 @@ const ( ES2021 ES2022 ES2023 + ES2024 ) type Loader uint16 @@ -179,6 +180,7 @@ type Packages uint8 const ( PackagesDefault Packages = iota + PackagesBundle PackagesExternal ) @@ -576,6 +578,7 @@ type ResolveOptions struct { ResolveDir string Kind ResolveKind PluginData interface{} + With map[string]string } // Documentation: https://esbuild.github.io/plugins/#resolve-results @@ -615,6 +618,7 @@ type OnResolveArgs struct { ResolveDir string Kind ResolveKind PluginData interface{} + With map[string]string } // Documentation: https://esbuild.github.io/plugins/#on-resolve-results diff --git a/vendor/github.com/evanw/esbuild/pkg/api/api_impl.go b/vendor/github.com/evanw/esbuild/pkg/api/api_impl.go index 18efb58d3b3..d294d366b9d 100644 --- a/vendor/github.com/evanw/esbuild/pkg/api/api_impl.go +++ b/vendor/github.com/evanw/esbuild/pkg/api/api_impl.go @@ -216,6 +216,17 @@ func 
validateASCIIOnly(value Charset) bool { } } +func validateExternalPackages(value Packages) bool { + switch value { + case PackagesDefault, PackagesBundle: + return false + case PackagesExternal: + return true + default: + panic("Invalid packages") + } +} + func validateTreeShaking(value TreeShaking, bundle bool, format Format) bool { switch value { case TreeShakingDefault: @@ -307,6 +318,8 @@ func validateFeatures(log logger.Log, target Target, engines []Engine) (compat.J constraints[compat.ES] = compat.Semver{Parts: []int{2022}} case ES2023: constraints[compat.ES] = compat.Semver{Parts: []int{2023}} + case ES2024: + constraints[compat.ES] = compat.Semver{Parts: []int{2024}} case ESNext, DefaultTarget: default: panic("Invalid target") @@ -373,11 +386,11 @@ func validateSupported(log logger.Log, supported map[string]bool) ( return } -func validateGlobalName(log logger.Log, text string) []string { +func validateGlobalName(log logger.Log, text string, path string) []string { if text != "" { source := logger.Source{ - KeyPath: logger.Path{Text: "(global path)"}, - PrettyPath: "(global name)", + KeyPath: logger.Path{Text: path}, + PrettyPath: path, Contents: text, } @@ -511,6 +524,18 @@ func validateJSXExpr(log logger.Log, text string, name string) config.DefineExpr return config.DefineExpr{} } +// This returns an arbitrary but unique key for each unique array of strings +func mapKeyForDefine(parts []string) string { + var sb strings.Builder + var n [4]byte + for _, part := range parts { + binary.LittleEndian.PutUint32(n[:], uint32(len(part))) + sb.Write(n[:]) + sb.WriteString(part) + } + return sb.String() +} + func validateDefines( log logger.Log, defines map[string]string, @@ -520,32 +545,36 @@ func validateDefines( minify bool, drop Drop, ) (*config.ProcessedDefines, []config.InjectedDefine) { + // Sort injected defines for determinism, since the imports will be injected + // into every file in the order that we return them from this function + sortedKeys := make([]string, 0, len(defines)) + for key := range defines { + sortedKeys = append(sortedKeys, key) + } + sort.Strings(sortedKeys) + rawDefines := make(map[string]config.DefineData) - var valueToInject map[string]config.InjectedDefine - var definesToInject []string - - for key, value := range defines { - // The key must be a dot-separated identifier list - for _, part := range strings.Split(key, ".") { - if !js_ast.IsIdentifier(part) { - if part == key { - log.AddError(nil, logger.Range{}, fmt.Sprintf("The define key %q must be a valid identifier", key)) - } else { - log.AddError(nil, logger.Range{}, fmt.Sprintf("The define key %q contains invalid identifier %q", key, part)) - } - continue - } + nodeEnvParts := []string{"process", "env", "NODE_ENV"} + nodeEnvMapKey := mapKeyForDefine(nodeEnvParts) + var injectedDefines []config.InjectedDefine + + for _, key := range sortedKeys { + value := defines[key] + keyParts := validateGlobalName(log, key, "(define name)") + if keyParts == nil { + continue } + mapKey := mapKeyForDefine(keyParts) // Parse the value defineExpr, injectExpr := js_parser.ParseDefineExprOrJSON(value) // Define simple expressions if defineExpr.Constant != nil || len(defineExpr.Parts) > 0 { - rawDefines[key] = config.DefineData{DefineExpr: &defineExpr} + rawDefines[mapKey] = config.DefineData{KeyParts: keyParts, DefineExpr: &defineExpr} // Try to be helpful for common mistakes - if len(defineExpr.Parts) == 1 && key == "process.env.NODE_ENV" { + if len(defineExpr.Parts) == 1 && mapKey == nodeEnvMapKey { data := 
logger.MsgData{ Text: fmt.Sprintf("%q is defined as an identifier instead of a string (surround %q with quotes to get a string)", key, value), } @@ -593,15 +622,13 @@ func validateDefines( // Inject complex expressions if injectExpr != nil { - definesToInject = append(definesToInject, key) - if valueToInject == nil { - valueToInject = make(map[string]config.InjectedDefine) - } - valueToInject[key] = config.InjectedDefine{ + index := ast.MakeIndex32(uint32(len(injectedDefines))) + injectedDefines = append(injectedDefines, config.InjectedDefine{ Source: logger.Source{Contents: value}, Data: injectExpr, Name: key, - } + }) + rawDefines[mapKey] = config.DefineData{KeyParts: keyParts, DefineExpr: &config.DefineExpr{InjectedDefineIndex: index}} continue } @@ -609,18 +636,6 @@ func validateDefines( log.AddError(nil, logger.Range{}, fmt.Sprintf("Invalid define value (must be an entity name or valid JSON syntax): %s", value)) } - // Sort injected defines for determinism, since the imports will be injected - // into every file in the order that we return them from this function - var injectedDefines []config.InjectedDefine - if len(definesToInject) > 0 { - injectedDefines = make([]config.InjectedDefine, len(definesToInject)) - sort.Strings(definesToInject) - for i, key := range definesToInject { - injectedDefines[i] = valueToInject[key] - rawDefines[key] = config.DefineData{DefineExpr: &config.DefineExpr{InjectedDefineIndex: ast.MakeIndex32(uint32(i))}} - } - } - // If we're bundling for the browser, add a special-cased define for // "process.env.NODE_ENV" that is "development" when not minifying and // "production" when minifying. This is a convention from the React world @@ -628,16 +643,16 @@ func validateDefines( // is only done if it's not already defined so that you can override it if // necessary. 
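// The NODE_ENV, "console", and pure-function lookups below all key
// rawDefines by mapKeyForDefine rather than by the dotted name. A hedged
// standalone sketch of why a length-prefixed key is used: once define keys
// are parsed into parts, joining the parts with "." could make distinct
// part lists collide, while prefixing each part with its byte length keeps
// them distinct (the example part lists here are made up).
package main

import (
	"encoding/binary"
	"fmt"
	"strings"
)

func mapKey(parts []string) string {
	var sb strings.Builder
	var n [4]byte
	for _, part := range parts {
		binary.LittleEndian.PutUint32(n[:], uint32(len(part)))
		sb.Write(n[:])
		sb.WriteString(part)
	}
	return sb.String()
}

func main() {
	a := []string{"a", "b.c"}
	b := []string{"a", "b", "c"}
	fmt.Println(strings.Join(a, ".") == strings.Join(b, ".")) // true: a dotted join collides
	fmt.Println(mapKey(a) == mapKey(b))                       // false: length-prefixed keys do not
}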
if isBuildAPI && platform == config.PlatformBrowser { - if _, process := rawDefines["process"]; !process { - if _, processEnv := rawDefines["process.env"]; !processEnv { - if _, processEnvNodeEnv := rawDefines["process.env.NODE_ENV"]; !processEnvNodeEnv { + if _, process := rawDefines[mapKeyForDefine([]string{"process"})]; !process { + if _, processEnv := rawDefines[mapKeyForDefine([]string{"process.env"})]; !processEnv { + if _, processEnvNodeEnv := rawDefines[nodeEnvMapKey]; !processEnvNodeEnv { var value []uint16 if minify { value = helpers.StringToUTF16("production") } else { value = helpers.StringToUTF16("development") } - rawDefines["process.env.NODE_ENV"] = config.DefineData{DefineExpr: &config.DefineExpr{Constant: &js_ast.EString{Value: value}}} + rawDefines[nodeEnvMapKey] = config.DefineData{KeyParts: nodeEnvParts, DefineExpr: &config.DefineExpr{Constant: &js_ast.EString{Value: value}}} } } } @@ -645,29 +660,35 @@ func validateDefines( // If we're dropping all console API calls, replace each one with undefined if (drop & DropConsole) != 0 { - define := rawDefines["console"] + consoleParts := []string{"console"} + consoleMapKey := mapKeyForDefine(consoleParts) + define := rawDefines[consoleMapKey] + define.KeyParts = consoleParts define.Flags |= config.MethodCallsMustBeReplacedWithUndefined - rawDefines["console"] = define + rawDefines[consoleMapKey] = define } for _, key := range pureFns { - // The key must be a dot-separated identifier list - for _, part := range strings.Split(key, ".") { - if !js_ast.IsIdentifier(part) { - log.AddError(nil, logger.Range{}, fmt.Sprintf("Invalid pure function: %q", key)) - continue - } + keyParts := validateGlobalName(log, key, "(pure name)") + if keyParts == nil { + continue } + mapKey := mapKeyForDefine(keyParts) // Merge with any previously-specified defines - define := rawDefines[key] + define := rawDefines[mapKey] + define.KeyParts = keyParts define.Flags |= config.CallCanBeUnwrappedIfUnused - rawDefines[key] = define + rawDefines[mapKey] = define } // Processing defines is expensive. Process them once here so the same object // can be shared between all parsers we create using these arguments. 
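// A hedged sketch of the flag-merge pattern used just above for
// --drop:console and --pure: the existing map entry (or its zero value) is
// read, a bit flag is OR-ed in, and the entry is written back, so a
// user-supplied define for the same name keeps its value while gaining the
// extra behavior. Type, flag, and key names are stand-ins for the internal
// config package, and the real map is keyed by mapKeyForDefine(keyParts),
// not by the plain string used here.
package main

import "fmt"

type defineFlags uint8

const (
	callCanBeUnwrappedIfUnused defineFlags = 1 << iota
	methodCallsMustBeReplacedWithUndefined
)

type defineData struct {
	value string // stands in for the parsed define expression
	flags defineFlags
}

func mergeFlag(defines map[string]defineData, key string, flag defineFlags) {
	d := defines[key] // zero value if the key wasn't already defined
	d.flags |= flag
	defines[key] = d
}

func main() {
	defines := map[string]defineData{
		// e.g. --define:DEBUG_LOG=console.log supplied by the user
		"DEBUG_LOG": {value: "console.log"},
	}
	mergeFlag(defines, "console", methodCallsMustBeReplacedWithUndefined) // --drop:console
	mergeFlag(defines, "DEBUG_LOG", callCanBeUnwrappedIfUnused)           // --pure:DEBUG_LOG
	fmt.Printf("%+v\n", defines)
}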
- processed := config.ProcessDefines(rawDefines) + definesArray := make([]config.DefineData, 0, len(rawDefines)) + for _, define := range rawDefines { + definesArray = append(definesArray, define) + } + processed := config.ProcessDefines(definesArray) return &processed, injectedDefines } @@ -1250,7 +1271,7 @@ func validateBuildOptions( ASCIIOnly: validateASCIIOnly(buildOpts.Charset), IgnoreDCEAnnotations: buildOpts.IgnoreAnnotations, TreeShaking: validateTreeShaking(buildOpts.TreeShaking, buildOpts.Bundle, buildOpts.Format), - GlobalName: validateGlobalName(log, buildOpts.GlobalName), + GlobalName: validateGlobalName(log, buildOpts.GlobalName, "(global name)"), CodeSplitting: buildOpts.Splitting, OutputFormat: validateFormat(buildOpts.Format), AbsOutputFile: validatePath(log, realFS, buildOpts.Outfile, "outfile path"), @@ -1265,7 +1286,7 @@ func validateBuildOptions( ExtensionToLoader: validateLoaders(log, buildOpts.Loader), ExtensionOrder: validateResolveExtensions(log, buildOpts.ResolveExtensions), ExternalSettings: validateExternals(log, realFS, buildOpts.External), - ExternalPackages: buildOpts.Packages == PackagesExternal, + ExternalPackages: validateExternalPackages(buildOpts.Packages), PackageAliases: validateAlias(log, realFS, buildOpts.Alias), TSConfigPath: validatePath(log, realFS, buildOpts.Tsconfig, "tsconfig path"), TSConfigRaw: buildOpts.TsconfigRaw, @@ -1694,7 +1715,7 @@ func transformImpl(input string, transformOpts TransformOptions) TransformResult SourceRoot: transformOpts.SourceRoot, ExcludeSourcesContent: transformOpts.SourcesContent == SourcesContentExclude, OutputFormat: validateFormat(transformOpts.Format), - GlobalName: validateGlobalName(log, transformOpts.GlobalName), + GlobalName: validateGlobalName(log, transformOpts.GlobalName, "(global name)"), MinifySyntax: transformOpts.MinifySyntax, MinifyWhitespace: transformOpts.MinifyWhitespace, MinifyIdentifiers: transformOpts.MinifyIdentifiers, @@ -1898,6 +1919,7 @@ func (impl *pluginImpl) onResolve(options OnResolveOptions, callback func(OnReso ResolveDir: args.ResolveDir, Kind: importKindToResolveKind(args.Kind), PluginData: args.PluginData, + With: args.With.DecodeIntoMap(), }) result.PluginName = response.PluginName result.AbsWatchFiles = impl.validatePathsArray(response.WatchFiles, "watch file") @@ -1924,6 +1946,33 @@ func (impl *pluginImpl) onResolve(options OnResolveOptions, callback func(OnReso // Convert log messages result.Msgs = convertErrorsAndWarningsToInternal(response.Errors, response.Warnings) + + // Warn if the plugin returned things without resolving the path + if response.Path == "" && !response.External { + var what string + if response.Namespace != "" { + what = "namespace" + } else if response.Suffix != "" { + what = "suffix" + } else if response.PluginData != nil { + what = "pluginData" + } else if response.WatchFiles != nil { + what = "watchFiles" + } else if response.WatchDirs != nil { + what = "watchDirs" + } + if what != "" { + path := "path" + if logger.API == logger.GoAPI { + what = strings.Title(what) + path = strings.Title(path) + } + result.Msgs = append(result.Msgs, logger.Msg{ + Kind: logger.Warning, + Data: logger.MsgData{Text: fmt.Sprintf("Returning %q doesn't do anything when %q is empty", what, path)}, + }) + } + } return }, }) @@ -1940,16 +1989,12 @@ func (impl *pluginImpl) onLoad(options OnLoadOptions, callback func(OnLoadArgs) Filter: filter, Namespace: options.Namespace, Callback: func(args config.OnLoadArgs) (result config.OnLoadResult) { - with := make(map[string]string) - 
for _, attr := range args.Path.ImportAttributes.Decode() { - with[attr.Key] = attr.Value - } response, err := callback(OnLoadArgs{ Path: args.Path.Text, Namespace: args.Path.Namespace, PluginData: args.PluginData, Suffix: args.Path.IgnoredSuffix, - With: with, + With: args.Path.ImportAttributes.DecodeIntoMap(), }) result.PluginName = response.PluginName result.AbsWatchFiles = impl.validatePathsArray(response.WatchFiles, "watch file") @@ -2054,6 +2099,7 @@ func loadPlugins(initialOptions *BuildOptions, fs fs.FS, log logger.Log, caches logger.Range{}, // importPathRange logger.Path{Text: options.Importer, Namespace: options.Namespace}, path, + logger.EncodeImportAttributes(options.With), kind, absResolveDir, options.PluginData, @@ -2323,11 +2369,11 @@ func analyzeMetafileImpl(metafile string, opts AnalyzeMetafileOptions) string { third := "100.0%" table = append(table, tableEntry{ - first: fmt.Sprintf("%s%s%s", colors.Bold, entry.name, colors.Reset), + first: entry.name, firstLen: utf8.RuneCountInString(entry.name), - second: fmt.Sprintf("%s%s%s", colors.Bold, second, colors.Reset), + second: second, secondLen: len(second), - third: fmt.Sprintf("%s%s%s", colors.Bold, third, colors.Reset), + third: third, thirdLen: len(third), isTopLevel: true, }) @@ -2404,8 +2450,10 @@ func analyzeMetafileImpl(metafile string, opts AnalyzeMetafileOptions) string { // Render the columns now that we know the widths for _, entry := range table { prefix := "\n" + color := colors.Bold if !entry.isTopLevel { prefix = "" + color = "" } // Import paths don't have second and third columns @@ -2427,17 +2475,23 @@ func analyzeMetafileImpl(metafile string, opts AnalyzeMetafileOptions) string { extraSpace = 1 } - sb.WriteString(fmt.Sprintf("%s %s %s%s%s %s %s%s%s %s\n", + sb.WriteString(fmt.Sprintf("%s %s%s%s %s%s%s %s%s%s %s%s%s %s%s%s\n", prefix, + color, entry.first, + colors.Reset, colors.Dim, strings.Repeat(lineChar, extraSpace+maxFirstLen-entry.firstLen+maxSecondLen-entry.secondLen), colors.Reset, + color, secondTrimmed, + colors.Reset, colors.Dim, strings.Repeat(lineChar, extraSpace+maxThirdLen-entry.thirdLen+len(second)-len(secondTrimmed)), colors.Reset, + color, entry.third, + colors.Reset, )) } diff --git a/vendor/modules.txt b/vendor/modules.txt index e6023d48f5c..4aafcaffb2c 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -113,7 +113,7 @@ github.com/dgryski/go-rendezvous ## explicit; go 1.13 github.com/dlclark/regexp2 github.com/dlclark/regexp2/syntax -# github.com/evanw/esbuild v0.21.2 +# github.com/evanw/esbuild v0.24.2 ## explicit; go 1.13 github.com/evanw/esbuild/internal/api_helpers github.com/evanw/esbuild/internal/ast
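// A hedged sketch of how the `With` field added to OnResolveArgs in this
// update surfaces import attributes to a Go plugin; the plugin name, filter,
// and entry point below are made up for the example, and OnLoadArgs exposes
// the same map for loaded files.
package main

import (
	"fmt"

	"github.com/evanw/esbuild/pkg/api"
)

var attrPlugin = api.Plugin{
	Name: "log-import-attributes",
	Setup: func(build api.PluginBuild) {
		build.OnResolve(api.OnResolveOptions{Filter: `.*`},
			func(args api.OnResolveArgs) (api.OnResolveResult, error) {
				// `import data from "./x.json" with { type: "json" }` arrives
				// here as args.With == map[string]string{"type": "json"}.
				if len(args.With) > 0 {
					fmt.Printf("%s imported with attributes %v\n", args.Path, args.With)
				}
				// Returning an empty result lets esbuild keep resolving; note
				// the new warning above if a result sets fields such as
				// pluginData without also setting a path.
				return api.OnResolveResult{}, nil
			})
	},
}

func main() {
	result := api.Build(api.BuildOptions{
		EntryPoints: []string{"app.js"}, // hypothetical entry point
		Bundle:      true,
		Write:       false,
		Plugins:     []api.Plugin{attrPlugin},
	})
	if len(result.Errors) > 0 {
		fmt.Println(result.Errors)
	}
}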