From cd4762c0faa06bacb6e0bec21275488787fdd8bd Mon Sep 17 00:00:00 2001
From: Evan Wallace
Date: Sat, 10 Apr 2021 17:10:40 -0700
Subject: [PATCH] count dynamic cross-chunk imports as dependencies

---
 CHANGELOG.md               | 14 ++++++
 internal/bundler/linker.go | 99 +++++++++++++++++++++++++++-----------
 scripts/js-api-tests.js    | 94 ++++++++++++++++++++++++++++++++++--
 3 files changed, 175 insertions(+), 32 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index a73783eda9d..f1b5d8a9cd0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,19 @@
 # Changelog
 
+## Unreleased
+
+* Fix hash calculation for code splitting and dynamic imports ([#1076](https://github.com/evanw/esbuild/issues/1076))
+
+    The hash included in the file name of each output file is intended to change if and only if anything relevant to the content of that output file changes. It includes:
+
+    * The contents of the file with the paths of other output files omitted
+    * The output path of the file with the final hash omitted
+    * Some information about the input files involved in that output file
+    * The contents of the associated source map, if there is one
+    * All of the information above for all transitive dependencies found by following `import` statements
+
+    However, this didn't include dynamic `import()` expressions due to an oversight. With this release, dynamic `import()` expressions are now also counted as transitive dependencies. This fixes an issue where the content of an output file could change without its hash also changing. As a side effect of this change, dynamic imports of other output files are now listed in the metadata file if the `metafile` setting is enabled.
+
 ## 0.11.7
 
 * Fix incorrect chunk reference with code splitting, css, and dynamic imports ([#1125](https://github.com/evanw/esbuild/issues/1125))
diff --git a/internal/bundler/linker.go b/internal/bundler/linker.go
index 69b913d18d4..e849cc9a3e3 100644
--- a/internal/bundler/linker.go
+++ b/internal/bundler/linker.go
@@ -281,7 +281,7 @@ type chunkInfo struct {
 	entryPointBit uint // An index into "c.entryPoints"
 
 	// For code splitting
-	crossChunkImports []uint32
+	crossChunkImports []chunkImport
 
 	// This is the representation-specific information
 	chunkRepr chunkRepr
@@ -312,6 +312,11 @@ type chunkInfo struct {
 	isExecutable bool
 }
 
+type chunkImport struct {
+	chunkIndex uint32
+	importKind ast.ImportKind
+}
+
 // This is a chunk of source code followed by a reference to another chunk. For
 // example, the file "@import 'CHUNK0001'; body { color: black; }" would be
 // represented by two pieces, one with the data "@import '" and another with the
@@ -607,8 +612,14 @@ func (c *linkerContext) enforceNoCyclicChunkImports(chunks []chunkInfo) {
 			}
 		}
 		path = append(path, chunkIndex)
-		for _, otherChunkIndex := range chunks[chunkIndex].crossChunkImports {
-			validate(int(otherChunkIndex), path)
+		for _, chunkImport := range chunks[chunkIndex].crossChunkImports {
+			// Ignore cycles caused by dynamic "import()" expressions. These are fine
+			// because they don't necessarily cause initialization order issues and
+			// they don't indicate a bug in our chunk generation algorithm. They arise
+			// normally in real code (e.g. two files that import each other).
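+			// For example, "a.js" and "b.js" can each contain an "import()" of the
+			// other; skipping dynamic imports here keeps such ordinary cycles from
+			// being treated as cyclic chunk import errors by this function.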
+			if chunkImport.importKind != ast.ImportDynamic {
+				validate(int(chunkImport.chunkIndex), path)
+			}
 		}
 	}
 	path := make([]int, 0, len(chunks))
@@ -933,8 +944,9 @@ func (c *linkerContext) computeCrossChunkDependencies(chunks []chunkInfo) {
 	}
 
 	type chunkMeta struct {
-		imports map[js_ast.Ref]bool
-		exports map[js_ast.Ref]bool
+		imports        map[js_ast.Ref]bool
+		exports        map[js_ast.Ref]bool
+		dynamicImports map[int]bool
 	}
 
 	chunkMetas := make([]chunkMeta, len(chunks))
@@ -945,8 +957,10 @@ func (c *linkerContext) computeCrossChunkDependencies(chunks []chunkInfo) {
 	waitGroup.Add(len(chunks))
 	for chunkIndex, chunk := range chunks {
 		go func(chunkIndex int, chunk chunkInfo) {
+			chunkMeta := &chunkMetas[chunkIndex]
 			imports := make(map[js_ast.Ref]bool)
-			chunkMetas[chunkIndex] = chunkMeta{imports: imports, exports: make(map[js_ast.Ref]bool)}
+			chunkMeta.imports = imports
+			chunkMeta.exports = make(map[js_ast.Ref]bool)
 
 			// Go over each file in this chunk
 			for sourceIndex := range chunk.filesWithPartsInChunk {
@@ -966,6 +980,16 @@ func (c *linkerContext) computeCrossChunkDependencies(chunks []chunkInfo) {
 					otherChunkIndex := c.files[record.SourceIndex.GetIndex()].entryPointChunkIndex
 					record.Path.Text = chunks[otherChunkIndex].uniqueKey
 					record.SourceIndex = ast.Index32{}
+
+					// Track this cross-chunk dynamic import so we make sure to
+					// include its hash when we're calculating the hashes of all
+					// dependencies of this chunk.
+					if int(otherChunkIndex) != chunkIndex {
+						if chunkMeta.dynamicImports == nil {
+							chunkMeta.dynamicImports = make(map[int]bool)
+						}
+						chunkMeta.dynamicImports[int(otherChunkIndex)] = true
+					}
 				}
 			}
 
@@ -1067,10 +1091,11 @@ func (c *linkerContext) computeCrossChunkDependencies(chunks []chunkInfo) {
 		if !ok {
 			continue
 		}
+		chunkMeta := chunkMetas[chunkIndex]
 
 		// Find all uses in this chunk of symbols from other chunks
 		chunkRepr.importsFromOtherChunks = make(map[uint32]crossChunkImportItemArray)
-		for importRef := range chunkMetas[chunkIndex].imports {
+		for importRef := range chunkMeta.imports {
 			// Ignore uses that aren't top-level symbols
 			if otherChunkIndex := c.symbols.Get(importRef).ChunkIndex; otherChunkIndex.IsValid() {
 				if otherChunkIndex := otherChunkIndex.GetIndex(); otherChunkIndex != uint32(chunkIndex) {
@@ -1092,6 +1117,23 @@ func (c *linkerContext) computeCrossChunkDependencies(chunks []chunkInfo) {
 				}
 			}
 		}
+
+		// Make sure we also track dynamic cross-chunk imports. These need to be
+		// tracked so we count them as dependencies of this chunk for the purpose
+		// of hash calculation.
+		if chunkMeta.dynamicImports != nil {
+			sortedDynamicImports := make([]int, 0, len(chunkMeta.dynamicImports))
+			for chunkIndex := range chunkMeta.dynamicImports {
+				sortedDynamicImports = append(sortedDynamicImports, chunkIndex)
+			}
+			sort.Ints(sortedDynamicImports)
+			for _, chunkIndex := range sortedDynamicImports {
+				chunk.crossChunkImports = append(chunk.crossChunkImports, chunkImport{
+					importKind: ast.ImportDynamic,
+					chunkIndex: uint32(chunkIndex),
+				})
+			}
+		}
 	}
 
 	// Generate cross-chunk exports. These must be computed before cross-chunk
@@ -1140,7 +1182,6 @@ func (c *linkerContext) computeCrossChunkDependencies(chunks []chunkInfo) {
 			continue
 		}
 
-		var crossChunkImports []uint32
 		var crossChunkPrefixStmts []js_ast.Stmt
 
 		for _, crossChunkImport := range c.sortedCrossChunkImports(chunks, chunkRepr.importsFromOtherChunks) {
@@ -1150,8 +1191,11 @@ func (c *linkerContext) computeCrossChunkDependencies(chunks []chunkInfo) {
 			for _, item := range crossChunkImport.sortedImportItems {
 				items = append(items, js_ast.ClauseItem{Name: js_ast.LocRef{Ref: item.ref}, Alias: item.exportAlias})
 			}
-			importRecordIndex := uint32(len(crossChunkImports))
-			crossChunkImports = append(crossChunkImports, crossChunkImport.chunkIndex)
+			importRecordIndex := uint32(len(chunk.crossChunkImports))
+			chunk.crossChunkImports = append(chunk.crossChunkImports, chunkImport{
+				importKind: ast.ImportStmt,
+				chunkIndex: crossChunkImport.chunkIndex,
+			})
 			if len(items) > 0 {
 				// "import {a, b} from './chunk.js'"
 				crossChunkPrefixStmts = append(crossChunkPrefixStmts, js_ast.Stmt{Data: &js_ast.SImport{
@@ -1170,14 +1214,12 @@ func (c *linkerContext) computeCrossChunkDependencies(chunks []chunkInfo) {
 			}
 		}
 
-		chunk.crossChunkImports = crossChunkImports
 		chunkRepr.crossChunkPrefixStmts = crossChunkPrefixStmts
 	}
 }
 
 type crossChunkImport struct {
 	chunkIndex        uint32
-	sortingKey        string
 	sortedImportItems crossChunkImportItemArray
 }
 
@@ -1188,7 +1230,7 @@ func (a crossChunkImportArray) Len() int { return len(a) }
 func (a crossChunkImportArray) Swap(i int, j int) { a[i], a[j] = a[j], a[i] }
 
 func (a crossChunkImportArray) Less(i int, j int) bool {
-	return a[i].sortingKey < a[j].sortingKey
+	return a[i].chunkIndex < a[j].chunkIndex
 }
 
 // Sort cross-chunk imports by chunk name for determinism
@@ -1197,14 +1239,14 @@ func (c *linkerContext) sortedCrossChunkImports(chunks []chunkInfo, importsFromO
 	for otherChunkIndex, importItems := range importsFromOtherChunks {
 		// Sort imports from a single chunk by alias for determinism
-		exportsToOtherChunks := chunks[otherChunkIndex].chunkRepr.(*chunkReprJS).exportsToOtherChunks
+		otherChunk := &chunks[otherChunkIndex]
+		exportsToOtherChunks := otherChunk.chunkRepr.(*chunkReprJS).exportsToOtherChunks
 		for i, item := range importItems {
 			importItems[i].exportAlias = exportsToOtherChunks[item.ref]
 		}
 		sort.Sort(importItems)
 		result = append(result, crossChunkImport{
 			chunkIndex:        otherChunkIndex,
-			sortingKey:        chunks[otherChunkIndex].entryBits.String(),
 			sortedImportItems: importItems,
 		})
 	}
 
@@ -2958,9 +3000,8 @@ func (c *linkerContext) computeChunks() []chunkInfo {
 		chunk.filesWithPartsInChunk[uint32(sourceIndex)] = true
 	}
 
-	// Sort the chunks for determinism. This mostly doesn't matter because each
-	// chunk is a separate file, but it matters for error messages in tests since
-	// tests stop on the first output mismatch.
+	// Sort the chunks for determinism. This matters because we use chunk indices
+	// as sorting keys in a few places.
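+	// (For example, crossChunkImportArray.Less now compares chunkIndex values,
+	// and the dynamic cross-chunk imports gathered above are sorted with
+	// sort.Ints before being appended.)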
 	sortedChunks := make([]chunkInfo, 0, len(jsChunks)+len(cssChunks))
 	sortedKeys := make([]string, 0, len(jsChunks)+len(cssChunks))
 	for key := range jsChunks {
@@ -4362,10 +4403,10 @@ func (c *linkerContext) generateChunkJS(chunks []chunkInfo, chunkIndex int, chun
 			MangleSyntax: c.options.MangleSyntax,
 		}
 		crossChunkImportRecords := make([]ast.ImportRecord, len(chunk.crossChunkImports))
-		for i, otherChunkIndex := range chunk.crossChunkImports {
+		for i, chunkImport := range chunk.crossChunkImports {
 			crossChunkImportRecords[i] = ast.ImportRecord{
-				Kind: ast.ImportStmt,
-				Path: logger.Path{Text: chunks[otherChunkIndex].uniqueKey},
+				Kind: chunkImport.importKind,
+				Path: logger.Path{Text: chunks[chunkImport.chunkIndex].uniqueKey},
 			}
 		}
 		crossChunkPrefix = js_printer.Print(js_ast.AST{
@@ -4461,15 +4502,15 @@ func (c *linkerContext) generateChunkJS(chunks []chunkInfo, chunkIndex int, chun
 	// Print imports
 	isFirstMeta := true
 	jMeta.AddString("{\n \"imports\": [")
-	for _, otherChunkIndex := range chunk.crossChunkImports {
+	for _, chunkImport := range chunk.crossChunkImports {
 		if isFirstMeta {
 			isFirstMeta = false
 		} else {
 			jMeta.AddString(",")
 		}
 		jMeta.AddString(fmt.Sprintf("\n {\n \"path\": %s,\n \"kind\": %s\n }",
-			js_printer.QuoteForJSON(c.res.PrettyPath(logger.Path{Text: chunks[otherChunkIndex].uniqueKey, Namespace: "file"}), c.options.ASCIIOnly),
-			js_printer.QuoteForJSON(ast.ImportStmt.StringForMetafile(), c.options.ASCIIOnly)))
+			js_printer.QuoteForJSON(c.res.PrettyPath(logger.Path{Text: chunks[chunkImport.chunkIndex].uniqueKey, Namespace: "file"}), c.options.ASCIIOnly),
+			js_printer.QuoteForJSON(chunkImport.importKind.StringForMetafile(), c.options.ASCIIOnly)))
 	}
 	if !isFirstMeta {
 		jMeta.AddString("\n ")
 	}
@@ -4816,15 +4857,15 @@ func (c *linkerContext) generateChunkCSS(chunks []chunkInfo, chunkIndex int, chu
 	if c.options.NeedsMetafile {
 		isFirstMeta := true
 		jMeta.AddString("{\n \"imports\": [")
-		for _, otherChunkIndex := range chunk.crossChunkImports {
+		for _, chunkImport := range chunk.crossChunkImports {
 			if isFirstMeta {
 				isFirstMeta = false
 			} else {
 				jMeta.AddString(",")
 			}
 			jMeta.AddString(fmt.Sprintf("\n {\n \"path\": %s,\n \"kind\": %s\n }",
-				js_printer.QuoteForJSON(c.res.PrettyPath(logger.Path{Text: chunks[otherChunkIndex].uniqueKey, Namespace: "file"}), c.options.ASCIIOnly),
-				js_printer.QuoteForJSON(ast.ImportAt.StringForMetafile(), c.options.ASCIIOnly)))
+				js_printer.QuoteForJSON(c.res.PrettyPath(logger.Path{Text: chunks[chunkImport.chunkIndex].uniqueKey, Namespace: "file"}), c.options.ASCIIOnly),
+				js_printer.QuoteForJSON(chunkImport.importKind.StringForMetafile(), c.options.ASCIIOnly)))
 		}
 		if !isFirstMeta {
 			jMeta.AddString("\n ")
 		}
@@ -4919,8 +4960,8 @@ func appendIsolatedHashesForImportedChunks(
 	chunk := &chunks[chunkIndex]
 
 	// Visit the other chunks that this chunk imports before visiting this chunk
-	for _, otherChunkIndex := range chunk.crossChunkImports {
-		appendIsolatedHashesForImportedChunks(hash, chunks, otherChunkIndex, visited, visitedKey)
+	for _, chunkImport := range chunk.crossChunkImports {
+		appendIsolatedHashesForImportedChunks(hash, chunks, chunkImport.chunkIndex, visited, visitedKey)
 	}
 
 	// Mix in the hash for this chunk
diff --git a/scripts/js-api-tests.js b/scripts/js-api-tests.js
index accea416719..d46e728a874 100644
--- a/scripts/js-api-tests.js
+++ b/scripts/js-api-tests.js
@@ -991,9 +991,13 @@ body {
       outChunk,
     ])
 
-    assert.deepStrictEqual(json.outputs[outEntry].imports, [{ path: makeOutPath(chunk), kind: 'import-statement' }])
-    assert.deepStrictEqual(json.outputs[outImport1].imports, [{ path: makeOutPath(chunk), kind: 'import-statement' }])
-    assert.deepStrictEqual(json.outputs[outImport2].imports, [{ path: makeOutPath(chunk), kind: 'import-statement' }])
+    assert.deepStrictEqual(json.outputs[outEntry].imports, [
+      { path: outImport1, kind: 'dynamic-import' },
+      { path: outImport2, kind: 'dynamic-import' },
+      { path: outChunk, kind: 'import-statement' },
+    ])
+    assert.deepStrictEqual(json.outputs[outImport1].imports, [{ path: outChunk, kind: 'import-statement' }])
+    assert.deepStrictEqual(json.outputs[outImport2].imports, [{ path: outChunk, kind: 'import-statement' }])
     assert.deepStrictEqual(json.outputs[outChunk].imports, [])
 
     assert.deepStrictEqual(json.outputs[outEntry].exports, [])
@@ -1580,6 +1584,90 @@ export {
     assert.strictEqual(await result3.default(), 'shared3shared1');
   },
 
+  async splittingStaticImportHashChange({ esbuild, testDir }) {
+    const input1 = path.join(testDir, 'a', 'in1.js')
+    const input2 = path.join(testDir, 'b', 'in2.js')
+    const outdir = path.join(testDir, 'out')
+
+    await mkdirAsync(path.dirname(input1), { recursive: true })
+    await mkdirAsync(path.dirname(input2), { recursive: true })
+    await writeFileAsync(input1, `import ${JSON.stringify(input2)}`)
+    await writeFileAsync(input2, `console.log(123)`)
+
+    const result1 = await esbuild.build({
+      entryPoints: [input1, input2],
+      bundle: true,
+      outdir,
+      format: 'esm',
+      splitting: true,
+      write: false,
+      entryNames: '[name]-[hash]',
+    })
+
+    await writeFileAsync(input2, `console.log(321)`)
+
+    const result2 = await esbuild.build({
+      entryPoints: [input1, input2],
+      bundle: true,
+      outdir,
+      format: 'esm',
+      splitting: true,
+      write: false,
+      entryNames: '[name]-[hash]',
+    })
+
+    assert.strictEqual(result1.outputFiles.length, 3)
+    assert.strictEqual(result2.outputFiles.length, 3)
+
+    // The hashes of both output files must change. Previously there was a bug
+    // where hash changes worked for static imports but not for dynamic imports.
+    for (const { path: oldPath } of result1.outputFiles)
+      for (const { path: newPath } of result2.outputFiles)
+        assert.notStrictEqual(oldPath, newPath)
+  },
+
+  async splittingDynamicImportHashChangeIssue1076({ esbuild, testDir }) {
+    const input1 = path.join(testDir, 'a', 'in1.js')
+    const input2 = path.join(testDir, 'b', 'in2.js')
+    const outdir = path.join(testDir, 'out')
+
+    await mkdirAsync(path.dirname(input1), { recursive: true })
+    await mkdirAsync(path.dirname(input2), { recursive: true })
+    await writeFileAsync(input1, `import(${JSON.stringify(input2)})`)
+    await writeFileAsync(input2, `console.log(123)`)
+
+    const result1 = await esbuild.build({
+      entryPoints: [input1],
+      bundle: true,
+      outdir,
+      format: 'esm',
+      splitting: true,
+      write: false,
+      entryNames: '[name]-[hash]',
+    })
+
+    await writeFileAsync(input2, `console.log(321)`)
+
+    const result2 = await esbuild.build({
+      entryPoints: [input1],
+      bundle: true,
+      outdir,
+      format: 'esm',
+      splitting: true,
+      write: false,
+      entryNames: '[name]-[hash]',
+    })
+
+    assert.strictEqual(result1.outputFiles.length, 2)
+    assert.strictEqual(result2.outputFiles.length, 2)
+
+    // The hashes of both output files must change. Previously there was a bug
+    // where hash changes worked for static imports but not for dynamic imports.
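+    // (Here in1.js reaches in2.js only through a dynamic "import()", so editing
+    // in2.js must still produce a new hash for in1's output file as well.)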
+    for (const { path: oldPath } of result1.outputFiles)
+      for (const { path: newPath } of result2.outputFiles)
+        assert.notStrictEqual(oldPath, newPath)
+  },
+
   async stdinStdoutBundle({ esbuild, testDir }) {
     const auxiliary = path.join(testDir, 'auxiliary.js')
     await writeFileAsync(auxiliary, 'export default 123')