fix bug in metafile path generation (#662)
evanw committed Jan 13, 2021
1 parent c8e19b0 commit 7d0a0b5
Showing 3 changed files with 96 additions and 13 deletions.
CHANGELOG.md (4 additions & 0 deletions)
@@ -6,6 +6,10 @@

The JavaScript implementation of esbuild's API now calls `unref()` internally, so node will exit even if the internal long-lived esbuild process is still running. You should no longer need to explicitly call `stop()` on the service returned by `startService()`, which simplifies service lifetime management. This feature was contributed by [@SalvatorePreviti](https://github.com/SalvatorePreviti).
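  For example, a script using the service API can now simply let the process exit when it finishes (a minimal sketch against the `startService()` API of this release line; the input string and options are made up):

  ```js
  const esbuild = require('esbuild')

  async function main() {
    // Start the shared service and use it for a one-off transform.
    const service = await esbuild.startService()
    const result = await service.transform('let x: number = 1', { loader: 'ts' })
    console.log(result.code)
    // No explicit service.stop() is needed anymore: the child process is
    // unref()'d internally, so node can exit once this script finishes.
  }

  main()
  ```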

* Fix bug in metafile path generation ([#662](https://github.com/evanw/esbuild/issues/662))

Certain import path metadata in the JSON file generated by the `--metafile` setting could be incorrect when code splitting was active and there were multiple entry points in different subdirectories. The affected paths, which describe cross-chunk imports of other generated code splitting chunks, were mistakenly relative to the subdirectory inside the output directory instead of to the output directory itself. This issue has been fixed.
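  As an illustration (hypothetical file and chunk names, not taken from this commit), a script that walks the metafile shows the paths in question:

  ```js
  const fs = require('fs')

  // Hypothetical setup: entry points in different subdirectories of "src/",
  // bundled with code splitting into "out/" and a metafile at "meta.json".
  const metafile = JSON.parse(fs.readFileSync('meta.json', 'utf8'))
  for (const [outputPath, output] of Object.entries(metafile.outputs)) {
    for (const record of output.imports) {
      // Before the fix, a cross-chunk import in "out/pages/home.js" could be
      // reported as "out/pages/chunk.ABCD1234.js" (relative to the entry
      // point's subdirectory). After the fix it is "out/chunk.ABCD1234.js",
      // relative to the output directory itself.
      console.log(`${outputPath} imports ${record.path}`)
    }
  }
  ```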

## 0.8.31

* Fix minification issue from previous release ([#648](https://github.com/evanw/esbuild/issues/648))
internal/bundler/linker.go (21 additions & 13 deletions)
@@ -300,8 +300,13 @@ type chunkInfo struct {
repr chunkRepr
}

+type generateContinue struct {
+	crossChunkImportRecords []ast.ImportRecord
+	crossChunkAbsPaths      []string
+}

type chunkRepr interface {
-	generate(c *linkerContext, chunk *chunkInfo) func(crossChunkImportRecords []ast.ImportRecord) []OutputFile
+	generate(c *linkerContext, chunk *chunkInfo) func(generateContinue) []OutputFile
}

type chunkReprJS struct {
@@ -672,15 +677,21 @@ func (c *linkerContext) generateChunksInParallel(chunks []chunkInfo) []OutputFil

// Fill in the cross-chunk import records now that the paths are known
crossChunkImportRecords := make([]ast.ImportRecord, len(chunk.crossChunkImports))
+	crossChunkAbsPaths := make([]string, len(chunk.crossChunkImports))
for i, otherChunkIndex := range chunk.crossChunkImports {
+		relPath := chunks[otherChunkIndex].relPath()
+		crossChunkAbsPaths[i] = c.fs.Join(c.options.AbsOutputDir, relPath)
crossChunkImportRecords[i] = ast.ImportRecord{
Kind: ast.ImportStmt,
-			Path: logger.Path{Text: c.pathBetweenChunks(chunk.relDir, chunks[otherChunkIndex].relPath())},
+			Path: logger.Path{Text: c.pathBetweenChunks(chunk.relDir, relPath)},
}
}

// Generate the chunk
-	results[i] = resume(crossChunkImportRecords)
+	results[i] = resume(generateContinue{
+		crossChunkAbsPaths:      crossChunkAbsPaths,
+		crossChunkImportRecords: crossChunkImportRecords,
+	})

// Wake up any dependents now that we're done
for _, chunkIndex := range order.dependents {
@@ -3492,7 +3503,7 @@ func (c *linkerContext) renameSymbolsInChunk(chunk *chunkInfo, filesInOrder []ui
return r
}

-func (repr *chunkReprJS) generate(c *linkerContext, chunk *chunkInfo) func([]ast.ImportRecord) []OutputFile {
+func (repr *chunkReprJS) generate(c *linkerContext, chunk *chunkInfo) func(generateContinue) []OutputFile {
var results []OutputFile
compileResults := make([]compileResultJS, 0, len(chunk.partsInChunkInOrder))
runtimeMembers := c.files[runtime.SourceIndex].repr.(*reprJS).ast.ModuleScope.Members
@@ -3534,7 +3545,7 @@ func (repr *chunkReprJS) generate(c *linkerContext, chunk *chunkInfo) func([]ast
}

// Wait for cross-chunk import records before continuing
-	return func(crossChunkImportRecords []ast.ImportRecord) []OutputFile {
+	return func(continueData generateContinue) []OutputFile {
// Also generate the cross-chunk binding code
var crossChunkPrefix []byte
var crossChunkSuffix []byte
@@ -3551,7 +3562,7 @@ func (repr *chunkReprJS) generate(c *linkerContext, chunk *chunkInfo) func([]ast
MangleSyntax: c.options.MangleSyntax,
}
crossChunkPrefix = js_printer.Print(js_ast.AST{
-			ImportRecords: crossChunkImportRecords,
+			ImportRecords: continueData.crossChunkImportRecords,
Parts: []js_ast.Part{{Stmts: repr.crossChunkPrefixStmts}},
}, c.symbols, r, printOptions).JS
crossChunkSuffix = js_printer.Print(js_ast.AST{
@@ -3633,14 +3644,12 @@ func (repr *chunkReprJS) generate(c *linkerContext, chunk *chunkInfo) func([]ast
// Print imports
isFirstMeta := true
jMeta.AddString("{\n \"imports\": [")
-		for _, record := range crossChunkImportRecords {
+		for _, importAbsPath := range continueData.crossChunkAbsPaths {
if isFirstMeta {
isFirstMeta = false
} else {
jMeta.AddString(",")
}
-			chunkBaseWithoutPublicPath := path.Base(record.Path.Text)
-			importAbsPath := c.fs.Join(c.options.AbsOutputDir, chunk.relDir, chunkBaseWithoutPublicPath)
jMeta.AddString(fmt.Sprintf("\n {\n \"path\": %s\n }",
js_printer.QuoteForJSON(c.res.PrettyPath(logger.Path{Text: importAbsPath, Namespace: "file"}), c.options.ASCIIOnly)))
}
@@ -3955,7 +3964,7 @@ type compileResultCSS struct {
externalImportRecords []ast.ImportRecord
}

-func (repr *chunkReprCSS) generate(c *linkerContext, chunk *chunkInfo) func([]ast.ImportRecord) []OutputFile {
+func (repr *chunkReprCSS) generate(c *linkerContext, chunk *chunkInfo) func(generateContinue) []OutputFile {
var results []OutputFile
compileResults := make([]compileResultCSS, 0, len(chunk.filesInChunkInOrder))

@@ -4001,7 +4010,7 @@ func (repr *chunkReprCSS) generate(c *linkerContext, chunk *chunkInfo) func([]as
}

// Wait for cross-chunk import records before continuing
-	return func(crossChunkImportRecords []ast.ImportRecord) []OutputFile {
+	return func(continueData generateContinue) []OutputFile {
waitGroup.Wait()
j := js_printer.Joiner{}
newlineBeforeComment := false
@@ -4043,13 +4052,12 @@ func (repr *chunkReprCSS) generate(c *linkerContext, chunk *chunkInfo) func([]as
if c.options.AbsMetadataFile != "" {
isFirstMeta := true
jMeta.AddString("{\n \"imports\": [")
-		for _, record := range crossChunkImportRecords {
+		for _, importAbsPath := range continueData.crossChunkAbsPaths {
if isFirstMeta {
isFirstMeta = false
} else {
jMeta.AddString(",")
}
-			importAbsPath := c.fs.Join(c.options.AbsOutputDir, chunk.relDir, record.Path.Text)
jMeta.AddString(fmt.Sprintf("\n {\n \"path\": %s\n }",
js_printer.QuoteForJSON(c.res.PrettyPath(logger.Path{Text: importAbsPath, Namespace: "file"}), c.options.ASCIIOnly)))
}
scripts/js-api-tests.js (71 additions & 0 deletions)
@@ -583,6 +583,77 @@ body {
assert.deepStrictEqual(json.outputs[outChunk].inputs, { [inImported]: { bytesInOutput: 51 } })
},

async metafileSplittingDoubleDynamicImport({ esbuild, testDir }) {
const entry = path.join(testDir, 'entry.js')
const importDir = path.join(testDir, 'import-dir')
const import1 = path.join(importDir, 'import1.js')
const import2 = path.join(importDir, 'import2.js')
const shared = path.join(testDir, 'shared.js')
const outdir = path.join(testDir, 'out')
const metafile = path.join(testDir, 'meta.json')
await mkdirAsync(importDir)
await writeFileAsync(entry, `
import "./${path.relative(path.dirname(entry), shared)}"
import("./${path.relative(path.dirname(entry), import1)}")
import("./${path.relative(path.dirname(entry), import2)}")
`)
await writeFileAsync(import1, `
import "./${path.relative(path.dirname(import1), shared)}"
`)
await writeFileAsync(import2, `
import "./${path.relative(path.dirname(import2), shared)}"
`)
await writeFileAsync(shared, `
console.log('side effect')
`)
await esbuild.build({
entryPoints: [entry],
bundle: true,
outdir,
metafile,
splitting: true,
format: 'esm',
})

const json = JSON.parse(await readFileAsync(metafile))
assert.strictEqual(Object.keys(json.inputs).length, 4)
assert.strictEqual(Object.keys(json.outputs).length, 4)
const cwd = process.cwd()
const makeOutPath = basename => path.relative(cwd, path.join(outdir, basename)).split(path.sep).join('/')
const makeInPath = pathname => path.relative(cwd, pathname).split(path.sep).join('/')

// Check metafile
const inEntry = makeInPath(entry);
const inImport1 = makeInPath(import1);
const inImport2 = makeInPath(import2);
const inShared = makeInPath(shared);
const chunk = 'chunk.27QEWJHV.js';
const outEntry = makeOutPath(path.relative(testDir, entry));
const outImport1 = makeOutPath(path.relative(testDir, import1));
const outImport2 = makeOutPath(path.relative(testDir, import2));
const outChunk = makeOutPath(chunk);

assert.deepStrictEqual(json.inputs[inEntry], { bytes: 112, imports: [{ path: inShared }, { path: inImport1 }, { path: inImport2 }] })
assert.deepStrictEqual(json.inputs[inImport1], { bytes: 35, imports: [{ path: inShared }] })
assert.deepStrictEqual(json.inputs[inImport2], { bytes: 35, imports: [{ path: inShared }] })
assert.deepStrictEqual(json.inputs[inShared], { bytes: 38, imports: [] })

assert.deepStrictEqual(json.outputs[outEntry].imports, [{ path: makeOutPath(chunk) }])
assert.deepStrictEqual(json.outputs[outImport1].imports, [{ path: makeOutPath(chunk) }])
assert.deepStrictEqual(json.outputs[outImport2].imports, [{ path: makeOutPath(chunk) }])
assert.deepStrictEqual(json.outputs[outChunk].imports, [])

assert.deepStrictEqual(json.outputs[outEntry].exports, [])
assert.deepStrictEqual(json.outputs[outImport1].exports, [])
assert.deepStrictEqual(json.outputs[outImport2].exports, [])
assert.deepStrictEqual(json.outputs[outChunk].exports, [])

assert.deepStrictEqual(json.outputs[outEntry].inputs, { [inEntry]: { bytesInOutput: 70 } })
assert.deepStrictEqual(json.outputs[outImport1].inputs, {})
assert.deepStrictEqual(json.outputs[outImport2].inputs, {})
assert.deepStrictEqual(json.outputs[outChunk].inputs, { [inShared]: { bytesInOutput: 28 } })
},

async metafileCJSInFormatIIFE({ esbuild, testDir }) {
const entry = path.join(testDir, 'entry.js')
const outfile = path.join(testDir, 'out.js')
