Skip to content

Commit

Permalink
support splitting new URL(import.meta.url, ...)
Browse files Browse the repository at this point in the history
  • Loading branch information
evanw committed Aug 31, 2022
1 parent 1e9b160 commit 0c96ea9
Show file tree
Hide file tree
Showing 9 changed files with 116 additions and 24 deletions.
15 changes: 15 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,21 @@

The `define` parameter appears at first glance to take a JSON object if you aren't paying close attention, but this actually isn't true. Values for `define` are instead strings of JavaScript code. This means you have to use `define: { foo: '"bar"' }` to replace `foo` with the string `"bar"`. Using `define: { foo: 'bar' }` actually replaces `foo` with the identifier `bar`. Previously esbuild allowed you to pass `define: { foo: false }` and `false` was automatically converted into a string, which made it more confusing to understand what `define` actually represents. Starting with this release, passing non-string values such as with `define: { foo: false }` will no longer be allowed. You will now have to write `define: { foo: 'false' }` instead.

* Interpret and rewrite `new URL(..., import.meta.url)` expressions when bundling

Some other bundlers have adopted a convention where the syntax `new URL('./file.js', import.meta.url)` causes `file.js` to be included in the current bundling operation as an additional entry point. The `'./file.js'` string is rewritten in the bundler's output to point to the resulting generated file for that entry point in the output directory (relative to the generated file containing the `new URL(...)` syntax). This is somewhat similar to how `import('./file.js')` works except that this reference just returns a `URL` object without importing the module. That lets you pass the URL of a module to other APIs such as `new Worker(...)` that take a script URL as input.

Previously this pattern didn't work at all with esbuild, but it will now work in esbuild starting with this release. To use it you must ensure that bundling is enabled, that the output format is set to `esm`, and that code splitting is enabled (so you need to use `--bundle --format=esm --splitting`). In addition, the path must be a relative path (i.e. it must start with either `./` or `../`). Here's what using this feature looks like:

```ts
const url = new URL('./worker.ts', import.meta.url)
const worker = new Worker(url, { type: 'module' })

worker.onmessage = (event: MessageEvent) => {
console.log(event.data)
}
```

* Move all binary executable packages to the `@esbuild/` scope

Binary package executables for esbuild are published as individual packages separate from the main `esbuild` package so you only have to download the relevant one for the current platform when you install esbuild. This release moves all of these packages under the `@esbuild/` scope to avoid collisions with 3rd-party packages. It also changes them to a consistent naming scheme that uses the `os` and `cpu` names from node.
Expand Down
5 changes: 5 additions & 0 deletions internal/ast/ast.go
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,9 @@ const (
// A call to "require.resolve()"
ImportRequireResolve

// "new URL('path', import.meta.url)" with a string argument
ImportNewURL

// A CSS "@import" rule
ImportAt

Expand All @@ -47,6 +50,8 @@ func (kind ImportKind) StringForMetafile() string {
return "dynamic-import"
case ImportRequireResolve:
return "require-resolve"
case ImportNewURL:
return "new-url"
case ImportAt, ImportAtConditional:
return "import-rule"
case ImportURL:
Expand Down
18 changes: 10 additions & 8 deletions internal/bundler/linker.go
Original file line number Diff line number Diff line change
Expand Up @@ -866,7 +866,7 @@ func (c *linkerContext) computeCrossChunkDependencies(chunks []chunkInfo) {
// Rewrite external dynamic imports to point to the chunk for that entry point
for _, importRecordIndex := range part.ImportRecordIndices {
record := &repr.AST.ImportRecords[importRecordIndex]
if record.SourceIndex.IsValid() && c.isExternalDynamicImport(record, sourceIndex) {
if record.SourceIndex.IsValid() && c.isImportOfAdditionalEntryPoint(record, sourceIndex) {
otherChunkIndex := c.graph.Files[record.SourceIndex.GetIndex()].EntryPointChunkIndex
record.Path.Text = chunks[otherChunkIndex].uniqueKey
record.SourceIndex = ast.Index32{}
Expand Down Expand Up @@ -1752,8 +1752,8 @@ func (c *linkerContext) scanImportsAndExports() {
for _, importRecordIndex := range part.ImportRecordIndices {
record := &repr.AST.ImportRecords[importRecordIndex]

// Don't follow external imports (this includes import() expressions)
if !record.SourceIndex.IsValid() || c.isExternalDynamicImport(record, sourceIndex) {
// Don't follow external imports (this includes "import()" and "new URL()" expressions)
if !record.SourceIndex.IsValid() || c.isImportOfAdditionalEntryPoint(record, sourceIndex) {
// This is an external import. Check if it will be a "require()" call.
if record.Kind == ast.ImportRequire || !c.options.OutputFormat.KeepES6ImportExportSyntax() ||
(record.Kind == ast.ImportDynamic && c.options.UnsupportedJSFeatures.Has(compat.DynamicImport)) {
Expand Down Expand Up @@ -2814,7 +2814,7 @@ func (c *linkerContext) markFileReachableForCodeSplitting(sourceIndex uint32, en

// Traverse into all imported files
for _, record := range repr.AST.ImportRecords {
if record.SourceIndex.IsValid() && !c.isExternalDynamicImport(&record, sourceIndex) {
if record.SourceIndex.IsValid() && !c.isImportOfAdditionalEntryPoint(&record, sourceIndex) {
c.markFileReachableForCodeSplitting(record.SourceIndex.GetIndex(), entryPointBit, distanceFromEntryPoint)
}
}
Expand Down Expand Up @@ -2903,8 +2903,10 @@ func (c *linkerContext) markFileLiveForTreeShaking(sourceIndex uint32) {
}
}

func (c *linkerContext) isExternalDynamicImport(record *ast.ImportRecord, sourceIndex uint32) bool {
return record.Kind == ast.ImportDynamic && c.graph.Files[record.SourceIndex.GetIndex()].IsEntryPoint() && record.SourceIndex.GetIndex() != sourceIndex
// isImportOfAdditionalEntryPoint reports whether this import record references
// a file that is itself an entry point other than the importing file. Only
// "import()" expressions and "new URL(..., import.meta.url)" references can
// create such additional entry points. The linker treats these imports as
// external so the importing chunk points at the other entry point's output
// chunk instead of pulling its contents in.
func (c *linkerContext) isImportOfAdditionalEntryPoint(record *ast.ImportRecord, sourceIndex uint32) bool {
	// Only dynamic "import()" and "new URL(...)" import records qualify
	if record.Kind != ast.ImportDynamic && record.Kind != ast.ImportNewURL {
		return false
	}

	// The imported file must itself be an entry point
	if !c.graph.Files[record.SourceIndex.GetIndex()].IsEntryPoint() {
		return false
	}

	// A reference to the importing file itself doesn't count
	return record.SourceIndex.GetIndex() != sourceIndex
}

func (c *linkerContext) markPartLiveForTreeShaking(sourceIndex uint32, partIndex uint32) {
Expand Down Expand Up @@ -3440,8 +3442,8 @@ func (c *linkerContext) findImportedPartsInJSOrder(chunk *chunkInfo) (js []uint3
for _, importRecordIndex := range part.ImportRecordIndices {
record := &repr.AST.ImportRecords[importRecordIndex]
if record.SourceIndex.IsValid() && (record.Kind == ast.ImportStmt || isPartInThisChunk) {
if c.isExternalDynamicImport(record, sourceIndex) {
// Don't follow import() dependencies
if c.isImportOfAdditionalEntryPoint(record, sourceIndex) {
// Don't follow "import()" or "new URL()" dependencies
continue
}
visit(record.SourceIndex.GetIndex())
Expand Down
23 changes: 12 additions & 11 deletions internal/graph/graph.go
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ type entryPointKind uint8
const (
entryPointNone entryPointKind = iota
entryPointUserSpecified
entryPointDynamicImport
entryPointAdditional
)

type LinkerFile struct {
Expand All @@ -54,7 +54,7 @@ type LinkerFile struct {
// This file is an entry point if and only if this is not "entryPointNone".
// Note that dynamically-imported files are allowed to also be specified by
// the user as top-level entry points, so some dynamically-imported files
// may be "entryPointUserSpecified" instead of "entryPointDynamicImport".
// may be "entryPointUserSpecified" instead of "entryPointAdditional".
entryPointKind entryPointKind

// This is true if this file has been marked as live by the tree shaking
Expand Down Expand Up @@ -141,8 +141,8 @@ func CloneLinkerGraph(
// Clone various things since we may mutate them later. Do this in parallel
// for a speedup (around ~2x faster for this function in the three.js
// benchmark on a 6-core laptop).
var dynamicImportEntryPoints []uint32
var dynamicImportEntryPointsMutex sync.Mutex
var additionalEntryPoints []uint32
var additionalEntryPointsMutex sync.Mutex
waitGroup := sync.WaitGroup{}
waitGroup.Add(len(reachableFiles))
stableSourceIndices := make([]uint32, len(inputFiles))
Expand Down Expand Up @@ -185,10 +185,11 @@ func CloneLinkerGraph(
// Add dynamic imports as additional entry points if code splitting is active
if codeSplitting {
for importRecordIndex := range repr.AST.ImportRecords {
if record := &repr.AST.ImportRecords[importRecordIndex]; record.SourceIndex.IsValid() && record.Kind == ast.ImportDynamic {
dynamicImportEntryPointsMutex.Lock()
dynamicImportEntryPoints = append(dynamicImportEntryPoints, record.SourceIndex.GetIndex())
dynamicImportEntryPointsMutex.Unlock()
if record := &repr.AST.ImportRecords[importRecordIndex]; record.SourceIndex.IsValid() &&
(record.Kind == ast.ImportDynamic || record.Kind == ast.ImportNewURL) {
additionalEntryPointsMutex.Lock()
additionalEntryPoints = append(additionalEntryPoints, record.SourceIndex.GetIndex())
additionalEntryPointsMutex.Unlock()

// Remove import assertions for dynamic imports of additional
// entry points so that they don't mess with the run-time behavior.
Expand Down Expand Up @@ -250,11 +251,11 @@ func CloneLinkerGraph(
waitGroup.Wait()

// Process dynamic entry points after merging control flow again
stableEntryPoints := make([]int, 0, len(dynamicImportEntryPoints))
for _, sourceIndex := range dynamicImportEntryPoints {
stableEntryPoints := make([]int, 0, len(additionalEntryPoints))
for _, sourceIndex := range additionalEntryPoints {
if otherFile := &files[sourceIndex]; otherFile.entryPointKind == entryPointNone {
stableEntryPoints = append(stableEntryPoints, int(stableSourceIndices[sourceIndex]))
otherFile.entryPointKind = entryPointDynamicImport
otherFile.entryPointKind = entryPointAdditional
}
}

Expand Down
5 changes: 5 additions & 0 deletions internal/js_ast/js_ast.go
Original file line number Diff line number Diff line change
Expand Up @@ -441,6 +441,7 @@ func (*ERequireString) isExpr() {}
func (*ERequireResolveString) isExpr() {}
func (*EImportString) isExpr() {}
func (*EImportCall) isExpr() {}
func (*ENewURLImportMeta) isExpr() {}

type EArray struct {
Items []Expr
Expand Down Expand Up @@ -755,6 +756,10 @@ type EImportCall struct {
LeadingInteriorComments []Comment
}

// An ENewURLImportMeta is a "new URL('path', import.meta.url)" expression
// whose first argument was a relative path string. The path is stored in an
// import record so that, when bundling with code splitting, the referenced
// file becomes an additional entry point and the path can be rewritten to
// point at that entry point's generated output file.
type ENewURLImportMeta struct {
	// Index into the AST's import record array for the URL's path string.
	ImportRecordIndex uint32
}

type Stmt struct {
Data S
Loc logger.Loc
Expand Down
57 changes: 52 additions & 5 deletions internal/js_parser/js_parser.go
Original file line number Diff line number Diff line change
Expand Up @@ -426,6 +426,7 @@ type optionsThatSupportStructuralEquality struct {
treeShaking bool
dropDebugger bool
mangleQuoted bool
codeSplitting bool
unusedImportFlagsTS config.UnusedImportFlagsTS
useDefineForClassFields config.MaybeBool

Expand Down Expand Up @@ -472,6 +473,7 @@ func OptionsFromConfig(options *config.Options) Options {
treeShaking: options.TreeShaking,
dropDebugger: options.DropDebugger,
mangleQuoted: options.MangleQuoted,
codeSplitting: options.CodeSplitting,
unusedImportFlagsTS: options.UnusedImportFlagsTS,
useDefineForClassFields: options.UseDefineForClassFields,
},
Expand Down Expand Up @@ -7191,11 +7193,11 @@ func extractDeclsForBinding(binding js_ast.Binding, decls []js_ast.Decl) []js_as
return decls
}

func (p *parser) addImportRecord(kind ast.ImportKind, loc logger.Loc, text string, assertions *[]ast.AssertEntry, flags ast.ImportRecordFlags) uint32 {
func (p *parser) addImportRecord(kind ast.ImportKind, pathLoc logger.Loc, text string, assertions *[]ast.AssertEntry, flags ast.ImportRecordFlags) uint32 {
index := uint32(len(p.importRecords))
p.importRecords = append(p.importRecords, ast.ImportRecord{
Kind: kind,
Range: p.source.RangeOfString(loc),
Range: p.source.RangeOfString(pathLoc),
Path: logger.Path{Text: text},
Assertions: assertions,
Flags: flags,
Expand Down Expand Up @@ -14002,6 +14004,7 @@ func (p *parser) visitExprInOut(expr js_ast.Expr, in exprIn) (js_ast.Expr, exprO
}

importRecordIndex := p.addImportRecord(ast.ImportDynamic, arg.Loc, helpers.UTF16ToString(str.Value), assertions, flags)
p.importRecordsForCurrentPart = append(p.importRecordsForCurrentPart, importRecordIndex)
if isAwaitTarget && p.fnOrArrowDataVisit.tryBodyCount != 0 {
record := &p.importRecords[importRecordIndex]
record.Flags |= ast.HandlesImportErrors
Expand All @@ -14011,7 +14014,6 @@ func (p *parser) visitExprInOut(expr js_ast.Expr, in exprIn) (js_ast.Expr, exprO
record.Flags |= ast.HandlesImportErrors
record.ErrorHandlerLoc = p.thenCatchChain.catchLoc
}
p.importRecordsForCurrentPart = append(p.importRecordsForCurrentPart, importRecordIndex)
return js_ast.Expr{Loc: expr.Loc, Data: &js_ast.EImportString{
ImportRecordIndex: importRecordIndex,
LeadingInteriorComments: e.LeadingInteriorComments,
Expand Down Expand Up @@ -14281,12 +14283,12 @@ func (p *parser) visitExprInOut(expr js_ast.Expr, in exprIn) (js_ast.Expr, exprO
}

importRecordIndex := p.addImportRecord(ast.ImportRequireResolve, e.Args[0].Loc, helpers.UTF16ToString(str.Value), nil, 0)
p.importRecordsForCurrentPart = append(p.importRecordsForCurrentPart, importRecordIndex)
if p.fnOrArrowDataVisit.tryBodyCount != 0 {
record := &p.importRecords[importRecordIndex]
record.Flags |= ast.HandlesImportErrors
record.ErrorHandlerLoc = p.fnOrArrowDataVisit.tryCatchLoc
}
p.importRecordsForCurrentPart = append(p.importRecordsForCurrentPart, importRecordIndex)

// Create a new expression to represent the operation
return js_ast.Expr{Loc: arg.Loc, Data: &js_ast.ERequireResolveString{
Expand Down Expand Up @@ -14393,12 +14395,12 @@ func (p *parser) visitExprInOut(expr js_ast.Expr, in exprIn) (js_ast.Expr, exprO
}

importRecordIndex := p.addImportRecord(ast.ImportRequire, arg.Loc, helpers.UTF16ToString(str.Value), nil, 0)
p.importRecordsForCurrentPart = append(p.importRecordsForCurrentPart, importRecordIndex)
if p.fnOrArrowDataVisit.tryBodyCount != 0 {
record := &p.importRecords[importRecordIndex]
record.Flags |= ast.HandlesImportErrors
record.ErrorHandlerLoc = p.fnOrArrowDataVisit.tryCatchLoc
}
p.importRecordsForCurrentPart = append(p.importRecordsForCurrentPart, importRecordIndex)

// Create a new expression to represent the operation
return js_ast.Expr{Loc: expr.Loc, Data: &js_ast.ERequireString{
Expand Down Expand Up @@ -14468,6 +14470,51 @@ func (p *parser) visitExprInOut(expr js_ast.Expr, in exprIn) (js_ast.Expr, exprO

p.maybeMarkKnownGlobalConstructorAsPure(e)

// Recognize "new URL('./path', import.meta.url)"
if p.options.mode == config.ModeBundle && p.options.outputFormat == config.FormatESModule && len(e.Args) == 2 && !p.isControlFlowDead {
if id, ok := e.Target.Data.(*js_ast.EIdentifier); ok {
if symbol := &p.symbols[id.Ref.InnerIndex]; symbol.Kind == js_ast.SymbolUnbound && symbol.OriginalName == "URL" {
if dot, ok := e.Args[1].Data.(*js_ast.EDot); ok && dot.Name == "url" {
if _, ok := dot.Target.Data.(*js_ast.EImportMeta); ok {
// Support "new URL(a ? './b' : './c', import.meta.url)"
return p.maybeTransposeIfExprChain(e.Args[0], func(arg js_ast.Expr) js_ast.Expr {
if str, ok := arg.Data.(*js_ast.EString); ok {
if path := helpers.UTF16ToString(str.Value); strings.HasPrefix(path, "./") || strings.HasPrefix(path, "../") {
if !p.options.codeSplitting {
p.log.AddID(logger.MsgID_Bundler_NewURLImportMeta, logger.Warning, &p.tracker,
logger.Range{Loc: expr.Loc, Len: e.CloseParenLoc.Start + 1 - expr.Loc.Start},
"The \"new URL(..., import.meta.url)\" syntax won't be bundled without code splitting enabled")
} else {
importRecordIndex := p.addImportRecord(ast.ImportNewURL, arg.Loc, path, nil, 0)
p.importRecordsForCurrentPart = append(p.importRecordsForCurrentPart, importRecordIndex)
if p.fnOrArrowDataVisit.tryBodyCount != 0 {
record := &p.importRecords[importRecordIndex]
record.Flags |= ast.HandlesImportErrors
record.ErrorHandlerLoc = p.fnOrArrowDataVisit.tryCatchLoc
}
return js_ast.Expr{Loc: expr.Loc, Data: &js_ast.ENewURLImportMeta{ImportRecordIndex: importRecordIndex}}
}
} else {
p.log.AddID(logger.MsgID_Bundler_NewURLImportMeta, logger.Debug, &p.tracker, p.source.RangeOfString(arg.Loc),
fmt.Sprintf("Ignoring new URL of %q because it does not begin with \"./\" or \"../\"", path))
}
}

importMetaURL := *dot
return js_ast.Expr{Loc: expr.Loc, Data: &js_ast.ENew{
Target: js_ast.Expr{Loc: e.Target.Loc, Data: &js_ast.EIdentifier{Ref: id.Ref}},
Args: []js_ast.Expr{
arg,
{Loc: e.Args[0].Loc, Data: &importMetaURL},
},
}}
}), exprOut{}
}
}
}
}
}

case *js_ast.EArrow:
asyncArrowNeedsToBeLowered := e.IsAsync && p.options.unsupportedJSFeatures.Has(compat.AsyncAwait)
oldFnOrArrowData := p.fnOrArrowDataVisit
Expand Down
11 changes: 11 additions & 0 deletions internal/js_printer/js_printer.go
Original file line number Diff line number Diff line change
Expand Up @@ -1933,6 +1933,17 @@ func (p *printer) printExpr(expr js_ast.Expr, level js_ast.L, flags printExprFla
p.print(")")
}

case *js_ast.ENewURLImportMeta:
record := &p.importRecords[e.ImportRecordIndex]
p.printSpaceBeforeIdentifier()
p.addSourceMapping(expr.Loc)
p.print("new URL(")
p.addSourceMapping(record.Range.Loc)
p.printQuotedUTF8(record.Path.Text, true /* allowBacktick */)
p.print(",")
p.printSpace()
p.print("import.meta.url)")

case *js_ast.EDot:
wrap := false
if e.OptionalChain == js_ast.OptionalChainNone {
Expand Down
5 changes: 5 additions & 0 deletions internal/logger/msg_ids.go
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@ const (
MsgID_Bundler_IgnoredDynamicImport
MsgID_Bundler_ImportIsUndefined
MsgID_Bundler_RequireResolveNotExternal
MsgID_Bundler_NewURLImportMeta

// Source maps
MsgID_SourceMap_InvalidSourceMappings
Expand Down Expand Up @@ -172,6 +173,8 @@ func StringToMsgIDs(str string, logLevel LogLevel, overrides map[MsgID]LogLevel)
overrides[MsgID_Bundler_ImportIsUndefined] = logLevel
case "require-resolve-not-external":
overrides[MsgID_Bundler_RequireResolveNotExternal] = logLevel
case "new-url-import-meta":
overrides[MsgID_Bundler_NewURLImportMeta] = logLevel

// Source maps
case "invalid-source-mappings":
Expand Down Expand Up @@ -286,6 +289,8 @@ func MsgIDToString(id MsgID) string {
return "import-is-undefined"
case MsgID_Bundler_RequireResolveNotExternal:
return "require-resolve-not-external"
case MsgID_Bundler_NewURLImportMeta:
return "new-url-import-meta"

// Source maps
case MsgID_SourceMap_InvalidSourceMappings:
Expand Down
1 change: 1 addition & 0 deletions lib/shared/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -359,6 +359,7 @@ export type ImportKind =
| 'require-call'
| 'dynamic-import'
| 'require-resolve'
| 'new-url'

// CSS
| 'import-rule'
Expand Down

0 comments on commit 0c96ea9

Please sign in to comment.