fix #2071: remap bytesInOutput for substitutions
closes #2091
evanw committed Dec 14, 2022
1 parent 0e12657 commit a375b37
Showing 4 changed files with 89 additions and 44 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -26,6 +26,10 @@
}
```

* Fix byte counts in metafile regarding references to other output files ([#2071](https://github.com/evanw/esbuild/issues/2071))

Previously, files that contained references to other output files had slightly incorrect metadata for the byte counts of input files that contributed to that output file. For example, if `app.js` imports `image.png` using the file loader and esbuild generates `out.js` and `image-LSAMBFUD.png`, the metadata for how many bytes of `out.js` came from `app.js` was slightly off (the metadata for the total byte count of `out.js` was still correct). This is because esbuild substitutes the final paths for references between output files toward the end of the build to handle cyclic references, and the byte counts need to be adjusted as well during that path substitution. This release fixes these byte counts (specifically the `bytesInOutput` values).
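
For example, with `metafile` enabled, the per-input byte counts can be inspected from the JavaScript API. This is only a rough sketch of the scenario described above; the file names and output path are placeholders, not part of the original entry:

```js
const esbuild = require('esbuild')

// Illustrative setup: "app.js" imports "image.png" via the file loader
esbuild.build({
  entryPoints: ['app.js'],
  bundle: true,
  outfile: 'out.js',
  loader: { '.png': 'file' },
  metafile: true,
}).then(result => {
  // "bytesInOutput" is how many bytes of "out.js" come from "app.js"
  const inputs = result.metafile.outputs['out.js'].inputs
  console.log(inputs['app.js'].bytesInOutput)
})
```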

* The alias feature now strips a trailing slash ([#2730](https://github.com/evanw/esbuild/issues/2730))

People sometimes add a trailing slash to the name of one of node's built-in modules to force node to import from the file system instead of importing the built-in module. For example, importing `util` imports node's built-in module called `util`, but importing `util/` tries to find a package called `util` on the file system. Previously, attempting to use esbuild's package alias feature to replace imports to `util` with a specific file would fail because the file path would also gain a trailing slash (e.g. mapping `util` to `./file.js` turned `util/` into `./file.js/`). With this release, esbuild now omits the path suffix if it's a single trailing slash, which allows you to successfully apply aliases to these import paths.
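
As a minimal sketch of the alias behavior described above (the entry point and file path here are just examples):

```js
const esbuild = require('esbuild')

// Map imports of "util" (and now also "util/") to a local file
esbuild.build({
  entryPoints: ['app.js'],
  bundle: true,
  outfile: 'out.js',
  alias: { 'util': './file.js' },
}).catch(() => process.exit(1))
```

With this change, an import of `util/` in the example resolves to `./file.js` rather than the broken path `./file.js/`.
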
113 changes: 77 additions & 36 deletions internal/bundler/linker.go
@@ -614,7 +614,7 @@ func (c *linkerContext) generateChunksInParallel(additionalFiles []graph.OutputF
// Path substitution for the JSON metadata
var jsonMetadataChunk string
if c.options.NeedsMetafile {
jsonMetadataChunkPieces := c.breakOutputIntoPieces(chunk.jsonMetadataChunkCallback(len(outputContents)))
jsonMetadataChunkPieces := c.breakJoinerIntoPieces(chunk.jsonMetadataChunkCallback(len(outputContents)))
jsonMetadataChunkBytes, _ := c.substituteFinalPaths(jsonMetadataChunkPieces, func(finalRelPathForImport string) string {
return c.res.PrettyPath(logger.Path{Text: c.fs.Join(c.options.AbsOutputDir, finalRelPathForImport), Namespace: "file"})
})
@@ -704,6 +704,37 @@ func (c *linkerContext) substituteFinalPaths(
return
}

func (c *linkerContext) accurateFinalByteCount(output intermediateOutput, chunkFinalRelDir string) int {
count := 0

// Note: The paths generated here must match "substituteFinalPaths" above
for _, piece := range output.pieces {
count += len(piece.data)

switch piece.kind {
case outputPieceAssetIndex:
file := c.graph.Files[piece.index]
if len(file.InputFile.AdditionalFiles) != 1 {
panic("Internal error")
}
relPath, _ := c.fs.Rel(c.options.AbsOutputDir, file.InputFile.AdditionalFiles[0].AbsPath)

// Make sure to always use forward slashes, even on Windows
relPath = strings.ReplaceAll(relPath, "\\", "/")

importPath := c.pathBetweenChunks(chunkFinalRelDir, relPath)
count += len(importPath)

case outputPieceChunkIndex:
chunk := c.chunks[piece.index]
importPath := c.pathBetweenChunks(chunkFinalRelDir, chunk.finalRelPath)
count += len(importPath)
}
}

return count
}

func (c *linkerContext) pathBetweenChunks(fromRelDir string, toRelPath string) string {
// Join with the public path if it has been configured
if c.options.PublicPath != "" {
@@ -5036,12 +5067,12 @@ func (c *linkerContext) generateChunkJS(chunkIndex int, chunkWaitGroup *sync.Wai
var compileResultsForSourceMap []compileResultForSourceMap
var legalCommentList []string
var metaOrder []uint32
var metaByteCount map[string]int
var metaBytes map[uint32][][]byte
legalCommentSet := make(map[string]bool)
prevFileNameComment := uint32(0)
if c.options.NeedsMetafile {
metaOrder = make([]uint32, 0, len(compileResults))
metaByteCount = make(map[string]int, len(compileResults))
metaBytes = make(map[uint32][][]byte, len(compileResults))
}
for _, compileResult := range compileResults {
isRuntime := compileResult.sourceIndex == runtime.SourceIndex
@@ -5104,13 +5135,11 @@ func (c *linkerContext) generateChunkJS(chunkIndex int, chunkWaitGroup *sync.Wai
// Include this file in the metadata
if c.options.NeedsMetafile {
// Accumulate file sizes since a given file may be split into multiple parts
path := c.graph.Files[compileResult.sourceIndex].InputFile.Source.PrettyPath
if count, ok := metaByteCount[path]; ok {
metaByteCount[path] = count + len(compileResult.JS)
} else {
bytes, ok := metaBytes[compileResult.sourceIndex]
if !ok {
metaOrder = append(metaOrder, compileResult.sourceIndex)
metaByteCount[path] = len(compileResult.JS)
}
metaBytes[compileResult.sourceIndex] = append(bytes, compileResult.JS)
}
}

@@ -5148,7 +5177,7 @@ func (c *linkerContext) generateChunkJS(chunkIndex int, chunkWaitGroup *sync.Wai
}

// The JavaScript contents are done now that the source map comment is in
chunk.intermediateOutput = c.breakOutputIntoPieces(j)
chunk.intermediateOutput = c.breakJoinerIntoPieces(j)
timer.End("Join JavaScript files")

if c.options.SourceMap != config.SourceMapNone {
@@ -5161,20 +5190,30 @@ func (c *linkerContext) generateChunkJS(chunkIndex int, chunkWaitGroup *sync.Wai
// End the metadata lazily. The final output size is not known until the
// final import paths are substituted into the output pieces generated below.
if c.options.NeedsMetafile {
pieces := make([][]intermediateOutput, len(metaOrder))
for i, sourceIndex := range metaOrder {
slices := metaBytes[sourceIndex]
outputs := make([]intermediateOutput, len(slices))
for j, slice := range slices {
outputs[j] = c.breakOutputIntoPieces(slice)
}
pieces[i] = outputs
}
chunk.jsonMetadataChunkCallback = func(finalOutputSize int) helpers.Joiner {
isFirstMeta := true
for _, sourceIndex := range metaOrder {
if isFirstMeta {
isFirstMeta = false
} else {
finalRelDir := c.fs.Dir(chunk.finalRelPath)
for i, sourceIndex := range metaOrder {
if i > 0 {
jMeta.AddString(",")
}
path := c.graph.Files[sourceIndex].InputFile.Source.PrettyPath
extra := c.generateExtraDataForFileJS(sourceIndex)
jMeta.AddString(fmt.Sprintf("\n %s: {\n \"bytesInOutput\": %d\n %s}",
helpers.QuoteForJSON(path, c.options.ASCIIOnly), metaByteCount[path], extra))
count := 0
for _, output := range pieces[i] {
count += c.accurateFinalByteCount(output, finalRelDir)
}
jMeta.AddString(fmt.Sprintf("\n %s: {\n \"bytesInOutput\": %d\n }",
helpers.QuoteForJSON(c.graph.Files[sourceIndex].InputFile.Source.PrettyPath, c.options.ASCIIOnly),
count))
}
if !isFirstMeta {
if len(metaOrder) > 0 {
jMeta.AddString("\n ")
}
jMeta.AddString(fmt.Sprintf("},\n \"bytes\": %d\n }", finalOutputSize))
@@ -5461,7 +5500,6 @@ func (c *linkerContext) generateChunkCSS(chunkIndex int, chunkWaitGroup *sync.Wa
jMeta.AddString("],\n \"inputs\": {")
}
}
isFirstMeta := true

// Concatenate the generated CSS chunks together
var compileResultsForSourceMap []compileResultForSourceMap
@@ -5507,18 +5545,6 @@ func (c *linkerContext) generateChunkCSS(chunkIndex int, chunkWaitGroup *sync.Wa
})
}
}

// Include this file in the metadata
if c.options.NeedsMetafile {
if isFirstMeta {
isFirstMeta = false
} else {
jMeta.AddString(",")
}
jMeta.AddString(fmt.Sprintf("\n %s: {\n \"bytesInOutput\": %d\n }",
helpers.QuoteForJSON(c.graph.Files[compileResult.sourceIndex].InputFile.Source.PrettyPath, c.options.ASCIIOnly),
len(compileResult.CSS)))
}
}

// Make sure the file ends with a newline
@@ -5531,7 +5557,7 @@ func (c *linkerContext) generateChunkCSS(chunkIndex int, chunkWaitGroup *sync.Wa
}

// The CSS contents are done now that the source map comment is in
chunk.intermediateOutput = c.breakOutputIntoPieces(j)
chunk.intermediateOutput = c.breakJoinerIntoPieces(j)
timer.End("Join CSS files")

if c.options.SourceMap != config.SourceMapNone {
@@ -5544,8 +5570,21 @@ func (c *linkerContext) generateChunkCSS(chunkIndex int, chunkWaitGroup *sync.Wa
// End the metadata lazily. The final output size is not known until the
// final import paths are substituted into the output pieces generated below.
if c.options.NeedsMetafile {
pieces := make([]intermediateOutput, len(compileResults))
for i, compileResult := range compileResults {
pieces[i] = c.breakOutputIntoPieces(compileResult.CSS)
}
chunk.jsonMetadataChunkCallback = func(finalOutputSize int) helpers.Joiner {
if !isFirstMeta {
finalRelDir := c.fs.Dir(chunk.finalRelPath)
for i, compileResult := range compileResults {
if i > 0 {
jMeta.AddString(",")
}
jMeta.AddString(fmt.Sprintf("\n %s: {\n \"bytesInOutput\": %d\n }",
helpers.QuoteForJSON(c.graph.Files[compileResult.sourceIndex].InputFile.Source.PrettyPath, c.options.ASCIIOnly),
c.accurateFinalByteCount(pieces[i], finalRelDir)))
}
if len(compileResults) > 0 {
jMeta.AddString("\n ")
}
jMeta.AddString(fmt.Sprintf("},\n \"bytes\": %d\n }", finalOutputSize))
@@ -5632,16 +5671,18 @@ func (c *linkerContext) appendIsolatedHashesForImportedChunks(
hash.Write(chunk.waitForIsolatedHash())
}

func (c *linkerContext) breakOutputIntoPieces(j helpers.Joiner) intermediateOutput {
func (c *linkerContext) breakJoinerIntoPieces(j helpers.Joiner) intermediateOutput {
// Optimization: If there can be no substitutions, just reuse the initial
// joiner that was used when generating the intermediate chunk output
// instead of creating another one and copying the whole file into it.
if !j.Contains(c.uniqueKeyPrefix, c.uniqueKeyPrefixBytes) {
return intermediateOutput{joiner: j}
}
return c.breakOutputIntoPieces(j.Done())
}

func (c *linkerContext) breakOutputIntoPieces(output []byte) intermediateOutput {
var pieces []outputPiece
output := j.Done()
prefix := c.uniqueKeyPrefixBytes
for {
// Scan for the next piece boundary
14 changes: 7 additions & 7 deletions internal/bundler/snapshots/snapshots_default.txt
@@ -3165,7 +3165,7 @@ d {
"bytesInOutput": 101
},
"project/entry.js": {
"bytesInOutput": 242
"bytesInOutput": 233
},
"project/esm.js": {
"bytesInOutput": 21
@@ -3174,7 +3174,7 @@ d {
"bytesInOutput": 24
},
"project/file.file": {
"bytesInOutput": 48
"bytesInOutput": 43
}
},
"bytes": 642
@@ -3234,7 +3234,7 @@ d {
"entryPoint": "project/entry.css",
"inputs": {
"project/entry.css": {
"bytesInOutput": 193
"bytesInOutput": 183
}
},
"bytes": 230
@@ -3351,7 +3351,7 @@ a {
"entryPoint": "project/bytesInOutput should be at least 99 (1).js",
"inputs": {
"project/111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111.file": {
"bytesInOutput": 45
"bytesInOutput": 135
},
"project/bytesInOutput should be at least 99 (1).js": {
"bytesInOutput": 24
@@ -3380,7 +3380,7 @@ a {
"entryPoint": "project/bytesInOutput should be at least 99 (2).js",
"inputs": {
"project/bytesInOutput should be at least 99 (2).js": {
"bytesInOutput": 59
"bytesInOutput": 149
}
},
"bytes": 203
@@ -3396,7 +3396,7 @@ a {
"entryPoint": "project/bytesInOutput should be at least 99 (3).js",
"inputs": {
"project/bytesInOutput should be at least 99 (3).js": {
"bytesInOutput": 55
"bytesInOutput": 143
}
},
"bytes": 197
@@ -3432,7 +3432,7 @@ a {
"entryPoint": "project/bytesInOutput should be at least 99.css",
"inputs": {
"project/bytesInOutput should be at least 99.css": {
"bytesInOutput": 52
"bytesInOutput": 142
}
},
"bytes": 196
2 changes: 1 addition & 1 deletion scripts/js-api-tests.js
@@ -1241,7 +1241,7 @@ body {
assert.deepStrictEqual(json.outputs[outImport2].exports, [])
assert.deepStrictEqual(json.outputs[outChunk].exports, [])

assert.deepStrictEqual(json.outputs[outEntry].inputs, { [inEntry]: { bytesInOutput: 74 } })
assert.deepStrictEqual(json.outputs[outEntry].inputs, { [inEntry]: { bytesInOutput: 66 } })
assert.deepStrictEqual(json.outputs[outImport1].inputs, { [inImport1]: { bytesInOutput: 0 } })
assert.deepStrictEqual(json.outputs[outImport2].inputs, { [inImport2]: { bytesInOutput: 0 } })
assert.deepStrictEqual(json.outputs[outChunk].inputs, { [inShared]: { bytesInOutput: 28 } })