Skip to content

Commit cd3fdcc

Browse files
committed
move chunks array onto linker
1 parent eb0e118 commit cd3fdcc

File tree

1 file changed: +56 additions, −57 deletions

internal/bundler/linker.go

Lines changed: 56 additions & 57 deletions

Original file line number | Diff line number | Diff line change
@@ -38,6 +38,7 @@ type linkerContext struct {
3838
fs fs.FS
3939
res resolver.Resolver
4040
graph graph.LinkerGraph
41+
chunks []chunkInfo
4142

4243
// This helps avoid an infinite loop when matching imports to exports
4344
cycleDetector []importTracker
@@ -313,8 +314,8 @@ func link(
313314
}
314315
}
315316

316-
chunks := c.computeChunks()
317-
c.computeCrossChunkDependencies(chunks)
317+
c.computeChunks()
318+
c.computeCrossChunkDependencies()
318319

319320
// Merge mangled properties before chunks are generated since the names must
320321
// be consistent across all chunks, or the generated code will break
@@ -330,7 +331,7 @@ func link(
330331
// won't hit concurrent map mutation hazards
331332
js_ast.FollowAllSymbols(c.graph.Symbols)
332333

333-
return c.generateChunksInParallel(chunks, additionalFiles)
334+
return c.generateChunksInParallel(additionalFiles)
334335
}
335336

336337
func (c *linkerContext) mangleProps(mangleCache map[string]interface{}) {
@@ -442,7 +443,7 @@ func (c *linkerContext) mangleProps(mangleCache map[string]interface{}) {
442443
// Since that work hasn't been finished yet, cycles in the chunk import graph
443444
// can cause initialization bugs. So let's forbid these cycles for now to guard
444445
// against code splitting bugs that could cause us to generate buggy chunks.
445-
func (c *linkerContext) enforceNoCyclicChunkImports(chunks []chunkInfo) {
446+
func (c *linkerContext) enforceNoCyclicChunkImports() {
446447
var validate func(int, []int)
447448
validate = func(chunkIndex int, path []int) {
448449
for _, otherChunkIndex := range path {
@@ -452,7 +453,7 @@ func (c *linkerContext) enforceNoCyclicChunkImports(chunks []chunkInfo) {
452453
}
453454
}
454455
path = append(path, chunkIndex)
455-
for _, chunkImport := range chunks[chunkIndex].crossChunkImports {
456+
for _, chunkImport := range c.chunks[chunkIndex].crossChunkImports {
456457
// Ignore cycles caused by dynamic "import()" expressions. These are fine
457458
// because they don't necessarily cause initialization order issues and
458459
// they don't indicate a bug in our chunk generation algorithm. They arise
@@ -462,44 +463,44 @@ func (c *linkerContext) enforceNoCyclicChunkImports(chunks []chunkInfo) {
462463
}
463464
}
464465
}
465-
path := make([]int, 0, len(chunks))
466-
for i := range chunks {
466+
path := make([]int, 0, len(c.chunks))
467+
for i := range c.chunks {
467468
validate(i, path)
468469
}
469470
}
470471

471-
func (c *linkerContext) generateChunksInParallel(chunks []chunkInfo, additionalFiles []graph.OutputFile) []graph.OutputFile {
472+
func (c *linkerContext) generateChunksInParallel(additionalFiles []graph.OutputFile) []graph.OutputFile {
472473
c.timer.Begin("Generate chunks")
473474
defer c.timer.End("Generate chunks")
474475

475476
// Generate each chunk on a separate goroutine
476477
generateWaitGroup := sync.WaitGroup{}
477-
generateWaitGroup.Add(len(chunks))
478-
for chunkIndex := range chunks {
479-
switch chunks[chunkIndex].chunkRepr.(type) {
478+
generateWaitGroup.Add(len(c.chunks))
479+
for chunkIndex := range c.chunks {
480+
switch c.chunks[chunkIndex].chunkRepr.(type) {
480481
case *chunkReprJS:
481-
go c.generateChunkJS(chunks, chunkIndex, &generateWaitGroup)
482+
go c.generateChunkJS(chunkIndex, &generateWaitGroup)
482483
case *chunkReprCSS:
483-
go c.generateChunkCSS(chunks, chunkIndex, &generateWaitGroup)
484+
go c.generateChunkCSS(chunkIndex, &generateWaitGroup)
484485
}
485486
}
486-
c.enforceNoCyclicChunkImports(chunks)
487+
c.enforceNoCyclicChunkImports()
487488
generateWaitGroup.Wait()
488489

489490
// Compute the final hashes of each chunk. This can technically be done in
490491
// parallel but it probably doesn't matter so much because we're not hashing
491492
// that much data.
492-
visited := make([]uint32, len(chunks))
493+
visited := make([]uint32, len(c.chunks))
493494
var finalBytes []byte
494-
for chunkIndex := range chunks {
495-
chunk := &chunks[chunkIndex]
495+
for chunkIndex := range c.chunks {
496+
chunk := &c.chunks[chunkIndex]
496497
var hashSubstitution *string
497498

498499
// Only wait for the hash if necessary
499500
if config.HasPlaceholder(chunk.finalTemplate, config.HashPlaceholder) {
500501
// Compute the final hash using the isolated hashes of the dependencies
501502
hash := xxhash.New()
502-
c.appendIsolatedHashesForImportedChunks(hash, chunks, uint32(chunkIndex), visited, ^uint32(chunkIndex))
503+
c.appendIsolatedHashesForImportedChunks(hash, uint32(chunkIndex), visited, ^uint32(chunkIndex))
503504
finalBytes = hash.Sum(finalBytes[:0])
504505
finalString := hashForFileName(finalBytes)
505506
hashSubstitution = &finalString
@@ -514,9 +515,9 @@ func (c *linkerContext) generateChunksInParallel(chunks []chunkInfo, additionalF
514515
// Generate the final output files by joining file pieces together
515516
c.timer.Begin("Generate final output files")
516517
var resultsWaitGroup sync.WaitGroup
517-
results := make([][]graph.OutputFile, len(chunks))
518-
resultsWaitGroup.Add(len(chunks))
519-
for chunkIndex, chunk := range chunks {
518+
results := make([][]graph.OutputFile, len(c.chunks))
519+
resultsWaitGroup.Add(len(c.chunks))
520+
for chunkIndex, chunk := range c.chunks {
520521
go func(chunkIndex int, chunk chunkInfo) {
521522
var outputFiles []graph.OutputFile
522523

@@ -541,7 +542,7 @@ func (c *linkerContext) generateChunksInParallel(chunks []chunkInfo, additionalF
541542

542543
// Path substitution for the chunk itself
543544
finalRelDir := c.fs.Dir(chunk.finalRelPath)
544-
outputContentsJoiner, outputSourceMapShifts := c.substituteFinalPaths(chunks, chunk.intermediateOutput,
545+
outputContentsJoiner, outputSourceMapShifts := c.substituteFinalPaths(chunk.intermediateOutput,
545546
func(finalRelPathForImport string) string {
546547
return c.pathBetweenChunks(finalRelDir, finalRelPathForImport)
547548
})
@@ -613,8 +614,8 @@ func (c *linkerContext) generateChunksInParallel(chunks []chunkInfo, additionalF
613614
// Path substitution for the JSON metadata
614615
var jsonMetadataChunk string
615616
if c.options.NeedsMetafile {
616-
jsonMetadataChunkPieces := c.breakOutputIntoPieces(chunk.jsonMetadataChunkCallback(len(outputContents)), uint32(len(chunks)))
617-
jsonMetadataChunkBytes, _ := c.substituteFinalPaths(chunks, jsonMetadataChunkPieces, func(finalRelPathForImport string) string {
617+
jsonMetadataChunkPieces := c.breakOutputIntoPieces(chunk.jsonMetadataChunkCallback(len(outputContents)))
618+
jsonMetadataChunkBytes, _ := c.substituteFinalPaths(jsonMetadataChunkPieces, func(finalRelPathForImport string) string {
618619
return c.res.PrettyPath(logger.Path{Text: c.fs.Join(c.options.AbsOutputDir, finalRelPathForImport), Namespace: "file"})
619620
})
620621
jsonMetadataChunk = string(jsonMetadataChunkBytes.Done())
@@ -652,7 +653,6 @@ func (c *linkerContext) generateChunksInParallel(chunks []chunkInfo, additionalF
652653
// between import paths), substitute the final import paths in and then join
653654
// everything into a single byte buffer.
654655
func (c *linkerContext) substituteFinalPaths(
655-
chunks []chunkInfo,
656656
intermediateOutput intermediateOutput,
657657
modifyPath func(string) string,
658658
) (j helpers.Joiner, shifts []sourcemap.SourceMapShift) {
@@ -692,7 +692,7 @@ func (c *linkerContext) substituteFinalPaths(
692692
shifts = append(shifts, shift)
693693

694694
case outputPieceChunkIndex:
695-
chunk := chunks[piece.index]
695+
chunk := c.chunks[piece.index]
696696
importPath := modifyPath(chunk.finalRelPath)
697697
j.AddString(importPath)
698698
shift.Before.AdvanceString(chunk.uniqueKey)
@@ -822,12 +822,12 @@ func pathRelativeToOutbase(
822822
return
823823
}
824824

825-
func (c *linkerContext) computeCrossChunkDependencies(chunks []chunkInfo) {
825+
func (c *linkerContext) computeCrossChunkDependencies() {
826826
c.timer.Begin("Compute cross-chunk dependencies")
827827
defer c.timer.End("Compute cross-chunk dependencies")
828828

829829
jsChunks := 0
830-
for _, chunk := range chunks {
830+
for _, chunk := range c.chunks {
831831
if _, ok := chunk.chunkRepr.(*chunkReprJS); ok {
832832
jsChunks++
833833
}
@@ -843,13 +843,13 @@ func (c *linkerContext) computeCrossChunkDependencies(chunks []chunkInfo) {
843843
dynamicImports map[int]bool
844844
}
845845

846-
chunkMetas := make([]chunkMeta, len(chunks))
846+
chunkMetas := make([]chunkMeta, len(c.chunks))
847847

848848
// For each chunk, see what symbols it uses from other chunks. Do this in
849849
// parallel because it's the most expensive part of this function.
850850
waitGroup := sync.WaitGroup{}
851-
waitGroup.Add(len(chunks))
852-
for chunkIndex, chunk := range chunks {
851+
waitGroup.Add(len(c.chunks))
852+
for chunkIndex, chunk := range c.chunks {
853853
go func(chunkIndex int, chunk chunkInfo) {
854854
chunkMeta := &chunkMetas[chunkIndex]
855855
imports := make(map[js_ast.Ref]bool)
@@ -872,7 +872,7 @@ func (c *linkerContext) computeCrossChunkDependencies(chunks []chunkInfo) {
872872
record := &repr.AST.ImportRecords[importRecordIndex]
873873
if record.SourceIndex.IsValid() && c.isExternalDynamicImport(record, sourceIndex) {
874874
otherChunkIndex := c.graph.Files[record.SourceIndex.GetIndex()].EntryPointChunkIndex
875-
record.Path.Text = chunks[otherChunkIndex].uniqueKey
875+
record.Path.Text = c.chunks[otherChunkIndex].uniqueKey
876876
record.SourceIndex = ast.Index32{}
877877
record.Flags |= ast.ShouldNotBeExternalInMetafile
878878

@@ -988,8 +988,8 @@ func (c *linkerContext) computeCrossChunkDependencies(chunks []chunkInfo) {
988988
waitGroup.Wait()
989989

990990
// Mark imported symbols as exported in the chunk from which they are declared
991-
for chunkIndex := range chunks {
992-
chunk := &chunks[chunkIndex]
991+
for chunkIndex := range c.chunks {
992+
chunk := &c.chunks[chunkIndex]
993993
chunkRepr, ok := chunk.chunkRepr.(*chunkReprJS)
994994
if !ok {
995995
continue
@@ -1013,7 +1013,7 @@ func (c *linkerContext) computeCrossChunkDependencies(chunks []chunkInfo) {
10131013
// this entry point, even if there are no imports. We need to make sure
10141014
// these chunks are evaluated for their side effects too.
10151015
if chunk.isEntryPoint {
1016-
for otherChunkIndex, otherChunk := range chunks {
1016+
for otherChunkIndex, otherChunk := range c.chunks {
10171017
if _, ok := otherChunk.chunkRepr.(*chunkReprJS); ok && chunkIndex != otherChunkIndex && otherChunk.entryBits.HasBit(chunk.entryPointBit) {
10181018
imports := chunkRepr.importsFromOtherChunks[uint32(otherChunkIndex)]
10191019
chunkRepr.importsFromOtherChunks[uint32(otherChunkIndex)] = imports
@@ -1042,8 +1042,8 @@ func (c *linkerContext) computeCrossChunkDependencies(chunks []chunkInfo) {
10421042
// Generate cross-chunk exports. These must be computed before cross-chunk
10431043
// imports because of export alias renaming, which must consider all export
10441044
// aliases simultaneously to avoid collisions.
1045-
for chunkIndex := range chunks {
1046-
chunk := &chunks[chunkIndex]
1045+
for chunkIndex := range c.chunks {
1046+
chunk := &c.chunks[chunkIndex]
10471047
chunkRepr, ok := chunk.chunkRepr.(*chunkReprJS)
10481048
if !ok {
10491049
continue
@@ -1078,16 +1078,16 @@ func (c *linkerContext) computeCrossChunkDependencies(chunks []chunkInfo) {
10781078
// Generate cross-chunk imports. These must be computed after cross-chunk
10791079
// exports because the export aliases must already be finalized so they can
10801080
// be embedded in the generated import statements.
1081-
for chunkIndex := range chunks {
1082-
chunk := &chunks[chunkIndex]
1081+
for chunkIndex := range c.chunks {
1082+
chunk := &c.chunks[chunkIndex]
10831083
chunkRepr, ok := chunk.chunkRepr.(*chunkReprJS)
10841084
if !ok {
10851085
continue
10861086
}
10871087

10881088
var crossChunkPrefixStmts []js_ast.Stmt
10891089

1090-
for _, crossChunkImport := range c.sortedCrossChunkImports(chunks, chunkRepr.importsFromOtherChunks) {
1090+
for _, crossChunkImport := range c.sortedCrossChunkImports(chunkRepr.importsFromOtherChunks) {
10911091
switch c.options.OutputFormat {
10921092
case config.FormatESModule:
10931093
var items []js_ast.ClauseItem
@@ -1137,12 +1137,12 @@ func (a crossChunkImportArray) Less(i int, j int) bool {
11371137
}
11381138

11391139
// Sort cross-chunk imports by chunk name for determinism
1140-
func (c *linkerContext) sortedCrossChunkImports(chunks []chunkInfo, importsFromOtherChunks map[uint32]crossChunkImportItemArray) crossChunkImportArray {
1140+
func (c *linkerContext) sortedCrossChunkImports(importsFromOtherChunks map[uint32]crossChunkImportItemArray) crossChunkImportArray {
11411141
result := make(crossChunkImportArray, 0, len(importsFromOtherChunks))
11421142

11431143
for otherChunkIndex, importItems := range importsFromOtherChunks {
11441144
// Sort imports from a single chunk by alias for determinism
1145-
otherChunk := &chunks[otherChunkIndex]
1145+
otherChunk := &c.chunks[otherChunkIndex]
11461146
exportsToOtherChunks := otherChunk.chunkRepr.(*chunkReprJS).exportsToOtherChunks
11471147
for i, item := range importItems {
11481148
importItems[i].exportAlias = exportsToOtherChunks[item.ref]
@@ -3142,7 +3142,7 @@ func (c *linkerContext) findImportedFilesInCSSOrder(entryPoints []uint32) (exter
31423142
return
31433143
}
31443144

3145-
func (c *linkerContext) computeChunks() []chunkInfo {
3145+
func (c *linkerContext) computeChunks() {
31463146
c.timer.Begin("Compute chunks")
31473147
defer c.timer.End("Compute chunks")
31483148

@@ -3358,7 +3358,7 @@ func (c *linkerContext) computeChunks() []chunkInfo {
33583358
})
33593359
}
33603360

3361-
return sortedChunks
3361+
c.chunks = sortedChunks
33623362
}
33633363

33643364
type chunkOrder struct {
@@ -4775,10 +4775,10 @@ func (c *linkerContext) renameSymbolsInChunk(chunk *chunkInfo, filesInOrder []ui
47754775
return r
47764776
}
47774777

4778-
func (c *linkerContext) generateChunkJS(chunks []chunkInfo, chunkIndex int, chunkWaitGroup *sync.WaitGroup) {
4778+
func (c *linkerContext) generateChunkJS(chunkIndex int, chunkWaitGroup *sync.WaitGroup) {
47794779
defer c.recoverInternalError(chunkWaitGroup, runtime.SourceIndex)
47804780

4781-
chunk := &chunks[chunkIndex]
4781+
chunk := &c.chunks[chunkIndex]
47824782

47834783
timer := c.timer.Fork()
47844784
if timer != nil {
@@ -4855,7 +4855,7 @@ func (c *linkerContext) generateChunkJS(chunks []chunkInfo, chunkIndex int, chun
48554855
for i, chunkImport := range chunk.crossChunkImports {
48564856
crossChunkImportRecords[i] = ast.ImportRecord{
48574857
Kind: chunkImport.importKind,
4858-
Path: logger.Path{Text: chunks[chunkImport.chunkIndex].uniqueKey},
4858+
Path: logger.Path{Text: c.chunks[chunkImport.chunkIndex].uniqueKey},
48594859
Flags: ast.ShouldNotBeExternalInMetafile,
48604860
}
48614861
}
@@ -5027,7 +5027,7 @@ func (c *linkerContext) generateChunkJS(chunks []chunkInfo, chunkIndex int, chun
50275027
jMeta.AddString(fmt.Sprintf(" \"entryPoint\": %s,\n", helpers.QuoteForJSON(entryPoint, c.options.ASCIIOnly)))
50285028
}
50295029
if chunkRepr.hasCSSChunk {
5030-
jMeta.AddString(fmt.Sprintf(" \"cssBundle\": %s,\n", helpers.QuoteForJSON(chunks[chunkRepr.cssChunkIndex].uniqueKey, c.options.ASCIIOnly)))
5030+
jMeta.AddString(fmt.Sprintf(" \"cssBundle\": %s,\n", helpers.QuoteForJSON(c.chunks[chunkRepr.cssChunkIndex].uniqueKey, c.options.ASCIIOnly)))
50315031
}
50325032
jMeta.AddString(" \"inputs\": {")
50335033
}
@@ -5148,7 +5148,7 @@ func (c *linkerContext) generateChunkJS(chunks []chunkInfo, chunkIndex int, chun
51485148
}
51495149

51505150
// The JavaScript contents are done now that the source map comment is in
5151-
chunk.intermediateOutput = c.breakOutputIntoPieces(j, uint32(len(chunks)))
5151+
chunk.intermediateOutput = c.breakOutputIntoPieces(j)
51525152
timer.End("Join JavaScript files")
51535153

51545154
if c.options.SourceMap != config.SourceMapNone {
@@ -5261,10 +5261,10 @@ type compileResultCSS struct {
52615261
hasCharset bool
52625262
}
52635263

5264-
func (c *linkerContext) generateChunkCSS(chunks []chunkInfo, chunkIndex int, chunkWaitGroup *sync.WaitGroup) {
5264+
func (c *linkerContext) generateChunkCSS(chunkIndex int, chunkWaitGroup *sync.WaitGroup) {
52655265
defer c.recoverInternalError(chunkWaitGroup, runtime.SourceIndex)
52665266

5267-
chunk := &chunks[chunkIndex]
5267+
chunk := &c.chunks[chunkIndex]
52685268

52695269
timer := c.timer.Fork()
52705270
if timer != nil {
@@ -5531,7 +5531,7 @@ func (c *linkerContext) generateChunkCSS(chunks []chunkInfo, chunkIndex int, chu
55315531
}
55325532

55335533
// The CSS contents are done now that the source map comment is in
5534-
chunk.intermediateOutput = c.breakOutputIntoPieces(j, uint32(len(chunks)))
5534+
chunk.intermediateOutput = c.breakOutputIntoPieces(j)
55355535
timer.End("Join CSS files")
55365536

55375537
if c.options.SourceMap != config.SourceMapNone {
@@ -5592,7 +5592,6 @@ func maybeAppendLegalComments(
55925592

55935593
func (c *linkerContext) appendIsolatedHashesForImportedChunks(
55945594
hash hash.Hash,
5595-
chunks []chunkInfo,
55965595
chunkIndex uint32,
55975596
visited []uint32,
55985597
visitedKey uint32,
@@ -5605,11 +5604,11 @@ func (c *linkerContext) appendIsolatedHashesForImportedChunks(
56055604
return
56065605
}
56075606
visited[chunkIndex] = visitedKey
5608-
chunk := &chunks[chunkIndex]
5607+
chunk := &c.chunks[chunkIndex]
56095608

56105609
// Visit the other chunks that this chunk imports before visiting this chunk
56115610
for _, chunkImport := range chunk.crossChunkImports {
5612-
c.appendIsolatedHashesForImportedChunks(hash, chunks, chunkImport.chunkIndex, visited, visitedKey)
5611+
c.appendIsolatedHashesForImportedChunks(hash, chunkImport.chunkIndex, visited, visitedKey)
56135612
}
56145613

56155614
// Mix in hashes for referenced asset paths (i.e. the "file" loader)
@@ -5633,7 +5632,7 @@ func (c *linkerContext) appendIsolatedHashesForImportedChunks(
56335632
hash.Write(chunk.waitForIsolatedHash())
56345633
}
56355634

5636-
func (c *linkerContext) breakOutputIntoPieces(j helpers.Joiner, chunkCount uint32) intermediateOutput {
5635+
func (c *linkerContext) breakOutputIntoPieces(j helpers.Joiner) intermediateOutput {
56375636
// Optimization: If there can be no substitutions, just reuse the initial
56385637
// joiner that was used when generating the intermediate chunk output
56395638
// instead of creating another one and copying the whole file into it.
@@ -5680,7 +5679,7 @@ func (c *linkerContext) breakOutputIntoPieces(j helpers.Joiner, chunkCount uint3
56805679
}
56815680

56825681
case outputPieceChunkIndex:
5683-
if index >= chunkCount {
5682+
if index >= uint32(len(c.chunks)) {
56845683
boundary = -1
56855684
}
56865685

Comments (0)