|
8 | 8 | "slices" |
9 | 9 | "strings" |
10 | 10 | "sync" |
| 11 | + "sync/atomic" |
11 | 12 |
|
12 | 13 | "github.com/go-json-experiment/json" |
13 | 14 | "github.com/microsoft/typescript-go/internal/ast" |
@@ -351,28 +352,22 @@ func (p *Program) BindSourceFiles() { |
351 | 352 | } |
352 | 353 |
|
353 | 354 | func (p *Program) CheckSourceFiles(ctx context.Context, files []*ast.SourceFile) { |
354 | | - wg := core.NewWorkGroup(p.SingleThreaded()) |
355 | | - checkers, done := p.checkerPool.GetAllCheckers(ctx) |
356 | | - defer done() |
357 | | - for _, checker := range checkers { |
358 | | - wg.Queue(func() { |
359 | | - for file := range p.checkerPool.Files(checker) { |
360 | | - if files == nil || slices.Contains(files, file) { |
361 | | - checker.CheckSourceFile(ctx, file) |
362 | | - } |
| 355 | + p.checkerPool.ForEachCheckerParallel(ctx, func(_ int, checker *checker.Checker) { |
| 356 | + for file := range p.checkerPool.Files(checker) { |
| 357 | + if files == nil || slices.Contains(files, file) { |
| 358 | + checker.CheckSourceFile(ctx, file) |
363 | 359 | } |
364 | | - }) |
365 | | - } |
366 | | - wg.RunAndWait() |
| 360 | + } |
| 361 | + }) |
367 | 362 | } |
368 | 363 |
|
// GetTypeChecker returns the type checker associated with the program,
// together with a release function the caller must invoke when done
// with the checker (presumably it returns the checker to the pool —
// confirm against checkerPool.GetChecker).
func (p *Program) GetTypeChecker(ctx context.Context) (*checker.Checker, func()) {
	return p.checkerPool.GetChecker(ctx)
}
373 | 368 |
|
// ForEachCheckerParallel invokes cb once per checker in the program's
// checker pool, passing the checker's index within the pool. As the
// name indicates, callbacks may run concurrently (callers in this file
// write into per-index slices or atomic counters accordingly), so cb
// must be safe for concurrent use.
func (p *Program) ForEachCheckerParallel(ctx context.Context, cb func(idx int, c *checker.Checker)) {
	p.checkerPool.ForEachCheckerParallel(ctx, cb)
}
377 | 372 |
|
378 | 373 | // Return a checker for the given file. We may have multiple checkers in concurrent scenarios and this |
@@ -965,14 +960,12 @@ func (p *Program) GetGlobalDiagnostics(ctx context.Context) []*ast.Diagnostic { |
965 | 960 | return nil |
966 | 961 | } |
967 | 962 |
|
968 | | - var globalDiagnostics []*ast.Diagnostic |
969 | | - checkers, done := p.checkerPool.GetAllCheckers(ctx) |
970 | | - defer done() |
971 | | - for _, checker := range checkers { |
972 | | - globalDiagnostics = append(globalDiagnostics, checker.GetGlobalDiagnostics()...) |
973 | | - } |
| 963 | + globalDiagnostics := make([][]*ast.Diagnostic, p.checkerPool.Count()) |
| 964 | + p.checkerPool.ForEachCheckerParallel(ctx, func(idx int, checker *checker.Checker) { |
| 965 | + globalDiagnostics[idx] = checker.GetGlobalDiagnostics() |
| 966 | + }) |
974 | 967 |
|
975 | | - return SortAndDeduplicateDiagnostics(globalDiagnostics) |
| 968 | + return SortAndDeduplicateDiagnostics(slices.Concat(globalDiagnostics...)) |
976 | 969 | } |
977 | 970 |
|
978 | 971 | func (p *Program) GetDeclarationDiagnostics(ctx context.Context, sourceFile *ast.SourceFile) []*ast.Diagnostic { |
@@ -1033,22 +1026,23 @@ func (p *Program) getSemanticDiagnosticsForFileNotFilter(ctx context.Context, so |
1033 | 1026 | defer done() |
1034 | 1027 | } |
1035 | 1028 | diags := slices.Clip(sourceFile.BindDiagnostics()) |
1036 | | - checkers, closeCheckers := p.checkerPool.GetAllCheckers(ctx) |
1037 | | - defer closeCheckers() |
1038 | 1029 |
|
1039 | 1030 | // Ask for diags from all checkers; checking one file may add diagnostics to other files. |
1040 | 1031 | // These are deduplicated later. |
1041 | | - for _, checker := range checkers { |
| 1032 | + checkerDiags := make([][]*ast.Diagnostic, p.checkerPool.Count()) |
| 1033 | + p.checkerPool.ForEachCheckerParallel(ctx, func(idx int, checker *checker.Checker) { |
1042 | 1034 | if sourceFile == nil || checker == fileChecker { |
1043 | | - diags = append(diags, checker.GetDiagnostics(ctx, sourceFile)...) |
| 1035 | + checkerDiags[idx] = checker.GetDiagnostics(ctx, sourceFile) |
1044 | 1036 | } else { |
1045 | | - diags = append(diags, checker.GetDiagnosticsWithoutCheck(sourceFile)...) |
| 1037 | + checkerDiags[idx] = checker.GetDiagnosticsWithoutCheck(sourceFile) |
1046 | 1038 | } |
1047 | | - } |
| 1039 | + }) |
1048 | 1040 | if ctx.Err() != nil { |
1049 | 1041 | return nil |
1050 | 1042 | } |
1051 | 1043 |
|
| 1044 | + diags = append(diags, slices.Concat(checkerDiags...)...) |
| 1045 | + |
1052 | 1046 | // !!! This should be rewritten to work like getBindAndCheckDiagnosticsForFileNoCache. |
1053 | 1047 |
|
1054 | 1048 | isPlainJS := ast.IsPlainJSFile(sourceFile, compilerOptions.CheckJs) |
@@ -1140,22 +1134,20 @@ func (p *Program) getSuggestionDiagnosticsForFile(ctx context.Context, sourceFil |
1140 | 1134 |
|
1141 | 1135 | diags := slices.Clip(sourceFile.BindSuggestionDiagnostics) |
1142 | 1136 |
|
1143 | | - checkers, closeCheckers := p.checkerPool.GetAllCheckers(ctx) |
1144 | | - defer closeCheckers() |
1145 | | - |
1146 | | - // Ask for diags from all checkers; checking one file may add diagnostics to other files. |
1147 | | - // These are deduplicated later. |
1148 | | - for _, checker := range checkers { |
| 1137 | + checkerDiags := make([][]*ast.Diagnostic, p.checkerPool.Count()) |
| 1138 | + p.checkerPool.ForEachCheckerParallel(ctx, func(idx int, checker *checker.Checker) { |
1149 | 1139 | if sourceFile == nil || checker == fileChecker { |
1150 | | - diags = append(diags, checker.GetSuggestionDiagnostics(ctx, sourceFile)...) |
| 1140 | + checkerDiags[idx] = checker.GetSuggestionDiagnostics(ctx, sourceFile) |
1151 | 1141 | } else { |
1152 | 1142 | // !!! is there any case where suggestion diagnostics are produced in other checkers? |
1153 | 1143 | } |
1154 | | - } |
| 1144 | + }) |
1155 | 1145 | if ctx.Err() != nil { |
1156 | 1146 | return nil |
1157 | 1147 | } |
1158 | 1148 |
|
| 1149 | + diags = append(diags, slices.Concat(checkerDiags...)...) |
| 1150 | + |
1159 | 1151 | return diags |
1160 | 1152 | } |
1161 | 1153 |
|
@@ -1251,32 +1243,28 @@ func (p *Program) SymbolCount() int { |
1251 | 1243 | for _, file := range p.files { |
1252 | 1244 | count += file.SymbolCount |
1253 | 1245 | } |
1254 | | - checkers, done := p.checkerPool.GetAllCheckers(context.Background()) |
1255 | | - defer done() |
1256 | | - for _, checker := range checkers { |
1257 | | - count += int(checker.SymbolCount) |
1258 | | - } |
1259 | | - return count |
| 1246 | + var val atomic.Uint32 |
| 1247 | + val.Store(uint32(count)) |
| 1248 | + p.checkerPool.ForEachCheckerParallel(context.Background(), func(idx int, c *checker.Checker) { |
| 1249 | + val.Add(c.SymbolCount) |
| 1250 | + }) |
| 1251 | + return int(val.Load()) |
1260 | 1252 | } |
1261 | 1253 |
|
1262 | 1254 | func (p *Program) TypeCount() int { |
1263 | | - var count int |
1264 | | - checkers, done := p.checkerPool.GetAllCheckers(context.Background()) |
1265 | | - defer done() |
1266 | | - for _, checker := range checkers { |
1267 | | - count += int(checker.TypeCount) |
1268 | | - } |
1269 | | - return count |
| 1255 | + var val atomic.Uint32 |
| 1256 | + p.checkerPool.ForEachCheckerParallel(context.Background(), func(idx int, c *checker.Checker) { |
| 1257 | + val.Add(c.TypeCount) |
| 1258 | + }) |
| 1259 | + return int(val.Load()) |
1270 | 1260 | } |
1271 | 1261 |
|
1272 | 1262 | func (p *Program) InstantiationCount() int { |
1273 | | - var count int |
1274 | | - checkers, done := p.checkerPool.GetAllCheckers(context.Background()) |
1275 | | - defer done() |
1276 | | - for _, checker := range checkers { |
1277 | | - count += int(checker.TotalInstantiationCount) |
1278 | | - } |
1279 | | - return count |
| 1263 | + var val atomic.Uint32 |
| 1264 | + p.checkerPool.ForEachCheckerParallel(context.Background(), func(idx int, c *checker.Checker) { |
| 1265 | + val.Add(c.TotalInstantiationCount) |
| 1266 | + }) |
| 1267 | + return int(val.Load()) |
1280 | 1268 | } |
1281 | 1269 |
|
1282 | 1270 | func (p *Program) Program() *Program { |
|
0 commit comments