gopls/internal/lsp/cache: move purgeFuncBodies into the parse cache
When working in a package, we must repeatedly rebuild package handles,
which requires parsing with purged func bodies. Although purging func
bodies makes parsing faster, such parsing is still expensive enough to
warrant caching the results.

Move the 'purgeFuncBodies' field into the parse cache.

For golang/go#61207

Change-Id: I90575e5b6be7181743e8376c24312115a1029188
Reviewed-on: https://go-review.googlesource.com/c/tools/+/503440
gopls-CI: kokoro <noreply+kokoro@google.com>
Reviewed-by: Alan Donovan <adonovan@google.com>
TryBot-Result: Gopher Robot <gobot@golang.org>
Run-TryBot: Robert Findley <rfindley@google.com>
findleyr committed Jul 24, 2023
1 parent 3577700 commit 4f74786
Showing 12 changed files with 54 additions and 52 deletions.
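The heart of the change is visible in gopls/internal/lsp/cache/parse_cache.go below: the cache key now records whether function bodies were purged before parsing, so purged and unpurged parses of the same file are cached independently, and every caller passes an explicit purgeFuncBodies argument. As orientation, here is a minimal, hypothetical sketch of that idea. It is not the gopls implementation (which adds promises, eviction, and FileSet offset management); miniParseCache, purge, and the src parameter are invented for illustration.

```go
package sketch

import (
	"go/ast"
	"go/parser"
	"go/token"
	"sync"
)

// parseKey mirrors the shape of the real key below: the same URI and mode
// with a different purgeFuncBodies setting yields a distinct cache entry.
type parseKey struct {
	uri             string
	mode            parser.Mode
	purgeFuncBodies bool
}

// miniParseCache is a deliberately simplified stand-in for gopls's
// parseCache: no promises, no eviction, no memory budgeting.
type miniParseCache struct {
	mu sync.Mutex
	m  map[parseKey]*ast.File
}

// parse returns the cached syntax tree for (uri, mode, purgeFuncBodies),
// parsing src on a miss.
func (c *miniParseCache) parse(uri string, src []byte, mode parser.Mode, purgeFuncBodies bool) (*ast.File, error) {
	key := parseKey{uri: uri, mode: mode, purgeFuncBodies: purgeFuncBodies}

	c.mu.Lock()
	f, ok := c.m[key]
	c.mu.Unlock()
	if ok {
		return f, nil // hit: same file, same mode, same purge setting
	}

	if purgeFuncBodies {
		// gopls applies goplsastutil.PurgeFuncBodies to the source here;
		// purge is a hypothetical stand-in (a sketch of the idea follows
		// the parse.go diff below).
		src = purge(src)
	}
	f, err := parser.ParseFile(token.NewFileSet(), uri, src, mode)
	if err != nil {
		return nil, err
	}

	c.mu.Lock()
	if c.m == nil {
		c.m = make(map[parseKey]*ast.File)
	}
	c.m[key] = f
	c.mu.Unlock()
	return f, nil
}

func purge(src []byte) []byte { return src } // placeholder only
```

Because purgeFuncBodies is part of the key, the purged parses needed while rebuilding package handles can live in the cache alongside ordinary full parses of the same files.
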
2 changes: 1 addition & 1 deletion gopls/internal/lsp/cache/analysis.go
@@ -754,7 +754,7 @@ func (an *analysisNode) run(ctx context.Context) (*analyzeSummary, error) {
// as cached ASTs require the global FileSet.
// ast.Object resolution is unfortunately an implied part of the
// go/analysis contract.
- pgf, err := parseGoImpl(ctx, an.fset, fh, source.ParseFull&^source.SkipObjectResolution)
+ pgf, err := parseGoImpl(ctx, an.fset, fh, source.ParseFull&^source.SkipObjectResolution, false)
parsed[i] = pgf
return err
})
18 changes: 6 additions & 12 deletions gopls/internal/lsp/cache/check.go
@@ -22,7 +22,6 @@ import (
"golang.org/x/mod/module"
"golang.org/x/sync/errgroup"
"golang.org/x/tools/go/ast/astutil"
- goplsastutil "golang.org/x/tools/gopls/internal/astutil"
"golang.org/x/tools/gopls/internal/bug"
"golang.org/x/tools/gopls/internal/lsp/filecache"
"golang.org/x/tools/gopls/internal/lsp/protocol"
@@ -611,7 +610,7 @@ func (b *typeCheckBatch) checkPackageForImport(ctx context.Context, ph *packageH
for i, fh := range ph.localInputs.compiledGoFiles {
i, fh := i, fh
group.Go(func() error {
- pgf, err := parseGoImpl(ctx, b.fset, fh, parser.SkipObjectResolution)
+ pgf, err := parseGoImpl(ctx, b.fset, fh, parser.SkipObjectResolution, false)
pgfs[i] = pgf
return err
})
@@ -1233,14 +1232,9 @@ func (s *snapshot) typerefData(ctx context.Context, id PackageID, imports map[Im
bug.Reportf("internal error reading typerefs data: %v", err)
}

- pgfs := make([]*source.ParsedGoFile, len(cgfs))
- for i, fh := range cgfs {
- content, err := fh.Content()
- if err != nil {
- return nil, err
- }
- content = goplsastutil.PurgeFuncBodies(content)
- pgfs[i], _ = ParseGoSrc(ctx, token.NewFileSet(), fh.URI(), content, source.ParseFull&^parser.ParseComments)
+ pgfs, err := s.view.parseCache.parseFiles(ctx, token.NewFileSet(), source.ParseFull&^parser.ParseComments, true, cgfs...)
+ if err != nil {
+ return nil, err
}
data := typerefs.Encode(pgfs, id, imports)

@@ -1495,11 +1489,11 @@ func doTypeCheck(ctx context.Context, b *typeCheckBatch, inputs typeCheckInputs)
// Collect parsed files from the type check pass, capturing parse errors from
// compiled files.
var err error
- pkg.goFiles, err = b.parseCache.parseFiles(ctx, b.fset, source.ParseFull, inputs.goFiles...)
+ pkg.goFiles, err = b.parseCache.parseFiles(ctx, b.fset, source.ParseFull, false, inputs.goFiles...)
if err != nil {
return nil, err
}
- pkg.compiledGoFiles, err = b.parseCache.parseFiles(ctx, b.fset, source.ParseFull, inputs.compiledGoFiles...)
+ pkg.compiledGoFiles, err = b.parseCache.parseFiles(ctx, b.fset, source.ParseFull, false, inputs.compiledGoFiles...)
if err != nil {
return nil, err
}
4 changes: 3 additions & 1 deletion gopls/internal/lsp/cache/errors.go
@@ -518,12 +518,14 @@ func parseGoListImportCycleError(ctx context.Context, e packages.Error, m *sourc
// to use in a list of file of a package, for example.
//
// It returns an error if the file could not be read.
+ //
+ // TODO(rfindley): eliminate this helper.
func parseGoURI(ctx context.Context, fs source.FileSource, uri span.URI, mode parser.Mode) (*source.ParsedGoFile, error) {
fh, err := fs.ReadFile(ctx, uri)
if err != nil {
return nil, err
}
- return parseGoImpl(ctx, token.NewFileSet(), fh, mode)
+ return parseGoImpl(ctx, token.NewFileSet(), fh, mode, false)
}

// parseModURI is a helper to parse the Mod file at the given URI from the file
2 changes: 1 addition & 1 deletion gopls/internal/lsp/cache/mod_tidy.go
@@ -486,7 +486,7 @@ func missingModuleForImport(pgf *source.ParsedGoFile, imp *ast.ImportSpec, req *
//
// TODO(rfindley): this should key off source.ImportPath.
func parseImports(ctx context.Context, s *snapshot, files []source.FileHandle) (map[string]bool, error) {
- pgfs, err := s.view.parseCache.parseFiles(ctx, token.NewFileSet(), source.ParseHeader, files...)
+ pgfs, err := s.view.parseCache.parseFiles(ctx, token.NewFileSet(), source.ParseHeader, false, files...)
if err != nil { // e.g. context cancellation
return nil, err
}
12 changes: 8 additions & 4 deletions gopls/internal/lsp/cache/parse.go
@@ -15,6 +15,7 @@ import (
"path/filepath"
"reflect"

+ goplsastutil "golang.org/x/tools/gopls/internal/astutil"
"golang.org/x/tools/gopls/internal/lsp/protocol"
"golang.org/x/tools/gopls/internal/lsp/safetoken"
"golang.org/x/tools/gopls/internal/lsp/source"
@@ -27,15 +28,15 @@ import (
// ParseGo parses the file whose contents are provided by fh, using a cache.
// The resulting tree may have beeen fixed up.
func (s *snapshot) ParseGo(ctx context.Context, fh source.FileHandle, mode parser.Mode) (*source.ParsedGoFile, error) {
- pgfs, err := s.view.parseCache.parseFiles(ctx, token.NewFileSet(), mode, fh)
+ pgfs, err := s.view.parseCache.parseFiles(ctx, token.NewFileSet(), mode, false, fh)
if err != nil {
return nil, err
}
return pgfs[0], nil
}

// parseGoImpl parses the Go source file whose content is provided by fh.
- func parseGoImpl(ctx context.Context, fset *token.FileSet, fh source.FileHandle, mode parser.Mode) (*source.ParsedGoFile, error) {
+ func parseGoImpl(ctx context.Context, fset *token.FileSet, fh source.FileHandle, mode parser.Mode, purgeFuncBodies bool) (*source.ParsedGoFile, error) {
ext := filepath.Ext(fh.URI().Filename())
if ext != ".go" && ext != "" { // files generated by cgo have no extension
return nil, fmt.Errorf("cannot parse non-Go file %s", fh.URI())
@@ -48,14 +49,17 @@ func parseGoImpl(ctx context.Context, fset *token.FileSet, fh source.FileHandle,
if ctx.Err() != nil {
return nil, ctx.Err()
}
- pgf, _ := ParseGoSrc(ctx, fset, fh.URI(), content, mode)
+ pgf, _ := ParseGoSrc(ctx, fset, fh.URI(), content, mode, purgeFuncBodies)
return pgf, nil
}

// ParseGoSrc parses a buffer of Go source, repairing the tree if necessary.
//
// The provided ctx is used only for logging.
- func ParseGoSrc(ctx context.Context, fset *token.FileSet, uri span.URI, src []byte, mode parser.Mode) (res *source.ParsedGoFile, fixes []fixType) {
+ func ParseGoSrc(ctx context.Context, fset *token.FileSet, uri span.URI, src []byte, mode parser.Mode, purgeFuncBodies bool) (res *source.ParsedGoFile, fixes []fixType) {
+ if purgeFuncBodies {
+ src = goplsastutil.PurgeFuncBodies(src)
+ }
ctx, done := event.Start(ctx, "cache.ParseGoSrc", tag.File.Of(uri.Filename()))
defer done()

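For readers unfamiliar with the purge step that ParseGoSrc now performs above: roughly speaking, goplsastutil.PurgeFuncBodies deletes the contents of the outermost {...} regions (chiefly function bodies) from the raw source, so the parser only has real work to do for top-level declarations. The sketch below is a hypothetical approximation of the same idea at the AST level rather than the byte level; it is not the gopls helper, only an illustration of which information survives a purged parse (signatures and declarations, but no bodies).

```go
package sketch

import (
	"bytes"
	"go/ast"
	"go/format"
	"go/parser"
	"go/token"
)

// dropFuncBodies parses src and discards every top-level function body,
// keeping imports, type, const, var, and func declarations, which is
// roughly the information that package-handle construction and the
// typerefs pass consume.
func dropFuncBodies(src []byte) ([]byte, error) {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "src.go", src, parser.SkipObjectResolution)
	if err != nil {
		return nil, err
	}
	for _, decl := range f.Decls {
		if fd, ok := decl.(*ast.FuncDecl); ok {
			fd.Body = nil // the signature survives; the body is dropped
		}
	}
	var buf bytes.Buffer
	if err := format.Node(&buf, fset, f); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}
```

The real helper edits the source bytes before parsing, so the parser never sees the bodies at all; even so, as the commit message notes, these purged parses are expensive enough to be worth caching, which is what the parse_cache.go changes below do.
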
22 changes: 12 additions & 10 deletions gopls/internal/lsp/cache/parse_cache.go
@@ -126,8 +126,9 @@ func (c *parseCache) stop() {

// parseKey uniquely identifies a parsed Go file.
type parseKey struct {
- uri  span.URI
- mode parser.Mode
+ uri             span.URI
+ mode            parser.Mode
+ purgeFuncBodies bool
}

type parseCacheEntry struct {
@@ -145,7 +146,7 @@ type parseCacheEntry struct {
// The resulting slice has an entry for every given file handle, though some
// entries may be nil if there was an error reading the file (in which case the
// resulting error will be non-nil).
- func (c *parseCache) startParse(mode parser.Mode, fhs ...source.FileHandle) ([]*memoize.Promise, error) {
+ func (c *parseCache) startParse(mode parser.Mode, purgeFuncBodies bool, fhs ...source.FileHandle) ([]*memoize.Promise, error) {
c.mu.Lock()
defer c.mu.Unlock()

@@ -173,8 +174,9 @@ func (c *parseCache) startParse(mode parser.Mode, fhs ...source.FileHandle) ([]*
data[i] = content

key := parseKey{
- uri:  fh.URI(),
- mode: mode,
+ uri:             fh.URI(),
+ mode:            mode,
+ purgeFuncBodies: purgeFuncBodies,
}

if e, ok := c.m[key]; ok {
@@ -197,7 +199,7 @@ func (c *parseCache) startParse(mode parser.Mode, fhs ...source.FileHandle) ([]*
// inside of parseGoSrc without exceeding the allocated space.
base, nextBase := c.allocateSpace(2*len(content) + parsePadding)

- pgf, fixes1 := ParseGoSrc(ctx, fileSetWithBase(base), uri, content, mode)
+ pgf, fixes1 := ParseGoSrc(ctx, fileSetWithBase(base), uri, content, mode, purgeFuncBodies)
file := pgf.Tok
if file.Base()+file.Size()+1 > nextBase {
// The parsed file exceeds its allocated space, likely due to multiple
@@ -209,7 +211,7 @@ func (c *parseCache) startParse(mode parser.Mode, fhs ...source.FileHandle) ([]*
// there, as parseGoSrc will repeat them.
actual := file.Base() + file.Size() - base // actual size consumed, after re-parsing
base2, nextBase2 := c.allocateSpace(actual)
- pgf2, fixes2 := ParseGoSrc(ctx, fileSetWithBase(base2), uri, content, mode)
+ pgf2, fixes2 := ParseGoSrc(ctx, fileSetWithBase(base2), uri, content, mode, purgeFuncBodies)

// In golang/go#59097 we observed that this panic condition was hit.
// One bug was found and fixed, but record more information here in
@@ -314,7 +316,7 @@ func (c *parseCache) allocateSpace(size int) (int, int) {
//
// If parseFiles returns an error, it still returns a slice,
// but with a nil entry for each file that could not be parsed.
- func (c *parseCache) parseFiles(ctx context.Context, fset *token.FileSet, mode parser.Mode, fhs ...source.FileHandle) ([]*source.ParsedGoFile, error) {
+ func (c *parseCache) parseFiles(ctx context.Context, fset *token.FileSet, mode parser.Mode, purgeFuncBodies bool, fhs ...source.FileHandle) ([]*source.ParsedGoFile, error) {
pgfs := make([]*source.ParsedGoFile, len(fhs))

// Temporary fall-back for 32-bit systems, where reservedForParsing is too
@@ -324,15 +326,15 @@ func (c *parseCache) parseFiles(ctx context.Context, fset *token.FileSet, mode p
if bits.UintSize == 32 {
for i, fh := range fhs {
var err error
- pgfs[i], err = parseGoImpl(ctx, fset, fh, mode)
+ pgfs[i], err = parseGoImpl(ctx, fset, fh, mode, purgeFuncBodies)
if err != nil {
return pgfs, err
}
}
return pgfs, nil
}

- promises, firstErr := c.startParse(mode, fhs...)
+ promises, firstErr := c.startParse(mode, purgeFuncBodies, fhs...)

// Await all parsing.
var g errgroup.Group
26 changes: 13 additions & 13 deletions gopls/internal/lsp/cache/parse_cache_test.go
@@ -31,12 +31,12 @@ func TestParseCache(t *testing.T) {
fset := token.NewFileSet()

cache := newParseCache(0)
- pgfs1, err := cache.parseFiles(ctx, fset, source.ParseFull, fh)
+ pgfs1, err := cache.parseFiles(ctx, fset, source.ParseFull, false, fh)
if err != nil {
t.Fatal(err)
}
pgf1 := pgfs1[0]
- pgfs2, err := cache.parseFiles(ctx, fset, source.ParseFull, fh)
+ pgfs2, err := cache.parseFiles(ctx, fset, source.ParseFull, false, fh)
pgf2 := pgfs2[0]
if err != nil {
t.Fatal(err)
@@ -50,7 +50,7 @@ func TestParseCache(t *testing.T) {
files := []source.FileHandle{fh}
files = append(files, dummyFileHandles(parseCacheMinFiles-1)...)

- pgfs3, err := cache.parseFiles(ctx, fset, source.ParseFull, files...)
+ pgfs3, err := cache.parseFiles(ctx, fset, source.ParseFull, false, files...)
pgf3 := pgfs3[0]
if pgf3 != pgf1 {
t.Errorf("parseFiles(%q, ...): unexpected cache miss", uri)
@@ -65,13 +65,13 @@ func TestParseCache(t *testing.T) {
// Now overwrite the cache, after which we should get new results.
cache.gcOnce()
files = dummyFileHandles(parseCacheMinFiles)
- _, err = cache.parseFiles(ctx, fset, source.ParseFull, files...)
+ _, err = cache.parseFiles(ctx, fset, source.ParseFull, false, files...)
if err != nil {
t.Fatal(err)
}
// force a GC, which should collect the recently parsed files
cache.gcOnce()
- pgfs4, err := cache.parseFiles(ctx, fset, source.ParseFull, fh)
+ pgfs4, err := cache.parseFiles(ctx, fset, source.ParseFull, false, fh)
if err != nil {
t.Fatal(err)
}
@@ -95,7 +95,7 @@ func TestParseCache_Reparsing(t *testing.T) {

// Parsing should succeed even though we overflow the padding.
cache := newParseCache(0)
- _, err := cache.parseFiles(context.Background(), token.NewFileSet(), source.ParseFull, files...)
+ _, err := cache.parseFiles(context.Background(), token.NewFileSet(), source.ParseFull, false, files...)
if err != nil {
t.Fatal(err)
}
@@ -115,7 +115,7 @@ func TestParseCache_Issue59097(t *testing.T) {

// Parsing should succeed even though we overflow the padding.
cache := newParseCache(0)
- _, err := cache.parseFiles(context.Background(), token.NewFileSet(), source.ParseFull, files...)
+ _, err := cache.parseFiles(context.Background(), token.NewFileSet(), source.ParseFull, false, files...)
if err != nil {
t.Fatal(err)
}
@@ -133,19 +133,19 @@ func TestParseCache_TimeEviction(t *testing.T) {
cache := newParseCache(gcDuration)
cache.stop() // we'll manage GC manually, for testing.

- pgfs0, err := cache.parseFiles(ctx, fset, source.ParseFull, fh, fh)
+ pgfs0, err := cache.parseFiles(ctx, fset, source.ParseFull, false, fh, fh)
if err != nil {
t.Fatal(err)
}

files := dummyFileHandles(parseCacheMinFiles)
- _, err = cache.parseFiles(ctx, fset, source.ParseFull, files...)
+ _, err = cache.parseFiles(ctx, fset, source.ParseFull, false, files...)
if err != nil {
t.Fatal(err)
}

// Even after filling up the 'min' files, we get a cache hit for our original file.
- pgfs1, err := cache.parseFiles(ctx, fset, source.ParseFull, fh, fh)
+ pgfs1, err := cache.parseFiles(ctx, fset, source.ParseFull, false, fh, fh)
if err != nil {
t.Fatal(err)
}
@@ -155,14 +155,14 @@ func TestParseCache_TimeEviction(t *testing.T) {
}

// But after GC, we get a cache miss.
- _, err = cache.parseFiles(ctx, fset, source.ParseFull, files...) // mark dummy files as newer
+ _, err = cache.parseFiles(ctx, fset, source.ParseFull, false, files...) // mark dummy files as newer
if err != nil {
t.Fatal(err)
}
time.Sleep(gcDuration)
cache.gcOnce()

- pgfs2, err := cache.parseFiles(ctx, fset, source.ParseFull, fh, fh)
+ pgfs2, err := cache.parseFiles(ctx, fset, source.ParseFull, false, fh, fh)
if err != nil {
t.Fatal(err)
}
@@ -180,7 +180,7 @@ func TestParseCache_Duplicates(t *testing.T) {
fh := makeFakeFileHandle(uri, []byte("package p\n\nconst _ = \"foo\""))

cache := newParseCache(0)
- pgfs, err := cache.parseFiles(ctx, token.NewFileSet(), source.ParseFull, fh, fh)
+ pgfs, err := cache.parseFiles(ctx, token.NewFileSet(), source.ParseFull, false, fh, fh)
if err != nil {
t.Fatal(err)
}
10 changes: 5 additions & 5 deletions gopls/internal/lsp/cache/snapshot.go
@@ -2432,8 +2432,8 @@ func metadataChanges(ctx context.Context, lockedSnapshot *snapshot, oldFH, newFH

fset := token.NewFileSet()
// Parse headers to compare package names and imports.
- oldHeads, oldErr := lockedSnapshot.view.parseCache.parseFiles(ctx, fset, source.ParseHeader, oldFH)
- newHeads, newErr := lockedSnapshot.view.parseCache.parseFiles(ctx, fset, source.ParseHeader, newFH)
+ oldHeads, oldErr := lockedSnapshot.view.parseCache.parseFiles(ctx, fset, source.ParseHeader, false, oldFH)
+ newHeads, newErr := lockedSnapshot.view.parseCache.parseFiles(ctx, fset, source.ParseHeader, false, newFH)

if oldErr != nil || newErr != nil {
// TODO(rfindley): we can get here if newFH does not exist. There is
@@ -2484,8 +2484,8 @@ func metadataChanges(ctx context.Context, lockedSnapshot *snapshot, oldFH, newFH
// Note: if this affects performance we can probably avoid parsing in the
// common case by first scanning the source for potential comments.
if !invalidate {
- origFulls, oldErr := lockedSnapshot.view.parseCache.parseFiles(ctx, fset, source.ParseFull, oldFH)
- newFulls, newErr := lockedSnapshot.view.parseCache.parseFiles(ctx, fset, source.ParseFull, newFH)
+ origFulls, oldErr := lockedSnapshot.view.parseCache.parseFiles(ctx, fset, source.ParseFull, false, oldFH)
+ newFulls, newErr := lockedSnapshot.view.parseCache.parseFiles(ctx, fset, source.ParseFull, false, newFH)
if oldErr == nil && newErr == nil {
invalidate = magicCommentsChanged(origFulls[0].File, newFulls[0].File)
} else {
@@ -2570,7 +2570,7 @@ func (s *snapshot) BuiltinFile(ctx context.Context) (*source.ParsedGoFile, error
// For the builtin file only, we need syntactic object resolution
// (since we can't type check).
mode := source.ParseFull &^ source.SkipObjectResolution
- pgfs, err := s.view.parseCache.parseFiles(ctx, token.NewFileSet(), mode, fh)
+ pgfs, err := s.view.parseCache.parseFiles(ctx, token.NewFileSet(), mode, false, fh)
if err != nil {
return nil, err
}
4 changes: 1 addition & 3 deletions gopls/internal/lsp/cache/symbols.go
@@ -60,10 +60,8 @@ func (s *snapshot) symbolize(ctx context.Context, uri span.URI) ([]source.Symbol
}

// symbolizeImpl reads and parses a file and extracts symbols from it.
- // It may use a parsed file already present in the cache but
- // otherwise does not populate the cache.
func symbolizeImpl(ctx context.Context, snapshot *snapshot, fh source.FileHandle) ([]source.Symbol, error) {
- pgfs, err := snapshot.view.parseCache.parseFiles(ctx, token.NewFileSet(), source.ParseFull, fh)
+ pgfs, err := snapshot.view.parseCache.parseFiles(ctx, token.NewFileSet(), source.ParseFull, false, fh)
if err != nil {
return nil, err
}
2 changes: 1 addition & 1 deletion gopls/internal/lsp/source/typerefs/pkgrefs_test.go
@@ -295,7 +295,7 @@ func (p *memoizedParser) parse(ctx context.Context, uri span.URI) (*ParsedGoFile
return nil, err
}
content = astutil.PurgeFuncBodies(content)
- pgf, _ := cache.ParseGoSrc(ctx, token.NewFileSet(), uri, content, source.ParseFull)
+ pgf, _ := cache.ParseGoSrc(ctx, token.NewFileSet(), uri, content, source.ParseFull, false)
return pgf, nil
}

2 changes: 1 addition & 1 deletion gopls/internal/lsp/source/typerefs/refs_test.go
@@ -516,7 +516,7 @@ type Z map[ext.A]ext.B
var pgfs []*source.ParsedGoFile
for i, src := range test.srcs {
uri := span.URI(fmt.Sprintf("file:///%d.go", i))
- pgf, _ := cache.ParseGoSrc(ctx, token.NewFileSet(), uri, []byte(src), source.ParseFull)
+ pgf, _ := cache.ParseGoSrc(ctx, token.NewFileSet(), uri, []byte(src), source.ParseFull, false)
if !test.allowErrs && pgf.ParseErr != nil {
t.Fatalf("ParseGoSrc(...) returned parse errors: %v", pgf.ParseErr)
}
2 changes: 2 additions & 0 deletions gopls/internal/lsp/source/types_format.go
@@ -289,6 +289,8 @@ func FormatVarType(ctx context.Context, snapshot Snapshot, srcpkg Package, obj *
return types.TypeString(obj.Type(), qf), nil
}

+ // TODO(rfindley): parsing to produce candidates can be costly; consider
+ // using faster methods.
targetpgf, pos, err := parseFull(ctx, snapshot, srcpkg.FileSet(), obj.Pos())
if err != nil {
return "", err // e.g. ctx cancelled