Peter Weinberger has uploaded this change for review.
internal/lsp/semantic: Avoid doing semantic tokens for large files
LSP (and gopls) support both full-file semantic token requests and
requests for just a range, typically roughly what's visible to the user.
It can be slow to produce the full set for a very large file, so
this code now responds with an error if the file is bigger than
100,000 bytes. After getting this error, vscode, at least,
will stop asking for full requests and use range requests.
Alternatively, server capabilities could say gopls never responds to
full-file requests, but doing that doesn't stop vscode from asking for
them. Another possibility would be to fix a time limit (like 8ms) for
how long to spend generating full-file semantic tokens. That's tricky
to get right, but one could instead generate an error when there
are more than 4,000 semantic tokens (on my laptop, that's about 8ms.)
Large files are unusual; a simple size limit seems adequate for now.
Change-Id: Ieea0d16aad6e37cc4f14b1a6a7116a4e41197aae
---
M internal/lsp/cmd/cmd.go
M internal/lsp/cmd/semantictokens.go
M internal/lsp/semantic.go
3 files changed, 23 insertions(+), 11 deletions(-)
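For orientation before reading the diff: the guard boils down to a byte-size check that runs only when no range was supplied, i.e. on a full-file request. A minimal sketch of the idea, with hypothetical names and a stand-in range type rather than the real protocol types:

    package sketch

    import "fmt"

    // fullRequestByteLimit stands in for the CL's 100,000-byte limit on
    // full-file semantic token requests (hypothetical name for this sketch).
    const fullRequestByteLimit = 100000

    // fileRange stands in for protocol.Range; a nil *fileRange means the
    // client asked for tokens over the whole file.
    type fileRange struct{ startLine, endLine uint32 }

    // computeTokens refuses full-file requests on oversized files and would
    // otherwise walk the source to emit tokens.
    func computeTokens(src []byte, rng *fileRange) ([]uint32, error) {
        if rng == nil && len(src) > fullRequestByteLimit {
            // This error is what prompts clients such as vscode to fall
            // back to range requests.
            return nil, fmt.Errorf("too large (%d>%d) for full", len(src), fullRequestByteLimit)
        }
        // ... walk the file (or only the requested range) and emit tokens ...
        return []uint32{}, nil
    }

The actual check in internal/lsp/semantic.go below tests len(pgf.Src) against the limit only when rng is nil.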
diff --git a/internal/lsp/cmd/cmd.go b/internal/lsp/cmd/cmd.go
index 41c2bce..fdaf6d1 100644
--- a/internal/lsp/cmd/cmd.go
+++ b/internal/lsp/cmd/cmd.go
@@ -504,13 +504,9 @@
return file
}
-func (c *connection) semanticTokens(ctx context.Context, file span.URI) (*protocol.SemanticTokens, error) {
- p := &protocol.SemanticTokensParams{
- TextDocument: protocol.TextDocumentIdentifier{
- URI: protocol.URIFromSpanURI(file),
- },
- }
- resp, err := c.Server.SemanticTokensFull(ctx, p)
+func (c *connection) semanticTokens(ctx context.Context, p *protocol.SemanticTokensRangeParams) (*protocol.SemanticTokens, error) {
+ // use range to avoid limits on full
+ resp, err := c.Server.SemanticTokensRange(ctx, p)
if err != nil {
return nil, err
}
diff --git a/internal/lsp/cmd/semantictokens.go b/internal/lsp/cmd/semantictokens.go
index 41e353c..58ae5ab 100644
--- a/internal/lsp/cmd/semantictokens.go
+++ b/internal/lsp/cmd/semantictokens.go
@@ -94,13 +94,24 @@
return file.err
}
- resp, err := conn.semanticTokens(ctx, uri)
+ buf, err := os.ReadFile(args[0])
if err != nil {
return err
}
- buf, err := ioutil.ReadFile(args[0])
+ lines := bytes.Split(buf, []byte{'\n'})
+ p := &protocol.SemanticTokensRangeParams{
+ TextDocument: protocol.TextDocumentIdentifier{
+ URI: protocol.URIFromSpanURI(uri),
+ },
+ Range: protocol.Range{Start: protocol.Position{Line: 0, Character: 0},
+ End: protocol.Position{
+ Line: uint32(len(lines) - 1),
+ Character: uint32(len(lines[len(lines)-1]))},
+ },
+ }
+ resp, err := conn.semanticTokens(ctx, p)
if err != nil {
- log.Fatal(err)
+ return err
}
fset := token.NewFileSet()
f, err := parser.ParseFile(fset, args[0], buf, 0)
diff --git a/internal/lsp/semantic.go b/internal/lsp/semantic.go
index fbca581..2faa863 100644
--- a/internal/lsp/semantic.go
+++ b/internal/lsp/semantic.go
@@ -22,6 +22,8 @@
errors "golang.org/x/xerrors"
)
+const tooLargeForFull int = 100000 // reject full semantic token requests for large files
+
func (s *Server) semanticTokensFull(ctx context.Context, p *protocol.SemanticTokensParams) (*protocol.SemanticTokens, error) {
ret, err := s.computeSemanticTokens(ctx, p.TextDocument, nil)
return ret, err
@@ -68,6 +70,9 @@
if pgf.ParseErr != nil {
return nil, pgf.ParseErr
}
+ if rng == nil && len(pgf.Src) > tooLargeForFull {
+ return nil, errors.Errorf("too large (%d>%d) for full", len(pgf.Src), tooLargeForFull)
+ }
e := &encoded{
ctx: ctx,
pgf: pgf,
@@ -491,7 +496,7 @@
}
span, err := e.pgf.Mapper.RangeSpan(*e.rng)
if err != nil {
- return errors.Errorf("range span error for %s", e.pgf.File.Name)
+ return errors.Errorf("range span (%v) error for %s", err, e.pgf.File.Name)
}
e.end = e.start + token.Pos(span.End().Offset())
e.start += token.Pos(span.Start().Offset())
Attention is currently required from: Peter Weinberger.
Kokoro presubmit build finished with status: FAILURE
Logs at: https://source.cloud.google.com/results/invocations/383b21dd-4476-46ca-86c8-08542ff83972
Patch set 1:gopls-CI -1
Attention is currently required from: Peter Weinberger.
Peter Weinberger uploaded patch set #2 to this change.
internal/lsp/semantic: Avoid doing semantic tokens for large files
LSP (and gopls) support both full-file semantic token requests and
requests for just a range, typically roughly what's visible to the user.
It can be slow to produce the full set for a very large file, so
this code now responds with an error if the file is bigger than
100,000 bytes. After getting this error, vscode, at least,
will stop asking for full requests and use range requests.
Alternatively, server capabilities could say gopls never responds to
full-file requests, but doing that doesn't stop vscode from asking for
them. Another possibility would be to fix a time limit (like 8ms) for
how long to spend generating full-file semantic tokens. That's tricky
to get right, but one could instead generate an error when there
are more than 4,000 semantic tokens (on my laptop, that's about 8ms.)
Large files are unusual; a simple size limit seems adequate for now.
Change-Id: Ieea0d16aad6e37cc4f14b1a6a7116a4e41197aae
---
M internal/lsp/cmd/cmd.go
M internal/lsp/cmd/semantictokens.go
M internal/lsp/semantic.go
3 files changed, 23 insertions(+), 11 deletions(-)
Attention is currently required from: Peter Weinberger.
Kokoro presubmit build finished with status: SUCCESS
Logs at: https://source.cloud.google.com/results/invocations/f228350d-ff54-4865-85a3-3f8035092137
Patch set 2:gopls-CI +1
Patch set 2:Trust +1
Attention is currently required from: Peter Weinberger.
4 comments:
Commit Message:
Patch Set #2, Line 7: Avoid
nit: lowercase
File internal/lsp/cmd/cmd.go:
// use range to avoid limits on full
resp, err := c.Server.SemanticTokensRange(ctx,
do we have to make this change if we just document the limits on the command-line? This seems to complicate things, and I can't imagine that many folks are using gopls semantic tokens on the command-line
File internal/lsp/semantic.go:
Patch Set #2, Line 25: tooLargeForFull
maxFullFileSize?
maybe fmt.Sprintf("semantic tokens: file %s too large for full (%d>%d)", td.URI.Filename(), len(pgf.Src), tooLargeForFull)
Attention is currently required from: Peter Weinberger.
Peter Weinberger uploaded patch set #3 to this change.
internal/lsp/semantic: avoid doing semantic tokens for large files
LSP (and gopls) support both full-file semantic token requests and
requests for just a range, typically roughly what's visible to the user.
It can be slow to produce the full set for a very large file, so
this code now responds with an error if the file is bigger than
100,000 bytes. After getting this error, vscode, at least,
will stop asking for full requests and use range requests.
Alternatively, server capabilities could say gopls never responds to
full-file requests, but doing that doesn't stop vscode from asking for
them. Another possibility would be to fix a time limit (like 8ms) for
how long to spend generating full-file semantic tokens. That's tricky
to get right, but one could instead generate an error when there
are more than 4,000 semantic tokens (on my laptop, that's about 8ms.)
Large files are unusual; a simple size limit seems adequate for now.
Change-Id: Ieea0d16aad6e37cc4f14b1a6a7116a4e41197aae
---
M internal/lsp/cmd/cmd.go
M internal/lsp/cmd/semantictokens.go
M internal/lsp/semantic.go
3 files changed, 25 insertions(+), 11 deletions(-)
Attention is currently required from: Peter Weinberger.
Kokoro presubmit build finished with status: SUCCESS
Logs at: https://source.cloud.google.com/results/invocations/22be8096-106c-4b63-b33c-96439f5ca713
Patch set 3:gopls-CI +1
Attention is currently required from: Rebecca Stambler.
Patch set 3:Trust +1
4 comments:
Commit Message:
Patch Set #2, Line 7: Avoid
nit: lowercase
Done
File internal/lsp/cmd/cmd.go:
// use range to avoid limits on full
resp, err := c.Server.SemanticTokensRange(ctx,
do we have to make this change if we just document the limits on the command-line? This seems to com […]
I use the command line tool to see what the semantic tokens are. For that reason it should mimic the server processing. (What you say is surely true, but I'd like a giant vote.)
File internal/lsp/semantic.go:
Patch Set #2, Line 25: tooLargeForFull
maxFullFileSize?
Done
maybe fmt.Sprintf("semantic tokens: file %s too large for full (%d>%d)", td.URI.Filename(), len(pgf. […]
Done
Attention is currently required from: Peter Weinberger.
Patch set 3:Code-Review +2
Peter Weinberger submitted this change.
internal/lsp/semantic: avoid doing semantic tokens for large files
LSP (and gopls) support both full-file semantic token requests and
requests for just a range, typically roughly what's visible to the user.
It can be slow to produce the full set for a very large file, so
this code now responds with an error if the file is bigger than
100,000 bytes. After getting this error, vscode, at least,
will stop asking for full requests and use range requests.
Alternatively, server capabilities could say gopls never responds to
full-file requests, but doing that doesn't stop vscode from asking for
them. Another possibility would be to fix a time limit (like 8ms) for
how long to spend generating full-file semantic tokens. That's tricky
to get right, but one could instead generate an error when there
are more than 4,000 semantic tokens (on my laptop, that's about 8ms.)
Large files are unusual; a simple size limit seems adequate for now.
Change-Id: Ieea0d16aad6e37cc4f14b1a6a7116a4e41197aae
Reviewed-on: https://go-review.googlesource.com/c/tools/+/307729
Run-TryBot: Peter Weinberger <p...@google.com>
gopls-CI: kokoro <noreply...@google.com>
TryBot-Result: Go Bot <go...@golang.org>
Trust: Peter Weinberger <p...@google.com>
Reviewed-by: Rebecca Stambler <rsta...@golang.org>
---
M internal/lsp/cmd/cmd.go
M internal/lsp/cmd/semantictokens.go
M internal/lsp/semantic.go
3 files changed, 25 insertions(+), 11 deletions(-)
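One alternative the commit message above mentions but does not adopt is cutting off by token count (around 4,000 tokens, roughly 8ms) rather than by file size. For concreteness, a hypothetical sketch of that idea, which is not part of this CL:

    package sketch

    import "fmt"

    // maxFullFileTokens is the hypothetical cutoff discussed in the commit
    // message; this alternative was not adopted, the CL limits by bytes.
    const maxFullFileTokens = 4000

    // tokenLimiter counts emitted tokens and reports an error once the
    // cutoff is exceeded, so a full-file walk could bail out early.
    type tokenLimiter struct{ count int }

    func (t *tokenLimiter) add() error {
        t.count++
        if t.count > maxFullFileTokens {
            return fmt.Errorf("more than %d semantic tokens for a full-file request", maxFullFileTokens)
        }
        return nil
    }

As the message notes, a byte-size limit is simpler, and it was judged adequate because very large files are unusual.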
diff --git a/internal/lsp/cmd/cmd.go b/internal/lsp/cmd/cmd.go
index 41c2bce..fdaf6d1 100644
--- a/internal/lsp/cmd/cmd.go
+++ b/internal/lsp/cmd/cmd.go
@@ -504,13 +504,9 @@
return file
}
-func (c *connection) semanticTokens(ctx context.Context, file span.URI) (*protocol.SemanticTokens, error) {
- p := &protocol.SemanticTokensParams{
- TextDocument: protocol.TextDocumentIdentifier{
- URI: protocol.URIFromSpanURI(file),
- },
- }
- resp, err := c.Server.SemanticTokensFull(ctx, p)
+func (c *connection) semanticTokens(ctx context.Context, p *protocol.SemanticTokensRangeParams) (*protocol.SemanticTokens, error) {
+ // use range to avoid limits on full
+ resp, err := c.Server.SemanticTokensRange(ctx, p)
if err != nil {
return nil, err
}
diff --git a/internal/lsp/cmd/semantictokens.go b/internal/lsp/cmd/semantictokens.go
index 41e353c..e8f9018 100644
--- a/internal/lsp/cmd/semantictokens.go
+++ b/internal/lsp/cmd/semantictokens.go
@@ -94,13 +94,24 @@
return file.err
}
- resp, err := conn.semanticTokens(ctx, uri)
+ buf, err := ioutil.ReadFile(args[0])
if err != nil {
return err
}
- buf, err := ioutil.ReadFile(args[0])
+ lines := bytes.Split(buf, []byte{'\n'})
+ p := &protocol.SemanticTokensRangeParams{
+ TextDocument: protocol.TextDocumentIdentifier{
+ URI: protocol.URIFromSpanURI(uri),
+ },
+ Range: protocol.Range{Start: protocol.Position{Line: 0, Character: 0},
+ End: protocol.Position{
+ Line: uint32(len(lines) - 1),
+ Character: uint32(len(lines[len(lines)-1]))},
+ },
+ }
+ resp, err := conn.semanticTokens(ctx, p)
if err != nil {
- log.Fatal(err)
+ return err
}
fset := token.NewFileSet()
f, err := parser.ParseFile(fset, args[0], buf, 0)
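For readers following the command-line change above: the tool now reads the file itself and builds a range spanning the whole document, so it exercises the same code path as an editor's range request without tripping the new full-file limit. Pulled out as a standalone helper (hypothetical name; the CL inlines this, and it uses the byte length of the last line as its character count, which is exact when the file ends with a newline and the last line is empty):

    package sketch

    import (
        "bytes"

        "golang.org/x/tools/internal/lsp/protocol"
    )

    // wholeFileRange builds a protocol.Range from line 0, character 0 up to
    // the last line and the length of that last line.
    func wholeFileRange(buf []byte) protocol.Range {
        lines := bytes.Split(buf, []byte{'\n'})
        last := lines[len(lines)-1]
        return protocol.Range{
            Start: protocol.Position{Line: 0, Character: 0},
            End: protocol.Position{
                Line:      uint32(len(lines) - 1),
                Character: uint32(len(last)),
            },
        }
    }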
diff --git a/internal/lsp/semantic.go b/internal/lsp/semantic.go
index fbca581..8230a7c 100644
--- a/internal/lsp/semantic.go
+++ b/internal/lsp/semantic.go
@@ -22,6 +22,8 @@
errors "golang.org/x/xerrors"
)
+const maxFullFileSize int = 100000 // reject full semantic token requests for large files
+
func (s *Server) semanticTokensFull(ctx context.Context, p *protocol.SemanticTokensParams) (*protocol.SemanticTokens, error) {
ret, err := s.computeSemanticTokens(ctx, p.TextDocument, nil)
return ret, err
@@ -68,6 +70,11 @@
if pgf.ParseErr != nil {
return nil, pgf.ParseErr
}
+ if rng == nil && len(pgf.Src) > maxFullFileSize {
+ err := fmt.Errorf("semantic tokens: file %s too large for full (%d>%d)",
+ td.URI.SpanURI().Filename(), len(pgf.Src), maxFullFileSize)
+ return nil, err
+ }
e := &encoded{
ctx: ctx,
pgf: pgf,
@@ -491,7 +498,7 @@
}
span, err := e.pgf.Mapper.RangeSpan(*e.rng)
if err != nil {
- return errors.Errorf("range span error for %s", e.pgf.File.Name)
+ return errors.Errorf("range span (%v) error for %s", err, e.pgf.File.Name)
}
e.end = e.start + token.Pos(span.End().Offset())
e.start += token.Pos(span.Start().Offset())
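A quick way to exercise the command-line path touched in this change (assuming the subcommand is still named semtok, as defined in internal/lsp/cmd/semantictokens.go):

    gopls semtok path/to/some_large_file.go

With this CL the tool issues a whole-file range request rather than a full request, so files over 100,000 bytes still produce output instead of the new "too large for full" error.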