Gopher Robot submitted this change.
11 is the latest approved patch-set.
The change was submitted with unreviewed changes in the following files:
```
The name of the file: gopls/internal/lsp/source/inline_all.go
Insertions: 5, Deletions: 1.
@@ -128,7 +128,11 @@
call, _ = path[1].(*ast.CallExpr)
}
if name == nil || call == nil {
- // TODO(rfindley): handle this case with literalization.
+ // TODO(rfindley): handle this case with eta-abstraction:
+ // a reference to the target function f in a non-call position
+ // use(f)
+ // is replaced by
+ // use(func(...) { f(...) })
return nil, fmt.Errorf("cannot inline: found non-call function reference %v", ref)
}
// Sanity check.
```
```
The name of the file: gopls/internal/lsp/source/change_signature.go
Insertions: 8, Deletions: 9.
@@ -28,16 +28,16 @@
"golang.org/x/tools/internal/typesinternal"
)
-// RemoveParameter computes a refactoring to remove the parameter indicated by
-// the given range, which must be contained within an unused parameter name or
-// field.
+// RemoveUnusedParameter computes a refactoring to remove the parameter
+// indicated by the given range, which must be contained within an unused
+// parameter name or field.
//
-// This operation is a work in progress, and has known bugs. Remaining TODO:
+// This operation is a work in progress. Remaining TODO:
// - Handle function assignment correctly.
// - Improve the extra newlines in output.
// - Stream type checking via ForEachPackage.
// - Avoid unnecessary additional type checking.
-func RemoveParameter(ctx context.Context, fh FileHandle, rng protocol.Range, snapshot Snapshot) ([]protocol.DocumentChanges, error) {
+func RemoveUnusedParameter(ctx context.Context, fh FileHandle, rng protocol.Range, snapshot Snapshot) ([]protocol.DocumentChanges, error) {
pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI())
if err != nil {
return nil, err
@@ -111,9 +111,7 @@
}
args = append(args, &ast.Ident{Name: n.Name})
if i == len(params.List)-1 {
- if _, isEllipsis := fld.Type.(*ast.Ellipsis); isEllipsis {
- variadic = true
- }
+ _, variadic = fld.Type.(*ast.Ellipsis)
}
}
}
@@ -253,6 +251,7 @@
info := ParamInfo{FieldIndex: -1, NameIndex: -1}
start, end, err := pgf.RangePos(rng)
if err != nil {
+ bug.Reportf("(file=%v).RangePos(%v) failed: %v", pgf.URI, rng, err)
return info
}
@@ -424,7 +423,7 @@
return nil, fmt.Errorf("analyzing callee: %v", err)
}
- post := func(got []byte) []byte { return bytes.Replace(got, []byte(tag), nil, -1) }
+ post := func(got []byte) []byte { return bytes.ReplaceAll(got, []byte(tag), nil) }
return inlineAllCalls(ctx, logf, rw.snapshot, rw.pkg, rw.pgf, rw.origDecl, calleeInfo, post)
}
```
```
The name of the file: gopls/internal/lsp/command.go
Insertions: 2, Deletions: 2.
@@ -1215,8 +1215,8 @@
return c.run(ctx, commandConfig{
forURI: args.RemoveParameter.URI,
}, func(ctx context.Context, deps commandDeps) error {
- // For now, gopls only supports removing parameters.
- changes, err := source.RemoveParameter(ctx, deps.fh, args.RemoveParameter.Range, deps.snapshot)
+ // For now, gopls only supports removing unused parameters.
+ changes, err := source.RemoveUnusedParameter(ctx, deps.fh, args.RemoveParameter.Range, deps.snapshot)
if err != nil {
return err
}
```
```
The name of the file: gopls/internal/lsp/code_action.go
Insertions: 2, Deletions: 2.
@@ -444,7 +444,7 @@
return nil, err
}
actions = append(actions, protocol.CodeAction{
- Title: "Refactor: remove unused parameter (experimental)",
+ Title: "Refactor: remove unused parameter",
Kind: protocol.RefactorRewrite,
Command: &cmd,
})
@@ -572,7 +572,7 @@
// If range is within call expression, offer inline action.
if _, fn, err := source.EnclosingStaticCall(pkg, pgf, rng); err == nil {
- cmd, err := command.NewApplyFixCommand(fmt.Sprintf("Inline call to %s (experimental)", fn.Name()), command.ApplyFixArgs{
+ cmd, err := command.NewApplyFixCommand(fmt.Sprintf("Inline call to %s", fn.Name()), command.ApplyFixArgs{
URI: protocol.URIFromSpanURI(pgf.URI),
Fix: source.InlineCall,
Range: rng,
```
gopls/internal/lsp: add code actions to remove unused parameters
Use the new inlining technology to implement a first change-signature
refactoring, by rewriting the declaration to be a trivial wrapper around
a declaration with a modified signature, inlining the wrapper, and then
performing string substitution to replace references to the synthetic
delegate.
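To make this concrete, here is a minimal, hypothetical sketch of the
intermediate state the refactoring constructs when removing an unused
parameter b from a function Foo (the names are illustrative; the G_o_p_l_s_
prefix is the tag that rewriteCalls uses for the synthetic delegate):
```
package example

// Before the refactoring: func Foo(a, b int) int { return a }  (b is unused)

// Step 1: a copy of the declaration with the modified signature, under a
// synthetic name.
func G_o_p_l_s_Foo(a int) int { return a }

// Step 2: the original declaration becomes a trivial wrapper that delegates.
// Inlining this wrapper rewrites each call Foo(x, y) into G_o_p_l_s_Foo(x).
func Foo(a, b int) int { return G_o_p_l_s_Foo(a) }

// Step 3: textual substitution erases the "G_o_p_l_s_" tag from the inlined
// output, so the rewritten call sites refer to Foo again, now with the new
// signature.
```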
This demonstrates the power of inlining, which can be seen as a more
general tool for rewriting source code: express old code as new code,
recognize instances of old code (in this case, calls), and inline.
However, this CL was still rather difficult, primarily because of (1)
the problem of manipulating syntax without breaking formatting and
comments, and (2) the problem of composing multiple refactoring
operations, which in general requires iterative type checking.
Neither of those difficulties has a general solution: any form of
nontrivial AST manipulation tends to result in an unacceptable movement
of comments, and iterative type checking is difficult because it may
involve an arbitrarily modified package graph, and because it is
difficult to correlate references in the previous version of the package
with references in the new version of the package.
But in the case of removing a parameter, these problems are solvable: We
can avoid most AST mangling by restricting the scope of rewriting to the
function signature. We can type check the new package using the imports
of the old package. We can find the next reference in the new package by
counting.
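As a sketch of that counting step (a hypothetical helper; the real logic lives
in inlineAllCalls in gopls/internal/lsp/source/inline_all.go), the remaining
static calls to the target can be re-collected after each inlining pass and
re-type-check, and the next call to process is identified by its index in that
slice:
```
package example

import (
	"go/ast"
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

// remainingCalls returns the static calls to the named function of pkgPath
// found in a freshly re-parsed and re-type-checked file. Since inlining one
// call removes exactly one call (or leaves the count unchanged for a
// recursive target), the next call to process can be located by its index in
// this slice instead of correlating old and new AST nodes.
func remainingCalls(info *types.Info, file *ast.File, pkgPath, name string) []*ast.CallExpr {
	var calls []*ast.CallExpr
	ast.Inspect(file, func(n ast.Node) bool {
		if call, ok := n.(*ast.CallExpr); ok {
			if fn := typeutil.StaticCallee(info, call); fn != nil &&
				fn.Pkg() != nil && fn.Pkg().Path() == pkgPath && fn.Name() == name {
				calls = append(calls, call)
			}
		}
		return true
	})
	return calls
}
```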
Fixes golang/go#63534
Change-Id: Iba5fa6b0da503b7723bea1b43fd2c4151576a672
Reviewed-on: https://go-review.googlesource.com/c/tools/+/532495
Reviewed-by: Alan Donovan <adon...@google.com>
LUCI-TryBot-Result: Go LUCI <golang...@luci-project-accounts.iam.gserviceaccount.com>
Auto-Submit: Robert Findley <rfin...@google.com>
---
M gopls/doc/commands.md
A gopls/internal/lsp/analysis/unusedparams/cmd/main.go
M gopls/internal/lsp/analysis/unusedparams/unusedparams.go
M gopls/internal/lsp/cache/check.go
M gopls/internal/lsp/code_action.go
M gopls/internal/lsp/command.go
M gopls/internal/lsp/command/command_gen.go
M gopls/internal/lsp/command/interface.go
M gopls/internal/lsp/regtest/marker.go
M gopls/internal/lsp/source/api_json.go
A gopls/internal/lsp/source/change_signature.go
M gopls/internal/lsp/source/inline.go
A gopls/internal/lsp/source/inline_all.go
M gopls/internal/lsp/source/util.go
M gopls/internal/lsp/source/view.go
A gopls/internal/regtest/marker/testdata/codeaction/removeparam.txt
A gopls/internal/regtest/marker/testdata/codeaction/removeparam_formatting.txt
A gopls/internal/regtest/marker/testdata/codeaction/removeparam_funcvalue.txt
A gopls/internal/regtest/marker/testdata/codeaction/removeparam_imports.txt
A internal/astutil/clone.go
M internal/refactor/inline/inline.go
21 files changed, 1,655 insertions(+), 86 deletions(-)
diff --git a/gopls/doc/commands.md b/gopls/doc/commands.md
index 833dad9..3404c91 100644
--- a/gopls/doc/commands.md
+++ b/gopls/doc/commands.md
@@ -84,6 +84,26 @@
}
```
+### **performs a "change signature" refactoring.**
+Identifier: `gopls.change_signature`
+
+This command is experimental, currently only supporting parameter removal.
+Its signature will certainly change in the future (pun intended).
+
+Args:
+
+```
+{
+ "RemoveParameter": {
+ "uri": string,
+ "range": {
+ "start": { ... },
+ "end": { ... },
+ },
+ },
+}
+```
+
### **Check for upgrades**
Identifier: `gopls.check_upgrades`
diff --git a/gopls/internal/lsp/analysis/unusedparams/cmd/main.go b/gopls/internal/lsp/analysis/unusedparams/cmd/main.go
new file mode 100644
index 0000000..fafb126
--- /dev/null
+++ b/gopls/internal/lsp/analysis/unusedparams/cmd/main.go
@@ -0,0 +1,13 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// The unusedparams command runs the unusedparams analyzer.
+package main
+
+import (
+ "golang.org/x/tools/go/analysis/singlechecker"
+ "golang.org/x/tools/gopls/internal/lsp/analysis/unusedparams"
+)
+
+func main() { singlechecker.Main(unusedparams.Analyzer) }
diff --git a/gopls/internal/lsp/analysis/unusedparams/unusedparams.go b/gopls/internal/lsp/analysis/unusedparams/unusedparams.go
index e0ef5ef..64702b2 100644
--- a/gopls/internal/lsp/analysis/unusedparams/unusedparams.go
+++ b/gopls/internal/lsp/analysis/unusedparams/unusedparams.go
@@ -28,11 +28,20 @@
- functions in test files
- functions with empty bodies or those with just a return stmt`
-var Analyzer = &analysis.Analyzer{
- Name: "unusedparams",
- Doc: Doc,
- Requires: []*analysis.Analyzer{inspect.Analyzer},
- Run: run,
+var (
+ Analyzer = &analysis.Analyzer{
+ Name: "unusedparams",
+ Doc: Doc,
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Run: run,
+ }
+ inspectLits bool
+ inspectWrappers bool
+)
+
+func init() {
+ Analyzer.Flags.BoolVar(&inspectLits, "lits", true, "inspect function literals")
+ Analyzer.Flags.BoolVar(&inspectWrappers, "wrappers", false, "inspect functions whose body consists of a single return statement")
}
type paramData struct {
@@ -45,7 +54,9 @@
inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
nodeFilter := []ast.Node{
(*ast.FuncDecl)(nil),
- (*ast.FuncLit)(nil),
+ }
+ if inspectLits {
+ nodeFilter = append(nodeFilter, (*ast.FuncLit)(nil))
}
inspect.Preorder(nodeFilter, func(n ast.Node) {
@@ -62,6 +73,7 @@
if f.Recv != nil {
return
}
+
// Ignore functions in _test.go files to reduce false positives.
if file := pass.Fset.File(n.Pos()); file != nil && strings.HasSuffix(file.Name(), "_test.go") {
return
@@ -76,8 +88,10 @@
switch expr := body.List[0].(type) {
case *ast.ReturnStmt:
- // Ignore functions that only contain a return statement to reduce false positives.
- return
+ if !inspectWrappers {
+ // Ignore functions that only contain a return statement to reduce false positives.
+ return
+ }
case *ast.ExprStmt:
callExpr, ok := expr.X.(*ast.CallExpr)
if !ok || len(body.List) > 1 {
diff --git a/gopls/internal/lsp/cache/check.go b/gopls/internal/lsp/cache/check.go
index e0be99b..a95c439 100644
--- a/gopls/internal/lsp/cache/check.go
+++ b/gopls/internal/lsp/cache/check.go
@@ -1487,6 +1487,7 @@
return pkg, nil
}
+// TODO(golang/go#63472): this looks wrong with the new Go version syntax.
var goVersionRx = regexp.MustCompile(`^go([1-9][0-9]*)\.(0|[1-9][0-9]*)$`)
func doTypeCheck(ctx context.Context, b *typeCheckBatch, inputs typeCheckInputs) (*syntaxPackage, error) {
diff --git a/gopls/internal/lsp/code_action.go b/gopls/internal/lsp/code_action.go
index 555131e..d756748 100644
--- a/gopls/internal/lsp/code_action.go
+++ b/gopls/internal/lsp/code_action.go
@@ -430,6 +430,26 @@
rerr = bug.Errorf("refactor.rewrite code actions panicked: %v", r)
}
}()
+
+ var actions []protocol.CodeAction
+
+ if canRemoveParameter(pkg, pgf, rng) {
+ cmd, err := command.NewChangeSignatureCommand("remove unused parameter", command.ChangeSignatureArgs{
+ RemoveParameter: protocol.Location{
+ URI: protocol.URIFromSpanURI(pgf.URI),
+ Range: rng,
+ },
+ })
+ if err != nil {
+ return nil, err
+ }
+ actions = append(actions, protocol.CodeAction{
+ Title: "Refactor: remove unused parameter",
+ Kind: protocol.RefactorRewrite,
+ Command: &cmd,
+ })
+ }
+
start, end, err := pgf.RangePos(rng)
if err != nil {
return nil, err
@@ -471,7 +491,6 @@
}
}
- var actions []protocol.CodeAction
for i := range commands {
actions = append(actions, protocol.CodeAction{
Title: commands[i].Title,
@@ -510,6 +529,43 @@
return actions, nil
}
+// canRemoveParameter reports whether we can remove the function parameter
+// indicated by the given [start, end) range.
+//
+// This is true if:
+// - [start, end) is contained within an unused field or parameter name
+// - ... of a non-method function declaration.
+func canRemoveParameter(pkg source.Package, pgf *source.ParsedGoFile, rng protocol.Range) bool {
+ info := source.FindParam(pgf, rng)
+ if info.Decl == nil || info.Field == nil {
+ return false
+ }
+
+ if len(info.Field.Names) == 0 {
+ return true // no names => field is unused
+ }
+ if info.Name == nil {
+ return false // no name is indicated
+ }
+ if info.Name.Name == "_" {
+ return true // trivially unused
+ }
+
+ obj := pkg.GetTypesInfo().Defs[info.Name]
+ if obj == nil {
+ return false // something went wrong
+ }
+
+ used := false
+ ast.Inspect(info.Decl.Body, func(node ast.Node) bool {
+ if n, ok := node.(*ast.Ident); ok && pkg.GetTypesInfo().Uses[n] == obj {
+ used = true
+ }
+ return !used // keep going until we find a use
+ })
+ return !used
+}
+
// refactorInline returns inline actions available at the specified range.
func refactorInline(ctx context.Context, snapshot source.Snapshot, pkg source.Package, pgf *source.ParsedGoFile, fh source.FileHandle, rng protocol.Range) ([]protocol.CodeAction, error) {
var commands []protocol.Command
diff --git a/gopls/internal/lsp/command.go b/gopls/internal/lsp/command.go
index f4d4a9e..8fe1ec8 100644
--- a/gopls/internal/lsp/command.go
+++ b/gopls/internal/lsp/command.go
@@ -1210,3 +1210,25 @@
event.Log(ctx, fmt.Sprintf("client declined to open document %v", url))
}
}
+
+func (c *commandHandler) ChangeSignature(ctx context.Context, args command.ChangeSignatureArgs) error {
+ return c.run(ctx, commandConfig{
+ forURI: args.RemoveParameter.URI,
+ }, func(ctx context.Context, deps commandDeps) error {
+ // For now, gopls only supports removing unused parameters.
+ changes, err := source.RemoveUnusedParameter(ctx, deps.fh, args.RemoveParameter.Range, deps.snapshot)
+ if err != nil {
+ return err
+ }
+ r, err := c.s.client.ApplyEdit(ctx, &protocol.ApplyWorkspaceEditParams{
+ Edit: protocol.WorkspaceEdit{
+ DocumentChanges: changes,
+ },
+ })
+ if err != nil {
+ return err
+ }
+ if !r.Applied {
+ return fmt.Errorf("failed to apply edits: %v", r.FailureReason)
+ }
+
+ return nil
+ })
+}
diff --git a/gopls/internal/lsp/command/command_gen.go b/gopls/internal/lsp/command/command_gen.go
index 5dd2a9d..e54030c 100644
--- a/gopls/internal/lsp/command/command_gen.go
+++ b/gopls/internal/lsp/command/command_gen.go
@@ -23,6 +23,7 @@
AddImport Command = "add_import"
AddTelemetryCounters Command = "add_telemetry_counters"
ApplyFix Command = "apply_fix"
+ ChangeSignature Command = "change_signature"
CheckUpgrades Command = "check_upgrades"
EditGoDirective Command = "edit_go_directive"
FetchVulncheckResult Command = "fetch_vulncheck_result"
@@ -56,6 +57,7 @@
AddImport,
AddTelemetryCounters,
ApplyFix,
+ ChangeSignature,
CheckUpgrades,
EditGoDirective,
FetchVulncheckResult,
@@ -110,6 +112,12 @@
return nil, err
}
return nil, s.ApplyFix(ctx, a0)
+ case "gopls.change_signature":
+ var a0 ChangeSignatureArgs
+ if err := UnmarshalArgs(params.Arguments, &a0); err != nil {
+ return nil, err
+ }
+ return nil, s.ChangeSignature(ctx, a0)
case "gopls.check_upgrades":
var a0 CheckUpgradesArgs
if err := UnmarshalArgs(params.Arguments, &a0); err != nil {
@@ -308,6 +316,18 @@
}, nil
}
+func NewChangeSignatureCommand(title string, a0 ChangeSignatureArgs) (protocol.Command, error) {
+ args, err := MarshalArgs(a0)
+ if err != nil {
+ return protocol.Command{}, err
+ }
+ return protocol.Command{
+ Title: title,
+ Command: "gopls.change_signature",
+ Arguments: args,
+ }, nil
+}
+
func NewCheckUpgradesCommand(title string, a0 CheckUpgradesArgs) (protocol.Command, error) {
args, err := MarshalArgs(a0)
if err != nil {
diff --git a/gopls/internal/lsp/command/interface.go b/gopls/internal/lsp/command/interface.go
index c3bd921..066f16f 100644
--- a/gopls/internal/lsp/command/interface.go
+++ b/gopls/internal/lsp/command/interface.go
@@ -201,6 +201,12 @@
// the user to ask if they want to enable Go telemetry uploading. If the user
// responds 'Yes', the telemetry mode is set to "on".
MaybePromptForTelemetry(context.Context) error
+
+ // ChangeSignature: performs a "change signature" refactoring.
+ //
+ // This command is experimental, currently only supporting parameter removal.
+ // Its signature will certainly change in the future (pun intended).
+ ChangeSignature(context.Context, ChangeSignatureArgs) error
}
type RunTestsArgs struct {
@@ -519,3 +525,8 @@
Names []string // Name of counters.
Values []int64 // Values added to the corresponding counters. Must be non-negative.
}
+
+// ChangeSignatureArgs specifies a "change signature" refactoring to perform.
+type ChangeSignatureArgs struct {
+ RemoveParameter protocol.Location
+}
diff --git a/gopls/internal/lsp/regtest/marker.go b/gopls/internal/lsp/regtest/marker.go
index f6c1d4c..928f77d 100644
--- a/gopls/internal/lsp/regtest/marker.go
+++ b/gopls/internal/lsp/regtest/marker.go
@@ -1001,6 +1001,9 @@
// ...followed by any new golden files.
var newGoldenFiles []txtar.File
for filename, data := range updatedGolden {
+ // TODO(rfindley): it looks like this implicitly removes trailing newlines
+ // from golden content. Is there any way to fix that? Perhaps we should
+ // just make the diff tolerant of missing newlines?
newGoldenFiles = append(newGoldenFiles, txtar.File{Name: filename, Data: data})
}
// Sort new golden files lexically.
@@ -2047,7 +2050,7 @@
Command: action.Command.Command,
Arguments: action.Command.Arguments,
}); err != nil {
- env.T.Fatalf("error converting command %q to edits: %v", action.Command.Command, err)
+ return nil, err
}
if err := applyDocumentChanges(env, env.Awaiter.takeDocumentChanges(), fileChanges); err != nil {
diff --git a/gopls/internal/lsp/source/api_json.go b/gopls/internal/lsp/source/api_json.go
index 0fd6b07..0ad9f1d 100644
--- a/gopls/internal/lsp/source/api_json.go
+++ b/gopls/internal/lsp/source/api_json.go
@@ -734,6 +734,12 @@
ArgDoc: "{\n\t// The fix to apply.\n\t\"Fix\": string,\n\t// The file URI for the document to fix.\n\t\"URI\": string,\n\t// The document range to scan for fixes.\n\t\"Range\": {\n\t\t\"start\": {\n\t\t\t\"line\": uint32,\n\t\t\t\"character\": uint32,\n\t\t},\n\t\t\"end\": {\n\t\t\t\"line\": uint32,\n\t\t\t\"character\": uint32,\n\t\t},\n\t},\n}",
},
{
+ Command: "gopls.change_signature",
+ Title: "performs a \"change signature\" refactoring.",
+ Doc: "This command is experimental, currently only supporting parameter removal.\nIts signature will certainly change in the future (pun intended).",
+ ArgDoc: "{\n\t\"RemoveParameter\": {\n\t\t\"uri\": string,\n\t\t\"range\": {\n\t\t\t\"start\": { ... },\n\t\t\t\"end\": { ... },\n\t\t},\n\t},\n}",
+ },
+ {
Command: "gopls.check_upgrades",
Title: "Check for upgrades",
Doc: "Checks for module upgrades.",
diff --git a/gopls/internal/lsp/source/change_signature.go b/gopls/internal/lsp/source/change_signature.go
new file mode 100644
index 0000000..8dfd013
--- /dev/null
+++ b/gopls/internal/lsp/source/change_signature.go
@@ -0,0 +1,572 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "go/ast"
+ "go/format"
+ "go/parser"
+ "go/token"
+ "go/types"
+ "regexp"
+
+ "golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/gopls/internal/bug"
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/lsp/safetoken"
+ "golang.org/x/tools/gopls/internal/span"
+ "golang.org/x/tools/imports"
+ internalastutil "golang.org/x/tools/internal/astutil"
+ "golang.org/x/tools/internal/diff"
+ "golang.org/x/tools/internal/refactor/inline"
+ "golang.org/x/tools/internal/tokeninternal"
+ "golang.org/x/tools/internal/typesinternal"
+)
+
+// RemoveUnusedParameter computes a refactoring to remove the parameter
+// indicated by the given range, which must be contained within an unused
+// parameter name or field.
+//
+// This operation is a work in progress. Remaining TODO:
+// - Handle function assignment correctly.
+// - Improve the extra newlines in output.
+// - Stream type checking via ForEachPackage.
+// - Avoid unnecessary additional type checking.
+func RemoveUnusedParameter(ctx context.Context, fh FileHandle, rng protocol.Range, snapshot Snapshot) ([]protocol.DocumentChanges, error) {
+ pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI())
+ if err != nil {
+ return nil, err
+ }
+ if perrors, terrors := pkg.GetParseErrors(), pkg.GetTypeErrors(); len(perrors) > 0 || len(terrors) > 0 {
+ var sample string
+ if len(perrors) > 0 {
+ sample = perrors[0].Error()
+ } else {
+ sample = terrors[0].Error()
+ }
+ return nil, fmt.Errorf("can't change signatures for packages with parse or type errors: (e.g. %s)", sample)
+ }
+
+ info := FindParam(pgf, rng)
+ if info.Decl == nil {
+ return nil, fmt.Errorf("failed to find declaration")
+ }
+ if info.Decl.Recv != nil {
+ return nil, fmt.Errorf("can't change signature of methods (yet)")
+ }
+ if info.Field == nil {
+ return nil, fmt.Errorf("failed to find field")
+ }
+
+ // Create the new declaration, which is a copy of the original decl with the
+ // unnecessary parameter removed.
+ newDecl := internalastutil.CloneNode(info.Decl)
+ if info.Name != nil {
+ names := remove(newDecl.Type.Params.List[info.FieldIndex].Names, info.NameIndex)
+ newDecl.Type.Params.List[info.FieldIndex].Names = names
+ }
+ if len(newDecl.Type.Params.List[info.FieldIndex].Names) == 0 {
+ // Unnamed, or final name was removed: in either case, remove the field.
+ newDecl.Type.Params.List = remove(newDecl.Type.Params.List, info.FieldIndex)
+ }
+
+ // Compute inputs into building a wrapper function around the modified
+ // signature.
+ var (
+ params = internalastutil.CloneNode(info.Decl.Type.Params) // "_" names will be modified
+ args []ast.Expr // arguments to delegate
+ variadic = false // whether the signature is variadic
+ )
+ {
+ allNames := make(map[string]bool) // for renaming blanks
+ for _, fld := range params.List {
+ for _, n := range fld.Names {
+ if n.Name != "_" {
+ allNames[n.Name] = true
+ }
+ }
+ }
+ blanks := 0
+ for i, fld := range params.List {
+ for j, n := range fld.Names {
+ if i == info.FieldIndex && j == info.NameIndex {
+ continue
+ }
+ if n.Name == "_" {
+ // Create names for blank (_) parameters so the delegating wrapper
+ // can refer to them.
+ for {
+ newName := fmt.Sprintf("blank%d", blanks)
+ blanks++
+ if !allNames[newName] {
+ n.Name = newName
+ break
+ }
+ }
+ }
+ args = append(args, &ast.Ident{Name: n.Name})
+ if i == len(params.List)-1 {
+ _, variadic = fld.Type.(*ast.Ellipsis)
+ }
+ }
+ }
+ }
+
+ // Rewrite all referring calls.
+ newContent, err := rewriteCalls(ctx, signatureRewrite{
+ snapshot: snapshot,
+ pkg: pkg,
+ pgf: pgf,
+ origDecl: info.Decl,
+ newDecl: newDecl,
+ params: params,
+ callArgs: args,
+ variadic: variadic,
+ })
+ if err != nil {
+ return nil, err
+ }
+ // Finally, rewrite the original declaration. We do this after inlining all
+ // calls, as there may be calls in the same file as the declaration. But none
+ // of the inlining should have changed the location of the original
+ // declaration.
+ {
+ idx := findDecl(pgf.File, info.Decl)
+ if idx < 0 {
+ return nil, bug.Errorf("didn't find original decl")
+ }
+
+ src, ok := newContent[pgf.URI]
+ if !ok {
+ src = pgf.Src
+ }
+ fset := tokeninternal.FileSetFor(pgf.Tok)
+ src, err = rewriteSignature(fset, idx, src, newDecl)
+ if err != nil {
+ return nil, err
+ }
+ newContent[pgf.URI] = src
+ }
+
+ // Translate the resulting state into document changes.
+ var changes []protocol.DocumentChanges
+ for uri, after := range newContent {
+ fh, err := snapshot.ReadFile(ctx, uri)
+ if err != nil {
+ return nil, err
+ }
+ before, err := fh.Content()
+ if err != nil {
+ return nil, err
+ }
+ edits := diff.Bytes(before, after)
+ mapper := protocol.NewMapper(uri, before)
+ pedits, err := ToProtocolEdits(mapper, edits)
+ if err != nil {
+ return nil, fmt.Errorf("computing edits for %s: %v", uri, err)
+ }
+ changes = append(changes, protocol.DocumentChanges{
+ TextDocumentEdit: &protocol.TextDocumentEdit{
+ TextDocument: protocol.OptionalVersionedTextDocumentIdentifier{
+ Version: fh.Version(),
+ TextDocumentIdentifier: protocol.TextDocumentIdentifier{URI: protocol.URIFromSpanURI(uri)},
+ },
+ Edits: pedits,
+ },
+ })
+ }
+ return changes, nil
+}
+
+// rewriteSignature rewrites the signature of the declIdx'th declaration in src
+// to use the signature of newDecl (described by fset).
+//
+// TODO(rfindley): I think this operation could be generalized, for example by
+// using a concept of a 'nodepath' to correlate nodes between two related
+// files.
+//
+// Note that with its current application, rewriteSignature is expected to
+// succeed. Separate bug.Errorf calls are used below (rather than one call at
+// the callsite) in order to have greater precision.
+func rewriteSignature(fset *token.FileSet, declIdx int, src0 []byte, newDecl *ast.FuncDecl) ([]byte, error) {
+ // Parse the new file0 content, to locate the original params.
+ file0, err := parser.ParseFile(fset, "", src0, parser.ParseComments|parser.SkipObjectResolution)
+ if err != nil {
+ return nil, bug.Errorf("re-parsing declaring file failed: %v", err)
+ }
+ decl0, _ := file0.Decls[declIdx].(*ast.FuncDecl)
+ // Inlining shouldn't have changed the location of any declarations, but do
+ // a sanity check.
+ if decl0 == nil || decl0.Name.Name != newDecl.Name.Name {
+ return nil, bug.Errorf("inlining affected declaration order: found %v, not func %s", decl0, newDecl.Name.Name)
+ }
+ opening0, closing0, err := safetoken.Offsets(fset.File(decl0.Pos()), decl0.Type.Params.Opening, decl0.Type.Params.Closing)
+ if err != nil {
+ return nil, bug.Errorf("can't find params: %v", err)
+ }
+
+ // Format the modified signature and apply a textual replacement. This
+ // minimizes comment disruption.
+ formattedType := FormatNode(fset, newDecl.Type)
+ expr, err := parser.ParseExprFrom(fset, "", []byte(formattedType), 0)
+ if err != nil {
+ return nil, bug.Errorf("parsing modified signature: %v", err)
+ }
+ newType := expr.(*ast.FuncType)
+ opening1, closing1, err := safetoken.Offsets(fset.File(newType.Pos()), newType.Params.Opening, newType.Params.Closing)
+ if err != nil {
+ return nil, bug.Errorf("param offsets: %v", err)
+ }
+ newParams := formattedType[opening1 : closing1+1]
+
+ // Splice.
+ var buf bytes.Buffer
+ buf.Write(src0[:opening0])
+ buf.WriteString(newParams)
+ buf.Write(src0[closing0+1:])
+ newSrc := buf.Bytes()
+ if len(file0.Imports) > 0 {
+ formatted, err := imports.Process("output", newSrc, nil)
+ if err != nil {
+ return nil, bug.Errorf("imports.Process failed: %v", err)
+ }
+ newSrc = formatted
+ }
+ return newSrc, nil
+}
+
+// ParamInfo records information about a param identified by a position.
+type ParamInfo struct {
+ Decl *ast.FuncDecl // enclosing func decl, or nil
+ FieldIndex int // index of Field in Decl.Type.Params, or -1
+ Field *ast.Field // enclosing field of Decl, or nil
+ NameIndex int // index of Name in Field.Names, or -1
+ Name *ast.Ident // indicated name (either enclosing, or Field.Names[0] if len(Field.Names) == 1)
+}
+
+// FindParam finds the parameter information spanned by the given range.
+func FindParam(pgf *ParsedGoFile, rng protocol.Range) ParamInfo {
+ info := ParamInfo{FieldIndex: -1, NameIndex: -1}
+ start, end, err := pgf.RangePos(rng)
+ if err != nil {
+ bug.Reportf("(file=%v).RangePos(%v) failed: %v", pgf.URI, rng, err)
+ return info
+ }
+
+ path, _ := astutil.PathEnclosingInterval(pgf.File, start, end)
+ var (
+ id *ast.Ident
+ field *ast.Field
+ decl *ast.FuncDecl
+ )
+ // Find the outermost enclosing node of each kind, whether or not they match
+ // the semantics described in the docstring.
+ for _, n := range path {
+ switch n := n.(type) {
+ case *ast.Ident:
+ id = n
+ case *ast.Field:
+ field = n
+ case *ast.FuncDecl:
+ decl = n
+ }
+ }
+ // Check the conditions described in the docstring.
+ if decl == nil {
+ return info
+ }
+ info.Decl = decl
+ for fi, f := range decl.Type.Params.List {
+ if f == field {
+ info.FieldIndex = fi
+ info.Field = f
+ for ni, n := range f.Names {
+ if n == id {
+ info.NameIndex = ni
+ info.Name = n
+ break
+ }
+ }
+ if info.Name == nil && len(info.Field.Names) == 1 {
+ info.NameIndex = 0
+ info.Name = info.Field.Names[0]
+ }
+ break
+ }
+ }
+ return info
+}
+
+// signatureRewrite defines a rewritten function signature.
+//
+// See rewriteCalls for more details.
+type signatureRewrite struct {
+ snapshot Snapshot
+ pkg Package
+ pgf *ParsedGoFile
+ origDecl, newDecl *ast.FuncDecl
+ params *ast.FieldList
+ callArgs []ast.Expr
+ variadic bool
+}
+
+// rewriteCalls returns the document changes required to rewrite the
+// signature of origDecl to that of newDecl.
+//
+// This is a rather complicated factoring of the rewrite operation, but is able
+// to describe arbitrary rewrites. Specifically, rewriteCalls creates a
+// synthetic copy of pkg, where the original function declaration is changed to
+// be a trivial wrapper around the new declaration. params and callArgs are
+// used to perform this delegation: params must have the same type as origDecl,
+// but may have renamed parameters (such as is required for delegating blank
+// parameters). callArgs are the arguments of the delegated call (i.e. using
+// params).
+//
+// For example, consider removing the unused 'b' parameter below, rewriting
+//
+// func Foo(a, b, c, _ int) int {
+// return a+c
+// }
+//
+// To
+//
+// func Foo(a, c, _ int) int {
+// return a+c
+// }
+//
+// In this case, rewriteCalls is parameterized as follows:
+// - origDecl is the original declaration
+// - newDecl is the new declaration, which is a copy of origDecl less the 'b'
+// parameter.
+// - params is a new parameter list (a, b, c, blank0 int) to be used for the
+// new wrapper.
+// - callArgs is the argument list (a, c, blank0), to be used to call the new
+// delegate.
+//
+// rewriting is expressed this way so that rewriteCalls can own the details
+// of *how* this rewriting is performed. For example, as of writing it names
+// the synthetic delegate G_o_p_l_s_foo, but the caller need not know this.
+//
+// By passing an entirely new declaration, rewriteCalls may be used for
+// signature refactorings that may affect the function body, such as removing
+// or adding return values.
+func rewriteCalls(ctx context.Context, rw signatureRewrite) (map[span.URI][]byte, error) {
+ // tag is a unique prefix that is added to the delegated declaration.
+ //
+ // It must have a ~0% probability of causing collisions with existing names.
+ const tag = "G_o_p_l_s_"
+
+ var (
+ modifiedSrc []byte
+ modifiedFile *ast.File
+ modifiedDecl *ast.FuncDecl
+ )
+ {
+ delegate := internalastutil.CloneNode(rw.newDecl) // clone before modifying
+ delegate.Name.Name = tag + delegate.Name.Name
+ if obj := rw.pkg.GetTypes().Scope().Lookup(delegate.Name.Name); obj != nil {
+ return nil, fmt.Errorf("synthetic name %q conflicts with an existing declaration", delegate.Name.Name)
+ }
+
+ wrapper := internalastutil.CloneNode(rw.origDecl)
+ wrapper.Type.Params = rw.params
+ call := &ast.CallExpr{
+ Fun: &ast.Ident{Name: delegate.Name.Name},
+ Args: rw.callArgs,
+ }
+ if rw.variadic {
+ call.Ellipsis = 1 // must not be token.NoPos
+ }
+
+ var stmt ast.Stmt
+ if delegate.Type.Results.NumFields() > 0 {
+ stmt = &ast.ReturnStmt{
+ Results: []ast.Expr{call},
+ }
+ } else {
+ stmt = &ast.ExprStmt{
+ X: call,
+ }
+ }
+ wrapper.Body = &ast.BlockStmt{
+ List: []ast.Stmt{stmt},
+ }
+
+ fset := tokeninternal.FileSetFor(rw.pgf.Tok)
+ var err error
+ modifiedSrc, err = replaceFileDecl(rw.pgf, rw.origDecl, delegate)
+ if err != nil {
+ return nil, err
+ }
+ // TODO(rfindley): we can probably get away with one fewer parse operation
+ // by returning the modified AST from replaceFileDecl. Investigate if that is
+ // accurate.
+ modifiedSrc = append(modifiedSrc, []byte("\n\n"+FormatNode(fset, wrapper))...)
+ modifiedFile, err = parser.ParseFile(rw.pkg.FileSet(), rw.pgf.URI.Filename(), modifiedSrc, parser.ParseComments|parser.SkipObjectResolution)
+ if err != nil {
+ return nil, err
+ }
+ modifiedDecl = modifiedFile.Decls[len(modifiedFile.Decls)-1].(*ast.FuncDecl)
+ }
+
+ // Type check pkg again with the modified file, to compute the synthetic
+ // callee.
+ logf := logger(ctx, "change signature", rw.snapshot.Options().VerboseOutput)
+ pkg2, info, err := reTypeCheck(logf, rw.pkg, map[span.URI]*ast.File{rw.pgf.URI: modifiedFile}, false)
+ if err != nil {
+ return nil, err
+ }
+ calleeInfo, err := inline.AnalyzeCallee(logf, rw.pkg.FileSet(), pkg2, info, modifiedDecl, modifiedSrc)
+ if err != nil {
+ return nil, fmt.Errorf("analyzing callee: %v", err)
+ }
+
+ post := func(got []byte) []byte { return bytes.ReplaceAll(got, []byte(tag), nil) }
+ return inlineAllCalls(ctx, logf, rw.snapshot, rw.pkg, rw.pgf, rw.origDecl, calleeInfo, post)
+}
+
+// reTypeCheck re-type checks orig with new file contents defined by fileMask.
+//
+// It expects that any newly added imports are already present in the
+// transitive imports of orig.
+//
+// If expectErrors is true, reTypeCheck allows errors in the new package.
+// TODO(rfindley): perhaps this should be a filter to specify which errors are
+// acceptable.
+func reTypeCheck(logf func(string, ...any), orig Package, fileMask map[span.URI]*ast.File, expectErrors bool) (*types.Package, *types.Info, error) {
+ pkg := types.NewPackage(string(orig.Metadata().PkgPath), string(orig.Metadata().Name))
+ info := &types.Info{
+ Types: make(map[ast.Expr]types.TypeAndValue),
+ Defs: make(map[*ast.Ident]types.Object),
+ Uses: make(map[*ast.Ident]types.Object),
+ Implicits: make(map[ast.Node]types.Object),
+ Selections: make(map[*ast.SelectorExpr]*types.Selection),
+ Scopes: make(map[ast.Node]*types.Scope),
+ Instances: make(map[*ast.Ident]types.Instance),
+ }
+ {
+ var files []*ast.File
+ for _, pgf := range orig.CompiledGoFiles() {
+ if mask, ok := fileMask[pgf.URI]; ok {
+ files = append(files, mask)
+ } else {
+ files = append(files, pgf.File)
+ }
+ }
+
+ // Implement a BFS for imports in the transitive package graph.
+ //
+ // Note that this only works if any newly added imports are expected to be
+ // present among transitive imports. In general we cannot assume this to
+ // be the case, but in the special case of removing a parameter it works
+ // because any parameter types must be present in export data.
+ var importer func(importPath string) (*types.Package, error)
+ {
+ var (
+ importsByPath = make(map[string]*types.Package) // cached imports
+ toSearch = []*types.Package{orig.GetTypes()} // packages to search
+ searched = make(map[string]bool) // path -> (false, if present in toSearch; true, if already searched)
+ )
+ importer = func(path string) (*types.Package, error) {
+ if p, ok := importsByPath[path]; ok {
+ return p, nil
+ }
+ for len(toSearch) > 0 {
+ pkg := toSearch[0]
+ toSearch = toSearch[1:]
+ searched[pkg.Path()] = true
+ for _, p := range pkg.Imports() {
+ // TODO(rfindley): this is incorrect: p.Path() is a package path,
+ // whereas path is an import path. We can fix this by reporting any
+ // newly added imports from inlining, or by using the ImporterFrom
+ // interface and package metadata.
+ //
+ // TODO(rfindley): can't the inliner also be wrong here? It's
+ // possible that an import path means different things depending on
+ // the location.
+ importsByPath[p.Path()] = p
+ if _, ok := searched[p.Path()]; !ok {
+ searched[p.Path()] = false
+ toSearch = append(toSearch, p)
+ }
+ }
+ if p, ok := importsByPath[path]; ok {
+ return p, nil
+ }
+ }
+ return nil, fmt.Errorf("missing import")
+ }
+ }
+ cfg := &types.Config{
+ Sizes: orig.Metadata().TypesSizes,
+ Importer: ImporterFunc(importer),
+ }
+
+ // Copied from cache/check.go.
+ // TODO(rfindley): factor this out and fix goVersionRx.
+ // Set Go dialect.
+ if module := orig.Metadata().Module; module != nil && module.GoVersion != "" {
+ goVersion := "go" + module.GoVersion
+ // types.NewChecker panics if GoVersion is invalid.
+ // An unparsable mod file should probably stop us
+ // before we get here, but double check just in case.
+ if goVersionRx.MatchString(goVersion) {
+ typesinternal.SetGoVersion(cfg, goVersion)
+ }
+ }
+ if expectErrors {
+ cfg.Error = func(err error) {
+ logf("re-type checking: expected error: %v", err)
+ }
+ }
+ typesinternal.SetUsesCgo(cfg)
+ checker := types.NewChecker(cfg, orig.FileSet(), pkg, info)
+ if err := checker.Files(files); err != nil && !expectErrors {
+ return nil, nil, fmt.Errorf("type checking rewritten package: %v", err)
+ }
+ }
+ return pkg, info, nil
+}
+
+// TODO(golang/go#63472): this looks wrong with the new Go version syntax.
+var goVersionRx = regexp.MustCompile(`^go([1-9][0-9]*)\.(0|[1-9][0-9]*)$`)
+
+func remove[T any](s []T, i int) []T {
+ return append(s[:i], s[i+1:]...)
+}
+
+// replaceFileDecl replaces old with new in the file described by pgf.
+//
+// TODO(rfindley): generalize, and combine with rewriteSignature.
+func replaceFileDecl(pgf *ParsedGoFile, old, new ast.Decl) ([]byte, error) {
+ i := findDecl(pgf.File, old)
+ if i == -1 {
+ return nil, bug.Errorf("didn't find old declaration")
+ }
+ start, end, err := safetoken.Offsets(pgf.Tok, old.Pos(), old.End())
+ if err != nil {
+ return nil, err
+ }
+ var out bytes.Buffer
+ out.Write(pgf.Src[:start])
+ fset := tokeninternal.FileSetFor(pgf.Tok)
+ if err := format.Node(&out, fset, new); err != nil {
+ return nil, bug.Errorf("formatting new node: %v", err)
+ }
+ out.Write(pgf.Src[end:])
+ return out.Bytes(), nil
+}
+
+// findDecl finds the index of decl in file.Decls.
+//
+// TODO: use slices.Index when it is available.
+func findDecl(file *ast.File, decl ast.Decl) int {
+ for i, d := range file.Decls {
+ if d == decl {
+ return i
+ }
+ }
+ return -1
+}
diff --git a/gopls/internal/lsp/source/inline.go b/gopls/internal/lsp/source/inline.go
index 4e6e16f..da3e8e5 100644
--- a/gopls/internal/lsp/source/inline.go
+++ b/gopls/internal/lsp/source/inline.go
@@ -106,9 +106,7 @@
// Users can consult the gopls event log to see
// why a particular inlining strategy was chosen.
- logf := func(format string, args ...any) {
- event.Log(ctx, "inliner: "+fmt.Sprintf(format, args...))
- }
+ logf := logger(ctx, "inliner", snapshot.Options().VerboseOutput)
callee, err := inline.AnalyzeCallee(logf, calleePkg.FileSet(), calleePkg.GetTypes(), calleePkg.GetTypesInfo(), calleeDecl, calleePGF.Src)
if err != nil {
@@ -136,3 +134,14 @@
TextEdits: diffToTextEdits(callerPGF.Tok, diff.Bytes(callerPGF.Src, got)),
}, nil
}
+
+// TODO(adonovan): change the inliner to instead accept an io.Writer.
+func logger(ctx context.Context, name string, verbose bool) func(format string, args ...any) {
+ if verbose {
+ return func(format string, args ...any) {
+ event.Log(ctx, name+": "+fmt.Sprintf(format, args...))
+ }
+ } else {
+ return func(string, ...any) {}
+ }
+}
diff --git a/gopls/internal/lsp/source/inline_all.go b/gopls/internal/lsp/source/inline_all.go
new file mode 100644
index 0000000..d814a11
--- /dev/null
+++ b/gopls/internal/lsp/source/inline_all.go
@@ -0,0 +1,263 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "context"
+ "fmt"
+ "go/ast"
+ "go/parser"
+
+ "golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/gopls/internal/bug"
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/span"
+ "golang.org/x/tools/internal/refactor/inline"
+)
+
+// inlineAllCalls inlines all calls to the original function declaration
+// described by callee, returning the resulting modified file content.
+//
+// inlining everything is currently an expensive operation: it involves re-type
+// checking every package that contains a potential call, as reported by
+// References. In cases where there are multiple calls per file, inlineAllCalls
+// must type check repeatedly for each additional call.
+//
+// The provided post processing function is applied to the resulting source
+// after each transformation. This is necessary because we are using this
+// function to inline synthetic wrappers for the purpose of signature
+// rewriting. The delegated function has a fake name that doesn't exist in the
+// snapshot, and so we can't re-type check until we replace this fake name.
+//
+// TODO(rfindley): this only works because removing a parameter is a very
+// narrow operation. A better solution would be to allow for ad-hoc snapshots
+// that expose the full machinery of real snapshots: minimal invalidation,
+// batched type checking, etc. Then we could actually rewrite the declaring
+// package in this snapshot (and so 'post' would not be necessary), and could
+// robustly re-type check for the purpose of iterative inlining, even if the
+// inlined code pulls in new imports that weren't present in export data.
+//
+// The code below notes where assumptions are made that only hold true in
+// the case of parameter removal (annotated with 'Assumption:').
+func inlineAllCalls(ctx context.Context, logf func(string, ...any), snapshot Snapshot, pkg Package, pgf *ParsedGoFile, origDecl *ast.FuncDecl, callee *inline.Callee, post func([]byte) []byte) (map[span.URI][]byte, error) {
+ // Collect references.
+ var refs []protocol.Location
+ {
+ funcPos, err := pgf.Mapper.PosPosition(pgf.Tok, origDecl.Name.NamePos)
+ if err != nil {
+ return nil, err
+ }
+ fh, err := snapshot.ReadFile(ctx, pgf.URI)
+ if err != nil {
+ return nil, err
+ }
+ refs, err = References(ctx, snapshot, fh, funcPos, false)
+ if err != nil {
+ return nil, fmt.Errorf("finding references to rewrite: %v", err)
+ }
+ }
+
+ // Type-check the narrowest package containing each reference.
+ // TODO(rfindley): we should expose forEachPackage in order to operate in
+ // parallel and to reduce peak memory for this operation.
+ var (
+ pkgForRef = make(map[protocol.Location]PackageID)
+ pkgs = make(map[PackageID]Package)
+ )
+ {
+ needPkgs := make(map[PackageID]struct{})
+ for _, ref := range refs {
+ md, err := NarrowestMetadataForFile(ctx, snapshot, ref.URI.SpanURI())
+ if err != nil {
+ return nil, fmt.Errorf("finding ref metadata: %v", err)
+ }
+ pkgForRef[ref] = md.ID
+ needPkgs[md.ID] = struct{}{}
+ }
+ var pkgIDs []PackageID
+ for id := range needPkgs { // TODO: use maps.Keys once it is available to us
+ pkgIDs = append(pkgIDs, id)
+ }
+
+ refPkgs, err := snapshot.TypeCheck(ctx, pkgIDs...)
+ if err != nil {
+ return nil, fmt.Errorf("type checking reference packages: %v", err)
+ }
+
+ for _, p := range refPkgs {
+ pkgs[p.Metadata().ID] = p
+ }
+ }
+
+ // Organize calls by top file declaration. Calls within a single file may
+ // affect each other, as the inlining edit may affect the surrounding scope
+ // or imports. Therefore, when inlining subsequent calls in the same
+ // declaration, we must re-type check.
+
+ type fileCalls struct {
+ pkg Package
+ pgf *ParsedGoFile
+ calls []*ast.CallExpr
+ }
+
+ refsByFile := make(map[span.URI]*fileCalls)
+ for _, ref := range refs {
+ refpkg := pkgs[pkgForRef[ref]]
+ pgf, err := refpkg.File(ref.URI.SpanURI())
+ if err != nil {
+ return nil, bug.Errorf("finding %s in %s: %v", ref.URI, refpkg.Metadata().ID, err)
+ }
+ start, end, err := pgf.RangePos(ref.Range)
+ if err != nil {
+ return nil, bug.Errorf("RangePos(ref): %v", err)
+ }
+
+ // Look for the surrounding call expression.
+ var (
+ name *ast.Ident
+ call *ast.CallExpr
+ )
+ path, _ := astutil.PathEnclosingInterval(pgf.File, start, end)
+ name, _ = path[0].(*ast.Ident)
+ if _, ok := path[1].(*ast.SelectorExpr); ok {
+ call, _ = path[2].(*ast.CallExpr)
+ } else {
+ call, _ = path[1].(*ast.CallExpr)
+ }
+ if name == nil || call == nil {
+ // TODO(rfindley): handle this case with eta-abstraction:
+ // a reference to the target function f in a non-call position
+ // use(f)
+ // is replaced by
+ // use(func(...) { f(...) })
+ return nil, fmt.Errorf("cannot inline: found non-call function reference %v", ref)
+ }
+ // Sanity check.
+ if obj := refpkg.GetTypesInfo().ObjectOf(name); obj == nil ||
+ obj.Name() != origDecl.Name.Name ||
+ obj.Pkg() == nil ||
+ obj.Pkg().Path() != string(pkg.Metadata().PkgPath) {
+ return nil, bug.Errorf("cannot inline: corrupted reference %v", ref)
+ }
+
+ callInfo, ok := refsByFile[ref.URI.SpanURI()]
+ if !ok {
+ callInfo = &fileCalls{
+ pkg: refpkg,
+ pgf: pgf,
+ }
+ refsByFile[ref.URI.SpanURI()] = callInfo
+ }
+ callInfo.calls = append(callInfo.calls, call)
+ }
+
+ // Inline each call within the same decl in sequence, re-typechecking after
+ // each one. If there is only a single call within the decl, we can avoid
+ // additional type checking.
+ //
+ // Assumption: inlining does not affect the package scope, so we can operate
+ // on separate files independently.
+ result := make(map[span.URI][]byte)
+ for uri, callInfo := range refsByFile {
+ var (
+ calls = callInfo.calls
+ fset = callInfo.pkg.FileSet()
+ tpkg = callInfo.pkg.GetTypes()
+ tinfo = callInfo.pkg.GetTypesInfo()
+ file = callInfo.pgf.File
+ content = callInfo.pgf.Src
+ )
+ currentCall := 0
+ for currentCall < len(calls) {
+ caller := &inline.Caller{
+ Fset: fset,
+ Types: tpkg,
+ Info: tinfo,
+ File: file,
+ Call: calls[currentCall],
+ Content: content,
+ }
+ var err error
+ content, err = inline.Inline(logf, caller, callee)
+ if err != nil {
+ return nil, fmt.Errorf("inlining failed: %v", err)
+ }
+ if post != nil {
+ content = post(content)
+ }
+ if len(calls) <= 1 {
+ // No need to re-type check, as we've inlined all calls.
+ break
+ }
+
+ // TODO(rfindley): develop a theory of "trivial" inlining, which are
+ // inlinings that don't require re-type checking.
+ //
+ // In principle, if the inlining only involves replacing one call with
+ // another, the scope of the caller is unchanged and there is no need to
+ // type check again before inlining subsequent calls (edits should not
+ // overlap, and should not affect each other semantically). However, it
+ // feels sufficiently complicated that, to be safe, this optimization is
+ // deferred until later.
+
+ file, err = parser.ParseFile(fset, uri.Filename(), content, parser.ParseComments|parser.SkipObjectResolution)
+ if err != nil {
+ return nil, bug.Errorf("inlined file failed to parse: %v", err)
+ }
+
+ // After inlining one call with a removed parameter, the package will
+ // fail to type check due to "not enough arguments". Therefore, we must
+ // allow type errors here.
+ //
+ // Assumption: the resulting type errors do not affect the correctness of
+ // subsequent inlining, because invalid arguments to a call do not affect
+ // anything in the surrounding scope.
+ //
+ // TODO(rfindley): improve this.
+ tpkg, tinfo, err = reTypeCheck(logf, callInfo.pkg, map[span.URI]*ast.File{uri: file}, true)
+ if err != nil {
+ return nil, bug.Errorf("type checking after inlining failed: %v", err)
+ }
+
+ // Collect calls to the target function in the modified declaration.
+ var calls2 []*ast.CallExpr
+ ast.Inspect(file, func(n ast.Node) bool {
+ if call, ok := n.(*ast.CallExpr); ok {
+ fn := typeutil.StaticCallee(tinfo, call)
+ if fn != nil && fn.Pkg().Path() == string(pkg.Metadata().PkgPath) && fn.Name() == origDecl.Name.Name {
+ calls2 = append(calls2, call)
+ }
+ }
+ return true
+ })
+
+ // If the number of calls has increased, this process will never cease.
+ // If the number of calls has decreased, assume that inlining removed a
+ // call.
+ // If the number of calls didn't change, assume that inlining replaced
+ // a call, and move on to the next.
+ //
+ // Assumption: we're inlining a call that has at most one recursive
+ // reference (which holds for signature rewrites).
+ //
+ // TODO(rfindley): this isn't good enough. We should be able to support
+ // inlining all existing calls even if they increase calls. How do we
+ // correlate the before and after syntax?
+ switch {
+ case len(calls2) > len(calls):
+ return nil, fmt.Errorf("inlining increased calls %d->%d, possible recursive call? content:\n%s", len(calls), len(calls2), content)
+ case len(calls2) < len(calls):
+ calls = calls2
+ case len(calls2) == len(calls):
+ calls = calls2
+ currentCall++
+ }
+ }
+
+ result[callInfo.pgf.URI] = content
+ }
+ return result, nil
+}
diff --git a/gopls/internal/lsp/source/util.go b/gopls/internal/lsp/source/util.go
index d0ecd50..2cbce61 100644
--- a/gopls/internal/lsp/source/util.go
+++ b/gopls/internal/lsp/source/util.go
@@ -119,6 +119,8 @@
func FormatNode(fset *token.FileSet, n ast.Node) string {
var buf strings.Builder
if err := printer.Fprint(&buf, fset, n); err != nil {
+ // TODO(rfindley): we should use bug.Reportf here.
+ // We encounter this during completion.resolveInvalid.
return ""
}
return buf.String()
@@ -531,3 +533,9 @@
}
return nil
}
+
+// An ImporterFunc is an implementation of the single-method
+// types.Importer interface based on a function value.
+type ImporterFunc func(path string) (*types.Package, error)
+
+func (f ImporterFunc) Import(path string) (*types.Package, error) { return f(path) }
diff --git a/gopls/internal/lsp/source/view.go b/gopls/internal/lsp/source/view.go
index f4a8192..d501e04 100644
--- a/gopls/internal/lsp/source/view.go
+++ b/gopls/internal/lsp/source/view.go
@@ -284,24 +284,44 @@
return []label.Label{tag.Snapshot.Of(snapshot.SequenceID()), tag.Directory.Of(snapshot.View().Folder())}
}
-// NarrowestPackageForFile is a convenience function that selects the
-// narrowest non-ITV package to which this file belongs, type-checks
-// it in the requested mode (full or workspace), and returns it, along
-// with the parse tree of that file.
+// NarrowestPackageForFile is a convenience function that selects the narrowest
+// non-ITV package to which this file belongs, type-checks it in the requested
+// mode (full or workspace), and returns it, along with the parse tree of that
+// file.
//
-// The "narrowest" package is the one with the fewest number of files
-// that includes the given file. This solves the problem of test
-// variants, as the test will have more files than the non-test package.
-// (Historically the preference was a parameter but widest was almost
-// never needed.)
+// The "narrowest" package is the one with the fewest number of files that
+// includes the given file. This solves the problem of test variants, as the
+// test will have more files than the non-test package.
//
-// An intermediate test variant (ITV) package has identical source
-// to a regular package but resolves imports differently.
-// gopls should never need to type-check them.
+// An intermediate test variant (ITV) package has identical source to a regular
+// package but resolves imports differently. gopls should never need to
+// type-check them.
//
-// Type-checking is expensive. Call snapshot.ParseGo if all you need
-// is a parse tree, or snapshot.MetadataForFile if you only need metadata.
+// Type-checking is expensive. Call snapshot.ParseGo if all you need is a parse
+// tree, or snapshot.MetadataForFile if you only need metadata.
func NarrowestPackageForFile(ctx context.Context, snapshot Snapshot, uri span.URI) (Package, *ParsedGoFile, error) {
+ return selectPackageForFile(ctx, snapshot, uri, func(metas []*Metadata) *Metadata { return metas[0] })
+}
+
+// WidestPackageForFile is a convenience function that selects the widest
+// non-ITV package to which this file belongs, type-checks it in the requested
+// mode (full or workspace), and returns it, along with the parse tree of that
+// file.
+//
+// The "widest" package is the one with the most number of files that includes
+// the given file. Which is the test variant if one exists.
+//
+// An intermediate test variant (ITV) package has identical source to a regular
+// package but resolves imports differently. gopls should never need to
+// type-check them.
+//
+// Type-checking is expensive. Call snapshot.ParseGo if all you need is a parse
+// tree, or snapshot.MetadataForFile if you only need metadata.
+func WidestPackageForFile(ctx context.Context, snapshot Snapshot, uri span.URI) (Package, *ParsedGoFile, error) {
+ return selectPackageForFile(ctx, snapshot, uri, func(metas []*Metadata) *Metadata { return metas[len(metas)-1] })
+}
+
+func selectPackageForFile(ctx context.Context, snapshot Snapshot, uri span.URI, selector func([]*Metadata) *Metadata) (Package, *ParsedGoFile, error) {
metas, err := snapshot.MetadataForFile(ctx, uri)
if err != nil {
return nil, nil, err
@@ -310,8 +330,8 @@
if len(metas) == 0 {
return nil, nil, fmt.Errorf("no package metadata for file %s", uri)
}
- narrowest := metas[0]
- pkgs, err := snapshot.TypeCheck(ctx, narrowest.ID)
+ md := selector(metas)
+ pkgs, err := snapshot.TypeCheck(ctx, md.ID)
if err != nil {
return nil, nil, err
}
diff --git a/gopls/internal/regtest/marker/testdata/codeaction/removeparam.txt b/gopls/internal/regtest/marker/testdata/codeaction/removeparam.txt
new file mode 100644
index 0000000..c7f8d54
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/codeaction/removeparam.txt
@@ -0,0 +1,264 @@
+This test exercises the refactoring to remove unused parameters.
+
+-- go.mod --
+module unused.mod
+
+go 1.18
+
+-- a/a.go --
+package a
+
+func A(x, unused int) int { //@codeaction("refactor.rewrite", "unused", "unused", a)
+ return x
+}
+
+-- @a/a/a.go --
+package a
+
+func A(x int) int { //@codeaction("refactor.rewrite", "unused", "unused", a)
+ return x
+}
+
+-- a/a2.go --
+package a
+
+func _() {
+ A(1, 2)
+}
+
+-- a/a_test.go --
+package a
+
+func _() {
+ A(1, 2)
+}
+
+-- a/a_x_test.go --
+package a_test
+
+import "unused.mod/a"
+
+func _() {
+ a.A(1, 2)
+}
+
+-- b/b.go --
+package b
+
+import "unused.mod/a"
+
+func f() int {
+ return 1
+}
+
+func g() int {
+ return 2
+}
+
+func _() {
+ a.A(f(), 1)
+}
+
+-- @a/a/a2.go --
+package a
+
+func _() {
+ A(1)
+}
+-- @a/a/a_test.go --
+package a
+
+func _() {
+ A(1)
+}
+-- @a/a/a_x_test.go --
+package a_test
+
+import "unused.mod/a"
+
+func _() {
+ a.A(1)
+}
+-- @a/b/b.go --
+package b
+
+import "unused.mod/a"
+
+func f() int {
+ return 1
+}
+
+func g() int {
+ return 2
+}
+
+func _() {
+ a.A(f())
+}
+-- field/field.go --
+package field
+
+func Field(x int, field int) { //@codeaction("refactor.rewrite", "int", "int", field)
+}
+
+func _() {
+ Field(1, 2)
+}
+-- @field/field/field.go --
+package field
+
+func Field(field int) { //@codeaction("refactor.rewrite", "int", "int", field)
+}
+
+func _() {
+ Field(2)
+}
+-- ellipsis/ellipsis.go --
+package ellipsis
+
+func Ellipsis(...any) { //@codeaction("refactor.rewrite", "any", "any", ellipsis)
+}
+
+func _() {
+ // TODO(rfindley): investigate the broken formatting resulting from these inlinings.
+ Ellipsis()
+ Ellipsis(1)
+ Ellipsis(1, 2)
+ Ellipsis(1, f(), g())
+ Ellipsis(h())
+ Ellipsis(i()...)
+}
+
+func f() int
+func g() int
+func h() (int, int)
+func i() []any
+
+-- @ellipsis/ellipsis/ellipsis.go --
+package ellipsis
+
+func Ellipsis() { //@codeaction("refactor.rewrite", "any", "any", ellipsis)
+}
+
+func _() {
+ // TODO(rfindley): investigate the broken formatting resulting from these inlinings.
+ Ellipsis()
+ Ellipsis()
+ Ellipsis()
+
+ var _ []any = []any{1, f(), g()}
+ Ellipsis()
+
+ func(_ ...any) {
+ Ellipsis()
+ }(h())
+
+ var _ []any = i()
+ Ellipsis()
+
+}
+
+func f() int
+func g() int
+func h() (int, int)
+func i() []any
+-- ellipsis2/ellipsis2.go --
+package ellipsis2
+
+func Ellipsis2(_, _ int, rest ...int) { //@codeaction("refactor.rewrite", "_", "_", ellipsis2)
+}
+
+func _() {
+ Ellipsis2(1,2,3)
+ Ellipsis2(h())
+ Ellipsis2(1,2, []int{3, 4}...)
+}
+
+func h() (int, int)
+
+-- @ellipsis2/ellipsis2/ellipsis2.go --
+package ellipsis2
+
+func Ellipsis2(_ int, rest ...int) { //@codeaction("refactor.rewrite", "_", "_", ellipsis2)
+}
+
+func _() {
+ Ellipsis2(2, []int{3}...)
+ func(_, blank0 int, rest ...int) {
+ Ellipsis2(blank0, rest...)
+ }(h())
+ Ellipsis2(2, []int{3, 4}...)
+}
+
+func h() (int, int)
+-- overlapping/overlapping.go --
+package overlapping
+
+func Overlapping(i int) int { //@codeaction("refactor.rewrite", re"(i) int", re"(i) int", overlapping)
+ return 0
+}
+
+func _() {
+ x := Overlapping(Overlapping(0))
+ _ = x
+}
+-- @overlapping/overlapping/overlapping.go --
+package overlapping
+
+func Overlapping() int { //@codeaction("refactor.rewrite", re"(i) int", re"(i) int", overlapping)
+ return 0
+}
+
+func _() {
+ x := func(_ int) int {
+ return Overlapping()
+ }(Overlapping())
+ _ = x
+}
+-- effects/effects.go --
+package effects
+
+func effects(x, y int) int { //@codeaction("refactor.rewrite", "y", "y", effects)
+ return x
+}
+
+func f() int
+func g() int
+
+func _() {
+ effects(f(), g())
+ effects(f(), g())
+}
+-- @effects/effects/effects.go --
+package effects
+
+func effects(x int) int { //@codeaction("refactor.rewrite", "y", "y", effects)
+ return x
+}
+
+func f() int
+func g() int
+
+func _() {
+
+ var x, _ int = f(), g()
+ effects(x)
+
+ {
+ var x, _ int = f(), g()
+ effects(x)
+ }
+}
+-- recursive/recursive.go --
+package recursive
+
+func Recursive(x int) int { //@codeaction("refactor.rewrite", "x", "x", recursive)
+ return Recursive(1)
+}
+
+-- @recursive/recursive/recursive.go --
+package recursive
+
+func Recursive() int { //@codeaction("refactor.rewrite", "x", "x", recursive)
+ return Recursive()
+}
diff --git a/gopls/internal/regtest/marker/testdata/codeaction/removeparam_formatting.txt b/gopls/internal/regtest/marker/testdata/codeaction/removeparam_formatting.txt
new file mode 100644
index 0000000..39f3ddb
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/codeaction/removeparam_formatting.txt
@@ -0,0 +1,55 @@
+This test exercises behavior of change signature refactoring with respect to
+comments.
+
+Currently, inline comments around arguments or parameters are dropped, which is
+probably acceptable. Fixing this is likely intractable without fixing comment
+representation in the AST.
+
+-- go.mod --
+module unused.mod
+
+go 1.18
+
+-- a/a.go --
+package a
+
+// A doc comment.
+func A(x /* used parameter */, unused int /* unused parameter */ ) int { //@codeaction("refactor.rewrite", "unused", "unused", a)
+ // about to return
+ return x // returning
+ // just returned
+}
+
+// This function makes calls.
+func _() {
+ // about to call
+ A(one() /* used arg */, 2 /* unused arg */) // calling
+ // just called
+}
+
+func one() int {
+ // I should be unaffected!
+ return 1
+}
+
+-- @a/a/a.go --
+package a
+
+// A doc comment.
+func A(x int) int { //@codeaction("refactor.rewrite", "unused", "unused", a)
+ // about to return
+ return x // returning
+ // just returned
+}
+
+// This function makes calls.
+func _() {
+ // about to call
+ A(one()) // calling
+ // just called
+}
+
+func one() int {
+ // I should be unaffected!
+ return 1
+}
diff --git a/gopls/internal/regtest/marker/testdata/codeaction/removeparam_funcvalue.txt b/gopls/internal/regtest/marker/testdata/codeaction/removeparam_funcvalue.txt
new file mode 100644
index 0000000..4173184
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/codeaction/removeparam_funcvalue.txt
@@ -0,0 +1,19 @@
+This test exercises change signature refactoring handling of function values.
+
+TODO(rfindley): use a literalization strategy to allow these references.
+
+-- go.mod --
+module unused.mod
+
+go 1.18
+
+-- a/a.go --
+package a
+
+func A(x, unused int) int { //@codeactionerr("refactor.rewrite", "unused", "unused", re"non-call function reference")
+ return x
+}
+
+func _() {
+ _ = A
+}
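For context, here is a hedged sketch of the kind of rewrite the literalization TODO above refers to: the non-call reference `_ = A` would be wrapped in a function literal that preserves the old two-parameter signature and forwards to the refactored A. This is hypothetical output for illustration only, not something the refactoring produces today (it currently reports the "non-call function reference" error exercised by this test).

```go
// Hypothetical post-refactoring state under a literalization strategy
// (illustrative only; gopls currently rejects this case).
package a

func A(x int) int { // unused parameter removed
	return x
}

func _() {
	// A wrapper literal keeps the old (x, unused int) signature at the
	// reference site and forwards to the new, narrower A.
	_ = func(x, unused int) int { return A(x) }
}
```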
diff --git a/gopls/internal/regtest/marker/testdata/codeaction/removeparam_imports.txt b/gopls/internal/regtest/marker/testdata/codeaction/removeparam_imports.txt
new file mode 100644
index 0000000..8a09cb9
--- /dev/null
+++ b/gopls/internal/regtest/marker/testdata/codeaction/removeparam_imports.txt
@@ -0,0 +1,171 @@
+This test checks the behavior of removing a parameter with respect to various
+import scenarios.
+
+-- go.mod --
+module mod.test
+
+go 1.21
+
+
+-- a/a1.go --
+package a
+
+import "mod.test/b"
+
+func _() {
+ b.B(<-b.Chan, <-b.Chan)
+}
+
+-- a/a2.go --
+package a
+
+import "mod.test/b"
+
+func _() {
+ b.B(<-b.Chan, <-b.Chan)
+ b.B(<-b.Chan, <-b.Chan)
+}
+
+-- a/a3.go --
+package a
+
+import "mod.test/b"
+
+func _() {
+ b.B(<-b.Chan, <-b.Chan)
+}
+
+func _() {
+ b.B(<-b.Chan, <-b.Chan)
+}
+
+-- a/a4.go --
+package a
+
+// TODO(rfindley/adonovan): inlining here adds an additional import of
+// mod.test/b. Can we do better?
+import (
+ . "mod.test/b"
+)
+
+func _() {
+ B(<-Chan, <-Chan)
+}
+
+-- b/b.go --
+package b
+
+import "mod.test/c"
+
+var Chan chan c.C
+
+func B(x, y c.C) { //@codeaction("refactor.rewrite", "x", "x", b)
+}
+
+-- c/c.go --
+package c
+
+type C int
+
+-- d/d.go --
+package d
+
+// Removing the parameter should remove this import.
+import "mod.test/c"
+
+func D(x c.C) { //@codeaction("refactor.rewrite", "x", "x", d)
+}
+
+func _() {
+ D(1)
+}
+
+-- @b/a/a1.go --
+package a
+
+import (
+ "mod.test/b"
+ c "mod.test/c"
+)
+
+func _() {
+
+ var _ c.C = <-b.Chan
+ b.B(<-b.Chan)
+
+}
+-- @b/a/a2.go --
+package a
+
+import (
+ "mod.test/b"
+ c "mod.test/c"
+)
+
+func _() {
+
+ var _ c.C = <-b.Chan
+ b.B(<-b.Chan)
+
+ var _ c.C = <-b.Chan
+ b.B(<-b.Chan)
+
+}
+-- @b/a/a3.go --
+package a
+
+import (
+ "mod.test/b"
+ c "mod.test/c"
+)
+
+func _() {
+
+ var _ c.C = <-b.Chan
+ b.B(<-b.Chan)
+
+}
+
+func _() {
+
+ var _ c.C = <-b.Chan
+ b.B(<-b.Chan)
+
+}
+-- @b/a/a4.go --
+package a
+
+// TODO(rfindley/adonovan): inlining here adds an additional import of
+// mod.test/b. Can we do better?
+import (
+ . "mod.test/b"
+ b "mod.test/b"
+ c "mod.test/c"
+)
+
+func _() {
+
+ var _ c.C = <-Chan
+ b.B(<-Chan)
+
+}
+-- @b/b/b.go --
+package b
+
+import "mod.test/c"
+
+var Chan chan c.C
+
+func B(y c.C) { //@codeaction("refactor.rewrite", "x", "x", b)
+}
+-- @d/d/d.go --
+package d
+
+// Removing the parameter should remove this import.
+
+func D() { //@codeaction("refactor.rewrite", "x", "x", d)
+}
+
+func _() {
+ D()
+}
diff --git a/internal/astutil/clone.go b/internal/astutil/clone.go
new file mode 100644
index 0000000..d5ee82c
--- /dev/null
+++ b/internal/astutil/clone.go
@@ -0,0 +1,71 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package astutil
+
+import (
+ "go/ast"
+ "reflect"
+)
+
+// CloneNode returns a deep copy of a Node.
+// It omits pointers to ast.{Scope,Object} variables.
+func CloneNode[T ast.Node](n T) T {
+ return cloneNode(n).(T)
+}
+
+func cloneNode(n ast.Node) ast.Node {
+ var clone func(x reflect.Value) reflect.Value
+ set := func(dst, src reflect.Value) {
+ src = clone(src)
+ if src.IsValid() {
+ dst.Set(src)
+ }
+ }
+ clone = func(x reflect.Value) reflect.Value {
+ switch x.Kind() {
+ case reflect.Ptr:
+ if x.IsNil() {
+ return x
+ }
+ // Skip fields of types potentially involved in cycles.
+ switch x.Interface().(type) {
+ case *ast.Object, *ast.Scope:
+ return reflect.Zero(x.Type())
+ }
+ y := reflect.New(x.Type().Elem())
+ set(y.Elem(), x.Elem())
+ return y
+
+ case reflect.Struct:
+ y := reflect.New(x.Type()).Elem()
+ for i := 0; i < x.Type().NumField(); i++ {
+ set(y.Field(i), x.Field(i))
+ }
+ return y
+
+ case reflect.Slice:
+ if x.IsNil() {
+ return x
+ }
+ y := reflect.MakeSlice(x.Type(), x.Len(), x.Cap())
+ for i := 0; i < x.Len(); i++ {
+ set(y.Index(i), x.Index(i))
+ }
+ return y
+
+ case reflect.Interface:
+ y := reflect.New(x.Type()).Elem()
+ set(y, x.Elem())
+ return y
+
+ case reflect.Array, reflect.Chan, reflect.Func, reflect.Map, reflect.UnsafePointer:
+ panic(x) // unreachable in AST
+
+ default:
+ return x // bool, string, number
+ }
+ }
+ return clone(reflect.ValueOf(n)).Interface().(ast.Node)
+}
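Since CloneNode is the newly exported form of the helper removed from inline.go below, a minimal usage sketch may help. Note the caveats: internal/astutil is an internal package, so this compiles only within the golang.org/x/tools module, and the parsed expression is a made-up input chosen for illustration.

```go
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/printer"
	"go/token"
	"log"
	"os"

	internalastutil "golang.org/x/tools/internal/astutil"
)

func main() {
	fset := token.NewFileSet()
	expr, err := parser.ParseExprFrom(fset, "example.go", "f(x + 1)", 0)
	if err != nil {
		log.Fatal(err)
	}

	// Deep-copy the expression: the clone shares no nodes with the original,
	// so mutating it leaves expr untouched.
	clone := internalastutil.CloneNode(expr).(*ast.CallExpr)
	clone.Fun = ast.NewIdent("g")

	printer.Fprint(os.Stdout, fset, expr) // f(x + 1)
	fmt.Println()
	printer.Fprint(os.Stdout, fset, clone) // g(x + 1)
	fmt.Println()
}
```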
diff --git a/internal/refactor/inline/inline.go b/internal/refactor/inline/inline.go
index 53119e3..53adfbc 100644
--- a/internal/refactor/inline/inline.go
+++ b/internal/refactor/inline/inline.go
@@ -21,6 +21,7 @@
"golang.org/x/tools/go/ast/astutil"
"golang.org/x/tools/go/types/typeutil"
"golang.org/x/tools/imports"
+ internalastutil "golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/typeparams"
)
@@ -487,8 +488,12 @@
if samePkg {
// Caller and callee are in same package.
// Check caller has not shadowed the decl.
- found := caller.lookup(obj.Name) // can't fail
- if !isPkgLevel(found) {
+ //
+ // This may fail if the callee is "fake", such as for signature
+ // refactoring where the callee is modified to be a trivial wrapper
+ // around the refactored signature.
+ found := caller.lookup(obj.Name)
+ if found != nil && !isPkgLevel(found) {
return nil, fmt.Errorf("cannot inline because %q is shadowed in caller by a %s (line %d)",
obj.Name, objectKind(found),
caller.Fset.PositionFor(found.Pos(), false).Line)
@@ -1323,7 +1328,7 @@
logf("replacing parameter %q by argument %q",
param.info.Name, debugFormatNode(caller.Fset, arg.expr))
for _, ref := range param.info.Refs {
- replaceCalleeID(ref, cloneNode(arg.expr).(ast.Expr))
+ replaceCalleeID(ref, internalastutil.CloneNode(arg.expr).(ast.Expr))
}
params[i] = nil // substituted
args[i] = nil // substituted
@@ -2340,60 +2345,6 @@
}
}
-// cloneNode returns a deep copy of a Node.
-// It omits pointers to ast.{Scope,Object} variables.
-func cloneNode(n ast.Node) ast.Node {
- var clone func(x reflect.Value) reflect.Value
- set := func(dst, src reflect.Value) {
- src = clone(src)
- if src.IsValid() {
- dst.Set(src)
- }
- }
- clone = func(x reflect.Value) reflect.Value {
- switch x.Kind() {
- case reflect.Ptr:
- if x.IsNil() {
- return x
- }
- // Skip fields of types potentially involved in cycles.
- switch x.Interface().(type) {
- case *ast.Object, *ast.Scope:
- return reflect.Zero(x.Type())
- }
- y := reflect.New(x.Type().Elem())
- set(y.Elem(), x.Elem())
- return y
-
- case reflect.Struct:
- y := reflect.New(x.Type()).Elem()
- for i := 0; i < x.Type().NumField(); i++ {
- set(y.Field(i), x.Field(i))
- }
- return y
-
- case reflect.Slice:
- y := reflect.MakeSlice(x.Type(), x.Len(), x.Cap())
- for i := 0; i < x.Len(); i++ {
- set(y.Index(i), x.Index(i))
- }
- return y
-
- case reflect.Interface:
- y := reflect.New(x.Type()).Elem()
- set(y, x.Elem())
- return y
-
- case reflect.Array, reflect.Chan, reflect.Func, reflect.Map, reflect.UnsafePointer:
- panic(x) // unreachable in AST
-
- default:
- return x // bool, string, number
- }
- }
- return clone(reflect.ValueOf(n)).Interface().(ast.Node)
-}
-
// clearPositions destroys token.Pos information within the tree rooted at root,
// as positions in callee trees may cause caller comments to be emitted prematurely.
//
To view, visit change 532495.