Source file src/cmd/vendor/golang.org/x/tools/internal/analysisinternal/analysis.go

     1  // Copyright 2020 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  // Package analysisinternal provides gopls' internal analyses with a
     6  // number of helper functions that operate on typed syntax trees.
     7  package analysisinternal
     8  
     9  import (
    10  	"bytes"
    11  	"cmp"
    12  	"fmt"
    13  	"go/ast"
    14  	"go/printer"
    15  	"go/scanner"
    16  	"go/token"
    17  	"go/types"
    18  	"iter"
    19  	pathpkg "path"
    20  	"slices"
    21  	"strings"
    22  
    23  	"golang.org/x/tools/go/analysis"
    24  	"golang.org/x/tools/go/ast/inspector"
    25  	"golang.org/x/tools/internal/astutil/cursor"
    26  	"golang.org/x/tools/internal/typesinternal"
    27  )
    28  
    29  // Deprecated: this heuristic is ill-defined.
    30  // TODO(adonovan): move to sole use in gopls/internal/cache.
    31  func TypeErrorEndPos(fset *token.FileSet, src []byte, start token.Pos) token.Pos {
    32  	// Get the end position for the type error.
    33  	file := fset.File(start)
    34  	if file == nil {
    35  		return start
    36  	}
    37  	if offset := file.PositionFor(start, false).Offset; offset > len(src) {
    38  		return start
    39  	} else {
    40  		src = src[offset:]
    41  	}
    42  
    43  	// Attempt to find a reasonable end position for the type error.
    44  	//
    45  	// TODO(rfindley): the heuristic implemented here is unclear. It looks like
    46  	// it seeks the end of the primary operand starting at start, but that is not
    47  	// quite implemented (for example, given a func literal this heuristic will
    48  	// return the range of the func keyword).
    49  	//
    50  	// We should formalize this heuristic, or deprecate it by finally proposing
    51  	// to add end position to all type checker errors.
    52  	//
    53  	// Nevertheless, ensure that the end position at least spans the current
    54  	// token at the cursor (this was golang/go#69505).
    55  	end := start
    56  	{
    57  		var s scanner.Scanner
    58  		fset := token.NewFileSet()
    59  		f := fset.AddFile("", fset.Base(), len(src))
    60  		s.Init(f, src, nil /* no error handler */, scanner.ScanComments)
    61  		pos, tok, lit := s.Scan()
    62  		if tok != token.SEMICOLON && token.Pos(f.Base()) <= pos && pos <= token.Pos(f.Base()+f.Size()) {
    63  			off := file.Offset(pos) + len(lit)
    64  			src = src[off:]
    65  			end += token.Pos(off)
    66  		}
    67  	}
    68  
    69  	// Look for bytes that might terminate the current operand. See note above:
    70  	// this is imprecise.
    71  	if width := bytes.IndexAny(src, " \n,():;[]+-*/"); width > 0 {
    72  		end += token.Pos(width)
    73  	}
    74  	return end
    75  }
    76  
    77  // WalkASTWithParent walks the AST rooted at n. The semantics are
    78  // similar to ast.Inspect except it does not call f(nil).
    79  func WalkASTWithParent(n ast.Node, f func(n ast.Node, parent ast.Node) bool) {
    80  	var ancestors []ast.Node
    81  	ast.Inspect(n, func(n ast.Node) (recurse bool) {
    82  		if n == nil {
    83  			ancestors = ancestors[:len(ancestors)-1]
    84  			return false
    85  		}
    86  
    87  		var parent ast.Node
    88  		if len(ancestors) > 0 {
    89  			parent = ancestors[len(ancestors)-1]
    90  		}
    91  		ancestors = append(ancestors, n)
    92  		return f(n, parent)
    93  	})
    94  }
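
// A minimal usage sketch (hypothetical caller, not part of this package):
// report each identifier together with the dynamic type of its parent node.
// For the root node, parent is nil.
func printIdentParents(file *ast.File) {
	WalkASTWithParent(file, func(n, parent ast.Node) bool {
		if id, ok := n.(*ast.Ident); ok {
			fmt.Printf("%s is a child of %T\n", id.Name, parent)
		}
		return true // keep recursing into children
	})
}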
    95  
    96  // MatchingIdents finds the names of all identifiers in 'node' that match any of the given types.
    97  // 'pos' represents the position at which the identifiers may be inserted. 'pos' must be within
    98  // the scope of each identifier we select. Otherwise, we will insert a variable at 'pos' that
    99  // is unrecognized.
   100  func MatchingIdents(typs []types.Type, node ast.Node, pos token.Pos, info *types.Info, pkg *types.Package) map[types.Type][]string {
   101  
   102  	// Initialize matches to contain the variable types we are searching for.
   103  	matches := make(map[types.Type][]string)
   104  	for _, typ := range typs {
   105  		if typ == nil {
   106  			continue // TODO(adonovan): is this reachable?
   107  		}
   108  		matches[typ] = nil // create entry
   109  	}
   110  
   111  	seen := map[types.Object]struct{}{}
   112  	ast.Inspect(node, func(n ast.Node) bool {
   113  		if n == nil {
   114  			return false
   115  		}
   116  		// Prevent circular definitions. If 'pos' is within an assignment statement, do not
   117  		// allow any identifiers in that assignment statement to be selected. Otherwise,
   118  		// we could do the following, where 'x' satisfies the type of 'f0':
   119  		//
   120  		// x := fakeStruct{f0: x}
   121  		//
   122  		if assign, ok := n.(*ast.AssignStmt); ok && pos > assign.Pos() && pos <= assign.End() {
   123  			return false
   124  		}
   125  		if n.End() > pos {
   126  			return n.Pos() <= pos
   127  		}
   128  		ident, ok := n.(*ast.Ident)
   129  		if !ok || ident.Name == "_" {
   130  			return true
   131  		}
   132  		obj := info.Defs[ident]
   133  		if obj == nil || obj.Type() == nil {
   134  			return true
   135  		}
   136  		if _, ok := obj.(*types.TypeName); ok {
   137  			return true
   138  		}
   139  		// Prevent duplicates in matches' values.
   140  		if _, ok = seen[obj]; ok {
   141  			return true
   142  		}
   143  		seen[obj] = struct{}{}
   144  		// Find the scope for the given position. Then, check whether the object
   145  		// exists within the scope.
   146  		innerScope := pkg.Scope().Innermost(pos)
   147  		if innerScope == nil {
   148  			return true
   149  		}
   150  		_, foundObj := innerScope.LookupParent(ident.Name, pos)
   151  		if foundObj != obj {
   152  			return true
   153  		}
   154  		// The object must match one of the types that we are searching for.
   155  		// TODO(adonovan): opt: use typeutil.Map?
   156  		if names, ok := matches[obj.Type()]; ok {
   157  			matches[obj.Type()] = append(names, ident.Name)
   158  		} else {
   159  			// If the object type does not exactly match
   160  			// any of the target types, greedily find the first
   161  			// target type that the object type can satisfy.
   162  			for typ := range matches {
   163  				if equivalentTypes(obj.Type(), typ) {
   164  					matches[typ] = append(matches[typ], ident.Name)
   165  				}
   166  			}
   167  		}
   168  		return true
   169  	})
   170  	return matches
   171  }
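
// A minimal usage sketch (hypothetical caller, not part of this package):
// gather in-scope identifiers within fn whose types match the parameters of a
// call that is about to be inserted at pos.
func candidateArgs(fn *ast.FuncDecl, sig *types.Signature, pos token.Pos, info *types.Info, pkg *types.Package) map[types.Type][]string {
	var paramTypes []types.Type
	for i := 0; i < sig.Params().Len(); i++ {
		paramTypes = append(paramTypes, sig.Params().At(i).Type())
	}
	return MatchingIdents(paramTypes, fn, pos, info, pkg)
}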
   172  
   173  func equivalentTypes(want, got types.Type) bool {
   174  	if types.Identical(want, got) {
   175  		return true
   176  	}
   177  	// Code to help check for untyped equality (see golang/go#32146).
   178  	if rhs, ok := want.(*types.Basic); ok && rhs.Info()&types.IsUntyped > 0 {
   179  		if lhs, ok := got.Underlying().(*types.Basic); ok {
   180  			return rhs.Info()&types.IsConstType == lhs.Info()&types.IsConstType
   181  		}
   182  	}
   183  	return types.AssignableTo(want, got)
   184  }
   185  
   186  // A ReadFileFunc is a function that returns the
   187  // contents of a file, such as [os.ReadFile].
   188  type ReadFileFunc = func(filename string) ([]byte, error)
   189  
   190  // CheckedReadFile returns a wrapper around a Pass.ReadFile
   191  // function that performs the appropriate checks.
   192  func CheckedReadFile(pass *analysis.Pass, readFile ReadFileFunc) ReadFileFunc {
   193  	return func(filename string) ([]byte, error) {
   194  		if err := CheckReadable(pass, filename); err != nil {
   195  			return nil, err
   196  		}
   197  		return readFile(filename)
   198  	}
   199  }
   200  
   201  // CheckReadable enforces the access policy defined by the ReadFile field of [analysis.Pass].
   202  func CheckReadable(pass *analysis.Pass, filename string) error {
   203  	if slices.Contains(pass.OtherFiles, filename) ||
   204  		slices.Contains(pass.IgnoredFiles, filename) {
   205  		return nil
   206  	}
   207  	for _, f := range pass.Files {
   208  		if pass.Fset.File(f.FileStart).Name() == filename {
   209  			return nil
   210  		}
   211  	}
   212  	return fmt.Errorf("Pass.ReadFile: %s is not among OtherFiles, IgnoredFiles, or names of Files", filename)
   213  }
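
// A minimal driver-side sketch (hypothetical; assumes "os" is imported): a
// driver can construct the Pass.ReadFile hook from os.ReadFile so that
// analyzers may only read files named in Files, OtherFiles, or IgnoredFiles.
func setReadFile(pass *analysis.Pass) {
	pass.ReadFile = CheckedReadFile(pass, os.ReadFile)
}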
   214  
   215  // AddImport checks whether this file already imports pkgpath and
   216  // that import is in scope at pos. If so, it returns the name under
   217  // which it was imported and a zero edit. Otherwise, it adds a new
   218  // import of pkgpath, using a name derived from the preferred name,
   219  // and returns the chosen name, a prefix to be concatenated with member
   220  // to form a qualified name, and the edit for the new import.
   221  //
   222  // In the special case that pkgpath is dot-imported then member, the
   223  // identifier for which the import is being added, is consulted. If
   224  // member is not shadowed at pos, AddImport returns (".", "", nil).
   225  // (AddImport accepts the caller's implicit claim that the imported
   226  // package declares member.)
   227  //
   228  // It does not mutate its arguments.
   229  func AddImport(info *types.Info, file *ast.File, preferredName, pkgpath, member string, pos token.Pos) (name, prefix string, newImport []analysis.TextEdit) {
   230  	// Find innermost enclosing lexical block.
   231  	scope := info.Scopes[file].Innermost(pos)
   232  	if scope == nil {
   233  		panic("no enclosing lexical block")
   234  	}
   235  
   236  	// Is there an existing import of this package?
   237  	// If so, are we in its scope? (not shadowed)
   238  	for _, spec := range file.Imports {
   239  		pkgname := info.PkgNameOf(spec)
   240  		if pkgname != nil && pkgname.Imported().Path() == pkgpath {
   241  			name = pkgname.Name()
   242  			if name == "." {
   243  				// The member must resolve in the file scope (i.e. the dot-imported name is not shadowed).
   244  				if s, _ := scope.LookupParent(member, pos); s == info.Scopes[file] {
   245  					return name, "", nil
   246  				}
   247  			} else if _, obj := scope.LookupParent(name, pos); obj == pkgname {
   248  				return name, name + ".", nil
   249  			}
   250  		}
   251  	}
   252  
   253  	// We must add a new import.
   254  	// Ensure we have a fresh name.
   255  	newName := FreshName(scope, pos, preferredName)
   256  
   257  	// Create a new import declaration either before the first existing
   258  	// declaration (which must exist), including its comments; or
   259  	// inside the declaration, if it is an import group.
   260  	//
   261  	// Use a renaming import whenever the preferred name is not
   262  	// available, or the chosen name does not match the last
   263  	// segment of its path.
   264  	newText := fmt.Sprintf("%q", pkgpath)
   265  	if newName != preferredName || newName != pathpkg.Base(pkgpath) {
   266  		newText = fmt.Sprintf("%s %q", newName, pkgpath)
   267  	}
   268  	decl0 := file.Decls[0]
   269  	var before ast.Node = decl0
   270  	switch decl0 := decl0.(type) {
   271  	case *ast.GenDecl:
   272  		if decl0.Doc != nil {
   273  			before = decl0.Doc
   274  		}
   275  	case *ast.FuncDecl:
   276  		if decl0.Doc != nil {
   277  			before = decl0.Doc
   278  		}
   279  	}
   280  	// If the first decl is an import group, add this new import at the end.
   281  	if gd, ok := before.(*ast.GenDecl); ok && gd.Tok == token.IMPORT && gd.Rparen.IsValid() {
   282  		pos = gd.Rparen
   283  		newText = "\t" + newText + "\n"
   284  	} else {
   285  		pos = before.Pos()
   286  		newText = "import " + newText + "\n\n"
   287  	}
   288  	return newName, newName + ".", []analysis.TextEdit{{
   289  		Pos:     pos,
   290  		End:     pos,
   291  		NewText: []byte(newText),
   292  	}}
   293  }
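
// A minimal usage sketch (hypothetical fix construction, not part of this
// package): make fmt.Sprintf available at pos and build a suggested fix whose
// inserted text uses the returned prefix, so it reads "fmt.Sprintf(...)" under
// a normal import and "Sprintf(...)" under an unshadowed dot-import.
func sprintfFix(info *types.Info, file *ast.File, pos token.Pos) analysis.SuggestedFix {
	_, prefix, edits := AddImport(info, file, "fmt", "fmt", "Sprintf", pos)
	edits = append(edits, analysis.TextEdit{
		Pos:     pos,
		End:     pos,
		NewText: []byte(prefix + `Sprintf("%d", 0)`),
	})
	return analysis.SuggestedFix{
		Message:   "insert a call to fmt.Sprintf",
		TextEdits: edits,
	}
}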
   294  
   295  // FreshName returns the name of an identifier that is undefined
   296  // at the specified position, based on the preferred name.
   297  func FreshName(scope *types.Scope, pos token.Pos, preferred string) string {
   298  	newName := preferred
   299  	for i := 0; ; i++ {
   300  		if _, obj := scope.LookupParent(newName, pos); obj == nil {
   301  			break // fresh
   302  		}
   303  		newName = fmt.Sprintf("%s%d", preferred, i)
   304  	}
   305  	return newName
   306  }
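
// A minimal usage sketch (hypothetical refactoring step, not part of this
// package): choose a name for a new variable that will not collide with
// anything visible at the insertion point.
func newResultVar(info *types.Info, file *ast.File, pos token.Pos) string {
	scope := info.Scopes[file].Innermost(pos)
	return FreshName(scope, pos, "result") // "result", or "result0", "result1", ... if taken
}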
   307  
   308  // Format returns a string representation of the expression e.
   309  func Format(fset *token.FileSet, e ast.Expr) string {
   310  	var buf strings.Builder
   311  	printer.Fprint(&buf, fset, e) // ignore errors
   312  	return buf.String()
   313  }
   314  
   315  // Imports returns true if path is imported by pkg.
   316  func Imports(pkg *types.Package, path string) bool {
   317  	for _, imp := range pkg.Imports() {
   318  		if imp.Path() == path {
   319  			return true
   320  		}
   321  	}
   322  	return false
   323  }
   324  
   325  // IsTypeNamed reports whether t is (or is an alias for) a
   326  // package-level defined type with the given package path and one of
   327  // the given names. It returns false if t is nil.
   328  //
   329  // This function avoids allocating the concatenation of "pkg.Name",
   330  // which is important for the performance of syntax matching.
   331  func IsTypeNamed(t types.Type, pkgPath string, names ...string) bool {
   332  	if named, ok := types.Unalias(t).(*types.Named); ok {
   333  		tname := named.Obj()
   334  		return tname != nil &&
   335  			typesinternal.IsPackageLevel(tname) &&
   336  			tname.Pkg().Path() == pkgPath &&
   337  			slices.Contains(names, tname.Name())
   338  	}
   339  	return false
   340  }
   341  
   342  // IsPointerToNamed reports whether t is (or is an alias for) a pointer to a
   343  // package-level defined type with the given package path and one of the given
   344  // names. It returns false if t is not a pointer type.
   345  func IsPointerToNamed(t types.Type, pkgPath string, names ...string) bool {
   346  	r := typesinternal.Unpointer(t)
   347  	if r == t {
   348  		return false
   349  	}
   350  	return IsTypeNamed(r, pkgPath, names...)
   351  }
   352  
   353  // IsFunctionNamed reports whether obj is a package-level function
   354  // defined in the given package and has one of the given names.
   355  // It returns false if obj is nil.
   356  //
   357  // This function avoids allocating the concatenation of "pkg.Name",
   358  // which is important for the performance of syntax matching.
   359  func IsFunctionNamed(obj types.Object, pkgPath string, names ...string) bool {
   360  	f, ok := obj.(*types.Func)
   361  	return ok &&
   362  		typesinternal.IsPackageLevel(obj) &&
   363  		f.Pkg().Path() == pkgPath &&
   364  		f.Type().(*types.Signature).Recv() == nil &&
   365  		slices.Contains(names, f.Name())
   366  }
   367  
   368  // IsMethodNamed reports whether obj is a method defined on a
   369  // package-level type with the given package and type name, and has
   370  // one of the given names. It returns false if obj is nil.
   371  //
   372  // This function avoids allocating the concatenation of "pkg.TypeName.Name",
   373  // which is important for the performance of syntax matching.
   374  func IsMethodNamed(obj types.Object, pkgPath string, typeName string, names ...string) bool {
   375  	if fn, ok := obj.(*types.Func); ok {
   376  		if recv := fn.Type().(*types.Signature).Recv(); recv != nil {
   377  			_, T := typesinternal.ReceiverNamed(recv)
   378  			return T != nil &&
   379  				IsTypeNamed(T, pkgPath, typeName) &&
   380  				slices.Contains(names, fn.Name())
   381  		}
   382  	}
   383  	return false
   384  }
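
// A minimal usage sketch (hypothetical matcher; typeutil.Callee is from
// golang.org/x/tools/go/types/typeutil, which this file does not import):
// report whether a call is time.Now() or (time.Time).Format(...).
func isTimeNowOrFormat(info *types.Info, call *ast.CallExpr) bool {
	obj := typeutil.Callee(info, call) // nil if the callee cannot be resolved
	return IsFunctionNamed(obj, "time", "Now") ||
		IsMethodNamed(obj, "time", "Time", "Format")
}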
   385  
   386  // ValidateFixes validates the set of fixes for a single diagnostic.
   387  // Any error indicates a bug in the originating analyzer.
   388  //
   389  // It updates fixes so that fixes[*].End.IsValid().
   390  //
   391  // It may be used as part of an analysis driver implementation.
   392  func ValidateFixes(fset *token.FileSet, a *analysis.Analyzer, fixes []analysis.SuggestedFix) error {
   393  	fixMessages := make(map[string]bool)
   394  	for i := range fixes {
   395  		fix := &fixes[i]
   396  		if fixMessages[fix.Message] {
   397  			return fmt.Errorf("analyzer %q suggests two fixes with same Message (%s)", a.Name, fix.Message)
   398  		}
   399  		fixMessages[fix.Message] = true
   400  		if err := validateFix(fset, fix); err != nil {
   401  			return fmt.Errorf("analyzer %q suggests invalid fix (%s): %v", a.Name, fix.Message, err)
   402  		}
   403  	}
   404  	return nil
   405  }
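
// A minimal driver-side sketch (hypothetical, not part of this package):
// validate the fixes attached to a diagnostic before surfacing them.
func validateDiagnostic(fset *token.FileSet, a *analysis.Analyzer, d *analysis.Diagnostic) error {
	return ValidateFixes(fset, a, d.SuggestedFixes)
}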
   406  
   407  // validateFix validates a single fix.
   408  // Any error indicates a bug in the originating analyzer.
   409  //
   410  // It updates fix so that fix.End.IsValid().
   411  func validateFix(fset *token.FileSet, fix *analysis.SuggestedFix) error {
   412  
   413  	// Stably sort edits by Pos. This ordering puts insertions
   414  	// (end = start) before deletions (end > start) at the same
   415  	// point, but uses a stable sort to preserve the order of
   416  	// multiple insertions at the same point.
   417  	slices.SortStableFunc(fix.TextEdits, func(x, y analysis.TextEdit) int {
   418  		if sign := cmp.Compare(x.Pos, y.Pos); sign != 0 {
   419  			return sign
   420  		}
   421  		return cmp.Compare(x.End, y.End)
   422  	})
   423  
   424  	var prev *analysis.TextEdit
   425  	for i := range fix.TextEdits {
   426  		edit := &fix.TextEdits[i]
   427  
   428  		// Validate edit individually.
   429  		start := edit.Pos
   430  		file := fset.File(start)
   431  		if file == nil {
   432  			return fmt.Errorf("no token.File for TextEdit.Pos (%v)", edit.Pos)
   433  		}
   434  		if end := edit.End; end.IsValid() {
   435  			if end < start {
   436  				return fmt.Errorf("TextEdit.Pos (%v) > TextEdit.End (%v)", edit.Pos, edit.End)
   437  			}
   438  			endFile := fset.File(end)
   439  			if endFile == nil {
   440  				return fmt.Errorf("no token.File for TextEdit.End (%v; File(start).FileEnd is %d)", end, file.Base()+file.Size())
   441  			}
   442  			if endFile != file {
   443  				return fmt.Errorf("edit #%d spans files (%v and %v)",
   444  					i, file.Position(edit.Pos), endFile.Position(edit.End))
   445  			}
   446  		} else {
   447  			edit.End = start // update the SuggestedFix
   448  		}
   449  		if eof := token.Pos(file.Base() + file.Size()); edit.End > eof {
   450  			return fmt.Errorf("TextEdit.End (%v) is beyond end of file (%v)", edit.End, eof)
   451  		}
   452  
   453  		// Validate the sequence of edits:
   454  		// properly ordered, no overlapping deletions
   455  		if prev != nil && edit.Pos < prev.End {
   456  			xpos := fset.Position(prev.Pos)
   457  			xend := fset.Position(prev.End)
   458  			ypos := fset.Position(edit.Pos)
   459  			yend := fset.Position(edit.End)
   460  			return fmt.Errorf("overlapping edits to %s (%d:%d-%d:%d and %d:%d-%d:%d)",
   461  				xpos.Filename,
   462  				xpos.Line, xpos.Column,
   463  				xend.Line, xend.Column,
   464  				ypos.Line, ypos.Column,
   465  				yend.Line, yend.Column,
   466  			)
   467  		}
   468  		prev = edit
   469  	}
   470  
   471  	return nil
   472  }
   473  
   474  // CanImport reports whether one package is allowed to import another.
   475  //
   476  // TODO(adonovan): allow customization of the accessibility relation
   477  // (e.g. for Bazel).
   478  func CanImport(from, to string) bool {
   479  	// TODO(adonovan): better segment hygiene.
   480  	if to == "internal" || strings.HasPrefix(to, "internal/") {
   481  		// Special case: only std packages may import internal/...
   482  		// We can't reliably know whether we're in std, so we
   483  		// use a heuristic on the first segment.
   484  		first, _, _ := strings.Cut(from, "/")
   485  		if strings.Contains(first, ".") {
   486  			return false // example.com/foo ∉ std
   487  		}
   488  		if first == "testdata" {
   489  			return false // testdata/foo ∉ std
   490  		}
   491  	}
   492  	if strings.HasSuffix(to, "/internal") {
   493  		return strings.HasPrefix(from, to[:len(to)-len("/internal")])
   494  	}
   495  	if i := strings.LastIndex(to, "/internal/"); i >= 0 {
   496  		return strings.HasPrefix(from, to[:i])
   497  	}
   498  	return true
   499  }
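
// A few illustrative cases (a sketch; the results follow from the rules above):
func exampleCanImport() {
	_ = CanImport("fmt", "net/http")                    // true: no internal element in the path
	_ = CanImport("net/http", "net/http/internal")      // true: importer lies inside net/http
	_ = CanImport("example.com/x", "net/http/internal") // false: importer lies outside net/http
	_ = CanImport("example.com/x", "internal/poll")     // false: example.com/x is not in std
	_ = CanImport("crypto/tls", "internal/godebug")     // true: std packages may import internal/...
}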
   500  
   501  // DeleteStmt returns the edits to remove stmt if it is contained
   502  // in a BlockStmt, CaseClause, or CommClause, or is the STMT in "switch STMT; ... {...}".
   503  // The report function abstracts gopls' bug.Report.
   504  func DeleteStmt(fset *token.FileSet, astFile *ast.File, stmt ast.Stmt, report func(string, ...any)) []analysis.TextEdit {
   505  	// TODO: pass in the Cursor for the ast.Stmt; callers should provide the Cursor.
   506  	insp := inspector.New([]*ast.File{astFile})
   507  	root := cursor.Root(insp)
   508  	cstmt, ok := root.FindNode(stmt)
   509  	if !ok {
   510  		report("%s not found in file", stmt.Pos())
   511  		return nil
   512  	}
   513  	// some paranoia
   514  	if !stmt.Pos().IsValid() || !stmt.End().IsValid() {
   515  		report("%s: stmt has invalid position", stmt.Pos())
   516  		return nil
   517  	}
   518  
   519  	// If the stmt is on a line by itself, delete the whole line;
   520  	// otherwise just delete the statement.
   521  
   522  	// This logic would be a lot simpler with the file contents, and somewhat simpler
   523  	// if the cursors included the comments.
   524  
   525  	tokFile := fset.File(stmt.Pos())
   526  	lineOf := tokFile.Line
   527  	stmtStartLine, stmtEndLine := lineOf(stmt.Pos()), lineOf(stmt.End())
   528  
   529  	var from, to token.Pos
   530  	// bounds of adjacent syntax/comments on same line, if any
   531  	limits := func(left, right token.Pos) {
   532  		if lineOf(left) == stmtStartLine {
   533  			from = left
   534  		}
   535  		if lineOf(right) == stmtEndLine {
   536  			to = right
   537  		}
   538  	}
   539  	// TODO(pjw): there are other places a statement might be removed:
   540  	// IfStmt = "if" [ SimpleStmt ";" ] Expression Block [ "else" ( IfStmt | Block ) ] .
   541  	// (removing the blocks requires more rewriting than this routine would do)
   542  	// CommCase   = "case" ( SendStmt | RecvStmt ) | "default" .
   543  	// (removing the stmt requires more rewriting, and it's unclear what the user means)
   544  	switch parent := cstmt.Parent().Node().(type) {
   545  	case *ast.SwitchStmt:
   546  		limits(parent.Switch, parent.Body.Lbrace)
   547  	case *ast.TypeSwitchStmt:
   548  		limits(parent.Switch, parent.Body.Lbrace)
   549  		if parent.Assign == stmt {
   550  			return nil // don't let the user break the type switch
   551  		}
   552  	case *ast.BlockStmt:
   553  		limits(parent.Lbrace, parent.Rbrace)
   554  	case *ast.CommClause:
   555  		limits(parent.Colon, cstmt.Parent().Parent().Node().(*ast.BlockStmt).Rbrace)
   556  		if parent.Comm == stmt {
   557  			return nil // maybe the user meant to remove the entire CommClause?
   558  		}
   559  	case *ast.CaseClause:
   560  		limits(parent.Colon, cstmt.Parent().Parent().Node().(*ast.BlockStmt).Rbrace)
   561  	case *ast.ForStmt:
   562  		limits(parent.For, parent.Body.Lbrace)
   563  
   564  	default:
   565  		return nil // not one of ours
   566  	}
   567  
   568  	if prev, found := cstmt.PrevSibling(); found && lineOf(prev.Node().End()) == stmtStartLine {
   569  		from = prev.Node().End() // preceding statement ends on same line
   570  	}
   571  	if next, found := cstmt.NextSibling(); found && lineOf(next.Node().Pos()) == stmtEndLine {
   572  		to = next.Node().Pos() // following statement begins on same line
   573  	}
   574  	// and now for the comments
   575  Outer:
   576  	for _, cg := range astFile.Comments {
   577  		for _, co := range cg.List {
   578  			if lineOf(co.End()) < stmtStartLine {
   579  				continue
   580  			} else if lineOf(co.Pos()) > stmtEndLine {
   581  				break Outer // no more are possible
   582  			}
   583  			if lineOf(co.End()) == stmtStartLine && co.End() < stmt.Pos() {
   584  				if !from.IsValid() || co.End() > from {
   585  					from = co.End()
   586  					continue // maybe there are more
   587  				}
   588  			}
   589  			if lineOf(co.Pos()) == stmtEndLine && co.Pos() > stmt.End() {
   590  				if !to.IsValid() || co.Pos() < to {
   591  					to = co.Pos()
   592  					continue // maybe there are more
   593  				}
   594  			}
   595  		}
   596  	}
   597  	// If either from or to is valid, just remove the statement;
   598  	// otherwise remove the whole line.
   599  	edit := analysis.TextEdit{Pos: stmt.Pos(), End: stmt.End()}
   600  	if from.IsValid() || to.IsValid() {
   601  		// Remove just the statement.
   602  		// We can't tell if there is a ";" or whitespace right after the statement;
   603  		// ideally we'd like to remove the former and leave the latter
   604  		// (if gofmt has run, there likely won't be a ";").
   605  		// In type switches we know there's a semicolon somewhere after the statement,
   606  		// but the extra work for this special case is not worth it, as gofmt will fix it.
   607  		return []analysis.TextEdit{edit}
   608  	}
   609  	// remove the whole line
   610  	for lineOf(edit.Pos) == stmtStartLine {
   611  		edit.Pos--
   612  	}
   613  	edit.Pos++ // get back to stmtStartLine
   614  	for lineOf(edit.End) == stmtEndLine {
   615  		edit.End++
   616  	}
   617  	return []analysis.TextEdit{edit}
   618  }
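
// A minimal usage sketch (hypothetical analyzer fix; assumes "log" is imported
// for the report callback): offer to delete a redundant statement.
func deleteStmtFix(pass *analysis.Pass, file *ast.File, stmt ast.Stmt) []analysis.SuggestedFix {
	edits := DeleteStmt(pass.Fset, file, stmt, log.Printf)
	if len(edits) == 0 {
		return nil // stmt is not in a position this helper can delete
	}
	return []analysis.SuggestedFix{{
		Message:   "Remove redundant statement",
		TextEdits: edits,
	}}
}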
   619  
   620  // Comments returns an iterator over the comments overlapping the specified interval.
   621  func Comments(file *ast.File, start, end token.Pos) iter.Seq[*ast.Comment] {
   622  	// TODO(adonovan): optimize: use binary O(log n) search instead of linear O(n).
   623  	return func(yield func(*ast.Comment) bool) {
   624  		for _, cg := range file.Comments {
   625  			for _, co := range cg.List {
   626  				if co.Pos() > end {
   627  					return
   628  				}
   629  				if co.End() < start {
   630  					continue
   631  				}
   632  
   633  				if !yield(co) {
   634  					return
   635  				}
   636  			}
   637  		}
   638  	}
   639  }
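
// A minimal usage sketch (hypothetical caller, not part of this package):
// collect the text of every comment overlapping a statement's extent.
func commentsWithin(file *ast.File, stmt ast.Stmt) []string {
	var texts []string
	for co := range Comments(file, stmt.Pos(), stmt.End()) {
		texts = append(texts, co.Text)
	}
	return texts
}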
   640  
