Source file src/cmd/vendor/golang.org/x/tools/internal/analysisinternal/analysis.go

     1  // Copyright 2020 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  // Package analysisinternal provides gopls' internal analyses with a
     6  // number of helper functions that operate on typed syntax trees.
     7  package analysisinternal
     8  
     9  import (
    10  	"bytes"
    11  	"cmp"
    12  	"fmt"
    13  	"go/ast"
    14  	"go/printer"
    15  	"go/scanner"
    16  	"go/token"
    17  	"go/types"
    18  	"iter"
    19  	pathpkg "path"
    20  	"slices"
    21  	"strings"
    22  
    23  	"golang.org/x/tools/go/analysis"
    24  	"golang.org/x/tools/go/ast/inspector"
    25  	"golang.org/x/tools/internal/moreiters"
    26  	"golang.org/x/tools/internal/typesinternal"
    27  )
    28  
    29  // Deprecated: this heuristic is ill-defined.
    30  // TODO(adonovan): move to sole use in gopls/internal/cache.
    31  func TypeErrorEndPos(fset *token.FileSet, src []byte, start token.Pos) token.Pos {
    32  	// Get the end position for the type error.
    33  	file := fset.File(start)
    34  	if file == nil {
    35  		return start
    36  	}
    37  	if offset := file.PositionFor(start, false).Offset; offset > len(src) {
    38  		return start
    39  	} else {
    40  		src = src[offset:]
    41  	}
    42  
    43  	// Attempt to find a reasonable end position for the type error.
    44  	//
    45  	// TODO(rfindley): the heuristic implemented here is unclear. It looks like
    46  	// it seeks the end of the primary operand starting at start, but that is not
    47  	// quite implemented (for example, given a func literal this heuristic will
    48  	// return the range of the func keyword).
    49  	//
    50  	// We should formalize this heuristic, or deprecate it by finally proposing
    51  	// to add end position to all type checker errors.
    52  	//
    53  	// Nevertheless, ensure that the end position at least spans the current
    54  	// token at the cursor (this was golang/go#69505).
    55  	end := start
    56  	{
    57  		var s scanner.Scanner
    58  		fset := token.NewFileSet()
    59  		f := fset.AddFile("", fset.Base(), len(src))
    60  		s.Init(f, src, nil /* no error handler */, scanner.ScanComments)
    61  		pos, tok, lit := s.Scan()
    62  		if tok != token.SEMICOLON && token.Pos(f.Base()) <= pos && pos <= token.Pos(f.Base()+f.Size()) {
    63  			off := file.Offset(pos) + len(lit)
    64  			src = src[off:]
    65  			end += token.Pos(off)
    66  		}
    67  	}
    68  
    69  	// Look for bytes that might terminate the current operand. See note above:
    70  	// this is imprecise.
    71  	if width := bytes.IndexAny(src, " \n,():;[]+-*/"); width > 0 {
    72  		end += token.Pos(width)
    73  	}
    74  	return end
    75  }
    76  
    77  // MatchingIdents finds the names of all identifiers in 'node' that match any of the given types.
    78  // 'pos' represents the position at which the identifiers may be inserted. 'pos' must be within
    79  // the scope of each identifier we select. Otherwise, we will insert a variable at 'pos' that
    80  // is unrecognized.
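        //
        // An illustrative sketch, assuming typ, file, pos, info, and pkg are
        // supplied by the enclosing analysis pass:
        //
        //	matches := MatchingIdents([]types.Type{typ}, file, pos, info, pkg)
        //	for _, name := range matches[typ] {
        //		// name is an in-scope identifier whose type satisfies typ
        //	}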
    81  func MatchingIdents(typs []types.Type, node ast.Node, pos token.Pos, info *types.Info, pkg *types.Package) map[types.Type][]string {
    82  
    83  	// Initialize matches to contain the variable types we are searching for.
    84  	matches := make(map[types.Type][]string)
    85  	for _, typ := range typs {
    86  		if typ == nil {
    87  			continue // TODO(adonovan): is this reachable?
    88  		}
    89  		matches[typ] = nil // create entry
    90  	}
    91  
    92  	seen := map[types.Object]struct{}{}
    93  	ast.Inspect(node, func(n ast.Node) bool {
    94  		if n == nil {
    95  			return false
    96  		}
    97  		// Prevent circular definitions. If 'pos' is within an assignment statement, do not
    98  		// allow any identifiers in that assignment statement to be selected. Otherwise,
    99  		// we could do the following, where 'x' satisfies the type of 'f0':
   100  		//
   101  		// x := fakeStruct{f0: x}
   102  		//
   103  		if assign, ok := n.(*ast.AssignStmt); ok && pos > assign.Pos() && pos <= assign.End() {
   104  			return false
   105  		}
   106  		if n.End() > pos {
   107  			return n.Pos() <= pos
   108  		}
   109  		ident, ok := n.(*ast.Ident)
   110  		if !ok || ident.Name == "_" {
   111  			return true
   112  		}
   113  		obj := info.Defs[ident]
   114  		if obj == nil || obj.Type() == nil {
   115  			return true
   116  		}
   117  		if _, ok := obj.(*types.TypeName); ok {
   118  			return true
   119  		}
   120  		// Prevent duplicates in matches' values.
   121  		if _, ok = seen[obj]; ok {
   122  			return true
   123  		}
   124  		seen[obj] = struct{}{}
   125  		// Find the scope for the given position. Then, check whether the object
   126  		// exists within the scope.
   127  		innerScope := pkg.Scope().Innermost(pos)
   128  		if innerScope == nil {
   129  			return true
   130  		}
   131  		_, foundObj := innerScope.LookupParent(ident.Name, pos)
   132  		if foundObj != obj {
   133  			return true
   134  		}
   135  		// The object must match one of the types that we are searching for.
   136  		// TODO(adonovan): opt: use typeutil.Map?
   137  		if names, ok := matches[obj.Type()]; ok {
   138  			matches[obj.Type()] = append(names, ident.Name)
   139  		} else {
   140  			// If the object type does not exactly match
   141  			// any of the target types, add the identifier under
   142  			// every target type that the object type can satisfy.
   143  			for typ := range matches {
   144  				if equivalentTypes(obj.Type(), typ) {
   145  					matches[typ] = append(matches[typ], ident.Name)
   146  				}
   147  			}
   148  		}
   149  		return true
   150  	})
   151  	return matches
   152  }
   153  
   154  func equivalentTypes(want, got types.Type) bool {
   155  	if types.Identical(want, got) {
   156  		return true
   157  	}
   158  	// Code segment to help check for untyped equality (see golang/go#32146).
   159  	if rhs, ok := want.(*types.Basic); ok && rhs.Info()&types.IsUntyped > 0 {
   160  		if lhs, ok := got.Underlying().(*types.Basic); ok {
   161  			return rhs.Info()&types.IsConstType == lhs.Info()&types.IsConstType
   162  		}
   163  	}
   164  	return types.AssignableTo(want, got)
   165  }
   166  
   167  // A ReadFileFunc is a function that returns the
   168  // contents of a file, such as [os.ReadFile].
   169  type ReadFileFunc = func(filename string) ([]byte, error)
   170  
   171  // CheckedReadFile returns a wrapper around a Pass.ReadFile
   172  // function that performs the appropriate checks.
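        //
        // A minimal sketch, assuming pass and filename are supplied by the caller:
        //
        //	read := CheckedReadFile(pass, os.ReadFile)
        //	content, err := read(filename) // rejects files unknown to pass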
   173  func CheckedReadFile(pass *analysis.Pass, readFile ReadFileFunc) ReadFileFunc {
   174  	return func(filename string) ([]byte, error) {
   175  		if err := CheckReadable(pass, filename); err != nil {
   176  			return nil, err
   177  		}
   178  		return readFile(filename)
   179  	}
   180  }
   181  
   182  // CheckReadable enforces the access policy defined by the ReadFile field of [analysis.Pass].
   183  func CheckReadable(pass *analysis.Pass, filename string) error {
   184  	if slices.Contains(pass.OtherFiles, filename) ||
   185  		slices.Contains(pass.IgnoredFiles, filename) {
   186  		return nil
   187  	}
   188  	for _, f := range pass.Files {
   189  		if pass.Fset.File(f.FileStart).Name() == filename {
   190  			return nil
   191  		}
   192  	}
   193  	return fmt.Errorf("Pass.ReadFile: %s is not among OtherFiles, IgnoredFiles, or names of Files", filename)
   194  }
   195  
   196  // AddImport checks whether this file already imports pkgpath and
   197  // that import is in scope at pos. If so, it returns the name under
   198  // which it was imported and a zero edit. Otherwise, it adds a new
   199  // import of pkgpath, using a name derived from the preferred name,
   200  // and returns the chosen name, a prefix to be concatenated with member
   201  // to form a qualified name, and the edit for the new import.
   202  //
   203  // In the special case that pkgpath is dot-imported, member (the
   204  // identifier for which the import is being added) is consulted. If
   205  // member is not shadowed at pos, AddImport returns (".", "", nil).
   206  // (AddImport accepts the caller's implicit claim that the imported
   207  // package declares member.)
   208  //
   209  // It does not mutate its arguments.
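        //
        // An illustrative sketch of a fix that needs to refer to fmt.Println
        // at pos, assuming info, file, and pos come from the analysis pass:
        //
        //	name, prefix, edits := AddImport(info, file, "fmt", "fmt", "Println", pos)
        //	_ = name                         // e.g. "fmt"
        //	newText := prefix + "Println(x)" // qualified reference to the member
        //	// prepend edits to the fix's TextEdits so the import is added as well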
   210  func AddImport(info *types.Info, file *ast.File, preferredName, pkgpath, member string, pos token.Pos) (name, prefix string, newImport []analysis.TextEdit) {
   211  	// Find innermost enclosing lexical block.
   212  	scope := info.Scopes[file].Innermost(pos)
   213  	if scope == nil {
   214  		panic("no enclosing lexical block")
   215  	}
   216  
   217  	// Is there an existing import of this package?
   218  	// If so, are we in its scope? (not shadowed)
   219  	for _, spec := range file.Imports {
   220  		pkgname := info.PkgNameOf(spec)
   221  		if pkgname != nil && pkgname.Imported().Path() == pkgpath {
   222  			name = pkgname.Name()
   223  			if name == "." {
   224  				// The scope of ident must be the file scope.
   225  				if s, _ := scope.LookupParent(member, pos); s == info.Scopes[file] {
   226  					return name, "", nil
   227  				}
   228  			} else if _, obj := scope.LookupParent(name, pos); obj == pkgname {
   229  				return name, name + ".", nil
   230  			}
   231  		}
   232  	}
   233  
   234  	// We must add a new import.
   235  	// Ensure we have a fresh name.
   236  	newName := FreshName(scope, pos, preferredName)
   237  
   238  	// Create a new import declaration either before the first existing
   239  	// declaration (which must exist), including its comments; or
   240  	// inside the declaration, if it is an import group.
   241  	//
   242  	// Use a renaming import whenever the preferred name is not
   243  	// available, or the chosen name does not match the last
   244  	// segment of its path.
   245  	newText := fmt.Sprintf("%q", pkgpath)
   246  	if newName != preferredName || newName != pathpkg.Base(pkgpath) {
   247  		newText = fmt.Sprintf("%s %q", newName, pkgpath)
   248  	}
   249  	decl0 := file.Decls[0]
   250  	var before ast.Node = decl0
   251  	switch decl0 := decl0.(type) {
   252  	case *ast.GenDecl:
   253  		if decl0.Doc != nil {
   254  			before = decl0.Doc
   255  		}
   256  	case *ast.FuncDecl:
   257  		if decl0.Doc != nil {
   258  			before = decl0.Doc
   259  		}
   260  	}
   261  	if gd, ok := before.(*ast.GenDecl); ok && gd.Tok == token.IMPORT && gd.Rparen.IsValid() {
   262  		// Have existing grouped import ( ... ) decl.
   263  		if IsStdPackage(pkgpath) && len(gd.Specs) > 0 {
   264  			// Add spec for a std package before
   265  			// first existing spec, followed by
   266  			// a blank line if the next one is non-std.
   267  			first := gd.Specs[0].(*ast.ImportSpec)
   268  			pos = first.Pos()
   269  			if !IsStdPackage(first.Path.Value) {
   270  				newText += "\n"
   271  			}
   272  			newText += "\n\t"
   273  		} else {
   274  			// Add spec at end of group.
   275  			pos = gd.Rparen
   276  			newText = "\t" + newText + "\n"
   277  		}
   278  	} else {
   279  		// No import decl, or non-grouped import.
   280  		// Add a new import decl before first decl.
   281  		// (gofmt will merge multiple import decls.)
   282  		pos = before.Pos()
   283  		newText = "import " + newText + "\n\n"
   284  	}
   285  	return newName, newName + ".", []analysis.TextEdit{{
   286  		Pos:     pos,
   287  		End:     pos,
   288  		NewText: []byte(newText),
   289  	}}
   290  }
   291  
   292  // FreshName returns the name of an identifier that is undefined
   293  // at the specified position, based on the preferred name.
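        //
        // For example, assuming "err" and "err0" are already in scope at pos:
        //
        //	name := FreshName(scope, pos, "err") // returns "err1"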
   294  func FreshName(scope *types.Scope, pos token.Pos, preferred string) string {
   295  	newName := preferred
   296  	for i := 0; ; i++ {
   297  		if _, obj := scope.LookupParent(newName, pos); obj == nil {
   298  			break // fresh
   299  		}
   300  		newName = fmt.Sprintf("%s%d", preferred, i)
   301  	}
   302  	return newName
   303  }
   304  
   305  // Format returns a string representation of the node n.
   306  func Format(fset *token.FileSet, n ast.Node) string {
   307  	var buf strings.Builder
   308  	printer.Fprint(&buf, fset, n) // ignore errors
   309  	return buf.String()
   310  }
   311  
   312  // Imports reports whether path is imported by pkg.
   313  func Imports(pkg *types.Package, path string) bool {
   314  	for _, imp := range pkg.Imports() {
   315  		if imp.Path() == path {
   316  			return true
   317  		}
   318  	}
   319  	return false
   320  }
   321  
   322  // IsTypeNamed reports whether t is (or is an alias for) a
   323  // package-level defined type with the given package path and one of
   324  // the given names. It returns false if t is nil.
   325  //
   326  // This function avoids allocating the concatenation of "pkg.Name",
   327  // which is important for the performance of syntax matching.
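        //
        // An illustrative use, assuming t is the type of some expression:
        //
        //	if IsTypeNamed(t, "time", "Time", "Duration") {
        //		// t is time.Time or time.Duration (possibly via an alias)
        //	}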
   328  func IsTypeNamed(t types.Type, pkgPath string, names ...string) bool {
   329  	if named, ok := types.Unalias(t).(*types.Named); ok {
   330  		tname := named.Obj()
   331  		return tname != nil &&
   332  			typesinternal.IsPackageLevel(tname) &&
   333  			tname.Pkg().Path() == pkgPath &&
   334  			slices.Contains(names, tname.Name())
   335  	}
   336  	return false
   337  }
   338  
   339  // IsPointerToNamed reports whether t is (or is an alias for) a pointer to a
   340  // package-level defined type with the given package path and one of the given
   341  // names. It returns false if t is not a pointer type.
   342  func IsPointerToNamed(t types.Type, pkgPath string, names ...string) bool {
   343  	r := typesinternal.Unpointer(t)
   344  	if r == t {
   345  		return false
   346  	}
   347  	return IsTypeNamed(r, pkgPath, names...)
   348  }
   349  
   350  // IsFunctionNamed reports whether obj is a package-level function
   351  // defined in the given package and has one of the given names.
   352  // It returns false if obj is nil.
   353  //
   354  // This function avoids allocating the concatenation of "pkg.Name",
   355  // which is important for the performance of syntax matching.
   356  func IsFunctionNamed(obj types.Object, pkgPath string, names ...string) bool {
   357  	f, ok := obj.(*types.Func)
   358  	return ok &&
   359  		typesinternal.IsPackageLevel(obj) &&
   360  		f.Pkg().Path() == pkgPath &&
   361  		f.Type().(*types.Signature).Recv() == nil &&
   362  		slices.Contains(names, f.Name())
   363  }
   364  
   365  // IsMethodNamed reports whether obj is a method defined on a
   366  // package-level type with the given package and type name, and has
   367  // one of the given names. It returns false if obj is nil.
   368  //
   369  // This function avoids allocating the concatenation of "pkg.TypeName.Name",
   370  // which is important for the performance of syntax matching.
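        //
        // An illustrative use, assuming obj is the callee of some call expression:
        //
        //	if IsMethodNamed(obj, "net/http", "Client", "Do") {
        //		// the call is (*http.Client).Do
        //	}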
   371  func IsMethodNamed(obj types.Object, pkgPath string, typeName string, names ...string) bool {
   372  	if fn, ok := obj.(*types.Func); ok {
   373  		if recv := fn.Type().(*types.Signature).Recv(); recv != nil {
   374  			_, T := typesinternal.ReceiverNamed(recv)
   375  			return T != nil &&
   376  				IsTypeNamed(T, pkgPath, typeName) &&
   377  				slices.Contains(names, fn.Name())
   378  		}
   379  	}
   380  	return false
   381  }
   382  
   383  // ValidateFixes validates the set of fixes for a single diagnostic.
   384  // Any error indicates a bug in the originating analyzer.
   385  //
   386  // It updates fixes so that fixes[*].End.IsValid().
   387  //
   388  // It may be used as part of an analysis driver implementation.
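        //
        // A sketch of driver usage, assuming diagnostics, fset, and analyzer
        // are the driver's own values:
        //
        //	for i := range diagnostics {
        //		d := &diagnostics[i]
        //		if err := ValidateFixes(fset, analyzer, d.SuggestedFixes); err != nil {
        //			log.Printf("discarding invalid fixes: %v", err)
        //			d.SuggestedFixes = nil
        //		}
        //	}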
   389  func ValidateFixes(fset *token.FileSet, a *analysis.Analyzer, fixes []analysis.SuggestedFix) error {
   390  	fixMessages := make(map[string]bool)
   391  	for i := range fixes {
   392  		fix := &fixes[i]
   393  		if fixMessages[fix.Message] {
   394  			return fmt.Errorf("analyzer %q suggests two fixes with same Message (%s)", a.Name, fix.Message)
   395  		}
   396  		fixMessages[fix.Message] = true
   397  		if err := validateFix(fset, fix); err != nil {
   398  			return fmt.Errorf("analyzer %q suggests invalid fix (%s): %v", a.Name, fix.Message, err)
   399  		}
   400  	}
   401  	return nil
   402  }
   403  
   404  // validateFix validates a single fix.
   405  // Any error indicates a bug in the originating analyzer.
   406  //
   407  // It updates fix so that fix.End.IsValid().
   408  func validateFix(fset *token.FileSet, fix *analysis.SuggestedFix) error {
   409  
   410  	// Stably sort edits by Pos. This ordering puts insertions
   411  	// (end = start) before deletions (end > start) at the same
   412  	// point, but uses a stable sort to preserve the order of
   413  	// multiple insertions at the same point.
   414  	slices.SortStableFunc(fix.TextEdits, func(x, y analysis.TextEdit) int {
   415  		if sign := cmp.Compare(x.Pos, y.Pos); sign != 0 {
   416  			return sign
   417  		}
   418  		return cmp.Compare(x.End, y.End)
   419  	})
   420  
   421  	var prev *analysis.TextEdit
   422  	for i := range fix.TextEdits {
   423  		edit := &fix.TextEdits[i]
   424  
   425  		// Validate edit individually.
   426  		start := edit.Pos
   427  		file := fset.File(start)
   428  		if file == nil {
   429  			return fmt.Errorf("no token.File for TextEdit.Pos (%v)", edit.Pos)
   430  		}
   431  		fileEnd := token.Pos(file.Base() + file.Size())
   432  		if end := edit.End; end.IsValid() {
   433  			if end < start {
   434  				return fmt.Errorf("TextEdit.Pos (%v) > TextEdit.End (%v)", edit.Pos, edit.End)
   435  			}
   436  			endFile := fset.File(end)
   437  			if endFile != file && end < fileEnd+10 {
   438  				// Relax the checks below in the special case when the end position
   439  				// is only slightly beyond EOF, as happens when End is computed
   440  				// (as in ast.{Struct,Interface}Type) rather than based on
   441  				// actual token positions. In such cases, truncate end to EOF.
   442  				//
   443  				// This is a workaround for #71659; see:
   444  				// https://github.com/golang/go/issues/71659#issuecomment-2651606031
   445  				// A better fix would be more faithful recording of token
   446  				// positions (or their absence) in the AST.
   447  				edit.End = fileEnd
   448  				continue
   449  			}
   450  			if endFile == nil {
   451  				return fmt.Errorf("no token.File for TextEdit.End (%v; File(start).FileEnd is %d)", end, file.Base()+file.Size())
   452  			}
   453  			if endFile != file {
   454  				return fmt.Errorf("edit #%d spans files (%v and %v)",
   455  					i, file.Position(edit.Pos), endFile.Position(edit.End))
   456  			}
   457  		} else {
   458  			edit.End = start // update the SuggestedFix
   459  		}
   460  		if eof := fileEnd; edit.End > eof {
   461  			return fmt.Errorf("end is (%v) beyond end of file (%v)", edit.End, eof)
   462  		}
   463  
   464  		// Validate the sequence of edits:
   465  		// properly ordered, no overlapping deletions
   466  		if prev != nil && edit.Pos < prev.End {
   467  			xpos := fset.Position(prev.Pos)
   468  			xend := fset.Position(prev.End)
   469  			ypos := fset.Position(edit.Pos)
   470  			yend := fset.Position(edit.End)
   471  			return fmt.Errorf("overlapping edits to %s (%d:%d-%d:%d and %d:%d-%d:%d)",
   472  				xpos.Filename,
   473  				xpos.Line, xpos.Column,
   474  				xend.Line, xend.Column,
   475  				ypos.Line, ypos.Column,
   476  				yend.Line, yend.Column,
   477  			)
   478  		}
   479  		prev = edit
   480  	}
   481  
   482  	return nil
   483  }
   484  
   485  // CanImport reports whether one package is allowed to import another.
   486  //
   487  // TODO(adonovan): allow customization of the accessibility relation
   488  // (e.g. for Bazel).
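        //
        // For example:
        //
        //	CanImport("example.com/a/b", "example.com/a/internal/c") // true
        //	CanImport("example.com/z", "example.com/a/internal/c")   // false
        //	CanImport("fmt", "internal/abi")                         // true ("fmt" looks like std)
        //	CanImport("example.com/a", "internal/abi")               // false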
   489  func CanImport(from, to string) bool {
   490  	// TODO(adonovan): better segment hygiene.
   491  	if to == "internal" || strings.HasPrefix(to, "internal/") {
   492  		// Special case: only std packages may import internal/...
   493  		// We can't reliably know whether we're in std, so we
   494  		// use a heuristic on the first segment.
   495  		first, _, _ := strings.Cut(from, "/")
   496  		if strings.Contains(first, ".") {
   497  			return false // example.com/foo ∉ std
   498  		}
   499  		if first == "testdata" {
   500  			return false // testdata/foo ∉ std
   501  		}
   502  	}
   503  	if strings.HasSuffix(to, "/internal") {
   504  		return strings.HasPrefix(from, to[:len(to)-len("/internal")])
   505  	}
   506  	if i := strings.LastIndex(to, "/internal/"); i >= 0 {
   507  		return strings.HasPrefix(from, to[:i])
   508  	}
   509  	return true
   510  }
   511  
   512  // DeleteStmt returns the edits to remove the [ast.Stmt] identified by
   513  // curStmt, if it is contained within a BlockStmt, CaseClause,
   514  // CommClause, or is the STMT in switch STMT; ... {...}. It returns nil otherwise.
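        //
        // A sketch of typical use, assuming curStmt is the Cursor for the
        // statement and pass is the analysis pass:
        //
        //	if edits := DeleteStmt(pass.Fset, curStmt); edits != nil {
        //		fix := analysis.SuggestedFix{Message: "remove statement", TextEdits: edits}
        //		// attach fix to the diagnostic being reported
        //	}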
   515  func DeleteStmt(fset *token.FileSet, curStmt inspector.Cursor) []analysis.TextEdit {
   516  	stmt := curStmt.Node().(ast.Stmt)
   517  	// If the stmt is on a line by itself, delete the whole line;
   518  	// otherwise just delete the statement.
   519  
   520  	// this logic would be a lot simpler with the file contents, and somewhat simpler
   521  	// if the cursors included the comments.
   522  
   523  	tokFile := fset.File(stmt.Pos())
   524  	lineOf := tokFile.Line
   525  	stmtStartLine, stmtEndLine := lineOf(stmt.Pos()), lineOf(stmt.End())
   526  
   527  	var from, to token.Pos
   528  	// bounds of adjacent syntax/comments on same line, if any
   529  	limits := func(left, right token.Pos) {
   530  		if lineOf(left) == stmtStartLine {
   531  			from = left
   532  		}
   533  		if lineOf(right) == stmtEndLine {
   534  			to = right
   535  		}
   536  	}
   537  	// TODO(pjw): there are other places a statement might be removed:
   538  	// IfStmt = "if" [ SimpleStmt ";" ] Expression Block [ "else" ( IfStmt | Block ) ] .
   539  	// (removing the blocks requires more rewriting than this routine would do)
   540  	// CommCase   = "case" ( SendStmt | RecvStmt ) | "default" .
   541  	// (removing the stmt requires more rewriting, and it's unclear what the user means)
   542  	switch parent := curStmt.Parent().Node().(type) {
   543  	case *ast.SwitchStmt:
   544  		limits(parent.Switch, parent.Body.Lbrace)
   545  	case *ast.TypeSwitchStmt:
   546  		limits(parent.Switch, parent.Body.Lbrace)
   547  		if parent.Assign == stmt {
   548  			return nil // don't let the user break the type switch
   549  		}
   550  	case *ast.BlockStmt:
   551  		limits(parent.Lbrace, parent.Rbrace)
   552  	case *ast.CommClause:
   553  		limits(parent.Colon, curStmt.Parent().Parent().Node().(*ast.BlockStmt).Rbrace)
   554  		if parent.Comm == stmt {
   555  			return nil // maybe the user meant to remove the entire CommClause?
   556  		}
   557  	case *ast.CaseClause:
   558  		limits(parent.Colon, curStmt.Parent().Parent().Node().(*ast.BlockStmt).Rbrace)
   559  	case *ast.ForStmt:
   560  		limits(parent.For, parent.Body.Lbrace)
   561  
   562  	default:
   563  		return nil // not one of ours
   564  	}
   565  
   566  	if prev, found := curStmt.PrevSibling(); found && lineOf(prev.Node().End()) == stmtStartLine {
   567  		from = prev.Node().End() // preceding statement ends on same line
   568  	}
   569  	if next, found := curStmt.NextSibling(); found && lineOf(next.Node().Pos()) == stmtEndLine {
   570  		to = next.Node().Pos() // following statement begins on same line
   571  	}
   572  	// and now for the comments
   573  Outer:
   574  	for _, cg := range enclosingFile(curStmt).Comments {
   575  		for _, co := range cg.List {
   576  			if lineOf(co.End()) < stmtStartLine {
   577  				continue
   578  			} else if lineOf(co.Pos()) > stmtEndLine {
   579  				break Outer // no more are possible
   580  			}
   581  			if lineOf(co.End()) == stmtStartLine && co.End() < stmt.Pos() {
   582  				if !from.IsValid() || co.End() > from {
   583  					from = co.End()
   584  					continue // maybe there are more
   585  				}
   586  			}
   587  			if lineOf(co.Pos()) == stmtEndLine && co.Pos() > stmt.End() {
   588  				if !to.IsValid() || co.Pos() < to {
   589  					to = co.Pos()
   590  					continue // maybe there are more
   591  				}
   592  			}
   593  		}
   594  	}
   595  	// if either from or to is valid, just remove the statement
   596  	// otherwise remove the line
   597  	edit := analysis.TextEdit{Pos: stmt.Pos(), End: stmt.End()}
   598  	if from.IsValid() || to.IsValid() {
   599  		// remove just the statement.
   600  		// we can't tell if there is a ; or whitespace right after the statement
   601  		// ideally we'd like to remove the former and leave the latter
   602  		// (if gofmt has run, there likely won't be a ;)
   603  		// In type switches we know there's a semicolon somewhere after the statement,
   604  		// but the extra work for this special case is not worth it, as gofmt will fix it.
   605  		return []analysis.TextEdit{edit}
   606  	}
   607  	// remove the whole line
   608  	for lineOf(edit.Pos) == stmtStartLine {
   609  		edit.Pos--
   610  	}
   611  	edit.Pos++ // get back to stmtStartLine
   612  	for lineOf(edit.End) == stmtEndLine {
   613  		edit.End++
   614  	}
   615  	return []analysis.TextEdit{edit}
   616  }
   617  
   618  // Comments returns an iterator over the comments overlapping the specified interval.
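        //
        // For example, assuming n is a node within file:
        //
        //	for co := range Comments(file, n.Pos(), n.End()) {
        //		fmt.Println(co.Text)
        //	}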
   619  func Comments(file *ast.File, start, end token.Pos) iter.Seq[*ast.Comment] {
   620  	// TODO(adonovan): optimize: use binary O(log n) search instead of linear O(n) search.
   621  	return func(yield func(*ast.Comment) bool) {
   622  		for _, cg := range file.Comments {
   623  			for _, co := range cg.List {
   624  				if co.Pos() > end {
   625  					return
   626  				}
   627  				if co.End() < start {
   628  					continue
   629  				}
   630  
   631  				if !yield(co) {
   632  					return
   633  				}
   634  			}
   635  		}
   636  	}
   637  }
   638  
   639  // IsStdPackage reports whether the specified package path belongs to a
   640  // package in the standard library (including internal dependencies).
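        //
        // For example:
        //
        //	IsStdPackage("fmt")             // true
        //	IsStdPackage("encoding/json")   // true
        //	IsStdPackage("example.com/foo") // false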
   641  func IsStdPackage(path string) bool {
   642  	// A standard package has no dot in its first segment.
   643  	// (It may yet have a dot, e.g. "vendor/golang.org/x/foo".)
   644  	slash := strings.IndexByte(path, '/')
   645  	if slash < 0 {
   646  		slash = len(path)
   647  	}
   648  	return !strings.Contains(path[:slash], ".") && path != "testdata"
   649  }
   650  
   651  // Range returns an [analysis.Range] for the specified start and end positions.
   652  func Range(pos, end token.Pos) analysis.Range {
   653  	return tokenRange{pos, end}
   654  }
   655  
   656  // tokenRange is an implementation of the [analysis.Range] interface.
   657  type tokenRange struct{ StartPos, EndPos token.Pos }
   658  
   659  func (r tokenRange) Pos() token.Pos { return r.StartPos }
   660  func (r tokenRange) End() token.Pos { return r.EndPos }
   661  
   662  // enclosingFile returns the syntax tree for the file enclosing c.
   663  func enclosingFile(c inspector.Cursor) *ast.File {
   664  	c, _ = moreiters.First(c.Enclosing((*ast.File)(nil)))
   665  	return c.Node().(*ast.File)
   666  }
   667  
