Update and fix staticcheck

This commit is contained in:
kolaente 2020-05-29 22:15:21 +02:00
parent aae1bc3cab
commit a525787ab7
Signed by untrusted user: konrad
GPG Key ID: F40E70337AB24C9B
100 changed files with 12353 additions and 7912 deletions

2
go.mod
View File

@ -74,7 +74,7 @@ require (
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect
gopkg.in/d4l3k/messagediff.v1 v1.2.1
gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df
honnef.co/go/tools v0.0.1-2019.2.3
honnef.co/go/tools v0.0.1-2020.1.4
src.techknowlogick.com/xgo v0.0.0-20200514233805-209a5cf70012
src.techknowlogick.com/xormigrate v1.2.0
xorm.io/builder v0.3.7

3
go.sum
View File

@ -682,6 +682,7 @@ golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtn
golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7 h1:EBZoQjiKKPaLbPrbpssUfuHtwM6KV/vb4U85g/cigFY=
golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200410194907-79a7a3126eef h1:RHORRhs540cYZYrzgU2CPUyykkwZM78hGdzocOo9P8A=
@ -773,6 +774,8 @@ honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a h1:LJwr7TCTghdatWv40WobzlK
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.1-2019.2.3 h1:3JgtbtFHMiCmsznwGVTUWbgGov+pVqnlf1dEJTNAXeM=
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
honnef.co/go/tools v0.0.1-2020.1.4 h1:UoveltGrhghAA7ePc+e+QYDHXrBps2PqFZiHkGR/xK8=
honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
src.techknowlogick.com/xgo v0.0.0-20200514233805-209a5cf70012 h1:k1/qGRpsaGka4IHT2UIUdPqM6+oo3O7+8S3ACN2kJZQ=
src.techknowlogick.com/xgo v0.0.0-20200514233805-209a5cf70012/go.mod h1:31CE1YKtDOrKTk9PSnjTpe6YbO6W/0LTYZ1VskL09oU=

View File

@ -35,7 +35,7 @@ func IsErrGenericForbidden(err error) bool {
}
func (err ErrGenericForbidden) Error() string {
return fmt.Sprintf("Forbidden")
return "Forbidden"
}
// ErrorCodeGenericForbidden holds the unique world-error code of this error
@ -60,7 +60,7 @@ func IsErrIDCannotBeZero(err error) bool {
}
func (err ErrIDCannotBeZero) Error() string {
return fmt.Sprintf("ID cannot be empty or 0")
return "ID cannot be empty or 0"
}
// ErrCodeIDCannotBeZero holds the unique world-error code of this error
@ -169,7 +169,7 @@ func IsErrListTitleCannotBeEmpty(err error) bool {
}
func (err ErrListTitleCannotBeEmpty) Error() string {
return fmt.Sprintf("List title cannot be empty.")
return "List title cannot be empty."
}
// ErrCodeListTitleCannotBeEmpty holds the unique world-error code of this error
@ -193,7 +193,7 @@ func IsErrListShareDoesNotExist(err error) bool {
}
func (err ErrListShareDoesNotExist) Error() string {
return fmt.Sprintf("List share does not exist.")
return "List share does not exist."
}
// ErrCodeListShareDoesNotExist holds the unique world-error code of this error
@ -216,7 +216,7 @@ func IsErrListIdentifierIsNotUnique(err error) bool {
}
func (err ErrListIdentifierIsNotUnique) Error() string {
return fmt.Sprintf("List identifier is not unique.")
return "List identifier is not unique."
}
// ErrCodeListIdentifierIsNotUnique holds the unique world-error code of this error
@ -268,7 +268,7 @@ func IsErrTaskCannotBeEmpty(err error) bool {
}
func (err ErrTaskCannotBeEmpty) Error() string {
return fmt.Sprintf("List task title cannot be empty.")
return "List task title cannot be empty."
}
// ErrCodeTaskCannotBeEmpty holds the unique world-error code of this error
@ -336,7 +336,7 @@ func IsErrBulkTasksNeedAtLeastOne(err error) bool {
}
func (err ErrBulkTasksNeedAtLeastOne) Error() string {
return fmt.Sprintf("Need at least one task when bulk editing tasks")
return "Need at least one task when bulk editing tasks"
}
// ErrCodeBulkTasksNeedAtLeastOne holds the unique world-error code of this error

View File

@ -82,7 +82,7 @@ func UserTOTPEnable(c echo.Context) error {
if he, is := err.(*echo.HTTPError); is {
return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid model provided. Error was: %s", he.Message))
}
return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid model provided."))
return echo.NewHTTPError(http.StatusBadRequest, "Invalid model provided.")
}
err = user.EnableTOTP(passcode)
@ -113,7 +113,7 @@ func UserTOTPDisable(c echo.Context) error {
if he, is := err.(*echo.HTTPError); is {
return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid model provided. Error was: %s", he.Message))
}
return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid model provided."))
return echo.NewHTTPError(http.StatusBadRequest, "Invalid model provided.")
}
u, err := user.GetCurrentUser(c)

View File

@ -47,7 +47,7 @@ func UpdateUserEmail(c echo.Context) (err error) {
if he, is := err.(*echo.HTTPError); is {
return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid model provided. Error was: %s", he.Message))
}
return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid model provided."))
return echo.NewHTTPError(http.StatusBadRequest, "Invalid model provided.")
}
emailUpdate.User, err = user.GetCurrentUser(c)

View File

@ -85,7 +85,7 @@ func IsErrNoUsernamePassword(err error) bool {
}
func (err ErrNoUsernamePassword) Error() string {
return fmt.Sprintf("No username and password provided")
return "No username and password provided"
}
// ErrCodeNoUsernamePassword holds the unique world-error code of this error
@ -129,7 +129,7 @@ func IsErrCouldNotGetUserID(err error) bool {
}
func (err ErrCouldNotGetUserID) Error() string {
return fmt.Sprintf("Could not get user ID")
return "Could not get user ID"
}
// ErrCodeCouldNotGetUserID holds the unique world-error code of this error
@ -208,7 +208,7 @@ type ErrWrongUsernameOrPassword struct {
}
func (err ErrWrongUsernameOrPassword) Error() string {
return fmt.Sprintf("Wrong username or password")
return "Wrong username or password"
}
// ErrCodeWrongUsernameOrPassword holds the unique world-error code of this error
@ -258,7 +258,7 @@ func IsErrEmptyNewPassword(err error) bool {
}
func (err ErrEmptyNewPassword) Error() string {
return fmt.Sprintf("New password is empty")
return "New password is empty"
}
// ErrCodeEmptyNewPassword holds the unique world-error code of this error
@ -279,7 +279,7 @@ func IsErrEmptyOldPassword(err error) bool {
}
func (err ErrEmptyOldPassword) Error() string {
return fmt.Sprintf("Old password is empty")
return "Old password is empty"
}
// ErrCodeEmptyOldPassword holds the unique world-error code of this error
@ -300,7 +300,7 @@ func IsErrTOTPAlreadyEnabled(err error) bool {
}
func (err ErrTOTPAlreadyEnabled) Error() string {
return fmt.Sprintf("Totp is already enabled for this user")
return "Totp is already enabled for this user"
}
// ErrCodeTOTPAlreadyEnabled holds the unique world-error code of this error
@ -325,7 +325,7 @@ func IsErrTOTPNotEnabled(err error) bool {
}
func (err ErrTOTPNotEnabled) Error() string {
return fmt.Sprintf("Totp is not enabled for this user")
return "Totp is not enabled for this user"
}
// ErrCodeTOTPNotEnabled holds the unique world-error code of this error
@ -352,7 +352,7 @@ func IsErrInvalidTOTPPasscode(err error) bool {
}
func (err ErrInvalidTOTPPasscode) Error() string {
return fmt.Sprintf("Invalid totp passcode")
return "Invalid totp passcode"
}
// ErrCodeInvalidTOTPPasscode holds the unique world-error code of this error

View File

@ -1,3 +1,5 @@
module github.com/hashicorp/hcl
require github.com/davecgh/go-spew v1.1.1
go 1.13

View File

@ -1,3 +1,5 @@
module github.com/spf13/afero
require golang.org/x/text v0.3.0
go 1.13

View File

@ -75,7 +75,7 @@ resulting binaries. These projects are:
limitations under the License.
* github.com/kisielk/gotool https://github.com/kisielk/gotool
* github.com/kisielk/gotool - https://github.com/kisielk/gotool
Copyright (c) 2013 Kamil Kisiel <kamil@kamilkisiel.net>
@ -224,3 +224,61 @@ resulting binaries. These projects are:
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* gogrep - https://github.com/mvdan/gogrep
Copyright (c) 2017, Daniel Martí. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* gosmith - https://github.com/dvyukov/gosmith
Copyright (c) 2014 Dmitry Vyukov. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* The name of Dmitry Vyukov may be used to endorse or promote
products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

481
vendor/honnef.co/go/tools/code/code.go vendored Normal file
View File

@ -0,0 +1,481 @@
// Package code answers structural and type questions about Go code.
package code
import (
"flag"
"fmt"
"go/ast"
"go/constant"
"go/token"
"go/types"
"strings"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/astutil"
"golang.org/x/tools/go/ast/inspector"
"honnef.co/go/tools/facts"
"honnef.co/go/tools/go/types/typeutil"
"honnef.co/go/tools/ir"
"honnef.co/go/tools/lint"
)
type Positioner interface {
Pos() token.Pos
}
// CallName returns the fully qualified name of the function or
// builtin that call statically invokes, or the empty string if the
// callee cannot be determined (dynamic calls, method invocations on
// interfaces).
func CallName(call *ir.CallCommon) string {
	if call.IsInvoke() {
		// Interface method invocation: no static callee.
		return ""
	}
	if fn, ok := call.Value.(*ir.Function); ok {
		obj, ok := fn.Object().(*types.Func)
		if !ok {
			// Synthetic functions (e.g. wrappers) have no object.
			return ""
		}
		return lint.FuncName(obj)
	}
	if b, ok := call.Value.(*ir.Builtin); ok {
		return b.Name()
	}
	return ""
}
func IsCallTo(call *ir.CallCommon, name string) bool { return CallName(call) == name }
// IsCallToAny reports whether call invokes any of the functions named
// in names (fully qualified names, as produced by CallName).
func IsCallToAny(call *ir.CallCommon, names ...string) bool {
	callee := CallName(call)
	for _, candidate := range names {
		if candidate == callee {
			return true
		}
	}
	return false
}
func IsType(T types.Type, name string) bool { return types.TypeString(T, nil) == name }
// FilterDebug returns instr with all *ir.DebugRef instructions
// removed. The input slice is not modified.
func FilterDebug(instr []ir.Instruction) []ir.Instruction {
	var kept []ir.Instruction
	for _, ins := range instr {
		switch ins.(type) {
		case *ir.DebugRef:
			// Debug references carry no semantics; drop them.
		default:
			kept = append(kept, ins)
		}
	}
	return kept
}
func IsExample(fn *ir.Function) bool {
if !strings.HasPrefix(fn.Name(), "Example") {
return false
}
f := fn.Prog.Fset.File(fn.Pos())
if f == nil {
return false
}
return strings.HasSuffix(f.Name(), "_test.go")
}
func IsPointerLike(T types.Type) bool {
switch T := T.Underlying().(type) {
case *types.Interface, *types.Chan, *types.Map, *types.Signature, *types.Pointer:
return true
case *types.Basic:
return T.Kind() == types.UnsafePointer
}
return false
}
func IsIdent(expr ast.Expr, ident string) bool {
id, ok := expr.(*ast.Ident)
return ok && id.Name == ident
}
// isBlank returns whether id is the blank identifier "_".
// If id == nil, the answer is false.
func IsBlank(id ast.Expr) bool {
ident, _ := id.(*ast.Ident)
return ident != nil && ident.Name == "_"
}
func IsIntLiteral(expr ast.Expr, literal string) bool {
lit, ok := expr.(*ast.BasicLit)
return ok && lit.Kind == token.INT && lit.Value == literal
}
// Deprecated: use IsIntLiteral instead
func IsZero(expr ast.Expr) bool {
return IsIntLiteral(expr, "0")
}
func IsOfType(pass *analysis.Pass, expr ast.Expr, name string) bool {
return IsType(pass.TypesInfo.TypeOf(expr), name)
}
func IsInTest(pass *analysis.Pass, node Positioner) bool {
// FIXME(dh): this doesn't work for global variables with
// initializers
f := pass.Fset.File(node.Pos())
return f != nil && strings.HasSuffix(f.Name(), "_test.go")
}
// IsMain reports whether the package being processed is a package
// main.
func IsMain(pass *analysis.Pass) bool {
return pass.Pkg.Name() == "main"
}
// IsMainLike reports whether the package being processed is a
// main-like package. A main-like package is a package that is
// package main, or that is intended to be used by a tool framework
// such as cobra to implement a command.
//
// Note that this function errs on the side of false positives; it may
// return true for packages that aren't main-like. IsMainLike is
// intended for analyses that wish to suppress diagnostics for
// main-like packages to avoid false positives.
func IsMainLike(pass *analysis.Pass) bool {
if pass.Pkg.Name() == "main" {
return true
}
for _, imp := range pass.Pkg.Imports() {
if imp.Path() == "github.com/spf13/cobra" {
return true
}
}
return false
}
func SelectorName(pass *analysis.Pass, expr *ast.SelectorExpr) string {
info := pass.TypesInfo
sel := info.Selections[expr]
if sel == nil {
if x, ok := expr.X.(*ast.Ident); ok {
pkg, ok := info.ObjectOf(x).(*types.PkgName)
if !ok {
// This shouldn't happen
return fmt.Sprintf("%s.%s", x.Name, expr.Sel.Name)
}
return fmt.Sprintf("%s.%s", pkg.Imported().Path(), expr.Sel.Name)
}
panic(fmt.Sprintf("unsupported selector: %v", expr))
}
return fmt.Sprintf("(%s).%s", sel.Recv(), sel.Obj().Name())
}
func IsNil(pass *analysis.Pass, expr ast.Expr) bool {
return pass.TypesInfo.Types[expr].IsNil()
}
func BoolConst(pass *analysis.Pass, expr ast.Expr) bool {
val := pass.TypesInfo.ObjectOf(expr.(*ast.Ident)).(*types.Const).Val()
return constant.BoolVal(val)
}
func IsBoolConst(pass *analysis.Pass, expr ast.Expr) bool {
// We explicitly don't support typed bools because more often than
// not, custom bool types are used as binary enums and the
// explicit comparison is desired.
ident, ok := expr.(*ast.Ident)
if !ok {
return false
}
obj := pass.TypesInfo.ObjectOf(ident)
c, ok := obj.(*types.Const)
if !ok {
return false
}
basic, ok := c.Type().(*types.Basic)
if !ok {
return false
}
if basic.Kind() != types.UntypedBool && basic.Kind() != types.Bool {
return false
}
return true
}
func ExprToInt(pass *analysis.Pass, expr ast.Expr) (int64, bool) {
tv := pass.TypesInfo.Types[expr]
if tv.Value == nil {
return 0, false
}
if tv.Value.Kind() != constant.Int {
return 0, false
}
return constant.Int64Val(tv.Value)
}
func ExprToString(pass *analysis.Pass, expr ast.Expr) (string, bool) {
val := pass.TypesInfo.Types[expr].Value
if val == nil {
return "", false
}
if val.Kind() != constant.String {
return "", false
}
return constant.StringVal(val), true
}
// Dereference returns a pointer's element type; otherwise it returns
// T.
func Dereference(T types.Type) types.Type {
if p, ok := T.Underlying().(*types.Pointer); ok {
return p.Elem()
}
return T
}
// DereferenceR returns a pointer's element type; otherwise it returns
// T. If the element type is itself a pointer, DereferenceR will be
// applied recursively.
func DereferenceR(T types.Type) types.Type {
if p, ok := T.Underlying().(*types.Pointer); ok {
return DereferenceR(p.Elem())
}
return T
}
func CallNameAST(pass *analysis.Pass, call *ast.CallExpr) string {
switch fun := astutil.Unparen(call.Fun).(type) {
case *ast.SelectorExpr:
fn, ok := pass.TypesInfo.ObjectOf(fun.Sel).(*types.Func)
if !ok {
return ""
}
return lint.FuncName(fn)
case *ast.Ident:
obj := pass.TypesInfo.ObjectOf(fun)
switch obj := obj.(type) {
case *types.Func:
return lint.FuncName(obj)
case *types.Builtin:
return obj.Name()
default:
return ""
}
default:
return ""
}
}
func IsCallToAST(pass *analysis.Pass, node ast.Node, name string) bool {
call, ok := node.(*ast.CallExpr)
if !ok {
return false
}
return CallNameAST(pass, call) == name
}
func IsCallToAnyAST(pass *analysis.Pass, node ast.Node, names ...string) bool {
call, ok := node.(*ast.CallExpr)
if !ok {
return false
}
q := CallNameAST(pass, call)
for _, name := range names {
if q == name {
return true
}
}
return false
}
func Preamble(f *ast.File) string {
cutoff := f.Package
if f.Doc != nil {
cutoff = f.Doc.Pos()
}
var out []string
for _, cmt := range f.Comments {
if cmt.Pos() >= cutoff {
break
}
out = append(out, cmt.Text())
}
return strings.Join(out, "\n")
}
// GroupSpecs partitions specs into runs of specs that appear on
// directly adjacent lines in the source: a new group starts whenever
// there is at least one blank (or otherwise spec-free) line between a
// spec and its predecessor. Positions are resolved without adjusting
// for //line directives. Returns nil for an empty input.
func GroupSpecs(fset *token.FileSet, specs []ast.Spec) [][]ast.Spec {
	if len(specs) == 0 {
		return nil
	}
	// Seed the first group with the first spec; every later spec either
	// joins the last group or opens a new one.
	groups := make([][]ast.Spec, 1)
	groups[0] = append(groups[0], specs[0])
	for _, spec := range specs[1:] {
		g := groups[len(groups)-1]
		// Adjacent means: this spec starts on the line immediately
		// after the line on which the previous spec ends.
		if fset.PositionFor(spec.Pos(), false).Line-1 !=
			fset.PositionFor(g[len(g)-1].End(), false).Line {
			groups = append(groups, nil)
		}
		groups[len(groups)-1] = append(groups[len(groups)-1], spec)
	}
	return groups
}
func IsObject(obj types.Object, name string) bool {
var path string
if pkg := obj.Pkg(); pkg != nil {
path = pkg.Path() + "."
}
return path+obj.Name() == name
}
type Field struct {
Var *types.Var
Tag string
Path []int
}
// FlattenFields recursively flattens T and embedded structs,
// returning a list of fields. If multiple fields with the same name
// exist, all will be returned.
func FlattenFields(T *types.Struct) []Field {
return flattenFields(T, nil, nil)
}
func flattenFields(T *types.Struct, path []int, seen map[types.Type]bool) []Field {
if seen == nil {
seen = map[types.Type]bool{}
}
if seen[T] {
return nil
}
seen[T] = true
var out []Field
for i := 0; i < T.NumFields(); i++ {
field := T.Field(i)
tag := T.Tag(i)
np := append(path[:len(path):len(path)], i)
if field.Anonymous() {
if s, ok := Dereference(field.Type()).Underlying().(*types.Struct); ok {
out = append(out, flattenFields(s, np, seen)...)
}
} else {
out = append(out, Field{field, tag, np})
}
}
return out
}
func File(pass *analysis.Pass, node Positioner) *ast.File {
m := pass.ResultOf[facts.TokenFile].(map[*token.File]*ast.File)
return m[pass.Fset.File(node.Pos())]
}
// IsGenerated reports whether pos is in a generated file, It ignores
// //line directives.
func IsGenerated(pass *analysis.Pass, pos token.Pos) bool {
_, ok := Generator(pass, pos)
return ok
}
// Generator returns the generator that generated the file containing
// pos. It ignores //line directives.
func Generator(pass *analysis.Pass, pos token.Pos) (facts.Generator, bool) {
file := pass.Fset.PositionFor(pos, false).Filename
m := pass.ResultOf[facts.Generated].(map[string]facts.Generator)
g, ok := m[file]
return g, ok
}
// MayHaveSideEffects reports whether expr may have side effects. If
// the purity argument is nil, this function implements a purely
// syntactic check, meaning that any function call may have side
// effects, regardless of the called function's body. Otherwise,
// purity will be consulted to determine the purity of function calls.
func MayHaveSideEffects(pass *analysis.Pass, expr ast.Expr, purity facts.PurityResult) bool {
	switch expr := expr.(type) {
	case *ast.BadExpr:
		// Malformed source: assume the worst.
		return true
	case *ast.Ellipsis:
		return MayHaveSideEffects(pass, expr.Elt, purity)
	case *ast.FuncLit:
		// the literal itself cannot have side effects, only calling it
		// might, which is handled by CallExpr.
		return false
	case *ast.ArrayType, *ast.StructType, *ast.FuncType, *ast.InterfaceType, *ast.MapType, *ast.ChanType:
		// types cannot have side effects
		return false
	case *ast.BasicLit:
		return false
	case *ast.BinaryExpr:
		return MayHaveSideEffects(pass, expr.X, purity) || MayHaveSideEffects(pass, expr.Y, purity)
	case *ast.CallExpr:
		if purity == nil {
			// Syntactic mode: any call may have side effects.
			return true
		}
		switch obj := typeutil.Callee(pass.TypesInfo, expr).(type) {
		case *types.Func:
			// A function not recorded as pure must be assumed impure.
			if _, ok := purity[obj]; !ok {
				return true
			}
		case *types.Builtin:
			// Only len and cap are side-effect free among the builtins
			// we accept here; e.g. copy and append mutate, and a
			// channel receive in close's argument could block.
			switch obj.Name() {
			case "len", "cap":
			default:
				return true
			}
		default:
			// Dynamic call through a variable or method value.
			return true
		}
		// The callee is pure, but its arguments may still have effects.
		for _, arg := range expr.Args {
			if MayHaveSideEffects(pass, arg, purity) {
				return true
			}
		}
		return false
	case *ast.CompositeLit:
		if MayHaveSideEffects(pass, expr.Type, purity) {
			return true
		}
		for _, elt := range expr.Elts {
			if MayHaveSideEffects(pass, elt, purity) {
				return true
			}
		}
		return false
	case *ast.Ident:
		return false
	case *ast.IndexExpr:
		return MayHaveSideEffects(pass, expr.X, purity) || MayHaveSideEffects(pass, expr.Index, purity)
	case *ast.KeyValueExpr:
		return MayHaveSideEffects(pass, expr.Key, purity) || MayHaveSideEffects(pass, expr.Value, purity)
	case *ast.SelectorExpr:
		return MayHaveSideEffects(pass, expr.X, purity)
	case *ast.SliceExpr:
		return MayHaveSideEffects(pass, expr.X, purity) ||
			MayHaveSideEffects(pass, expr.Low, purity) ||
			MayHaveSideEffects(pass, expr.High, purity) ||
			MayHaveSideEffects(pass, expr.Max, purity)
	case *ast.StarExpr:
		return MayHaveSideEffects(pass, expr.X, purity)
	case *ast.TypeAssertExpr:
		return MayHaveSideEffects(pass, expr.X, purity)
	case *ast.UnaryExpr:
		if MayHaveSideEffects(pass, expr.X, purity) {
			return true
		}
		// A channel receive (<-ch) can block and consumes a value.
		return expr.Op == token.ARROW
	case *ast.ParenExpr:
		return MayHaveSideEffects(pass, expr.X, purity)
	case nil:
		// Absent optional sub-expressions (e.g. slice bounds) recurse
		// here; they trivially have no effects.
		return false
	default:
		panic(fmt.Sprintf("internal error: unhandled type %T", expr))
	}
}
func IsGoVersion(pass *analysis.Pass, minor int) bool {
version := pass.Analyzer.Flags.Lookup("go").Value.(flag.Getter).Get().(int)
return version >= minor
}
func Preorder(pass *analysis.Pass, fn func(ast.Node), types ...ast.Node) {
pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder(types, fn)
}

View File

@ -3,6 +3,8 @@ package config
import (
"bytes"
"fmt"
"go/ast"
"go/token"
"os"
"path/filepath"
"reflect"
@ -12,38 +14,57 @@ import (
"golang.org/x/tools/go/analysis"
)
// Dir looks at a list of absolute file names, which should make up a
// single package, and returns the path of the directory that may
// contain a staticcheck.conf file. It returns the empty string if no
// such directory could be determined, for example because all files
// were located in Go's build cache.
func Dir(files []string) string {
if len(files) == 0 {
return ""
}
cache, err := os.UserCacheDir()
if err != nil {
cache = ""
}
var path string
for _, p := range files {
// FIXME(dh): using strings.HasPrefix isn't technically
// correct, but it should be good enough for now.
if cache != "" && strings.HasPrefix(p, cache) {
// File in the build cache of the standard Go build system
continue
}
path = p
break
}
if path == "" {
// The package only consists of generated files.
return ""
}
dir := filepath.Dir(path)
return dir
}
func dirAST(files []*ast.File, fset *token.FileSet) string {
names := make([]string, len(files))
for i, f := range files {
names[i] = fset.PositionFor(f.Pos(), true).Filename
}
return Dir(names)
}
var Analyzer = &analysis.Analyzer{
Name: "config",
Doc: "loads configuration for the current package tree",
Run: func(pass *analysis.Pass) (interface{}, error) {
if len(pass.Files) == 0 {
dir := dirAST(pass.Files, pass.Fset)
if dir == "" {
cfg := DefaultConfig
return &cfg, nil
}
cache, err := os.UserCacheDir()
if err != nil {
cache = ""
}
var path string
for _, f := range pass.Files {
p := pass.Fset.PositionFor(f.Pos(), true).Filename
// FIXME(dh): using strings.HasPrefix isn't technically
// correct, but it should be good enough for now.
if cache != "" && strings.HasPrefix(p, cache) {
// File in the build cache of the standard Go build system
continue
}
path = p
break
}
if path == "" {
// The package only consists of generated files.
cfg := DefaultConfig
return &cfg, nil
}
dir := filepath.Dir(path)
cfg, err := Load(dir)
if err != nil {
return nil, fmt.Errorf("error loading staticcheck.conf: %s", err)
@ -136,7 +157,7 @@ func (c Config) String() string {
}
var DefaultConfig = Config{
Checks: []string{"all", "-ST1000", "-ST1003", "-ST1016"},
Checks: []string{"all", "-ST1000", "-ST1003", "-ST1016", "-ST1020", "-ST1021", "-ST1022"},
Initialisms: []string{
"ACL", "API", "ASCII", "CPU", "CSS", "DNS",
"EOF", "GUID", "HTML", "HTTP", "HTTPS", "ID",
@ -144,20 +165,20 @@ var DefaultConfig = Config{
"SMTP", "SQL", "SSH", "TCP", "TLS", "TTL",
"UDP", "UI", "GID", "UID", "UUID", "URI",
"URL", "UTF8", "VM", "XML", "XMPP", "XSRF",
"XSS", "SIP", "RTP",
"XSS", "SIP", "RTP", "AMQP", "DB", "TS",
},
DotImportWhitelist: []string{},
HTTPStatusCodeWhitelist: []string{"200", "400", "404", "500"},
}
const configName = "staticcheck.conf"
const ConfigName = "staticcheck.conf"
func parseConfigs(dir string) ([]Config, error) {
var out []Config
// TODO(dh): consider stopping at the GOPATH/module boundary
for dir != "" {
f, err := os.Open(filepath.Join(dir, configName))
f, err := os.Open(filepath.Join(dir, ConfigName))
if os.IsNotExist(err) {
ndir := filepath.Dir(dir)
if ndir == dir {

View File

@ -6,7 +6,6 @@ type Deprecation struct {
}
var Stdlib = map[string]Deprecation{
"image/jpeg.Reader": {4, 0},
// FIXME(dh): AllowBinary isn't being detected as deprecated
// because the comment has a newline right after "Deprecated:"
"go/build.AllowBinary": {7, 7},
@ -73,40 +72,48 @@ var Stdlib = map[string]Deprecation{
// This function has no alternative, but also no purpose.
"(*crypto/rc4.Cipher).Reset": {12, 0},
"(net/http/httptest.ResponseRecorder).HeaderMap": {11, 7},
"image.ZP": {13, 0},
"image.ZR": {13, 0},
"(*debug/gosym.LineTable).LineToPC": {2, 2},
"(*debug/gosym.LineTable).PCToLine": {2, 2},
"crypto/tls.VersionSSL30": {13, 0},
"(crypto/tls.Config).NameToCertificate": {14, 14},
"(*crypto/tls.Config).BuildNameToCertificate": {14, 14},
"image/jpeg.Reader": {4, 0},
// All of these have been deprecated in favour of external libraries
"syscall.AttachLsf": {7, 0},
"syscall.DetachLsf": {7, 0},
"syscall.LsfSocket": {7, 0},
"syscall.SetLsfPromisc": {7, 0},
"syscall.LsfJump": {7, 0},
"syscall.LsfStmt": {7, 0},
"syscall.BpfStmt": {7, 0},
"syscall.BpfJump": {7, 0},
"syscall.BpfBuflen": {7, 0},
"syscall.SetBpfBuflen": {7, 0},
"syscall.BpfDatalink": {7, 0},
"syscall.SetBpfDatalink": {7, 0},
"syscall.SetBpfPromisc": {7, 0},
"syscall.FlushBpf": {7, 0},
"syscall.BpfInterface": {7, 0},
"syscall.SetBpfInterface": {7, 0},
"syscall.BpfTimeout": {7, 0},
"syscall.SetBpfTimeout": {7, 0},
"syscall.BpfStats": {7, 0},
"syscall.SetBpfImmediate": {7, 0},
"syscall.SetBpf": {7, 0},
"syscall.CheckBpfVersion": {7, 0},
"syscall.BpfHeadercmpl": {7, 0},
"syscall.SetBpfHeadercmpl": {7, 0},
"syscall.RouteRIB": {8, 0},
"syscall.RoutingMessage": {8, 0},
"syscall.RouteMessage": {8, 0},
"syscall.InterfaceMessage": {8, 0},
"syscall.InterfaceAddrMessage": {8, 0},
"syscall.ParseRoutingMessage": {8, 0},
"syscall.ParseRoutingSockaddr": {8, 0},
"InterfaceAnnounceMessage": {7, 0},
"InterfaceMulticastAddrMessage": {7, 0},
"syscall.FormatMessage": {5, 0},
"syscall.AttachLsf": {7, 0},
"syscall.DetachLsf": {7, 0},
"syscall.LsfSocket": {7, 0},
"syscall.SetLsfPromisc": {7, 0},
"syscall.LsfJump": {7, 0},
"syscall.LsfStmt": {7, 0},
"syscall.BpfStmt": {7, 0},
"syscall.BpfJump": {7, 0},
"syscall.BpfBuflen": {7, 0},
"syscall.SetBpfBuflen": {7, 0},
"syscall.BpfDatalink": {7, 0},
"syscall.SetBpfDatalink": {7, 0},
"syscall.SetBpfPromisc": {7, 0},
"syscall.FlushBpf": {7, 0},
"syscall.BpfInterface": {7, 0},
"syscall.SetBpfInterface": {7, 0},
"syscall.BpfTimeout": {7, 0},
"syscall.SetBpfTimeout": {7, 0},
"syscall.BpfStats": {7, 0},
"syscall.SetBpfImmediate": {7, 0},
"syscall.SetBpf": {7, 0},
"syscall.CheckBpfVersion": {7, 0},
"syscall.BpfHeadercmpl": {7, 0},
"syscall.SetBpfHeadercmpl": {7, 0},
"syscall.RouteRIB": {8, 0},
"syscall.RoutingMessage": {8, 0},
"syscall.RouteMessage": {8, 0},
"syscall.InterfaceMessage": {8, 0},
"syscall.InterfaceAddrMessage": {8, 0},
"syscall.ParseRoutingMessage": {8, 0},
"syscall.ParseRoutingSockaddr": {8, 0},
"syscall.InterfaceAnnounceMessage": {7, 0},
"syscall.InterfaceMulticastAddrMessage": {7, 0},
"syscall.FormatMessage": {5, 0},
}

67
vendor/honnef.co/go/tools/edit/edit.go vendored Normal file
View File

@ -0,0 +1,67 @@
package edit
import (
"bytes"
"go/ast"
"go/format"
"go/token"
"golang.org/x/tools/go/analysis"
"honnef.co/go/tools/pattern"
)
type Ranger interface {
Pos() token.Pos
End() token.Pos
}
type Range [2]token.Pos
func (r Range) Pos() token.Pos { return r[0] }
func (r Range) End() token.Pos { return r[1] }
func ReplaceWithString(fset *token.FileSet, old Ranger, new string) analysis.TextEdit {
return analysis.TextEdit{
Pos: old.Pos(),
End: old.End(),
NewText: []byte(new),
}
}
// ReplaceWithNode returns a TextEdit that replaces the range covered
// by old with the canonical source rendering of the AST node new.
// It panics if the node cannot be formatted, as that indicates an
// internal error in the caller.
func ReplaceWithNode(fset *token.FileSet, old Ranger, new ast.Node) analysis.TextEdit {
	var buf bytes.Buffer
	if err := format.Node(&buf, fset, new); err != nil {
		panic("internal error: " + err.Error())
	}
	return analysis.TextEdit{
		Pos:     old.Pos(),
		End:     old.End(),
		NewText: buf.Bytes(),
	}
}
// ReplaceWithPattern returns a TextEdit that replaces the range
// covered by node with the source rendering of the pattern after,
// instantiated with the bindings in state.
func ReplaceWithPattern(pass *analysis.Pass, after pattern.Pattern, state pattern.State, node Ranger) analysis.TextEdit {
	r := pattern.NodeToAST(after.Root, state)
	buf := &bytes.Buffer{}
	// Previously the error from format.Node was silently discarded,
	// which could yield an empty or truncated NewText and thus a
	// corrupt suggested fix. Treat it as an internal error, matching
	// ReplaceWithNode.
	if err := format.Node(buf, pass.Fset, r); err != nil {
		panic("internal error: " + err.Error())
	}
	return analysis.TextEdit{
		Pos:     node.Pos(),
		End:     node.End(),
		NewText: buf.Bytes(),
	}
}
func Delete(old Ranger) analysis.TextEdit {
return analysis.TextEdit{
Pos: old.Pos(),
End: old.End(),
NewText: nil,
}
}
func Fix(msg string, edits ...analysis.TextEdit) analysis.SuggestedFix {
return analysis.SuggestedFix{
Message: msg,
TextEdits: edits,
}
}

View File

@ -19,6 +19,7 @@ const (
Goyacc
Cgo
Stringer
ProtocGenGo
)
var (
@ -51,10 +52,16 @@ func isGenerated(path string) (Generator, bool) {
return Goyacc, true
case "by cmd/cgo;":
return Cgo, true
case "by protoc-gen-go.":
return ProtocGenGo, true
}
if strings.HasPrefix(text, `by "stringer `) {
return Stringer, true
}
if strings.HasPrefix(text, `by goyacc `) {
return Goyacc, true
}
return Unknown, true
}
if bytes.Equal(s, oldCgo) {

View File

@ -1,14 +1,13 @@
package facts
import (
"go/token"
"go/types"
"reflect"
"golang.org/x/tools/go/analysis"
"honnef.co/go/tools/functions"
"honnef.co/go/tools/internal/passes/buildssa"
"honnef.co/go/tools/ssa"
"honnef.co/go/tools/internal/passes/buildir"
"honnef.co/go/tools/ir"
)
type IsPure struct{}
@ -22,7 +21,7 @@ var Purity = &analysis.Analyzer{
Name: "fact_purity",
Doc: "Mark pure functions",
Run: purity,
Requires: []*analysis.Analyzer{buildssa.Analyzer},
Requires: []*analysis.Analyzer{buildir.Analyzer},
FactTypes: []analysis.Fact{(*IsPure)(nil)},
ResultType: reflect.TypeOf(PurityResult{}),
}
@ -56,65 +55,68 @@ var pureStdlib = map[string]struct{}{
}
func purity(pass *analysis.Pass) (interface{}, error) {
seen := map[*ssa.Function]struct{}{}
ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).Pkg
var check func(ssafn *ssa.Function) (ret bool)
check = func(ssafn *ssa.Function) (ret bool) {
if ssafn.Object() == nil {
seen := map[*ir.Function]struct{}{}
irpkg := pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg
var check func(fn *ir.Function) (ret bool)
check = func(fn *ir.Function) (ret bool) {
if fn.Object() == nil {
// TODO(dh): support closures
return false
}
if pass.ImportObjectFact(ssafn.Object(), new(IsPure)) {
if pass.ImportObjectFact(fn.Object(), new(IsPure)) {
return true
}
if ssafn.Pkg != ssapkg {
if fn.Pkg != irpkg {
// Function is in another package but wasn't marked as
// pure, ergo it isn't pure
return false
}
// Break recursion
if _, ok := seen[ssafn]; ok {
if _, ok := seen[fn]; ok {
return false
}
seen[ssafn] = struct{}{}
seen[fn] = struct{}{}
defer func() {
if ret {
pass.ExportObjectFact(ssafn.Object(), &IsPure{})
pass.ExportObjectFact(fn.Object(), &IsPure{})
}
}()
if functions.IsStub(ssafn) {
if functions.IsStub(fn) {
return false
}
if _, ok := pureStdlib[ssafn.Object().(*types.Func).FullName()]; ok {
if _, ok := pureStdlib[fn.Object().(*types.Func).FullName()]; ok {
return true
}
if ssafn.Signature.Results().Len() == 0 {
if fn.Signature.Results().Len() == 0 {
// A function with no return values is empty or is doing some
// work we cannot see (for example because of build tags);
// don't consider it pure.
return false
}
for _, param := range ssafn.Params {
for _, param := range fn.Params {
// TODO(dh): this may not be strictly correct. pure code
// can, to an extent, operate on non-basic types.
if _, ok := param.Type().Underlying().(*types.Basic); !ok {
return false
}
}
if ssafn.Blocks == nil {
// Don't consider external functions pure.
if fn.Blocks == nil {
return false
}
checkCall := func(common *ssa.CallCommon) bool {
checkCall := func(common *ir.CallCommon) bool {
if common.IsInvoke() {
return false
}
builtin, ok := common.Value.(*ssa.Builtin)
builtin, ok := common.Value.(*ir.Builtin)
if !ok {
if common.StaticCallee() != ssafn {
if common.StaticCallee() != fn {
if common.StaticCallee() == nil {
return false
}
@ -124,47 +126,47 @@ func purity(pass *analysis.Pass) (interface{}, error) {
}
} else {
switch builtin.Name() {
case "len", "cap", "make", "new":
case "len", "cap":
default:
return false
}
}
return true
}
for _, b := range ssafn.Blocks {
for _, b := range fn.Blocks {
for _, ins := range b.Instrs {
switch ins := ins.(type) {
case *ssa.Call:
case *ir.Call:
if !checkCall(ins.Common()) {
return false
}
case *ssa.Defer:
case *ir.Defer:
if !checkCall(&ins.Call) {
return false
}
case *ssa.Select:
case *ir.Select:
return false
case *ssa.Send:
case *ir.Send:
return false
case *ssa.Go:
case *ir.Go:
return false
case *ssa.Panic:
case *ir.Panic:
return false
case *ssa.Store:
case *ir.Store:
return false
case *ssa.FieldAddr:
case *ir.FieldAddr:
return false
case *ir.Alloc:
return false
case *ir.Load:
return false
case *ssa.UnOp:
if ins.Op == token.MUL || ins.Op == token.AND {
return false
}
}
}
}
return true
}
for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
check(ssafn)
for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
check(fn)
}
out := PurityResult{}

View File

@ -1,10 +1,10 @@
package functions
import "honnef.co/go/tools/ssa"
import "honnef.co/go/tools/ir"
type Loop struct{ ssa.BlockSet }
type Loop struct{ *ir.BlockSet }
func FindLoops(fn *ssa.Function) []Loop {
func FindLoops(fn *ir.Function) []Loop {
if fn.Blocks == nil {
return nil
}
@ -18,12 +18,12 @@ func FindLoops(fn *ssa.Function) []Loop {
// n is a back-edge to h
// h is the loop header
if n == h {
set := Loop{}
set := Loop{ir.NewBlockSet(len(fn.Blocks))}
set.Add(n)
sets = append(sets, set)
continue
}
set := Loop{}
set := Loop{ir.NewBlockSet(len(fn.Blocks))}
set.Add(h)
set.Add(n)
for _, b := range allPredsBut(n, h, nil) {
@ -35,7 +35,7 @@ func FindLoops(fn *ssa.Function) []Loop {
return sets
}
func allPredsBut(b, but *ssa.BasicBlock, list []*ssa.BasicBlock) []*ssa.BasicBlock {
func allPredsBut(b, but *ir.BasicBlock, list []*ir.BasicBlock) []*ir.BasicBlock {
outer:
for _, pred := range b.Preds {
if pred == but {

View File

@ -1,46 +0,0 @@
package functions
import (
"honnef.co/go/tools/ssa"
)
// filterDebug returns instr with every *ssa.DebugRef removed; debug
// references carry no runtime semantics and must not count as "work".
func filterDebug(instr []ssa.Instruction) []ssa.Instruction {
	var kept []ssa.Instruction
	for _, in := range instr {
		switch in.(type) {
		case *ssa.DebugRef:
			// drop debug-only instructions
		default:
			kept = append(kept, in)
		}
	}
	return kept
}
// IsStub reports whether a function is a stub. A function is
// considered a stub if it has no instructions or exactly one
// instruction, which must be either returning only constant values or
// a panic.
func IsStub(fn *ssa.Function) bool {
	switch len(fn.Blocks) {
	case 0:
		// No body at all (e.g. external function): trivially a stub.
		return true
	case 1:
		// Single block: inspect its instructions below.
	default:
		// More than one block implies real control flow.
		return false
	}
	instrs := filterDebug(fn.Blocks[0].Instrs)
	if len(instrs) != 1 {
		return false
	}
	// Since this is the only instruction, a return's operands must be
	// constants. We consider all constants as stubs, not just the zero
	// value. This does not, unfortunately, cover zero initialised
	// structs, as these cause additional instructions.
	_, isReturn := instrs[0].(*ssa.Return)
	_, isPanic := instrs[0].(*ssa.Panic)
	return isReturn || isPanic
}

View File

@ -0,0 +1,32 @@
package functions
import (
"honnef.co/go/tools/ir"
)
// IsStub reports whether a function is a stub. A function is
// considered a stub if it has no instructions or if all it does is
// return a constant value.
func IsStub(fn *ir.Function) bool {
	for _, block := range fn.Blocks {
		for _, instr := range block.Instrs {
			if !isStubInstruction(instr) {
				return false
			}
		}
	}
	return true
}

// isStubInstruction reports whether instr belongs to the small set of
// instruction kinds a stub function may consist of.
func isStubInstruction(instr ir.Instruction) bool {
	switch instr.(type) {
	case *ir.Const, *ir.Panic, *ir.Return, *ir.DebugRef, *ir.Jump:
		// Constants have no side effects; panic/return of constants are
		// stub-like; debug refs carry no semantics; and when no other
		// instruction kinds appear, jumps can only lead to the exit
		// block (or possibly somewhere else that's stubby?).
		return true
	default:
		// all other instructions are assumed to do actual work
		return false
	}
}

View File

@ -1,11 +1,15 @@
package functions
import "honnef.co/go/tools/ssa"
import (
"go/types"
"honnef.co/go/tools/ir"
)
// Terminates reports whether fn is supposed to return, that is if it
// has at least one theoretic path that returns from the function.
// Explicit panics do not count as terminating.
func Terminates(fn *ssa.Function) bool {
func Terminates(fn *ir.Function) bool {
if fn.Blocks == nil {
// assuming that a function terminates is the conservative
// choice
@ -13,11 +17,53 @@ func Terminates(fn *ssa.Function) bool {
}
for _, block := range fn.Blocks {
if len(block.Instrs) == 0 {
continue
}
if _, ok := block.Instrs[len(block.Instrs)-1].(*ssa.Return); ok {
return true
if _, ok := block.Control().(*ir.Return); ok {
if len(block.Preds) == 0 {
return true
}
for _, pred := range block.Preds {
switch ctrl := pred.Control().(type) {
case *ir.Panic:
// explicit panics do not count as terminating
case *ir.If:
// Check if we got here by receiving from a closed
// time.Tick channel this cannot happen at
// runtime and thus doesn't constitute termination
iff := ctrl
if !ok {
return true
}
ex, ok := iff.Cond.(*ir.Extract)
if !ok {
return true
}
if ex.Index != 1 {
return true
}
recv, ok := ex.Tuple.(*ir.Recv)
if !ok {
return true
}
call, ok := recv.Chan.(*ir.Call)
if !ok {
return true
}
fn, ok := call.Common().Value.(*ir.Function)
if !ok {
return true
}
fn2, ok := fn.Object().(*types.Func)
if !ok {
return true
}
if fn2.FullName() != "time.Tick" {
return true
}
default:
// we've reached the exit block
return true
}
}
}
}
return false

View File

@ -177,7 +177,7 @@ func (c *Cache) get(id ActionID) (Entry, error) {
i++
}
tm, err := strconv.ParseInt(string(etime[i:]), 10, 64)
if err != nil || size < 0 {
if err != nil || tm < 0 {
return missing()
}
@ -265,7 +265,7 @@ func (c *Cache) Trim() {
// We maintain in dir/trim.txt the time of the last completed cache trim.
// If the cache has been trimmed recently enough, do nothing.
// This is the common case.
data, _ := ioutil.ReadFile(filepath.Join(c.dir, "trim.txt"))
data, _ := renameio.ReadFile(filepath.Join(c.dir, "trim.txt"))
t, err := strconv.ParseInt(strings.TrimSpace(string(data)), 10, 64)
if err == nil && now.Sub(time.Unix(t, 0)) < trimInterval {
return
@ -282,7 +282,7 @@ func (c *Cache) Trim() {
// Ignore errors from here: if we don't write the complete timestamp, the
// cache will appear older than it is, and we'll trim it again next time.
renameio.WriteFile(filepath.Join(c.dir, "trim.txt"), []byte(fmt.Sprintf("%d", now.Unix())))
renameio.WriteFile(filepath.Join(c.dir, "trim.txt"), []byte(fmt.Sprintf("%d", now.Unix())), 0666)
}
// trimSubdir trims a single cache subdirectory.
@ -326,7 +326,8 @@ func (c *Cache) putIndexEntry(id ActionID, out OutputID, size int64, allowVerify
// in verify mode we are double-checking that the cache entries
// are entirely reproducible. As just noted, this may be unrealistic
// in some cases but the check is also useful for shaking out real bugs.
entry := []byte(fmt.Sprintf("v1 %x %x %20d %20d\n", id, out, size, time.Now().UnixNano()))
entry := fmt.Sprintf("v1 %x %x %20d %20d\n", id, out, size, time.Now().UnixNano())
if verify && allowVerify {
old, err := c.get(id)
if err == nil && (old.OutputID != out || old.Size != size) {
@ -336,7 +337,28 @@ func (c *Cache) putIndexEntry(id ActionID, out OutputID, size int64, allowVerify
}
}
file := c.fileName(id, "a")
if err := ioutil.WriteFile(file, entry, 0666); err != nil {
// Copy file to cache directory.
mode := os.O_WRONLY | os.O_CREATE
f, err := os.OpenFile(file, mode, 0666)
if err != nil {
return err
}
_, err = f.WriteString(entry)
if err == nil {
// Truncate the file only *after* writing it.
// (This should be a no-op, but truncate just in case of previous corruption.)
//
// This differs from ioutil.WriteFile, which truncates to 0 *before* writing
// via os.O_TRUNC. Truncating only after writing ensures that a second write
// of the same content to the same file is idempotent, and does not — even
// temporarily! — undo the effect of the first write.
err = f.Truncate(int64(len(entry)))
}
if closeErr := f.Close(); err == nil {
err = closeErr
}
if err != nil {
// TODO(bcmills): This Remove potentially races with another go command writing to file.
// Can we eliminate it?
os.Remove(file)

View File

@ -0,0 +1,113 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package buildir defines an Analyzer that constructs the IR
// of an error-free package and returns the set of all
// functions within it. It does not report any diagnostics itself but
// may be used as an input to other analyzers.
//
// THIS INTERFACE IS EXPERIMENTAL AND MAY BE SUBJECT TO INCOMPATIBLE CHANGE.
package buildir
import (
"go/ast"
"go/types"
"reflect"
"golang.org/x/tools/go/analysis"
"honnef.co/go/tools/ir"
)
// willExit and willUnwind are analysis facts attached to function
// objects. run re-imports them for exported functions of dependency
// packages and mirrors them onto ir.Function.WillExit / WillUnwind.
// (Presumably willExit marks functions that never return and willUnwind
// functions that always panic/unwind — confirm against the ir package.)
type willExit struct{}
type willUnwind struct{}

func (*willExit) AFact()   {}
func (*willUnwind) AFact() {}

// Analyzer builds IR for the package under analysis; dependent
// analyzers consume the *IR result declared in ResultType.
var Analyzer = &analysis.Analyzer{
	Name:       "buildir",
	Doc:        "build IR for later passes",
	Run:        run,
	ResultType: reflect.TypeOf(new(IR)),
	FactTypes:  []analysis.Fact{new(willExit), new(willUnwind)},
}

// IR provides intermediate representation for all the
// non-blank source functions in the current package.
type IR struct {
	Pkg      *ir.Package    // IR package built for the analyzed package
	SrcFuncs []*ir.Function // source functions, including anonymous ones
}
// run builds a fresh ir.Program for the analyzed package, creates IR
// stubs for all (transitive) imports, builds the primary package, and
// returns it together with every source-level function including
// anonymous ones.
func run(pass *analysis.Pass) (interface{}, error) {
	// Plundered from ssautil.BuildPackage.

	// We must create a new Program for each Package because the
	// analysis API provides no place to hang a Program shared by
	// all Packages. Consequently, IR Packages and Functions do not
	// have a canonical representation across an analysis session of
	// multiple packages. This is unlikely to be a problem in
	// practice because the analysis API essentially forces all
	// packages to be analysed independently, so any given call to
	// Analysis.Run on a package will see only IR objects belonging
	// to a single Program.

	mode := ir.GlobalDebug

	prog := ir.NewProgram(pass.Fset, mode)

	// Create IR packages for all imports.
	// Order is not significant.
	created := make(map[*types.Package]bool)
	var createAll func(pkgs []*types.Package)
	createAll = func(pkgs []*types.Package) {
		for _, p := range pkgs {
			if !created[p] {
				created[p] = true
				irpkg := prog.CreatePackage(p, nil, nil, true)
				// Seed WillExit/WillUnwind on exported functions of
				// dependencies from facts exported by earlier passes.
				for _, fn := range irpkg.Functions {
					if ast.IsExported(fn.Name()) {
						var exit willExit
						var unwind willUnwind
						if pass.ImportObjectFact(fn.Object(), &exit) {
							fn.WillExit = true
						}
						if pass.ImportObjectFact(fn.Object(), &unwind) {
							fn.WillUnwind = true
						}
					}
				}
				createAll(p.Imports())
			}
		}
	}
	createAll(pass.Pkg.Imports())

	// Create and build the primary package.
	irpkg := prog.CreatePackage(pass.Pkg, pass.Files, pass.TypesInfo, false)
	irpkg.Build()

	// Compute list of source functions, including literals,
	// in source order.
	var addAnons func(f *ir.Function)
	funcs := make([]*ir.Function, len(irpkg.Functions))
	copy(funcs, irpkg.Functions)
	addAnons = func(f *ir.Function) {
		for _, anon := range f.AnonFuncs {
			funcs = append(funcs, anon)
			addAnons(anon)
		}
	}
	for _, fn := range irpkg.Functions {
		addAnons(fn)
		// Export the flags computed while building this package so
		// downstream packages can import them as facts.
		if fn.WillExit {
			pass.ExportObjectFact(fn.Object(), new(willExit))
		}
		if fn.WillUnwind {
			pass.ExportObjectFact(fn.Object(), new(willUnwind))
		}
	}
	return &IR{Pkg: irpkg, SrcFuncs: funcs}, nil
}

View File

@ -1,116 +0,0 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package buildssa defines an Analyzer that constructs the SSA
// representation of an error-free package and returns the set of all
// functions within it. It does not report any diagnostics itself but
// may be used as an input to other analyzers.
//
// THIS INTERFACE IS EXPERIMENTAL AND MAY BE SUBJECT TO INCOMPATIBLE CHANGE.
package buildssa
import (
"go/ast"
"go/types"
"reflect"
"golang.org/x/tools/go/analysis"
"honnef.co/go/tools/ssa"
)
// Analyzer builds SSA form for the package under analysis; dependent
// analyzers consume the *SSA result declared in ResultType.
var Analyzer = &analysis.Analyzer{
	Name:       "buildssa",
	Doc:        "build SSA-form IR for later passes",
	Run:        run,
	ResultType: reflect.TypeOf(new(SSA)),
}

// SSA provides SSA-form intermediate representation for all the
// non-blank source functions in the current package.
type SSA struct {
	Pkg      *ssa.Package    // SSA package built for the analyzed package
	SrcFuncs []*ssa.Function // source functions, including anonymous ones
}
// run builds a fresh ssa.Program for the analyzed package, creates SSA
// stubs for all (transitive) imports, builds the primary package, and
// returns it together with every source function including literals.
func run(pass *analysis.Pass) (interface{}, error) {
	// Plundered from ssautil.BuildPackage.

	// We must create a new Program for each Package because the
	// analysis API provides no place to hang a Program shared by
	// all Packages. Consequently, SSA Packages and Functions do not
	// have a canonical representation across an analysis session of
	// multiple packages. This is unlikely to be a problem in
	// practice because the analysis API essentially forces all
	// packages to be analysed independently, so any given call to
	// Analysis.Run on a package will see only SSA objects belonging
	// to a single Program.

	mode := ssa.GlobalDebug

	prog := ssa.NewProgram(pass.Fset, mode)

	// Create SSA packages for all imports.
	// Order is not significant.
	created := make(map[*types.Package]bool)
	var createAll func(pkgs []*types.Package)
	createAll = func(pkgs []*types.Package) {
		for _, p := range pkgs {
			if !created[p] {
				created[p] = true
				prog.CreatePackage(p, nil, nil, true)
				createAll(p.Imports())
			}
		}
	}
	createAll(pass.Pkg.Imports())

	// Create and build the primary package.
	ssapkg := prog.CreatePackage(pass.Pkg, pass.Files, pass.TypesInfo, false)
	ssapkg.Build()

	// Compute list of source functions, including literals,
	// in source order.
	var funcs []*ssa.Function
	var addAnons func(f *ssa.Function)
	addAnons = func(f *ssa.Function) {
		funcs = append(funcs, f)
		for _, anon := range f.AnonFuncs {
			addAnons(anon)
		}
	}
	addAnons(ssapkg.Members["init"].(*ssa.Function))
	for _, f := range pass.Files {
		for _, decl := range f.Decls {
			if fdecl, ok := decl.(*ast.FuncDecl); ok {

				// SSA will not build a Function
				// for a FuncDecl named blank.
				// That's arguably too strict but
				// relaxing it would break uniqueness of
				// names of package members.
				if fdecl.Name.Name == "_" {
					continue
				}

				// (init functions have distinct Func
				// objects named "init" and distinct
				// ssa.Functions named "init#1", ...)
				// NOTE(review): if Defs[fdecl.Name] is nil, the type
				// assertion below panics before the nil check fires —
				// the explicit panic(fn) paths look unreachable as
				// written; confirm intent upstream.
				fn := pass.TypesInfo.Defs[fdecl.Name].(*types.Func)
				if fn == nil {
					panic(fn)
				}

				f := ssapkg.Prog.FuncValue(fn)
				if f == nil {
					panic(fn)
				}

				addAnons(f)
			}
		}
	}

	return &SSA{Pkg: ssapkg, SrcFuncs: funcs}, nil
}

View File

@ -8,15 +8,15 @@ package renameio
import (
"bytes"
"io"
"io/ioutil"
"math/rand"
"os"
"path/filepath"
"runtime"
"strings"
"time"
"strconv"
"honnef.co/go/tools/internal/robustio"
)
const patternSuffix = "*.tmp"
const patternSuffix = ".tmp"
// Pattern returns a glob pattern that matches the unrenamed temporary files
// created when writing to filename.
@ -29,14 +29,14 @@ func Pattern(filename string) string {
// final name.
//
// That ensures that the final location, if it exists, is always a complete file.
func WriteFile(filename string, data []byte) (err error) {
return WriteToFile(filename, bytes.NewReader(data))
func WriteFile(filename string, data []byte, perm os.FileMode) (err error) {
return WriteToFile(filename, bytes.NewReader(data), perm)
}
// WriteToFile is a variant of WriteFile that accepts the data as an io.Reader
// instead of a slice.
func WriteToFile(filename string, data io.Reader) (err error) {
f, err := ioutil.TempFile(filepath.Dir(filename), filepath.Base(filename)+patternSuffix)
func WriteToFile(filename string, data io.Reader, perm os.FileMode) (err error) {
f, err := tempFile(filepath.Dir(filename), filepath.Base(filename), perm)
if err != nil {
return err
}
@ -63,21 +63,31 @@ func WriteToFile(filename string, data io.Reader) (err error) {
return err
}
var start time.Time
for {
err := os.Rename(f.Name(), filename)
if err == nil || runtime.GOOS != "windows" || !strings.HasSuffix(err.Error(), "Access is denied.") {
return err
}
// Windows seems to occasionally trigger spurious "Access is denied" errors
// here (see golang.org/issue/31247). We're not sure why. It's probably
// worth a little extra latency to avoid propagating the spurious errors.
if start.IsZero() {
start = time.Now()
} else if time.Since(start) >= 500*time.Millisecond {
return err
}
time.Sleep(5 * time.Millisecond)
}
return robustio.Rename(f.Name(), filename)
}
// tempFile creates a new temporary file in dir with the given permission
// bits, retrying a bounded number of times on name collisions.
func tempFile(dir, prefix string, perm os.FileMode) (*os.File, error) {
	var (
		f   *os.File
		err error
	)
	for attempt := 0; attempt < 10000; attempt++ {
		suffix := strconv.Itoa(rand.Intn(1000000000)) + patternSuffix
		name := filepath.Join(dir, prefix+suffix)
		f, err = os.OpenFile(name, os.O_RDWR|os.O_CREATE|os.O_EXCL, perm)
		if !os.IsExist(err) {
			// Either success or a non-collision failure; another
			// random name would not help, so stop retrying.
			break
		}
	}
	return f, err
}
// ReadFile is like ioutil.ReadFile, but on Windows retries spurious errors
// that may occur if the file is concurrently replaced.
//
// Errors are classified heuristically and retries are bounded, so even this
// function may occasionally return a spurious error on Windows.
// If so, the error will likely wrap one of:
// - syscall.ERROR_ACCESS_DENIED
// - syscall.ERROR_FILE_NOT_FOUND
// - internal/syscall/windows.ERROR_SHARING_VIOLATION
func ReadFile(filename string) ([]byte, error) {
	// Delegate to the retry-aware implementation in robustio.
	return robustio.ReadFile(filename)
}

View File

@ -0,0 +1,53 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package robustio wraps I/O functions that are prone to failure on Windows,
// transparently retrying errors up to an arbitrary timeout.
//
// Errors are classified heuristically and retries are bounded, so the functions
// in this package do not completely eliminate spurious errors. However, they do
// significantly reduce the rate of failure in practice.
//
// The functions in this package do not completely eliminate spurious errors,
// but substantially reduce their rate of occurrence in practice.
package robustio
// Rename is like os.Rename, but on Windows retries errors that may occur if the
// file is concurrently read or overwritten.
//
// (See golang.org/issue/31247 and golang.org/issue/32188.)
func Rename(oldpath, newpath string) error {
	// rename is supplied by the platform-specific file in this package.
	return rename(oldpath, newpath)
}
// ReadFile is like ioutil.ReadFile, but on Windows retries errors that may
// occur if the file is concurrently replaced.
//
// (See golang.org/issue/31247 and golang.org/issue/32188.)
func ReadFile(filename string) ([]byte, error) {
	// readFile is supplied by the platform-specific file in this package.
	return readFile(filename)
}
// RemoveAll is like os.RemoveAll, but on Windows retries errors that may occur
// if an executable file in the directory has recently been executed.
//
// (See golang.org/issue/19491.)
func RemoveAll(path string) error {
	// removeAll is supplied by the platform-specific file in this package.
	return removeAll(path)
}
// IsEphemeralError reports whether err is one of the errors that the functions
// in this package attempt to mitigate.
//
// Errors considered ephemeral include:
// - syscall.ERROR_ACCESS_DENIED
// - syscall.ERROR_FILE_NOT_FOUND
// - internal/syscall/windows.ERROR_SHARING_VIOLATION
//
// This set may be expanded in the future; programs must not rely on the
// non-ephemerality of any given error.
func IsEphemeralError(err error) bool {
	// isEphemeralError is supplied by the platform-specific file.
	return isEphemeralError(err)
}

View File

@ -0,0 +1,29 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package robustio
import (
"os"
"syscall"
)
const errFileNotFound = syscall.ENOENT
// isEphemeralError returns true if err may be resolved by waiting.
func isEphemeralError(err error) bool {
switch werr := err.(type) {
case *os.PathError:
err = werr.Err
case *os.LinkError:
err = werr.Err
case *os.SyscallError:
err = werr.Err
}
if errno, ok := err.(syscall.Errno); ok {
return errno == errFileNotFound
}
return false
}

View File

@ -0,0 +1,93 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build windows darwin
package robustio
import (
"io/ioutil"
"math/rand"
"os"
"syscall"
"time"
)
const arbitraryTimeout = 500 * time.Millisecond
const ERROR_SHARING_VIOLATION = 32
// retry retries ephemeral errors from f up to an arbitrary timeout
// to work around filesystem flakiness on Windows and Darwin.
func retry(f func() (err error, mayRetry bool)) error {
var (
bestErr error
lowestErrno syscall.Errno
start time.Time
nextSleep time.Duration = 1 * time.Millisecond
)
for {
err, mayRetry := f()
if err == nil || !mayRetry {
return err
}
if errno, ok := err.(syscall.Errno); ok && (lowestErrno == 0 || errno < lowestErrno) {
bestErr = err
lowestErrno = errno
} else if bestErr == nil {
bestErr = err
}
if start.IsZero() {
start = time.Now()
} else if d := time.Since(start) + nextSleep; d >= arbitraryTimeout {
break
}
time.Sleep(nextSleep)
nextSleep += time.Duration(rand.Int63n(int64(nextSleep)))
}
return bestErr
}
// rename is like os.Rename, but retries ephemeral errors.
//
// On windows it wraps os.Rename, which (as of 2019-06-04) uses MoveFileEx with
// MOVEFILE_REPLACE_EXISTING.
//
// Windows also provides a different system call, ReplaceFile,
// that provides similar semantics, but perhaps preserves more metadata. (The
// documentation on the differences between the two is very sparse.)
//
// Empirical error rates with MoveFileEx are lower under modest concurrency, so
// for now we're sticking with what the os package already provides.
func rename(oldpath, newpath string) (err error) {
	return retry(func() (err error, mayRetry bool) {
		err = os.Rename(oldpath, newpath)
		// Only errors classified as ephemeral are worth retrying.
		return err, isEphemeralError(err)
	})
}
// readFile is like ioutil.ReadFile, but retries ephemeral errors.
func readFile(filename string) ([]byte, error) {
	// b is captured by the closure; the last attempt's result wins.
	var b []byte
	err := retry(func() (err error, mayRetry bool) {
		b, err = ioutil.ReadFile(filename)

		// Unlike in rename, we do not retry errFileNotFound here: it can occur
		// as a spurious error, but the file may also genuinely not exist, so the
		// increase in robustness is probably not worth the extra latency.
		return err, isEphemeralError(err) && err != errFileNotFound
	})
	return b, err
}
// removeAll is like os.RemoveAll, but retries ephemeral errors.
func removeAll(path string) error {
	return retry(func() (err error, mayRetry bool) {
		err = os.RemoveAll(path)
		// Only errors classified as ephemeral are worth retrying.
		return err, isEphemeralError(err)
	})
}

View File

@ -0,0 +1,28 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//+build !windows,!darwin
package robustio
import (
"io/ioutil"
"os"
)
// rename delegates directly to os.Rename; this platform needs no retry logic.
func rename(oldpath, newpath string) error {
	return os.Rename(oldpath, newpath)
}
// readFile delegates directly to ioutil.ReadFile; no retries are needed here.
func readFile(filename string) ([]byte, error) {
	return ioutil.ReadFile(filename)
}
// removeAll delegates directly to os.RemoveAll; no retries are needed here.
func removeAll(path string) error {
	return os.RemoveAll(path)
}
// isEphemeralError always reports false: on platforms other than Windows
// and Darwin this package treats no errors as transient.
func isEphemeralError(err error) bool {
	return false
}

View File

@ -0,0 +1,33 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package robustio
import (
"os"
"syscall"
)
// errFileNotFound is the errno treated as "file not found" on Windows.
const errFileNotFound = syscall.ERROR_FILE_NOT_FOUND

// isEphemeralError returns true if err may be resolved by waiting.
// It unwraps one layer of the standard os error wrappers, then treats
// access-denied, file-not-found and sharing-violation errnos as
// transient (see the package comment for why these are retried).
func isEphemeralError(err error) bool {
	switch werr := err.(type) {
	case *os.PathError:
		err = werr.Err
	case *os.LinkError:
		err = werr.Err
	case *os.SyscallError:
		err = werr.Err
	}
	if errno, ok := err.(syscall.Errno); ok {
		switch errno {
		case syscall.ERROR_ACCESS_DENIED,
			syscall.ERROR_FILE_NOT_FOUND,
			ERROR_SHARING_VIOLATION:
			return true
		}
	}
	return false
}

View File

@ -5,23 +5,24 @@ import (
"go/types"
"golang.org/x/tools/go/analysis"
"honnef.co/go/tools/internal/passes/buildssa"
"honnef.co/go/tools/code"
"honnef.co/go/tools/internal/passes/buildir"
"honnef.co/go/tools/ir"
. "honnef.co/go/tools/lint/lintdsl"
"honnef.co/go/tools/ssa"
)
func CheckRangeStringRunes(pass *analysis.Pass) (interface{}, error) {
for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
fn := func(node ast.Node) bool {
for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
cb := func(node ast.Node) bool {
rng, ok := node.(*ast.RangeStmt)
if !ok || !IsBlank(rng.Key) {
if !ok || !code.IsBlank(rng.Key) {
return true
}
v, _ := ssafn.ValueForExpr(rng.X)
v, _ := fn.ValueForExpr(rng.X)
// Check that we're converting from string to []rune
val, _ := v.(*ssa.Convert)
val, _ := v.(*ir.Convert)
if val == nil {
return true
}
@ -47,13 +48,13 @@ func CheckRangeStringRunes(pass *analysis.Pass) (interface{}, error) {
// Expect two refs: one for obtaining the length of the slice,
// one for accessing the elements
if len(FilterDebug(*refs)) != 2 {
if len(code.FilterDebug(*refs)) != 2 {
// TODO(dh): right now, we check that only one place
// refers to our slice. This will miss cases such as
// ranging over the slice twice. Ideally, we'd ensure that
// the slice is only used for ranging over (without
// accessing the key), but that is harder to do because in
// SSA form, ranging over a slice looks like an ordinary
// IR form, ranging over a slice looks like an ordinary
// loop with index increments and slice accesses. We'd
// have to look at the associated AST node to check that
// it's a range statement.
@ -64,7 +65,7 @@ func CheckRangeStringRunes(pass *analysis.Pass) (interface{}, error) {
return true
}
Inspect(ssafn.Syntax(), fn)
Inspect(fn.Source(), cb)
}
return nil, nil
}

View File

@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ssa
package ir
// Simple block optimizations to simplify the control flow graph.
@ -21,35 +21,34 @@ const debugBlockOpt = false
// markReachable sets Index=-1 for all blocks reachable from b.
func markReachable(b *BasicBlock) {
b.Index = -1
b.gaps = -1
for _, succ := range b.Succs {
if succ.Index == 0 {
if succ.gaps == 0 {
markReachable(succ)
}
}
}
func DeleteUnreachableBlocks(f *Function) {
deleteUnreachableBlocks(f)
}
// deleteUnreachableBlocks marks all reachable blocks of f and
// eliminates (nils) all others, including possibly cyclic subgraphs.
//
func deleteUnreachableBlocks(f *Function) {
const white, black = 0, -1
// We borrow b.Index temporarily as the mark bit.
// We borrow b.gaps temporarily as the mark bit.
for _, b := range f.Blocks {
b.Index = white
b.gaps = white
}
markReachable(f.Blocks[0])
if f.Recover != nil {
markReachable(f.Recover)
}
// In SSI form, we need the exit to be reachable for correct
// post-dominance information. In original form, however, we
// cannot unconditionally mark it reachable because we won't
// be adding fake edges, and this breaks the calculation of
// dominance information.
markReachable(f.Exit)
for i, b := range f.Blocks {
if b.Index == white {
if b.gaps == white {
for _, c := range b.Succs {
if c.Index == black {
if c.gaps == black {
c.removePred(b) // delete white->black edge
}
}
@ -73,6 +72,13 @@ func jumpThreading(f *Function, b *BasicBlock) bool {
if b.Instrs == nil {
return false
}
for _, pred := range b.Preds {
switch pred.Control().(type) {
case *ConstantSwitch:
// don't optimize away the head blocks of switch statements
return false
}
}
if _, ok := b.Instrs[0].(*Jump); !ok {
return false // not just a jump
}
@ -117,10 +123,17 @@ func fuseBlocks(f *Function, a *BasicBlock) bool {
if len(a.Succs) != 1 {
return false
}
if a.Succs[0] == f.Exit {
return false
}
b := a.Succs[0]
if len(b.Preds) != 1 {
return false
}
if _, ok := a.Instrs[len(a.Instrs)-1].(*Panic); ok {
// panics aren't simple jumps, they have side effects.
return false
}
// Degenerate &&/|| ops may result in a straight-line CFG
// containing φ-nodes. (Ideally we'd replace such them with
@ -151,15 +164,16 @@ func fuseBlocks(f *Function, a *BasicBlock) bool {
return true
}
func OptimizeBlocks(f *Function) {
optimizeBlocks(f)
}
// optimizeBlocks() performs some simple block optimizations on a
// completed function: dead block elimination, block fusion, jump
// threading.
//
func optimizeBlocks(f *Function) {
if debugBlockOpt {
f.WriteTo(os.Stderr)
mustSanityCheck(f, nil)
}
deleteUnreachableBlocks(f)
// Loop until no further progress.

View File

@ -2,14 +2,13 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ssa
package ir
// This file defines the Const SSA value type.
import (
"fmt"
"go/constant"
"go/token"
"go/types"
"strconv"
)
@ -18,7 +17,12 @@ import (
// val must be valid according to the specification of Const.Value.
//
func NewConst(val constant.Value, typ types.Type) *Const {
return &Const{typ, val}
return &Const{
register: register{
typ: typ,
},
Value: val,
}
}
// intConst returns an 'int' constant that evaluates to i.
@ -71,43 +75,25 @@ func zeroConst(t types.Type) *Const {
}
func (c *Const) RelString(from *types.Package) string {
var s string
var p string
if c.Value == nil {
s = "nil"
p = "nil"
} else if c.Value.Kind() == constant.String {
s = constant.StringVal(c.Value)
v := constant.StringVal(c.Value)
const max = 20
// TODO(adonovan): don't cut a rune in half.
if len(s) > max {
s = s[:max-3] + "..." // abbreviate
if len(v) > max {
v = v[:max-3] + "..." // abbreviate
}
s = strconv.Quote(s)
p = strconv.Quote(v)
} else {
s = c.Value.String()
p = c.Value.String()
}
return s + ":" + relType(c.Type(), from)
}
func (c *Const) Name() string {
return c.RelString(nil)
return fmt.Sprintf("Const <%s> {%s}", relType(c.Type(), from), p)
}
func (c *Const) String() string {
return c.Name()
}
func (c *Const) Type() types.Type {
return c.typ
}
func (c *Const) Referrers() *[]Instruction {
return nil
}
func (c *Const) Parent() *Function { return nil }
func (c *Const) Pos() token.Pos {
return token.NoPos
return c.RelString(c.Parent().pkg())
}
// IsNil returns true if this constant represents a typed or untyped nil value.
@ -115,8 +101,6 @@ func (c *Const) IsNil() bool {
return c.Value == nil
}
// TODO(adonovan): move everything below into honnef.co/go/tools/ssa/interp.
// Int64 returns the numeric value of this constant truncated to fit
// a signed 64-bit integer.
//

View File

@ -2,9 +2,9 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ssa
package ir
// This file implements the CREATE phase of SSA construction.
// This file implements the CREATE phase of IR construction.
// See builder.go for explanation.
import (
@ -18,9 +18,9 @@ import (
"golang.org/x/tools/go/types/typeutil"
)
// NewProgram returns a new SSA Program.
// NewProgram returns a new IR Program.
//
// mode controls diagnostics and checking during SSA construction.
// mode controls diagnostics and checking during IR construction.
//
func NewProgram(fset *token.FileSet, mode BuilderMode) *Program {
prog := &Program{
@ -75,7 +75,6 @@ func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node) {
name: name,
object: obj,
typ: types.NewPointer(obj.Type()), // address
pos: obj.Pos(),
}
pkg.values[obj] = g
pkg.Members[name] = g
@ -90,16 +89,20 @@ func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node) {
name: name,
object: obj,
Signature: sig,
syntax: syntax,
pos: obj.Pos(),
Pkg: pkg,
Prog: pkg.Prog,
}
fn.source = syntax
fn.initHTML(pkg.printFunc)
if syntax == nil {
fn.Synthetic = "loaded from gc object file"
} else {
fn.functionBody = new(functionBody)
}
pkg.values[obj] = fn
pkg.Functions = append(pkg.Functions, fn)
if sig.Recv() == nil {
pkg.Members[name] = fn // package-level function
}
@ -152,35 +155,39 @@ func membersFromDecl(pkg *Package, decl ast.Decl) {
}
}
// CreatePackage constructs and returns an SSA Package from the
// CreatePackage constructs and returns an IR Package from the
// specified type-checked, error-free file ASTs, and populates its
// Members mapping.
//
// importable determines whether this package should be returned by a
// subsequent call to ImportedPackage(pkg.Path()).
//
// The real work of building SSA form for each function is not done
// The real work of building IR form for each function is not done
// until a subsequent call to Package.Build().
//
func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info *types.Info, importable bool) *Package {
p := &Package{
Prog: prog,
Members: make(map[string]Member),
values: make(map[types.Object]Value),
Pkg: pkg,
info: info, // transient (CREATE and BUILD phases)
files: files, // transient (CREATE and BUILD phases)
Prog: prog,
Members: make(map[string]Member),
values: make(map[types.Object]Value),
Pkg: pkg,
info: info, // transient (CREATE and BUILD phases)
files: files, // transient (CREATE and BUILD phases)
printFunc: prog.PrintFunc,
}
// Add init() function.
p.init = &Function{
name: "init",
Signature: new(types.Signature),
Synthetic: "package initializer",
Pkg: p,
Prog: prog,
name: "init",
Signature: new(types.Signature),
Synthetic: "package initializer",
Pkg: p,
Prog: prog,
functionBody: new(functionBody),
}
p.init.initHTML(prog.PrintFunc)
p.Members[p.init.name] = p.init
p.Functions = append(p.Functions, p.init)
// CREATE phase.
// Allocate all package members: vars, funcs, consts and types.
@ -209,15 +216,13 @@ func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info *
}
}
if prog.mode&BareInits == 0 {
// Add initializer guard variable.
initguard := &Global{
Pkg: p,
name: "init$guard",
typ: types.NewPointer(tBool),
}
p.Members[initguard.Name()] = initguard
// Add initializer guard variable.
initguard := &Global{
Pkg: p,
name: "init$guard",
typ: types.NewPointer(tBool),
}
p.Members[initguard.Name()] = initguard
if prog.mode&GlobalDebug != 0 {
p.SetDebugMode(true)
@ -260,10 +265,10 @@ func (prog *Program) AllPackages() []*Package {
//
// TODO(adonovan): rethink this function and the "importable" concept;
// most packages are importable. This function assumes that all
// types.Package.Path values are unique within the ssa.Program, which is
// types.Package.Path values are unique within the ir.Program, which is
// false---yet this function remains very convenient.
// Clients should use (*Program).Package instead where possible.
// SSA doesn't really need a string-keyed map of packages.
// IR doesn't really need a string-keyed map of packages.
//
func (prog *Program) ImportedPackage(path string) *Package {
return prog.imported[path]

View File

@ -2,36 +2,34 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package ssa defines a representation of the elements of Go programs
// Package ir defines a representation of the elements of Go programs
// (packages, types, functions, variables and constants) using a
// static single-assignment (SSA) form intermediate representation
// static single-information (SSI) form intermediate representation
// (IR) for the bodies of functions.
//
// THIS INTERFACE IS EXPERIMENTAL AND IS LIKELY TO CHANGE.
//
// For an introduction to SSA form, see
// For an introduction to SSA form, upon which SSI builds, see
// http://en.wikipedia.org/wiki/Static_single_assignment_form.
// This page provides a broader reading list:
// http://www.dcs.gla.ac.uk/~jsinger/ssa.html.
//
// The level of abstraction of the SSA form is intentionally close to
// For an introduction to SSI form, see The static single information
// form by C. Scott Ananian.
//
// The level of abstraction of the IR form is intentionally close to
// the source language to facilitate construction of source analysis
// tools. It is not intended for machine code generation.
//
// All looping, branching and switching constructs are replaced with
// unstructured control flow. Higher-level control flow constructs
// such as multi-way branch can be reconstructed as needed; see
// ssautil.Switches() for an example.
//
// The simplest way to create the SSA representation of a package is
// The simplest way to create the IR of a package is
// to load typed syntax trees using golang.org/x/tools/go/packages, then
// invoke the ssautil.Packages helper function. See ExampleLoadPackages
// invoke the irutil.Packages helper function. See ExampleLoadPackages
// and ExampleWholeProgram for examples.
// The resulting ssa.Program contains all the packages and their
// members, but SSA code is not created for function bodies until a
// The resulting ir.Program contains all the packages and their
// members, but IR code is not created for function bodies until a
// subsequent call to (*Package).Build or (*Program).Build.
//
// The builder initially builds a naive SSA form in which all local
// The builder initially builds a naive IR form in which all local
// variables are addresses of stack locations with explicit loads and
// stores. Registerisation of eligible locals and φ-node insertion
// using dominance and dataflow are then performed as a second pass
@ -44,7 +42,7 @@
// - Member: a named member of a Go package.
// - Value: an expression that yields a value.
// - Instruction: a statement that consumes values and performs computation.
// - Node: a Value or Instruction (emphasizing its membership in the SSA value graph)
// - Node: a Value or Instruction (emphasizing its membership in the IR value graph)
//
// A computation that yields a result implements both the Value and
// Instruction interfaces. The following table shows for each
@ -53,47 +51,53 @@
// Value? Instruction? Member?
// *Alloc ✔ ✔
// *BinOp ✔ ✔
// *BlankStore ✔
// *Builtin ✔
// *Call ✔ ✔
// *ChangeInterface ✔ ✔
// *ChangeType ✔ ✔
// *Const ✔
// *Const ✔
// *Convert ✔ ✔
// *DebugRef ✔
// *Defer
// *Defer
// *Extract ✔ ✔
// *Field ✔ ✔
// *FieldAddr ✔ ✔
// *FreeVar ✔
// *Function ✔ ✔ (func)
// *Global ✔ ✔ (var)
// *Go
// *Go
// *If ✔
// *Index ✔ ✔
// *IndexAddr ✔ ✔
// *Jump ✔
// *Lookup ✔ ✔
// *Load ✔ ✔
// *MakeChan ✔ ✔
// *MakeClosure ✔ ✔
// *MakeInterface ✔ ✔
// *MakeMap ✔ ✔
// *MakeSlice ✔ ✔
// *MapUpdate ✔
// *MapLookup ✔ ✔
// *MapUpdate ✔ ✔
// *NamedConst ✔ (const)
// *Next ✔ ✔
// *Panic ✔
// *Parameter ✔
// *Parameter ✔
// *Phi ✔ ✔
// *Range ✔ ✔
// *Recv ✔ ✔
// *Return ✔
// *RunDefers ✔
// *Select ✔ ✔
// *Send ✔
// *Send ✔ ✔
// *Sigma ✔ ✔
// *Slice ✔ ✔
// *Store ✔
// *Store ✔ ✔
// *StringLookup ✔ ✔
// *Type ✔ (type)
// *TypeAssert ✔ ✔
// *UnOp ✔ ✔
// *Unreachable ✔
//
// Other key types in this package include: Program, Package, Function
// and BasicBlock.
@ -102,7 +106,7 @@
// resolved internally, i.e. it does not rely on the names of Values,
// Packages, Functions, Types or BasicBlocks for the correct
// interpretation of the program. Only the identities of objects and
// the topology of the SSA and type graphs are semantically
// the topology of the IR and type graphs are semantically
// significant. (There is one exception: Ids, used to identify field
// and method names, contain strings.) Avoidance of name-based
// operations simplifies the implementation of subsequent passes and
@ -111,7 +115,7 @@
// either accurate or unambiguous. The public API exposes a number of
// name-based maps for client convenience.
//
// The ssa/ssautil package provides various utilities that depend only
// The ir/irutil package provides various utilities that depend only
// on the public API of this package.
//
// TODO(adonovan): Consider the exceptional control-flow implications
@ -120,6 +124,6 @@
// TODO(adonovan): write a how-to document for all the various cases
// of trying to determine corresponding elements across the four
// domains of source locations, ast.Nodes, types.Objects,
// ssa.Values/Instructions.
// ir.Values/Instructions.
//
package ssa // import "honnef.co/go/tools/ssa"
package ir // import "honnef.co/go/tools/ir"

View File

@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ssa
package ir
// This file defines algorithms related to dominance.
@ -20,6 +20,7 @@ package ssa
import (
"bytes"
"fmt"
"io"
"math/big"
"os"
"sort"
@ -27,8 +28,7 @@ import (
// Idom returns the block that immediately dominates b:
// its parent in the dominator tree, if any.
// Neither the entry node (b.Index==0) nor recover node
// (b==b.Parent().Recover()) have a parent.
// The entry node (b.Index==0) does not have a parent.
//
func (b *BasicBlock) Idom() *BasicBlock { return b.dom.idom }
@ -66,144 +66,211 @@ type domInfo struct {
pre, post int32 // pre- and post-order numbering within domtree
}
// ltState holds the working state for Lengauer-Tarjan algorithm
// (during which domInfo.pre is repurposed for CFG DFS preorder number).
type ltState struct {
// Each slice is indexed by b.Index.
sdom []*BasicBlock // b's semidominator
parent []*BasicBlock // b's parent in DFS traversal of CFG
ancestor []*BasicBlock // b's ancestor with least sdom
}
// dfs implements the depth-first search part of the LT algorithm.
func (lt *ltState) dfs(v *BasicBlock, i int32, preorder []*BasicBlock) int32 {
preorder[i] = v
v.dom.pre = i // For now: DFS preorder of spanning tree of CFG
i++
lt.sdom[v.Index] = v
lt.link(nil, v)
for _, w := range v.Succs {
if lt.sdom[w.Index] == nil {
lt.parent[w.Index] = v
i = lt.dfs(w, i, preorder)
}
}
return i
}
// eval implements the EVAL part of the LT algorithm.
func (lt *ltState) eval(v *BasicBlock) *BasicBlock {
// TODO(adonovan): opt: do path compression per simple LT.
u := v
for ; lt.ancestor[v.Index] != nil; v = lt.ancestor[v.Index] {
if lt.sdom[v.Index].dom.pre < lt.sdom[u.Index].dom.pre {
u = v
}
}
return u
}
// link implements the LINK part of the LT algorithm.
func (lt *ltState) link(v, w *BasicBlock) {
lt.ancestor[w.Index] = v
}
// buildDomTree computes the dominator tree of f using the LT algorithm.
// Precondition: all blocks are reachable (e.g. optimizeBlocks has been run).
//
func buildDomTree(f *Function) {
func buildDomTree(fn *Function) {
// The step numbers refer to the original LT paper; the
// reordering is due to Georgiadis.
// Clear any previous domInfo.
for _, b := range f.Blocks {
for _, b := range fn.Blocks {
b.dom = domInfo{}
}
n := len(f.Blocks)
// Allocate space for 5 contiguous [n]*BasicBlock arrays:
// sdom, parent, ancestor, preorder, buckets.
space := make([]*BasicBlock, 5*n)
lt := ltState{
sdom: space[0:n],
parent: space[n : 2*n],
ancestor: space[2*n : 3*n],
idoms := make([]*BasicBlock, len(fn.Blocks))
order := make([]*BasicBlock, 0, len(fn.Blocks))
seen := fn.blockset(0)
var dfs func(b *BasicBlock)
dfs = func(b *BasicBlock) {
if !seen.Add(b) {
return
}
for _, succ := range b.Succs {
dfs(succ)
}
if fn.fakeExits.Has(b) {
dfs(fn.Exit)
}
order = append(order, b)
b.post = len(order) - 1
}
dfs(fn.Blocks[0])
for i := 0; i < len(order)/2; i++ {
o := len(order) - i - 1
order[i], order[o] = order[o], order[i]
}
// Step 1. Number vertices by depth-first preorder.
preorder := space[3*n : 4*n]
root := f.Blocks[0]
prenum := lt.dfs(root, 0, preorder)
recover := f.Recover
if recover != nil {
lt.dfs(recover, prenum, preorder)
}
idoms[fn.Blocks[0].Index] = fn.Blocks[0]
changed := true
for changed {
changed = false
// iterate over all nodes in reverse postorder, except for the
// entry node
for _, b := range order[1:] {
var newIdom *BasicBlock
do := func(p *BasicBlock) {
if idoms[p.Index] == nil {
return
}
if newIdom == nil {
newIdom = p
} else {
finger1 := p
finger2 := newIdom
for finger1 != finger2 {
for finger1.post < finger2.post {
finger1 = idoms[finger1.Index]
}
for finger2.post < finger1.post {
finger2 = idoms[finger2.Index]
}
}
newIdom = finger1
}
}
for _, p := range b.Preds {
do(p)
}
if b == fn.Exit {
for _, p := range fn.Blocks {
if fn.fakeExits.Has(p) {
do(p)
}
}
}
buckets := space[4*n : 5*n]
copy(buckets, preorder)
// In reverse preorder...
for i := int32(n) - 1; i > 0; i-- {
w := preorder[i]
// Step 3. Implicitly define the immediate dominator of each node.
for v := buckets[i]; v != w; v = buckets[v.dom.pre] {
u := lt.eval(v)
if lt.sdom[u.Index].dom.pre < i {
v.dom.idom = u
} else {
v.dom.idom = w
if idoms[b.Index] != newIdom {
idoms[b.Index] = newIdom
changed = true
}
}
}
// Step 2. Compute the semidominators of all nodes.
lt.sdom[w.Index] = lt.parent[w.Index]
for _, v := range w.Preds {
u := lt.eval(v)
if lt.sdom[u.Index].dom.pre < lt.sdom[w.Index].dom.pre {
lt.sdom[w.Index] = lt.sdom[u.Index]
}
for i, b := range idoms {
fn.Blocks[i].dom.idom = b
if b == nil {
// malformed CFG
continue
}
lt.link(lt.parent[w.Index], w)
if lt.parent[w.Index] == lt.sdom[w.Index] {
w.dom.idom = lt.parent[w.Index]
} else {
buckets[i] = buckets[lt.sdom[w.Index].dom.pre]
buckets[lt.sdom[w.Index].dom.pre] = w
if i == b.Index {
continue
}
b.dom.children = append(b.dom.children, fn.Blocks[i])
}
// The final 'Step 3' is now outside the loop.
for v := buckets[0]; v != root; v = buckets[v.dom.pre] {
v.dom.idom = root
}
numberDomTree(fn.Blocks[0], 0, 0)
// Step 4. Explicitly define the immediate dominator of each
// node, in preorder.
for _, w := range preorder[1:] {
if w == root || w == recover {
w.dom.idom = nil
} else {
if w.dom.idom != lt.sdom[w.Index] {
w.dom.idom = w.dom.idom.dom.idom
}
// Calculate Children relation as inverse of Idom.
w.dom.idom.dom.children = append(w.dom.idom.dom.children, w)
}
}
pre, post := numberDomTree(root, 0, 0)
if recover != nil {
numberDomTree(recover, pre, post)
}
// printDomTreeDot(os.Stderr, f) // debugging
// printDomTreeDot(os.Stderr, fn) // debugging
// printDomTreeText(os.Stderr, root, 0) // debugging
if f.Prog.mode&SanityCheckFunctions != 0 {
sanityCheckDomTree(f)
if fn.Prog.mode&SanityCheckFunctions != 0 {
sanityCheckDomTree(fn)
}
}
// buildPostDomTree is like buildDomTree, but builds the post-dominator tree instead.
func buildPostDomTree(fn *Function) {
// The step numbers refer to the original LT paper; the
// reordering is due to Georgiadis.
// Clear any previous domInfo.
for _, b := range fn.Blocks {
b.pdom = domInfo{}
}
idoms := make([]*BasicBlock, len(fn.Blocks))
order := make([]*BasicBlock, 0, len(fn.Blocks))
seen := fn.blockset(0)
var dfs func(b *BasicBlock)
dfs = func(b *BasicBlock) {
if !seen.Add(b) {
return
}
for _, pred := range b.Preds {
dfs(pred)
}
if b == fn.Exit {
for _, p := range fn.Blocks {
if fn.fakeExits.Has(p) {
dfs(p)
}
}
}
order = append(order, b)
b.post = len(order) - 1
}
dfs(fn.Exit)
for i := 0; i < len(order)/2; i++ {
o := len(order) - i - 1
order[i], order[o] = order[o], order[i]
}
idoms[fn.Exit.Index] = fn.Exit
changed := true
for changed {
changed = false
// iterate over all nodes in reverse postorder, except for the
// exit node
for _, b := range order[1:] {
var newIdom *BasicBlock
do := func(p *BasicBlock) {
if idoms[p.Index] == nil {
return
}
if newIdom == nil {
newIdom = p
} else {
finger1 := p
finger2 := newIdom
for finger1 != finger2 {
for finger1.post < finger2.post {
finger1 = idoms[finger1.Index]
}
for finger2.post < finger1.post {
finger2 = idoms[finger2.Index]
}
}
newIdom = finger1
}
}
for _, p := range b.Succs {
do(p)
}
if fn.fakeExits.Has(b) {
do(fn.Exit)
}
if idoms[b.Index] != newIdom {
idoms[b.Index] = newIdom
changed = true
}
}
}
for i, b := range idoms {
fn.Blocks[i].pdom.idom = b
if b == nil {
// malformed CFG
continue
}
if i == b.Index {
continue
}
b.pdom.children = append(b.pdom.children, fn.Blocks[i])
}
numberPostDomTree(fn.Exit, 0, 0)
// printPostDomTreeDot(os.Stderr, fn) // debugging
// printPostDomTreeText(os.Stderr, fn.Exit, 0) // debugging
if fn.Prog.mode&SanityCheckFunctions != 0 { // XXX
sanityCheckDomTree(fn) // XXX
}
}
@ -222,6 +289,21 @@ func numberDomTree(v *BasicBlock, pre, post int32) (int32, int32) {
return pre, post
}
// numberPostDomTree sets the pre- and post-order numbers of a depth-first
// traversal of the post-dominator tree rooted at v. These are used to
// answer post-dominance queries in constant time.
//
func numberPostDomTree(v *BasicBlock, pre, post int32) (int32, int32) {
v.pdom.pre = pre
pre++
for _, child := range v.pdom.children {
pre, post = numberPostDomTree(child, pre, post)
}
v.pdom.post = post
post++
return pre, post
}
// Testing utilities ----------------------------------------
// sanityCheckDomTree checks the correctness of the dominator tree
@ -243,8 +325,8 @@ func sanityCheckDomTree(f *Function) {
all.Set(one).Lsh(&all, uint(n)).Sub(&all, one)
// Initialization.
for i, b := range f.Blocks {
if i == 0 || b == f.Recover {
for i := range f.Blocks {
if i == 0 {
// A root is dominated only by itself.
D[i].SetBit(&D[0], 0, 1)
} else {
@ -258,7 +340,7 @@ func sanityCheckDomTree(f *Function) {
for changed := true; changed; {
changed = false
for i, b := range f.Blocks {
if i == 0 || b == f.Recover {
if i == 0 {
continue
}
// Compute intersection across predecessors.
@ -267,6 +349,13 @@ func sanityCheckDomTree(f *Function) {
for _, pred := range b.Preds {
x.And(&x, &D[pred.Index])
}
if b == f.Exit {
for _, p := range f.Blocks {
if f.fakeExits.Has(p) {
x.And(&x, &D[p.Index])
}
}
}
x.SetBit(&x, i, 1) // a block always dominates itself.
if D[i].Cmp(&x) != 0 {
D[i].Set(&x)
@ -276,14 +365,10 @@ func sanityCheckDomTree(f *Function) {
}
// Check the entire relation. O(n^2).
// The Recover block (if any) must be treated specially so we skip it.
ok := true
for i := 0; i < n; i++ {
for j := 0; j < n; j++ {
b, c := f.Blocks[i], f.Blocks[j]
if c == f.Recover {
continue
}
actual := b.Dominates(c)
expected := D[j].Bit(i) == 1
if actual != expected {
@ -321,7 +406,7 @@ func printDomTreeText(buf *bytes.Buffer, v *BasicBlock, indent int) {
// printDomTreeDot prints the dominator tree of f in AT&T GraphViz
// (.dot) format.
//lint:ignore U1000 used during debugging
func printDomTreeDot(buf *bytes.Buffer, f *Function) {
func printDomTreeDot(buf io.Writer, f *Function) {
fmt.Fprintln(buf, "//", f)
fmt.Fprintln(buf, "digraph domtree {")
for i, b := range f.Blocks {
@ -341,3 +426,36 @@ func printDomTreeDot(buf *bytes.Buffer, f *Function) {
}
fmt.Fprintln(buf, "}")
}
// printDomTree prints the dominator tree as text, using indentation.
//lint:ignore U1000 used during debugging
func printPostDomTreeText(buf io.Writer, v *BasicBlock, indent int) {
fmt.Fprintf(buf, "%*s%s\n", 4*indent, "", v)
for _, child := range v.pdom.children {
printPostDomTreeText(buf, child, indent+1)
}
}
// printDomTreeDot prints the dominator tree of f in AT&T GraphViz
// (.dot) format.
//lint:ignore U1000 used during debugging
func printPostDomTreeDot(buf io.Writer, f *Function) {
fmt.Fprintln(buf, "//", f)
fmt.Fprintln(buf, "digraph pdomtree {")
for _, b := range f.Blocks {
v := b.pdom
fmt.Fprintf(buf, "\tn%d [label=\"%s (%d, %d)\",shape=\"rectangle\"];\n", v.pre, b, v.pre, v.post)
// TODO(adonovan): improve appearance of edges
// belonging to both dominator tree and CFG.
// Dominator tree edge.
if b != f.Exit {
fmt.Fprintf(buf, "\tn%d -> n%d [style=\"solid\",weight=100];\n", v.idom.pdom.pre, v.pre)
}
// CFG edges.
for _, pred := range b.Preds {
fmt.Fprintf(buf, "\tn%d -> n%d [style=\"dotted\",weight=0];\n", pred.pdom.pre, v.pre)
}
}
fmt.Fprintln(buf, "}")
}

View File

@ -2,13 +2,14 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ssa
package ir
// Helpers for emitting SSA instructions.
// Helpers for emitting IR instructions.
import (
"fmt"
"go/ast"
"go/constant"
"go/token"
"go/types"
)
@ -16,24 +17,32 @@ import (
// emitNew emits to f a new (heap Alloc) instruction allocating an
// object of type typ. pos is the optional source location.
//
func emitNew(f *Function, typ types.Type, pos token.Pos) *Alloc {
func emitNew(f *Function, typ types.Type, source ast.Node) *Alloc {
v := &Alloc{Heap: true}
v.setType(types.NewPointer(typ))
v.setPos(pos)
f.emit(v)
f.emit(v, source)
return v
}
// emitLoad emits to f an instruction to load the address addr into a
// new temporary, and returns the value so defined.
//
func emitLoad(f *Function, addr Value) *UnOp {
v := &UnOp{Op: token.MUL, X: addr}
func emitLoad(f *Function, addr Value, source ast.Node) *Load {
v := &Load{X: addr}
v.setType(deref(addr.Type()))
f.emit(v)
f.emit(v, source)
return v
}
func emitRecv(f *Function, ch Value, commaOk bool, typ types.Type, source ast.Node) Value {
recv := &Recv{
Chan: ch,
CommaOk: commaOk,
}
recv.setType(typ)
return f.emit(recv, source)
}
// emitDebugRef emits to f a DebugRef pseudo-instruction associating
// expression e with value v.
//
@ -61,7 +70,7 @@ func emitDebugRef(f *Function, e ast.Expr, v Value, isAddr bool) {
Expr: e,
IsAddr: isAddr,
object: obj,
})
}, nil)
}
// emitArith emits to f code to compute the binary operation op(x, y)
@ -69,19 +78,19 @@ func emitDebugRef(f *Function, e ast.Expr, v Value, isAddr bool) {
// (Use emitCompare() for comparisons and Builder.logicalBinop() for
// non-eager operations.)
//
func emitArith(f *Function, op token.Token, x, y Value, t types.Type, pos token.Pos) Value {
func emitArith(f *Function, op token.Token, x, y Value, t types.Type, source ast.Node) Value {
switch op {
case token.SHL, token.SHR:
x = emitConv(f, x, t)
x = emitConv(f, x, t, source)
// y may be signed or an 'untyped' constant.
// TODO(adonovan): whence signed values?
if b, ok := y.Type().Underlying().(*types.Basic); ok && b.Info()&types.IsUnsigned == 0 {
y = emitConv(f, y, types.Typ[types.Uint64])
y = emitConv(f, y, types.Typ[types.Uint64], source)
}
case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT:
x = emitConv(f, x, t)
y = emitConv(f, y, t)
x = emitConv(f, x, t, source)
y = emitConv(f, y, t, source)
default:
panic("illegal op in emitArith: " + op.String())
@ -92,15 +101,14 @@ func emitArith(f *Function, op token.Token, x, y Value, t types.Type, pos token.
X: x,
Y: y,
}
v.setPos(pos)
v.setType(t)
return f.emit(v)
return f.emit(v, source)
}
// emitCompare emits to f code compute the boolean result of
// comparison comparison 'x op y'.
//
func emitCompare(f *Function, op token.Token, x, y Value, pos token.Pos) Value {
func emitCompare(f *Function, op token.Token, x, y Value, source ast.Node) Value {
xt := x.Type().Underlying()
yt := y.Type().Underlying()
@ -111,7 +119,7 @@ func emitCompare(f *Function, op token.Token, x, y Value, pos token.Pos) Value {
// if e==true { ... }
// even in the case when e's type is an interface.
// TODO(adonovan): opt: generalise to x==true, false!=y, etc.
if x == vTrue && op == token.EQL {
if x, ok := x.(*Const); ok && op == token.EQL && x.Value != nil && x.Value.Kind() == constant.Bool && constant.BoolVal(x.Value) {
if yt, ok := yt.(*types.Basic); ok && yt.Info()&types.IsBoolean != 0 {
return y
}
@ -120,13 +128,13 @@ func emitCompare(f *Function, op token.Token, x, y Value, pos token.Pos) Value {
if types.Identical(xt, yt) {
// no conversion necessary
} else if _, ok := xt.(*types.Interface); ok {
y = emitConv(f, y, x.Type())
y = emitConv(f, y, x.Type(), source)
} else if _, ok := yt.(*types.Interface); ok {
x = emitConv(f, x, y.Type())
x = emitConv(f, x, y.Type(), source)
} else if _, ok := x.(*Const); ok {
x = emitConv(f, x, y.Type())
x = emitConv(f, x, y.Type(), source)
} else if _, ok := y.(*Const); ok {
y = emitConv(f, y, x.Type())
y = emitConv(f, y, x.Type(), source)
//lint:ignore SA9003 no-op
} else {
// other cases, e.g. channels. No-op.
@ -137,9 +145,8 @@ func emitCompare(f *Function, op token.Token, x, y Value, pos token.Pos) Value {
X: x,
Y: y,
}
v.setPos(pos)
v.setType(tBool)
return f.emit(v)
return f.emit(v, source)
}
// isValuePreserving returns true if a conversion from ut_src to
@ -171,7 +178,7 @@ func isValuePreserving(ut_src, ut_dst types.Type) bool {
// by language assignability rules in assignments, parameter passing,
// etc. Conversions cannot fail dynamically.
//
func emitConv(f *Function, val Value, typ types.Type) Value {
func emitConv(f *Function, val Value, typ types.Type, source ast.Node) Value {
t_src := val.Type()
// Identical types? Conversion is a no-op.
@ -186,7 +193,7 @@ func emitConv(f *Function, val Value, typ types.Type) Value {
if isValuePreserving(ut_src, ut_dst) {
c := &ChangeType{X: val}
c.setType(typ)
return f.emit(c)
return f.emit(c, source)
}
// Conversion to, or construction of a value of, an interface type?
@ -195,23 +202,23 @@ func emitConv(f *Function, val Value, typ types.Type) Value {
if _, ok := ut_src.(*types.Interface); ok {
c := &ChangeInterface{X: val}
c.setType(typ)
return f.emit(c)
return f.emit(c, source)
}
// Untyped nil constant? Return interface-typed nil constant.
if ut_src == tUntypedNil {
return nilConst(typ)
return emitConst(f, nilConst(typ))
}
// Convert (non-nil) "untyped" literals to their default type.
if t, ok := ut_src.(*types.Basic); ok && t.Info()&types.IsUntyped != 0 {
val = emitConv(f, val, DefaultType(ut_src))
val = emitConv(f, val, types.Default(ut_src), source)
}
f.Pkg.Prog.needMethodsOf(val.Type())
mi := &MakeInterface{X: val}
mi.setType(typ)
return f.emit(mi)
return f.emit(mi, source)
}
// Conversion of a compile-time constant value?
@ -222,7 +229,7 @@ func emitConv(f *Function, val Value, typ types.Type) Value {
// constant of the destination type and
// (initially) the same abstract value.
// We don't truncate the value yet.
return NewConst(c.Value, typ)
return emitConst(f, NewConst(c.Value, typ))
}
// We're converting from constant to non-constant type,
@ -237,7 +244,7 @@ func emitConv(f *Function, val Value, typ types.Type) Value {
if ok1 || ok2 {
c := &Convert{X: val}
c.setType(typ)
return f.emit(c)
return f.emit(c, source)
}
panic(fmt.Sprintf("in %s: cannot convert %s (%s) to %s", f, val, val.Type(), typ))
@ -246,72 +253,75 @@ func emitConv(f *Function, val Value, typ types.Type) Value {
// emitStore emits to f an instruction to store value val at location
// addr, applying implicit conversions as required by assignability rules.
//
func emitStore(f *Function, addr, val Value, pos token.Pos) *Store {
func emitStore(f *Function, addr, val Value, source ast.Node) *Store {
s := &Store{
Addr: addr,
Val: emitConv(f, val, deref(addr.Type())),
pos: pos,
Val: emitConv(f, val, deref(addr.Type()), source),
}
f.emit(s)
// make sure we call getMem after the call to emitConv, which may
// itself update the memory state
f.emit(s, source)
return s
}
// emitJump emits to f a jump to target, and updates the control-flow graph.
// Postcondition: f.currentBlock is nil.
//
func emitJump(f *Function, target *BasicBlock) {
func emitJump(f *Function, target *BasicBlock, source ast.Node) *Jump {
b := f.currentBlock
b.emit(new(Jump))
j := new(Jump)
b.emit(j, source)
addEdge(b, target)
f.currentBlock = nil
return j
}
// emitIf emits to f a conditional jump to tblock or fblock based on
// cond, and updates the control-flow graph.
// Postcondition: f.currentBlock is nil.
//
func emitIf(f *Function, cond Value, tblock, fblock *BasicBlock) {
func emitIf(f *Function, cond Value, tblock, fblock *BasicBlock, source ast.Node) *If {
b := f.currentBlock
b.emit(&If{Cond: cond})
stmt := &If{Cond: cond}
b.emit(stmt, source)
addEdge(b, tblock)
addEdge(b, fblock)
f.currentBlock = nil
return stmt
}
// emitExtract emits to f an instruction to extract the index'th
// component of tuple. It returns the extracted value.
//
func emitExtract(f *Function, tuple Value, index int) Value {
func emitExtract(f *Function, tuple Value, index int, source ast.Node) Value {
e := &Extract{Tuple: tuple, Index: index}
e.setType(tuple.Type().(*types.Tuple).At(index).Type())
return f.emit(e)
return f.emit(e, source)
}
// emitTypeAssert emits to f a type assertion value := x.(t) and
// returns the value. x.Type() must be an interface.
//
func emitTypeAssert(f *Function, x Value, t types.Type, pos token.Pos) Value {
func emitTypeAssert(f *Function, x Value, t types.Type, source ast.Node) Value {
a := &TypeAssert{X: x, AssertedType: t}
a.setPos(pos)
a.setType(t)
return f.emit(a)
return f.emit(a, source)
}
// emitTypeTest emits to f a type test value,ok := x.(t) and returns
// a (value, ok) tuple. x.Type() must be an interface.
//
func emitTypeTest(f *Function, x Value, t types.Type, pos token.Pos) Value {
func emitTypeTest(f *Function, x Value, t types.Type, source ast.Node) Value {
a := &TypeAssert{
X: x,
AssertedType: t,
CommaOk: true,
}
a.setPos(pos)
a.setType(types.NewTuple(
newVar("value", t),
varOk,
))
return f.emit(a)
return f.emit(a, source)
}
// emitTailCall emits to f a function call in tail position. The
@ -320,7 +330,7 @@ func emitTypeTest(f *Function, x Value, t types.Type, pos token.Pos) Value {
// Precondition: f does/will not use deferred procedure calls.
// Postcondition: f.currentBlock is nil.
//
func emitTailCall(f *Function, call *Call) {
func emitTailCall(f *Function, call *Call, source ast.Node) {
tresults := f.Signature.Results()
nr := tresults.Len()
if nr == 1 {
@ -328,7 +338,7 @@ func emitTailCall(f *Function, call *Call) {
} else {
call.typ = tresults
}
tuple := f.emit(call)
tuple := f.emit(call, source)
var ret Return
switch nr {
case 0:
@ -337,7 +347,7 @@ func emitTailCall(f *Function, call *Call) {
ret.Results = []Value{tuple}
default:
for i := 0; i < nr; i++ {
v := emitExtract(f, tuple, i)
v := emitExtract(f, tuple, i, source)
// TODO(adonovan): in principle, this is required:
// v = emitConv(f, o.Type, f.Signature.Results[i].Type)
// but in practice emitTailCall is only used when
@ -345,7 +355,11 @@ func emitTailCall(f *Function, call *Call) {
ret.Results = append(ret.Results, v)
}
}
f.emit(&ret)
f.Exit = f.newBasicBlock("exit")
emitJump(f, f.Exit, source)
f.currentBlock = f.Exit
f.emit(&ret, source)
f.currentBlock = nil
}
@ -357,7 +371,7 @@ func emitTailCall(f *Function, call *Call) {
// a field; if it is the value of a struct, the result will be the
// value of a field.
//
func emitImplicitSelections(f *Function, v Value, indices []int) Value {
func emitImplicitSelections(f *Function, v Value, indices []int, source ast.Node) Value {
for _, index := range indices {
fld := deref(v.Type()).Underlying().(*types.Struct).Field(index)
@ -367,10 +381,10 @@ func emitImplicitSelections(f *Function, v Value, indices []int) Value {
Field: index,
}
instr.setType(types.NewPointer(fld.Type()))
v = f.emit(instr)
v = f.emit(instr, source)
// Load the field's value iff indirectly embedded.
if isPointer(fld.Type()) {
v = emitLoad(f, v)
v = emitLoad(f, v, source)
}
} else {
instr := &Field{
@ -378,7 +392,7 @@ func emitImplicitSelections(f *Function, v Value, indices []int) Value {
Field: index,
}
instr.setType(fld.Type())
v = f.emit(instr)
v = f.emit(instr, source)
}
}
return v
@ -398,21 +412,21 @@ func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast.
X: v,
Field: index,
}
instr.setPos(id.Pos())
instr.setSource(id)
instr.setType(types.NewPointer(fld.Type()))
v = f.emit(instr)
v = f.emit(instr, id)
// Load the field's value iff we don't want its address.
if !wantAddr {
v = emitLoad(f, v)
v = emitLoad(f, v, id)
}
} else {
instr := &Field{
X: v,
Field: index,
}
instr.setPos(id.Pos())
instr.setSource(id)
instr.setType(fld.Type())
v = f.emit(instr)
v = f.emit(instr, id)
}
emitDebugRef(f, id, v, wantAddr)
return v
@ -421,49 +435,16 @@ func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast.
// zeroValue emits to f code to produce a zero value of type t,
// and returns it.
//
func zeroValue(f *Function, t types.Type) Value {
func zeroValue(f *Function, t types.Type, source ast.Node) Value {
switch t.Underlying().(type) {
case *types.Struct, *types.Array:
return emitLoad(f, f.addLocal(t, token.NoPos))
return emitLoad(f, f.addLocal(t, source), source)
default:
return zeroConst(t)
return emitConst(f, zeroConst(t))
}
}
// createRecoverBlock emits to f a block of code to return after a
// recovered panic, and sets f.Recover to it.
//
// If f's result parameters are named, the code loads and returns
// their current values, otherwise it returns the zero values of their
// type.
//
// Idempotent.
//
func createRecoverBlock(f *Function) {
if f.Recover != nil {
return // already created
}
saved := f.currentBlock
f.Recover = f.newBasicBlock("recover")
f.currentBlock = f.Recover
var results []Value
if f.namedResults != nil {
// Reload NRPs to form value tuple.
for _, r := range f.namedResults {
results = append(results, emitLoad(f, r))
}
} else {
R := f.Signature.Results()
for i, n := 0, R.Len(); i < n; i++ {
T := R.At(i).Type()
// Return zero value of each result type.
results = append(results, zeroValue(f, T))
}
}
f.emit(&Return{Results: results})
f.currentBlock = saved
func emitConst(f *Function, c *Const) *Const {
f.consts = append(f.consts, c)
return c
}

271
vendor/honnef.co/go/tools/ir/exits.go vendored Normal file
View File

@ -0,0 +1,271 @@
package ir
import (
"go/types"
)
// buildExits determines whether fn is statically known to terminate
// the process (fn.WillExit) or to unwind its goroutine via panic or
// runtime.Goexit (fn.WillUnwind), and records the result on fn.
//
// It special-cases a small set of well-known functions (runtime and
// logrus), then analyzes fn's own CFG: a function will exit/unwind if
// every path to the exit block goes through a call that itself
// exits/unwinds (approximated via dominance and path search below).
func (b *builder) buildExits(fn *Function) {
	if obj := fn.Object(); obj != nil {
		// Hard-coded knowledge about well-known functions, checked by
		// package path and name so we don't need their bodies.
		switch obj.Pkg().Path() {
		case "runtime":
			switch obj.Name() {
			case "exit":
				fn.WillExit = true
				return
			case "throw":
				fn.WillExit = true
				return
			case "Goexit":
				fn.WillUnwind = true
				return
			}
		case "github.com/sirupsen/logrus":
			switch obj.(*types.Func).FullName() {
			case "(*github.com/sirupsen/logrus.Logger).Exit":
				// Technically, this method does not unconditionally exit
				// the process. It dynamically calls a function stored in
				// the logger. If the function is nil, it defaults to
				// os.Exit.
				//
				// The main intent of this method is to terminate the
				// process, and that's what the vast majority of people
				// will use it for. We'll happily accept some false
				// negatives to avoid a lot of false positives.
				fn.WillExit = true
				return
			case "(*github.com/sirupsen/logrus.Logger).Panic",
				"(*github.com/sirupsen/logrus.Logger).Panicf",
				"(*github.com/sirupsen/logrus.Logger).Panicln":
				// These methods will always panic, but that's not
				// statically known from the code alone, because they
				// take a detour through the generic Log methods.
				fn.WillUnwind = true
				return
			case "(*github.com/sirupsen/logrus.Entry).Panicf",
				"(*github.com/sirupsen/logrus.Entry).Panicln":
				// Entry.Panic has an explicit panic, but Panicf and
				// Panicln do not, relying fully on the generic Log
				// method.
				fn.WillUnwind = true
				return
			case "(*github.com/sirupsen/logrus.Logger).Log",
				"(*github.com/sirupsen/logrus.Logger).Logf",
				"(*github.com/sirupsen/logrus.Logger).Logln":
				// TODO(dh): we cannot handle these case. Whether they
				// exit or unwind depends on the level, which is set
				// via the first argument. We don't currently support
				// call-site-specific exit information.
			}
		}
	}

	// The dominance-based reasoning below needs an up-to-date tree.
	buildDomTree(fn)

	// isRecoverCall reports whether instr is a direct call to the
	// recover builtin.
	isRecoverCall := func(instr Instruction) bool {
		if instr, ok := instr.(*Call); ok {
			if builtin, ok := instr.Call.Value.(*Builtin); ok {
				if builtin.Name() == "recover" {
					return true
				}
			}
		}
		return false
	}

	// All panics branch to the exit block, which means that if every
	// possible path through the function panics, then all
	// predecessors of the exit block must panic.
	willPanic := true
	for _, pred := range fn.Exit.Preds {
		if _, ok := pred.Control().(*Panic); !ok {
			willPanic = false
		}
	}
	if willPanic {
		// A deferred call to recover (directly or via something we
		// can't see into) can stop the unwind, so check for that
		// before concluding WillUnwind.
		recovers := false
	recoverLoop:
		for _, u := range fn.Blocks {
			for _, instr := range u.Instrs {
				if instr, ok := instr.(*Defer); ok {
					call := instr.Call.StaticCallee()
					if call == nil {
						// not a static call, so we can't be sure the
						// deferred call isn't calling recover
						recovers = true
						break recoverLoop
					}
					if len(call.Blocks) == 0 {
						// external function, we don't know what's
						// happening inside it
						//
						// TODO(dh): this includes functions from
						// imported packages, due to how go/analysis
						// works. We could introduce another fact,
						// like we've done for exiting and unwinding,
						// but it doesn't seem worth it. Virtually all
						// uses of recover will be in closures.
						recovers = true
						break recoverLoop
					}
					for _, y := range call.Blocks {
						for _, instr2 := range y.Instrs {
							if isRecoverCall(instr2) {
								recovers = true
								break recoverLoop
							}
						}
					}
				}
			}
		}
		if !recovers {
			fn.WillUnwind = true
			return
		}
	}

	// TODO(dh): don't check that any specific call dominates the exit
	// block. instead, check that all calls combined cover every
	// possible path through the function.
	exits := NewBlockSet(len(fn.Blocks))
	unwinds := NewBlockSet(len(fn.Blocks))
	for _, u := range fn.Blocks {
		for _, instr := range u.Instrs {
			if instr, ok := instr.(CallInstruction); ok {
				switch instr.(type) {
				case *Defer, *Call:
				default:
					continue
				}
				if instr.Common().IsInvoke() {
					// give up
					return
				}
				var call *Function
				switch instr.Common().Value.(type) {
				case *Function, *MakeClosure:
					call = instr.Common().StaticCallee()
				case *Builtin:
					// the only builtins that affect control flow are
					// panic and recover, and we've already handled
					// those
					continue
				default:
					// dynamic dispatch
					return
				}
				// buildFunction is idempotent. if we're part of a
				// (mutually) recursive call chain, then buildFunction
				// will immediately return, and fn.WillExit will be false.
				if call.Package() == fn.Package() {
					b.buildFunction(call)
				}
				dom := u.Dominates(fn.Exit)
				if call.WillExit {
					if dom {
						// An exiting call on every path: fn exits too.
						fn.WillExit = true
						return
					}
					exits.Add(u)
				} else if call.WillUnwind {
					if dom {
						fn.WillUnwind = true
						return
					}
					unwinds.Add(u)
				}
			}
		}
	}

	// depth-first search trying to find a path to the exit block that
	// doesn't cross any of the blacklisted blocks
	seen := NewBlockSet(len(fn.Blocks))
	var findPath func(root *BasicBlock, bl *BlockSet) bool
	findPath = func(root *BasicBlock, bl *BlockSet) bool {
		if root == fn.Exit {
			return true
		}
		if seen.Has(root) {
			return false
		}
		if bl.Has(root) {
			return false
		}
		seen.Add(root)
		for _, succ := range root.Succs {
			if findPath(succ, bl) {
				return true
			}
		}
		return false
	}

	// If no path to the exit avoids the exiting (resp. unwinding)
	// blocks, every execution must pass through one of them.
	if exits.Num() > 0 {
		if !findPath(fn.Blocks[0], exits) {
			fn.WillExit = true
			return
		}
	}
	if unwinds.Num() > 0 {
		seen.Clear()
		if !findPath(fn.Blocks[0], unwinds) {
			fn.WillUnwind = true
			return
		}
	}
}
// addUnreachables rewrites control flow after calls that are known
// never to return normally. After a call to a WillExit function the
// block is truncated and terminated with Unreachable; after a call to
// a WillUnwind function it is truncated and terminated with a Jump.
// In both cases the block's real successors are detached and an edge
// to fn.Exit is added instead.
func (b *builder) addUnreachables(fn *Function) {
	for _, bb := range fn.Blocks {
		for i, instr := range bb.Instrs {
			if instr, ok := instr.(*Call); ok {
				// Only static calls can be analyzed; invoke-mode and
				// other dynamic calls leave call == nil below.
				var call *Function
				switch v := instr.Common().Value.(type) {
				case *Function:
					call = v
				case *MakeClosure:
					call = v.Fn.(*Function)
				}
				if call == nil {
					continue
				}
				if call.Package() == fn.Package() {
					// make sure we have information on all functions in this package
					b.buildFunction(call)
				}
				if call.WillExit {
					// This call will cause the process to terminate.
					// Remove remaining instructions in the block and
					// replace any control flow with Unreachable.
					for _, succ := range bb.Succs {
						succ.removePred(bb)
					}
					bb.Succs = bb.Succs[:0]
					bb.Instrs = bb.Instrs[:i+1]
					bb.emit(new(Unreachable), instr.Source())
					addEdge(bb, fn.Exit)
					break
				} else if call.WillUnwind {
					// This call will cause the goroutine to terminate
					// and defers to run (i.e. a panic or
					// runtime.Goexit). Remove remaining instructions
					// in the block and replace any control flow with
					// an unconditional jump to the exit block.
					for _, succ := range bb.Succs {
						succ.removePred(bb)
					}
					bb.Succs = bb.Succs[:0]
					bb.Instrs = bb.Instrs[:i+1]
					bb.emit(new(Jump), instr.Source())
					addEdge(bb, fn.Exit)
					break
				}
			}
		}
	}
}

View File

@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ssa
package ir
// This file implements the Function and BasicBlock types.
@ -10,6 +10,8 @@ import (
"bytes"
"fmt"
"go/ast"
"go/constant"
"go/format"
"go/token"
"go/types"
"io"
@ -23,6 +25,29 @@ func addEdge(from, to *BasicBlock) {
to.Preds = append(to.Preds, from)
}
// Control returns the block's terminator, i.e. its final
// instruction, or nil if the block is empty.
func (b *BasicBlock) Control() Instruction {
	if n := len(b.Instrs); n > 0 {
		return b.Instrs[n-1]
	}
	return nil
}
// SigmaFor returns the sigma node for v coming from pred, or nil if
// there is none.
func (b *BasicBlock) SigmaFor(v Value, pred *BasicBlock) *Sigma {
	for _, instr := range b.Instrs {
		sigma, ok := instr.(*Sigma)
		if !ok {
			// no more sigmas — scanning stops at the first
			// non-sigma, so sigma nodes are assumed to lead the
			// block's instruction list
			return nil
		}
		if sigma.From == pred && sigma.X == v {
			return sigma
		}
	}
	return nil
}
// Parent returns the function that contains block b.
func (b *BasicBlock) Parent() *Function { return b.parent }
@ -36,7 +61,8 @@ func (b *BasicBlock) String() string {
// emit appends an instruction to the current basic block.
// If the instruction defines a Value, it is returned.
//
func (b *BasicBlock) emit(i Instruction) Value {
func (b *BasicBlock) emit(i Instruction, source ast.Node) Value {
i.setSource(source)
i.setBlock(b)
b.Instrs = append(b.Instrs, i)
v, _ := i.(Value)
@ -54,6 +80,16 @@ func (b *BasicBlock) predIndex(c *BasicBlock) int {
panic(fmt.Sprintf("no edge %s -> %s", c, b))
}
// succIndex returns the position of c within b's successor list, or
// -1 when c is not a successor of b.
func (b *BasicBlock) succIndex(c *BasicBlock) int {
	for idx := range b.Succs {
		if b.Succs[idx] == c {
			return idx
		}
	}
	return -1
}
// hasPhi returns true if b.Instrs contains φ-nodes.
func (b *BasicBlock) hasPhi() bool {
_, ok := b.Instrs[0].(*Phi)
@ -96,10 +132,6 @@ func (b *BasicBlock) replaceSucc(p, q *BasicBlock) {
}
}
func (b *BasicBlock) RemovePred(p *BasicBlock) {
b.removePred(p)
}
// removePred removes all occurrences of p in b's
// predecessor list and φ-nodes.
// Ordinarily there should be at most one.
@ -173,23 +205,33 @@ func (f *Function) labelledBlock(label *ast.Ident) *lblock {
// addParam adds a (non-escaping) parameter to f.Params of the
// specified name, type and source position.
//
func (f *Function) addParam(name string, typ types.Type, pos token.Pos) *Parameter {
v := &Parameter{
name: name,
typ: typ,
pos: pos,
parent: f,
func (f *Function) addParam(name string, typ types.Type, source ast.Node) *Parameter {
var b *BasicBlock
if len(f.Blocks) > 0 {
b = f.Blocks[0]
}
v := &Parameter{
name: name,
}
v.setBlock(b)
v.setType(typ)
v.setSource(source)
f.Params = append(f.Params, v)
if b != nil {
// There may be no blocks if this function has no body. We
// still create params, but aren't interested in the
// instruction.
f.Blocks[0].Instrs = append(f.Blocks[0].Instrs, v)
}
return v
}
func (f *Function) addParamObj(obj types.Object) *Parameter {
func (f *Function) addParamObj(obj types.Object, source ast.Node) *Parameter {
name := obj.Name()
if name == "" {
name = fmt.Sprintf("arg%d", len(f.Params))
}
param := f.addParam(name, obj.Type(), obj.Pos())
param := f.addParam(name, obj.Type(), source)
param.object = obj
return param
}
@ -198,25 +240,61 @@ func (f *Function) addParamObj(obj types.Object) *Parameter {
// stack; the function body will load/store the spilled location.
// Subsequent lifting will eliminate spills where possible.
//
func (f *Function) addSpilledParam(obj types.Object) {
param := f.addParamObj(obj)
spill := &Alloc{Comment: obj.Name()}
func (f *Function) addSpilledParam(obj types.Object, source ast.Node) {
param := f.addParamObj(obj, source)
spill := &Alloc{}
spill.setType(types.NewPointer(obj.Type()))
spill.setPos(obj.Pos())
spill.source = source
f.objects[obj] = spill
f.Locals = append(f.Locals, spill)
f.emit(spill)
f.emit(&Store{Addr: spill, Val: param})
f.emit(spill, source)
emitStore(f, spill, param, source)
// f.emit(&Store{Addr: spill, Val: param})
}
// startBody initializes the function prior to generating SSA code for its body.
// startBody initializes the function prior to generating IR code for its body.
// Precondition: f.Type() already set.
//
func (f *Function) startBody() {
f.currentBlock = f.newBasicBlock("entry")
entry := f.newBasicBlock("entry")
f.currentBlock = entry
f.objects = make(map[types.Object]Value) // needed for some synthetics, e.g. init
}
// blockset returns the function's i'th scratch BlockSet, sized to
// cover all of f.Blocks and cleared. The sets are cached on f so
// repeated analysis passes can reuse their backing storage.
func (f *Function) blockset(i int) *BlockSet {
	bs := &f.blocksets[i]
	if len(bs.values) != len(f.Blocks) {
		if cap(bs.values) >= len(f.Blocks) {
			// existing storage is big enough; reslice and reset
			bs.values = bs.values[:len(f.Blocks)]
			bs.Clear()
		} else {
			bs.values = make([]bool, len(f.Blocks))
		}
	} else {
		bs.Clear()
	}
	return bs
}
// exitBlock creates and populates f.Exit, the function's single exit
// block: it runs deferred calls, loads the current values of the
// result variables and returns them. The previously current block is
// restored afterwards.
func (f *Function) exitBlock() {
	old := f.currentBlock
	f.Exit = f.newBasicBlock("exit")
	f.currentBlock = f.Exit
	ret := f.results()
	results := make([]Value, len(ret))
	// Run function calls deferred in this
	// function when explicitly returning from it.
	f.emit(new(RunDefers), nil)
	for i, r := range ret {
		results[i] = emitLoad(f, r, nil)
	}
	f.emit(&Return{Results: results}, nil)
	f.currentBlock = old
}
// createSyntacticParams populates f.Params and generates code (spills
// and named result locals) for all the parameters declared in the
// syntax. In addition it populates the f.objects mapping.
@ -231,11 +309,11 @@ func (f *Function) createSyntacticParams(recv *ast.FieldList, functype *ast.Func
if recv != nil {
for _, field := range recv.List {
for _, n := range field.Names {
f.addSpilledParam(f.Pkg.info.Defs[n])
f.addSpilledParam(f.Pkg.info.Defs[n], n)
}
// Anonymous receiver? No need to spill.
if field.Names == nil {
f.addParamObj(f.Signature.Recv())
f.addParamObj(f.Signature.Recv(), field)
}
}
}
@ -245,11 +323,11 @@ func (f *Function) createSyntacticParams(recv *ast.FieldList, functype *ast.Func
n := len(f.Params) // 1 if has recv, 0 otherwise
for _, field := range functype.Params.List {
for _, n := range field.Names {
f.addSpilledParam(f.Pkg.info.Defs[n])
f.addSpilledParam(f.Pkg.info.Defs[n], n)
}
// Anonymous parameter? No need to spill.
if field.Names == nil {
f.addParamObj(f.Signature.Params().At(len(f.Params) - n))
f.addParamObj(f.Signature.Params().At(len(f.Params)-n), field)
}
}
}
@ -262,24 +340,27 @@ func (f *Function) createSyntacticParams(recv *ast.FieldList, functype *ast.Func
f.namedResults = append(f.namedResults, f.addLocalForIdent(n))
}
}
if len(f.namedResults) == 0 {
sig := f.Signature.Results()
for i := 0; i < sig.Len(); i++ {
// XXX position information
v := f.addLocal(sig.At(i).Type(), nil)
f.implicitResults = append(f.implicitResults, v)
}
}
}
}
// numberRegisters assigns numbers to all SSA registers
// (value-defining Instructions) in f, to aid debugging.
// (Non-Instruction Values are named at construction.)
//
func numberRegisters(f *Function) {
v := 0
func numberNodes(f *Function) {
var base ID
for _, b := range f.Blocks {
for _, instr := range b.Instrs {
switch instr.(type) {
case Value:
instr.(interface {
setNum(int)
}).setNum(v)
v++
if instr == nil {
continue
}
base++
instr.setID(base)
}
}
}
@ -303,17 +384,164 @@ func buildReferrers(f *Function) {
}
}
// finishBody() finalizes the function after SSA code generation of its body.
// emitConsts materializes the constants collected in f.consts as
// instructions at the start of the entry block, deduplicating equal
// constants and dropping unreferenced ones. Functions without blocks
// simply discard their constants.
func (f *Function) emitConsts() {
	if len(f.Blocks) == 0 {
		f.consts = nil
		return
	}
	// TODO(dh): our deduplication only works on booleans and
	// integers. other constants are represented as pointers to
	// things.
	if len(f.consts) == 0 {
		return
	} else if len(f.consts) <= 32 {
		// few constants: pairwise comparison beats a map
		f.emitConstsFew()
	} else {
		f.emitConstsMany()
	}
}
// emitConstsFew deduplicates f.consts by pairwise comparison —
// appropriate for small constant counts — redirects uses of
// duplicates to a single representative, and prepends the surviving
// constants to the entry block's instruction list.
func (f *Function) emitConstsFew() {
	dedup := make([]*Const, 0, 32)
	for _, c := range f.consts {
		if len(*c.Referrers()) == 0 {
			// unreferenced constant; drop it
			continue
		}
		found := false
		for _, d := range dedup {
			if c.typ == d.typ && c.Value == d.Value {
				// same type and value: reuse d everywhere c was used
				replaceAll(c, d)
				found = true
				break
			}
		}
		if !found {
			dedup = append(dedup, c)
		}
	}
	// Splice the surviving constants in front of the entry block.
	instrs := make([]Instruction, len(f.Blocks[0].Instrs)+len(dedup))
	for i, c := range dedup {
		instrs[i] = c
		c.setBlock(f.Blocks[0])
	}
	copy(instrs[len(dedup):], f.Blocks[0].Instrs)
	f.Blocks[0].Instrs = instrs
	f.consts = nil
}
// emitConstsMany deduplicates f.consts using a map keyed on
// (type, value) — used when there are too many constants for the
// quadratic approach of emitConstsFew — drops unreferenced constants,
// and prepends the survivors to the entry block's instruction list.
func (f *Function) emitConstsMany() {
	type constKey struct {
		typ   types.Type
		value constant.Value
	}
	m := make(map[constKey]Value, len(f.consts))
	// Entries removed from f.consts are nilled in place; areNil
	// tracks how many, so the final slice can be sized exactly.
	areNil := 0
	for i, c := range f.consts {
		if len(*c.Referrers()) == 0 {
			// unreferenced constant; drop it
			f.consts[i] = nil
			areNil++
			continue
		}
		k := constKey{
			typ:   c.typ,
			value: c.Value,
		}
		if dup, ok := m[k]; !ok {
			m[k] = c
		} else {
			// duplicate: redirect all uses of c to the canonical dup
			f.consts[i] = nil
			areNil++
			replaceAll(c, dup)
		}
	}
	// Splice the surviving constants in front of the entry block.
	instrs := make([]Instruction, len(f.Blocks[0].Instrs)+len(f.consts)-areNil)
	i := 0
	for _, c := range f.consts {
		if c != nil {
			instrs[i] = c
			c.setBlock(f.Blocks[0])
			i++
		}
	}
	copy(instrs[i:], f.Blocks[0].Instrs)
	f.Blocks[0].Instrs = instrs
	f.consts = nil
}
// buildFakeExits ensures that every block in the function is
// reachable in reverse from the Exit block. This is required to build
// a full post-dominator tree, and to ensure the exit block's
// inclusion in the dominator tree.
func buildFakeExits(fn *Function) {
	// Find back-edges via forward DFS
	fn.fakeExits = BlockSet{values: make([]bool, len(fn.Blocks))}
	seen := fn.blockset(0)
	backEdges := fn.blockset(1)
	var dfs func(b *BasicBlock)
	dfs = func(b *BasicBlock) {
		if !seen.Add(b) {
			// revisiting a block: the edge we just followed is a back edge
			backEdges.Add(b)
			return
		}
		for _, pred := range b.Succs {
			dfs(pred)
		}
	}
	dfs(fn.Blocks[0])
	// Fixed-point loop: add one fake edge per iteration until every
	// block can reach (in reverse) the exit block.
buildLoop:
	for {
		// Reverse DFS from the exit block, also descending into
		// blocks already marked as fake exits.
		seen := fn.blockset(2)
		var dfs func(b *BasicBlock)
		dfs = func(b *BasicBlock) {
			if !seen.Add(b) {
				return
			}
			for _, pred := range b.Preds {
				dfs(pred)
			}
			if b == fn.Exit {
				for _, b := range fn.Blocks {
					if fn.fakeExits.Has(b) {
						dfs(b)
					}
				}
			}
		}
		dfs(fn.Exit)
		for _, b := range fn.Blocks {
			if !seen.Has(b) && backEdges.Has(b) {
				// Block b is not reachable from the exit block. Add a
				// fake jump from b to exit, then try again. Note that we
				// only add one fake edge at a time, as it may make
				// multiple blocks reachable.
				//
				// We only consider those blocks that have back edges.
				// Any unreachable block that doesn't have a back edge
				// must flow into a loop, which by definition has a
				// back edge. Thus, by looking for loops, we should
				// need fewer fake edges overall.
				fn.fakeExits.Add(b)
				continue buildLoop
			}
		}
		break
	}
}
// finishBody() finalizes the function after IR code generation of its body.
func (f *Function) finishBody() {
f.objects = nil
f.currentBlock = nil
f.lblocks = nil
// Don't pin the AST in memory (except in debug mode).
if n := f.syntax; n != nil && !f.debugInfo() {
f.syntax = extentNode{n.Pos(), n.End()}
}
// Remove from f.Locals any Allocs that escape to the heap.
j := 0
for _, l := range f.Locals {
@ -328,86 +556,25 @@ func (f *Function) finishBody() {
}
f.Locals = f.Locals[:j]
// comma-ok receiving from a time.Tick channel will never return
// ok == false, so any branching on the value of ok can be
// replaced with an unconditional jump. This will primarily match
// `for range time.Tick(x)` loops, but it can also match
// user-written code.
for _, block := range f.Blocks {
if len(block.Instrs) < 3 {
continue
}
if len(block.Succs) != 2 {
continue
}
var instrs []*Instruction
for i, ins := range block.Instrs {
if _, ok := ins.(*DebugRef); ok {
continue
}
instrs = append(instrs, &block.Instrs[i])
}
for i, ins := range instrs {
unop, ok := (*ins).(*UnOp)
if !ok || unop.Op != token.ARROW {
continue
}
call, ok := unop.X.(*Call)
if !ok {
continue
}
if call.Common().IsInvoke() {
continue
}
// OPT(dh): surely there is a more efficient way of doing
// this, than using FullName. We should already have
// resolved time.Tick somewhere?
v, ok := call.Common().Value.(*Function)
if !ok {
continue
}
t, ok := v.Object().(*types.Func)
if !ok {
continue
}
if t.FullName() != "time.Tick" {
continue
}
ex, ok := (*instrs[i+1]).(*Extract)
if !ok || ex.Tuple != unop || ex.Index != 1 {
continue
}
ifstmt, ok := (*instrs[i+2]).(*If)
if !ok || ifstmt.Cond != ex {
continue
}
*instrs[i+2] = NewJump(block)
succ := block.Succs[1]
block.Succs = block.Succs[0:1]
succ.RemovePred(block)
}
}
optimizeBlocks(f)
buildReferrers(f)
buildDomTree(f)
buildPostDomTree(f)
if f.Prog.mode&NaiveForm == 0 {
// For debugging pre-state of lifting pass:
// numberRegisters(f)
// f.WriteTo(os.Stderr)
lift(f)
}
f.namedResults = nil // (used by lifting)
// emit constants after lifting, because lifting may produce new constants.
f.emitConsts()
numberRegisters(f)
f.namedResults = nil // (used by lifting)
f.implicitResults = nil
numberNodes(f)
defer f.wr.Close()
f.wr.WriteFunc("start", "start", f)
if f.Prog.mode&PrintFunctions != 0 {
printMu.Lock()
@ -420,6 +587,29 @@ func (f *Function) finishBody() {
}
}
// isUselessPhi reports whether phi merges only a single distinct
// value (ignoring self-references, and treating constants with equal
// type and value as identical). If so, it returns that value; the
// phi can then be replaced by it.
func isUselessPhi(phi *Phi) (Value, bool) {
	var v0 Value
	for _, e := range phi.Edges {
		if e == phi {
			// a phi may refer to itself in a loop; such edges don't count
			continue
		}
		if v0 == nil {
			v0 = e
		}
		if v0 != e {
			// different Values may still be equal constants
			if v0, ok := v0.(*Const); ok {
				if e, ok := e.(*Const); ok {
					if v0.typ == e.typ && v0.Value == e.Value {
						continue
					}
				}
			}
			return nil, false
		}
	}
	return v0, true
}
// RemoveNilBlocks compacts f.Blocks by removing deleted (nil)
// entries. It is the exported wrapper around removeNilBlocks.
func (f *Function) RemoveNilBlocks() {
	f.removeNilBlocks()
}
@ -462,26 +652,24 @@ func (f *Function) debugInfo() bool {
// returns it. Its name and type are taken from obj. Subsequent
// calls to f.lookup(obj) will return the same local.
//
func (f *Function) addNamedLocal(obj types.Object) *Alloc {
l := f.addLocal(obj.Type(), obj.Pos())
l.Comment = obj.Name()
func (f *Function) addNamedLocal(obj types.Object, source ast.Node) *Alloc {
	l := f.addLocal(obj.Type(), source)
	// Record the mapping so later f.lookup(obj) calls resolve to this slot.
	f.objects[obj] = l
	return l
}
func (f *Function) addLocalForIdent(id *ast.Ident) *Alloc {
return f.addNamedLocal(f.Pkg.info.Defs[id])
return f.addNamedLocal(f.Pkg.info.Defs[id], id)
}
// addLocal creates an anonymous local variable of type typ, adds it
// to function f and returns it. pos is the optional source location.
//
func (f *Function) addLocal(typ types.Type, pos token.Pos) *Alloc {
func (f *Function) addLocal(typ types.Type, source ast.Node) *Alloc {
v := &Alloc{}
v.setType(types.NewPointer(typ))
v.setPos(pos)
f.Locals = append(f.Locals, v)
f.emit(v)
f.emit(v, source)
return v
}
@ -501,13 +689,12 @@ func (f *Function) lookup(obj types.Object, escaping bool) Value {
// Definition must be in an enclosing function;
// plumb it through intervening closures.
if f.parent == nil {
panic("no ssa.Value for " + obj.String())
panic("no ir.Value for " + obj.String())
}
outer := f.parent.lookup(obj, true) // escaping
v := &FreeVar{
name: obj.Name(),
typ: outer.Type(),
pos: outer.Pos(),
outer: outer,
parent: f,
}
@ -517,8 +704,8 @@ func (f *Function) lookup(obj types.Object, escaping bool) Value {
}
// emit emits the specified instruction to function f.
func (f *Function) emit(instr Instruction) Value {
return f.currentBlock.emit(instr)
func (f *Function) emit(instr Instruction, source ast.Node) Value {
	// Append to the current block; source supplies the instruction's
	// originating syntax node.
	return f.currentBlock.emit(instr, source)
}
// RelString returns the full name of this function, qualified by
@ -637,10 +824,6 @@ func WriteFunction(buf *bytes.Buffer, f *Function) {
fmt.Fprintf(buf, "# Parent: %s\n", f.parent.Name())
}
if f.Recover != nil {
fmt.Fprintf(buf, "# Recover: %s\n", f.Recover)
}
from := f.pkg()
if f.FreeVars != nil {
@ -663,45 +846,38 @@ func WriteFunction(buf *bytes.Buffer, f *Function) {
buf.WriteString("\t(external)\n")
}
// NB. column calculations are confused by non-ASCII
// characters and assume 8-space tabs.
const punchcard = 80 // for old time's sake.
const tabwidth = 8
for _, b := range f.Blocks {
if b == nil {
// Corrupt CFG.
fmt.Fprintf(buf, ".nil:\n")
continue
}
n, _ := fmt.Fprintf(buf, "%d:", b.Index)
bmsg := fmt.Sprintf("%s P:%d S:%d", b.Comment, len(b.Preds), len(b.Succs))
fmt.Fprintf(buf, "%*s%s\n", punchcard-1-n-len(bmsg), "", bmsg)
fmt.Fprintf(buf, "b%d:", b.Index)
if len(b.Preds) > 0 {
fmt.Fprint(buf, " ←")
for _, pred := range b.Preds {
fmt.Fprintf(buf, " b%d", pred.Index)
}
}
if b.Comment != "" {
fmt.Fprintf(buf, " # %s", b.Comment)
}
buf.WriteByte('\n')
if false { // CFG debugging
fmt.Fprintf(buf, "\t# CFG: %s --> %s --> %s\n", b.Preds, b, b.Succs)
}
buf2 := &bytes.Buffer{}
for _, instr := range b.Instrs {
buf.WriteString("\t")
switch v := instr.(type) {
case Value:
l := punchcard - tabwidth
// Left-align the instruction.
if name := v.Name(); name != "" {
n, _ := fmt.Fprintf(buf, "%s = ", name)
l -= n
}
n, _ := buf.WriteString(instr.String())
l -= n
// Right-align the type if there's space.
if t := v.Type(); t != nil {
buf.WriteByte(' ')
ts := relType(t, from)
l -= len(ts) + len(" ") // (spaces before and after type)
if l > 0 {
fmt.Fprintf(buf, "%*s", l, "")
}
buf.WriteString(ts)
fmt.Fprintf(buf, "%s = ", name)
}
buf.WriteString(instr.String())
case nil:
// Be robust against bad transforms.
buf.WriteString("<deleted>")
@ -709,9 +885,30 @@ func WriteFunction(buf *bytes.Buffer, f *Function) {
buf.WriteString(instr.String())
}
buf.WriteString("\n")
if f.Prog.mode&PrintSource != 0 {
if s := instr.Source(); s != nil {
buf2.Reset()
format.Node(buf2, f.Prog.Fset, s)
for {
line, err := buf2.ReadString('\n')
if len(line) == 0 {
break
}
buf.WriteString("\t\t> ")
buf.WriteString(line)
if line[len(line)-1] != '\n' {
buf.WriteString("\n")
}
if err != nil {
break
}
}
}
}
}
buf.WriteString("\n")
}
fmt.Fprintf(buf, "\n")
}
// newBasicBlock adds to f a new basic block and returns it. It does
@ -736,7 +933,7 @@ func (f *Function) newBasicBlock(comment string) *BasicBlock {
// the function object, e.g. Pkg, Params, Blocks.
//
// It is practically impossible for clients to construct well-formed
// SSA functions/packages/programs directly, so we assume this is the
// IR functions/packages/programs directly, so we assume this is the
// job of the Builder alone. NewFunction exists to provide clients a
// little flexibility. For example, analysis tools may wish to
// construct fake Functions for the root of the callgraph, a fake
@ -748,18 +945,17 @@ func (prog *Program) NewFunction(name string, sig *types.Signature, provenance s
return &Function{Prog: prog, name: name, Signature: sig, Synthetic: provenance}
}
//lint:ignore U1000 we may make use of this for functions loaded from export data
// extentNode is a minimal ast.Node that records only the start and
// end positions of the original syntax, so the full AST need not be
// kept in memory.
type extentNode [2]token.Pos

func (n extentNode) Pos() token.Pos { return n[0] }
func (n extentNode) End() token.Pos { return n[1] }
// Syntax returns an ast.Node whose Pos/End methods provide the
// lexical extent of the function if it was defined by Go source code
// (f.Synthetic==""), or nil otherwise.
//
// If f was built with debug information (see Package.SetDebugRef),
// the result is the *ast.FuncDecl or *ast.FuncLit that declared the
// function. Otherwise, it is an opaque Node providing only position
// information; this avoids pinning the AST in memory.
//
func (f *Function) Syntax() ast.Node { return f.syntax }
// initHTML attaches an HTML writer to f when f's relative string
// matches name, so the stages of IR construction for that one
// function can be dumped to ir.html. An empty name disables it.
func (f *Function) initHTML(name string) {
	if name == "" {
		return
	}
	if rel := f.RelString(nil); rel == name {
		f.wr = NewHTMLWriter("ir.html", rel, "")
	}
}

1124
vendor/honnef.co/go/tools/ir/html.go vendored Normal file

File diff suppressed because it is too large Load Diff

View File

@ -1,6 +1,6 @@
// +build go1.8
package ssa
package ir
import "go/types"

View File

@ -1,6 +1,6 @@
// +build !go1.8
package ssa
package ir
import "go/types"

View File

@ -0,0 +1,183 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package irutil
// This file defines utility functions for constructing programs in IR form.
import (
"go/ast"
"go/token"
"go/types"
"golang.org/x/tools/go/loader"
"golang.org/x/tools/go/packages"
"honnef.co/go/tools/ir"
)
type Options struct {
// Which function, if any, to print in HTML form
PrintFunc string
}
// Packages creates an IR program for a set of packages.
//
// The packages must have been loaded from source syntax using the
// golang.org/x/tools/go/packages.Load function in LoadSyntax or
// LoadAllSyntax mode.
//
// Packages creates an IR package for each well-typed package in the
// initial list, plus all their dependencies. The resulting list of
// packages corresponds to the list of initial packages, and may contain
// a nil if IR code could not be constructed for the corresponding initial
// package due to type errors.
//
// Code for bodies of functions is not built until Build is called on
// the resulting Program. IR code is constructed only for the initial
// packages with well-typed syntax trees.
//
// The mode parameter controls diagnostics and checking during IR construction.
//
func Packages(initial []*packages.Package, mode ir.BuilderMode, opts *Options) (*ir.Program, []*ir.Package) {
	// deps=false: attach syntax (and build IR) only for the initial packages.
	return doPackages(initial, mode, false, opts)
}
// AllPackages creates an IR program for a set of packages plus all
// their dependencies.
//
// The packages must have been loaded from source syntax using the
// golang.org/x/tools/go/packages.Load function in LoadAllSyntax mode.
//
// AllPackages creates an IR package for each well-typed package in the
// initial list, plus all their dependencies. The resulting list of
// packages corresponds to the list of initial packages, and may contain
// a nil if IR code could not be constructed for the corresponding
// initial package due to type errors.
//
// Code for bodies of functions is not built until Build is called on
// the resulting Program. IR code is constructed for all packages with
// well-typed syntax trees.
//
// The mode parameter controls diagnostics and checking during IR construction.
//
func AllPackages(initial []*packages.Package, mode ir.BuilderMode, opts *Options) (*ir.Program, []*ir.Package) {
	// deps=true: also attach syntax (and build IR) for all dependencies.
	return doPackages(initial, mode, true, opts)
}
// doPackages implements Packages and AllPackages. deps selects
// whether syntax trees (and hence IR) are attached for dependency
// packages in addition to the initial ones.
func doPackages(initial []*packages.Package, mode ir.BuilderMode, deps bool, opts *Options) (*ir.Program, []*ir.Package) {
	var fset *token.FileSet
	if len(initial) > 0 {
		fset = initial[0].Fset
	}
	prog := ir.NewProgram(fset, mode)
	if opts != nil {
		prog.PrintFunc = opts.PrintFunc
	}
	// Remember which packages were requested directly, so that with
	// deps=false only those get their syntax attached.
	isInitial := make(map[*packages.Package]bool, len(initial))
	for _, p := range initial {
		isInitial[p] = true
	}
	irmap := make(map[*packages.Package]*ir.Package)
	packages.Visit(initial, nil, func(p *packages.Package) {
		if p.Types != nil && !p.IllTyped {
			var files []*ast.File
			if deps || isInitial[p] {
				files = p.Syntax
			}
			irmap[p] = prog.CreatePackage(p.Types, files, p.TypesInfo, true)
		}
	})
	// Result list parallels the initial list, one entry per package.
	var irpkgs []*ir.Package
	for _, p := range initial {
		irpkgs = append(irpkgs, irmap[p]) // may be nil
	}
	return prog, irpkgs
}
// CreateProgram returns a new program in IR form, given a program
// loaded from source. An IR package is created for each transitively
// error-free package of lprog.
//
// Code for bodies of functions is not built until Build is called
// on the result.
//
// The mode parameter controls diagnostics and checking during IR construction.
//
// Deprecated: use golang.org/x/tools/go/packages and the Packages
// function instead; see ir.ExampleLoadPackages.
//
func CreateProgram(lprog *loader.Program, mode ir.BuilderMode) *ir.Program {
	prog := ir.NewProgram(lprog.Fset, mode)
	for _, info := range lprog.AllPackages {
		if info.TransitivelyErrorFree {
			// Only packages free of type errors (transitively) get an
			// IR package.
			prog.CreatePackage(info.Pkg, info.Files, &info.Info, info.Importable)
		}
	}
	return prog
}
// BuildPackage builds an IR program with IR for a single package.
//
// It populates pkg by type-checking the specified file ASTs. All
// dependencies are loaded using the importer specified by tc, which
// typically loads compiler export data; IR code cannot be built for
// those packages. BuildPackage then constructs an ir.Program with all
// dependency packages created, and builds and returns the IR package
// corresponding to pkg.
//
// The caller must have set pkg.Path() to the import path.
//
// The operation fails if there were any type-checking or import errors.
//
// See ../ir/example_test.go for an example.
//
func BuildPackage(tc *types.Config, fset *token.FileSet, pkg *types.Package, files []*ast.File, mode ir.BuilderMode) (*ir.Package, *types.Info, error) {
	if fset == nil {
		panic("no token.FileSet")
	}
	if pkg.Path() == "" {
		panic("package has no import path")
	}

	info := &types.Info{
		Types:      make(map[ast.Expr]types.TypeAndValue),
		Defs:       make(map[*ast.Ident]types.Object),
		Uses:       make(map[*ast.Ident]types.Object),
		Implicits:  make(map[ast.Node]types.Object),
		Scopes:     make(map[ast.Node]*types.Scope),
		Selections: make(map[*ast.SelectorExpr]*types.Selection),
	}
	if err := types.NewChecker(tc, fset, pkg, info).Files(files); err != nil {
		return nil, nil, err
	}

	prog := ir.NewProgram(fset, mode)

	// Create IR package shells for every transitive import.
	// Order is not significant.
	seen := make(map[*types.Package]bool)
	var visit func(imports []*types.Package)
	visit = func(imports []*types.Package) {
		for _, imp := range imports {
			if seen[imp] {
				continue
			}
			seen[imp] = true
			prog.CreatePackage(imp, nil, nil, true)
			visit(imp.Imports())
		}
	}
	visit(pkg.Imports())

	// Create and build the primary package.
	irpkg := prog.CreatePackage(pkg, files, info, false)
	irpkg.Build()
	return irpkg, info, nil
}

View File

@ -0,0 +1,264 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package irutil
// This file implements discovery of switch and type-switch constructs
// from low-level control flow.
//
// Many techniques exist for compiling a high-level switch with
// constant cases to efficient machine code. The optimal choice will
// depend on the data type, the specific case values, the code in the
// body of each case, and the hardware.
// Some examples:
// - a lookup table (for a switch that maps constants to constants)
// - a computed goto
// - a binary tree
// - a perfect hash
// - a two-level switch (to partition constant strings by their first byte).
import (
"bytes"
"fmt"
"go/token"
"go/types"
"honnef.co/go/tools/ir"
)
// A ConstCase represents a single constant comparison.
// It is part of a Switch; one ConstCase is recorded per comparison
// block in the recovered if/else chain.
type ConstCase struct {
	Block *ir.BasicBlock // block performing the comparison
	Body  *ir.BasicBlock // body of the case
	Value *ir.Const      // case comparand
}
// A TypeCase represents a single type assertion.
// It is part of a Switch; one TypeCase is recorded per type-assert
// block in the recovered if/else chain.
type TypeCase struct {
	Block   *ir.BasicBlock // block performing the type assert
	Body    *ir.BasicBlock // body of the case
	Type    types.Type     // case type
	Binding ir.Value       // value bound by this case
}
// A Switch is a logical high-level control flow operation
// (a multiway branch) discovered by analysis of a CFG containing
// only if/else chains. It is not part of the ir.Instruction set.
//
// One of ConstCases and TypeCases has length >= 2;
// the other is nil.
//
// In a value switch, the list of cases may contain duplicate constants.
// A type switch may contain duplicate types, or types assignable
// to an interface type also in the list.
// TODO(adonovan): eliminate such duplicates.
//
type Switch struct {
	Start      *ir.BasicBlock // block containing start of if/else chain
	X          ir.Value       // the switch operand
	ConstCases []ConstCase    // ordered list of constant comparisons
	TypeCases  []TypeCase     // ordered list of type assertions
	Default    *ir.BasicBlock // successor if all comparisons fail
}
// String renders the Switch in a Go-like syntax. Each case body is
// represented by the String() of its first Instruction, e.g.
// "print(42:int)".
func (sw *Switch) String() string {
	var b bytes.Buffer
	switch {
	case sw.ConstCases != nil:
		fmt.Fprintf(&b, "switch %s {\n", sw.X.Name())
		for _, cc := range sw.ConstCases {
			fmt.Fprintf(&b, "case %s: %s\n", cc.Value.Name(), cc.Body.Instrs[0])
		}
	default:
		fmt.Fprintf(&b, "switch %s.(type) {\n", sw.X.Name())
		for _, tc := range sw.TypeCases {
			fmt.Fprintf(&b, "case %s %s: %s\n",
				tc.Binding.Name(), tc.Type, tc.Body.Instrs[0])
		}
	}
	if sw.Default != nil {
		fmt.Fprintf(&b, "default: %s\n", sw.Default.Instrs[0])
	}
	b.WriteString("}")
	return b.String()
}
// Switches examines the control-flow graph of fn and returns the
// set of inferred value and type switches. A value switch tests an
// ir.Value for equality against two or more compile-time constant
// values. Switches involving link-time constants (addresses) are
// ignored. A type switch type-asserts an ir.Value against two or
// more types.
//
// The switches are returned in dominance order.
//
// The resulting switches do not necessarily correspond to uses of the
// 'switch' keyword in the source: for example, a single source-level
// switch statement with non-constant cases may result in zero, one or
// many Switches, one per plural sequence of constant cases.
// Switches may even be inferred from if/else- or goto-based control flow.
// (In general, the control flow constructs of the source program
// cannot be faithfully reproduced from the IR.)
//
func Switches(fn *ir.Function) []Switch {
	// Visit blocks in dominance order so we never enter an
	// if/else chain somewhere in its middle.
	var result []Switch
	seen := make(map[*ir.BasicBlock]bool) // TODO(adonovan): opt: use ir.blockSet
	for _, b := range fn.DomPreorder() {
		if x, k := isComparisonBlock(b); x != nil {
			// Block b heads a candidate value switch.
			sw := Switch{Start: b, X: x}
			valueSwitch(&sw, k, seen)
			if len(sw.ConstCases) > 1 {
				result = append(result, sw)
			}
		}
		if y, x, T := isTypeAssertBlock(b); y != nil {
			// Block b heads a candidate type switch.
			sw := Switch{Start: b, X: x}
			typeSwitch(&sw, y, T, seen)
			if len(sw.TypeCases) > 1 {
				result = append(result, sw)
			}
		}
	}
	return result
}
// isSameX reports whether x2 is x1, possibly wrapped in any number of
// Sigma nodes (which rename a value along a CFG edge).
func isSameX(x1 ir.Value, x2 ir.Value) bool {
	for {
		if x1 == x2 {
			return true
		}
		sigma, ok := x2.(*ir.Sigma)
		if !ok {
			return false
		}
		// Unwrap one Sigma layer and compare again.
		x2 = sigma.X
	}
}
// valueSwitch extends sw with a chain of constant-comparison cases,
// starting at sw.Start and repeatedly following the false edge
// (Succs[1]) of each comparison block, for as long as the chain keeps
// comparing the same operand sw.X (modulo Sigma renamings). Blocks are
// recorded in seen so that overlapping chains are not reported twice.
// The first block that breaks the pattern becomes sw.Default.
func valueSwitch(sw *Switch, k *ir.Const, seen map[*ir.BasicBlock]bool) {
	b := sw.Start
	x := sw.X
	for isSameX(sw.X, x) {
		if seen[b] {
			break
		}
		seen[b] = true

		// Succs[0] is the 'equal' edge: the case body.
		sw.ConstCases = append(sw.ConstCases, ConstCase{
			Block: b,
			Body:  b.Succs[0],
			Value: k,
		})
		// Follow the 'not equal' edge to the next candidate block.
		b = b.Succs[1]
		// Count the significant instructions in b: If/BinOp are expected
		// (cost 1 each), σ/ϕ/DebugRef are free, anything else makes the
		// block ineligible (cost 1000).
		n := 0
		for _, instr := range b.Instrs {
			switch instr.(type) {
			case *ir.If, *ir.BinOp:
				n++
			case *ir.Sigma, *ir.Phi, *ir.DebugRef:
			default:
				n += 1000
			}
		}
		if n != 2 {
			// Block b contains not just 'if x == k' and σ/ϕ nodes,
			// so it may have side effects that
			// make it unsafe to elide.
			break
		}
		if len(b.Preds) != 1 {
			// Block b has multiple predecessors,
			// so it cannot be treated as a case.
			break
		}
		x, k = isComparisonBlock(b)
	}
	sw.Default = b
}
// typeSwitch extends sw with a chain of type-assertion cases, starting
// at sw.Start and repeatedly following the false edge (Succs[1]) of
// each type-assert block, for as long as the chain keeps asserting on
// the same operand sw.X (modulo Sigma renamings). Blocks are recorded
// in seen so that overlapping chains are not reported twice. The first
// block that breaks the pattern becomes sw.Default.
func typeSwitch(sw *Switch, y ir.Value, T types.Type, seen map[*ir.BasicBlock]bool) {
	b := sw.Start
	x := sw.X
	for isSameX(sw.X, x) {
		if seen[b] {
			break
		}
		seen[b] = true

		// Succs[0] is the 'assertion succeeded' edge: the case body.
		sw.TypeCases = append(sw.TypeCases, TypeCase{
			Block:   b,
			Body:    b.Succs[0],
			Type:    T,
			Binding: y,
		})
		// Follow the 'assertion failed' edge to the next candidate.
		b = b.Succs[1]
		// Count significant instructions: TypeAssert/Extract/If are
		// expected (cost 1 each), σ/ϕ are free, anything else makes the
		// block ineligible (cost 1000).
		n := 0
		for _, instr := range b.Instrs {
			switch instr.(type) {
			case *ir.TypeAssert, *ir.Extract, *ir.If:
				n++
			case *ir.Sigma, *ir.Phi:
			default:
				n += 1000
			}
		}
		if n != 4 {
			// Block b contains not just
			// {TypeAssert; Extract #0; Extract #1; If}
			// so it may have side effects that
			// make it unsafe to elide.
			break
		}
		if len(b.Preds) != 1 {
			// Block b has multiple predecessors,
			// so it cannot be treated as a case.
			break
		}
		y, x, T = isTypeAssertBlock(b)
	}
	sw.Default = b
}
// isComparisonBlock returns the operands (v, k) if a block ends with
// a comparison v==k, where k is a compile-time constant.
// The constant may appear on either side of the ==.
//
func isComparisonBlock(b *ir.BasicBlock) (v ir.Value, k *ir.Const) {
	n := len(b.Instrs)
	if n < 2 {
		return
	}
	cond, ok := b.Instrs[n-1].(*ir.If)
	if !ok {
		return
	}
	binop, ok := cond.Cond.(*ir.BinOp)
	if !ok || binop.Block() != b || binop.Op != token.EQL {
		return
	}
	if c, ok := binop.Y.(*ir.Const); ok {
		return binop.X, c
	}
	if c, ok := binop.X.(*ir.Const); ok {
		return binop.Y, c
	}
	return
}
// isTypeAssertBlock returns the operands (y, x, T) if a block ends with
// a type assertion "if y, ok := x.(T); ok {".
//
func isTypeAssertBlock(b *ir.BasicBlock) (y, x ir.Value, T types.Type) {
	n := len(b.Instrs)
	if n < 4 {
		return
	}
	cond, ok := b.Instrs[n-1].(*ir.If)
	if !ok {
		return
	}
	// The condition must be the 'ok' component (Extract #1) of a
	// TypeAssert performed in this same block.
	ext1, ok := cond.Cond.(*ir.Extract)
	if !ok || ext1.Block() != b || ext1.Index != 1 {
		return
	}
	assert, ok := ext1.Tuple.(*ir.TypeAssert)
	if !ok || assert.Block() != b {
		return
	}
	// hack: relies upon instruction ordering.
	if ext0, ok := b.Instrs[n-3].(*ir.Extract); ok {
		return ext0, assert.X, assert.AssertedType
	}
	return
}

View File

@ -0,0 +1,70 @@
package irutil
import (
"honnef.co/go/tools/ir"
)
// Reachable reports whether the block to can be reached from the block
// from by following successor edges. Identity and dominance are used as
// fast paths before falling back to a graph walk.
func Reachable(from, to *ir.BasicBlock) bool {
	if from == to || from.Dominates(to) {
		return true
	}
	reached := false
	Walk(from, func(b *ir.BasicBlock) bool {
		if b != to {
			return true
		}
		// Found the target; stop descending from here.
		reached = true
		return false
	})
	return reached
}
// Walk performs a depth-first traversal of the CFG starting at b,
// calling fn once for each reachable block. If fn returns false, the
// successors of that block are not pushed (though they may still be
// visited via other paths).
func Walk(b *ir.BasicBlock, fn func(*ir.BasicBlock) bool) {
	visited := map[*ir.BasicBlock]bool{}
	stack := []*ir.BasicBlock{b}
	for len(stack) > 0 {
		top := len(stack) - 1
		cur := stack[top]
		stack = stack[:top]
		if visited[cur] {
			continue
		}
		visited[cur] = true
		if !fn(cur) {
			continue
		}
		stack = append(stack, cur.Succs...)
	}
}
// Vararg attempts to recover the individual values stored into the
// backing array of the variadic slice x. It reports false when the
// slice does not come from a local Alloc or when any referrer of that
// Alloc does not fit the expected IndexAddr→single-Store pattern within
// the same block.
func Vararg(x *ir.Slice) ([]ir.Value, bool) {
	alloc, ok := x.X.(*ir.Alloc)
	if !ok {
		return nil, false
	}
	var args []ir.Value
	for _, ref := range *alloc.Referrers() {
		if ref == x {
			// The slicing operation itself; skip.
			continue
		}
		if ref.Block() != x.Block() {
			return nil, false
		}
		idx, ok := ref.(*ir.IndexAddr)
		if !ok {
			return nil, false
		}
		idxRefs := *idx.Referrers()
		if len(idxRefs) != 1 {
			return nil, false
		}
		store, ok := idxRefs[0].(*ir.Store)
		if !ok {
			return nil, false
		}
		args = append(args, store.Val)
	}
	return args, true
}

View File

@ -0,0 +1,79 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package irutil // import "honnef.co/go/tools/ir/irutil"
import "honnef.co/go/tools/ir"
// This file defines utilities for visiting the IR of
// a Program.
//
// TODO(adonovan): test coverage.
// AllFunctions finds and returns the set of functions potentially
// needed by program prog, as determined by a simple linker-style
// reachability algorithm starting from the members and method-sets of
// each package. The result may include anonymous functions and
// synthetic wrappers.
//
// Precondition: all packages are built.
//
func AllFunctions(prog *ir.Program) map[*ir.Function]bool {
	v := visitor{
		prog: prog,
		seen: make(map[*ir.Function]bool),
	}
	v.program()
	return v.seen
}
// visitor tracks the set of functions already discovered while
// traversing prog for AllFunctions.
type visitor struct {
	prog *ir.Program
	seen map[*ir.Function]bool // functions found so far
}
// program seeds the traversal with every package-level function and
// every method of the program's runtime types.
func (visit *visitor) program() {
	// Package members.
	for _, pkg := range visit.prog.AllPackages() {
		for _, member := range pkg.Members {
			fn, ok := member.(*ir.Function)
			if ok {
				visit.function(fn)
			}
		}
	}
	// Methods reachable via runtime type information.
	for _, T := range visit.prog.RuntimeTypes() {
		mset := visit.prog.MethodSets.MethodSet(T)
		for i := 0; i < mset.Len(); i++ {
			visit.function(visit.prog.MethodValue(mset.At(i)))
		}
	}
}
// function marks fn as seen and recursively visits every *ir.Function
// appearing as an operand of any instruction in fn's body.
func (visit *visitor) function(fn *ir.Function) {
	if visit.seen[fn] {
		return
	}
	visit.seen[fn] = true
	var buf [10]*ir.Value // avoid alloc in common case
	for _, b := range fn.Blocks {
		for _, instr := range b.Instrs {
			for _, op := range instr.Operands(buf[:0]) {
				if callee, ok := (*op).(*ir.Function); ok {
					visit.function(callee)
				}
			}
		}
	}
}
// MainPackages returns the subset of the specified packages
// named "main" that define a main function.
// The result may include synthetic "testmain" packages.
func MainPackages(pkgs []*ir.Package) []*ir.Package {
	var mains []*ir.Package
	for _, pkg := range pkgs {
		if pkg.Pkg.Name() != "main" {
			continue
		}
		if pkg.Func("main") == nil {
			continue
		}
		mains = append(mains, pkg)
	}
	return mains
}

1063
vendor/honnef.co/go/tools/ir/lift.go vendored Normal file

File diff suppressed because it is too large Load Diff

View File

@ -2,14 +2,13 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ssa
package ir
// lvalues are the union of addressable expressions and map-index
// expressions.
import (
"go/ast"
"go/token"
"go/types"
)
@ -18,27 +17,24 @@ import (
// pointer to permit updates to elements of maps.
//
type lvalue interface {
store(fn *Function, v Value) // stores v into the location
load(fn *Function) Value // loads the contents of the location
address(fn *Function) Value // address of the location
typ() types.Type // returns the type of the location
store(fn *Function, v Value, source ast.Node) // stores v into the location
load(fn *Function, source ast.Node) Value // loads the contents of the location
address(fn *Function) Value // address of the location
typ() types.Type // returns the type of the location
}
// An address is an lvalue represented by a true pointer.
type address struct {
addr Value
pos token.Pos // source position
expr ast.Expr // source syntax of the value (not address) [debug mode]
expr ast.Expr // source syntax of the value (not address) [debug mode]
}
func (a *address) load(fn *Function) Value {
load := emitLoad(fn, a.addr)
load.pos = a.pos
return load
func (a *address) load(fn *Function, source ast.Node) Value {
return emitLoad(fn, a.addr, source)
}
func (a *address) store(fn *Function, v Value) {
store := emitStore(fn, a.addr, v, a.pos)
func (a *address) store(fn *Function, v Value, source ast.Node) {
store := emitStore(fn, a.addr, v, source)
if a.expr != nil {
// store.Val is v, converted for assignability.
emitDebugRef(fn, a.expr, store.Val, false)
@ -57,38 +53,35 @@ func (a *address) typ() types.Type {
}
// An element is an lvalue represented by m[k], the location of an
// element of a map or string. These locations are not addressable
// element of a map. These locations are not addressable
// since pointers cannot be formed from them, but they do support
// load(), and in the case of maps, store().
// load() and store().
//
type element struct {
m, k Value // map or string
t types.Type // map element type or string byte type
pos token.Pos // source position of colon ({k:v}) or lbrack (m[k]=v)
m, k Value // map
t types.Type // map element type
}
func (e *element) load(fn *Function) Value {
l := &Lookup{
func (e *element) load(fn *Function, source ast.Node) Value {
l := &MapLookup{
X: e.m,
Index: e.k,
}
l.setPos(e.pos)
l.setType(e.t)
return fn.emit(l)
return fn.emit(l, source)
}
func (e *element) store(fn *Function, v Value) {
func (e *element) store(fn *Function, v Value, source ast.Node) {
up := &MapUpdate{
Map: e.m,
Key: e.k,
Value: emitConv(fn, v, e.t),
Value: emitConv(fn, v, e.t, source),
}
up.pos = e.pos
fn.emit(up)
fn.emit(up, source)
}
func (e *element) address(fn *Function) Value {
panic("map/string elements are not addressable")
panic("map elements are not addressable")
}
func (e *element) typ() types.Type {
@ -100,15 +93,15 @@ func (e *element) typ() types.Type {
//
type blank struct{}
func (bl blank) load(fn *Function) Value {
func (bl blank) load(fn *Function, source ast.Node) Value {
panic("blank.load is illegal")
}
func (bl blank) store(fn *Function, v Value) {
func (bl blank) store(fn *Function, v Value, source ast.Node) {
s := &BlankStore{
Val: v,
}
fn.emit(s)
fn.emit(s, source)
}
func (bl blank) address(fn *Function) Value {

View File

@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ssa
package ir
// This file defines utilities for population of method sets.

View File

@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ssa
package ir
// This file defines the BuilderMode type and its command-line flag.
@ -15,32 +15,30 @@ import (
//
// *BuilderMode satisfies the flag.Value interface. Example:
//
// var mode = ssa.BuilderMode(0)
// func init() { flag.Var(&mode, "build", ssa.BuilderModeDoc) }
// var mode = ir.BuilderMode(0)
// func init() { flag.Var(&mode, "build", ir.BuilderModeDoc) }
//
type BuilderMode uint
const (
PrintPackages BuilderMode = 1 << iota // Print package inventory to stdout
PrintFunctions // Print function SSA code to stdout
LogSource // Log source locations as SSA builder progresses
PrintFunctions // Print function IR code to stdout
PrintSource // Print source code when printing function IR
LogSource // Log source locations as IR builder progresses
SanityCheckFunctions // Perform sanity checking of function bodies
NaiveForm // Build naïve SSA form: don't replace local loads/stores with registers
BuildSerially // Build packages serially, not in parallel.
NaiveForm // Build naïve IR form: don't replace local loads/stores with registers
GlobalDebug // Enable debug info for all packages
BareInits // Build init functions without guards or calls to dependent inits
)
const BuilderModeDoc = `Options controlling the SSA builder.
const BuilderModeDoc = `Options controlling the IR builder.
The value is a sequence of zero or more of these letters:
C perform sanity [C]hecking of the SSA form.
C perform sanity [C]hecking of the IR form.
D include [D]ebug info for every function.
P print [P]ackage inventory.
F print [F]unction SSA code.
S log [S]ource locations as SSA builder progresses.
L build distinct packages seria[L]ly instead of in parallel.
N build [N]aive SSA form: don't replace local loads/stores with registers.
I build bare [I]nit functions: no init guards or calls to dependent inits.
F print [F]unction IR code.
A print [A]ST nodes responsible for IR instructions
S log [S]ource locations as IR builder progresses.
N build [N]aive IR form: don't replace local loads/stores with registers.
`
func (m BuilderMode) String() string {
@ -54,6 +52,9 @@ func (m BuilderMode) String() string {
if m&PrintFunctions != 0 {
buf.WriteByte('F')
}
if m&PrintSource != 0 {
buf.WriteByte('A')
}
if m&LogSource != 0 {
buf.WriteByte('S')
}
@ -63,9 +64,6 @@ func (m BuilderMode) String() string {
if m&NaiveForm != 0 {
buf.WriteByte('N')
}
if m&BuildSerially != 0 {
buf.WriteByte('L')
}
return buf.String()
}
@ -80,14 +78,14 @@ func (m *BuilderMode) Set(s string) error {
mode |= PrintPackages
case 'F':
mode |= PrintFunctions
case 'A':
mode |= PrintSource
case 'S':
mode |= LogSource | BuildSerially
mode |= LogSource
case 'C':
mode |= SanityCheckFunctions
case 'N':
mode |= NaiveForm
case 'L':
mode |= BuildSerially
default:
return fmt.Errorf("unknown BuilderMode option: %q", c)
}

View File

@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ssa
package ir
// This file implements the String() methods for all Value and
// Instruction types.
@ -25,6 +25,9 @@ import (
// references are package-qualified.
//
func relName(v Value, i Instruction) string {
if v == nil {
return "<nil>"
}
var from *types.Package
if i != nil {
from = i.Parent().pkg()
@ -32,8 +35,6 @@ func relName(v Value, i Instruction) string {
switch v := v.(type) {
case Member: // *Function or *Global
return v.RelString(from)
case *Const:
return v.RelString(from)
}
return v.Name()
}
@ -58,36 +59,40 @@ func relString(m Member, from *types.Package) string {
func (v *Parameter) String() string {
from := v.Parent().pkg()
return fmt.Sprintf("parameter %s : %s", v.Name(), relType(v.Type(), from))
return fmt.Sprintf("Parameter <%s> {%s}", relType(v.Type(), from), v.name)
}
func (v *FreeVar) String() string {
from := v.Parent().pkg()
return fmt.Sprintf("freevar %s : %s", v.Name(), relType(v.Type(), from))
return fmt.Sprintf("FreeVar <%s> %s", relType(v.Type(), from), v.Name())
}
func (v *Builtin) String() string {
return fmt.Sprintf("builtin %s", v.Name())
return fmt.Sprintf("Builtin %s", v.Name())
}
// Instruction.String()
func (v *Alloc) String() string {
op := "local"
if v.Heap {
op = "new"
}
from := v.Parent().pkg()
return fmt.Sprintf("%s %s (%s)", op, relType(deref(v.Type()), from), v.Comment)
storage := "Stack"
if v.Heap {
storage = "Heap"
}
return fmt.Sprintf("%sAlloc <%s>", storage, relType(v.Type(), from))
}
func (v *Sigma) String() string {
from := v.Parent().pkg()
s := fmt.Sprintf("Sigma <%s> [b%d] %s", relType(v.Type(), from), v.From.Index, v.X.Name())
return s
}
func (v *Phi) String() string {
var b bytes.Buffer
b.WriteString("phi [")
fmt.Fprintf(&b, "Phi <%s>", v.Type())
for i, edge := range v.Edges {
if i > 0 {
b.WriteString(", ")
}
b.WriteString(" ")
// Be robust against malformed CFG.
if v.block == nil {
b.WriteString("??")
@ -97,40 +102,35 @@ func (v *Phi) String() string {
if i < len(v.block.Preds) {
block = v.block.Preds[i].Index
}
fmt.Fprintf(&b, "%d: ", block)
fmt.Fprintf(&b, "%d:", block)
edgeVal := "<nil>" // be robust
if edge != nil {
edgeVal = relName(edge, v)
}
b.WriteString(edgeVal)
}
b.WriteString("]")
if v.Comment != "" {
b.WriteString(" #")
b.WriteString(v.Comment)
}
return b.String()
}
func printCall(v *CallCommon, prefix string, instr Instruction) string {
var b bytes.Buffer
b.WriteString(prefix)
if !v.IsInvoke() {
b.WriteString(relName(v.Value, instr))
} else {
fmt.Fprintf(&b, "invoke %s.%s", relName(v.Value, instr), v.Method.Name())
}
b.WriteString("(")
for i, arg := range v.Args {
if i > 0 {
b.WriteString(", ")
if value, ok := instr.(Value); ok {
fmt.Fprintf(&b, "%s <%s> %s", prefix, relType(value.Type(), instr.Parent().pkg()), relName(v.Value, instr))
} else {
fmt.Fprintf(&b, "%s %s", prefix, relName(v.Value, instr))
}
} else {
if value, ok := instr.(Value); ok {
fmt.Fprintf(&b, "%sInvoke <%s> %s.%s", prefix, relType(value.Type(), instr.Parent().pkg()), relName(v.Value, instr), v.Method.Name())
} else {
fmt.Fprintf(&b, "%sInvoke %s.%s", prefix, relName(v.Value, instr), v.Method.Name())
}
}
for _, arg := range v.Args {
b.WriteString(" ")
b.WriteString(relName(arg, instr))
}
if v.Signature().Variadic() {
b.WriteString("...")
}
b.WriteString(")")
return b.String()
}
@ -139,73 +139,59 @@ func (c *CallCommon) String() string {
}
func (v *Call) String() string {
return printCall(&v.Call, "", v)
return printCall(&v.Call, "Call", v)
}
func (v *BinOp) String() string {
return fmt.Sprintf("%s %s %s", relName(v.X, v), v.Op.String(), relName(v.Y, v))
return fmt.Sprintf("BinOp <%s> {%s} %s %s", relType(v.Type(), v.Parent().pkg()), v.Op.String(), relName(v.X, v), relName(v.Y, v))
}
func (v *UnOp) String() string {
return fmt.Sprintf("%s%s%s", v.Op, relName(v.X, v), commaOk(v.CommaOk))
return fmt.Sprintf("UnOp <%s> {%s} %s", relType(v.Type(), v.Parent().pkg()), v.Op.String(), relName(v.X, v))
}
func (v *Load) String() string {
return fmt.Sprintf("Load <%s> %s", relType(v.Type(), v.Parent().pkg()), relName(v.X, v))
}
func printConv(prefix string, v, x Value) string {
from := v.Parent().pkg()
return fmt.Sprintf("%s %s <- %s (%s)",
return fmt.Sprintf("%s <%s> %s",
prefix,
relType(v.Type(), from),
relType(x.Type(), from),
relName(x, v.(Instruction)))
}
func (v *ChangeType) String() string { return printConv("changetype", v, v.X) }
func (v *Convert) String() string { return printConv("convert", v, v.X) }
func (v *ChangeInterface) String() string { return printConv("change interface", v, v.X) }
func (v *MakeInterface) String() string { return printConv("make", v, v.X) }
func (v *ChangeType) String() string { return printConv("ChangeType", v, v.X) }
func (v *Convert) String() string { return printConv("Convert", v, v.X) }
func (v *ChangeInterface) String() string { return printConv("ChangeInterface", v, v.X) }
func (v *MakeInterface) String() string { return printConv("MakeInterface", v, v.X) }
func (v *MakeClosure) String() string {
from := v.Parent().pkg()
var b bytes.Buffer
fmt.Fprintf(&b, "make closure %s", relName(v.Fn, v))
fmt.Fprintf(&b, "MakeClosure <%s> %s", relType(v.Type(), from), relName(v.Fn, v))
if v.Bindings != nil {
b.WriteString(" [")
for i, c := range v.Bindings {
if i > 0 {
b.WriteString(", ")
}
for _, c := range v.Bindings {
b.WriteString(" ")
b.WriteString(relName(c, v))
}
b.WriteString("]")
}
return b.String()
}
func (v *MakeSlice) String() string {
from := v.Parent().pkg()
return fmt.Sprintf("make %s %s %s",
return fmt.Sprintf("MakeSlice <%s> %s %s",
relType(v.Type(), from),
relName(v.Len, v),
relName(v.Cap, v))
}
func (v *Slice) String() string {
var b bytes.Buffer
b.WriteString("slice ")
b.WriteString(relName(v.X, v))
b.WriteString("[")
if v.Low != nil {
b.WriteString(relName(v.Low, v))
}
b.WriteString(":")
if v.High != nil {
b.WriteString(relName(v.High, v))
}
if v.Max != nil {
b.WriteString(":")
b.WriteString(relName(v.Max, v))
}
b.WriteString("]")
return b.String()
from := v.Parent().pkg()
return fmt.Sprintf("Slice <%s> %s %s %s %s",
relType(v.Type(), from), relName(v.X, v), relName(v.Low, v), relName(v.High, v), relName(v.Max, v))
}
func (v *MakeMap) String() string {
@ -214,22 +200,23 @@ func (v *MakeMap) String() string {
res = relName(v.Reserve, v)
}
from := v.Parent().pkg()
return fmt.Sprintf("make %s %s", relType(v.Type(), from), res)
return fmt.Sprintf("MakeMap <%s> %s", relType(v.Type(), from), res)
}
func (v *MakeChan) String() string {
from := v.Parent().pkg()
return fmt.Sprintf("make %s %s", relType(v.Type(), from), relName(v.Size, v))
return fmt.Sprintf("MakeChan <%s> %s", relType(v.Type(), from), relName(v.Size, v))
}
func (v *FieldAddr) String() string {
from := v.Parent().pkg()
st := deref(v.X.Type()).Underlying().(*types.Struct)
// Be robust against a bad index.
name := "?"
if 0 <= v.Field && v.Field < st.NumFields() {
name = st.Field(v.Field).Name()
}
return fmt.Sprintf("&%s.%s [#%d]", relName(v.X, v), name, v.Field)
return fmt.Sprintf("FieldAddr <%s> [%d] (%s) %s", relType(v.Type(), from), v.Field, name, relName(v.X, v))
}
func (v *Field) String() string {
@ -239,36 +226,49 @@ func (v *Field) String() string {
if 0 <= v.Field && v.Field < st.NumFields() {
name = st.Field(v.Field).Name()
}
return fmt.Sprintf("%s.%s [#%d]", relName(v.X, v), name, v.Field)
from := v.Parent().pkg()
return fmt.Sprintf("Field <%s> [%d] (%s) %s", relType(v.Type(), from), v.Field, name, relName(v.X, v))
}
func (v *IndexAddr) String() string {
return fmt.Sprintf("&%s[%s]", relName(v.X, v), relName(v.Index, v))
from := v.Parent().pkg()
return fmt.Sprintf("IndexAddr <%s> %s %s", relType(v.Type(), from), relName(v.X, v), relName(v.Index, v))
}
func (v *Index) String() string {
return fmt.Sprintf("%s[%s]", relName(v.X, v), relName(v.Index, v))
from := v.Parent().pkg()
return fmt.Sprintf("Index <%s> %s %s", relType(v.Type(), from), relName(v.X, v), relName(v.Index, v))
}
func (v *Lookup) String() string {
return fmt.Sprintf("%s[%s]%s", relName(v.X, v), relName(v.Index, v), commaOk(v.CommaOk))
func (v *MapLookup) String() string {
from := v.Parent().pkg()
return fmt.Sprintf("MapLookup <%s> %s %s", relType(v.Type(), from), relName(v.X, v), relName(v.Index, v))
}
func (v *StringLookup) String() string {
from := v.Parent().pkg()
return fmt.Sprintf("StringLookup <%s> %s %s", relType(v.Type(), from), relName(v.X, v), relName(v.Index, v))
}
func (v *Range) String() string {
return "range " + relName(v.X, v)
from := v.Parent().pkg()
return fmt.Sprintf("Range <%s> %s", relType(v.Type(), from), relName(v.X, v))
}
func (v *Next) String() string {
return "next " + relName(v.Iter, v)
from := v.Parent().pkg()
return fmt.Sprintf("Next <%s> %s", relType(v.Type(), from), relName(v.Iter, v))
}
func (v *TypeAssert) String() string {
from := v.Parent().pkg()
return fmt.Sprintf("typeassert%s %s.(%s)", commaOk(v.CommaOk), relName(v.X, v), relType(v.AssertedType, from))
return fmt.Sprintf("TypeAssert <%s> %s", relType(v.Type(), from), relName(v.X, v))
}
func (v *Extract) String() string {
return fmt.Sprintf("extract %s #%d", relName(v.Tuple, v), v.Index)
from := v.Parent().pkg()
name := v.Tuple.Type().(*types.Tuple).At(v.Index).Name()
return fmt.Sprintf("Extract <%s> [%d] (%s) %s", relType(v.Type(), from), v.Index, name, relName(v.Tuple, v))
}
func (s *Jump) String() string {
@ -277,7 +277,20 @@ func (s *Jump) String() string {
if s.block != nil && len(s.block.Succs) == 1 {
block = s.block.Succs[0].Index
}
return fmt.Sprintf("jump %d", block)
str := fmt.Sprintf("Jump → b%d", block)
if s.Comment != "" {
str = fmt.Sprintf("%s # %s", str, s.Comment)
}
return str
}
func (s *Unreachable) String() string {
// Be robust against malformed CFG.
block := -1
if s.block != nil && len(s.block.Succs) == 1 {
block = s.block.Succs[0].Index
}
return fmt.Sprintf("Unreachable → b%d", block)
}
func (s *If) String() string {
@ -287,41 +300,70 @@ func (s *If) String() string {
tblock = s.block.Succs[0].Index
fblock = s.block.Succs[1].Index
}
return fmt.Sprintf("if %s goto %d else %d", relName(s.Cond, s), tblock, fblock)
return fmt.Sprintf("If %s → b%d b%d", relName(s.Cond, s), tblock, fblock)
}
func (s *ConstantSwitch) String() string {
var b bytes.Buffer
fmt.Fprintf(&b, "ConstantSwitch %s", relName(s.Tag, s))
for _, cond := range s.Conds {
fmt.Fprintf(&b, " %s", relName(cond, s))
}
fmt.Fprint(&b, " →")
for _, succ := range s.block.Succs {
fmt.Fprintf(&b, " b%d", succ.Index)
}
return b.String()
}
func (s *TypeSwitch) String() string {
from := s.Parent().pkg()
var b bytes.Buffer
fmt.Fprintf(&b, "TypeSwitch <%s> %s", relType(s.typ, from), relName(s.Tag, s))
for _, cond := range s.Conds {
fmt.Fprintf(&b, " %q", relType(cond, s.block.parent.pkg()))
}
return b.String()
}
func (s *Go) String() string {
return printCall(&s.Call, "go ", s)
return printCall(&s.Call, "Go", s)
}
func (s *Panic) String() string {
return "panic " + relName(s.X, s)
// Be robust against malformed CFG.
block := -1
if s.block != nil && len(s.block.Succs) == 1 {
block = s.block.Succs[0].Index
}
return fmt.Sprintf("Panic %s → b%d", relName(s.X, s), block)
}
func (s *Return) String() string {
var b bytes.Buffer
b.WriteString("return")
for i, r := range s.Results {
if i == 0 {
b.WriteString(" ")
} else {
b.WriteString(", ")
}
b.WriteString("Return")
for _, r := range s.Results {
b.WriteString(" ")
b.WriteString(relName(r, s))
}
return b.String()
}
func (*RunDefers) String() string {
return "rundefers"
return "RunDefers"
}
func (s *Send) String() string {
return fmt.Sprintf("send %s <- %s", relName(s.Chan, s), relName(s.X, s))
return fmt.Sprintf("Send %s %s", relName(s.Chan, s), relName(s.X, s))
}
func (recv *Recv) String() string {
from := recv.Parent().pkg()
return fmt.Sprintf("Recv <%s> %s", relType(recv.Type(), from), relName(recv.Chan, recv))
}
func (s *Defer) String() string {
return printCall(&s.Call, "defer ", s)
return printCall(&s.Call, "Defer", s)
}
func (s *Select) String() string {
@ -341,21 +383,23 @@ func (s *Select) String() string {
}
non := ""
if !s.Blocking {
non = "non"
non = "Non"
}
return fmt.Sprintf("select %sblocking [%s]", non, b.String())
from := s.Parent().pkg()
return fmt.Sprintf("Select%sBlocking <%s> [%s]", non, relType(s.Type(), from), b.String())
}
func (s *Store) String() string {
return fmt.Sprintf("*%s = %s", relName(s.Addr, s), relName(s.Val, s))
return fmt.Sprintf("Store {%s} %s %s",
s.Val.Type(), relName(s.Addr, s), relName(s.Val, s))
}
func (s *BlankStore) String() string {
return fmt.Sprintf("_ = %s", relName(s.Val, s))
return fmt.Sprintf("BlankStore %s", relName(s.Val, s))
}
func (s *MapUpdate) String() string {
return fmt.Sprintf("%s[%s] = %s", relName(s.Map, s), relName(s.Key, s), relName(s.Value, s))
return fmt.Sprintf("MapUpdate %s %s %s", relName(s.Map, s), relName(s.Key, s), relName(s.Value, s))
}
func (s *DebugRef) String() string {
@ -426,10 +470,3 @@ func WritePackage(buf *bytes.Buffer, p *Package) {
fmt.Fprintf(buf, "\n")
}
func commaOk(x bool) string {
if x {
return ",ok"
}
return ""
}

View File

@ -2,9 +2,9 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ssa
package ir
// An optional pass for sanity-checking invariants of the SSA representation.
// An optional pass for sanity-checking invariants of the IR representation.
// Currently it checks CFG invariants but little at the instruction level.
import (
@ -23,7 +23,7 @@ type sanity struct {
insane bool
}
// sanityCheck performs integrity checking of the SSA representation
// sanityCheck performs integrity checking of the IR representation
// of the function fn and returns true if it was valid. Diagnostics
// are written to reporter if non-nil, os.Stderr otherwise. Some
// diagnostics are only warnings and do not imply a negative result.
@ -89,8 +89,15 @@ func findDuplicate(blocks []*BasicBlock) *BasicBlock {
func (s *sanity) checkInstr(idx int, instr Instruction) {
switch instr := instr.(type) {
case *If, *Jump, *Return, *Panic:
case *If, *Jump, *Return, *Panic, *Unreachable, *ConstantSwitch:
s.errorf("control flow instruction not at end of block")
case *Sigma:
if idx > 0 {
prev := s.block.Instrs[idx-1]
if _, ok := prev.(*Sigma); !ok {
s.errorf("Sigma instruction follows a non-Sigma: %T", prev)
}
}
case *Phi:
if idx == 0 {
// It suffices to apply this check to just the first phi node.
@ -99,8 +106,10 @@ func (s *sanity) checkInstr(idx int, instr Instruction) {
}
} else {
prev := s.block.Instrs[idx-1]
if _, ok := prev.(*Phi); !ok {
s.errorf("Phi instruction follows a non-Phi: %T", prev)
switch prev.(type) {
case *Phi, *Sigma:
default:
s.errorf("Phi instruction follows a non-Phi, non-Sigma: %T", prev)
}
}
if ne, np := len(instr.Edges), len(s.block.Preds); ne != np {
@ -109,7 +118,7 @@ func (s *sanity) checkInstr(idx int, instr Instruction) {
} else {
for i, e := range instr.Edges {
if e == nil {
s.errorf("phi node '%s' has no value for edge #%d from %s", instr.Comment, i, s.block.Preds[i])
s.errorf("phi node '%v' has no value for edge #%d from %s", instr, i, s.block.Preds[i])
}
}
}
@ -146,7 +155,8 @@ func (s *sanity) checkInstr(idx int, instr Instruction) {
case *Go:
case *Index:
case *IndexAddr:
case *Lookup:
case *MapLookup:
case *StringLookup:
case *MakeChan:
case *MakeClosure:
numFree := len(instr.Fn.(*Function).FreeVars)
@ -175,8 +185,11 @@ func (s *sanity) checkInstr(idx int, instr Instruction) {
case *UnOp:
case *DebugRef:
case *BlankStore:
case *Sigma:
// TODO(adonovan): implement checks.
case *Load:
case *Parameter:
case *Const:
case *Recv:
case *TypeSwitch:
default:
panic(fmt.Sprintf("Unknown instruction type: %T", instr))
}
@ -196,7 +209,9 @@ func (s *sanity) checkInstr(idx int, instr Instruction) {
} else if t == tRangeIter {
// not a proper type; ignore.
} else if b, ok := t.Underlying().(*types.Basic); ok && b.Info()&types.IsUntyped != 0 {
s.errorf("instruction has 'untyped' result: %s = %s : %s", v.Name(), v, t)
if _, ok := v.(*Const); !ok {
s.errorf("instruction has 'untyped' result: %s = %s : %s", v.Name(), v, t)
}
}
s.checkReferrerList(v)
}
@ -239,11 +254,19 @@ func (s *sanity) checkFinalInstr(instr Instruction) {
}
case *Panic:
if nsuccs := len(s.block.Succs); nsuccs != 0 {
s.errorf("Panic-terminated block has %d successors; expected none", nsuccs)
if nsuccs := len(s.block.Succs); nsuccs != 1 {
s.errorf("Panic-terminated block has %d successors; expected one", nsuccs)
return
}
case *Unreachable:
if nsuccs := len(s.block.Succs); nsuccs != 1 {
s.errorf("Unreachable-terminated block has %d successors; expected one", nsuccs)
return
}
case *ConstantSwitch:
default:
s.errorf("non-control flow instruction at end of block")
}
@ -260,9 +283,8 @@ func (s *sanity) checkBlock(b *BasicBlock, index int) {
}
// Check all blocks are reachable.
// (The entry block is always implicitly reachable,
// as is the Recover block, if any.)
if (index > 0 && b != b.parent.Recover) && len(b.Preds) == 0 {
// (The entry block is always implicitly reachable, the exit block may be unreachable.)
if index > 1 && len(b.Preds) == 0 {
s.warnf("unreachable block")
if b.Instrs == nil {
// Since this block is about to be pruned,
@ -395,7 +417,11 @@ func (s *sanity) checkReferrerList(v Value) {
}
for i, ref := range *refs {
if _, ok := s.instrs[ref]; !ok {
s.errorf("%s.Referrers()[%d] = %s is not an instruction belonging to this function", v.Name(), i, ref)
if val, ok := ref.(Value); ok {
s.errorf("%s.Referrers()[%d] = %s = %s is not an instruction belonging to this function", v.Name(), i, val.Name(), val)
} else {
s.errorf("%s.Referrers()[%d] = %s is not an instruction belonging to this function", v.Name(), i, ref)
}
}
}
}
@ -426,7 +452,7 @@ func (s *sanity) checkFunction(fn *Function) bool {
s.errorf("nil Pkg")
}
}
if src, syn := fn.Synthetic == "", fn.Syntax() != nil; src != syn {
if src, syn := fn.Synthetic == "", fn.source != nil; src != syn {
s.errorf("got fromSource=%t, hasSyntax=%t; want same values", src, syn)
}
for i, l := range fn.Locals {
@ -481,9 +507,6 @@ func (s *sanity) checkFunction(fn *Function) bool {
}
s.checkBlock(b, i)
}
if fn.Recover != nil && fn.Blocks[fn.Recover.Index] != fn.Recover {
s.errorf("Recover block is not in Blocks slice")
}
s.block = nil
for i, anon := range fn.AnonFuncs {
@ -522,14 +545,11 @@ func sanityCheckPackage(pkg *Package) {
if obj.Name() != name {
if obj.Name() == "init" && strings.HasPrefix(mem.Name(), "init#") {
// Ok. The name of a declared init function varies between
// its types.Func ("init") and its ssa.Function ("init#%d").
// its types.Func ("init") and its ir.Function ("init#%d").
} else {
panic(fmt.Sprintf("%s: %T.Object().Name() = %s, want %s",
pkg.Pkg.Path(), mem, obj.Name(), name))
}
}
if obj.Pos() != mem.Pos() {
panic(fmt.Sprintf("%s Pos=%d obj.Pos=%d", mem, mem.Pos(), obj.Pos()))
}
}
}

View File

@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ssa
package ir
// This file defines utilities for working with source positions
// or source-level named entities ("objects").
@ -25,7 +25,7 @@ import (
// Returns nil if not found; reasons might include:
// - the node is not enclosed by any function.
// - the node is within an anonymous function (FuncLit) and
// its SSA function has not been created yet
// its IR function has not been created yet
// (pkg.Build() has not yet been called).
//
func EnclosingFunction(pkg *Package, path []ast.Node) *Function {
@ -46,7 +46,7 @@ outer:
continue outer
}
}
// SSA function not found:
// IR function not found:
// - package not yet built, or maybe
// - builder skipped FuncLit in dead block
// (in principle; but currently the Builder
@ -62,9 +62,9 @@ outer:
// package-level variable.
//
// Unlike EnclosingFunction, the behaviour of this function does not
// depend on whether SSA code for pkg has been built, so it can be
// depend on whether IR code for pkg has been built, so it can be
// used to quickly reject check inputs that will cause
// EnclosingFunction to fail, prior to SSA building.
// EnclosingFunction to fail, prior to IR building.
//
func HasEnclosingFunction(pkg *Package, path []ast.Node) bool {
return findEnclosingPackageLevelFunction(pkg, path) != nil
@ -83,23 +83,14 @@ func findEnclosingPackageLevelFunction(pkg *Package, path []ast.Node) *Function
}
case *ast.FuncDecl:
if decl.Recv == nil && decl.Name.Name == "init" {
// Explicit init() function.
for _, b := range pkg.init.Blocks {
for _, instr := range b.Instrs {
if instr, ok := instr.(*Call); ok {
if callee, ok := instr.Call.Value.(*Function); ok && callee.Pkg == pkg && callee.Pos() == decl.Name.NamePos {
return callee
}
}
}
}
// Hack: return non-nil when SSA is not yet
// Declared function/method.
fn := findNamedFunc(pkg, decl.Pos())
if fn == nil && decl.Recv == nil && decl.Name.Name == "init" {
// Hack: return non-nil when IR is not yet
// built so that HasEnclosingFunction works.
return pkg.init
}
// Declared function/method.
return findNamedFunc(pkg, decl.Name.NamePos)
return fn
}
}
return nil // not in any function
@ -109,29 +100,15 @@ func findEnclosingPackageLevelFunction(pkg *Package, path []ast.Node) *Function
// position pos.
//
func findNamedFunc(pkg *Package, pos token.Pos) *Function {
// Look at all package members and method sets of named types.
// Not very efficient.
for _, mem := range pkg.Members {
switch mem := mem.(type) {
case *Function:
if mem.Pos() == pos {
return mem
}
case *Type:
mset := pkg.Prog.MethodSets.MethodSet(types.NewPointer(mem.Type()))
for i, n := 0, mset.Len(); i < n; i++ {
// Don't call Program.Method: avoid creating wrappers.
obj := mset.At(i).Obj().(*types.Func)
if obj.Pos() == pos {
return pkg.values[obj].(*Function)
}
}
for _, fn := range pkg.Functions {
if fn.Pos() == pos {
return fn
}
}
return nil
}
// ValueForExpr returns the SSA Value that corresponds to non-constant
// ValueForExpr returns the IR Value that corresponds to non-constant
// expression e.
//
// It returns nil if no value was found, e.g.
@ -149,10 +126,10 @@ func findNamedFunc(pkg *Package, pos token.Pos) *Function {
// The types of e (or &e, if isAddr) and the result are equal
// (modulo "untyped" bools resulting from comparisons).
//
// (Tip: to find the ssa.Value given a source position, use
// (Tip: to find the ir.Value given a source position, use
// astutil.PathEnclosingInterval to locate the ast.Node, then
// EnclosingFunction to locate the Function, then ValueForExpr to find
// the ssa.Value.)
// the ir.Value.)
//
func (f *Function) ValueForExpr(e ast.Expr) (value Value, isAddr bool) {
if f.debugInfo() { // (opt)
@ -172,9 +149,9 @@ func (f *Function) ValueForExpr(e ast.Expr) (value Value, isAddr bool) {
// --- Lookup functions for source-level named entities (types.Objects) ---
// Package returns the SSA Package corresponding to the specified
// Package returns the IR Package corresponding to the specified
// type-checker package object.
// It returns nil if no such SSA package has been created.
// It returns nil if no such IR package has been created.
//
func (prog *Program) Package(obj *types.Package) *Package {
return prog.packages[obj]
@ -203,7 +180,7 @@ func (prog *Program) FuncValue(obj *types.Func) *Function {
return fn
}
// ConstValue returns the SSA Value denoted by the source-level named
// ConstValue returns the IR Value denoted by the source-level named
// constant obj.
//
func (prog *Program) ConstValue(obj *types.Const) *Const {
@ -221,12 +198,12 @@ func (prog *Program) ConstValue(obj *types.Const) *Const {
return NewConst(obj.Val(), obj.Type())
}
// VarValue returns the SSA Value that corresponds to a specific
// VarValue returns the IR Value that corresponds to a specific
// identifier denoting the source-level named variable obj.
//
// VarValue returns nil if a local variable was not found, perhaps
// because its package was not built, the debug information was not
// requested during SSA construction, or the value was optimized away.
// requested during IR construction, or the value was optimized away.
//
// ref is the path to an ast.Ident (e.g. from PathEnclosingInterval),
// and that ident must resolve to obj.
@ -252,14 +229,14 @@ func (prog *Program) ConstValue(obj *types.Const) *Const {
//
// It is not specified whether the value or the address is returned in
// any particular case, as it may depend upon optimizations performed
// during SSA code generation, such as registerization, constant
// during IR code generation, such as registerization, constant
// folding, avoidance of materialization of subexpressions, etc.
//
func (prog *Program) VarValue(obj *types.Var, pkg *Package, ref []ast.Node) (value Value, isAddr bool) {
// All references to a var are local to some function, possibly init.
fn := EnclosingFunction(pkg, ref)
if fn == nil {
return // e.g. def of struct field; SSA not built?
return // e.g. def of struct field; IR not built?
}
id := ref[0].(*ast.Ident)

View File

@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ssa
package ir
// This file defines a number of miscellaneous utility functions.
@ -52,36 +52,6 @@ func recvType(obj *types.Func) types.Type {
return obj.Type().(*types.Signature).Recv().Type()
}
// DefaultType returns the default "typed" type for an "untyped" type;
// it returns the incoming type for all other types. The default type
// for untyped nil is untyped nil.
//
// Exported to ssa/interp.
//
// TODO(adonovan): use go/types.DefaultType after 1.8.
//
func DefaultType(typ types.Type) types.Type {
if t, ok := typ.(*types.Basic); ok {
k := t.Kind()
switch k {
case types.UntypedBool:
k = types.Bool
case types.UntypedInt:
k = types.Int
case types.UntypedRune:
k = types.Rune
case types.UntypedFloat:
k = types.Float64
case types.UntypedComplex:
k = types.Complex128
case types.UntypedString:
k = types.String
}
typ = types.Typ[k]
}
return typ
}
// logStack prints the formatted "start" message to stderr and
// returns a closure that prints the corresponding "end" message.
// Call using 'defer logStack(...)()' to show builder stack on panic.

View File

@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ssa
package ir
// This file defines synthesis of Functions that delegate to declared
// methods; they come in three kinds:
@ -65,41 +65,42 @@ func makeWrapper(prog *Program, sel *types.Selection) *Function {
defer logStack("make %s to (%s)", description, recv.Type())()
}
fn := &Function{
name: name,
method: sel,
object: obj,
Signature: sig,
Synthetic: description,
Prog: prog,
pos: obj.Pos(),
name: name,
method: sel,
object: obj,
Signature: sig,
Synthetic: description,
Prog: prog,
functionBody: new(functionBody),
}
fn.initHTML(prog.PrintFunc)
fn.startBody()
fn.addSpilledParam(recv)
fn.addSpilledParam(recv, nil)
createParams(fn, start)
indices := sel.Index()
var v Value = fn.Locals[0] // spilled receiver
if isPointer(sel.Recv()) {
v = emitLoad(fn, v)
v = emitLoad(fn, v, nil)
// For simple indirection wrappers, perform an informative nil-check:
// "value method (T).f called using nil *T pointer"
if len(indices) == 1 && !isPointer(recvType(obj)) {
var c Call
c.Call.Value = &Builtin{
name: "ssa:wrapnilchk",
name: "ir:wrapnilchk",
sig: types.NewSignature(nil,
types.NewTuple(anonVar(sel.Recv()), anonVar(tString), anonVar(tString)),
types.NewTuple(anonVar(sel.Recv())), false),
}
c.Call.Args = []Value{
v,
stringConst(deref(sel.Recv()).String()),
stringConst(sel.Obj().Name()),
emitConst(fn, stringConst(deref(sel.Recv()).String())),
emitConst(fn, stringConst(sel.Obj().Name())),
}
c.setType(v.Type())
v = fn.emit(&c)
v = fn.emit(&c, nil)
}
}
@ -111,7 +112,7 @@ func makeWrapper(prog *Program, sel *types.Selection) *Function {
// Load) in preference to value extraction (Field possibly
// preceded by Load).
v = emitImplicitSelections(fn, v, indices[:len(indices)-1])
v = emitImplicitSelections(fn, v, indices[:len(indices)-1], nil)
// Invariant: v is a pointer, either
// value of implicit *C field, or
@ -120,18 +121,18 @@ func makeWrapper(prog *Program, sel *types.Selection) *Function {
var c Call
if r := recvType(obj); !isInterface(r) { // concrete method
if !isPointer(r) {
v = emitLoad(fn, v)
v = emitLoad(fn, v, nil)
}
c.Call.Value = prog.declaredFunc(obj)
c.Call.Args = append(c.Call.Args, v)
} else {
c.Call.Method = obj
c.Call.Value = emitLoad(fn, v)
c.Call.Value = emitLoad(fn, v, nil)
}
for _, arg := range fn.Params[1:] {
c.Call.Args = append(c.Call.Args, arg)
}
emitTailCall(fn, &c)
emitTailCall(fn, &c, nil)
fn.finishBody()
return fn
}
@ -143,7 +144,7 @@ func makeWrapper(prog *Program, sel *types.Selection) *Function {
func createParams(fn *Function, start int) {
tparams := fn.Signature.Params()
for i, n := start, tparams.Len(); i < n; i++ {
fn.addParamObj(tparams.At(i))
fn.addParamObj(tparams.At(i), nil)
}
}
@ -184,13 +185,14 @@ func makeBound(prog *Program, obj *types.Func) *Function {
defer logStack("%s", description)()
}
fn = &Function{
name: obj.Name() + "$bound",
object: obj,
Signature: changeRecv(obj.Type().(*types.Signature), nil), // drop receiver
Synthetic: description,
Prog: prog,
pos: obj.Pos(),
name: obj.Name() + "$bound",
object: obj,
Signature: changeRecv(obj.Type().(*types.Signature), nil), // drop receiver
Synthetic: description,
Prog: prog,
functionBody: new(functionBody),
}
fn.initHTML(prog.PrintFunc)
fv := &FreeVar{name: "recv", typ: recvType(obj), parent: fn}
fn.FreeVars = []*FreeVar{fv}
@ -208,7 +210,7 @@ func makeBound(prog *Program, obj *types.Func) *Function {
for _, arg := range fn.Params {
c.Call.Args = append(c.Call.Args, arg)
}
emitTailCall(fn, &c)
emitTailCall(fn, &c, nil)
fn.finishBody()
prog.bounds[obj] = fn

5
vendor/honnef.co/go/tools/ir/write.go vendored Normal file
View File

@ -0,0 +1,5 @@
package ir
func NewJump(parent *BasicBlock) *Jump {
return &Jump{anInstruction{block: parent}, ""}
}

View File

@ -3,6 +3,7 @@ package lint // import "honnef.co/go/tools/lint"
import (
"bytes"
"encoding/gob"
"fmt"
"go/scanner"
"go/token"
@ -17,6 +18,7 @@ import (
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/packages"
"honnef.co/go/tools/config"
"honnef.co/go/tools/internal/cache"
)
type Documentation struct {
@ -62,7 +64,7 @@ type LineIgnore struct {
Line int
Checks []string
Matched bool
Pos token.Pos
Pos token.Position
}
func (li *LineIgnore) Match(p Problem) bool {
@ -119,6 +121,21 @@ type Problem struct {
Message string
Check string
Severity Severity
Related []Related
}
type Related struct {
Pos token.Position
End token.Position
Message string
}
func (p Problem) Equal(o Problem) bool {
return p.Pos == o.Pos &&
p.End == o.End &&
p.Message == o.Message &&
p.Check == o.Check &&
p.Severity == o.Severity
}
func (p *Problem) String() string {
@ -132,6 +149,7 @@ type Linter struct {
GoVersion int
Config config.Config
Stats Stats
RepeatAnalyzers uint
}
type CumulativeChecker interface {
@ -184,6 +202,7 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error
return nil, err
}
r.goVersion = l.GoVersion
r.repeatAnalyzers = l.RepeatAnalyzers
pkgs, err := r.Run(cfg, patterns, allowedAnalyzers, hasCumulative)
if err != nil {
@ -264,10 +283,12 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error
}
atomic.StoreUint32(&r.stats.State, StateCumulative)
var problems []Problem
for _, cum := range l.CumulativeCheckers {
for _, res := range cum.Result() {
pkg := tpkgToPkg[res.Pkg()]
if pkg == nil {
panic(fmt.Sprintf("analyzer %s flagged object %s in package %s, a package that we aren't tracking", cum.Analyzer(), res, res.Pkg()))
}
allowedChecks := FilterChecks(allowedAnalyzers, pkg.cfg.Merge(l.Config).Checks)
if allowedChecks[cum.Analyzer().Name] {
pos := DisplayPosition(pkg.Fset, res.Pos())
@ -278,21 +299,51 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error
continue
}
p := cum.ProblemObject(pkg.Fset, res)
problems = append(problems, p)
pkg.problems = append(pkg.problems, p)
}
}
}
for _, pkg := range pkgs {
if !pkg.fromSource {
// Don't cache packages that we loaded from the cache
continue
}
cpkg := cachedPackage{
Problems: pkg.problems,
Ignores: pkg.ignores,
Config: pkg.cfg,
}
buf := &bytes.Buffer{}
if err := gob.NewEncoder(buf).Encode(cpkg); err != nil {
return nil, err
}
id := cache.Subkey(pkg.actionID, "data "+r.problemsCacheKey)
if err := r.cache.PutBytes(id, buf.Bytes()); err != nil {
return nil, err
}
}
var problems []Problem
// Deduplicate line ignores. When U1000 processes a package and
// its test variant, it will only emit a single problem for an
// unused object, not two problems. We will, however, have two
// line ignores, one per package. Without deduplication, one line
// ignore will be marked as matched, while the other one won't,
// subsequently reporting a "this linter directive didn't match
// anything" error.
ignores := map[token.Position]Ignore{}
for _, pkg := range pkgs {
for _, ig := range pkg.ignores {
for i := range pkg.problems {
p := &pkg.problems[i]
if ig.Match(*p) {
p.Severity = Ignored
if lig, ok := ig.(*LineIgnore); ok {
ig = ignores[lig.Pos]
if ig == nil {
ignores[lig.Pos] = lig
ig = lig
}
}
for i := range problems {
p := &problems[i]
for i := range pkg.problems {
p := &pkg.problems[i]
if ig.Match(*p) {
p.Severity = Ignored
}
@ -318,6 +369,7 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error
if !ok {
continue
}
ig = ignores[ig.Pos].(*LineIgnore)
if ig.Matched {
continue
}
@ -338,7 +390,7 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error
continue
}
p := Problem{
Pos: DisplayPosition(pkg.Fset, ig.Pos),
Pos: ig.Pos,
Message: "this linter directive didn't match anything; should it be removed?",
Check: "",
}
@ -372,7 +424,7 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error
for i, p := range problems[1:] {
// We may encounter duplicate problems because one file
// can be part of many packages.
if problems[i] != p {
if !problems[i].Equal(p) {
out = append(out, p)
}
}
@ -422,10 +474,6 @@ func FilterChecks(allChecks []*analysis.Analyzer, checks []string) map[string]bo
return allowedChecks
}
type Positioner interface {
Pos() token.Pos
}
func DisplayPosition(fset *token.FileSet, p token.Pos) token.Position {
if p == token.NoPos {
return token.Position{}

View File

@ -4,283 +4,14 @@ package lintdsl
import (
"bytes"
"flag"
"fmt"
"go/ast"
"go/constant"
"go/printer"
"go/token"
"go/types"
"strings"
"go/format"
"golang.org/x/tools/go/analysis"
"honnef.co/go/tools/facts"
"honnef.co/go/tools/lint"
"honnef.co/go/tools/ssa"
"honnef.co/go/tools/pattern"
)
type packager interface {
Package() *ssa.Package
}
func CallName(call *ssa.CallCommon) string {
if call.IsInvoke() {
return ""
}
switch v := call.Value.(type) {
case *ssa.Function:
fn, ok := v.Object().(*types.Func)
if !ok {
return ""
}
return lint.FuncName(fn)
case *ssa.Builtin:
return v.Name()
}
return ""
}
func IsCallTo(call *ssa.CallCommon, name string) bool { return CallName(call) == name }
func IsType(T types.Type, name string) bool { return types.TypeString(T, nil) == name }
func FilterDebug(instr []ssa.Instruction) []ssa.Instruction {
var out []ssa.Instruction
for _, ins := range instr {
if _, ok := ins.(*ssa.DebugRef); !ok {
out = append(out, ins)
}
}
return out
}
func IsExample(fn *ssa.Function) bool {
if !strings.HasPrefix(fn.Name(), "Example") {
return false
}
f := fn.Prog.Fset.File(fn.Pos())
if f == nil {
return false
}
return strings.HasSuffix(f.Name(), "_test.go")
}
func IsPointerLike(T types.Type) bool {
switch T := T.Underlying().(type) {
case *types.Interface, *types.Chan, *types.Map, *types.Signature, *types.Pointer:
return true
case *types.Basic:
return T.Kind() == types.UnsafePointer
}
return false
}
func IsIdent(expr ast.Expr, ident string) bool {
id, ok := expr.(*ast.Ident)
return ok && id.Name == ident
}
// isBlank returns whether id is the blank identifier "_".
// If id == nil, the answer is false.
func IsBlank(id ast.Expr) bool {
ident, _ := id.(*ast.Ident)
return ident != nil && ident.Name == "_"
}
func IsIntLiteral(expr ast.Expr, literal string) bool {
lit, ok := expr.(*ast.BasicLit)
return ok && lit.Kind == token.INT && lit.Value == literal
}
// Deprecated: use IsIntLiteral instead
func IsZero(expr ast.Expr) bool {
return IsIntLiteral(expr, "0")
}
func IsOfType(pass *analysis.Pass, expr ast.Expr, name string) bool {
return IsType(pass.TypesInfo.TypeOf(expr), name)
}
func IsInTest(pass *analysis.Pass, node lint.Positioner) bool {
// FIXME(dh): this doesn't work for global variables with
// initializers
f := pass.Fset.File(node.Pos())
return f != nil && strings.HasSuffix(f.Name(), "_test.go")
}
func IsInMain(pass *analysis.Pass, node lint.Positioner) bool {
if node, ok := node.(packager); ok {
return node.Package().Pkg.Name() == "main"
}
return pass.Pkg.Name() == "main"
}
func SelectorName(pass *analysis.Pass, expr *ast.SelectorExpr) string {
info := pass.TypesInfo
sel := info.Selections[expr]
if sel == nil {
if x, ok := expr.X.(*ast.Ident); ok {
pkg, ok := info.ObjectOf(x).(*types.PkgName)
if !ok {
// This shouldn't happen
return fmt.Sprintf("%s.%s", x.Name, expr.Sel.Name)
}
return fmt.Sprintf("%s.%s", pkg.Imported().Path(), expr.Sel.Name)
}
panic(fmt.Sprintf("unsupported selector: %v", expr))
}
return fmt.Sprintf("(%s).%s", sel.Recv(), sel.Obj().Name())
}
func IsNil(pass *analysis.Pass, expr ast.Expr) bool {
return pass.TypesInfo.Types[expr].IsNil()
}
func BoolConst(pass *analysis.Pass, expr ast.Expr) bool {
val := pass.TypesInfo.ObjectOf(expr.(*ast.Ident)).(*types.Const).Val()
return constant.BoolVal(val)
}
func IsBoolConst(pass *analysis.Pass, expr ast.Expr) bool {
// We explicitly don't support typed bools because more often than
// not, custom bool types are used as binary enums and the
// explicit comparison is desired.
ident, ok := expr.(*ast.Ident)
if !ok {
return false
}
obj := pass.TypesInfo.ObjectOf(ident)
c, ok := obj.(*types.Const)
if !ok {
return false
}
basic, ok := c.Type().(*types.Basic)
if !ok {
return false
}
if basic.Kind() != types.UntypedBool && basic.Kind() != types.Bool {
return false
}
return true
}
func ExprToInt(pass *analysis.Pass, expr ast.Expr) (int64, bool) {
tv := pass.TypesInfo.Types[expr]
if tv.Value == nil {
return 0, false
}
if tv.Value.Kind() != constant.Int {
return 0, false
}
return constant.Int64Val(tv.Value)
}
func ExprToString(pass *analysis.Pass, expr ast.Expr) (string, bool) {
val := pass.TypesInfo.Types[expr].Value
if val == nil {
return "", false
}
if val.Kind() != constant.String {
return "", false
}
return constant.StringVal(val), true
}
// Dereference returns a pointer's element type; otherwise it returns
// T.
func Dereference(T types.Type) types.Type {
if p, ok := T.Underlying().(*types.Pointer); ok {
return p.Elem()
}
return T
}
// DereferenceR returns a pointer's element type; otherwise it returns
// T. If the element type is itself a pointer, DereferenceR will be
// applied recursively.
func DereferenceR(T types.Type) types.Type {
if p, ok := T.Underlying().(*types.Pointer); ok {
return DereferenceR(p.Elem())
}
return T
}
func IsGoVersion(pass *analysis.Pass, minor int) bool {
version := pass.Analyzer.Flags.Lookup("go").Value.(flag.Getter).Get().(int)
return version >= minor
}
func CallNameAST(pass *analysis.Pass, call *ast.CallExpr) string {
switch fun := call.Fun.(type) {
case *ast.SelectorExpr:
fn, ok := pass.TypesInfo.ObjectOf(fun.Sel).(*types.Func)
if !ok {
return ""
}
return lint.FuncName(fn)
case *ast.Ident:
obj := pass.TypesInfo.ObjectOf(fun)
switch obj := obj.(type) {
case *types.Func:
return lint.FuncName(obj)
case *types.Builtin:
return obj.Name()
default:
return ""
}
default:
return ""
}
}
func IsCallToAST(pass *analysis.Pass, node ast.Node, name string) bool {
call, ok := node.(*ast.CallExpr)
if !ok {
return false
}
return CallNameAST(pass, call) == name
}
func IsCallToAnyAST(pass *analysis.Pass, node ast.Node, names ...string) bool {
for _, name := range names {
if IsCallToAST(pass, node, name) {
return true
}
}
return false
}
func Render(pass *analysis.Pass, x interface{}) string {
var buf bytes.Buffer
if err := printer.Fprint(&buf, pass.Fset, x); err != nil {
panic(err)
}
return buf.String()
}
func RenderArgs(pass *analysis.Pass, args []ast.Expr) string {
var ss []string
for _, arg := range args {
ss = append(ss, Render(pass, arg))
}
return strings.Join(ss, ", ")
}
func Preamble(f *ast.File) string {
cutoff := f.Package
if f.Doc != nil {
cutoff = f.Doc.Pos()
}
var out []string
for _, cmt := range f.Comments {
if cmt.Pos() >= cutoff {
break
}
out = append(out, cmt.Text())
}
return strings.Join(out, "\n")
}
func Inspect(node ast.Node, fn func(node ast.Node) bool) {
if node == nil {
return
@ -288,113 +19,40 @@ func Inspect(node ast.Node, fn func(node ast.Node) bool) {
ast.Inspect(node, fn)
}
func GroupSpecs(fset *token.FileSet, specs []ast.Spec) [][]ast.Spec {
if len(specs) == 0 {
return nil
func Match(pass *analysis.Pass, q pattern.Pattern, node ast.Node) (*pattern.Matcher, bool) {
// Note that we ignore q.Relevant callers of Match usually use
// AST inspectors that already filter on nodes we're interested
// in.
m := &pattern.Matcher{TypesInfo: pass.TypesInfo}
ok := m.Match(q.Root, node)
return m, ok
}
func MatchAndEdit(pass *analysis.Pass, before, after pattern.Pattern, node ast.Node) (*pattern.Matcher, []analysis.TextEdit, bool) {
m, ok := Match(pass, before, node)
if !ok {
return m, nil, false
}
groups := make([][]ast.Spec, 1)
groups[0] = append(groups[0], specs[0])
r := pattern.NodeToAST(after.Root, m.State)
buf := &bytes.Buffer{}
format.Node(buf, pass.Fset, r)
edit := []analysis.TextEdit{{
Pos: node.Pos(),
End: node.End(),
NewText: buf.Bytes(),
}}
return m, edit, true
}
for _, spec := range specs[1:] {
g := groups[len(groups)-1]
if fset.PositionFor(spec.Pos(), false).Line-1 !=
fset.PositionFor(g[len(g)-1].End(), false).Line {
groups = append(groups, nil)
}
groups[len(groups)-1] = append(groups[len(groups)-1], spec)
func Selector(x, sel string) *ast.SelectorExpr {
return &ast.SelectorExpr{
X: &ast.Ident{Name: x},
Sel: &ast.Ident{Name: sel},
}
return groups
}
func IsObject(obj types.Object, name string) bool {
var path string
if pkg := obj.Pkg(); pkg != nil {
path = pkg.Path() + "."
}
return path+obj.Name() == name
}
type Field struct {
Var *types.Var
Tag string
Path []int
}
// FlattenFields recursively flattens T and embedded structs,
// returning a list of fields. If multiple fields with the same name
// exist, all will be returned.
func FlattenFields(T *types.Struct) []Field {
return flattenFields(T, nil, nil)
}
func flattenFields(T *types.Struct, path []int, seen map[types.Type]bool) []Field {
if seen == nil {
seen = map[types.Type]bool{}
}
if seen[T] {
return nil
}
seen[T] = true
var out []Field
for i := 0; i < T.NumFields(); i++ {
field := T.Field(i)
tag := T.Tag(i)
np := append(path[:len(path):len(path)], i)
if field.Anonymous() {
if s, ok := Dereference(field.Type()).Underlying().(*types.Struct); ok {
out = append(out, flattenFields(s, np, seen)...)
}
} else {
out = append(out, Field{field, tag, np})
}
}
return out
}
func File(pass *analysis.Pass, node lint.Positioner) *ast.File {
pass.Fset.PositionFor(node.Pos(), true)
m := pass.ResultOf[facts.TokenFile].(map[*token.File]*ast.File)
return m[pass.Fset.File(node.Pos())]
}
// IsGenerated reports whether pos is in a generated file, It ignores
// //line directives.
func IsGenerated(pass *analysis.Pass, pos token.Pos) bool {
_, ok := Generator(pass, pos)
return ok
}
// Generator returns the generator that generated the file containing
// pos. It ignores //line directives.
func Generator(pass *analysis.Pass, pos token.Pos) (facts.Generator, bool) {
file := pass.Fset.PositionFor(pos, false).Filename
m := pass.ResultOf[facts.Generated].(map[string]facts.Generator)
g, ok := m[file]
return g, ok
}
func ReportfFG(pass *analysis.Pass, pos token.Pos, f string, args ...interface{}) {
file := lint.DisplayPosition(pass.Fset, pos).Filename
m := pass.ResultOf[facts.Generated].(map[string]facts.Generator)
if _, ok := m[file]; ok {
return
}
pass.Reportf(pos, f, args...)
}
func ReportNodef(pass *analysis.Pass, node ast.Node, format string, args ...interface{}) {
msg := fmt.Sprintf(format, args...)
pass.Report(analysis.Diagnostic{Pos: node.Pos(), End: node.End(), Message: msg})
}
func ReportNodefFG(pass *analysis.Pass, node ast.Node, format string, args ...interface{}) {
file := lint.DisplayPosition(pass.Fset, node.Pos()).Filename
m := pass.ResultOf[facts.Generated].(map[string]facts.Generator)
if _, ok := m[file]; ok {
return
}
ReportNodef(pass, node, format, args...)
// ExhaustiveTypeSwitch panics when called. It can be used to ensure
// that type switches are exhaustive.
func ExhaustiveTypeSwitch(v interface{}) {
panic(fmt.Sprintf("internal error: unhandled case %T", v))
}

View File

@ -39,7 +39,7 @@ func relativePositionString(pos token.Position) string {
}
type Statter interface {
Stats(total, errors, warnings int)
Stats(total, errors, warnings, ignored int)
}
type Formatter interface {
@ -51,7 +51,10 @@ type Text struct {
}
func (o Text) Format(p lint.Problem) {
fmt.Fprintf(o.W, "%v: %s\n", relativePositionString(p.Pos), p.String())
fmt.Fprintf(o.W, "%s: %s\n", relativePositionString(p.Pos), p.String())
for _, r := range p.Related {
fmt.Fprintf(o.W, "\t%s: %s\n", relativePositionString(r.Pos), r.Message)
}
}
type JSON struct {
@ -76,12 +79,18 @@ func (o JSON) Format(p lint.Problem) {
Line int `json:"line"`
Column int `json:"column"`
}
jp := struct {
Code string `json:"code"`
Severity string `json:"severity,omitempty"`
type related struct {
Location location `json:"location"`
End location `json:"end"`
Message string `json:"message"`
}
jp := struct {
Code string `json:"code"`
Severity string `json:"severity,omitempty"`
Location location `json:"location"`
End location `json:"end"`
Message string `json:"message"`
Related []related `json:"related,omitempty"`
}{
Code: p.Check,
Severity: severity(p.Severity),
@ -97,6 +106,21 @@ func (o JSON) Format(p lint.Problem) {
},
Message: p.Message,
}
for _, r := range p.Related {
jp.Related = append(jp.Related, related{
Location: location{
File: r.Pos.Filename,
Line: r.Pos.Line,
Column: r.Pos.Column,
},
End: location{
File: r.End.Filename,
Line: r.End.Line,
Column: r.End.Column,
},
Message: r.Message,
})
}
_ = json.NewEncoder(o.W).Encode(jp)
}
@ -123,13 +147,16 @@ func (o *Stylish) Format(p lint.Problem) {
o.tw = tabwriter.NewWriter(o.W, 0, 4, 2, ' ', 0)
}
fmt.Fprintf(o.tw, " (%d, %d)\t%s\t%s\n", pos.Line, pos.Column, p.Check, p.Message)
for _, r := range p.Related {
fmt.Fprintf(o.tw, " (%d, %d)\t\t %s\n", r.Pos.Line, r.Pos.Column, r.Message)
}
}
func (o *Stylish) Stats(total, errors, warnings int) {
func (o *Stylish) Stats(total, errors, warnings, ignored int) {
if o.tw != nil {
o.tw.Flush()
fmt.Fprintln(o.W)
}
fmt.Fprintf(o.W, " ✖ %d problems (%d errors, %d warnings)\n",
total, errors, warnings)
fmt.Fprintf(o.W, " ✖ %d problems (%d errors, %d warnings, %d ignored)\n",
total, errors, warnings, ignored)
}

View File

@ -23,7 +23,9 @@ import (
"runtime/pprof"
"strconv"
"strings"
"sync"
"sync/atomic"
"time"
"honnef.co/go/tools/config"
"honnef.co/go/tools/internal/cache"
@ -114,6 +116,8 @@ func FlagSet(name string) *flag.FlagSet {
flags.String("debug.memprofile", "", "Write memory profile to `file`")
flags.Bool("debug.version", false, "Print detailed version information about this program")
flags.Bool("debug.no-compile-errors", false, "Don't print compile errors")
flags.String("debug.measure-analyzers", "", "Write analysis measurements to `file`. `file` will be opened for appending if it already exists.")
flags.Uint("debug.repeat-analyzers", 0, "Run analyzers `num` times")
checks := list{"inherit"}
fail := list{"all"}
@ -153,6 +157,24 @@ func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs *
memProfile := fs.Lookup("debug.memprofile").Value.(flag.Getter).Get().(string)
debugVersion := fs.Lookup("debug.version").Value.(flag.Getter).Get().(bool)
debugNoCompile := fs.Lookup("debug.no-compile-errors").Value.(flag.Getter).Get().(bool)
debugRepeat := fs.Lookup("debug.repeat-analyzers").Value.(flag.Getter).Get().(uint)
var measureAnalyzers func(analysis *analysis.Analyzer, pkg *lint.Package, d time.Duration)
if path := fs.Lookup("debug.measure-analyzers").Value.(flag.Getter).Get().(string); path != "" {
f, err := os.OpenFile(path, os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0600)
if err != nil {
log.Fatal(err)
}
mu := &sync.Mutex{}
measureAnalyzers = func(analysis *analysis.Analyzer, pkg *lint.Package, d time.Duration) {
mu.Lock()
defer mu.Unlock()
if _, err := fmt.Fprintf(f, "%s\t%s\t%d\n", analysis.Name, pkg.ID, d.Nanoseconds()); err != nil {
log.Println("error writing analysis measurements:", err)
}
}
}
cfg := config.Config{}
cfg.Checks = *fs.Lookup("checks").Value.(*list)
@ -218,10 +240,12 @@ func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs *
}
ps, err := Lint(cs, cums, fs.Args(), &Options{
Tags: tags,
LintTests: tests,
GoVersion: goVersion,
Config: cfg,
Tags: tags,
LintTests: tests,
GoVersion: goVersion,
Config: cfg,
PrintAnalyzerMeasurement: measureAnalyzers,
RepeatAnalyzers: debugRepeat,
})
if err != nil {
fmt.Fprintln(os.Stderr, err)
@ -245,6 +269,7 @@ func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs *
total int
errors int
warnings int
ignored int
)
fail := *fs.Lookup("fail").Value.(*list)
@ -262,6 +287,7 @@ func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs *
continue
}
if p.Severity == lint.Ignored && !showIgnored {
ignored++
continue
}
if shouldExit[p.Check] {
@ -273,7 +299,7 @@ func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs *
f.Format(p)
}
if f, ok := f.(format.Statter); ok {
f.Stats(total, errors, warnings)
f.Stats(total, errors, warnings, ignored)
}
if errors > 0 {
exit(1)
@ -284,9 +310,11 @@ func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs *
type Options struct {
Config config.Config
Tags string
LintTests bool
GoVersion int
Tags string
LintTests bool
GoVersion int
PrintAnalyzerMeasurement func(analysis *analysis.Analyzer, pkg *lint.Package, d time.Duration)
RepeatAnalyzers uint
}
func computeSalt() ([]byte, error) {
@ -325,7 +353,9 @@ func Lint(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, paths []string
CumulativeCheckers: cums,
GoVersion: opt.GoVersion,
Config: opt.Config,
RepeatAnalyzers: opt.RepeatAnalyzers,
}
l.Stats.PrintAnalyzerMeasurement = opt.PrintAnalyzerMeasurement
cfg := &packages.Config{}
if opt.LintTests {
cfg.Tests = true
@ -368,7 +398,8 @@ func Lint(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, paths []string
}()
}
return l.Lint(cfg, paths)
ps, err := l.Lint(cfg, paths)
return ps, err
}
var posRe = regexp.MustCompile(`^(.+?):(\d+)(?::(\d+)?)?$`)
@ -390,3 +421,24 @@ func parsePos(pos string) token.Position {
Column: col,
}
}
// InitializeAnalyzers returns a copy of analyzers in which each
// analyzer has been renamed to its map key, had its Doc set from the
// matching entry in docs, and been given a default flag set (with a
// "go" target-version flag) if it had none.
//
// It panics if docs lacks an entry for any analyzer in the map.
func InitializeAnalyzers(docs map[string]*lint.Documentation, analyzers map[string]*analysis.Analyzer) map[string]*analysis.Analyzer {
	initialized := make(map[string]*analysis.Analyzer, len(analyzers))
	for name, a := range analyzers {
		// Work on a shallow copy so the caller's analyzers stay untouched.
		clone := *a
		clone.Name = name
		doc, ok := docs[name]
		if !ok {
			panic(fmt.Sprintf("missing documentation for check %s", name))
		}
		clone.Doc = doc.String()
		if clone.Flags.Usage == nil {
			fs := flag.NewFlagSet("", flag.PanicOnError)
			fs.Var(NewVersionFlag(), "go", "Target Go version")
			clone.Flags = *fs
		}
		initialized[name] = &clone
	}
	return initialized
}

View File

@ -1,6 +1,30 @@
package lint
/*
Package loading
Conceptually, package loading in the runner can be imagined as a
graph-shaped work list. We iteratively pop off leaf nodes (packages
that have no unloaded dependencies) and load data from export data,
our cache, or source.
Specifically, non-initial packages are loaded from export data and the
fact cache if possible, otherwise from source. Initial packages are
loaded from export data, the fact cache and the (problems, ignores,
config) cache if possible, otherwise from source.
The appeal of this approach is that it is both simple to implement and
easily parallelizable. Each leaf node can be processed independently,
and new leaf nodes appear as their dependencies are being processed.
The downside of this approach, however, is that we're doing more work
than necessary. Imagine an initial package A, which has the following
dependency chain: A->B->C->D. In the current implementation, we will
load all 4 packages. However, if package A can be loaded fully from
cached information, then none of its dependencies are necessary, and
we could avoid loading them.
Parallelism
Runner implements parallel processing of packages by spawning one
@ -19,6 +43,34 @@ all execute in parallel, while not wasting resources for long linear
chains or trying to process more subgraphs in parallel than the system
can handle.
Caching
We make use of several caches. These caches are Go's export data, our
facts cache, and our (problems, ignores, config) cache.
Initial packages will either be loaded from a combination of all three
caches, or from source. Non-initial packages will either be loaded
from a combination of export data and facts cache, or from source.
The facts cache is separate from the (problems, ignores, config) cache
because when we process non-initial packages, we generate facts, but
we discard problems and ignores.
The facts cache is keyed by (package, analyzer), whereas the
(problems, ignores, config) cache is keyed by (package, list of
analyses). The difference between the two exists because there are
only a handful of analyses that produce facts, but hundreds of
analyses that don't. Creating one cache entry per fact-generating
analysis is feasible, creating one cache entry per normal analysis has
significant performance and storage overheads.
The downside of keying by the list of analyses is, naturally, that a
change in the list of analyses changes the cache key. `staticcheck -checks
A` and `staticcheck -checks A,B` will therefore need their own cache
entries and not reuse each other's work. This problem does not affect
the facts cache.
*/
import (
@ -37,6 +89,7 @@ import (
"strings"
"sync"
"sync/atomic"
"time"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/packages"
@ -47,6 +100,11 @@ import (
"honnef.co/go/tools/loader"
)
// init registers the concrete Ignore implementations with gob, so
// that values stored behind the Ignore interface can be encoded and
// decoded when (de)serializing cached package data.
func init() {
	gob.Register(&FileIgnore{})
	gob.Register(&LineIgnore{})
}
// If enabled, abuse of the go/analysis API will lead to panics
const sanityCheck = true
@ -58,21 +116,43 @@ const sanityCheck = true
// This may change unused's behavior, however, as it may observe fewer
// interfaces from transitive dependencies.
// OPT(dh): every single package will have the same value for
// canClearTypes. We could move the Package.decUse method to runner to
// eliminate this field. This is probably not worth it, though. There
// are only thousands of packages, so the field only takes up
// kilobytes of memory.
// OPT(dh): do we really need the Package.gen field? it's based
// trivially on pkg.results and merely caches the result of a type
// assertion. How often do we actually use the field?
type Package struct {
// dependents is initially set to 1 plus the number of packages
// that directly import this package. It is atomically decreased
// by 1 every time a dependent has been processed or when the
// package itself has been processed. Once the value reaches zero,
// the package is no longer needed.
dependents uint64
*packages.Package
Imports []*Package
initial bool
Imports []*Package
initial bool
// fromSource is set to true for packages that have been loaded
// from source. This is the case for initial packages, packages
// with missing export data, and packages with no cached facts.
fromSource bool
hash string
done chan struct{}
// hash stores the package hash, as computed by packageHash
hash string
actionID cache.ActionID
done chan struct{}
resultsMu sync.Mutex
// results maps analyzer IDs to analyzer results
// results maps analyzer IDs to analyzer results. it is
// implemented as a deduplicating concurrent cache.
results []*result
cfg *config.Config
cfg *config.Config
// gen maps file names to the code generator that created them
gen map[string]facts.Generator
problems []Problem
ignores []Ignore
@ -82,12 +162,22 @@ type Package struct {
facts []map[types.Object][]analysis.Fact
pkgFacts [][]analysis.Fact
// canClearTypes is set to true if we can discard type
// information after the package and its dependents have been
// processed. This is the case when no cumulative checkers are
// being run.
canClearTypes bool
}
// cachedPackage is the unit stored in the (problems, ignores, config)
// cache for a package; loadPkg restores these fields onto a Package
// on a cache hit.
type cachedPackage struct {
	Problems []Problem      // diagnostics recorded for the package
	Ignores  []Ignore       // ignore directives parsed from the package
	Config   *config.Config // configuration the package was checked with
}
func (pkg *Package) decUse() {
atomic.AddUint64(&pkg.dependents, ^uint64(0))
if atomic.LoadUint64(&pkg.dependents) == 0 {
ret := atomic.AddUint64(&pkg.dependents, ^uint64(0))
if ret == 0 {
// nobody depends on this package anymore
if pkg.canClearTypes {
pkg.Types = nil
@ -108,16 +198,16 @@ type result struct {
}
type Runner struct {
ld loader.Loader
cache *cache.Cache
cache *cache.Cache
goVersion int
stats *Stats
repeatAnalyzers uint
analyzerIDs analyzerIDs
analyzerIDs analyzerIDs
problemsCacheKey string
// limits parallelism of loading packages
loadSem chan struct{}
goVersion int
stats *Stats
}
type analyzerIDs struct {
@ -225,6 +315,13 @@ func (ac *analysisAction) report(pass *analysis.Pass, d analysis.Diagnostic) {
Message: d.Message,
Check: pass.Analyzer.Name,
}
for _, r := range d.Related {
p.Related = append(p.Related, Related{
Pos: DisplayPosition(pass.Fset, r.Pos),
End: DisplayPosition(pass.Fset, r.End),
Message: r.Message,
})
}
ac.problems = append(ac.problems, p)
}
@ -278,6 +375,21 @@ func (r *Runner) runAnalysis(ac *analysisAction) (ret interface{}, err error) {
}
}
// loadCachedPackage attempts to fetch previously computed problems,
// ignores and configuration for pkg from the runner's cache. The
// boolean result reports whether the lookup succeeded.
func (r *Runner) loadCachedPackage(pkg *Package, analyzers []*analysis.Analyzer) (cachedPackage, bool) {
	// OPT(dh): we can cache this computation, it'll be the same for all packages
	key := cache.Subkey(pkg.actionID, "data "+r.problemsCacheKey)
	data, _, err := r.cache.GetBytes(key)
	if err != nil {
		// Cache miss or read failure; the caller falls back to analysis.
		return cachedPackage{}, false
	}
	var out cachedPackage
	dec := gob.NewDecoder(bytes.NewReader(data))
	if err := dec.Decode(&out); err != nil {
		// Treat undecodable entries the same as a cache miss.
		return cachedPackage{}, false
	}
	return out, true
}
func (r *Runner) loadCachedFacts(a *analysis.Analyzer, pkg *Package) ([]Fact, bool) {
if len(a.FactTypes) == 0 {
return nil, true
@ -285,10 +397,7 @@ func (r *Runner) loadCachedFacts(a *analysis.Analyzer, pkg *Package) ([]Fact, bo
var facts []Fact
// Look in the cache for facts
aID, err := passActionID(pkg, a)
if err != nil {
return nil, false
}
aID := passActionID(pkg, a)
aID = cache.Subkey(aID, "facts")
b, _, err := r.cache.GetBytes(aID)
if err != nil {
@ -378,9 +487,15 @@ func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (inter
}
// Then with this analyzer
ret, err := ac.analyzer.Run(pass)
if err != nil {
return nil, err
var ret interface{}
for i := uint(0); i < r.repeatAnalyzers+1; i++ {
var err error
t := time.Now()
ret, err = ac.analyzer.Run(pass)
r.stats.MeasureAnalyzer(ac.analyzer, ac.pkg, time.Since(t))
if err != nil {
return nil, err
}
}
if len(ac.analyzer.FactTypes) > 0 {
@ -404,16 +519,7 @@ func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (inter
}
}
buf := &bytes.Buffer{}
if err := gob.NewEncoder(buf).Encode(facts); err != nil {
return nil, err
}
aID, err := passActionID(ac.pkg, ac.analyzer)
if err != nil {
return nil, err
}
aID = cache.Subkey(aID, "facts")
if err := r.cache.PutBytes(aID, buf.Bytes()); err != nil {
if err := r.cacheData(facts, ac.pkg, ac.analyzer, "facts"); err != nil {
return nil, err
}
}
@ -421,6 +527,19 @@ func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (inter
return ret, nil
}
// cacheData gob-encodes v and stores it in the runner's cache under
// the action ID of (pkg, a) combined with subkey.
func (r *Runner) cacheData(v interface{}, pkg *Package, a *analysis.Analyzer, subkey string) error {
	var buf bytes.Buffer
	if err := gob.NewEncoder(&buf).Encode(v); err != nil {
		return err
	}
	id := cache.Subkey(passActionID(pkg, a), subkey)
	return r.cache.PutBytes(id, buf.Bytes())
}
func NewRunner(stats *Stats) (*Runner, error) {
cache, err := cache.Default()
if err != nil {
@ -438,9 +557,17 @@ func NewRunner(stats *Stats) (*Runner, error) {
// diagnostics as well as extracted ignore directives.
//
// Note that diagnostics have not been filtered at this point yet, to
// accomodate cumulative analyzes that require additional steps to
// accommodate cumulative analyzes that require additional steps to
// produce diagnostics.
func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analysis.Analyzer, hasCumulative bool) ([]*Package, error) {
checkerNames := make([]string, len(analyzers))
for i, a := range analyzers {
checkerNames[i] = a.Name
}
sort.Strings(checkerNames)
r.problemsCacheKey = strings.Join(checkerNames, " ")
var allAnalyzers []*analysis.Analyzer
r.analyzerIDs = analyzerIDs{m: map[*analysis.Analyzer]int{}}
id := 0
seen := map[*analysis.Analyzer]struct{}{}
@ -450,6 +577,7 @@ func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analy
return
}
seen[a] = struct{}{}
allAnalyzers = append(allAnalyzers, a)
r.analyzerIDs.m[a] = id
id++
for _, f := range a.FactTypes {
@ -468,6 +596,11 @@ func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analy
for _, a := range injectedAnalyses {
dfs(a)
}
// Run all analyzers on all packages (subject to further
// restrictions enforced later). This guarantees that if analyzer
// A1 depends on A2, and A2 has facts, that A2 will run on the
// dependencies of user-provided packages, even though A1 won't.
analyzers = allAnalyzers
var dcfg packages.Config
if cfg != nil {
@ -475,11 +608,10 @@ func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analy
}
atomic.StoreUint32(&r.stats.State, StateGraph)
initialPkgs, err := r.ld.Graph(dcfg, patterns...)
initialPkgs, err := loader.Graph(dcfg, patterns...)
if err != nil {
return nil, err
}
defer r.cache.Trim()
var allPkgs []*Package
@ -507,7 +639,8 @@ func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analy
m[l].Imports = append(m[l].Imports, m[v])
}
m[l].hash, err = packageHash(m[l])
m[l].hash, err = r.packageHash(m[l])
m[l].actionID = packageActionID(m[l])
if err != nil {
m[l].errs = append(m[l].errs, err)
}
@ -564,27 +697,36 @@ func parsePos(pos string) (token.Position, int, error) {
}, len(parts[0]), nil
}
// loadPkg loads a Go package. If the package is in the set of initial
// packages, it will be loaded from source, otherwise it will be
// loaded from export data. In the case that the package was loaded
// from export data, cached facts will also be loaded.
//
// Currently, only cached facts for this package will be loaded, not
// for any of its dependencies.
// loadPkg loads a Go package. It may be loaded from a combination of
// caches, or from source.
func (r *Runner) loadPkg(pkg *Package, analyzers []*analysis.Analyzer) error {
if pkg.Types != nil {
panic(fmt.Sprintf("internal error: %s has already been loaded", pkg.Package))
}
// Load type information
if pkg.initial {
// Load package from source
pkg.fromSource = true
return r.ld.LoadFromSource(pkg.Package)
// Try to load cached package
cpkg, ok := r.loadCachedPackage(pkg, analyzers)
if ok {
pkg.problems = cpkg.Problems
pkg.ignores = cpkg.Ignores
pkg.cfg = cpkg.Config
} else {
pkg.fromSource = true
return loader.LoadFromSource(pkg.Package)
}
}
// At this point we're either working with a non-initial package,
// or we managed to load cached problems for the package. We still
// need export data and facts.
// OPT(dh): we don't need type information for this package if no
// other package depends on it. this may be the case for initial
// packages.
// Load package from export data
if err := r.ld.LoadFromExport(pkg.Package); err != nil {
if err := loader.LoadFromExport(pkg.Package); err != nil {
// We asked Go to give us up to date export data, yet
// we can't load it. There must be something wrong.
//
@ -597,7 +739,7 @@ func (r *Runner) loadPkg(pkg *Package, analyzers []*analysis.Analyzer) error {
// FIXME(dh): we no longer reload from export data, so
// theoretically we should be able to continue
pkg.fromSource = true
if err := r.ld.LoadFromSource(pkg.Package); err != nil {
if err := loader.LoadFromSource(pkg.Package); err != nil {
return err
}
// Make sure this package can't be imported successfully
@ -658,13 +800,14 @@ func (r *Runner) loadPkg(pkg *Package, analyzers []*analysis.Analyzer) error {
dfs(a)
}
if failed {
pkg.fromSource = true
// XXX we added facts to the maps, we need to get rid of those
return r.ld.LoadFromSource(pkg.Package)
if !failed {
return nil
}
return nil
// We failed to load some cached facts
pkg.fromSource = true
// XXX we added facts to the maps, we need to get rid of those
return loader.LoadFromSource(pkg.Package)
}
type analysisError struct {
@ -695,7 +838,7 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) {
}()
// Ensure all packages have the generated map and config. This is
// required by interna of the runner. Analyses that themselves
// required by internals of the runner. Analyses that themselves
// make use of either have an explicit dependency so that other
// runners work correctly, too.
analyzers = append(analyzers[0:len(analyzers):len(analyzers)], injectedAnalyses...)
@ -766,7 +909,7 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) {
defer wg.Done()
// Only initial packages and packages with missing
// facts will have been loaded from source.
if pkg.initial || r.hasFacts(a) {
if pkg.initial || len(a.FactTypes) > 0 {
if _, err := r.runAnalysis(ac); err != nil {
errs[i] = analysisError{a, pkg, err}
return
@ -800,6 +943,8 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) {
// We can't process ignores at this point because `unused` needs
// to see more than one package to make its decision.
//
// OPT(dh): can't we guard this block of code by pkg.initial?
ignores, problems := parseDirectives(pkg.Package)
pkg.ignores = append(pkg.ignores, ignores...)
pkg.problems = append(pkg.problems, problems...)
@ -824,32 +969,6 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) {
// from processPkg.
}
// hasFacts reports whether an analysis exports any facts. An analysis
// that has a transitive dependency that exports facts is considered
// to be exporting facts.
func (r *Runner) hasFacts(a *analysis.Analyzer) bool {
	// Track visited analyzers by their dense ID to keep the traversal
	// linear even when the dependency graph shares nodes.
	seen := make([]bool, len(r.analyzerIDs.m))
	var visit func(*analysis.Analyzer) bool
	visit = func(a *analysis.Analyzer) bool {
		id := r.analyzerIDs.get(a)
		if seen[id] {
			return false
		}
		seen[id] = true
		if len(a.FactTypes) > 0 {
			return true
		}
		for _, req := range a.Requires {
			if visit(req) {
				return true
			}
		}
		return false
	}
	return visit(a)
}
func parseDirective(s string) (cmd string, args []string) {
if !strings.HasPrefix(s, "//lint:") {
return "", nil
@ -912,7 +1031,7 @@ func parseDirectives(pkg *packages.Package) ([]Ignore, []Problem) {
File: pos.Filename,
Line: pos.Line,
Checks: checks,
Pos: c.Pos(),
Pos: DisplayPosition(pkg.Fset, c.Pos()),
}
case "file-ignore":
ig = &FileIgnore{
@ -932,9 +1051,10 @@ func parseDirectives(pkg *packages.Package) ([]Ignore, []Problem) {
// packageHash computes a package's hash. The hash is based on all Go
// files that make up the package, as well as the hashes of imported
// packages.
func packageHash(pkg *Package) (string, error) {
func (r *Runner) packageHash(pkg *Package) (string, error) {
key := cache.NewHash("package hash")
fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath)
fmt.Fprintf(key, "go %d\n", r.goVersion)
for _, f := range pkg.CompiledGoFiles {
h, err := cache.FileHash(f)
if err != nil {
@ -943,6 +1063,28 @@ func packageHash(pkg *Package) (string, error) {
fmt.Fprintf(key, "file %s %x\n", f, h)
}
// Actually load the configuration to calculate its hash. This
// will take into consideration inheritance of configuration
// files, as well as the default configuration.
//
// OPT(dh): doing this means we'll load the config twice: once for
// computing the hash, and once when analyzing the package from
// source.
cdir := config.Dir(pkg.GoFiles)
if cdir == "" {
fmt.Fprintf(key, "file %s %x\n", config.ConfigName, [cache.HashSize]byte{})
} else {
cfg, err := config.Load(cdir)
if err != nil {
return "", err
}
h := cache.NewHash(config.ConfigName)
if _, err := h.Write([]byte(cfg.String())); err != nil {
return "", err
}
fmt.Fprintf(key, "file %s %x\n", config.ConfigName, h.Sum())
}
imps := make([]*Package, len(pkg.Imports))
copy(imps, pkg.Imports)
sort.Slice(imps, func(i, j int) bool {
@ -959,12 +1101,14 @@ func packageHash(pkg *Package) (string, error) {
return hex.EncodeToString(h[:]), nil
}
// passActionID computes an ActionID for an analysis pass.
func passActionID(pkg *Package, analyzer *analysis.Analyzer) (cache.ActionID, error) {
key := cache.NewHash("action ID")
func packageActionID(pkg *Package) cache.ActionID {
key := cache.NewHash("package ID")
fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath)
fmt.Fprintf(key, "pkghash %s\n", pkg.hash)
fmt.Fprintf(key, "analyzer %s\n", analyzer.Name)
return key.Sum(), nil
return key.Sum()
}
// passActionID computes an ActionID for an analysis pass.
func passActionID(pkg *Package, analyzer *analysis.Analyzer) cache.ActionID {
	// Derive the key from the package's action ID so that a change to
	// the package invalidates every per-analyzer cache entry for it.
	subkey := "analyzer " + analyzer.Name
	return cache.Subkey(pkg.actionID, subkey)
}

View File

@ -1,5 +1,11 @@
package lint
import (
"time"
"golang.org/x/tools/go/analysis"
)
const (
StateInitializing = 0
StateGraph = 1
@ -17,4 +23,16 @@ type Stats struct {
Problems uint32
ActiveWorkers uint32
TotalWorkers uint32
PrintAnalyzerMeasurement func(*analysis.Analyzer, *Package, time.Duration)
}
// AnalysisMeasurementKey identifies a single (analyzer, package)
// pairing for which a timing measurement was taken.
type AnalysisMeasurementKey struct {
	Analysis string // analyzer name
	Pkg      string // package identifier
}
// MeasureAnalyzer forwards one analyzer timing (analyzer, package,
// duration) to the PrintAnalyzerMeasurement callback, if one is set.
func (s *Stats) MeasureAnalyzer(analysis *analysis.Analyzer, pkg *Package, d time.Duration) {
	if s.PrintAnalyzerMeasurement == nil {
		return
	}
	s.PrintAnalyzerMeasurement(analysis, pkg, d)
}

View File

@ -1,6 +1,7 @@
package loader
import (
"errors"
"fmt"
"go/ast"
"go/parser"
@ -9,22 +10,17 @@ import (
"go/types"
"log"
"os"
"sync"
"golang.org/x/tools/go/gcexportdata"
"golang.org/x/tools/go/packages"
)
type Loader struct {
exportMu sync.RWMutex
}
// Graph resolves patterns and returns packages with all the
// information required to later load type information, and optionally
// syntax trees.
//
// The provided config can set any setting with the exception of Mode.
func (ld *Loader) Graph(cfg packages.Config, patterns ...string) ([]*packages.Package, error) {
func Graph(cfg packages.Config, patterns ...string) ([]*packages.Package, error) {
cfg.Mode = packages.NeedName | packages.NeedImports | packages.NeedDeps | packages.NeedExportsFile | packages.NeedFiles | packages.NeedCompiledGoFiles | packages.NeedTypesSizes
pkgs, err := packages.Load(&cfg, patterns...)
if err != nil {
@ -34,15 +30,29 @@ func (ld *Loader) Graph(cfg packages.Config, patterns ...string) ([]*packages.Pa
packages.Visit(pkgs, nil, func(pkg *packages.Package) {
pkg.Fset = fset
})
return pkgs, nil
n := 0
for _, pkg := range pkgs {
if len(pkg.CompiledGoFiles) == 0 && len(pkg.Errors) == 0 && pkg.PkgPath != "unsafe" {
// If a package consists only of test files, then
// go/packages incorrectly(?) returns an empty package for
// the non-test variant. Get rid of those packages. See
// #646.
//
// Do not, however, skip packages that have errors. Those,
// too, may have no files, but we want to print the
// errors.
continue
}
pkgs[n] = pkg
n++
}
return pkgs[:n], nil
}
// LoadFromExport loads a package from export data. All of its
// dependencies must have been loaded already.
func (ld *Loader) LoadFromExport(pkg *packages.Package) error {
ld.exportMu.Lock()
defer ld.exportMu.Unlock()
func LoadFromExport(pkg *packages.Package) error {
pkg.IllTyped = true
for path, pkg := range pkg.Imports {
if pkg.Types == nil {
@ -87,10 +97,7 @@ func (ld *Loader) LoadFromExport(pkg *packages.Package) error {
// LoadFromSource loads a package from source. All of its dependencies
// must have been loaded already.
func (ld *Loader) LoadFromSource(pkg *packages.Package) error {
ld.exportMu.RLock()
defer ld.exportMu.RUnlock()
func LoadFromSource(pkg *packages.Package) error {
pkg.IllTyped = true
pkg.Types = types.NewPackage(pkg.PkgPath, pkg.Name)
@ -121,6 +128,12 @@ func (ld *Loader) LoadFromSource(pkg *packages.Package) error {
if path == "unsafe" {
return types.Unsafe, nil
}
if path == "C" {
// go/packages doesn't tell us that cgo preprocessing
// failed. When we subsequently try to parse the package,
// we'll encounter the raw C import.
return nil, errors.New("cgo preprocessing failed")
}
imp := pkg.Imports[path]
if imp == nil {
return nil, nil

View File

@ -0,0 +1,242 @@
package pattern
import (
"fmt"
"go/ast"
"go/token"
"go/types"
"reflect"
)
// astTypes maps go/ast struct type names to their reflect.Type, so
// that NodeToAST can construct AST nodes by name via reflection.
var astTypes = map[string]reflect.Type{
	"Ellipsis":       reflect.TypeOf(ast.Ellipsis{}),
	"RangeStmt":      reflect.TypeOf(ast.RangeStmt{}),
	"AssignStmt":     reflect.TypeOf(ast.AssignStmt{}),
	"IndexExpr":      reflect.TypeOf(ast.IndexExpr{}),
	"Ident":          reflect.TypeOf(ast.Ident{}),
	"ValueSpec":      reflect.TypeOf(ast.ValueSpec{}),
	"GenDecl":        reflect.TypeOf(ast.GenDecl{}),
	"BinaryExpr":     reflect.TypeOf(ast.BinaryExpr{}),
	"ForStmt":        reflect.TypeOf(ast.ForStmt{}),
	"ArrayType":      reflect.TypeOf(ast.ArrayType{}),
	"DeferStmt":      reflect.TypeOf(ast.DeferStmt{}),
	"MapType":        reflect.TypeOf(ast.MapType{}),
	"ReturnStmt":     reflect.TypeOf(ast.ReturnStmt{}),
	"SliceExpr":      reflect.TypeOf(ast.SliceExpr{}),
	"StarExpr":       reflect.TypeOf(ast.StarExpr{}),
	"UnaryExpr":      reflect.TypeOf(ast.UnaryExpr{}),
	"SendStmt":       reflect.TypeOf(ast.SendStmt{}),
	"SelectStmt":     reflect.TypeOf(ast.SelectStmt{}),
	"ImportSpec":     reflect.TypeOf(ast.ImportSpec{}),
	"IfStmt":         reflect.TypeOf(ast.IfStmt{}),
	"GoStmt":         reflect.TypeOf(ast.GoStmt{}),
	"Field":          reflect.TypeOf(ast.Field{}),
	"SelectorExpr":   reflect.TypeOf(ast.SelectorExpr{}),
	"StructType":     reflect.TypeOf(ast.StructType{}),
	"KeyValueExpr":   reflect.TypeOf(ast.KeyValueExpr{}),
	"FuncType":       reflect.TypeOf(ast.FuncType{}),
	"FuncLit":        reflect.TypeOf(ast.FuncLit{}),
	"FuncDecl":       reflect.TypeOf(ast.FuncDecl{}),
	"ChanType":       reflect.TypeOf(ast.ChanType{}),
	"CallExpr":       reflect.TypeOf(ast.CallExpr{}),
	"CaseClause":     reflect.TypeOf(ast.CaseClause{}),
	"CommClause":     reflect.TypeOf(ast.CommClause{}),
	"CompositeLit":   reflect.TypeOf(ast.CompositeLit{}),
	"EmptyStmt":      reflect.TypeOf(ast.EmptyStmt{}),
	"SwitchStmt":     reflect.TypeOf(ast.SwitchStmt{}),
	"TypeSwitchStmt": reflect.TypeOf(ast.TypeSwitchStmt{}),
	"TypeAssertExpr": reflect.TypeOf(ast.TypeAssertExpr{}),
	"TypeSpec":       reflect.TypeOf(ast.TypeSpec{}),
	"InterfaceType":  reflect.TypeOf(ast.InterfaceType{}),
	"BranchStmt":     reflect.TypeOf(ast.BranchStmt{}),
	"IncDecStmt":     reflect.TypeOf(ast.IncDecStmt{}),
	"BasicLit":       reflect.TypeOf(ast.BasicLit{}),
}
// ASTToNode converts a go/ast value (a node, a token, a string, or a
// slice of nodes) into the pattern package's Node representation.
// It panics on *ast.File and on any type it does not know how to
// convert.
func ASTToNode(node interface{}) Node {
	switch node := node.(type) {
	case *ast.File:
		panic("cannot convert *ast.File to Node")
	case nil:
		return Nil{}
	case string:
		return String(node)
	case token.Token:
		return Token(node)
	case *ast.ExprStmt:
		// Expression statements carry no information of their own;
		// unwrap to the contained expression.
		return ASTToNode(node.X)
	case *ast.BlockStmt:
		if node == nil {
			return Nil{}
		}
		return ASTToNode(node.List)
	case *ast.FieldList:
		if node == nil {
			return Nil{}
		}
		return ASTToNode(node.List)
	case *ast.BasicLit:
		// Only a nil literal is special-cased here; a non-nil
		// *ast.BasicLit falls through to the generic struct handling
		// below.
		if node == nil {
			return Nil{}
		}
	case *ast.ParenExpr:
		// Parentheses are not represented in patterns; unwrap.
		return ASTToNode(node.X)
	}

	if node, ok := node.(ast.Node); ok {
		// Generic conversion for pointer-to-struct AST nodes: look up
		// the corresponding pattern struct by type name and convert
		// each field recursively, in declaration order.
		name := reflect.TypeOf(node).Elem().Name()
		T, ok := structNodes[name]
		if !ok {
			panic(fmt.Sprintf("internal error: unhandled type %T", node))
		}
		if reflect.ValueOf(node).IsNil() {
			// A typed nil pointer still satisfies ast.Node; treat it
			// like an absent node.
			return Nil{}
		}
		v := reflect.ValueOf(node).Elem()
		objs := make([]Node, T.NumField())
		for i := 0; i < T.NumField(); i++ {
			f := v.FieldByName(T.Field(i).Name)
			objs[i] = ASTToNode(f.Interface())
		}

		n, err := populateNode(name, objs, false)
		if err != nil {
			panic(fmt.Sprintf("internal error: %s", err))
		}
		return n
	}

	s := reflect.ValueOf(node)
	if s.Kind() == reflect.Slice {
		// Slices become cons-style Lists. An empty slice is the empty
		// List and a one-element slice collapses to the element itself.
		if s.Len() == 0 {
			return List{}
		}
		if s.Len() == 1 {
			return ASTToNode(s.Index(0).Interface())
		}

		// Build the list back to front so each element's Tail is the
		// already-converted remainder.
		tail := List{}
		for i := s.Len() - 1; i >= 0; i-- {
			head := ASTToNode(s.Index(i).Interface())
			l := List{
				Head: head,
				Tail: tail,
			}
			tail = l
		}
		return tail
	}

	panic(fmt.Sprintf("internal error: unhandled type %T", node))
}
// NodeToAST converts a pattern Node back into a go/ast value, using
// state to resolve bindings captured during matching. Depending on
// the node it returns an ast.Node, a slice of nodes, a token.Token, a
// string, or nil; it panics on nodes that have no AST equivalent
// (Builtin, Any, Object, Function, Not, Or) and on unresolved
// bindings.
func NodeToAST(node Node, state State) interface{} {
	switch node := node.(type) {
	case Binding:
		v, ok := state[node.Name]
		if !ok {
			// really we want to return an error here
			panic("XXX")
		}
		switch v := v.(type) {
		case types.Object:
			// A bound object is re-materialized as a plain identifier
			// carrying its name.
			return &ast.Ident{Name: v.Name()}
		default:
			return v
		}
	case Builtin, Any, Object, Function, Not, Or:
		// These pattern-only nodes have no AST representation.
		panic("XXX")
	case List:
		if (node == List{}) {
			return []ast.Node{}
		}
		// Flatten the cons list into a slice: convert the head, then
		// append the recursively converted tail.
		x := []ast.Node{NodeToAST(node.Head, state).(ast.Node)}
		x = append(x, NodeToAST(node.Tail, state).([]ast.Node)...)
		return x
	case Token:
		return token.Token(node)
	case String:
		return string(node)
	case Nil:
		return nil
	}

	// Generic conversion for struct-shaped pattern nodes: allocate the
	// go/ast struct of the same name and fill its fields one by one.
	name := reflect.TypeOf(node).Name()
	T, ok := astTypes[name]
	if !ok {
		panic(fmt.Sprintf("internal error: unhandled type %T", node))
	}
	v := reflect.ValueOf(node)
	out := reflect.New(T)
	for i := 0; i < T.NumField(); i++ {
		fNode := v.FieldByName(T.Field(i).Name)
		if (fNode == reflect.Value{}) {
			// The pattern struct has no field of this name; leave the
			// AST field at its zero value.
			continue
		}
		fAST := out.Elem().FieldByName(T.Field(i).Name)
		switch fAST.Type().Kind() {
		case reflect.Slice:
			c := reflect.ValueOf(NodeToAST(fNode.Interface().(Node), state))
			if c.Kind() != reflect.Slice {
				// it's a single node in the pattern, we have to wrap
				// it in a slice
				slice := reflect.MakeSlice(fAST.Type(), 1, 1)
				slice.Index(0).Set(c)
				c = slice
			}
			// Bridge between []ast.Node and []ast.Expr, converting
			// element-wise where the target slice type differs.
			switch fAST.Interface().(type) {
			case []ast.Node:
				switch cc := c.Interface().(type) {
				case []ast.Node:
					fAST.Set(c)
				case []ast.Expr:
					var slice []ast.Node
					for _, el := range cc {
						slice = append(slice, el)
					}
					fAST.Set(reflect.ValueOf(slice))
				default:
					panic("XXX")
				}
			case []ast.Expr:
				switch cc := c.Interface().(type) {
				case []ast.Node:
					var slice []ast.Expr
					for _, el := range cc {
						slice = append(slice, el.(ast.Expr))
					}
					fAST.Set(reflect.ValueOf(slice))
				case []ast.Expr:
					fAST.Set(c)
				default:
					panic("XXX")
				}
			default:
				panic("XXX")
			}
		case reflect.Int:
			// Int-kinded AST fields hold token values; accept either a
			// token's string spelling or a numeric token value.
			c := reflect.ValueOf(NodeToAST(fNode.Interface().(Node), state))
			switch c.Kind() {
			case reflect.String:
				tok, ok := tokensByString[c.Interface().(string)]
				if !ok {
					// really we want to return an error here
					panic("XXX")
				}
				fAST.SetInt(int64(tok))
			case reflect.Int:
				fAST.Set(c)
			default:
				panic(fmt.Sprintf("internal error: unexpected kind %s", c.Kind()))
			}
		default:
			r := NodeToAST(fNode.Interface().(Node), state)
			if r != nil {
				fAST.Set(reflect.ValueOf(r))
			}
		}
	}

	return out.Interface().(ast.Node)
}

273
vendor/honnef.co/go/tools/pattern/doc.go vendored Normal file
View File

@ -0,0 +1,273 @@
/*
Package pattern implements a simple language for pattern matching Go ASTs.
Design decisions and trade-offs
The language is designed specifically for the task of filtering ASTs
to simplify the implementation of analyses in staticcheck.
It is also intended to be trivial to parse and execute.
To that end, we make certain decisions that make the language more
suited to its task, while making certain queries infeasible.
Furthermore, it is fully expected that the majority of analyses will still require ordinary Go code
to further process the filtered AST, to make use of type information and to enforce complex invariants.
It is not our goal to design a scripting language for writing entire checks in.
The language
At its core, patterns are a representation of Go ASTs, allowing for the use of placeholders to enable pattern matching.
Their syntax is inspired by LISP and Haskell, but unlike LISP, the core unit of patterns isn't the list, but the node.
There is a fixed set of nodes, identified by name, and with the exception of the Or node, all nodes have a fixed number of arguments.
In addition to nodes, there are atoms, which represent basic units such as strings or the nil value.
Pattern matching is implemented via bindings, represented by the Binding node.
A Binding can match nodes and associate them with names, to later recall the nodes.
This allows for expressing "this node must be equal to that node" constraints.
To simplify writing and reading patterns, a small amount of additional syntax exists on top of nodes and atoms.
This additional syntax doesn't add any new features of its own, it simply provides shortcuts to creating nodes and atoms.
To show an example of a pattern, first consider this snippet of Go code:
if x := fn(); x != nil {
for _, v := range x {
println(v, x)
}
}
The corresponding AST expressed as an idiomatic pattern would look as follows:
(IfStmt
(AssignStmt (Ident "x") ":=" (CallExpr (Ident "fn") []))
(BinaryExpr (Ident "x") "!=" (Ident "nil"))
(RangeStmt
(Ident "_") (Ident "v") ":=" (Ident "x")
(CallExpr (Ident "println") [(Ident "v") (Ident "x")]))
nil)
Two things are worth noting about this representation.
First, the [el1 el2 ...] syntax is a short-hand for creating lists.
It is a short-hand for el1:el2:[], which itself is a short-hand for (List el1 (List el2 (List nil nil))).
Second, note the absence of a lot of lists in places that normally accept lists.
For example, assignment assigns a number of right-hands to a number of left-hands, yet our AssignStmt is lacking any form of list.
This is due to the fact that a single node can match a list of exactly one element.
Thus, the two following forms have identical matching behavior:
(AssignStmt (Ident "x") ":=" (CallExpr (Ident "fn") []))
(AssignStmt [(Ident "x")] ":=" [(CallExpr (Ident "fn") [])])
This section serves as an overview of the language's syntax.
More in-depth explanations of the matching behavior as well as an exhaustive list of node types follows in the coming sections.
Pattern matching
TODO write about pattern matching
- inspired by haskell syntax, but much, much simpler and naive
Node types
The language contains two kinds of nodes: those that map to nodes in the AST, and those that implement additional logic.
Nodes that map directly to AST nodes are named identically to the types in the go/ast package.
What follows is an exhaustive list of these nodes:
(ArrayType len elt)
(AssignStmt lhs tok rhs)
(BasicLit kind value)
(BinaryExpr x op y)
(BranchStmt tok label)
(CallExpr fun args)
(CaseClause list body)
(ChanType dir value)
(CommClause comm body)
(CompositeLit type elts)
(DeferStmt call)
(Ellipsis elt)
(EmptyStmt)
(Field names type tag)
(ForStmt init cond post body)
(FuncDecl recv name type body)
(FuncLit type body)
(FuncType params results)
(GenDecl specs)
(GoStmt call)
(Ident name)
(IfStmt init cond body else)
(ImportSpec name path)
(IncDecStmt x tok)
(IndexExpr x index)
(InterfaceType methods)
(KeyValueExpr key value)
(MapType key value)
(RangeStmt key value tok x body)
(ReturnStmt results)
(SelectStmt body)
(SelectorExpr x sel)
(SendStmt chan value)
(SliceExpr x low high max)
(StarExpr x)
(StructType fields)
(SwitchStmt init tag body)
(TypeAssertExpr)
(TypeSpec name type)
(TypeSwitchStmt init assign body)
(UnaryExpr op x)
(ValueSpec names type values)
Additionally, there are the String, Token and nil atoms.
Strings are double-quoted string literals, as in (Ident "someName").
Tokens are also represented as double-quoted string literals, but are converted to token.Token values in contexts that require tokens,
such as in (BinaryExpr x "<" y), where "<" is transparently converted to token.LSS during matching.
The keyword 'nil' denotes the nil value, which represents the absence of any value.
We also define the (List head tail) node, which is used to represent sequences of elements as a singly linked list.
The head is a single element, and the tail is the remainder of the list.
For example,
(List "foo" (List "bar" (List "baz" (List nil nil))))
represents a list of three elements, "foo", "bar" and "baz". There is dedicated syntax for writing lists, which looks as follows:
["foo" "bar" "baz"]
This syntax is itself syntactic sugar for the following form:
"foo":"bar":"baz":[]
This form is of particular interest for pattern matching, as it allows matching on the head and tail. For example,
"foo":"bar":_
would match any list with at least two elements, where the first two elements are "foo" and "bar". This is equivalent to writing
(List "foo" (List "bar" _))
Note that it is not possible to match from the end of the list.
That is, there is no way to express a query such as "a list of any length where the last element is foo".
Note that unlike in LISP, nil and empty lists are distinct from one another.
In patterns, with respect to lists, nil is akin to Go's untyped nil.
It will match a nil ast.Node, but it will not match a nil []ast.Expr. Nil will, however, match pointers to named types such as *ast.Ident.
Similarly, lists are akin to Go's
slices. An empty list will match both a nil and an empty []ast.Expr, but it will not match a nil ast.Node.
Due to the difference between nil and empty lists, an empty list is represented as (List nil nil), i.e. a list with no head or tail.
Similarly, a list of one element is represented as (List el (List nil nil)). Unlike in LISP, it cannot be represented by (List el nil).
Finally, there are nodes that implement special logic or matching behavior.
(Any) matches any value. The underscore (_) maps to this node, making the following two forms equivalent:
(Ident _)
(Ident (Any))
(Builtin name) matches a built-in identifier or function by name.
This is a type-aware variant of (Ident name).
Instead of only comparing the name, it resolves the object behind the name and makes sure it's a pre-declared identifier.
For example, in the following piece of code
func fn() {
println(true)
true := false
println(true)
}
the pattern
(Builtin "true")
will match exactly once, on the first use of 'true' in the function.
Subsequent occurrences of 'true' no longer refer to the pre-declared identifier.
(Object name) matches an identifier by name, but yields the
types.Object it refers to.
(Function name) matches ast.Idents and ast.SelectorExprs that refer to a function with a given fully qualified name.
For example, "net/url.PathEscape" matches the PathEscape function in the net/url package,
and "(net/url.EscapeError).Error" refers to the Error method on the net/url.EscapeError type,
either on an instance of the type, or on the type itself.
For example, the following patterns match the following lines of code:
(CallExpr (Function "fmt.Println") _) // pattern 1
(CallExpr (Function "(net/url.EscapeError).Error") _) // pattern 2
fmt.Println("hello, world") // matches pattern 1
var x url.EscapeError
x.Error() // matches pattern 2
(url.EscapeError).Error(x) // also matches pattern 2
(Binding name node) creates or uses a binding.
Bindings work like variable assignments, allowing referring to already matched nodes.
As an example, bindings are necessary to match self-assignment of the form "x = x",
since we need to express that the right-hand side is identical to the left-hand side.
If a binding's node is not nil, the matcher will attempt to match a node according to the pattern.
If a binding's node is nil, the binding will either recall an existing value, or match the Any node.
It is an error to provide a non-nil node to a binding that has already been bound.
Referring back to the earlier example, the following pattern will match self-assignment of idents:
(AssignStmt (Binding "lhs" (Ident _)) "=" (Binding "lhs" nil))
Because bindings are a crucial component of pattern matching, there is special syntax for creating and recalling bindings.
Lower-case names refer to bindings. If standing on its own, the name "foo" will be equivalent to (Binding "foo" nil).
If a name is followed by an at-sign (@) then it will create a binding for the node that follows.
Together, this allows us to rewrite the earlier example as follows:
(AssignStmt lhs@(Ident _) "=" lhs)
(Or nodes...) is a variadic node that tries matching each node until one succeeds. For example, the following pattern matches all idents of name "foo" or "bar":
(Ident (Or "foo" "bar"))
We could also have written
(Or (Ident "foo") (Ident "bar"))
and achieved the same result. We can also mix different kinds of nodes:
(Or (Ident "foo") (CallExpr (Ident "bar") _))
When using bindings inside of nodes used inside Or, all or none of the bindings will be bound.
That is, partially matched nodes that ultimately failed to match will not produce any bindings observable outside of the matching attempt.
We can thus write
(Or (Ident name) (CallExpr name))
and 'name' will either be a String if the first option matched, or an Ident or SelectorExpr if the second option matched.
(Not node)
The Not node negates a match. For example, (Not (Ident _)) will match all nodes that aren't identifiers.
ChanDir(0)
Automatic unnesting of AST nodes
The Go AST has several types of nodes that wrap other nodes.
To simplify matching, we automatically unwrap some of these nodes.
These nodes are ExprStmt (for using expressions in a statement context),
ParenExpr (for parenthesized expressions),
DeclStmt (for declarations in a statement context),
and LabeledStmt (for labeled statements).
Thus, the query
(FuncLit _ [(CallExpr _ _)])
will match a function literal containing a single function call,
even though in the actual Go AST, the CallExpr is nested inside an ExprStmt,
as function bodies are made up of sequences of statements.
On the flip-side, there is no way to specifically match these wrapper nodes.
For example, there is no way of searching for unnecessary parentheses, like in the following piece of Go code:
((x)) += 2
*/
package pattern

View File

@ -0,0 +1,50 @@
// +build gofuzz
package pattern
import (
"go/ast"
goparser "go/parser"
"go/token"
"os"
"path/filepath"
"strings"
)
// files is the fuzzing corpus: every Go source file found under the
// local Go installation's source tree.
var files []*ast.File

// init walks /usr/lib/go/src and parses every .go file it finds into
// the files corpus. Walk errors abort the process; files that fail to
// parse are silently skipped.
func init() {
	fset := token.NewFileSet()
	visit := func(path string, info os.FileInfo, err error) error {
		if err != nil {
			// XXX error handling
			panic(err)
		}
		if strings.HasSuffix(path, ".go") {
			if parsed, perr := goparser.ParseFile(fset, path, nil, 0); perr == nil {
				files = append(files, parsed)
			}
		}
		return nil
	}
	filepath.Walk("/usr/lib/go/src", visit)
}
// Fuzz is the go-fuzz entry point. It parses the input as a pattern
// and, on success, stringifies the result and matches it against the
// file corpus. It returns 1 for inputs that parsed and 0 otherwise;
// internal parser errors escalate to a panic so the fuzzer records
// them as crashes.
func Fuzz(data []byte) int {
	parser := &Parser{}
	pat, err := parser.Parse(string(data))
	if err == nil {
		_ = pat.Root.String()
		for _, file := range files {
			Match(pat.Root, file)
		}
		return 1
	}
	if strings.Contains(err.Error(), "internal error") {
		panic(err)
	}
	return 0
}

View File

@ -0,0 +1,221 @@
package pattern
import (
"fmt"
"go/token"
"unicode"
"unicode/utf8"
)
// lexer tokenizes a pattern string into items, delivering them on the
// items channel. It is run as a goroutine by the parser.
type lexer struct {
	f *token.File // tracks line positions for error reporting

	input string    // the string being lexed
	start int       // byte offset where the current item started
	pos   int       // current byte offset in input
	width int       // width in bytes of the last rune read, for backup
	items chan item // channel on which lexed items are delivered
}
// itemType identifies the kind of a lexed item.
type itemType int

// eof is returned by lexer.next when the input is exhausted.
const eof = -1

// The kinds of items the lexer produces.
const (
	itemError itemType = iota
	itemLeftParen
	itemRightParen
	itemLeftBracket
	itemRightBracket
	itemTypeName
	itemVariable
	itemAt
	itemColon
	itemBlank
	itemString
	itemEOF
)

// itemTypeNames maps item kinds to their human-readable spelling, as
// used in error messages.
var itemTypeNames = map[itemType]string{
	itemError:        "ERROR",
	itemLeftParen:    "(",
	itemRightParen:   ")",
	itemLeftBracket:  "[",
	itemRightBracket: "]",
	itemTypeName:     "TYPE",
	itemVariable:     "VAR",
	itemAt:           "@",
	itemColon:        ":",
	itemBlank:        "_",
	itemString:       "STRING",
	itemEOF:          "EOF",
}

// String returns a human-readable representation of the item type.
func (typ itemType) String() string {
	if s, ok := itemTypeNames[typ]; ok {
		return s
	}
	return fmt.Sprintf("itemType(%d)", typ)
}
// item is a single lexed token: its kind, its literal value, and its
// byte offset within the input.
type item struct {
	typ itemType
	val string
	pos int
}

// stateFn is one state of the lexer's state machine; it returns the
// next state, or nil to stop lexing.
type stateFn func(*lexer) stateFn

// run drives the state machine until a state returns nil, then closes
// the items channel to signal completion to the consumer.
func (l *lexer) run() {
	for state := lexStart; state != nil; {
		state = state(l)
	}
	close(l.items)
}

// emitValue emits an item of type t carrying an explicit value (used
// when the value differs from the raw input, e.g. unescaped strings)
// and starts a new item at the current position.
func (l *lexer) emitValue(t itemType, value string) {
	l.items <- item{t, value, l.start}
	l.start = l.pos
}

// emit emits an item of type t whose value is the raw input consumed
// since the previous emit, and starts a new item at the current
// position.
func (l *lexer) emit(t itemType) {
	l.items <- item{t, l.input[l.start:l.pos], l.start}
	l.start = l.pos
}
// lexStart is the initial (and default) lexer state. It dispatches on
// the next rune: single-rune tokens are emitted directly, a quote
// starts a string, an upper-case letter a type name, a lower-case
// letter a variable, and whitespace is skipped.
func lexStart(l *lexer) stateFn {
	switch r := l.next(); {
	case r == eof:
		l.emit(itemEOF)
		return nil
	case unicode.IsSpace(r):
		l.ignore()
	case r == '(':
		l.emit(itemLeftParen)
	case r == ')':
		l.emit(itemRightParen)
	case r == '[':
		l.emit(itemLeftBracket)
	case r == ']':
		l.emit(itemRightBracket)
	case r == '@':
		l.emit(itemAt)
	case r == ':':
		l.emit(itemColon)
	case r == '_':
		l.emit(itemBlank)
	case r == '"':
		// Put the quote back so lexString sees the whole literal.
		l.backup()
		return lexString
	case unicode.IsUpper(r):
		l.backup()
		return lexType
	case unicode.IsLower(r):
		l.backup()
		return lexVariable
	default:
		return l.errorf("unexpected character %c", r)
	}
	return lexStart
}
// next consumes and returns the next rune of the input, or eof when
// the input is exhausted. It records newline positions in the token
// file and remembers the rune's width so backup can undo one read.
func (l *lexer) next() (r rune) {
	if l.pos >= len(l.input) {
		l.width = 0
		return eof
	}
	r, l.width = utf8.DecodeRuneInString(l.input[l.pos:])
	if r == '\n' {
		l.f.AddLine(l.pos)
	}
	l.pos += l.width
	return r
}

// ignore discards the input consumed since the previous emit.
func (l *lexer) ignore() {
	l.start = l.pos
}

// backup un-reads the rune most recently returned by next. It may
// only be called once per call to next.
func (l *lexer) backup() {
	l.pos -= l.width
}

// errorf emits an error item with a formatted message and terminates
// the state machine by returning a nil state.
func (l *lexer) errorf(format string, args ...interface{}) stateFn {
	// TODO(dh): emit position information in errors
	l.items <- item{
		itemError,
		fmt.Sprintf(format, args...),
		l.start,
	}
	return nil
}
// isAlphaNumeric reports whether r is an ASCII letter or digit. It
// deliberately excludes underscores and non-ASCII letters: names in
// the pattern language are plain ASCII.
func isAlphaNumeric(r rune) bool {
	switch {
	case '0' <= r && r <= '9':
		return true
	case 'a' <= r && r <= 'z':
		return true
	case 'A' <= r && r <= 'Z':
		return true
	}
	return false
}
// lexString lexes a double-quoted string literal, processing the \"
// and \\ escape sequences, and emits the unescaped value as an
// itemString. Reaching end of input before the closing quote is an
// error.
func lexString(l *lexer) stateFn {
	l.next() // skip quote
	escape := false

	var runes []rune
	for {
		switch r := l.next(); r {
		case eof:
			return l.errorf("unterminated string")
		case '"':
			// An unescaped quote ends the literal; an escaped one is
			// part of it. (Rewritten to drop the else after return.)
			if !escape {
				l.emitValue(itemString, string(runes))
				return lexStart
			}
			runes = append(runes, '"')
			escape = false
		case '\\':
			if escape {
				runes = append(runes, '\\')
				escape = false
			} else {
				escape = true
			}
		default:
			runes = append(runes, r)
		}
	}
}
// lexType lexes a node type name: an upper-case letter (already
// validated by lexStart) followed by alphanumeric characters.
func lexType(l *lexer) stateFn {
	l.next() // consume the leading upper-case letter
	for isAlphaNumeric(l.next()) {
	}
	l.backup()
	l.emit(itemTypeName)
	return lexStart
}

// lexVariable lexes a variable (binding) name: a lower-case letter
// (already validated by lexStart) followed by alphanumeric
// characters.
func lexVariable(l *lexer) stateFn {
	l.next() // consume the leading lower-case letter
	for isAlphaNumeric(l.next()) {
	}
	l.backup()
	l.emit(itemVariable)
	return lexStart
}

View File

@ -0,0 +1,513 @@
package pattern
import (
"fmt"
"go/ast"
"go/token"
"go/types"
"reflect"
"honnef.co/go/tools/lint"
)
// tokensByString maps the pattern language's string spellings of Go
// tokens (literal kinds, operators, and a few keywords) to their
// token.Token values. It is used to transparently convert quoted
// strings into tokens in contexts that expect tokens.
var tokensByString = map[string]Token{
	"INT":    Token(token.INT),
	"FLOAT":  Token(token.FLOAT),
	"IMAG":   Token(token.IMAG),
	"CHAR":   Token(token.CHAR),
	"STRING": Token(token.STRING),
	"+":      Token(token.ADD),
	"-":      Token(token.SUB),
	"*":      Token(token.MUL),
	"/":      Token(token.QUO),
	"%":      Token(token.REM),
	"&":      Token(token.AND),
	"|":      Token(token.OR),
	"^":      Token(token.XOR),
	"<<":     Token(token.SHL),
	">>":     Token(token.SHR),
	"&^":     Token(token.AND_NOT),
	"+=":     Token(token.ADD_ASSIGN),
	"-=":     Token(token.SUB_ASSIGN),
	"*=":     Token(token.MUL_ASSIGN),
	"/=":     Token(token.QUO_ASSIGN),
	"%=":     Token(token.REM_ASSIGN),
	"&=":     Token(token.AND_ASSIGN),
	"|=":     Token(token.OR_ASSIGN),
	"^=":     Token(token.XOR_ASSIGN),
	"<<=":    Token(token.SHL_ASSIGN),
	">>=":    Token(token.SHR_ASSIGN),
	"&^=":    Token(token.AND_NOT_ASSIGN),
	"&&":     Token(token.LAND),
	"||":     Token(token.LOR),
	"<-":     Token(token.ARROW),
	"++":     Token(token.INC),
	"--":     Token(token.DEC),
	"==":     Token(token.EQL),
	"<":      Token(token.LSS),
	">":      Token(token.GTR),
	"=":      Token(token.ASSIGN),
	"!":      Token(token.NOT),
	"!=":     Token(token.NEQ),
	"<=":     Token(token.LEQ),
	">=":     Token(token.GEQ),
	":=":     Token(token.DEFINE),
	"...":    Token(token.ELLIPSIS),
	"IMPORT": Token(token.IMPORT),
	"VAR":    Token(token.VAR),
	"TYPE":   Token(token.TYPE),
	"CONST":  Token(token.CONST),
}
// maybeToken converts a String node that spells a Go token (such as
// "+" or ":=") into the corresponding Token node. Nodes that aren't
// strings, or strings that don't name a token, are returned unchanged
// and ok is false.
func maybeToken(node Node) (Node, bool) {
	s, isString := node.(String)
	if !isString {
		return node, false
	}
	tok, known := tokensByString[string(s)]
	if !known {
		return s, false
	}
	return tok, true
}

// isNil reports whether v is an untyped nil or the pattern language's
// Nil node.
func isNil(v interface{}) bool {
	if v == nil {
		return true
	}
	_, ok := v.(Nil)
	return ok
}
// matcher is implemented by all pattern nodes that can match a value.
// Match returns the value that was matched (possibly transformed) and
// whether the match succeeded.
type matcher interface {
	Match(*Matcher, interface{}) (interface{}, bool)
}

// State maps binding names to the values they captured during a
// match.
type State = map[string]interface{}

// Matcher matches a pattern against an AST, accumulating bindings in
// State. TypesInfo is required by type-aware nodes (Builtin, Object,
// Function).
type Matcher struct {
	TypesInfo *types.Info
	State     State
}

// fork returns a copy of the matcher with a copied State, so that a
// speculative match (e.g. one alternative of an Or) can't pollute the
// parent's bindings.
func (m *Matcher) fork() *Matcher {
	state := make(State, len(m.State))
	for k, v := range m.State {
		state[k] = v
	}
	return &Matcher{
		TypesInfo: m.TypesInfo,
		State:     state,
	}
}

// merge adopts the state of a forked matcher after its speculative
// match succeeded.
func (m *Matcher) merge(mc *Matcher) {
	m.State = mc.State
}

// Match resets the matcher's state and matches pattern a against AST
// node b, reporting success. Captured bindings remain available in
// m.State afterwards.
func (m *Matcher) Match(a Node, b ast.Node) bool {
	m.State = State{}
	_, ok := match(m, a, b)
	return ok
}

// Match matches pattern a against AST node b using a fresh Matcher
// and returns that matcher (holding any bindings) plus whether it
// matched. Note that the fresh Matcher has no TypesInfo, so
// type-aware nodes cannot succeed through this entry point.
func Match(a Node, b ast.Node) (*Matcher, bool) {
	m := &Matcher{}
	ret := m.Match(a, b)
	return m, ret
}
// Match two items, which may be (Node, AST) or (AST, AST)
//
// match is the central dispatch of the matcher. It transparently
// unwraps wrapper AST nodes (see the package documentation on
// automatic unnesting), delegates to matcher implementations and the
// Node/AST and AST/AST comparison helpers, and treats single
// expressions/statements as one-element slices where needed.
func match(m *Matcher, l, r interface{}) (interface{}, bool) {
	if _, ok := r.(Node); ok {
		panic("Node mustn't be on right side of match")
	}

	// Unwrap wrapper nodes on the left.
	switch l := l.(type) {
	case *ast.ParenExpr:
		return match(m, l.X, r)
	case *ast.ExprStmt:
		return match(m, l.X, r)
	case *ast.DeclStmt:
		return match(m, l.Decl, r)
	case *ast.LabeledStmt:
		return match(m, l.Stmt, r)
	case *ast.BlockStmt:
		return match(m, l.List, r)
	case *ast.FieldList:
		return match(m, l.List, r)
	}

	// Unwrap wrapper nodes on the right, mapping typed nils to
	// untyped nil where it matters.
	switch r := r.(type) {
	case *ast.ParenExpr:
		return match(m, l, r.X)
	case *ast.ExprStmt:
		return match(m, l, r.X)
	case *ast.DeclStmt:
		return match(m, l, r.Decl)
	case *ast.LabeledStmt:
		return match(m, l, r.Stmt)
	case *ast.BlockStmt:
		if r == nil {
			return match(m, l, nil)
		}
		return match(m, l, r.List)
	case *ast.FieldList:
		if r == nil {
			return match(m, l, nil)
		}
		return match(m, l, r.List)
	case *ast.BasicLit:
		if r == nil {
			return match(m, l, nil)
		}
	}

	// Pattern nodes with their own matching logic get full control.
	if l, ok := l.(matcher); ok {
		return l.Match(m, r)
	}

	if l, ok := l.(Node); ok {
		// Matching of pattern with concrete value
		return matchNodeAST(m, l, r)
	}

	if l == nil || r == nil {
		return nil, l == r
	}

	{
		ln, ok1 := l.(ast.Node)
		rn, ok2 := r.(ast.Node)
		if ok1 && ok2 {
			return matchAST(m, ln, rn)
		}
	}

	{
		// A recalled binding may hold a types.Object; compare it
		// against the object the right-hand identifier resolves to.
		obj, ok := l.(types.Object)
		if ok {
			switch r := r.(type) {
			case *ast.Ident:
				return obj, obj == m.TypesInfo.ObjectOf(r)
			case *ast.SelectorExpr:
				return obj, obj == m.TypesInfo.ObjectOf(r.Sel)
			default:
				return obj, false
			}
		}
	}

	{
		// Expression slices; a lone expression on either side is
		// treated as a one-element slice.
		ln, ok1 := l.([]ast.Expr)
		rn, ok2 := r.([]ast.Expr)
		if ok1 || ok2 {
			if ok1 && !ok2 {
				rn = []ast.Expr{r.(ast.Expr)}
			} else if !ok1 && ok2 {
				ln = []ast.Expr{l.(ast.Expr)}
			}
			if len(ln) != len(rn) {
				return nil, false
			}
			for i, ll := range ln {
				if _, ok := match(m, ll, rn[i]); !ok {
					return nil, false
				}
			}
			return r, true
		}
	}

	{
		// Same treatment for statement slices.
		ln, ok1 := l.([]ast.Stmt)
		rn, ok2 := r.([]ast.Stmt)
		if ok1 || ok2 {
			if ok1 && !ok2 {
				rn = []ast.Stmt{r.(ast.Stmt)}
			} else if !ok1 && ok2 {
				ln = []ast.Stmt{l.(ast.Stmt)}
			}
			if len(ln) != len(rn) {
				return nil, false
			}
			for i, ll := range ln {
				if _, ok := match(m, ll, rn[i]); !ok {
					return nil, false
				}
			}
			return r, true
		}
	}

	panic(fmt.Sprintf("unsupported comparison: %T and %T", l, r))
}
// Match a Node with an AST node
//
// matchNodeAST matches a pattern Node against a concrete AST value
// (an ast.Node, a slice of statements or expressions, or nil). A
// single-element slice may be matched by a non-List pattern node.
func matchNodeAST(m *Matcher, a Node, b interface{}) (interface{}, bool) {
	switch b := b.(type) {
	case []ast.Stmt:
		// 'a' is not a List or we'd be using its Match
		// implementation.
		if len(b) != 1 {
			return nil, false
		}
		return match(m, a, b[0])
	case []ast.Expr:
		// 'a' is not a List or we'd be using its Match
		// implementation.
		if len(b) != 1 {
			return nil, false
		}
		return match(m, a, b[0])
	case ast.Node:
		// Compare pattern and AST structs field by field; the pattern
		// type's fields are a subset of the AST type's, matched by
		// name.
		ra := reflect.ValueOf(a)
		rb := reflect.ValueOf(b).Elem()
		if ra.Type().Name() != rb.Type().Name() {
			return nil, false
		}
		for i := 0; i < ra.NumField(); i++ {
			af := ra.Field(i)
			fieldName := ra.Type().Field(i).Name
			bf := rb.FieldByName(fieldName)
			if (bf == reflect.Value{}) {
				// Fixed format string: %t is the boolean verb; %T
				// prints the dynamic type of b.
				panic(fmt.Sprintf("internal error: could not find field %s in type %T when comparing with %T", fieldName, b, a))
			}
			ai := af.Interface()
			bi := bf.Interface()
			if ai == nil {
				return b, bi == nil
			}
			if _, ok := match(m, ai.(Node), bi); !ok {
				return b, false
			}
		}
		return b, true
	case nil:
		// Only the Nil pattern matches an absent AST node.
		return nil, a == Nil{}
	default:
		panic(fmt.Sprintf("unhandled type %T", b))
	}
}
// Match two AST nodes
//
// matchAST structurally compares two concrete AST nodes of the same
// dynamic type, field by field. Position, resolved-object, and
// comment fields are skipped, as they don't affect structural
// equality.
func matchAST(m *Matcher, a, b ast.Node) (interface{}, bool) {
	ra := reflect.ValueOf(a)
	rb := reflect.ValueOf(b)

	if ra.Type() != rb.Type() {
		return nil, false
	}
	if ra.IsNil() || rb.IsNil() {
		// Both nil is a match; one nil is not.
		return rb, ra.IsNil() == rb.IsNil()
	}

	ra = ra.Elem()
	rb = rb.Elem()
	for i := 0; i < ra.NumField(); i++ {
		af := ra.Field(i)
		bf := rb.Field(i)
		if af.Type() == rtTokPos || af.Type() == rtObject || af.Type() == rtCommentGroup {
			continue
		}

		switch af.Kind() {
		case reflect.Slice:
			if af.Len() != bf.Len() {
				return nil, false
			}
			for j := 0; j < af.Len(); j++ {
				if _, ok := match(m, af.Index(j).Interface().(ast.Node), bf.Index(j).Interface().(ast.Node)); !ok {
					return nil, false
				}
			}
		case reflect.String:
			if af.String() != bf.String() {
				return nil, false
			}
		case reflect.Int:
			if af.Int() != bf.Int() {
				return nil, false
			}
		case reflect.Bool:
			if af.Bool() != bf.Bool() {
				return nil, false
			}
		case reflect.Ptr, reflect.Interface:
			if _, ok := match(m, af.Interface(), bf.Interface()); !ok {
				return nil, false
			}
		default:
			panic(fmt.Sprintf("internal error: unhandled kind %s (%T)", af.Kind(), af.Interface()))
		}
	}
	return b, true
}
// Match implements matcher. On first use of a binding name, the
// binding's node (or Any, when nil) is matched and the result stored
// in the matcher's state; a later use of the same name recalls the
// stored value and matches it against the candidate, which is how
// equality constraints between subtrees are expressed.
func (b Binding) Match(m *Matcher, node interface{}) (interface{}, bool) {
	if isNil(b.Node) {
		v, ok := m.State[b.Name]
		if ok {
			// Recall value
			return match(m, v, node)
		}
		// Matching anything
		b.Node = Any{}
	}

	// Store value
	if _, ok := m.State[b.Name]; ok {
		panic(fmt.Sprintf("binding already created: %s", b.Name))
	}
	// Renamed from 'new', which shadowed the predeclared function.
	matched, ret := match(m, b.Node, node)
	if ret {
		m.State[b.Name] = matched
	}
	return matched, ret
}
// Match implements matcher; Any matches any value.
func (Any) Match(m *Matcher, node interface{}) (interface{}, bool) {
	return node, true
}

// Match implements matcher. A List matches a slice by matching the
// head against the first element and the tail against the remainder;
// the empty List matches only an empty slice. A List never matches a
// non-slice value.
func (l List) Match(m *Matcher, node interface{}) (interface{}, bool) {
	v := reflect.ValueOf(node)
	if v.Kind() == reflect.Slice {
		if isNil(l.Head) {
			// The empty list matches only an empty slice.
			return node, v.Len() == 0
		}
		if v.Len() == 0 {
			return nil, false
		}
		// OPT(dh): don't check the entire tail if head didn't match
		_, ok1 := match(m, l.Head, v.Index(0).Interface())
		_, ok2 := match(m, l.Tail, v.Slice(1, v.Len()).Interface())
		return node, ok1 && ok2
	}
	// Our empty list does not equal an untyped Go nil. This way, we can
	// tell apart an if with no else and an if with an empty else.
	return nil, false
}

// Match implements matcher. A String matches an identical string, or
// a token.Token when the string spells a token (see maybeToken).
func (s String) Match(m *Matcher, node interface{}) (interface{}, bool) {
	switch o := node.(type) {
	case token.Token:
		if tok, ok := maybeToken(s); ok {
			return match(m, tok, node)
		}
		return nil, false
	case string:
		return o, string(s) == o
	default:
		return nil, false
	}
}

// Match implements matcher; a Token matches the identical
// token.Token.
func (tok Token) Match(m *Matcher, node interface{}) (interface{}, bool) {
	o, ok := node.(token.Token)
	if !ok {
		return nil, false
	}
	return o, token.Token(tok) == o
}

// Match implements matcher; Nil matches untyped nil and the Nil node
// (see isNil).
func (Nil) Match(m *Matcher, node interface{}) (interface{}, bool) {
	return nil, isNil(node)
}

// Match implements matcher. Builtin matches an identifier that
// resolves to the pre-declared (universe-scope) object of that name,
// then matches the builtin's name pattern against it.
func (builtin Builtin) Match(m *Matcher, node interface{}) (interface{}, bool) {
	ident, ok := node.(*ast.Ident)
	if !ok {
		return nil, false
	}
	obj := m.TypesInfo.ObjectOf(ident)
	if obj != types.Universe.Lookup(ident.Name) {
		return nil, false
	}
	return match(m, builtin.Name, ident.Name)
}

// Match implements matcher. Object matches an identifier by name but
// yields the types.Object the identifier refers to.
func (obj Object) Match(m *Matcher, node interface{}) (interface{}, bool) {
	ident, ok := node.(*ast.Ident)
	if !ok {
		return nil, false
	}

	id := m.TypesInfo.ObjectOf(ident)
	_, ok = match(m, obj.Name, ident.Name)
	return id, ok
}

// Match implements matcher. Function matches identifiers and selector
// expressions that refer to a function (or, for identifiers, a
// builtin), comparing the pattern against the function's fully
// qualified name and yielding its object.
func (fn Function) Match(m *Matcher, node interface{}) (interface{}, bool) {
	var name string
	var obj types.Object
	switch node := node.(type) {
	case *ast.Ident:
		obj = m.TypesInfo.ObjectOf(node)
		switch obj := obj.(type) {
		case *types.Func:
			name = lint.FuncName(obj)
		case *types.Builtin:
			name = obj.Name()
		default:
			return nil, false
		}
	case *ast.SelectorExpr:
		var ok bool
		obj, ok = m.TypesInfo.ObjectOf(node.Sel).(*types.Func)
		if !ok {
			return nil, false
		}
		name = lint.FuncName(obj.(*types.Func))
	default:
		return nil, false
	}

	_, ok := match(m, fn.Name, name)
	return obj, ok
}
// Match implements matcher. Or tries each alternative on a forked
// matcher; the first success has its bindings merged back into m, so
// failed attempts leave no bindings observable outside the attempt.
func (or Or) Match(m *Matcher, node interface{}) (interface{}, bool) {
	for _, opt := range or.Nodes {
		mc := m.fork()
		if ret, ok := match(mc, opt, node); ok {
			m.merge(mc)
			return ret, true
		}
	}
	return nil, false
}

// Match implements matcher. Not inverts the result of matching its
// child node against the candidate.
func (not Not) Match(m *Matcher, node interface{}) (interface{}, bool) {
	_, ok := match(m, not.Node, node)
	if ok {
		return nil, false
	}
	return node, true
}
var (
	// Types of fields in go/ast structs that we want to skip
	rtTokPos       = reflect.TypeOf(token.Pos(0))
	rtObject       = reflect.TypeOf((*ast.Object)(nil))
	rtCommentGroup = reflect.TypeOf((*ast.CommentGroup)(nil))
)

// Compile-time checks that every pattern node type implements
// matcher.
var (
	_ matcher = Binding{}
	_ matcher = Any{}
	_ matcher = List{}
	_ matcher = String("")
	_ matcher = Token(0)
	_ matcher = Nil{}
	_ matcher = Builtin{}
	_ matcher = Object{}
	_ matcher = Function{}
	_ matcher = Or{}
	_ matcher = Not{}
)

View File

@ -0,0 +1,455 @@
package pattern
import (
"fmt"
"go/ast"
"go/token"
"reflect"
)
// Pattern is a parsed pattern, ready for matching.
type Pattern struct {
	// Root is the pattern's root node.
	Root Node
	// Relevant contains instances of ast.Node that could potentially
	// initiate a successful match of the pattern.
	Relevant []reflect.Type
}

// MustParse parses a pattern (with type-aware nodes allowed) and
// panics on error. Per the Must convention, it is intended for
// initializing package-level pattern variables.
func MustParse(s string) Pattern {
	p := &Parser{AllowTypeInfo: true}
	pat, err := p.Parse(s)
	if err != nil {
		panic(err)
	}
	return pat
}
// roots computes the set of AST node types at which a match of node
// could begin, used to populate Pattern.Relevant and prune the search
// space.
func roots(node Node) []reflect.Type {
	switch node := node.(type) {
	case Or:
		// The union of the roots of all alternatives.
		var out []reflect.Type
		for _, el := range node.Nodes {
			out = append(out, roots(el)...)
		}
		return out
	case Not:
		return roots(node.Node)
	case Binding:
		return roots(node.Node)
	case Nil, nil:
		// this branch is reached via bindings
		return allTypes
	default:
		Ts, ok := nodeToASTTypes[reflect.TypeOf(node)]
		if !ok {
			panic(fmt.Sprintf("internal error: unhandled type %T", node))
		}
		return Ts
	}
}
// allTypes lists the pointer types of all AST nodes a pattern can
// match, used as the root set for nodes that can match anything.
var allTypes = []reflect.Type{
	reflect.TypeOf((*ast.RangeStmt)(nil)),
	reflect.TypeOf((*ast.AssignStmt)(nil)),
	reflect.TypeOf((*ast.IndexExpr)(nil)),
	reflect.TypeOf((*ast.Ident)(nil)),
	reflect.TypeOf((*ast.ValueSpec)(nil)),
	reflect.TypeOf((*ast.GenDecl)(nil)),
	reflect.TypeOf((*ast.BinaryExpr)(nil)),
	reflect.TypeOf((*ast.ForStmt)(nil)),
	reflect.TypeOf((*ast.ArrayType)(nil)),
	reflect.TypeOf((*ast.DeferStmt)(nil)),
	reflect.TypeOf((*ast.MapType)(nil)),
	reflect.TypeOf((*ast.ReturnStmt)(nil)),
	reflect.TypeOf((*ast.SliceExpr)(nil)),
	reflect.TypeOf((*ast.StarExpr)(nil)),
	reflect.TypeOf((*ast.UnaryExpr)(nil)),
	reflect.TypeOf((*ast.SendStmt)(nil)),
	reflect.TypeOf((*ast.SelectStmt)(nil)),
	reflect.TypeOf((*ast.ImportSpec)(nil)),
	reflect.TypeOf((*ast.IfStmt)(nil)),
	reflect.TypeOf((*ast.GoStmt)(nil)),
	reflect.TypeOf((*ast.Field)(nil)),
	reflect.TypeOf((*ast.SelectorExpr)(nil)),
	reflect.TypeOf((*ast.StructType)(nil)),
	reflect.TypeOf((*ast.KeyValueExpr)(nil)),
	reflect.TypeOf((*ast.FuncType)(nil)),
	reflect.TypeOf((*ast.FuncLit)(nil)),
	reflect.TypeOf((*ast.FuncDecl)(nil)),
	reflect.TypeOf((*ast.ChanType)(nil)),
	reflect.TypeOf((*ast.CallExpr)(nil)),
	reflect.TypeOf((*ast.CaseClause)(nil)),
	reflect.TypeOf((*ast.CommClause)(nil)),
	reflect.TypeOf((*ast.CompositeLit)(nil)),
	reflect.TypeOf((*ast.EmptyStmt)(nil)),
	reflect.TypeOf((*ast.SwitchStmt)(nil)),
	reflect.TypeOf((*ast.TypeSwitchStmt)(nil)),
	reflect.TypeOf((*ast.TypeAssertExpr)(nil)),
	reflect.TypeOf((*ast.TypeSpec)(nil)),
	reflect.TypeOf((*ast.InterfaceType)(nil)),
	reflect.TypeOf((*ast.BranchStmt)(nil)),
	reflect.TypeOf((*ast.IncDecStmt)(nil)),
	reflect.TypeOf((*ast.BasicLit)(nil)),
}
// nodeToASTTypes maps each pattern node type to the AST node types it
// can start a match at; a nil entry means the node can't be a match
// root on its own.
var nodeToASTTypes = map[reflect.Type][]reflect.Type{
	reflect.TypeOf(String("")):       nil,
	reflect.TypeOf(Token(0)):         nil,
	reflect.TypeOf(List{}):           {reflect.TypeOf((*ast.BlockStmt)(nil)), reflect.TypeOf((*ast.FieldList)(nil))},
	reflect.TypeOf(Builtin{}):        {reflect.TypeOf((*ast.Ident)(nil))},
	reflect.TypeOf(Object{}):         {reflect.TypeOf((*ast.Ident)(nil))},
	reflect.TypeOf(Function{}):       {reflect.TypeOf((*ast.Ident)(nil)), reflect.TypeOf((*ast.SelectorExpr)(nil))},
	reflect.TypeOf(Any{}):            allTypes,
	reflect.TypeOf(RangeStmt{}):      {reflect.TypeOf((*ast.RangeStmt)(nil))},
	reflect.TypeOf(AssignStmt{}):     {reflect.TypeOf((*ast.AssignStmt)(nil))},
	reflect.TypeOf(IndexExpr{}):      {reflect.TypeOf((*ast.IndexExpr)(nil))},
	reflect.TypeOf(Ident{}):          {reflect.TypeOf((*ast.Ident)(nil))},
	reflect.TypeOf(ValueSpec{}):      {reflect.TypeOf((*ast.ValueSpec)(nil))},
	reflect.TypeOf(GenDecl{}):        {reflect.TypeOf((*ast.GenDecl)(nil))},
	reflect.TypeOf(BinaryExpr{}):     {reflect.TypeOf((*ast.BinaryExpr)(nil))},
	reflect.TypeOf(ForStmt{}):        {reflect.TypeOf((*ast.ForStmt)(nil))},
	reflect.TypeOf(ArrayType{}):      {reflect.TypeOf((*ast.ArrayType)(nil))},
	reflect.TypeOf(DeferStmt{}):      {reflect.TypeOf((*ast.DeferStmt)(nil))},
	reflect.TypeOf(MapType{}):        {reflect.TypeOf((*ast.MapType)(nil))},
	reflect.TypeOf(ReturnStmt{}):     {reflect.TypeOf((*ast.ReturnStmt)(nil))},
	reflect.TypeOf(SliceExpr{}):      {reflect.TypeOf((*ast.SliceExpr)(nil))},
	reflect.TypeOf(StarExpr{}):       {reflect.TypeOf((*ast.StarExpr)(nil))},
	reflect.TypeOf(UnaryExpr{}):      {reflect.TypeOf((*ast.UnaryExpr)(nil))},
	reflect.TypeOf(SendStmt{}):       {reflect.TypeOf((*ast.SendStmt)(nil))},
	reflect.TypeOf(SelectStmt{}):     {reflect.TypeOf((*ast.SelectStmt)(nil))},
	reflect.TypeOf(ImportSpec{}):     {reflect.TypeOf((*ast.ImportSpec)(nil))},
	reflect.TypeOf(IfStmt{}):         {reflect.TypeOf((*ast.IfStmt)(nil))},
	reflect.TypeOf(GoStmt{}):         {reflect.TypeOf((*ast.GoStmt)(nil))},
	reflect.TypeOf(Field{}):          {reflect.TypeOf((*ast.Field)(nil))},
	reflect.TypeOf(SelectorExpr{}):   {reflect.TypeOf((*ast.SelectorExpr)(nil))},
	reflect.TypeOf(StructType{}):     {reflect.TypeOf((*ast.StructType)(nil))},
	reflect.TypeOf(KeyValueExpr{}):   {reflect.TypeOf((*ast.KeyValueExpr)(nil))},
	reflect.TypeOf(FuncType{}):       {reflect.TypeOf((*ast.FuncType)(nil))},
	reflect.TypeOf(FuncLit{}):        {reflect.TypeOf((*ast.FuncLit)(nil))},
	reflect.TypeOf(FuncDecl{}):       {reflect.TypeOf((*ast.FuncDecl)(nil))},
	reflect.TypeOf(ChanType{}):       {reflect.TypeOf((*ast.ChanType)(nil))},
	reflect.TypeOf(CallExpr{}):       {reflect.TypeOf((*ast.CallExpr)(nil))},
	reflect.TypeOf(CaseClause{}):     {reflect.TypeOf((*ast.CaseClause)(nil))},
	reflect.TypeOf(CommClause{}):     {reflect.TypeOf((*ast.CommClause)(nil))},
	reflect.TypeOf(CompositeLit{}):   {reflect.TypeOf((*ast.CompositeLit)(nil))},
	reflect.TypeOf(EmptyStmt{}):      {reflect.TypeOf((*ast.EmptyStmt)(nil))},
	reflect.TypeOf(SwitchStmt{}):     {reflect.TypeOf((*ast.SwitchStmt)(nil))},
	reflect.TypeOf(TypeSwitchStmt{}): {reflect.TypeOf((*ast.TypeSwitchStmt)(nil))},
	reflect.TypeOf(TypeAssertExpr{}): {reflect.TypeOf((*ast.TypeAssertExpr)(nil))},
	reflect.TypeOf(TypeSpec{}):       {reflect.TypeOf((*ast.TypeSpec)(nil))},
	reflect.TypeOf(InterfaceType{}):  {reflect.TypeOf((*ast.InterfaceType)(nil))},
	reflect.TypeOf(BranchStmt{}):     {reflect.TypeOf((*ast.BranchStmt)(nil))},
	reflect.TypeOf(IncDecStmt{}):     {reflect.TypeOf((*ast.IncDecStmt)(nil))},
	reflect.TypeOf(BasicLit{}):       {reflect.TypeOf((*ast.BasicLit)(nil))},
}
// requiresTypeInfo names the node types that need a Matcher with
// TypesInfo to work; the parser rejects them unless AllowTypeInfo is
// set.
var requiresTypeInfo = map[string]bool{
	"Function": true,
	"Builtin":  true,
	"Object":   true,
}

// Parser parses the textual representation of a pattern into Nodes.
type Parser struct {
	// Allow nodes that rely on type information
	AllowTypeInfo bool

	lex   *lexer    // the lexer feeding items
	cur   item      // most recently consumed item
	last  *item     // single-item pushback buffer for rewind
	items chan item // the lexer's item channel
}
// Parse parses a single pattern expression from s and returns the
// resulting Pattern. All parser state is reset at the start, so the
// same Parser can be used for multiple inputs.
func (p *Parser) Parse(s string) (Pattern, error) {
	// Reset state left over from a previous Parse call.
	p.cur = item{}
	p.last = nil
	p.items = nil

	fset := token.NewFileSet()
	p.lex = &lexer{
		f:     fset.AddFile("<input>", -1, len(s)),
		input: s,
		items: make(chan item),
	}
	// The lexer emits tokens over an unbuffered channel from its own
	// goroutine.
	go p.lex.run()
	p.items = p.lex.items
	root, err := p.node()
	if err != nil {
		// drain lexer if parsing failed
		// (otherwise the lexer goroutine would block forever on its
		// next channel send)
		for range p.lex.items {
		}
		return Pattern{}, err
	}
	// A pattern is exactly one node; any trailing token is an error.
	if item := <-p.lex.items; item.typ != itemEOF {
		return Pattern{}, fmt.Errorf("unexpected token %s after end of pattern", item.typ)
	}
	return Pattern{
		Root:     root,
		Relevant: roots(root),
	}, nil
}
// next consumes and returns the upcoming token. A token pushed back by
// rewind takes precedence over the lexer channel; once the channel is
// closed, next keeps returning a synthetic eof item.
func (p *Parser) next() item {
	if p.last != nil {
		buffered := *p.last
		p.last = nil
		return buffered
	}
	tok, ok := <-p.items
	if !ok {
		tok = item{typ: eof}
	}
	p.cur = tok
	return p.cur
}
// rewind pushes the current token back so the next call to next
// returns it again. Only a single token of lookahead is supported.
func (p *Parser) rewind() {
	p.last = &p.cur
}

// peek returns the upcoming token without consuming it.
func (p *Parser) peek() item {
	n := p.next()
	p.rewind()
	return n
}
// accept consumes and returns the upcoming token if it has the given
// type. Otherwise the token is pushed back and accept reports false.
func (p *Parser) accept(typ itemType) (item, bool) {
	if tok := p.next(); tok.typ == typ {
		return tok, true
	}
	p.rewind()
	return item{}, false
}
// unexpectedToken builds an error describing the current token,
// including its source position and a description of what would have
// been valid in its place. Lexer errors are passed through as-is.
func (p *Parser) unexpectedToken(valid string) error {
	tok := p.cur
	if tok.typ == itemError {
		return fmt.Errorf("error lexing input: %s", tok.val)
	}
	// Value-carrying tokens are shown verbatim; everything else is
	// shown as its quoted token name.
	got := "'" + tok.typ.String() + "'"
	if tok.typ == itemTypeName || tok.typ == itemVariable || tok.typ == itemString {
		got = tok.val
	}
	return fmt.Errorf("%s: expected %s, found %s", p.lex.f.Position(token.Pos(tok.pos)), valid, got)
}
// node parses one parenthesized node expression:
//
//	Node ::= itemLeftParen itemTypeName Object* itemRightParen
func (p *Parser) node() (Node, error) {
	if _, ok := p.accept(itemLeftParen); !ok {
		return nil, p.unexpectedToken("'('")
	}
	typ, ok := p.accept(itemTypeName)
	if !ok {
		return nil, p.unexpectedToken("Node type")
	}
	// Collect argument objects until the closing parenthesis.
	var objs []Node
	for {
		if _, ok := p.accept(itemRightParen); ok {
			break
		} else {
			// accept already rewound on failure; this second rewind is
			// a harmless repeat (p.cur is unchanged between the two).
			p.rewind()
			obj, err := p.object()
			if err != nil {
				return nil, err
			}
			objs = append(objs, obj)
		}
	}
	return p.populateNode(typ.val, objs)
}
// populateNode instantiates the struct registered under typ in
// structNodes and fills its fields, in declaration order, from objs.
// A node with exactly one slice-typed field is variadic and absorbs
// all of objs. String-typed fields (only Binding.Name at present) must
// be given a String object. Nodes listed in requiresTypeInfo are
// rejected unless allowTypeInfo is set.
func populateNode(typ string, objs []Node, allowTypeInfo bool) (Node, error) {
	T, ok := structNodes[typ]
	if !ok {
		return nil, fmt.Errorf("unknown node %s", typ)
	}
	if !allowTypeInfo && requiresTypeInfo[typ] {
		return nil, fmt.Errorf("Node %s requires type information", typ)
	}
	pv := reflect.New(T)
	v := pv.Elem()
	if v.NumField() == 1 {
		f := v.Field(0)
		if f.Type().Kind() == reflect.Slice {
			// Variadic node: the single slice field takes every object.
			f.Set(reflect.AppendSlice(f, reflect.ValueOf(objs)))
			return v.Interface().(Node), nil
		}
	}
	if len(objs) != v.NumField() {
		return nil, fmt.Errorf("tried to initialize node %s with %d values, expected %d", typ, len(objs), v.NumField())
	}
	for i := 0; i < v.NumField(); i++ {
		f := v.Field(i)
		if f.Kind() == reflect.String {
			if obj, ok := objs[i].(String); ok {
				f.Set(reflect.ValueOf(string(obj)))
			} else {
				return nil, fmt.Errorf("first argument of (Binding name node) must be string, but got %s", objs[i])
			}
		} else {
			f.Set(reflect.ValueOf(objs[i]))
		}
	}
	return v.Interface().(Node), nil
}
// populateNode builds the node named typ from objs, honoring the
// parser's AllowTypeInfo setting.
func (p *Parser) populateNode(typ string, objs []Node) (Node, error) {
	return populateNode(typ, objs, p.AllowTypeInfo)
}
// structNodes maps the type names accepted in pattern syntax to the
// reflect.Type used by populateNode to instantiate them.
var structNodes = map[string]reflect.Type{
	"Any":            reflect.TypeOf(Any{}),
	"Ellipsis":       reflect.TypeOf(Ellipsis{}),
	"List":           reflect.TypeOf(List{}),
	"Binding":        reflect.TypeOf(Binding{}),
	"RangeStmt":      reflect.TypeOf(RangeStmt{}),
	"AssignStmt":     reflect.TypeOf(AssignStmt{}),
	"IndexExpr":      reflect.TypeOf(IndexExpr{}),
	"Ident":          reflect.TypeOf(Ident{}),
	"Builtin":        reflect.TypeOf(Builtin{}),
	"ValueSpec":      reflect.TypeOf(ValueSpec{}),
	"GenDecl":        reflect.TypeOf(GenDecl{}),
	"BinaryExpr":     reflect.TypeOf(BinaryExpr{}),
	"ForStmt":        reflect.TypeOf(ForStmt{}),
	"ArrayType":      reflect.TypeOf(ArrayType{}),
	"DeferStmt":      reflect.TypeOf(DeferStmt{}),
	"MapType":        reflect.TypeOf(MapType{}),
	"ReturnStmt":     reflect.TypeOf(ReturnStmt{}),
	"SliceExpr":      reflect.TypeOf(SliceExpr{}),
	"StarExpr":       reflect.TypeOf(StarExpr{}),
	"UnaryExpr":      reflect.TypeOf(UnaryExpr{}),
	"SendStmt":       reflect.TypeOf(SendStmt{}),
	"SelectStmt":     reflect.TypeOf(SelectStmt{}),
	"ImportSpec":     reflect.TypeOf(ImportSpec{}),
	"IfStmt":         reflect.TypeOf(IfStmt{}),
	"GoStmt":         reflect.TypeOf(GoStmt{}),
	"Field":          reflect.TypeOf(Field{}),
	"SelectorExpr":   reflect.TypeOf(SelectorExpr{}),
	"StructType":     reflect.TypeOf(StructType{}),
	"KeyValueExpr":   reflect.TypeOf(KeyValueExpr{}),
	"FuncType":       reflect.TypeOf(FuncType{}),
	"FuncLit":        reflect.TypeOf(FuncLit{}),
	"FuncDecl":       reflect.TypeOf(FuncDecl{}),
	"ChanType":       reflect.TypeOf(ChanType{}),
	"CallExpr":       reflect.TypeOf(CallExpr{}),
	"CaseClause":     reflect.TypeOf(CaseClause{}),
	"CommClause":     reflect.TypeOf(CommClause{}),
	"CompositeLit":   reflect.TypeOf(CompositeLit{}),
	"EmptyStmt":      reflect.TypeOf(EmptyStmt{}),
	"SwitchStmt":     reflect.TypeOf(SwitchStmt{}),
	"TypeSwitchStmt": reflect.TypeOf(TypeSwitchStmt{}),
	"TypeAssertExpr": reflect.TypeOf(TypeAssertExpr{}),
	"TypeSpec":       reflect.TypeOf(TypeSpec{}),
	"InterfaceType":  reflect.TypeOf(InterfaceType{}),
	"BranchStmt":     reflect.TypeOf(BranchStmt{}),
	"IncDecStmt":     reflect.TypeOf(IncDecStmt{}),
	"BasicLit":       reflect.TypeOf(BasicLit{}),
	"Object":         reflect.TypeOf(Object{}),
	"Function":       reflect.TypeOf(Function{}),
	"Or":             reflect.TypeOf(Or{}),
	"Not":            reflect.TypeOf(Not{}),
}
// object parses one object:
//
//	Object ::= Node | Array | Binding | itemVariable | itemBlank | itemString
//
// An object followed by a colon is parsed as a cons cell (List).
func (p *Parser) object() (Node, error) {
	n := p.next()
	switch n.typ {
	case itemLeftParen:
		p.rewind()
		node, err := p.node()
		if err != nil {
			return node, err
		}
		// "node:tail" forms a cons cell.
		if p.peek().typ == itemColon {
			p.next()
			tail, err := p.object()
			if err != nil {
				return node, err
			}
			return List{Head: node, Tail: tail}, nil
		}
		return node, nil
	case itemLeftBracket:
		p.rewind()
		return p.array()
	case itemVariable:
		v := n
		// The variable "nil" denotes the literal nil node.
		if v.val == "nil" {
			return Nil{}, nil
		}
		var b Binding
		if _, ok := p.accept(itemAt); ok {
			// "name@(Node ...)" binds the matched sub-pattern to name.
			o, err := p.node()
			if err != nil {
				return nil, err
			}
			b = Binding{
				Name: v.val,
				Node: o,
			}
		} else {
			p.rewind()
			// A bare variable is a binding without a sub-pattern.
			b = Binding{Name: v.val}
		}
		// A binding followed by a colon also forms a cons cell.
		if p.peek().typ == itemColon {
			p.next()
			tail, err := p.object()
			if err != nil {
				return b, err
			}
			return List{Head: b, Tail: tail}, nil
		}
		return b, nil
	case itemBlank:
		return Any{}, nil
	case itemString:
		return String(n.val), nil
	default:
		return nil, p.unexpectedToken("object")
	}
}
// array parses a bracketed sequence of objects and returns it as a
// cons-style list: nested Lists terminated by the empty List.
//
//	Array ::= itemLeftBracket Object* itemRightBracket
func (p *Parser) array() (Node, error) {
	if _, ok := p.accept(itemLeftBracket); !ok {
		return nil, p.unexpectedToken("'['")
	}
	var elems []Node
	for {
		if _, done := p.accept(itemRightBracket); done {
			break
		}
		// accept already rewound on failure; repeating it is a no-op.
		p.rewind()
		elem, err := p.object()
		if err != nil {
			return nil, err
		}
		elems = append(elems, elem)
	}
	// Fold the elements right-to-left into a proper list.
	list := List{}
	for idx := len(elems) - 1; idx >= 0; idx-- {
		list = List{Head: elems[idx], Tail: list}
	}
	return list, nil
}
/*
Node    ::= itemLeftParen itemTypeName Object* itemRightParen
Object  ::= Node | Array | Binding | itemVariable | itemBlank | itemString
Array   ::= itemLeftBracket Object* itemRightBracket
Array   ::= Object itemColon Object
Binding ::= itemVariable itemAt Node
*/

View File

@ -0,0 +1,497 @@
package pattern
import (
"fmt"
"go/token"
"reflect"
"strings"
)
// Compile-time checks that every pattern node type implements Node.
var (
	_ Node = Ellipsis{}
	_ Node = Binding{}
	_ Node = RangeStmt{}
	_ Node = AssignStmt{}
	_ Node = IndexExpr{}
	_ Node = Ident{}
	_ Node = Builtin{}
	_ Node = String("")
	_ Node = Any{}
	_ Node = ValueSpec{}
	_ Node = List{}
	_ Node = GenDecl{}
	_ Node = BinaryExpr{}
	_ Node = ForStmt{}
	_ Node = ArrayType{}
	_ Node = DeferStmt{}
	_ Node = MapType{}
	_ Node = ReturnStmt{}
	_ Node = SliceExpr{}
	_ Node = StarExpr{}
	_ Node = UnaryExpr{}
	_ Node = SendStmt{}
	_ Node = SelectStmt{}
	_ Node = ImportSpec{}
	_ Node = IfStmt{}
	_ Node = GoStmt{}
	_ Node = Field{}
	_ Node = SelectorExpr{}
	_ Node = StructType{}
	_ Node = KeyValueExpr{}
	_ Node = FuncType{}
	_ Node = FuncLit{}
	_ Node = FuncDecl{}
	_ Node = Token(0)
	_ Node = ChanType{}
	_ Node = CallExpr{}
	_ Node = CaseClause{}
	_ Node = CommClause{}
	_ Node = CompositeLit{}
	_ Node = EmptyStmt{}
	_ Node = SwitchStmt{}
	_ Node = TypeSwitchStmt{}
	_ Node = TypeAssertExpr{}
	_ Node = TypeSpec{}
	_ Node = InterfaceType{}
	_ Node = BranchStmt{}
	_ Node = IncDecStmt{}
	_ Node = BasicLit{}
	_ Node = Nil{}
	_ Node = Object{}
	_ Node = Function{}
	_ Node = Not{}
	_ Node = Or{}
)
// Function matches a function; it requires type information (see
// requiresTypeInfo).
type Function struct {
	Name Node
}

// Token matches a specific go/token.Token value, e.g. an operator or
// keyword.
type Token token.Token

// Nil is the pattern node for the literal "nil" variable (see object).
type Nil struct {
}

type Ellipsis struct {
	Elt Node
}

// The following struct types mirror the go/ast node of the same name
// (see the node-to-AST-type table), with every field holding a pattern
// Node instead of a concrete AST value.

type IncDecStmt struct {
	X   Node
	Tok Node
}

type BranchStmt struct {
	Tok   Node
	Label Node
}

type InterfaceType struct {
	Methods Node
}

type TypeSpec struct {
	Name Node
	Type Node
}

type TypeAssertExpr struct {
	X    Node
	Type Node
}

type TypeSwitchStmt struct {
	Init   Node
	Assign Node
	Body   Node
}

type SwitchStmt struct {
	Init Node
	Tag  Node
	Body Node
}

type EmptyStmt struct {
}

type CompositeLit struct {
	Type Node
	Elts Node
}

type CommClause struct {
	Comm Node
	Body Node
}

type CaseClause struct {
	List Node
	Body Node
}

type CallExpr struct {
	Fun  Node
	Args Node
	// XXX handle ellipsis
}

// TODO(dh): add a ChanDir node, and a way of instantiating it.
type ChanType struct {
	Dir   Node
	Value Node
}

type FuncDecl struct {
	Recv Node
	Name Node
	Type Node
	Body Node
}

type FuncLit struct {
	Type Node
	Body Node
}

type FuncType struct {
	Params  Node
	Results Node
}

type KeyValueExpr struct {
	Key   Node
	Value Node
}

type StructType struct {
	Fields Node
}

type SelectorExpr struct {
	X   Node
	Sel Node
}

type Field struct {
	Names Node
	Type  Node
	Tag   Node
}

type GoStmt struct {
	Call Node
}

type IfStmt struct {
	Init Node
	Cond Node
	Body Node
	Else Node
}

type ImportSpec struct {
	Name Node
	Path Node
}

type SelectStmt struct {
	Body Node
}

type ArrayType struct {
	Len Node
	Elt Node
}

type DeferStmt struct {
	Call Node
}

type MapType struct {
	Key   Node
	Value Node
}

type ReturnStmt struct {
	Results Node
}

type SliceExpr struct {
	X    Node
	Low  Node
	High Node
	Max  Node
}

type StarExpr struct {
	X Node
}

type UnaryExpr struct {
	Op Node
	X  Node
}

type SendStmt struct {
	Chan  Node
	Value Node
}

// Binding names the node matched by its sub-pattern; written
// "name@(Node ...)" or as a bare variable (Node is nil then).
type Binding struct {
	Name string
	Node Node
}

type RangeStmt struct {
	Key   Node
	Value Node
	Tok   Node
	X     Node
	Body  Node
}

type AssignStmt struct {
	Lhs Node
	Tok Node
	Rhs Node
}

type IndexExpr struct {
	X     Node
	Index Node
}

// Node is implemented by every pattern element. isNode is a marker
// method; String renders the node in the parser's s-expression syntax.
type Node interface {
	String() string
	isNode()
}

type Ident struct {
	Name Node
}

// Object requires type information (see requiresTypeInfo).
type Object struct {
	Name Node
}

// Builtin requires type information (see requiresTypeInfo).
type Builtin struct {
	Name Node
}

// String is a literal string value in a pattern.
type String string

// Any is the wildcard, written "_" in pattern syntax.
type Any struct{}

type ValueSpec struct {
	Names  Node
	Type   Node
	Values Node
}

// List is a cons cell; object sequences are represented as nested
// Lists terminated by the zero List (see array).
type List struct {
	Head Node
	Tail Node
}

type GenDecl struct {
	Tok   Node
	Specs Node
}

type BasicLit struct {
	Kind  Node
	Value Node
}

type BinaryExpr struct {
	X  Node
	Op Node
	Y  Node
}

type ForStmt struct {
	Init Node
	Cond Node
	Post Node
	Body Node
}

// Or holds alternative sub-patterns.
type Or struct {
	Nodes []Node
}

// Not wraps a sub-pattern to be negated.
type Not struct {
	Node Node
}
// stringify renders a struct-backed pattern node as an s-expression:
// "(TypeName field0 field1 ...)". It is the shared implementation
// behind most String methods in this package.
func stringify(n Node) string {
	v := reflect.ValueOf(n)
	// Pre-size: one entry for the type name plus one per field.
	parts := make([]string, 0, v.NumField()+1)
	parts = append(parts, v.Type().Name())
	for i := 0; i < v.NumField(); i++ {
		//lint:ignore S1025 false positive in staticcheck 2019.2.3
		parts = append(parts, fmt.Sprintf("%s", v.Field(i)))
	}
	return "(" + strings.Join(parts, " ") + ")"
}
// The String methods below all delegate to stringify, rendering each
// node in the parenthesized s-expression syntax the parser accepts.
func (stmt AssignStmt) String() string { return stringify(stmt) }
func (expr IndexExpr) String() string { return stringify(expr) }
func (id Ident) String() string { return stringify(id) }
func (spec ValueSpec) String() string { return stringify(spec) }
func (decl GenDecl) String() string { return stringify(decl) }
func (lit BasicLit) String() string { return stringify(lit) }
func (expr BinaryExpr) String() string { return stringify(expr) }
func (stmt ForStmt) String() string { return stringify(stmt) }
func (stmt RangeStmt) String() string { return stringify(stmt) }
func (typ ArrayType) String() string { return stringify(typ) }
func (stmt DeferStmt) String() string { return stringify(stmt) }
func (typ MapType) String() string { return stringify(typ) }
func (stmt ReturnStmt) String() string { return stringify(stmt) }
func (expr SliceExpr) String() string { return stringify(expr) }
func (expr StarExpr) String() string { return stringify(expr) }
func (expr UnaryExpr) String() string { return stringify(expr) }
func (stmt SendStmt) String() string { return stringify(stmt) }
func (spec ImportSpec) String() string { return stringify(spec) }
func (stmt SelectStmt) String() string { return stringify(stmt) }
func (stmt IfStmt) String() string { return stringify(stmt) }
func (stmt IncDecStmt) String() string { return stringify(stmt) }
func (stmt GoStmt) String() string { return stringify(stmt) }
func (field Field) String() string { return stringify(field) }
func (expr SelectorExpr) String() string { return stringify(expr) }
func (typ StructType) String() string { return stringify(typ) }
func (expr KeyValueExpr) String() string { return stringify(expr) }
func (typ FuncType) String() string { return stringify(typ) }
func (lit FuncLit) String() string { return stringify(lit) }
func (decl FuncDecl) String() string { return stringify(decl) }
func (stmt BranchStmt) String() string { return stringify(stmt) }
func (expr CallExpr) String() string { return stringify(expr) }
func (clause CaseClause) String() string { return stringify(clause) }
func (typ ChanType) String() string { return stringify(typ) }
func (clause CommClause) String() string { return stringify(clause) }
func (lit CompositeLit) String() string { return stringify(lit) }
func (stmt EmptyStmt) String() string { return stringify(stmt) }
func (typ InterfaceType) String() string { return stringify(typ) }
func (stmt SwitchStmt) String() string { return stringify(stmt) }
func (expr TypeAssertExpr) String() string { return stringify(expr) }
func (spec TypeSpec) String() string { return stringify(spec) }
func (stmt TypeSwitchStmt) String() string { return stringify(stmt) }
// String renders the nil node. The receiver was originally named
// "nil", which shadows the predeclared identifier; since it is unused,
// it is now anonymous.
func (Nil) String() string { return "nil" }
// These String methods also delegate to stringify.
func (builtin Builtin) String() string { return stringify(builtin) }
func (obj Object) String() string { return stringify(obj) }
func (fn Function) String() string { return stringify(fn) }
func (el Ellipsis) String() string { return stringify(el) }
func (not Not) String() string { return stringify(not) }
// String renders the disjunction as "(Or alt1 alt2 ...)".
func (or Or) String() string {
	var b strings.Builder
	b.WriteString("(Or")
	for _, node := range or.Nodes {
		b.WriteString(" ")
		b.WriteString(node.String())
	}
	b.WriteString(")")
	return b.String()
}
// isProperList reports whether l is a well-formed cons list: every
// tail is itself a List, terminating in the empty List.
func isProperList(l List) bool {
	if l.Head == nil && l.Tail == nil {
		return true
	}
	// A nil tail or a non-List tail makes the list improper; the type
	// assertion covers both cases.
	tail, ok := l.Tail.(List)
	if !ok {
		return false
	}
	return isProperList(tail)
}
// String renders a proper list as "[a b c]" and an improper list (one
// whose chain of tails does not end in the empty List) as "head:tail".
func (l List) String() string {
	if l.Head == nil && l.Tail == nil {
		return "[]"
	}
	if isProperList(l) {
		// pretty-print the list
		var objs []string
		for l.Head != nil {
			objs = append(objs, l.Head.String())
			// isProperList guarantees every tail is a List.
			l = l.Tail.(List)
		}
		return fmt.Sprintf("[%s]", strings.Join(objs, " "))
	}
	return fmt.Sprintf("%s:%s", l.Head, l.Tail)
}
// String renders a binding as "name@node", or just "name" when no
// sub-pattern is attached.
func (bind Binding) String() string {
	if bind.Node != nil {
		return fmt.Sprintf("%s@%s", bind.Name, bind.Node)
	}
	return bind.Name
}
// String renders the literal as a quoted Go string.
func (s String) String() string { return fmt.Sprintf("%q", string(s)) }

// String renders the token as its quoted, upper-cased token string.
func (tok Token) String() string {
	return fmt.Sprintf("%q", strings.ToUpper(token.Token(tok).String()))
}

// String renders the wildcard as an underscore.
func (Any) String() string { return "_" }
// isNode is the Node marker method; the intentionally empty
// implementations below admit each type into the Node interface.
func (AssignStmt) isNode() {}
func (IndexExpr) isNode() {}
func (Ident) isNode() {}
func (ValueSpec) isNode() {}
func (GenDecl) isNode() {}
func (BasicLit) isNode() {}
func (BinaryExpr) isNode() {}
func (ForStmt) isNode() {}
func (RangeStmt) isNode() {}
func (ArrayType) isNode() {}
func (DeferStmt) isNode() {}
func (MapType) isNode() {}
func (ReturnStmt) isNode() {}
func (SliceExpr) isNode() {}
func (StarExpr) isNode() {}
func (UnaryExpr) isNode() {}
func (SendStmt) isNode() {}
func (ImportSpec) isNode() {}
func (SelectStmt) isNode() {}
func (IfStmt) isNode() {}
func (IncDecStmt) isNode() {}
func (GoStmt) isNode() {}
func (Field) isNode() {}
func (SelectorExpr) isNode() {}
func (StructType) isNode() {}
func (KeyValueExpr) isNode() {}
func (FuncType) isNode() {}
func (FuncLit) isNode() {}
func (FuncDecl) isNode() {}
func (BranchStmt) isNode() {}
func (CallExpr) isNode() {}
func (CaseClause) isNode() {}
func (ChanType) isNode() {}
func (CommClause) isNode() {}
func (CompositeLit) isNode() {}
func (EmptyStmt) isNode() {}
func (InterfaceType) isNode() {}
func (SwitchStmt) isNode() {}
func (TypeAssertExpr) isNode() {}
func (TypeSpec) isNode() {}
func (TypeSwitchStmt) isNode() {}
func (Nil) isNode() {}
func (Builtin) isNode() {}
func (Object) isNode() {}
func (Function) isNode() {}
func (Ellipsis) isNode() {}
func (Or) isNode() {}
func (List) isNode() {}
func (String) isNode() {}
func (Token) isNode() {}
func (Any) isNode() {}
func (Binding) isNode() {}
func (Not) isNode() {}

View File

@ -0,0 +1,184 @@
package report
import (
"bytes"
"go/ast"
"go/printer"
"go/token"
"strings"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/ast/astutil"
"honnef.co/go/tools/facts"
"honnef.co/go/tools/lint"
)
// Options collects the reporting settings accumulated by the Option
// functions passed to Report.
type Options struct {
	// ShortRange restricts the reported range to the node's "head"
	// (see shortRange) instead of its full extent.
	ShortRange bool
	// FilterGenerated suppresses the diagnostic in generated files.
	FilterGenerated bool
	// Fixes are suggested fixes attached to the diagnostic.
	Fixes []analysis.SuggestedFix
	// Related is related information attached to the diagnostic.
	Related []analysis.RelatedInformation
}

// Option mutates an Options value; see ShortRange, FilterGenerated,
// Fixes and Related.
type Option func(*Options)
// ShortRange makes Report use the node's short range (excluding its
// body; see shortRange).
func ShortRange() Option {
	return func(opts *Options) {
		opts.ShortRange = true
	}
}

// FilterGenerated suppresses the diagnostic when the node is in a
// generated file.
func FilterGenerated() Option {
	return func(opts *Options) {
		opts.FilterGenerated = true
	}
}

// Fixes attaches suggested fixes to the diagnostic; multiple calls
// accumulate.
func Fixes(fixes ...analysis.SuggestedFix) Option {
	return func(opts *Options) {
		opts.Fixes = append(opts.Fixes, fixes...)
	}
}
// Related attaches related information (a secondary range plus a
// message) to the diagnostic. Note that the range is computed when the
// option is applied, so it honors ShortRange only if that option
// appears earlier in the option list.
func Related(node Positioner, message string) Option {
	return func(opts *Options) {
		pos, end := getRange(node, opts.ShortRange)
		r := analysis.RelatedInformation{
			Pos:     pos,
			End:     end,
			Message: message,
		}
		opts.Related = append(opts.Related, r)
	}
}
// Positioner is the minimal requirement for reporting: anything with a
// starting position. ast.Node satisfies it.
type Positioner interface {
	Pos() token.Pos
}

// fullPositioner additionally provides an end position, allowing a
// full range to be reported.
type fullPositioner interface {
	Pos() token.Pos
	End() token.Pos
}

// sourcer is implemented by values that wrap an underlying AST node;
// getRange reports the range of that node instead of the wrapper's.
type sourcer interface {
	Source() ast.Node
}
// shortRange returns the position and end of the main component of an
// AST node. For nodes that have no body, the short range is identical
// to the node's Pos and End. For nodes that do have a body, the short
// range excludes the body.
func shortRange(node ast.Node) (pos, end token.Pos) {
	switch node := node.(type) {
	case *ast.File:
		// Just the package clause, not the entire file.
		return node.Pos(), node.Name.End()
	case *ast.CaseClause:
		// Up to and including the colon.
		return node.Pos(), node.Colon + 1
	case *ast.CommClause:
		return node.Pos(), node.Colon + 1
	case *ast.DeferStmt:
		// Just the "defer" keyword.
		return node.Pos(), node.Defer + token.Pos(len("defer"))
	case *ast.ExprStmt:
		return shortRange(node.X)
	case *ast.ForStmt:
		// Cover the loop header up to its last present clause.
		if node.Post != nil {
			return node.For, node.Post.End()
		} else if node.Cond != nil {
			return node.For, node.Cond.End()
		} else if node.Init != nil {
			// +1 to catch the semicolon, for gofmt'ed code
			return node.Pos(), node.Init.End() + 1
		} else {
			return node.Pos(), node.For + token.Pos(len("for"))
		}
	case *ast.FuncDecl:
		return node.Pos(), node.Type.End()
	case *ast.FuncLit:
		return node.Pos(), node.Type.End()
	case *ast.GoStmt:
		// For "go func() { ... }()" only report the keyword; other
		// call forms are short enough to report in full.
		if _, ok := astutil.Unparen(node.Call.Fun).(*ast.FuncLit); ok {
			return node.Pos(), node.Go + token.Pos(len("go"))
		} else {
			return node.Pos(), node.End()
		}
	case *ast.IfStmt:
		return node.Pos(), node.Cond.End()
	case *ast.RangeStmt:
		return node.Pos(), node.X.End()
	case *ast.SelectStmt:
		return node.Pos(), node.Pos() + token.Pos(len("select"))
	case *ast.SwitchStmt:
		if node.Tag != nil {
			return node.Pos(), node.Tag.End()
		} else if node.Init != nil {
			// +1 to catch the semicolon, for gofmt'ed code
			return node.Pos(), node.Init.End() + 1
		} else {
			return node.Pos(), node.Pos() + token.Pos(len("switch"))
		}
	case *ast.TypeSwitchStmt:
		return node.Pos(), node.Assign.End()
	default:
		return node.Pos(), node.End()
	}
}
// getRange resolves a Positioner to a (pos, end) pair. Wrappers that
// expose an underlying AST node (sourcer) report that node's range; a
// plain Positioner without an End method reports token.NoPos as end.
// When short is set, bodies are excluded via shortRange.
func getRange(node Positioner, short bool) (pos, end token.Pos) {
	switch node := node.(type) {
	case sourcer:
		s := node.Source()
		if short {
			return shortRange(s)
		}
		return s.Pos(), s.End()
	case fullPositioner:
		if short {
			return shortRange(node)
		}
		return node.Pos(), node.End()
	default:
		return node.Pos(), token.NoPos
	}
}
// Report emits a diagnostic at node's position with the given message,
// after applying all opts (suggested fixes, related information,
// short-range reporting, generated-file filtering).
func Report(pass *analysis.Pass, node Positioner, message string, opts ...Option) {
	cfg := &Options{}
	for _, opt := range opts {
		opt(cfg)
	}

	file := lint.DisplayPosition(pass.Fset, node.Pos()).Filename
	if cfg.FilterGenerated {
		// The Generated fact maps file names to their generator; a hit
		// means this file is machine-generated and gets no diagnostic.
		m := pass.ResultOf[facts.Generated].(map[string]facts.Generator)
		if _, ok := m[file]; ok {
			return
		}
	}

	pos, end := getRange(node, cfg.ShortRange)
	d := analysis.Diagnostic{
		Pos:            pos,
		End:            end,
		Message:        message,
		SuggestedFixes: cfg.Fixes,
		Related:        cfg.Related,
	}
	pass.Report(d)
}
// Render pretty-prints the AST value x using the pass's file set. It
// panics if printing fails.
func Render(pass *analysis.Pass, x interface{}) string {
	var buf bytes.Buffer
	if err := printer.Fprint(&buf, pass.Fset, x); err != nil {
		panic(err)
	}
	return buf.String()
}
// RenderArgs renders each argument expression with Render and joins
// the results with ", ", matching Go call-site syntax.
func RenderArgs(pass *analysis.Pass, args []ast.Expr) string {
	// Pre-size: exactly one rendered string per argument.
	ss := make([]string, 0, len(args))
	for _, arg := range args {
		ss = append(ss, Render(pass, arg))
	}
	return strings.Join(ss, ", ")
}

View File

@ -1,15 +0,0 @@
# Contributing to gosimple
## Before filing an issue:
### Are you having trouble building gosimple?
Check you have the latest version of its dependencies. Run
```
go get -u honnef.co/go/tools/simple
```
If you still have problems, consider searching for existing issues before filing a new issue.
## Before sending a pull request:
Have you understood the purpose of gosimple? Make sure to carefully read `README`.

View File

@ -1,223 +1,148 @@
package simple
import (
"flag"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"honnef.co/go/tools/facts"
"honnef.co/go/tools/internal/passes/buildssa"
"honnef.co/go/tools/internal/passes/buildir"
"honnef.co/go/tools/lint/lintutil"
)
// newFlagSet returns the flag set shared by all checks in this
// package, currently just the -go target-version flag. It is returned
// by value so every analyzer receives its own independent copy.
func newFlagSet() flag.FlagSet {
	fs := flag.NewFlagSet("", flag.PanicOnError)
	fs.Var(lintutil.NewVersionFlag(), "go", "Target Go version")
	return *fs
}
var Analyzers = map[string]*analysis.Analyzer{
var Analyzers = lintutil.InitializeAnalyzers(Docs, map[string]*analysis.Analyzer{
"S1000": {
Name: "S1000",
Run: LintSingleCaseSelect,
Doc: Docs["S1000"].String(),
Run: CheckSingleCaseSelect,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
},
"S1001": {
Name: "S1001",
Run: LintLoopCopy,
Doc: Docs["S1001"].String(),
Run: CheckLoopCopy,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
},
"S1002": {
Name: "S1002",
Run: LintIfBoolCmp,
Doc: Docs["S1002"].String(),
Run: CheckIfBoolCmp,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
},
"S1003": {
Name: "S1003",
Run: LintStringsContains,
Doc: Docs["S1003"].String(),
Run: CheckStringsContains,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
},
"S1004": {
Name: "S1004",
Run: LintBytesCompare,
Doc: Docs["S1004"].String(),
Run: CheckBytesCompare,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
},
"S1005": {
Name: "S1005",
Run: LintUnnecessaryBlank,
Doc: Docs["S1005"].String(),
Run: CheckUnnecessaryBlank,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
},
"S1006": {
Name: "S1006",
Run: LintForTrue,
Doc: Docs["S1006"].String(),
Run: CheckForTrue,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
},
"S1007": {
Name: "S1007",
Run: LintRegexpRaw,
Doc: Docs["S1007"].String(),
Run: CheckRegexpRaw,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
},
"S1008": {
Name: "S1008",
Run: LintIfReturn,
Doc: Docs["S1008"].String(),
Run: CheckIfReturn,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
},
"S1009": {
Name: "S1009",
Run: LintRedundantNilCheckWithLen,
Doc: Docs["S1009"].String(),
Run: CheckRedundantNilCheckWithLen,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
},
"S1010": {
Name: "S1010",
Run: LintSlicing,
Doc: Docs["S1010"].String(),
Run: CheckSlicing,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
},
"S1011": {
Name: "S1011",
Run: LintLoopAppend,
Doc: Docs["S1011"].String(),
Run: CheckLoopAppend,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
},
"S1012": {
Name: "S1012",
Run: LintTimeSince,
Doc: Docs["S1012"].String(),
Run: CheckTimeSince,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
},
"S1016": {
Name: "S1016",
Run: LintSimplerStructConversion,
Doc: Docs["S1016"].String(),
Run: CheckSimplerStructConversion,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
},
"S1017": {
Name: "S1017",
Run: LintTrim,
Doc: Docs["S1017"].String(),
Run: CheckTrim,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
},
"S1018": {
Name: "S1018",
Run: LintLoopSlide,
Doc: Docs["S1018"].String(),
Run: CheckLoopSlide,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
},
"S1019": {
Name: "S1019",
Run: LintMakeLenCap,
Doc: Docs["S1019"].String(),
Run: CheckMakeLenCap,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
},
"S1020": {
Name: "S1020",
Run: LintAssertNotNil,
Doc: Docs["S1020"].String(),
Run: CheckAssertNotNil,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
},
"S1021": {
Name: "S1021",
Run: LintDeclareAssign,
Doc: Docs["S1021"].String(),
Run: CheckDeclareAssign,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
},
"S1023": {
Name: "S1023",
Run: LintRedundantBreak,
Doc: Docs["S1023"].String(),
Run: CheckRedundantBreak,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
},
"S1024": {
Name: "S1024",
Run: LintTimeUntil,
Doc: Docs["S1024"].String(),
Run: CheckTimeUntil,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
},
"S1025": {
Name: "S1025",
Run: LintRedundantSprintf,
Doc: Docs["S1025"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer, inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
Run: CheckRedundantSprintf,
Requires: []*analysis.Analyzer{buildir.Analyzer, inspect.Analyzer, facts.Generated},
},
"S1028": {
Name: "S1028",
Run: LintErrorsNewSprintf,
Doc: Docs["S1028"].String(),
Run: CheckErrorsNewSprintf,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
},
"S1029": {
Name: "S1029",
Run: LintRangeStringRunes,
Doc: Docs["S1029"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer},
Flags: newFlagSet(),
Run: CheckRangeStringRunes,
Requires: []*analysis.Analyzer{buildir.Analyzer},
},
"S1030": {
Name: "S1030",
Run: LintBytesBufferConversions,
Doc: Docs["S1030"].String(),
Run: CheckBytesBufferConversions,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
},
"S1031": {
Name: "S1031",
Run: LintNilCheckAroundRange,
Doc: Docs["S1031"].String(),
Run: CheckNilCheckAroundRange,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
},
"S1032": {
Name: "S1032",
Run: LintSortHelpers,
Doc: Docs["S1032"].String(),
Run: CheckSortHelpers,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
},
"S1033": {
Name: "S1033",
Run: LintGuardedDelete,
Doc: Docs["S1033"].String(),
Run: CheckGuardedDelete,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
},
"S1034": {
Name: "S1034",
Run: LintSimplifyTypeSwitch,
Doc: Docs["S1034"].String(),
Run: CheckSimplifyTypeSwitch,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
Flags: newFlagSet(),
},
}
"S1035": {
Run: CheckRedundantCanonicalHeaderKey,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
},
"S1036": {
Run: CheckUnnecessaryGuard,
Requires: []*analysis.Analyzer{inspect.Analyzer},
},
"S1037": {
Run: CheckElaborateSleep,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
},
"S1038": {
Run: CheckPrintSprintf,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
},
"S1039": {
Run: CheckSprintLiteral,
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated},
},
})

View File

@ -3,7 +3,7 @@ package simple
import "honnef.co/go/tools/lint"
var Docs = map[string]*lint.Documentation{
"S1000": &lint.Documentation{
"S1000": {
Title: `Use plain channel send or receive instead of single-case select`,
Text: `Select statements with a single case can be replaced with a simple
send or receive.
@ -22,7 +22,7 @@ After:
Since: "2017.1",
},
"S1001": &lint.Documentation{
"S1001": {
Title: `Replace for loop with call to copy`,
Text: `Use copy() for copying elements from one slice to another.
@ -38,7 +38,7 @@ After:
Since: "2017.1",
},
"S1002": &lint.Documentation{
"S1002": {
Title: `Omit comparison with boolean constant`,
Text: `Before:
@ -50,7 +50,7 @@ After:
Since: "2017.1",
},
"S1003": &lint.Documentation{
"S1003": {
Title: `Replace call to strings.Index with strings.Contains`,
Text: `Before:
@ -62,7 +62,7 @@ After:
Since: "2017.1",
},
"S1004": &lint.Documentation{
"S1004": {
Title: `Replace call to bytes.Compare with bytes.Equal`,
Text: `Before:
@ -74,7 +74,7 @@ After:
Since: "2017.1",
},
"S1005": &lint.Documentation{
"S1005": {
Title: `Drop unnecessary use of the blank identifier`,
Text: `In many cases, assigning to the blank identifier is unnecessary.
@ -92,13 +92,13 @@ After:
Since: "2017.1",
},
"S1006": &lint.Documentation{
"S1006": {
Title: `Use for { ... } for infinite loops`,
Text: `For infinite loops, using for { ... } is the most idiomatic choice.`,
Since: "2017.1",
},
"S1007": &lint.Documentation{
"S1007": {
Title: `Simplify regular expression by using raw string literal`,
Text: `Raw string literals use ` + "`" + ` instead of " and do not support
any escape sequences. This means that the backslash (\) can be used
@ -117,7 +117,7 @@ After:
Since: "2017.1",
},
"S1008": &lint.Documentation{
"S1008": {
Title: `Simplify returning boolean expression`,
Text: `Before:
@ -132,7 +132,7 @@ After:
Since: "2017.1",
},
"S1009": &lint.Documentation{
"S1009": {
Title: `Omit redundant nil check on slices`,
Text: `The len function is defined for all slices, even nil ones, which have
a length of zero. It is not necessary to check if a slice is not nil
@ -148,14 +148,14 @@ After:
Since: "2017.1",
},
"S1010": &lint.Documentation{
"S1010": {
Title: `Omit default slice index`,
Text: `When slicing, the second index defaults to the length of the value,
making s[n:len(s)] and s[n:] equivalent.`,
Since: "2017.1",
},
"S1011": &lint.Documentation{
"S1011": {
Title: `Use a single append to concatenate two slices`,
Text: `Before:
@ -169,7 +169,7 @@ After:
Since: "2017.1",
},
"S1012": &lint.Documentation{
"S1012": {
Title: `Replace time.Now().Sub(x) with time.Since(x)`,
Text: `The time.Since helper has the same effect as using time.Now().Sub(x)
but is easier to read.
@ -184,7 +184,7 @@ After:
Since: "2017.1",
},
"S1016": &lint.Documentation{
"S1016": {
Title: `Use a type conversion instead of manually copying struct fields`,
Text: `Two struct types with identical fields can be converted between each
other. In older versions of Go, the fields had to have identical
@ -207,7 +207,7 @@ After:
Since: "2017.1",
},
"S1017": &lint.Documentation{
"S1017": {
Title: `Replace manual trimming with strings.TrimPrefix`,
Text: `Instead of using strings.HasPrefix and manual slicing, use the
strings.TrimPrefix function. If the string doesn't start with the
@ -227,7 +227,7 @@ After:
Since: "2017.1",
},
"S1018": &lint.Documentation{
"S1018": {
Title: `Use copy for sliding elements`,
Text: `copy() permits using the same source and destination slice, even with
overlapping ranges. This makes it ideal for sliding elements in a
@ -245,7 +245,7 @@ After:
Since: "2017.1",
},
"S1019": &lint.Documentation{
"S1019": {
Title: `Simplify make call by omitting redundant arguments`,
Text: `The make function has default values for the length and capacity
arguments. For channels and maps, the length defaults to zero.
@ -253,7 +253,7 @@ Additionally, for slices the capacity defaults to the length.`,
Since: "2017.1",
},
"S1020": &lint.Documentation{
"S1020": {
Title: `Omit redundant nil check in type assertion`,
Text: `Before:
@ -265,7 +265,7 @@ After:
Since: "2017.1",
},
"S1021": &lint.Documentation{
"S1021": {
Title: `Merge variable declaration and assignment`,
Text: `Before:
@ -278,7 +278,7 @@ After:
Since: "2017.1",
},
"S1023": &lint.Documentation{
"S1023": {
Title: `Omit redundant control flow`,
Text: `Functions that have no return value do not need a return statement as
the final statement of the function.
@ -289,7 +289,7 @@ statement in a case block.`,
Since: "2017.1",
},
"S1024": &lint.Documentation{
"S1024": {
Title: `Replace x.Sub(time.Now()) with time.Until(x)`,
Text: `The time.Until helper has the same effect as using x.Sub(time.Now())
but is easier to read.
@ -304,7 +304,7 @@ After:
Since: "2017.1",
},
"S1025": &lint.Documentation{
"S1025": {
Title: `Don't use fmt.Sprintf("%s", x) unnecessarily`,
Text: `In many instances, there are easier and more efficient ways of getting
a value's string representation. Whenever a value's underlying type is
@ -336,7 +336,7 @@ to
Since: "2017.1",
},
"S1028": &lint.Documentation{
"S1028": {
Title: `Simplify error construction with fmt.Errorf`,
Text: `Before:
@ -348,7 +348,7 @@ After:
Since: "2017.1",
},
"S1029": &lint.Documentation{
"S1029": {
Title: `Range over the string directly`,
Text: `Ranging over a string will yield byte offsets and runes. If the offset
isn't used, this is functionally equivalent to converting the string
@ -366,7 +366,7 @@ After:
Since: "2017.1",
},
"S1030": &lint.Documentation{
"S1030": {
Title: `Use bytes.Buffer.String or bytes.Buffer.Bytes`,
Text: `bytes.Buffer has both a String and a Bytes method. It is never
necessary to use string(buf.Bytes()) or []byte(buf.String()) simply
@ -374,7 +374,7 @@ use the other method.`,
Since: "2017.1",
},
"S1031": &lint.Documentation{
"S1031": {
Title: `Omit redundant nil check around loop`,
Text: `You can use range on nil slices and maps, the loop will simply never
execute. This makes an additional nil check around the loop
@ -396,7 +396,7 @@ After:
Since: "2017.1",
},
"S1032": &lint.Documentation{
"S1032": {
Title: `Use sort.Ints(x), sort.Float64s(x), and sort.Strings(x)`,
Text: `The sort.Ints, sort.Float64s and sort.Strings functions are easier to
read than sort.Sort(sort.IntSlice(x)), sort.Sort(sort.Float64Slice(x))
@ -412,14 +412,74 @@ After:
Since: "2019.1",
},
"S1033": &lint.Documentation{
"S1033": {
Title: `Unnecessary guard around call to delete`,
Text: `Calling delete on a nil map is a no-op.`,
Since: "2019.2",
},
"S1034": &lint.Documentation{
"S1034": {
Title: `Use result of type assertion to simplify cases`,
Since: "2019.2",
},
"S1035": {
Title: `Redundant call to net/http.CanonicalHeaderKey in method call on net/http.Header`,
Text: `The methods on net/http.Header, namely Add, Del, Get and Set, already
canonicalize the given header name.`,
Since: "2020.1",
},
"S1036": {
Title: `Unnecessary guard around map access`,
Text: `When accessing a map key that doesn't exist yet, one
receives a zero value. Often, the zero value is a suitable value, for example when using append or doing integer math.
The following
if _, ok := m["foo"]; ok {
m["foo"] = append(m["foo"], "bar")
} else {
m["foo"] = []string{"bar"}
}
can be simplified to
m["foo"] = append(m["foo"], "bar")
and
if _, ok := m2["k"]; ok {
m2["k"] += 4
} else {
m2["k"] = 4
}
can be simplified to
m["k"] += 4
`,
Since: "2020.1",
},
"S1037": {
Title: `Elaborate way of sleeping`,
Text: `Using a select statement with a single case receiving
from the result of time.After is a very elaborate way of sleeping that
can much simpler be expressed with a simple call to time.Sleep.`,
Since: "2020.1",
},
"S1038": {
Title: "Unnecessarily complex way of printing formatted string",
Text: `Instead of using fmt.Print(fmt.Sprintf(...)), one can use fmt.Printf(...).`,
Since: "2020.1",
},
"S1039": {
Title: "Unnecessary use of fmt.Sprint",
Text: `Calling fmt.Sprint with a single string argument is unnecessary and identical to using the string directly.`,
Since: "2020.1",
},
}

File diff suppressed because it is too large Load Diff

View File

@ -1,657 +0,0 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ssa
// This file defines the lifting pass which tries to "lift" Alloc
// cells (new/local variables) into SSA registers, replacing loads
// with the dominating stored value, eliminating loads and stores, and
// inserting φ-nodes as needed.
// Cited papers and resources:
//
// Ron Cytron et al. 1991. Efficiently computing SSA form...
// http://doi.acm.org/10.1145/115372.115320
//
// Cooper, Harvey, Kennedy. 2001. A Simple, Fast Dominance Algorithm.
// Software Practice and Experience 2001, 4:1-10.
// http://www.hipersoft.rice.edu/grads/publications/dom14.pdf
//
// Daniel Berlin, llvmdev mailing list, 2012.
// http://lists.cs.uiuc.edu/pipermail/llvmdev/2012-January/046638.html
// (Be sure to expand the whole thread.)
// TODO(adonovan): opt: there are many optimizations worth evaluating, and
// the conventional wisdom for SSA construction is that a simple
// algorithm well engineered often beats those of better asymptotic
// complexity on all but the most egregious inputs.
//
// Danny Berlin suggests that the Cooper et al. algorithm for
// computing the dominance frontier is superior to Cytron et al.
// Furthermore he recommends that rather than computing the DF for the
// whole function then renaming all alloc cells, it may be cheaper to
// compute the DF for each alloc cell separately and throw it away.
//
// Consider exploiting liveness information to avoid creating dead
// φ-nodes which we then immediately remove.
//
// Also see many other "TODO: opt" suggestions in the code.
import (
"fmt"
"go/token"
"go/types"
"math/big"
"os"
)
// If true, show diagnostic information at each step of lifting.
// Very verbose.
const debugLifting = false

// domFrontier maps each block to the set of blocks in its dominance
// frontier. The outer slice is conceptually a map keyed by
// Block.Index. The inner slice is conceptually a set, possibly
// containing duplicates.
//
// TODO(adonovan): opt: measure impact of dups; consider a packed bit
// representation, e.g. big.Int, and bitwise parallel operations for
// the union step in the Children loop.
//
// domFrontier's methods mutate the slice's elements but not its
// length, so their receivers needn't be pointers.
//
type domFrontier [][]*BasicBlock

// add records v as a member of u's dominance frontier.
func (df domFrontier) add(u, v *BasicBlock) {
	df[u.Index] = append(df[u.Index], v)
}
// build builds the dominance frontier df for the dominator (sub)tree
// rooted at u, using the Cytron et al. algorithm.
//
// TODO(adonovan): opt: consider Berlin approach, computing pruned SSA
// by pruning the entire IDF computation, rather than merely pruning
// the DF -> IDF step.
func (df domFrontier) build(u *BasicBlock) {
	// Visit each node of the dominator tree in postorder.
	for _, child := range u.dom.children {
		df.build(child)
	}
	// DF-local: any successor of u that u does not immediately dominate
	// lies on u's frontier.
	for _, succ := range u.Succs {
		if succ.dom.idom != u {
			df.add(u, succ)
		}
	}
	// DF-up: propagate frontier entries from each dominated child.
	for _, child := range u.dom.children {
		// TODO(adonovan): opt: use word-parallel bitwise union.
		for _, frontier := range df[child.Index] {
			if frontier.dom.idom != u {
				df.add(u, frontier)
			}
		}
	}
}
// buildDomFrontier computes the dominance frontier for every block of
// fn, including the recovery block, if any.
func buildDomFrontier(fn *Function) domFrontier {
	frontier := make(domFrontier, len(fn.Blocks))
	frontier.build(fn.Blocks[0])
	if rec := fn.Recover; rec != nil {
		frontier.build(rec)
	}
	return frontier
}
// removeInstr returns refs with every occurrence of instr removed,
// preserving the order of the remaining elements. The filtering is
// done in place; the freed tail of the backing array is nilled out to
// aid the garbage collector.
func removeInstr(refs []Instruction, instr Instruction) []Instruction {
	kept := refs[:0]
	for _, ref := range refs {
		if ref != instr {
			kept = append(kept, ref)
		}
	}
	for i := len(kept); i < len(refs); i++ {
		refs[i] = nil // aid GC
	}
	return kept
}
// lift replaces local and new Allocs accessed only with
// load/store by SSA registers, inserting φ-nodes where necessary.
// The result is a program in classical pruned SSA form.
//
// Preconditions:
// - fn has no dead blocks (blockopt has run).
// - Def/use info (Operands and Referrers) is up-to-date.
// - The dominator tree is up-to-date.
//
func lift(fn *Function) {
	// TODO(adonovan): opt: lots of little optimizations may be
	// worthwhile here, especially if they cause us to avoid
	// buildDomFrontier. For example:
	//
	// - Alloc never loaded? Eliminate.
	// - Alloc never stored? Replace all loads with a zero constant.
	// - Alloc stored once? Replace loads with dominating store;
	//   don't forget that an Alloc is itself an effective store
	//   of zero.
	// - Alloc used only within a single block?
	//   Use degenerate algorithm avoiding φ-nodes.
	// - Consider synergy with scalar replacement of aggregates (SRA).
	//   e.g. *(&x.f) where x is an Alloc.
	//   Perhaps we'd get better results if we generated this as x.f
	//   i.e. Field(x, .f) instead of Load(FieldIndex(x, .f)).
	//   Unclear.
	//
	// But we will start with the simplest correct code.
	df := buildDomFrontier(fn)

	if debugLifting {
		title := false
		for i, blocks := range df {
			if blocks != nil {
				if !title {
					fmt.Fprintf(os.Stderr, "Dominance frontier of %s:\n", fn)
					title = true
				}
				fmt.Fprintf(os.Stderr, "\t%s: %s\n", fn.Blocks[i], blocks)
			}
		}
	}

	newPhis := make(newPhiMap)

	// During this pass we will replace some BasicBlock.Instrs
	// (allocs, loads and stores) with nil, keeping a count in
	// BasicBlock.gaps. At the end we will reset Instrs to the
	// concatenation of all non-dead newPhis and non-nil Instrs
	// for the block, reusing the original array if space permits.

	// While we're here, we also eliminate 'rundefers'
	// instructions in functions that contain no 'defer'
	// instructions.
	usesDefer := false

	// A counter used to generate ~unique ids for Phi nodes, as an
	// aid to debugging. We use large numbers to make them highly
	// visible. All nodes are renumbered later.
	fresh := 1000

	// Determine which allocs we can lift and number them densely.
	// The renaming phase uses this numbering for compact maps.
	numAllocs := 0
	for _, b := range fn.Blocks {
		b.gaps = 0
		b.rundefers = 0
		for _, instr := range b.Instrs {
			switch instr := instr.(type) {
			case *Alloc:
				// Liftable allocs receive a dense index; unliftable
				// allocs are marked -1 and left untouched.
				index := -1
				if liftAlloc(df, instr, newPhis, &fresh) {
					index = numAllocs
					numAllocs++
				}
				instr.index = index
			case *Defer:
				usesDefer = true
			case *RunDefers:
				b.rundefers++
			}
		}
	}

	// renaming maps an alloc (keyed by index) to its replacement
	// value. Initially the renaming contains nil, signifying the
	// zero constant of the appropriate type; we construct the
	// Const lazily at most once on each path through the domtree.
	// TODO(adonovan): opt: cache per-function not per subtree.
	renaming := make([]Value, numAllocs)

	// Renaming.
	rename(fn.Blocks[0], renaming, newPhis)

	// Eliminate dead φ-nodes.
	removeDeadPhis(fn.Blocks, newPhis)

	// Prepend remaining live φ-nodes to each block.
	for _, b := range fn.Blocks {
		nps := newPhis[b]
		j := len(nps)

		// rundefers are only removed when the function has no defers.
		rundefersToKill := b.rundefers
		if usesDefer {
			rundefersToKill = 0
		}

		if j+b.gaps+rundefersToKill == 0 {
			continue // fast path: no new phis or gaps
		}

		// Compact nps + non-nil Instrs into a new slice.
		// TODO(adonovan): opt: compact in situ (rightwards)
		// if Instrs has sufficient space or slack.
		dst := make([]Instruction, len(b.Instrs)+j-b.gaps-rundefersToKill)
		for i, np := range nps {
			dst[i] = np.phi
		}
		for _, instr := range b.Instrs {
			if instr == nil {
				continue
			}
			if !usesDefer {
				if _, ok := instr.(*RunDefers); ok {
					continue
				}
			}
			dst[j] = instr
			j++
		}
		b.Instrs = dst
	}

	// Remove any fn.Locals that were lifted.
	j := 0
	for _, l := range fn.Locals {
		if l.index < 0 {
			fn.Locals[j] = l
			j++
		}
	}
	// Nil out fn.Locals[j:] to aid GC.
	for i := j; i < len(fn.Locals); i++ {
		fn.Locals[i] = nil
	}
	fn.Locals = fn.Locals[:j]
}
// removeDeadPhis removes φ-nodes not transitively needed by a
// non-Phi, non-DebugRef instruction.
func removeDeadPhis(blocks []*BasicBlock, newPhis newPhiMap) {
	// First pass: find the set of "live" φ-nodes: those reachable
	// from some non-Phi instruction.
	//
	// We compute reachability in reverse, starting from each φ,
	// rather than forwards, starting from each live non-Phi
	// instruction, because this way visits much less of the
	// Value graph.
	livePhis := make(map[*Phi]bool)
	for _, npList := range newPhis {
		for _, np := range npList {
			phi := np.phi
			if !livePhis[phi] && phiHasDirectReferrer(phi) {
				markLivePhi(livePhis, phi)
			}
		}
	}

	// Existing φ-nodes due to && and || operators
	// are all considered live (see Go issue 19622).
	for _, b := range blocks {
		for _, phi := range b.phis() {
			markLivePhi(livePhis, phi.(*Phi))
		}
	}

	// Second pass: eliminate unused phis from newPhis.
	// Live entries are compacted to the front of each list in place.
	for block, npList := range newPhis {
		j := 0
		for _, np := range npList {
			if livePhis[np.phi] {
				npList[j] = np
				j++
			} else {
				// discard it, first removing it from referrers
				for _, val := range np.phi.Edges {
					if refs := val.Referrers(); refs != nil {
						*refs = removeInstr(*refs, np.phi)
					}
				}
				// Detach the dead phi from its block.
				np.phi.block = nil
			}
		}
		newPhis[block] = npList[:j]
	}
}
// markLivePhi marks phi live, along with every φ-node transitively
// reachable through its operands.
func markLivePhi(livePhis map[*Phi]bool, phi *Phi) {
	livePhis[phi] = true
	for _, operand := range phi.Operands(nil) {
		q, ok := (*operand).(*Phi)
		if ok && !livePhis[q] {
			markLivePhi(livePhis, q)
		}
	}
}
// phiHasDirectReferrer reports whether some non-Phi instruction
// refers directly to phi. Such instructions are the roots of the
// liveness traversal.
func phiHasDirectReferrer(phi *Phi) bool {
	for _, ref := range *phi.Referrers() {
		switch ref.(type) {
		case *Phi:
			// keep scanning
		default:
			return true
		}
	}
	return false
}
// BlockSet is a set of basic blocks, represented as a bit vector
// indexed by Block.Index. (It inherits methods from big.Int.)
type BlockSet struct{ big.Int }

// Add inserts b into the set and reports whether the set changed.
func (s *BlockSet) Add(b *BasicBlock) bool {
	idx := b.Index
	if s.Bit(idx) != 0 {
		return false
	}
	s.SetBit(&s.Int, idx, 1)
	return true
}

// Has reports whether the set contains b.
func (s *BlockSet) Has(b *BasicBlock) bool {
	// Bit yields 0 or 1 only, so != 0 is equivalent to == 1.
	return s.Bit(b.Index) != 0
}
// Take removes an arbitrary element from the set and returns its
// index, or returns -1 if the set is empty.
func (s *BlockSet) Take() int {
	for i, n := 0, s.BitLen(); i < n; i++ {
		if s.Bit(i) != 0 {
			s.SetBit(&s.Int, i, 0)
			return i
		}
	}
	return -1
}
// newPhi is a pair of a newly introduced φ-node and the lifted Alloc
// it replaces.
type newPhi struct {
	phi   *Phi
	alloc *Alloc
}

// newPhiMap records, for each basic block, the set of newPhis that
// must be prepended to the block's instruction list.
type newPhiMap map[*BasicBlock][]newPhi
// liftAlloc determines whether alloc can be lifted into registers,
// and if so, it populates newPhis with all the φ-nodes it may require
// and returns true.
//
// fresh is a source of fresh ids for phi nodes.
//
func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap, fresh *int) bool {
	// Don't lift aggregates into registers, because we don't have
	// a way to express their zero-constants.
	switch deref(alloc.Type()).Underlying().(type) {
	case *types.Array, *types.Struct:
		return false
	}

	// Don't lift named return values in functions that defer
	// calls that may recover from panic.
	if fn := alloc.Parent(); fn.Recover != nil {
		for _, nr := range fn.namedResults {
			if nr == alloc {
				return false
			}
		}
	}

	// Compute defblocks, the set of blocks containing a
	// definition of the alloc cell.
	var defblocks BlockSet
	for _, instr := range *alloc.Referrers() {
		// Bail out if we discover the alloc is not liftable;
		// the only operations permitted to use the alloc are
		// loads/stores into the cell, and DebugRef.
		switch instr := instr.(type) {
		case *Store:
			if instr.Val == alloc {
				return false // address used as value
			}
			if instr.Addr != alloc {
				panic("Alloc.Referrers is inconsistent")
			}
			defblocks.Add(instr.Block())
		case *UnOp:
			if instr.Op != token.MUL {
				return false // not a load
			}
			if instr.X != alloc {
				panic("Alloc.Referrers is inconsistent")
			}
		case *DebugRef:
			// ok
		default:
			return false // some other instruction
		}
	}
	// The Alloc itself counts as a (zero) definition of the cell.
	defblocks.Add(alloc.Block())

	if debugLifting {
		fmt.Fprintln(os.Stderr, "\tlifting ", alloc, alloc.Name())
	}

	fn := alloc.Parent()

	// Φ-insertion.
	//
	// What follows is the body of the main loop of the insert-φ
	// function described by Cytron et al, but instead of using
	// counter tricks, we just reset the 'hasAlready' and 'work'
	// sets each iteration. These are bitmaps so it's pretty cheap.
	//
	// TODO(adonovan): opt: recycle slice storage for W,
	// hasAlready, defBlocks across liftAlloc calls.
	var hasAlready BlockSet

	// Initialize W and work to defblocks.
	var work BlockSet = defblocks // blocks seen
	var W BlockSet                // blocks to do
	W.Set(&defblocks.Int)

	// Traverse iterated dominance frontier, inserting φ-nodes.
	for i := W.Take(); i != -1; i = W.Take() {
		u := fn.Blocks[i]
		for _, v := range df[u.Index] {
			if hasAlready.Add(v) {
				// Create φ-node.
				// It will be prepended to v.Instrs later, if needed.
				phi := &Phi{
					Edges:   make([]Value, len(v.Preds)),
					Comment: alloc.Comment,
				}
				// This is merely a debugging aid:
				phi.setNum(*fresh)
				*fresh++

				phi.pos = alloc.Pos()
				phi.setType(deref(alloc.Type()))
				phi.block = v
				if debugLifting {
					fmt.Fprintf(os.Stderr, "\tplace %s = %s at block %s\n", phi.Name(), phi, v)
				}
				newPhis[v] = append(newPhis[v], newPhi{phi, alloc})
				if work.Add(v) {
					W.Add(v)
				}
			}
		}
	}
	return true
}
// replaceAll replaces all intraprocedural uses of x with y,
// updating x.Referrers and y.Referrers.
// Precondition: x.Referrers() != nil, i.e. x must be local to some function.
//
func replaceAll(x, y Value) {
	var operands []*Value
	xrefs := x.Referrers()
	yrefs := y.Referrers()
	for _, instr := range *xrefs {
		operands = instr.Operands(operands[:0]) // recycle storage
		for _, op := range operands {
			if *op != nil && *op == x {
				*op = y
			}
		}
		if yrefs != nil {
			*yrefs = append(*yrefs, instr) // dups ok
		}
	}
	*xrefs = nil // x is now unreferenced
}
// renamed returns the value to which alloc is being renamed,
// lazily constructing the implicit zero constant on first use.
func renamed(renaming []Value, alloc *Alloc) Value {
	if v := renaming[alloc.index]; v != nil {
		return v
	}
	zero := zeroConst(deref(alloc.Type()))
	renaming[alloc.index] = zero
	return zero
}
// rename implements the (Cytron et al) SSA renaming algorithm, a
// preorder traversal of the dominator tree replacing all loads of
// Alloc cells with the value stored to that cell by the dominating
// store instruction. For lifting, we need only consider loads,
// stores and φ-nodes.
//
// renaming is a map from *Alloc (keyed by index number) to its
// dominating stored value; newPhis[x] is the set of new φ-nodes to be
// prepended to block x.
//
func rename(u *BasicBlock, renaming []Value, newPhis newPhiMap) {
	// Each φ-node becomes the new name for its associated Alloc.
	for _, np := range newPhis[u] {
		phi := np.phi
		alloc := np.alloc
		renaming[alloc.index] = phi
	}

	// Rename loads and stores of allocs.
	for i, instr := range u.Instrs {
		switch instr := instr.(type) {
		case *Alloc:
			if instr.index >= 0 { // store of zero to Alloc cell
				// Replace dominated loads by the zero value.
				renaming[instr.index] = nil
				if debugLifting {
					fmt.Fprintf(os.Stderr, "\tkill alloc %s\n", instr)
				}
				// Delete the Alloc.
				u.Instrs[i] = nil
				u.gaps++
			}

		case *Store:
			if alloc, ok := instr.Addr.(*Alloc); ok && alloc.index >= 0 { // store to Alloc cell
				// Replace dominated loads by the stored value.
				renaming[alloc.index] = instr.Val
				if debugLifting {
					fmt.Fprintf(os.Stderr, "\tkill store %s; new value: %s\n",
						instr, instr.Val.Name())
				}
				// Remove the store from the referrer list of the stored value.
				if refs := instr.Val.Referrers(); refs != nil {
					*refs = removeInstr(*refs, instr)
				}
				// Delete the Store.
				u.Instrs[i] = nil
				u.gaps++
			}

		case *UnOp:
			if instr.Op == token.MUL {
				if alloc, ok := instr.X.(*Alloc); ok && alloc.index >= 0 { // load of Alloc cell
					newval := renamed(renaming, alloc)
					if debugLifting {
						fmt.Fprintf(os.Stderr, "\tupdate load %s = %s with %s\n",
							instr.Name(), instr, newval.Name())
					}
					// Replace all references to
					// the loaded value by the
					// dominating stored value.
					replaceAll(instr, newval)
					// Delete the Load.
					u.Instrs[i] = nil
					u.gaps++
				}
			}

		case *DebugRef:
			if alloc, ok := instr.X.(*Alloc); ok && alloc.index >= 0 { // ref of Alloc cell
				if instr.IsAddr {
					instr.X = renamed(renaming, alloc)
					instr.IsAddr = false

					// Add DebugRef to instr.X's referrers.
					if refs := instr.X.Referrers(); refs != nil {
						*refs = append(*refs, instr)
					}
				} else {
					// A source expression denotes the address
					// of an Alloc that was optimized away.
					instr.X = nil

					// Delete the DebugRef.
					u.Instrs[i] = nil
					u.gaps++
				}
			}
		}
	}

	// For each φ-node in a CFG successor, rename the edge.
	for _, v := range u.Succs {
		phis := newPhis[v]
		if len(phis) == 0 {
			continue
		}
		i := v.predIndex(u)
		for _, np := range phis {
			phi := np.phi
			alloc := np.alloc
			newval := renamed(renaming, alloc)
			if debugLifting {
				fmt.Fprintf(os.Stderr, "\tsetphi %s edge %s -> %s (#%d) (alloc=%s) := %s\n",
					phi.Name(), u, v, i, alloc.Name(), newval.Name())
			}
			phi.Edges[i] = newval
			if prefs := newval.Referrers(); prefs != nil {
				*prefs = append(*prefs, phi)
			}
		}
	}

	// Continue depth-first recursion over domtree, pushing a
	// fresh copy of the renaming map for each subtree.
	for i, v := range u.dom.children {
		r := renaming
		if i < len(u.dom.children)-1 {
			// On all but the final iteration, we must make
			// a copy to avoid destructive update.
			r = make([]Value, len(renaming))
			copy(r, renaming)
		}
		rename(v, r, newPhis)
	}
}

View File

@ -1,271 +0,0 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ssa
// CreateTestMainPackage synthesizes a main package that runs all the
// tests of the supplied packages.
// It is closely coupled to $GOROOT/src/cmd/go/test.go and $GOROOT/src/testing.
//
// TODO(adonovan): throws this all away now that x/tools/go/packages
// provides access to the actual synthetic test main files.
import (
"bytes"
"fmt"
"go/ast"
"go/parser"
"go/types"
"log"
"os"
"strings"
"text/template"
)
// FindTests returns the Test, Benchmark, and Example functions
// (as defined by "go test") defined in the specified package,
// and its TestMain function, if any.
//
// Deprecated: use x/tools/go/packages to access synthetic testmain packages.
func FindTests(pkg *Package) (tests, benchmarks, examples []*Function, main *Function) {
	prog := pkg.Prog

	// The first two of these may be nil: if the program doesn't import "testing",
	// it can't contain any tests, but it may yet contain Examples.
	var testSig *types.Signature                              // func(*testing.T)
	var benchmarkSig *types.Signature                         // func(*testing.B)
	var exampleSig = types.NewSignature(nil, nil, nil, false) // func()

	// Obtain the types from the parameters of testing.MainStart.
	if testingPkg := prog.ImportedPackage("testing"); testingPkg != nil {
		mainStart := testingPkg.Func("MainStart")
		params := mainStart.Signature.Params()
		testSig = funcField(params.At(1).Type())
		benchmarkSig = funcField(params.At(2).Type())

		// Does the package define this function?
		//   func TestMain(*testing.M)
		if f := pkg.Func("TestMain"); f != nil {
			sig := f.Type().(*types.Signature)
			starM := mainStart.Signature.Results().At(0).Type() // *testing.M
			if sig.Results().Len() == 0 &&
				sig.Params().Len() == 1 &&
				types.Identical(sig.Params().At(0).Type(), starM) {
				main = f
			}
		}
	}

	// TODO(adonovan): use a stable order, e.g. lexical.
	for _, mem := range pkg.Members {
		// Only exported functions defined in _test.go files are candidates.
		if f, ok := mem.(*Function); ok &&
			ast.IsExported(f.Name()) &&
			strings.HasSuffix(prog.Fset.Position(f.Pos()).Filename, "_test.go") {

			switch {
			case testSig != nil && isTestSig(f, "Test", testSig):
				tests = append(tests, f)
			case benchmarkSig != nil && isTestSig(f, "Benchmark", benchmarkSig):
				benchmarks = append(benchmarks, f)
			case isTestSig(f, "Example", exampleSig):
				examples = append(examples, f)
			default:
				continue
			}
		}
	}
	return
}
// isTestSig reports whether f is named like a test (per isTest) and
// has exactly the given signature.
func isTestSig(f *Function, prefix string, sig *types.Signature) bool {
	if !isTest(f.Name(), prefix) {
		return false
	}
	return types.Identical(f.Signature, sig)
}
// Given the type of one of the three slice parameters of testing.Main,
// returns the function type.
func funcField(slice types.Type) *types.Signature {
return slice.(*types.Slice).Elem().Underlying().(*types.Struct).Field(1).Type().(*types.Signature)
}
// isTest tells whether name looks like a test (or benchmark,
// according to prefix). It is a Test (say) if there is a character
// after Test that is not a lower-case letter; we don't want
// TesticularCancer. Plundered from $GOROOT/src/cmd/go/test.go
func isTest(name, prefix string) bool {
	switch {
	case !strings.HasPrefix(name, prefix):
		return false
	case len(name) == len(prefix): // "Test" is ok
		return true
	default:
		return ast.IsExported(name[len(prefix):])
	}
}
// CreateTestMainPackage creates and returns a synthetic "testmain"
// package for the specified package if it defines tests, benchmarks or
// executable examples, or nil otherwise. The new package is named
// "main" and provides a function named "main" that runs the tests,
// similar to the one that would be created by the 'go test' tool.
//
// Subsequent calls to prog.AllPackages include the new package.
// The package pkg must belong to the program prog.
//
// Deprecated: use x/tools/go/packages to access synthetic testmain packages.
func (prog *Program) CreateTestMainPackage(pkg *Package) *Package {
	if pkg.Prog != prog {
		log.Fatal("Package does not belong to Program")
	}

	// Template data
	var data struct {
		Pkg                         *Package
		Tests, Benchmarks, Examples []*Function
		Main                        *Function
		Go18                        bool
	}
	data.Pkg = pkg

	// Enumerate tests.
	data.Tests, data.Benchmarks, data.Examples, data.Main = FindTests(pkg)
	if data.Main == nil &&
		data.Tests == nil && data.Benchmarks == nil && data.Examples == nil {
		return nil
	}

	// Synthesize source for testmain package.
	path := pkg.Pkg.Path() + "$testmain"
	tmpl := testmainTmpl
	if testingPkg := prog.ImportedPackage("testing"); testingPkg != nil {
		// In Go 1.8, testing.MainStart's first argument is an interface, not a func.
		data.Go18 = types.IsInterface(testingPkg.Func("MainStart").Signature.Params().At(0).Type())
	} else {
		// The program does not import "testing", but FindTests
		// returned non-nil, which must mean there were Examples
		// but no Test, Benchmark, or TestMain functions.

		// We'll simply call them from testmain.main; this will
		// ensure they don't panic, but will not check any
		// "Output:" comments.
		// (We should not execute an Example that has no
		// "Output:" comment, but it's impossible to tell here.)
		tmpl = examplesOnlyTmpl
	}
	var buf bytes.Buffer
	if err := tmpl.Execute(&buf, data); err != nil {
		log.Fatalf("internal error expanding template for %s: %v", path, err)
	}
	if false { // debugging
		fmt.Fprintln(os.Stderr, buf.String())
	}

	// Parse and type-check the testmain package.
	f, err := parser.ParseFile(prog.Fset, path+".go", &buf, parser.Mode(0))
	if err != nil {
		log.Fatalf("internal error parsing %s: %v", path, err)
	}
	conf := types.Config{
		DisableUnusedImportCheck: true,
		// Resolve imports against packages already loaded into prog.
		Importer: importer{pkg},
	}
	files := []*ast.File{f}
	info := &types.Info{
		Types:      make(map[ast.Expr]types.TypeAndValue),
		Defs:       make(map[*ast.Ident]types.Object),
		Uses:       make(map[*ast.Ident]types.Object),
		Implicits:  make(map[ast.Node]types.Object),
		Scopes:     make(map[ast.Node]*types.Scope),
		Selections: make(map[*ast.SelectorExpr]*types.Selection),
	}
	testmainPkg, err := conf.Check(path, prog.Fset, files, info)
	if err != nil {
		log.Fatalf("internal error type-checking %s: %v", path, err)
	}

	// Create and build SSA code.
	testmain := prog.CreatePackage(testmainPkg, files, info, false)
	testmain.SetDebugMode(false)
	testmain.Build()
	testmain.Func("main").Synthetic = "test main function"
	testmain.Func("init").Synthetic = "package initializer"
	return testmain
}
// importer is an implementation of types.Importer that resolves paths
// against the packages of an already loaded SSA program.
type importer struct {
	pkg *Package // package under test; may be non-importable
}

// Import returns the program package with the given path, or the
// package under test itself when its own path is requested.
func (imp importer) Import(path string) (*types.Package, error) {
	prog := imp.pkg.Prog
	if p := prog.ImportedPackage(path); p != nil {
		return p.Pkg, nil
	}
	if imp.pkg.Pkg.Path() == path {
		return imp.pkg.Pkg, nil
	}
	return nil, fmt.Errorf("not found") // can't happen
}
// testmainTmpl generates the synthetic main package for programs that
// import "testing". The {{if .Go18}} branch adapts to the Go 1.8
// testing.MainStart signature, whose first parameter is an interface
// rather than a match function.
var testmainTmpl = template.Must(template.New("testmain").Parse(`
package main

import "io"
import "os"
import "testing"
import p {{printf "%q" .Pkg.Pkg.Path}}

{{if .Go18}}
type deps struct{}

func (deps) ImportPath() string { return "" }
func (deps) MatchString(pat, str string) (bool, error) { return true, nil }
func (deps) StartCPUProfile(io.Writer) error { return nil }
func (deps) StartTestLog(io.Writer) {}
func (deps) StopCPUProfile() {}
func (deps) StopTestLog() error { return nil }
func (deps) WriteHeapProfile(io.Writer) error { return nil }
func (deps) WriteProfileTo(string, io.Writer, int) error { return nil }

var match deps
{{else}}
func match(_, _ string) (bool, error) { return true, nil }
{{end}}

func main() {
	tests := []testing.InternalTest{
{{range .Tests}}
		{ {{printf "%q" .Name}}, p.{{.Name}} },
{{end}}
	}
	benchmarks := []testing.InternalBenchmark{
{{range .Benchmarks}}
		{ {{printf "%q" .Name}}, p.{{.Name}} },
{{end}}
	}
	examples := []testing.InternalExample{
{{range .Examples}}
		{Name: {{printf "%q" .Name}}, F: p.{{.Name}}},
{{end}}
	}
	m := testing.MainStart(match, tests, benchmarks, examples)
{{with .Main}}
	p.{{.Name}}(m)
{{else}}
	os.Exit(m.Run())
{{end}}
}
`))
// examplesOnlyTmpl is used when the program does not import "testing":
// the generated main simply calls each Example function, ensuring it
// does not panic (no "Output:" comments are checked).
var examplesOnlyTmpl = template.Must(template.New("examples").Parse(`
package main

import p {{printf "%q" .Pkg.Pkg.Path}}

func main() {
{{range .Examples}}
	p.{{.Name}}()
{{end}}
}
`))

View File

@ -1,5 +0,0 @@
package ssa
// NewJump returns a new Jump instruction belonging to the given
// parent block.
func NewJump(parent *BasicBlock) *Jump {
	return &Jump{anInstruction{parent}}
}

View File

@ -1,58 +0,0 @@
package ssautil
import (
"honnef.co/go/tools/ssa"
)
// Reachable reports whether block "to" can be reached from block
// "from" in the control-flow graph.
func Reachable(from, to *ssa.BasicBlock) bool {
	if from == to || from.Dominates(to) {
		return true
	}
	reached := false
	Walk(from, func(b *ssa.BasicBlock) bool {
		if b != to {
			return true
		}
		reached = true
		return false
	})
	return reached
}
// Walk visits the blocks reachable from b in depth-first order,
// calling fn once per block; when fn returns false the walk does not
// descend into that block's successors.
func Walk(b *ssa.BasicBlock, fn func(*ssa.BasicBlock) bool) {
	visited := make(map[*ssa.BasicBlock]bool)
	stack := []*ssa.BasicBlock{b}
	for len(stack) != 0 {
		cur := stack[len(stack)-1]
		stack = stack[:len(stack)-1]
		if visited[cur] {
			continue
		}
		visited[cur] = true
		if fn(cur) {
			stack = append(stack, cur.Succs...)
		}
	}
}
// Vararg returns the values stored into the varargs slice x, and
// reports whether x indeed originates from a "varargs" Alloc.
func Vararg(x *ssa.Slice) ([]ssa.Value, bool) {
	var out []ssa.Value
	slice, ok := x.X.(*ssa.Alloc)
	if !ok || slice.Comment != "varargs" {
		return nil, false
	}
	for _, ref := range *slice.Referrers() {
		idx, ok := ref.(*ssa.IndexAddr)
		if !ok {
			continue
		}
		// NOTE(review): assumes every IndexAddr of a varargs alloc has
		// exactly one referrer and that it is a *ssa.Store — this would
		// panic otherwise; confirm the invariant holds for all
		// compiler-generated varargs slices.
		v := (*idx.Referrers())[0].(*ssa.Store).Val
		out = append(out, v)
	}
	return out, true
}

View File

@ -1,15 +0,0 @@
# Contributing to staticcheck
## Before filing an issue:
### Are you having trouble building staticcheck?
Check that you have the latest version of its dependencies. Run
```
go get -u honnef.co/go/tools/staticcheck
```
If you still have problems, consider searching for existing issues before filing a new issue.
## Before sending a pull request:
Make sure you understand the purpose of staticcheck: carefully read the `README` before sending a pull request.

View File

@ -1,525 +1,267 @@
package staticcheck
import (
"flag"
"honnef.co/go/tools/facts"
"honnef.co/go/tools/internal/passes/buildssa"
"honnef.co/go/tools/internal/passes/buildir"
"honnef.co/go/tools/lint/lintutil"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
)
func newFlagSet() flag.FlagSet {
fs := flag.NewFlagSet("", flag.PanicOnError)
fs.Var(lintutil.NewVersionFlag(), "go", "Target Go version")
return *fs
func makeCallCheckerAnalyzer(rules map[string]CallCheck, extraReqs ...*analysis.Analyzer) *analysis.Analyzer {
reqs := []*analysis.Analyzer{buildir.Analyzer, facts.TokenFile}
reqs = append(reqs, extraReqs...)
return &analysis.Analyzer{
Run: callChecker(rules),
Requires: reqs,
}
}
var Analyzers = map[string]*analysis.Analyzer{
"SA1000": {
Name: "SA1000",
Run: callChecker(checkRegexpRules),
Doc: Docs["SA1000"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
Flags: newFlagSet(),
},
var Analyzers = lintutil.InitializeAnalyzers(Docs, map[string]*analysis.Analyzer{
"SA1000": makeCallCheckerAnalyzer(checkRegexpRules),
"SA1001": {
Name: "SA1001",
Run: CheckTemplate,
Doc: Docs["SA1001"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
"SA1002": {
Name: "SA1002",
Run: callChecker(checkTimeParseRules),
Doc: Docs["SA1002"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
Flags: newFlagSet(),
},
"SA1003": {
Name: "SA1003",
Run: callChecker(checkEncodingBinaryRules),
Doc: Docs["SA1003"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
Flags: newFlagSet(),
},
"SA1002": makeCallCheckerAnalyzer(checkTimeParseRules),
"SA1003": makeCallCheckerAnalyzer(checkEncodingBinaryRules),
"SA1004": {
Name: "SA1004",
Run: CheckTimeSleepConstant,
Doc: Docs["SA1004"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
"SA1005": {
Name: "SA1005",
Run: CheckExec,
Doc: Docs["SA1005"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
"SA1006": {
Name: "SA1006",
Run: CheckUnsafePrintf,
Doc: Docs["SA1006"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
"SA1007": {
Name: "SA1007",
Run: callChecker(checkURLsRules),
Doc: Docs["SA1007"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
Flags: newFlagSet(),
},
"SA1007": makeCallCheckerAnalyzer(checkURLsRules),
"SA1008": {
Name: "SA1008",
Run: CheckCanonicalHeaderKey,
Doc: Docs["SA1008"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
"SA1010": {
Name: "SA1010",
Run: callChecker(checkRegexpFindAllRules),
Doc: Docs["SA1010"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
Flags: newFlagSet(),
},
"SA1011": {
Name: "SA1011",
Run: callChecker(checkUTF8CutsetRules),
Doc: Docs["SA1011"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
Flags: newFlagSet(),
},
"SA1010": makeCallCheckerAnalyzer(checkRegexpFindAllRules),
"SA1011": makeCallCheckerAnalyzer(checkUTF8CutsetRules),
"SA1012": {
Name: "SA1012",
Run: CheckNilContext,
Doc: Docs["SA1012"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
"SA1013": {
Name: "SA1013",
Run: CheckSeeker,
Doc: Docs["SA1013"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
"SA1014": {
Name: "SA1014",
Run: callChecker(checkUnmarshalPointerRules),
Doc: Docs["SA1014"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
Flags: newFlagSet(),
},
"SA1014": makeCallCheckerAnalyzer(checkUnmarshalPointerRules),
"SA1015": {
Name: "SA1015",
Run: CheckLeakyTimeTick,
Doc: Docs["SA1015"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer},
Flags: newFlagSet(),
Requires: []*analysis.Analyzer{buildir.Analyzer},
},
"SA1016": {
Name: "SA1016",
Run: CheckUntrappableSignal,
Doc: Docs["SA1016"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
"SA1017": {
Name: "SA1017",
Run: callChecker(checkUnbufferedSignalChanRules),
Doc: Docs["SA1017"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
Flags: newFlagSet(),
},
"SA1018": {
Name: "SA1018",
Run: callChecker(checkStringsReplaceZeroRules),
Doc: Docs["SA1018"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
Flags: newFlagSet(),
},
"SA1017": makeCallCheckerAnalyzer(checkUnbufferedSignalChanRules),
"SA1018": makeCallCheckerAnalyzer(checkStringsReplaceZeroRules),
"SA1019": {
Name: "SA1019",
Run: CheckDeprecated,
Doc: Docs["SA1019"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Deprecated},
Flags: newFlagSet(),
},
"SA1020": {
Name: "SA1020",
Run: callChecker(checkListenAddressRules),
Doc: Docs["SA1020"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
Flags: newFlagSet(),
},
"SA1021": {
Name: "SA1021",
Run: callChecker(checkBytesEqualIPRules),
Doc: Docs["SA1021"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
Flags: newFlagSet(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Deprecated, facts.Generated},
},
"SA1020": makeCallCheckerAnalyzer(checkListenAddressRules),
"SA1021": makeCallCheckerAnalyzer(checkBytesEqualIPRules),
"SA1023": {
Name: "SA1023",
Run: CheckWriterBufferModified,
Doc: Docs["SA1023"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer},
Flags: newFlagSet(),
},
"SA1024": {
Name: "SA1024",
Run: callChecker(checkUniqueCutsetRules),
Doc: Docs["SA1024"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
Flags: newFlagSet(),
Requires: []*analysis.Analyzer{buildir.Analyzer},
},
"SA1024": makeCallCheckerAnalyzer(checkUniqueCutsetRules),
"SA1025": {
Name: "SA1025",
Run: CheckTimerResetReturnValue,
Doc: Docs["SA1025"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer},
Flags: newFlagSet(),
},
"SA1026": {
Name: "SA1026",
Run: callChecker(checkUnsupportedMarshal),
Doc: Docs["SA1026"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
Flags: newFlagSet(),
},
"SA1027": {
Name: "SA1027",
Run: callChecker(checkAtomicAlignment),
Doc: Docs["SA1027"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
Flags: newFlagSet(),
Requires: []*analysis.Analyzer{buildir.Analyzer},
},
"SA1026": makeCallCheckerAnalyzer(checkUnsupportedMarshal),
"SA1027": makeCallCheckerAnalyzer(checkAtomicAlignment),
"SA1028": makeCallCheckerAnalyzer(checkSortSliceRules),
"SA1029": makeCallCheckerAnalyzer(checkWithValueKeyRules),
"SA2000": {
Name: "SA2000",
Run: CheckWaitgroupAdd,
Doc: Docs["SA2000"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
"SA2001": {
Name: "SA2001",
Run: CheckEmptyCriticalSection,
Doc: Docs["SA2001"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
"SA2002": {
Name: "SA2002",
Run: CheckConcurrentTesting,
Doc: Docs["SA2002"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer},
Flags: newFlagSet(),
Requires: []*analysis.Analyzer{buildir.Analyzer},
},
"SA2003": {
Name: "SA2003",
Run: CheckDeferLock,
Doc: Docs["SA2003"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer},
Flags: newFlagSet(),
Requires: []*analysis.Analyzer{buildir.Analyzer},
},
"SA3000": {
Name: "SA3000",
Run: CheckTestMainExit,
Doc: Docs["SA3000"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
"SA3001": {
Name: "SA3001",
Run: CheckBenchmarkN,
Doc: Docs["SA3001"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
"SA4000": {
Name: "SA4000",
Run: CheckLhsRhsIdentical,
Doc: Docs["SA4000"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.TokenFile, facts.Generated},
Flags: newFlagSet(),
},
"SA4001": {
Name: "SA4001",
Run: CheckIneffectiveCopy,
Doc: Docs["SA4001"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
"SA4002": {
Name: "SA4002",
Run: CheckDiffSizeComparison,
Doc: Docs["SA4002"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
Flags: newFlagSet(),
},
"SA4003": {
Name: "SA4003",
Run: CheckExtremeComparison,
Doc: Docs["SA4003"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
"SA4004": {
Name: "SA4004",
Run: CheckIneffectiveLoop,
Doc: Docs["SA4004"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
"SA4006": {
Name: "SA4006",
Run: CheckUnreadVariableValues,
Doc: Docs["SA4006"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer, facts.Generated},
Flags: newFlagSet(),
Requires: []*analysis.Analyzer{buildir.Analyzer, facts.Generated},
},
"SA4008": {
Name: "SA4008",
Run: CheckLoopCondition,
Doc: Docs["SA4008"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer},
Flags: newFlagSet(),
Requires: []*analysis.Analyzer{buildir.Analyzer},
},
"SA4009": {
Name: "SA4009",
Run: CheckArgOverwritten,
Doc: Docs["SA4009"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer},
Flags: newFlagSet(),
Requires: []*analysis.Analyzer{buildir.Analyzer},
},
"SA4010": {
Name: "SA4010",
Run: CheckIneffectiveAppend,
Doc: Docs["SA4010"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer},
Flags: newFlagSet(),
Requires: []*analysis.Analyzer{buildir.Analyzer},
},
"SA4011": {
Name: "SA4011",
Run: CheckScopedBreak,
Doc: Docs["SA4011"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
"SA4012": {
Name: "SA4012",
Run: CheckNaNComparison,
Doc: Docs["SA4012"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer},
Flags: newFlagSet(),
Requires: []*analysis.Analyzer{buildir.Analyzer},
},
"SA4013": {
Name: "SA4013",
Run: CheckDoubleNegation,
Doc: Docs["SA4013"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
"SA4014": {
Name: "SA4014",
Run: CheckRepeatedIfElse,
Doc: Docs["SA4014"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
"SA4015": {
Name: "SA4015",
Run: callChecker(checkMathIntRules),
Doc: Docs["SA4015"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
Flags: newFlagSet(),
},
"SA4015": makeCallCheckerAnalyzer(checkMathIntRules),
"SA4016": {
Name: "SA4016",
Run: CheckSillyBitwiseOps,
Doc: Docs["SA4016"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer, facts.TokenFile},
Flags: newFlagSet(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.TokenFile},
},
"SA4017": {
Name: "SA4017",
Run: CheckPureFunctions,
Doc: Docs["SA4017"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer, facts.Purity},
Flags: newFlagSet(),
Requires: []*analysis.Analyzer{buildir.Analyzer, facts.Purity},
},
"SA4018": {
Name: "SA4018",
Run: CheckSelfAssignment,
Doc: Docs["SA4018"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated, facts.TokenFile},
Flags: newFlagSet(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated, facts.TokenFile, facts.Purity},
},
"SA4019": {
Name: "SA4019",
Run: CheckDuplicateBuildConstraints,
Doc: Docs["SA4019"].String(),
Requires: []*analysis.Analyzer{facts.Generated},
Flags: newFlagSet(),
},
"SA4020": {
Name: "SA4020",
Run: CheckUnreachableTypeCases,
Doc: Docs["SA4020"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
"SA4021": {
Name: "SA4021",
Run: CheckSingleArgAppend,
Doc: Docs["SA4021"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated, facts.TokenFile},
Flags: newFlagSet(),
},
"SA5000": {
Name: "SA5000",
Run: CheckNilMaps,
Doc: Docs["SA5000"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer},
Flags: newFlagSet(),
Requires: []*analysis.Analyzer{buildir.Analyzer},
},
"SA5001": {
Name: "SA5001",
Run: CheckEarlyDefer,
Doc: Docs["SA5001"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
"SA5002": {
Name: "SA5002",
Run: CheckInfiniteEmptyLoop,
Doc: Docs["SA5002"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
"SA5003": {
Name: "SA5003",
Run: CheckDeferInInfiniteLoop,
Doc: Docs["SA5003"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
"SA5004": {
Name: "SA5004",
Run: CheckLoopEmptyDefault,
Doc: Docs["SA5004"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
"SA5005": {
Name: "SA5005",
Run: CheckCyclicFinalizer,
Doc: Docs["SA5005"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer},
Flags: newFlagSet(),
Requires: []*analysis.Analyzer{buildir.Analyzer},
},
"SA5007": {
Name: "SA5007",
Run: CheckInfiniteRecursion,
Doc: Docs["SA5007"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer},
Flags: newFlagSet(),
Requires: []*analysis.Analyzer{buildir.Analyzer},
},
"SA5008": {
Name: "SA5008",
Run: CheckStructTags,
Doc: Docs["SA5008"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
"SA5009": {
Name: "SA5009",
Run: callChecker(checkPrintfRules),
Doc: Docs["SA5009"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
Flags: newFlagSet(),
"SA5009": makeCallCheckerAnalyzer(checkPrintfRules),
"SA5010": {
Run: CheckImpossibleTypeAssertion,
Requires: []*analysis.Analyzer{buildir.Analyzer, facts.TokenFile},
},
"SA5011": {
Run: CheckMaybeNil,
Requires: []*analysis.Analyzer{buildir.Analyzer},
},
"SA6000": {
Name: "SA6000",
Run: callChecker(checkRegexpMatchLoopRules),
Doc: Docs["SA6000"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
Flags: newFlagSet(),
},
"SA6000": makeCallCheckerAnalyzer(checkRegexpMatchLoopRules),
"SA6001": {
Name: "SA6001",
Run: CheckMapBytesKey,
Doc: Docs["SA6001"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer},
Flags: newFlagSet(),
},
"SA6002": {
Name: "SA6002",
Run: callChecker(checkSyncPoolValueRules),
Doc: Docs["SA6002"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer},
Flags: newFlagSet(),
Requires: []*analysis.Analyzer{buildir.Analyzer},
},
"SA6002": makeCallCheckerAnalyzer(checkSyncPoolValueRules),
"SA6003": {
Name: "SA6003",
Run: CheckRangeStringRunes,
Doc: Docs["SA6003"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer},
Flags: newFlagSet(),
Requires: []*analysis.Analyzer{buildir.Analyzer},
},
"SA6005": {
Name: "SA6005",
Run: CheckToLowerToUpperComparison,
Doc: Docs["SA6005"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
"SA9001": {
Name: "SA9001",
Run: CheckDubiousDeferInChannelRangeLoop,
Doc: Docs["SA9001"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
"SA9002": {
Name: "SA9002",
Run: CheckNonOctalFileMode,
Doc: Docs["SA9002"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
"SA9003": {
Name: "SA9003",
Run: CheckEmptyBranch,
Doc: Docs["SA9003"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer, facts.TokenFile, facts.Generated},
Flags: newFlagSet(),
Requires: []*analysis.Analyzer{buildir.Analyzer, facts.TokenFile, facts.Generated},
},
"SA9004": {
Name: "SA9004",
Run: CheckMissingEnumTypesInDeclaration,
Doc: Docs["SA9004"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
// Filtering generated code because it may include empty structs generated from data models.
"SA9005": {
Name: "SA9005",
Run: callChecker(checkNoopMarshal),
Doc: Docs["SA9005"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer, facts.Generated, facts.TokenFile},
Flags: newFlagSet(),
"SA9005": makeCallCheckerAnalyzer(checkNoopMarshal, facts.Generated),
"SA4022": {
Run: CheckAddressIsNil,
Requires: []*analysis.Analyzer{inspect.Analyzer},
},
}
})

View File

@ -4,12 +4,12 @@ import (
"go/ast"
"strings"
. "honnef.co/go/tools/lint/lintdsl"
"honnef.co/go/tools/code"
)
func buildTags(f *ast.File) [][]string {
var out [][]string
for _, line := range strings.Split(Preamble(f), "\n") {
for _, line := range strings.Split(code.Preamble(f), "\n") {
if !strings.HasPrefix(line, "+build ") {
continue
}

View File

@ -3,22 +3,22 @@ package staticcheck
import "honnef.co/go/tools/lint"
var Docs = map[string]*lint.Documentation{
"SA1000": &lint.Documentation{
"SA1000": {
Title: `Invalid regular expression`,
Since: "2017.1",
},
"SA1001": &lint.Documentation{
"SA1001": {
Title: `Invalid template`,
Since: "2017.1",
},
"SA1002": &lint.Documentation{
"SA1002": {
Title: `Invalid format in time.Parse`,
Since: "2017.1",
},
"SA1003": &lint.Documentation{
"SA1003": {
Title: `Unsupported argument to functions in encoding/binary`,
Text: `The encoding/binary package can only serialize types with known sizes.
This precludes the use of the int and uint types, as their sizes
@ -29,7 +29,7 @@ Before Go 1.8, bool wasn't supported, either.`,
Since: "2017.1",
},
"SA1004": &lint.Documentation{
"SA1004": {
Title: `Suspiciously small untyped constant in time.Sleep`,
Text: `The time.Sleep function takes a time.Duration as its only argument.
Durations are expressed in nanoseconds. Thus, calling time.Sleep(1)
@ -41,12 +41,12 @@ large durations. These can be combined with arithmetic to express
arbitrary durations, for example '5 * time.Second' for 5 seconds.
If you truly meant to sleep for a tiny amount of time, use
'n * time.Nanosecond' to signal to staticcheck that you did mean to sleep
'n * time.Nanosecond' to signal to Staticcheck that you did mean to sleep
for some amount of nanoseconds.`,
Since: "2017.1",
},
"SA1005": &lint.Documentation{
"SA1005": {
Title: `Invalid first argument to exec.Command`,
Text: `os/exec runs programs directly (using variants of the fork and exec
system calls on Unix systems). This shouldn't be confused with running
@ -69,7 +69,7 @@ Windows, will have a /bin/sh program:
Since: "2017.1",
},
"SA1006": &lint.Documentation{
"SA1006": {
Title: `Printf with dynamic first argument and no further arguments`,
Text: `Using fmt.Printf with a dynamic first argument can lead to unexpected
output. The first argument is a format string, where certain character
@ -93,12 +93,12 @@ and pass the string as an argument.`,
Since: "2017.1",
},
"SA1007": &lint.Documentation{
"SA1007": {
Title: `Invalid URL in net/url.Parse`,
Since: "2017.1",
},
"SA1008": &lint.Documentation{
"SA1008": {
Title: `Non-canonical key in http.Header map`,
Text: `Keys in http.Header maps are canonical, meaning they follow a specific
combination of uppercase and lowercase letters. Methods such as
@ -123,39 +123,39 @@ http.CanonicalHeaderKey.`,
Since: "2017.1",
},
"SA1010": &lint.Documentation{
"SA1010": {
Title: `(*regexp.Regexp).FindAll called with n == 0, which will always return zero results`,
Text: `If n >= 0, the function returns at most n matches/submatches. To
return all results, specify a negative number.`,
Since: "2017.1",
},
"SA1011": &lint.Documentation{
"SA1011": {
Title: `Various methods in the strings package expect valid UTF-8, but invalid input is provided`,
Since: "2017.1",
},
"SA1012": &lint.Documentation{
"SA1012": {
Title: `A nil context.Context is being passed to a function, consider using context.TODO instead`,
Since: "2017.1",
},
"SA1013": &lint.Documentation{
"SA1013": {
Title: `io.Seeker.Seek is being called with the whence constant as the first argument, but it should be the second`,
Since: "2017.1",
},
"SA1014": &lint.Documentation{
"SA1014": {
Title: `Non-pointer value passed to Unmarshal or Decode`,
Since: "2017.1",
},
"SA1015": &lint.Documentation{
"SA1015": {
Title: `Using time.Tick in a way that will leak. Consider using time.NewTicker, and only use time.Tick in tests, commands and endless functions`,
Since: "2017.1",
},
"SA1016": &lint.Documentation{
"SA1016": {
Title: `Trapping a signal that cannot be trapped`,
		Text: `Not all signals can be intercepted by a process. Specifically, on
UNIX-like systems, the syscall.SIGKILL and syscall.SIGSTOP signals are
@ -164,7 +164,7 @@ kernel. It is therefore pointless to try and handle these signals.`,
Since: "2017.1",
},
"SA1017": &lint.Documentation{
"SA1017": {
Title: `Channels used with os/signal.Notify should be buffered`,
Text: `The os/signal package uses non-blocking channel sends when delivering
signals. If the receiving end of the channel isn't ready and the
@ -175,24 +175,24 @@ signal value, a buffer of size 1 is sufficient.`,
Since: "2017.1",
},
"SA1018": &lint.Documentation{
"SA1018": {
Title: `strings.Replace called with n == 0, which does nothing`,
Text: `With n == 0, zero instances will be replaced. To replace all
instances, use a negative number, or use strings.ReplaceAll.`,
Since: "2017.1",
},
"SA1019": &lint.Documentation{
"SA1019": {
Title: `Using a deprecated function, variable, constant or field`,
Since: "2017.1",
},
"SA1020": &lint.Documentation{
"SA1020": {
Title: `Using an invalid host:port pair with a net.Listen-related function`,
Since: "2017.1",
},
"SA1021": &lint.Documentation{
"SA1021": {
Title: `Using bytes.Equal to compare two net.IP`,
Text: `A net.IP stores an IPv4 or IPv6 address as a slice of bytes. The
length of the slice for an IPv4 address, however, can be either 4 or
@ -202,13 +202,13 @@ be used, as it takes both representations into account.`,
Since: "2017.1",
},
"SA1023": &lint.Documentation{
"SA1023": {
Title: `Modifying the buffer in an io.Writer implementation`,
Text: `Write must not modify the slice data, even temporarily.`,
Since: "2017.1",
},
"SA1024": &lint.Documentation{
"SA1024": {
Title: `A string cutset contains duplicate characters`,
Text: `The strings.TrimLeft and strings.TrimRight functions take cutsets, not
prefixes. A cutset is treated as a set of characters to remove from a
@ -223,17 +223,17 @@ In order to remove one string from another, use strings.TrimPrefix instead.`,
Since: "2017.1",
},
"SA1025": &lint.Documentation{
"SA1025": {
Title: `It is not possible to use (*time.Timer).Reset's return value correctly`,
Since: "2019.1",
},
"SA1026": &lint.Documentation{
"SA1026": {
Title: `Cannot marshal channels or functions`,
Since: "2019.2",
},
"SA1027": &lint.Documentation{
"SA1027": {
Title: `Atomic access to 64-bit variable must be 64-bit aligned`,
Text: `On ARM, x86-32, and 32-bit MIPS, it is the caller's responsibility to
arrange for 64-bit alignment of 64-bit words accessed atomically. The
@ -245,12 +245,32 @@ in a struct.`,
Since: "2019.2",
},
"SA2000": &lint.Documentation{
"SA1028": {
Title: `sort.Slice can only be used on slices`,
Text: `The first argument of sort.Slice must be a slice.`,
Since: "2020.1",
},
"SA1029": {
Title: `Inappropriate key in call to context.WithValue`,
Text: `The provided key must be comparable and should not be
of type string or any other built-in type to avoid collisions between
packages using context. Users of WithValue should define their own
types for keys.
To avoid allocating when assigning to an interface{},
context keys often have concrete type struct{}. Alternatively,
exported context key variables' static type should be a pointer or
interface.`,
Since: "2020.1",
},
"SA2000": {
Title: `sync.WaitGroup.Add called inside the goroutine, leading to a race condition`,
Since: "2017.1",
},
"SA2001": &lint.Documentation{
"SA2001": {
Title: `Empty critical section, did you mean to defer the unlock?`,
Text: `Empty critical sections of the kind
@ -271,17 +291,17 @@ rare false positive.`,
Since: "2017.1",
},
"SA2002": &lint.Documentation{
"SA2002": {
Title: `Called testing.T.FailNow or SkipNow in a goroutine, which isn't allowed`,
Since: "2017.1",
},
"SA2003": &lint.Documentation{
"SA2003": {
Title: `Deferred Lock right after locking, likely meant to defer Unlock instead`,
Since: "2017.1",
},
"SA3000": &lint.Documentation{
"SA3000": {
Title: `TestMain doesn't call os.Exit, hiding test failures`,
Text: `Test executables (and in turn 'go test') exit with a non-zero status
code if any tests failed. When specifying your own TestMain function,
@ -292,7 +312,7 @@ os.Exit(m.Run()).`,
Since: "2017.1",
},
"SA3001": &lint.Documentation{
"SA3001": {
Title: `Assigning to b.N in benchmarks distorts the results`,
Text: `The testing package dynamically sets b.N to improve the reliability of
benchmarks and uses it in computations to determine the duration of a
@ -301,102 +321,102 @@ falsify results.`,
Since: "2017.1",
},
"SA4000": &lint.Documentation{
"SA4000": {
Title: `Boolean expression has identical expressions on both sides`,
Since: "2017.1",
},
"SA4001": &lint.Documentation{
"SA4001": {
Title: `&*x gets simplified to x, it does not copy x`,
Since: "2017.1",
},
"SA4002": &lint.Documentation{
"SA4002": {
Title: `Comparing strings with known different sizes has predictable results`,
Since: "2017.1",
},
"SA4003": &lint.Documentation{
"SA4003": {
Title: `Comparing unsigned values against negative values is pointless`,
Since: "2017.1",
},
"SA4004": &lint.Documentation{
"SA4004": {
Title: `The loop exits unconditionally after one iteration`,
Since: "2017.1",
},
"SA4005": &lint.Documentation{
"SA4005": {
Title: `Field assignment that will never be observed. Did you mean to use a pointer receiver?`,
Since: "2017.1",
},
"SA4006": &lint.Documentation{
"SA4006": {
Title: `A value assigned to a variable is never read before being overwritten. Forgotten error check or dead code?`,
Since: "2017.1",
},
"SA4008": &lint.Documentation{
"SA4008": {
Title: `The variable in the loop condition never changes, are you incrementing the wrong variable?`,
Since: "2017.1",
},
"SA4009": &lint.Documentation{
"SA4009": {
Title: `A function argument is overwritten before its first use`,
Since: "2017.1",
},
"SA4010": &lint.Documentation{
"SA4010": {
Title: `The result of append will never be observed anywhere`,
Since: "2017.1",
},
"SA4011": &lint.Documentation{
"SA4011": {
Title: `Break statement with no effect. Did you mean to break out of an outer loop?`,
Since: "2017.1",
},
"SA4012": &lint.Documentation{
"SA4012": {
Title: `Comparing a value against NaN even though no value is equal to NaN`,
Since: "2017.1",
},
"SA4013": &lint.Documentation{
"SA4013": {
Title: `Negating a boolean twice (!!b) is the same as writing b. This is either redundant, or a typo.`,
Since: "2017.1",
},
"SA4014": &lint.Documentation{
"SA4014": {
Title: `An if/else if chain has repeated conditions and no side-effects; if the condition didn't match the first time, it won't match the second time, either`,
Since: "2017.1",
},
"SA4015": &lint.Documentation{
"SA4015": {
Title: `Calling functions like math.Ceil on floats converted from integers doesn't do anything useful`,
Since: "2017.1",
},
"SA4016": &lint.Documentation{
"SA4016": {
Title: `Certain bitwise operations, such as x ^ 0, do not do anything useful`,
Since: "2017.1",
},
"SA4017": &lint.Documentation{
"SA4017": {
Title: `A pure function's return value is discarded, making the call pointless`,
Since: "2017.1",
},
"SA4018": &lint.Documentation{
"SA4018": {
Title: `Self-assignment of variables`,
Since: "2017.1",
},
"SA4019": &lint.Documentation{
"SA4019": {
Title: `Multiple, identical build constraints in the same file`,
Since: "2017.1",
},
"SA4020": &lint.Documentation{
"SA4020": {
Title: `Unreachable case clause in a type switch`,
Text: `In a type switch like the following
@ -467,27 +487,33 @@ and therefore doSomething()'s return value implements both.`,
Since: "2019.2",
},
"SA4021": &lint.Documentation{
"SA4021": {
Title: `x = append(y) is equivalent to x = y`,
Since: "2019.2",
},
"SA5000": &lint.Documentation{
"SA4022": {
Title: `Comparing the address of a variable against nil`,
Text: `Code such as 'if &x == nil' is meaningless, because taking the address of a variable always yields a non-nil pointer.`,
Since: "2020.1",
},
"SA5000": {
Title: `Assignment to nil map`,
Since: "2017.1",
},
"SA5001": &lint.Documentation{
"SA5001": {
		Title: `Deferring Close before checking for a possible error`,
Since: "2017.1",
},
"SA5002": &lint.Documentation{
"SA5002": {
Title: `The empty for loop (for {}) spins and can block the scheduler`,
Since: "2017.1",
},
"SA5003": &lint.Documentation{
"SA5003": {
Title: `Defers in infinite loops will never execute`,
Text: `Defers are scoped to the surrounding function, not the surrounding
block. In a function that never returns, i.e. one containing an
@ -495,12 +521,12 @@ infinite loop, defers will never execute.`,
Since: "2017.1",
},
"SA5004": &lint.Documentation{
"SA5004": {
Title: `for { select { ... with an empty default branch spins`,
Since: "2017.1",
},
"SA5005": &lint.Documentation{
"SA5005": {
Title: `The finalizer references the finalized object, preventing garbage collection`,
Text: `A finalizer is a function associated with an object that runs when the
garbage collector is ready to collect said object, that is when the
@ -516,12 +542,12 @@ to zero before the object is being passed to the finalizer.`,
Since: "2017.1",
},
"SA5006": &lint.Documentation{
"SA5006": {
Title: `Slice index out of bounds`,
Since: "2017.1",
},
"SA5007": &lint.Documentation{
"SA5007": {
Title: `Infinite recursive call`,
Text: `A function that calls itself recursively needs to have an exit
condition. Otherwise it will recurse forever, until the system runs
@ -535,22 +561,112 @@ should be used instead.`,
Since: "2017.1",
},
"SA5008": &lint.Documentation{
"SA5008": {
Title: `Invalid struct tag`,
Since: "2019.2",
},
"SA5009": &lint.Documentation{
"SA5009": {
Title: `Invalid Printf call`,
Since: "2019.2",
},
"SA6000": &lint.Documentation{
"SA5010": {
Title: `Impossible type assertion`,
Text: `Some type assertions can be statically proven to be
impossible. This is the case when the method sets of both
arguments of the type assertion conflict with each other, for
example by containing the same method with different
signatures.
The Go compiler already applies this check when asserting from an
interface value to a concrete type. If the concrete type misses
methods from the interface, or if function signatures don't match,
then the type assertion can never succeed.
This check applies the same logic when asserting from one interface to
another. If both interface types contain the same method but with
different signatures, then the type assertion can never succeed,
either.`,
Since: "2020.1",
},
"SA5011": {
Title: `Possible nil pointer dereference`,
Text: `A pointer is being dereferenced unconditionally, while
also being checked against nil in another place. This suggests that
the pointer may be nil and dereferencing it may panic. This is
commonly a result of improperly ordered code or missing return
statements. Consider the following examples:
func fn(x *int) {
fmt.Println(*x)
// This nil check is equally important for the previous dereference
if x != nil {
foo(*x)
}
}
func TestFoo(t *testing.T) {
x := compute()
if x == nil {
t.Errorf("nil pointer received")
}
// t.Errorf does not abort the test, so if x is nil, the next line will panic.
foo(*x)
}
Staticcheck tries to deduce which functions abort control flow.
For example, it is aware that a function will not continue
execution after a call to panic or log.Fatal. However, sometimes
this detection fails, in particular in the presence of
conditionals. Consider the following example:
func Log(msg string, level int) {
fmt.Println(msg)
if level == levelFatal {
os.Exit(1)
}
}
func Fatal(msg string) {
Log(msg, levelFatal)
}
func fn(x *int) {
if x == nil {
Fatal("unexpected nil pointer")
}
fmt.Println(*x)
}
Staticcheck will flag the dereference of x, even though it is perfectly
safe. Staticcheck is not able to deduce that a call to
Fatal will exit the program. For the time being, the easiest
workaround is to modify the definition of Fatal like so:
func Fatal(msg string) {
Log(msg, levelFatal)
panic("unreachable")
}
We also hard-code functions from common logging packages such as
logrus. Please file an issue if we're missing support for a
popular package.`,
Since: "2020.1",
},
"SA6000": {
Title: `Using regexp.Match or related in a loop, should use regexp.Compile`,
Since: "2017.1",
},
"SA6001": &lint.Documentation{
"SA6001": {
Title: `Missing an optimization opportunity when indexing maps by byte slices`,
Text: `Map keys must be comparable, which precludes the use of byte slices.
@ -580,7 +696,7 @@ f5f5a8b6209f84961687d993b93ea0d397f5d5bf in the Go repository.`,
Since: "2017.1",
},
"SA6002": &lint.Documentation{
"SA6002": {
Title: `Storing non-pointer values in sync.Pool allocates memory`,
Text: `A sync.Pool is used to avoid unnecessary allocations and reduce the
amount of work the garbage collector has to do.
@ -597,7 +713,7 @@ that discuss this problem.`,
Since: "2017.1",
},
"SA6003": &lint.Documentation{
"SA6003": {
Title: `Converting a string to a slice of runes before ranging over it`,
Text: `You may want to loop over the runes in a string. Instead of converting
the string to a slice of runes and looping over that, you can loop
@ -619,7 +735,7 @@ the slice of runes.`,
Since: "2017.1",
},
"SA6005": &lint.Documentation{
"SA6005": {
Title: `Inefficient string comparison with strings.ToLower or strings.ToUpper`,
Text: `Converting two strings to the same case and comparing them like so
@ -643,22 +759,22 @@ https://blog.digitalocean.com/how-to-efficiently-compare-strings-in-go/`,
Since: "2019.2",
},
"SA9001": &lint.Documentation{
"SA9001": {
Title: `Defers in range loops may not run when you expect them to`,
Since: "2017.1",
},
"SA9002": &lint.Documentation{
"SA9002": {
Title: `Using a non-octal os.FileMode that looks like it was meant to be in octal.`,
Since: "2017.1",
},
"SA9003": &lint.Documentation{
"SA9003": {
Title: `Empty body in an if or else branch`,
Since: "2017.1",
},
"SA9004": &lint.Documentation{
"SA9004": {
Title: `Only the first constant has an explicit type`,
Text: `In a constant declaration such as the following:
@ -750,7 +866,7 @@ as EnumSecond has no explicit type, and thus defaults to int.`,
Since: "2019.1",
},
"SA9005": &lint.Documentation{
"SA9005": {
Title: `Trying to marshal a struct with no public fields nor custom marshaling`,
Text: `The encoding/json and encoding/xml packages only operate on exported
fields in structs, not unexported ones. It is usually an error to try

View File

@ -1,25 +0,0 @@
package staticcheck
import (
"reflect"
"golang.org/x/tools/go/analysis"
"honnef.co/go/tools/internal/passes/buildssa"
"honnef.co/go/tools/ssa"
"honnef.co/go/tools/staticcheck/vrp"
)
// valueRangesAnalyzer runs value-range propagation (VRP) over every
// function of a package. It builds and solves a VRP constraint graph per
// SSA function and publishes the result as a map from function to its
// solved ranges, so dependent checks can query numeric/length bounds of
// SSA values.
var valueRangesAnalyzer = &analysis.Analyzer{
	Name: "vrp",
	Doc:  "calculate value ranges of functions",
	Run: func(pass *analysis.Pass) (interface{}, error) {
		m := map[*ssa.Function]vrp.Ranges{}
		// SrcFuncs covers all functions built from source in this package.
		for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
			vr := vrp.BuildGraph(ssafn).Solve()
			m[ssafn] = vr
		}
		return m, nil
	},
	Requires:   []*analysis.Analyzer{buildssa.Analyzer},
	ResultType: reflect.TypeOf(map[*ssa.Function]vrp.Ranges{}),
}

File diff suppressed because it is too large Load Diff

View File

@ -14,9 +14,8 @@ import (
"unicode/utf8"
"golang.org/x/tools/go/analysis"
. "honnef.co/go/tools/lint/lintdsl"
"honnef.co/go/tools/ssa"
"honnef.co/go/tools/staticcheck/vrp"
"honnef.co/go/tools/code"
"honnef.co/go/tools/ir"
)
const (
@ -27,10 +26,10 @@ const (
type Call struct {
Pass *analysis.Pass
Instr ssa.CallInstruction
Instr ir.CallInstruction
Args []*Argument
Parent *ssa.Function
Parent *ir.Function
invalids []string
}
@ -44,22 +43,21 @@ type Argument struct {
invalids []string
}
type Value struct {
Value ir.Value
}
func (arg *Argument) Invalid(msg string) {
arg.invalids = append(arg.invalids, msg)
}
type Value struct {
Value ssa.Value
Range vrp.Range
}
type CallCheck func(call *Call)
func extractConsts(v ssa.Value) []*ssa.Const {
func extractConsts(v ir.Value) []*ir.Const {
switch v := v.(type) {
case *ssa.Const:
return []*ssa.Const{v}
case *ssa.MakeInterface:
case *ir.Const:
return []*ir.Const{v}
case *ir.MakeInterface:
return extractConsts(v.X)
default:
return nil
@ -118,20 +116,6 @@ func ValidateURL(v Value) error {
return nil
}
func IntValue(v Value, z vrp.Z) bool {
r, ok := v.Range.(vrp.IntInterval)
if !ok || !r.IsKnown() {
return false
}
if r.Lower != r.Upper {
return false
}
if r.Lower.Cmp(z) == 0 {
return true
}
return false
}
func InvalidUTF8(v Value) bool {
for _, c := range extractConsts(v.Value) {
if c.Value == nil {
@ -149,13 +133,21 @@ func InvalidUTF8(v Value) bool {
}
func UnbufferedChannel(v Value) bool {
r, ok := v.Range.(vrp.ChannelInterval)
if !ok || !r.IsKnown() {
// TODO(dh): this check of course misses many cases of unbuffered
// channels, such as any in phi or sigma nodes. We'll eventually
// replace this function.
val := v.Value
if ct, ok := val.(*ir.ChangeType); ok {
val = ct.X
}
mk, ok := val.(*ir.MakeChan)
if !ok {
return false
}
if r.Size.Lower.Cmp(vrp.NewZ(0)) == 0 &&
r.Size.Upper.Cmp(vrp.NewZ(0)) == 0 {
return true
if k, ok := mk.Size.(*ir.Const); ok && k.Value.Kind() == constant.Int {
if v, ok := constant.Int64Val(k.Value); ok && v == 0 {
return true
}
}
return false
}
@ -169,7 +161,7 @@ func Pointer(v Value) bool {
}
func ConvertedFromInt(v Value) bool {
conv, ok := v.Value.(*ssa.Convert)
conv, ok := v.Value.(*ir.Convert)
if !ok {
return false
}
@ -193,7 +185,7 @@ func validEncodingBinaryType(pass *analysis.Pass, typ types.Type) bool {
types.Float32, types.Float64, types.Complex64, types.Complex128, types.Invalid:
return true
case types.Bool:
return IsGoVersion(pass, 8)
return code.IsGoVersion(pass, 8)
}
return false
case *types.Struct:
@ -232,8 +224,10 @@ func CanBinaryMarshal(pass *analysis.Pass, v Value) bool {
func RepeatZeroTimes(name string, arg int) CallCheck {
return func(call *Call) {
arg := call.Args[arg]
if IntValue(arg.Value, vrp.NewZ(0)) {
arg.Invalid(fmt.Sprintf("calling %s with n == 0 will return no results, did you mean -1?", name))
if k, ok := arg.Value.Value.(*ir.Const); ok && k.Value.Kind() == constant.Int {
if v, ok := constant.Int64Val(k.Value); ok && v == 0 {
arg.Invalid(fmt.Sprintf("calling %s with n == 0 will return no results, did you mean -1?", name))
}
}
}
}
@ -293,8 +287,8 @@ func ValidHostPort(v Value) bool {
// ConvertedFrom reports whether value v was converted from type typ.
func ConvertedFrom(v Value, typ string) bool {
change, ok := v.Value.(*ssa.ChangeType)
return ok && IsType(change.X.Type(), typ)
change, ok := v.Value.(*ir.ChangeType)
return ok && code.IsType(change.X.Type(), typ)
}
func UniqueStringCutset(v Value) bool {

View File

@ -1,73 +0,0 @@
package vrp
import (
"fmt"
"honnef.co/go/tools/ssa"
)
// ChannelInterval describes the possible buffer sizes of a channel value
// as an interval of integers.
type ChannelInterval struct {
	Size IntInterval
}

// Union merges two channel ranges by taking the union of their size
// intervals. Operands that are not ChannelIntervals are treated as the
// empty range; an empty or unknown operand acts as the identity.
func (c ChannelInterval) Union(other Range) Range {
	o, ok := other.(ChannelInterval)
	if !ok {
		o = ChannelInterval{EmptyIntInterval}
	}
	switch {
	case c.Size.Empty() || !c.Size.IsKnown():
		return o
	case o.Size.Empty() || !o.Size.IsKnown():
		return c
	}
	merged := c.Size.Union(o.Size).(IntInterval)
	return ChannelInterval{Size: merged}
}

// String renders the underlying size interval.
func (c ChannelInterval) String() string { return c.Size.String() }

// IsKnown reports whether the size interval carries any information.
func (c ChannelInterval) IsKnown() bool { return c.Size.IsKnown() }
// MakeChannelConstraint models y = make(chan T, Buffer): the channel's
// size range is derived from the range of the buffer argument.
type MakeChannelConstraint struct {
	aConstraint
	Buffer ssa.Value
}

// ChannelChangeTypeConstraint models y = changetype(X) for channels; the
// resulting range is simply X's range, unchanged.
type ChannelChangeTypeConstraint struct {
	aConstraint
	X ssa.Value
}

func NewMakeChannelConstraint(buffer, y ssa.Value) Constraint {
	return &MakeChannelConstraint{NewConstraint(y), buffer}
}
func NewChannelChangeTypeConstraint(x, y ssa.Value) Constraint {
	return &ChannelChangeTypeConstraint{NewConstraint(y), x}
}

func (c *MakeChannelConstraint) Operands() []ssa.Value       { return []ssa.Value{c.Buffer} }
func (c *ChannelChangeTypeConstraint) Operands() []ssa.Value { return []ssa.Value{c.X} }

func (c *MakeChannelConstraint) String() string {
	return fmt.Sprintf("%s = make(chan, %s)", c.Y().Name(), c.Buffer.Name())
}
func (c *ChannelChangeTypeConstraint) String() string {
	return fmt.Sprintf("%s = changetype(%s)", c.Y().Name(), c.X.Name())
}

// Eval derives the channel interval from the buffer's integer range. An
// unknown buffer yields [0, ∞]; a negative lower bound is clamped to 0
// because a channel capacity cannot be negative.
func (c *MakeChannelConstraint) Eval(g *Graph) Range {
	i, ok := g.Range(c.Buffer).(IntInterval)
	if !ok {
		return ChannelInterval{NewIntInterval(NewZ(0), PInfinity)}
	}
	if i.Lower.Sign() == -1 {
		i.Lower = NewZ(0)
	}
	return ChannelInterval{i}
}

// Eval for a change-type propagates the operand's range verbatim.
func (c *ChannelChangeTypeConstraint) Eval(g *Graph) Range { return g.Range(c.X) }

View File

@ -1,476 +0,0 @@
package vrp
import (
"fmt"
"go/token"
"go/types"
"math/big"
"honnef.co/go/tools/ssa"
)
// Zs is a slice of Z values; it provides the sort.Interface methods.
type Zs []Z

func (zs Zs) Len() int { return len(zs) }

func (zs Zs) Less(i int, j int) bool { return zs[i].Cmp(zs[j]) == -1 }

func (zs Zs) Swap(i int, j int) { zs[i], zs[j] = zs[j], zs[i] }

// Z is an arbitrary-precision integer extended with negative and
// positive infinity. Construct finite values with NewZ/NewBigZ; the two
// infinities are the package-level NInfinity and PInfinity.
type Z struct {
	infinity int8     // -1 for -∞, +1 for +∞, 0 for a finite value
	integer  *big.Int // the finite value; nil for infinities
}

// NInfinity and PInfinity are the two infinite values of Z.
var (
	NInfinity = Z{infinity: -1}
	PInfinity = Z{infinity: 1}
)

// NewZ returns the finite Z with value n.
func NewZ(n int64) Z {
	return NewBigZ(big.NewInt(n))
}

// NewBigZ wraps an existing big.Int in a finite Z.
func NewBigZ(n *big.Int) Z {
	return Z{integer: n}
}

// Infinite reports whether z is -∞ or +∞.
func (z Z) Infinite() bool { return z.infinity != 0 }

// Add returns z + o. Addition of a negative operand is rewritten as
// subtraction; combinations of infinities that have no defined result
// panic. Note that -∞ + ∞ takes the z == NInfinity branch and yields -∞.
func (z Z) Add(o Z) Z {
	if o.Sign() < 0 {
		return z.Sub(o.Negate())
	}
	switch {
	case z == NInfinity:
		return NInfinity
	case z == PInfinity, o == PInfinity:
		return PInfinity
	case !z.Infinite() && !o.Infinite():
		sum := new(big.Int).Add(z.integer, o.integer)
		return NewBigZ(sum)
	}
	panic(fmt.Sprintf("%s + %s is not defined", z, o))
}

// Sub returns z - o; subtraction of a negative operand is rewritten as
// addition, and undefined infinity combinations panic.
func (z Z) Sub(o Z) Z {
	if o.Sign() < 0 {
		return z.Add(o.Negate())
	}
	switch {
	case !z.Infinite() && !o.Infinite():
		diff := new(big.Int).Sub(z.integer, o.integer)
		return NewBigZ(diff)
	case o == PInfinity && z != PInfinity:
		return NInfinity
	case z.Infinite() && !o.Infinite():
		return Z{infinity: z.infinity}
	case z == PInfinity && o == PInfinity:
		return PInfinity
	}
	panic(fmt.Sprintf("%s - %s is not defined", z, o))
}

// Mul returns z * o. Zero times anything — including infinity — is zero;
// otherwise infinities propagate with the sign of the product.
func (z Z) Mul(o Z) Z {
	zeroOperand := (z.integer != nil && z.integer.Sign() == 0) ||
		(o.integer != nil && o.integer.Sign() == 0)
	if zeroOperand {
		return NewBigZ(new(big.Int))
	}
	if z.Infinite() || o.Infinite() {
		return Z{infinity: int8(z.Sign() * o.Sign())}
	}
	return NewBigZ(new(big.Int).Mul(z.integer, o.integer))
}

// Negate returns -z, flipping infinities.
func (z Z) Negate() Z {
	switch z.infinity {
	case 1:
		return NInfinity
	case -1:
		return PInfinity
	}
	return NewBigZ(new(big.Int).Neg(z.integer))
}

// Sign returns -1, 0 or 1 depending on the sign of z.
func (z Z) Sign() int {
	if z.Infinite() {
		return int(z.infinity)
	}
	return z.integer.Sign()
}

// String renders z, using "∞"/"-∞" for the infinities.
func (z Z) String() string {
	switch z {
	case NInfinity:
		return "-∞"
	case PInfinity:
		return "∞"
	}
	return fmt.Sprintf("%d", z.integer)
}

// Cmp compares z and o, returning -1, 0 or 1. Two like-signed
// infinities compare equal.
func (z Z) Cmp(o Z) int {
	if z.infinity != 0 && z.infinity == o.infinity {
		return 0
	}
	switch {
	case z == PInfinity, o == NInfinity:
		return 1
	case z == NInfinity, o == PInfinity:
		return -1
	}
	return z.integer.Cmp(o.integer)
}

// MaxZ returns the largest of its arguments; it panics when called with
// none.
func MaxZ(zs ...Z) Z {
	switch len(zs) {
	case 0:
		panic("Max called with no arguments")
	case 1:
		return zs[0]
	}
	best := zs[0]
	for _, z := range zs[1:] {
		if z.Cmp(best) == 1 {
			best = z
		}
	}
	return best
}

// MinZ returns the smallest of its arguments; it panics when called with
// none.
func MinZ(zs ...Z) Z {
	switch len(zs) {
	case 0:
		panic("Min called with no arguments")
	case 1:
		return zs[0]
	}
	best := zs[0]
	for _, z := range zs[1:] {
		if z.Cmp(best) == -1 {
			best = z
		}
	}
	return best
}
// EmptyIntInterval is the canonical empty interval: it is "known", but
// Lower (+∞) is greater than Upper (-∞), so it contains no values.
var EmptyIntInterval = IntInterval{true, PInfinity, NInfinity}

// InfinityFor returns the widest interval valid for v's type: [0, ∞]
// for unsigned integer types, [-∞, ∞] otherwise.
func InfinityFor(v ssa.Value) IntInterval {
	if b, ok := v.Type().Underlying().(*types.Basic); ok {
		if (b.Info() & types.IsUnsigned) != 0 {
			return NewIntInterval(NewZ(0), PInfinity)
		}
	}
	return NewIntInterval(NInfinity, PInfinity)
}

// IntInterval is a possibly unbounded integer range [Lower, Upper].
// known distinguishes "no information yet" (⊥) from an actual interval.
type IntInterval struct {
	known bool
	Lower Z
	Upper Z
}

// NewIntInterval returns [l, u], or the empty interval when u < l.
func NewIntInterval(l, u Z) IntInterval {
	if u.Cmp(l) == -1 {
		return EmptyIntInterval
	}
	return IntInterval{known: true, Lower: l, Upper: u}
}

// IsKnown reports whether the interval carries any information.
func (i IntInterval) IsKnown() bool {
	return i.known
}

// Empty reports whether the interval contains no values.
func (i IntInterval) Empty() bool {
	return i.Lower == PInfinity && i.Upper == NInfinity
}

// IsMaxRange reports whether the interval is [-∞, ∞].
func (i IntInterval) IsMaxRange() bool {
	return i.Lower == NInfinity && i.Upper == PInfinity
}

// Intersection returns the overlap of the two intervals; an unknown
// operand acts as the identity, and disjoint intervals yield the empty
// interval.
func (i1 IntInterval) Intersection(i2 IntInterval) IntInterval {
	if !i1.IsKnown() {
		return i2
	}
	if !i2.IsKnown() {
		return i1
	}
	if i1.Empty() || i2.Empty() {
		return EmptyIntInterval
	}
	i3 := NewIntInterval(MaxZ(i1.Lower, i2.Lower), MinZ(i1.Upper, i2.Upper))
	if i3.Lower.Cmp(i3.Upper) == 1 {
		return EmptyIntInterval
	}
	return i3
}

// Union returns the smallest interval containing both operands; empty
// or unknown operands act as the identity.
func (i1 IntInterval) Union(other Range) Range {
	i2, ok := other.(IntInterval)
	if !ok {
		i2 = EmptyIntInterval
	}
	if i1.Empty() || !i1.IsKnown() {
		return i2
	}
	if i2.Empty() || !i2.IsKnown() {
		return i1
	}
	return NewIntInterval(MinZ(i1.Lower, i2.Lower), MaxZ(i1.Upper, i2.Upper))
}

// Add implements interval addition: [l1+l2, u1+u2].
func (i1 IntInterval) Add(i2 IntInterval) IntInterval {
	if i1.Empty() || i2.Empty() {
		return EmptyIntInterval
	}
	l1, u1, l2, u2 := i1.Lower, i1.Upper, i2.Lower, i2.Upper
	return NewIntInterval(l1.Add(l2), u1.Add(u2))
}

// Sub implements interval subtraction: [l1-u2, u1-l2].
func (i1 IntInterval) Sub(i2 IntInterval) IntInterval {
	if i1.Empty() || i2.Empty() {
		return EmptyIntInterval
	}
	l1, u1, l2, u2 := i1.Lower, i1.Upper, i2.Lower, i2.Upper
	return NewIntInterval(l1.Sub(u2), u1.Sub(l2))
}

// Mul implements interval multiplication: the result bounds are the
// minimum and maximum over all four products of the operand bounds.
func (i1 IntInterval) Mul(i2 IntInterval) IntInterval {
	if i1.Empty() || i2.Empty() {
		return EmptyIntInterval
	}
	x1, x2 := i1.Lower, i1.Upper
	y1, y2 := i2.Lower, i2.Upper
	return NewIntInterval(
		MinZ(x1.Mul(y1), x1.Mul(y2), x2.Mul(y1), x2.Mul(y2)),
		MaxZ(x1.Mul(y1), x1.Mul(y2), x2.Mul(y1), x2.Mul(y2)),
	)
}

// String renders the interval; "[⊥, ⊥]" means unknown, "{}" empty.
func (i1 IntInterval) String() string {
	if !i1.IsKnown() {
		return "[⊥, ⊥]"
	}
	if i1.Empty() {
		return "{}"
	}
	return fmt.Sprintf("[%s, %s]", i1.Lower, i1.Upper)
}
// IntArithmeticConstraint models y = A op B for integer arithmetic; Fn
// is the interval-level implementation of the operator.
type IntArithmeticConstraint struct {
	aConstraint
	A  ssa.Value
	B  ssa.Value
	Op token.Token
	Fn func(IntInterval, IntInterval) IntInterval
}

type IntAddConstraint struct{ *IntArithmeticConstraint }
type IntSubConstraint struct{ *IntArithmeticConstraint }
type IntMulConstraint struct{ *IntArithmeticConstraint }

// IntConversionConstraint models y = T(X) for integer conversions.
type IntConversionConstraint struct {
	aConstraint
	X ssa.Value
}

// IntIntersectionConstraint models the range refinement implied by a
// branch on "A Op B"; I is filled in by Resolve once B's range is known.
type IntIntersectionConstraint struct {
	aConstraint
	ranges   Ranges
	A        ssa.Value
	B        ssa.Value
	Op       token.Token
	I        IntInterval
	resolved bool
}

// IntIntervalConstraint pins y to the fixed interval I.
type IntIntervalConstraint struct {
	aConstraint
	I IntInterval
}

func NewIntArithmeticConstraint(a, b, y ssa.Value, op token.Token, fn func(IntInterval, IntInterval) IntInterval) *IntArithmeticConstraint {
	return &IntArithmeticConstraint{NewConstraint(y), a, b, op, fn}
}
func NewIntAddConstraint(a, b, y ssa.Value) Constraint {
	return &IntAddConstraint{NewIntArithmeticConstraint(a, b, y, token.ADD, IntInterval.Add)}
}
func NewIntSubConstraint(a, b, y ssa.Value) Constraint {
	return &IntSubConstraint{NewIntArithmeticConstraint(a, b, y, token.SUB, IntInterval.Sub)}
}
func NewIntMulConstraint(a, b, y ssa.Value) Constraint {
	return &IntMulConstraint{NewIntArithmeticConstraint(a, b, y, token.MUL, IntInterval.Mul)}
}
func NewIntConversionConstraint(x, y ssa.Value) Constraint {
	return &IntConversionConstraint{NewConstraint(y), x}
}
func NewIntIntersectionConstraint(a, b ssa.Value, op token.Token, ranges Ranges, y ssa.Value) Constraint {
	return &IntIntersectionConstraint{
		aConstraint: NewConstraint(y),
		ranges:      ranges,
		A:           a,
		B:           b,
		Op:          op,
	}
}
func NewIntIntervalConstraint(i IntInterval, y ssa.Value) Constraint {
	return &IntIntervalConstraint{NewConstraint(y), i}
}

func (c *IntArithmeticConstraint) Operands() []ssa.Value   { return []ssa.Value{c.A, c.B} }
func (c *IntConversionConstraint) Operands() []ssa.Value   { return []ssa.Value{c.X} }
func (c *IntIntersectionConstraint) Operands() []ssa.Value { return []ssa.Value{c.A} }

// Receiver renamed from s to c for consistency with the other methods of
// this type (staticcheck ST1016).
func (c *IntIntervalConstraint) Operands() []ssa.Value { return nil }

func (c *IntArithmeticConstraint) String() string {
	return fmt.Sprintf("%s = %s %s %s", c.Y().Name(), c.A.Name(), c.Op, c.B.Name())
}
func (c *IntConversionConstraint) String() string {
	return fmt.Sprintf("%s = %s(%s)", c.Y().Name(), c.Y().Type(), c.X.Name())
}
func (c *IntIntersectionConstraint) String() string {
	return fmt.Sprintf("%s = %s %s %s (%t branch)", c.Y().Name(), c.A.Name(), c.Op, c.B.Name(), c.Y().(*ssa.Sigma).Branch)
}
func (c *IntIntervalConstraint) String() string { return fmt.Sprintf("%s = %s", c.Y().Name(), c.I) }
// Eval applies the interval operator to the operand ranges; if either
// operand is unknown, the result is the unknown interval.
func (c *IntArithmeticConstraint) Eval(g *Graph) Range {
	i1, i2 := g.Range(c.A).(IntInterval), g.Range(c.B).(IntInterval)
	if !i1.IsKnown() || !i2.IsKnown() {
		return IntInterval{}
	}
	return c.Fn(i1, i2)
}

// Eval narrows the source range to what fits in the destination type
// when converting to a wider integer type; otherwise it propagates the
// source range unchanged.
func (c *IntConversionConstraint) Eval(g *Graph) Range {
	s := &types.StdSizes{
		// XXX is it okay to assume the largest word size, or do we
		// need to be platform specific?
		WordSize: 8,
		MaxAlign: 1,
	}
	fromI := g.Range(c.X).(IntInterval)
	toI := g.Range(c.Y()).(IntInterval)
	fromT := c.X.Type().Underlying().(*types.Basic)
	toT := c.Y().Type().Underlying().(*types.Basic)
	fromB := s.Sizeof(c.X.Type())
	toB := s.Sizeof(c.Y().Type())
	if !fromI.IsKnown() {
		return toI
	}
	if !toI.IsKnown() {
		return fromI
	}
	// uint<N> -> sint/uint<M>, M > N: [max(0, l1), min(2**N-1, u2)]
	if (fromT.Info()&types.IsUnsigned != 0) &&
		toB > fromB {
		n := big.NewInt(1)
		n.Lsh(n, uint(fromB*8))
		n.Sub(n, big.NewInt(1))
		return NewIntInterval(
			MaxZ(NewZ(0), fromI.Lower),
			MinZ(NewBigZ(n), toI.Upper),
		)
	}
	// sint<N> -> sint<M>, M > N; [max(-∞, l1), min(2**N-1, u2)]
	if (fromT.Info()&types.IsUnsigned == 0) &&
		(toT.Info()&types.IsUnsigned == 0) &&
		toB > fromB {
		n := big.NewInt(1)
		n.Lsh(n, uint(fromB*8))
		n.Sub(n, big.NewInt(1))
		return NewIntInterval(
			MaxZ(NInfinity, fromI.Lower),
			MinZ(NewBigZ(n), toI.Upper),
		)
	}
	return fromI
}

// Eval intersects A's range with the branch-derived interval I; if A is
// unknown, I alone is the best information available.
func (c *IntIntersectionConstraint) Eval(g *Graph) Range {
	xi := g.Range(c.A).(IntInterval)
	if !xi.IsKnown() {
		return c.I
	}
	return xi.Intersection(c.I)
}

// Eval returns the fixed interval regardless of graph state.
func (c *IntIntervalConstraint) Eval(*Graph) Range { return c.I }

// Futures lists values whose ranges must be known before Resolve runs.
func (c *IntIntersectionConstraint) Futures() []ssa.Value {
	return []ssa.Value{c.B}
}

// Resolve computes the interval implied by the branch condition "A Op B"
// from B's (now known) range. An unknown B, or a != comparison, yields
// the maximal interval for Y's type.
func (c *IntIntersectionConstraint) Resolve() {
	r, ok := c.ranges[c.B].(IntInterval)
	if !ok {
		c.I = InfinityFor(c.Y())
		return
	}
	switch c.Op {
	case token.EQL:
		c.I = r
	case token.GTR:
		c.I = NewIntInterval(r.Lower.Add(NewZ(1)), PInfinity)
	case token.GEQ:
		c.I = NewIntInterval(r.Lower, PInfinity)
	case token.LSS:
		// TODO(dh): do we need 0 instead of NInfinity for uints?
		c.I = NewIntInterval(NInfinity, r.Upper.Sub(NewZ(1)))
	case token.LEQ:
		c.I = NewIntInterval(NInfinity, r.Upper)
	case token.NEQ:
		c.I = InfinityFor(c.Y())
	default:
		panic("unsupported op " + c.Op.String())
	}
}

// IsKnown reports whether Resolve has produced a usable interval.
func (c *IntIntersectionConstraint) IsKnown() bool {
	return c.I.IsKnown()
}

// MarkUnresolved clears the resolved flag so Resolve will run again.
func (c *IntIntersectionConstraint) MarkUnresolved() {
	c.resolved = false
}

// MarkResolved records that Resolve has been run.
func (c *IntIntersectionConstraint) MarkResolved() {
	c.resolved = true
}

// IsResolved reports whether Resolve has been run.
func (c *IntIntersectionConstraint) IsResolved() bool {
	return c.resolved
}

View File

@ -1,273 +0,0 @@
package vrp
// TODO(dh): most of the constraints have implementations identical to
// that of strings. Consider reusing them.
import (
"fmt"
"go/types"
"honnef.co/go/tools/ssa"
)
// SliceInterval describes the possible lengths of a slice value as an
// interval of integers.
type SliceInterval struct {
	Length IntInterval
}

// Union merges two slice ranges by taking the union of their length
// intervals. Operands that are not SliceIntervals are treated as the
// empty range; an empty or unknown operand acts as the identity.
func (s SliceInterval) Union(other Range) Range {
	o, ok := other.(SliceInterval)
	if !ok {
		o = SliceInterval{EmptyIntInterval}
	}
	switch {
	case s.Length.Empty() || !s.Length.IsKnown():
		return o
	case o.Length.Empty() || !o.Length.IsKnown():
		return s
	}
	merged := s.Length.Union(o.Length).(IntInterval)
	return SliceInterval{Length: merged}
}

// String renders the underlying length interval.
func (s SliceInterval) String() string { return s.Length.String() }

// IsKnown reports whether the length interval carries any information.
func (s SliceInterval) IsKnown() bool { return s.Length.IsKnown() }
// SliceAppendConstraint models y = append(A, B...): the result length is
// the sum of the operand lengths.
type SliceAppendConstraint struct {
	aConstraint
	A ssa.Value
	B ssa.Value
}

// SliceSliceConstraint models y = X[Lower:Upper] on a slice; nil bounds
// stand for the respective defaults (0 and len(X)).
type SliceSliceConstraint struct {
	aConstraint
	X     ssa.Value
	Lower ssa.Value
	Upper ssa.Value
}

// ArraySliceConstraint models y = X[Lower:Upper] where X is an array or
// pointer to array, so the default upper bound is the array length.
type ArraySliceConstraint struct {
	aConstraint
	X     ssa.Value
	Lower ssa.Value
	Upper ssa.Value
}

// SliceIntersectionConstraint refines X's length range with the
// branch-derived interval I.
type SliceIntersectionConstraint struct {
	aConstraint
	X ssa.Value
	I IntInterval
}

// SliceLengthConstraint models y = len(X).
type SliceLengthConstraint struct {
	aConstraint
	X ssa.Value
}

// MakeSliceConstraint models y = make([]T, Size).
type MakeSliceConstraint struct {
	aConstraint
	Size ssa.Value
}

// SliceIntervalConstraint pins y's length to the fixed interval I.
type SliceIntervalConstraint struct {
	aConstraint
	I IntInterval
}

func NewSliceAppendConstraint(a, b, y ssa.Value) Constraint {
	return &SliceAppendConstraint{NewConstraint(y), a, b}
}
func NewSliceSliceConstraint(x, lower, upper, y ssa.Value) Constraint {
	return &SliceSliceConstraint{NewConstraint(y), x, lower, upper}
}
func NewArraySliceConstraint(x, lower, upper, y ssa.Value) Constraint {
	return &ArraySliceConstraint{NewConstraint(y), x, lower, upper}
}
func NewSliceIntersectionConstraint(x ssa.Value, i IntInterval, y ssa.Value) Constraint {
	return &SliceIntersectionConstraint{NewConstraint(y), x, i}
}
func NewSliceLengthConstraint(x, y ssa.Value) Constraint {
	return &SliceLengthConstraint{NewConstraint(y), x}
}
func NewMakeSliceConstraint(size, y ssa.Value) Constraint {
	return &MakeSliceConstraint{NewConstraint(y), size}
}
func NewSliceIntervalConstraint(i IntInterval, y ssa.Value) Constraint {
	return &SliceIntervalConstraint{NewConstraint(y), i}
}

func (c *SliceAppendConstraint) Operands() []ssa.Value { return []ssa.Value{c.A, c.B} }
func (c *SliceSliceConstraint) Operands() []ssa.Value {
	ops := []ssa.Value{c.X}
	if c.Lower != nil {
		ops = append(ops, c.Lower)
	}
	if c.Upper != nil {
		ops = append(ops, c.Upper)
	}
	return ops
}
func (c *ArraySliceConstraint) Operands() []ssa.Value {
	ops := []ssa.Value{c.X}
	if c.Lower != nil {
		ops = append(ops, c.Lower)
	}
	if c.Upper != nil {
		ops = append(ops, c.Upper)
	}
	return ops
}
func (c *SliceIntersectionConstraint) Operands() []ssa.Value { return []ssa.Value{c.X} }
func (c *SliceLengthConstraint) Operands() []ssa.Value       { return []ssa.Value{c.X} }
func (c *MakeSliceConstraint) Operands() []ssa.Value         { return []ssa.Value{c.Size} }

// Receiver renamed from s to c for consistency with the other methods of
// this type (staticcheck ST1016).
func (c *SliceIntervalConstraint) Operands() []ssa.Value { return nil }

func (c *SliceAppendConstraint) String() string {
	return fmt.Sprintf("%s = append(%s, %s)", c.Y().Name(), c.A.Name(), c.B.Name())
}
func (c *SliceSliceConstraint) String() string {
	var lname, uname string
	if c.Lower != nil {
		lname = c.Lower.Name()
	}
	if c.Upper != nil {
		uname = c.Upper.Name()
	}
	return fmt.Sprintf("%s[%s:%s]", c.X.Name(), lname, uname)
}
func (c *ArraySliceConstraint) String() string {
	var lname, uname string
	if c.Lower != nil {
		lname = c.Lower.Name()
	}
	if c.Upper != nil {
		uname = c.Upper.Name()
	}
	return fmt.Sprintf("%s[%s:%s]", c.X.Name(), lname, uname)
}
func (c *SliceIntersectionConstraint) String() string {
	return fmt.Sprintf("%s = %s.%t ⊓ %s", c.Y().Name(), c.X.Name(), c.Y().(*ssa.Sigma).Branch, c.I)
}
func (c *SliceLengthConstraint) String() string {
	return fmt.Sprintf("%s = len(%s)", c.Y().Name(), c.X.Name())
}
func (c *MakeSliceConstraint) String() string {
	return fmt.Sprintf("%s = make(slice, %s)", c.Y().Name(), c.Size.Name())
}
func (c *SliceIntervalConstraint) String() string { return fmt.Sprintf("%s = %s", c.Y().Name(), c.I) }
// Eval computes the appended slice's length as the sum of the operand
// lengths; B may be a slice or (for append(s, str...)) a string.
func (c *SliceAppendConstraint) Eval(g *Graph) Range {
	l1 := g.Range(c.A).(SliceInterval).Length
	var l2 IntInterval
	switch r := g.Range(c.B).(type) {
	case SliceInterval:
		l2 = r.Length
	case StringInterval:
		l2 = r.Length
	default:
		return SliceInterval{}
	}
	if !l1.IsKnown() || !l2.IsKnown() {
		return SliceInterval{}
	}
	return SliceInterval{
		Length: l1.Add(l2),
	}
}

// Eval computes the length range of X[lower:upper], defaulting lower to
// 0 and upper to len(X); negative candidate lengths are clamped to 0.
func (c *SliceSliceConstraint) Eval(g *Graph) Range {
	lr := NewIntInterval(NewZ(0), NewZ(0))
	if c.Lower != nil {
		lr = g.Range(c.Lower).(IntInterval)
	}
	ur := g.Range(c.X).(SliceInterval).Length
	if c.Upper != nil {
		ur = g.Range(c.Upper).(IntInterval)
	}
	if !lr.IsKnown() || !ur.IsKnown() {
		return SliceInterval{}
	}
	ls := []Z{
		ur.Lower.Sub(lr.Lower),
		ur.Upper.Sub(lr.Lower),
		ur.Lower.Sub(lr.Upper),
		ur.Upper.Sub(lr.Upper),
	}
	// TODO(dh): if we don't truncate lengths to 0 we might be able to
	// easily detect slices with high < low. we'd need to treat -∞
	// specially, though.
	for i, l := range ls {
		if l.Sign() == -1 {
			ls[i] = NewZ(0)
		}
	}
	return SliceInterval{
		Length: NewIntInterval(MinZ(ls...), MaxZ(ls...)),
	}
}

// Eval is like SliceSliceConstraint.Eval, but the default upper bound is
// the statically known array length.
func (c *ArraySliceConstraint) Eval(g *Graph) Range {
	lr := NewIntInterval(NewZ(0), NewZ(0))
	if c.Lower != nil {
		lr = g.Range(c.Lower).(IntInterval)
	}
	var l int64
	switch typ := c.X.Type().(type) {
	case *types.Array:
		l = typ.Len()
	case *types.Pointer:
		l = typ.Elem().(*types.Array).Len()
	}
	ur := NewIntInterval(NewZ(l), NewZ(l))
	if c.Upper != nil {
		ur = g.Range(c.Upper).(IntInterval)
	}
	if !lr.IsKnown() || !ur.IsKnown() {
		return SliceInterval{}
	}
	ls := []Z{
		ur.Lower.Sub(lr.Lower),
		ur.Upper.Sub(lr.Lower),
		ur.Lower.Sub(lr.Upper),
		ur.Upper.Sub(lr.Upper),
	}
	// TODO(dh): if we don't truncate lengths to 0 we might be able to
	// easily detect slices with high < low. we'd need to treat -∞
	// specially, though.
	for i, l := range ls {
		if l.Sign() == -1 {
			ls[i] = NewZ(0)
		}
	}
	return SliceInterval{
		Length: NewIntInterval(MinZ(ls...), MaxZ(ls...)),
	}
}

// Eval intersects X's length range with the branch-derived interval I.
func (c *SliceIntersectionConstraint) Eval(g *Graph) Range {
	xi := g.Range(c.X).(SliceInterval)
	if !xi.IsKnown() {
		return c.I
	}
	return SliceInterval{
		Length: xi.Length.Intersection(c.I),
	}
}

// Eval returns the slice's length range; an unknown slice still has a
// length in [0, ∞].
func (c *SliceLengthConstraint) Eval(g *Graph) Range {
	i := g.Range(c.X).(SliceInterval).Length
	if !i.IsKnown() {
		return NewIntInterval(NewZ(0), PInfinity)
	}
	return i
}

// Eval derives the slice length from the size argument of make; an
// unknown size yields [0, ∞], and negative lower bounds are clamped to 0.
func (c *MakeSliceConstraint) Eval(g *Graph) Range {
	i, ok := g.Range(c.Size).(IntInterval)
	if !ok {
		return SliceInterval{NewIntInterval(NewZ(0), PInfinity)}
	}
	if i.Lower.Sign() == -1 {
		i.Lower = NewZ(0)
	}
	return SliceInterval{i}
}

// Eval returns the fixed length interval regardless of graph state.
func (c *SliceIntervalConstraint) Eval(*Graph) Range { return SliceInterval{c.I} }

View File

@ -1,258 +0,0 @@
package vrp
import (
"fmt"
"go/token"
"go/types"
"honnef.co/go/tools/ssa"
)
// StringInterval describes the possible lengths of a string value as an
// interval of integers.
type StringInterval struct {
	Length IntInterval
}

// Union merges two string ranges by taking the union of their length
// intervals. Operands that are not StringIntervals are treated as the
// empty range; an empty or unknown operand acts as the identity.
func (s StringInterval) Union(other Range) Range {
	o, ok := other.(StringInterval)
	if !ok {
		o = StringInterval{EmptyIntInterval}
	}
	switch {
	case s.Length.Empty() || !s.Length.IsKnown():
		return o
	case o.Length.Empty() || !o.Length.IsKnown():
		return s
	}
	merged := s.Length.Union(o.Length).(IntInterval)
	return StringInterval{Length: merged}
}

// String renders the underlying length interval.
func (s StringInterval) String() string { return s.Length.String() }

// IsKnown reports whether the length interval carries any information.
func (s StringInterval) IsKnown() bool { return s.Length.IsKnown() }
// StringSliceConstraint models y = X[Lower:Upper] on a string; nil
// bounds stand for the respective defaults (0 and len(X)).
type StringSliceConstraint struct {
	aConstraint
	X     ssa.Value
	Lower ssa.Value
	Upper ssa.Value
}

// StringIntersectionConstraint models the range refinement implied by a
// branch on "A Op B"; I is filled in by Resolve once B's range is known.
type StringIntersectionConstraint struct {
	aConstraint
	ranges   Ranges
	A        ssa.Value
	B        ssa.Value
	Op       token.Token
	I        IntInterval
	resolved bool
}

// StringConcatConstraint models y = A + B: the result length is the sum
// of the operand lengths.
type StringConcatConstraint struct {
	aConstraint
	A ssa.Value
	B ssa.Value
}

// StringLengthConstraint models y = len(X).
type StringLengthConstraint struct {
	aConstraint
	X ssa.Value
}

// StringIntervalConstraint pins y's length to the fixed interval I.
type StringIntervalConstraint struct {
	aConstraint
	I IntInterval
}

func NewStringSliceConstraint(x, lower, upper, y ssa.Value) Constraint {
	return &StringSliceConstraint{NewConstraint(y), x, lower, upper}
}
func NewStringIntersectionConstraint(a, b ssa.Value, op token.Token, ranges Ranges, y ssa.Value) Constraint {
	return &StringIntersectionConstraint{
		aConstraint: NewConstraint(y),
		ranges:      ranges,
		A:           a,
		B:           b,
		Op:          op,
	}
}
func NewStringConcatConstraint(a, b, y ssa.Value) Constraint {
	return &StringConcatConstraint{NewConstraint(y), a, b}
}
func NewStringLengthConstraint(x ssa.Value, y ssa.Value) Constraint {
	return &StringLengthConstraint{NewConstraint(y), x}
}
func NewStringIntervalConstraint(i IntInterval, y ssa.Value) Constraint {
	return &StringIntervalConstraint{NewConstraint(y), i}
}

func (c *StringSliceConstraint) Operands() []ssa.Value {
	vs := []ssa.Value{c.X}
	if c.Lower != nil {
		vs = append(vs, c.Lower)
	}
	if c.Upper != nil {
		vs = append(vs, c.Upper)
	}
	return vs
}
func (c *StringIntersectionConstraint) Operands() []ssa.Value { return []ssa.Value{c.A} }
func (c StringConcatConstraint) Operands() []ssa.Value        { return []ssa.Value{c.A, c.B} }
func (c *StringLengthConstraint) Operands() []ssa.Value       { return []ssa.Value{c.X} }

// Receiver renamed from s to c for consistency with the other methods of
// this type (staticcheck ST1016).
func (c *StringIntervalConstraint) Operands() []ssa.Value { return nil }

func (c *StringSliceConstraint) String() string {
	var lname, uname string
	if c.Lower != nil {
		lname = c.Lower.Name()
	}
	if c.Upper != nil {
		uname = c.Upper.Name()
	}
	return fmt.Sprintf("%s[%s:%s]", c.X.Name(), lname, uname)
}
func (c *StringIntersectionConstraint) String() string {
	return fmt.Sprintf("%s = %s %s %s (%t branch)", c.Y().Name(), c.A.Name(), c.Op, c.B.Name(), c.Y().(*ssa.Sigma).Branch)
}
func (c StringConcatConstraint) String() string {
	return fmt.Sprintf("%s = %s + %s", c.Y().Name(), c.A.Name(), c.B.Name())
}
func (c *StringLengthConstraint) String() string {
	return fmt.Sprintf("%s = len(%s)", c.Y().Name(), c.X.Name())
}
func (c *StringIntervalConstraint) String() string { return fmt.Sprintf("%s = %s", c.Y().Name(), c.I) }
// Eval computes the length range of X[lower:upper], defaulting lower to
// 0 and upper to len(X); negative candidate lengths are clamped to 0.
func (c *StringSliceConstraint) Eval(g *Graph) Range {
	lr := NewIntInterval(NewZ(0), NewZ(0))
	if c.Lower != nil {
		lr = g.Range(c.Lower).(IntInterval)
	}
	ur := g.Range(c.X).(StringInterval).Length
	if c.Upper != nil {
		ur = g.Range(c.Upper).(IntInterval)
	}
	if !lr.IsKnown() || !ur.IsKnown() {
		return StringInterval{}
	}
	ls := []Z{
		ur.Lower.Sub(lr.Lower),
		ur.Upper.Sub(lr.Lower),
		ur.Lower.Sub(lr.Upper),
		ur.Upper.Sub(lr.Upper),
	}
	// TODO(dh): if we don't truncate lengths to 0 we might be able to
	// easily detect slices with high < low. we'd need to treat -∞
	// specially, though.
	for i, l := range ls {
		if l.Sign() == -1 {
			ls[i] = NewZ(0)
		}
	}
	return StringInterval{
		Length: NewIntInterval(MinZ(ls...), MaxZ(ls...)),
	}
}

// Eval intersects A's length range with the branch-derived interval I.
// A's range may be either a string range or, when comparing lengths
// directly, an integer range.
func (c *StringIntersectionConstraint) Eval(g *Graph) Range {
	var l IntInterval
	switch r := g.Range(c.A).(type) {
	case StringInterval:
		l = r.Length
	case IntInterval:
		l = r
	}
	if !l.IsKnown() {
		return StringInterval{c.I}
	}
	return StringInterval{
		Length: l.Intersection(c.I),
	}
}

// Eval computes the concatenation's length as the sum of the operand
// lengths.
func (c StringConcatConstraint) Eval(g *Graph) Range {
	i1, i2 := g.Range(c.A).(StringInterval), g.Range(c.B).(StringInterval)
	if !i1.Length.IsKnown() || !i2.Length.IsKnown() {
		return StringInterval{}
	}
	return StringInterval{
		Length: i1.Length.Add(i2.Length),
	}
}

// Eval returns the string's length range; an unknown string still has a
// length in [0, ∞].
func (c *StringLengthConstraint) Eval(g *Graph) Range {
	i := g.Range(c.X).(StringInterval).Length
	if !i.IsKnown() {
		return NewIntInterval(NewZ(0), PInfinity)
	}
	return i
}

// Eval returns the fixed length interval regardless of graph state.
func (c *StringIntervalConstraint) Eval(*Graph) Range { return StringInterval{c.I} }

// Futures lists values whose ranges must be known before Resolve runs.
func (c *StringIntersectionConstraint) Futures() []ssa.Value {
	return []ssa.Value{c.B}
}

// Resolve computes the interval implied by the branch condition "A Op B"
// from B's (now known) range, distinguishing string-to-string
// comparisons from length comparisons.
func (c *StringIntersectionConstraint) Resolve() {
	if (c.A.Type().Underlying().(*types.Basic).Info() & types.IsString) != 0 {
		// comparing two strings
		r, ok := c.ranges[c.B].(StringInterval)
		if !ok {
			c.I = NewIntInterval(NewZ(0), PInfinity)
			return
		}
		switch c.Op {
		case token.EQL:
			c.I = r.Length
		case token.GTR, token.GEQ:
			c.I = NewIntInterval(r.Length.Lower, PInfinity)
		case token.LSS, token.LEQ:
			c.I = NewIntInterval(NewZ(0), r.Length.Upper)
		case token.NEQ:
			// NOTE(review): NEQ deliberately leaves c.I untouched here,
			// unlike the int version which resets it to the maximal
			// interval — confirm this asymmetry is intended upstream.
		default:
			panic("unsupported op " + c.Op.String())
		}
	} else {
		r, ok := c.ranges[c.B].(IntInterval)
		if !ok {
			c.I = NewIntInterval(NewZ(0), PInfinity)
			return
		}
		// comparing two lengths
		switch c.Op {
		case token.EQL:
			c.I = r
		case token.GTR:
			c.I = NewIntInterval(r.Lower.Add(NewZ(1)), PInfinity)
		case token.GEQ:
			c.I = NewIntInterval(r.Lower, PInfinity)
		case token.LSS:
			c.I = NewIntInterval(NInfinity, r.Upper.Sub(NewZ(1)))
		case token.LEQ:
			c.I = NewIntInterval(NInfinity, r.Upper)
		case token.NEQ:
			// NOTE(review): NEQ intentionally keeps the previous value
			// of c.I — see the note in the string branch above.
		default:
			panic("unsupported op " + c.Op.String())
		}
	}
}

// IsKnown reports whether Resolve has produced a usable interval.
func (c *StringIntersectionConstraint) IsKnown() bool {
	return c.I.IsKnown()
}

// MarkUnresolved clears the resolved flag so Resolve will run again.
func (c *StringIntersectionConstraint) MarkUnresolved() {
	c.resolved = false
}

// MarkResolved records that Resolve has been run.
func (c *StringIntersectionConstraint) MarkResolved() {
	c.resolved = true
}

// IsResolved reports whether Resolve has been run.
func (c *StringIntersectionConstraint) IsResolved() bool {
	return c.resolved
}

File diff suppressed because it is too large Load Diff

View File

@ -1,111 +1,81 @@
package stylecheck
import (
"flag"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"honnef.co/go/tools/config"
"honnef.co/go/tools/facts"
"honnef.co/go/tools/internal/passes/buildssa"
"honnef.co/go/tools/internal/passes/buildir"
"honnef.co/go/tools/lint/lintutil"
)
func newFlagSet() flag.FlagSet {
fs := flag.NewFlagSet("", flag.PanicOnError)
fs.Var(lintutil.NewVersionFlag(), "go", "Target Go version")
return *fs
}
var Analyzers = map[string]*analysis.Analyzer{
var Analyzers = lintutil.InitializeAnalyzers(Docs, map[string]*analysis.Analyzer{
"ST1000": {
Name: "ST1000",
Run: CheckPackageComment,
Doc: Docs["ST1000"].String(),
Requires: []*analysis.Analyzer{},
Flags: newFlagSet(),
Run: CheckPackageComment,
},
"ST1001": {
Name: "ST1001",
Run: CheckDotImports,
Doc: Docs["ST1001"].String(),
Requires: []*analysis.Analyzer{facts.Generated, config.Analyzer},
Flags: newFlagSet(),
},
"ST1003": {
Name: "ST1003",
Run: CheckNames,
Doc: Docs["ST1003"].String(),
Requires: []*analysis.Analyzer{facts.Generated, config.Analyzer},
Flags: newFlagSet(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated, config.Analyzer},
},
"ST1005": {
Name: "ST1005",
Run: CheckErrorStrings,
Doc: Docs["ST1005"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer},
Flags: newFlagSet(),
Requires: []*analysis.Analyzer{buildir.Analyzer},
},
"ST1006": {
Name: "ST1006",
Run: CheckReceiverNames,
Doc: Docs["ST1006"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer, facts.Generated},
Flags: newFlagSet(),
Requires: []*analysis.Analyzer{buildir.Analyzer, facts.Generated},
},
"ST1008": {
Name: "ST1008",
Run: CheckErrorReturn,
Doc: Docs["ST1008"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer},
Flags: newFlagSet(),
Requires: []*analysis.Analyzer{buildir.Analyzer},
},
"ST1011": {
Name: "ST1011",
Run: CheckTimeNames,
Doc: Docs["ST1011"].String(),
Flags: newFlagSet(),
Run: CheckTimeNames,
Requires: []*analysis.Analyzer{inspect.Analyzer},
},
"ST1012": {
Name: "ST1012",
Run: CheckErrorVarNames,
Doc: Docs["ST1012"].String(),
Requires: []*analysis.Analyzer{config.Analyzer},
Flags: newFlagSet(),
},
"ST1013": {
Name: "ST1013",
Run: CheckHTTPStatusCodes,
Doc: Docs["ST1013"].String(),
Requires: []*analysis.Analyzer{facts.Generated, facts.TokenFile, config.Analyzer},
Flags: newFlagSet(),
Run: CheckHTTPStatusCodes,
// TODO(dh): why does this depend on facts.TokenFile?
Requires: []*analysis.Analyzer{facts.Generated, facts.TokenFile, config.Analyzer, inspect.Analyzer},
},
"ST1015": {
Name: "ST1015",
Run: CheckDefaultCaseOrder,
Doc: Docs["ST1015"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated, facts.TokenFile},
Flags: newFlagSet(),
},
"ST1016": {
Name: "ST1016",
Run: CheckReceiverNamesIdentical,
Doc: Docs["ST1016"].String(),
Requires: []*analysis.Analyzer{buildssa.Analyzer},
Flags: newFlagSet(),
Requires: []*analysis.Analyzer{buildir.Analyzer, facts.Generated},
},
"ST1017": {
Name: "ST1017",
Run: CheckYodaConditions,
Doc: Docs["ST1017"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated, facts.TokenFile},
Flags: newFlagSet(),
},
"ST1018": {
Name: "ST1018",
Run: CheckInvisibleCharacters,
Doc: Docs["ST1018"].String(),
Requires: []*analysis.Analyzer{inspect.Analyzer},
Flags: newFlagSet(),
},
}
"ST1019": {
Run: CheckDuplicatedImports,
Requires: []*analysis.Analyzer{facts.Generated, config.Analyzer},
},
"ST1020": {
Run: CheckExportedFunctionDocs,
Requires: []*analysis.Analyzer{facts.Generated, inspect.Analyzer},
},
"ST1021": {
Run: CheckExportedTypeDocs,
Requires: []*analysis.Analyzer{facts.Generated, inspect.Analyzer},
},
"ST1022": {
Run: CheckExportedVarDocs,
Requires: []*analysis.Analyzer{facts.Generated, inspect.Analyzer},
},
})

View File

@ -3,7 +3,7 @@ package stylecheck
import "honnef.co/go/tools/lint"
var Docs = map[string]*lint.Documentation{
"ST1000": &lint.Documentation{
"ST1000": {
Title: `Incorrect or missing package comment`,
Text: `Packages must have a package comment that is formatted according to
the guidelines laid out in
@ -12,7 +12,7 @@ https://github.com/golang/go/wiki/CodeReviewComments#package-comments.`,
NonDefault: true,
},
"ST1001": &lint.Documentation{
"ST1001": {
Title: `Dot imports are discouraged`,
Text: `Dot imports that aren't in external test packages are discouraged.
@ -42,7 +42,7 @@ Quoting Go Code Review Comments:
Options: []string{"dot_import_whitelist"},
},
"ST1003": &lint.Documentation{
"ST1003": {
Title: `Poorly chosen identifier`,
Text: `Identifiers, such as variable and package names, follow certain rules.
@ -57,7 +57,7 @@ See the following links for details:
Options: []string{"initialisms"},
},
"ST1005": &lint.Documentation{
"ST1005": {
Title: `Incorrectly formatted error string`,
Text: `Error strings follow a set of guidelines to ensure uniformity and good
composability.
@ -73,7 +73,7 @@ Quoting Go Code Review Comments:
Since: "2019.1",
},
"ST1006": &lint.Documentation{
"ST1006": {
Title: `Poorly chosen receiver name`,
Text: `Quoting Go Code Review Comments:
@ -91,13 +91,13 @@ Quoting Go Code Review Comments:
Since: "2019.1",
},
"ST1008": &lint.Documentation{
"ST1008": {
Title: `A function's error value should be its last return value`,
Text: `A function's error value should be its last return value.`,
Since: `2019.1`,
},
"ST1011": &lint.Documentation{
"ST1011": {
Title: `Poorly chosen name for variable of type time.Duration`,
Text: `time.Duration values represent an amount of time, which is represented
as a count of nanoseconds. An expression like 5 * time.Microsecond
@ -107,14 +107,14 @@ Milli.`,
Since: `2019.1`,
},
"ST1012": &lint.Documentation{
"ST1012": {
Title: `Poorly chosen name for error variable`,
Text: `Error variables that are part of an API should be called errFoo or
ErrFoo.`,
Since: "2019.1",
},
"ST1013": &lint.Documentation{
"ST1013": {
Title: `Should use constants for HTTP error codes, not magic numbers`,
Text: `HTTP has a tremendous number of status codes. While some of those are
well known (200, 400, 404, 500), most of them are not. The net/http
@ -126,18 +126,18 @@ readability of your code.`,
Options: []string{"http_status_code_whitelist"},
},
"ST1015": &lint.Documentation{
"ST1015": {
Title: `A switch's default case should be the first or last case`,
Since: "2019.1",
},
"ST1016": &lint.Documentation{
"ST1016": {
Title: `Use consistent method receiver names`,
Since: "2019.1",
NonDefault: true,
},
"ST1017": &lint.Documentation{
"ST1017": {
Title: `Don't use Yoda conditions`,
Text: `Yoda conditions are conditions of the kind 'if 42 == x', where the
literal is on the left side of the comparison. These are a common
@ -147,8 +147,85 @@ bug, we prefer the more idiomatic 'if x == 42'.`,
Since: "2019.2",
},
"ST1018": &lint.Documentation{
"ST1018": {
Title: `Avoid zero-width and control characters in string literals`,
Since: "2019.2",
},
"ST1019": {
Title: `Importing the same package multiple times`,
Text: `Go allows importing the same package multiple times, as long as
different import aliases are being used. That is, the following
bit of code is valid:
import (
"fmt"
fumpt "fmt"
format "fmt"
_ "fmt"
)
However, this is very rarely done on purpose. Usually, it is a
sign of code that got refactored, accidentally adding duplicate
import statements. It is also a rarely known feature, which may
contribute to confusion.
Do note that sometimes, this feature may be used
intentionally (see for example
https://github.com/golang/go/commit/3409ce39bfd7584523b7a8c150a310cea92d879d)
if you want to allow this pattern in your code base, you're
advised to disable this check.`,
Since: "2020.1",
},
"ST1020": {
Title: "The documentation of an exported function should start with the function's name",
Text: `Doc comments work best as complete sentences, which
allow a wide variety of automated presentations. The first sentence
should be a one-sentence summary that starts with the name being
declared.
If every doc comment begins with the name of the item it describes,
you can use the doc subcommand of the go tool and run the output
through grep.
See https://golang.org/doc/effective_go.html#commentary for more
information on how to write good documentation.`,
Since: "2020.1",
NonDefault: true,
},
"ST1021": {
Title: "The documentation of an exported type should start with type's name",
Text: `Doc comments work best as complete sentences, which
allow a wide variety of automated presentations. The first sentence
should be a one-sentence summary that starts with the name being
declared.
If every doc comment begins with the name of the item it describes,
you can use the doc subcommand of the go tool and run the output
through grep.
See https://golang.org/doc/effective_go.html#commentary for more
information on how to write good documentation.`,
Since: "2020.1",
NonDefault: true,
},
"ST1022": {
Title: "The documentation of an exported variable or constant should start with variable's name",
Text: `Doc comments work best as complete sentences, which
allow a wide variety of automated presentations. The first sentence
should be a one-sentence summary that starts with the name being
declared.
If every doc comment begins with the name of the item it describes,
you can use the doc subcommand of the go tool and run the output
through grep.
See https://golang.org/doc/effective_go.html#commentary for more
information on how to write good documentation.`,
Since: "2020.1",
NonDefault: true,
},
}

View File

@ -6,15 +6,20 @@ import (
"go/constant"
"go/token"
"go/types"
"sort"
"strconv"
"strings"
"unicode"
"unicode/utf8"
"honnef.co/go/tools/code"
"honnef.co/go/tools/config"
"honnef.co/go/tools/internal/passes/buildssa"
"honnef.co/go/tools/edit"
"honnef.co/go/tools/internal/passes/buildir"
"honnef.co/go/tools/ir"
. "honnef.co/go/tools/lint/lintdsl"
"honnef.co/go/tools/ssa"
"honnef.co/go/tools/pattern"
"honnef.co/go/tools/report"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
@ -36,14 +41,14 @@ func CheckPackageComment(pass *analysis.Pass) (interface{}, error) {
}
hasDocs := false
for _, f := range pass.Files {
if IsInTest(pass, f) {
if code.IsInTest(pass, f) {
continue
}
if f.Doc != nil && len(f.Doc.List) > 0 {
hasDocs = true
prefix := "Package " + f.Name.Name + " "
if !strings.HasPrefix(strings.TrimSpace(f.Doc.Text()), prefix) {
ReportNodef(pass, f.Doc, `package comment should be of the form "%s..."`, prefix)
report.Report(pass, f.Doc, fmt.Sprintf(`package comment should be of the form "%s..."`, prefix))
}
f.Doc.Text()
}
@ -51,10 +56,10 @@ func CheckPackageComment(pass *analysis.Pass) (interface{}, error) {
if !hasDocs {
for _, f := range pass.Files {
if IsInTest(pass, f) {
if code.IsInTest(pass, f) {
continue
}
ReportNodef(pass, f, "at least one file in a package should have a package comment")
report.Report(pass, f, "at least one file in a package should have a package comment", report.ShortRange())
}
}
return nil, nil
@ -72,8 +77,38 @@ func CheckDotImports(pass *analysis.Pass) (interface{}, error) {
}
}
if imp.Name != nil && imp.Name.Name == "." && !IsInTest(pass, f) {
ReportNodefFG(pass, imp, "should not use dot imports")
if imp.Name != nil && imp.Name.Name == "." && !code.IsInTest(pass, f) {
report.Report(pass, imp, "should not use dot imports", report.FilterGenerated())
}
}
}
return nil, nil
}
func CheckDuplicatedImports(pass *analysis.Pass) (interface{}, error) {
for _, f := range pass.Files {
// Collect all imports by their import path
imports := make(map[string][]*ast.ImportSpec, len(f.Imports))
for _, imp := range f.Imports {
imports[imp.Path.Value] = append(imports[imp.Path.Value], imp)
}
for path, value := range imports {
if path[1:len(path)-1] == "unsafe" {
// Don't flag unsafe. Cgo generated code imports
// unsafe using the blank identifier, and most
// user-written cgo code also imports unsafe
// explicitly.
continue
}
// If there's more than one import per path, we flag that
if len(value) > 1 {
s := fmt.Sprintf("package %s is being imported more than once", path)
opts := []report.Option{report.FilterGenerated()}
for _, imp := range value[1:] {
opts = append(opts, report.Related(imp, fmt.Sprintf("other import of %s", path)))
}
report.Report(pass, value[0], s, opts...)
}
}
}
@ -83,7 +118,7 @@ func CheckDotImports(pass *analysis.Pass) (interface{}, error) {
func CheckBlankImports(pass *analysis.Pass) (interface{}, error) {
fset := pass.Fset
for _, f := range pass.Files {
if IsInMain(pass, f) || IsInTest(pass, f) {
if code.IsMainLike(pass) || code.IsInTest(pass, f) {
continue
}
@ -117,7 +152,7 @@ func CheckBlankImports(pass *analysis.Pass) (interface{}, error) {
for i, imp := range f.Imports {
pos := fset.Position(imp.Pos())
if !IsBlank(imp.Name) {
if !code.IsBlank(imp.Name) {
continue
}
// Only flag the first blank import in a group of imports,
@ -126,13 +161,13 @@ func CheckBlankImports(pass *analysis.Pass) (interface{}, error) {
if i > 0 {
prev := f.Imports[i-1]
prevPos := fset.Position(prev.Pos())
if pos.Line-1 == prevPos.Line && IsBlank(prev.Name) {
if pos.Line-1 == prevPos.Line && code.IsBlank(prev.Name) {
continue
}
}
if imp.Doc == nil && imp.Comment == nil && !skip[imp] {
ReportNodef(pass, imp, "a blank import should be only in a main or test package, or have a comment justifying it")
report.Report(pass, imp, "a blank import should be only in a main or test package, or have a comment justifying it")
}
}
}
@ -152,7 +187,7 @@ func CheckIncDec(pass *analysis.Pass) (interface{}, error) {
return
}
if (len(assign.Lhs) != 1 || len(assign.Rhs) != 1) ||
!IsIntLiteral(assign.Rhs[0], "1") {
!code.IsIntLiteral(assign.Rhs[0], "1") {
return
}
@ -164,15 +199,15 @@ func CheckIncDec(pass *analysis.Pass) (interface{}, error) {
suffix = "--"
}
ReportNodef(pass, assign, "should replace %s with %s%s", Render(pass, assign), Render(pass, assign.Lhs[0]), suffix)
report.Report(pass, assign, fmt.Sprintf("should replace %s with %s%s", report.Render(pass, assign), report.Render(pass, assign.Lhs[0]), suffix))
}
pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn)
code.Preorder(pass, fn, (*ast.AssignStmt)(nil))
return nil, nil
}
func CheckErrorReturn(pass *analysis.Pass) (interface{}, error) {
fnLoop:
for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
sig := fn.Type().(*types.Signature)
rets := sig.Results()
if rets == nil || rets.Len() < 2 {
@ -186,7 +221,7 @@ fnLoop:
}
for i := rets.Len() - 2; i >= 0; i-- {
if rets.At(i).Type() == types.Universe.Lookup("error").Type() {
pass.Reportf(rets.At(i).Pos(), "error should be returned as the last argument")
report.Report(pass, rets.At(i), "error should be returned as the last argument", report.ShortRange())
continue fnLoop
}
}
@ -197,23 +232,23 @@ fnLoop:
// CheckUnexportedReturn checks that exported functions on exported
// types do not return unexported types.
func CheckUnexportedReturn(pass *analysis.Pass) (interface{}, error) {
for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
if fn.Synthetic != "" || fn.Parent() != nil {
continue
}
if !ast.IsExported(fn.Name()) || IsInMain(pass, fn) || IsInTest(pass, fn) {
if !ast.IsExported(fn.Name()) || code.IsMain(pass) || code.IsInTest(pass, fn) {
continue
}
sig := fn.Type().(*types.Signature)
if sig.Recv() != nil && !ast.IsExported(Dereference(sig.Recv().Type()).(*types.Named).Obj().Name()) {
if sig.Recv() != nil && !ast.IsExported(code.Dereference(sig.Recv().Type()).(*types.Named).Obj().Name()) {
continue
}
res := sig.Results()
for i := 0; i < res.Len(); i++ {
if named, ok := DereferenceR(res.At(i).Type()).(*types.Named); ok &&
if named, ok := code.DereferenceR(res.At(i).Type()).(*types.Named); ok &&
!ast.IsExported(named.Obj().Name()) &&
named != types.Universe.Lookup("error").Type() {
pass.Reportf(fn.Pos(), "should not return unexported type")
report.Report(pass, fn, "should not return unexported type")
}
}
}
@ -221,22 +256,22 @@ func CheckUnexportedReturn(pass *analysis.Pass) (interface{}, error) {
}
func CheckReceiverNames(pass *analysis.Pass) (interface{}, error) {
ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).Pkg
for _, m := range ssapkg.Members {
irpkg := pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg
for _, m := range irpkg.Members {
if T, ok := m.Object().(*types.TypeName); ok && !T.IsAlias() {
ms := typeutil.IntuitiveMethodSet(T.Type(), nil)
for _, sel := range ms {
fn := sel.Obj().(*types.Func)
recv := fn.Type().(*types.Signature).Recv()
if Dereference(recv.Type()) != T.Type() {
if code.Dereference(recv.Type()) != T.Type() {
// skip embedded methods
continue
}
if recv.Name() == "self" || recv.Name() == "this" {
ReportfFG(pass, recv.Pos(), `receiver name should be a reflection of its identity; don't use generic names such as "this" or "self"`)
report.Report(pass, recv, `receiver name should be a reflection of its identity; don't use generic names such as "this" or "self"`, report.FilterGenerated())
}
if recv.Name() == "_" {
ReportfFG(pass, recv.Pos(), "receiver name should not be an underscore, omit the name if it is unused")
report.Report(pass, recv, "receiver name should not be an underscore, omit the name if it is unused", report.FilterGenerated())
}
}
}
@ -245,8 +280,8 @@ func CheckReceiverNames(pass *analysis.Pass) (interface{}, error) {
}
func CheckReceiverNamesIdentical(pass *analysis.Pass) (interface{}, error) {
ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).Pkg
for _, m := range ssapkg.Members {
irpkg := pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg
for _, m := range irpkg.Members {
names := map[string]int{}
var firstFn *types.Func
@ -255,7 +290,11 @@ func CheckReceiverNamesIdentical(pass *analysis.Pass) (interface{}, error) {
for _, sel := range ms {
fn := sel.Obj().(*types.Func)
recv := fn.Type().(*types.Signature).Recv()
if Dereference(recv.Type()) != T.Type() {
if code.IsGenerated(pass, recv.Pos()) {
// Don't concern ourselves with methods in generated code
continue
}
if code.Dereference(recv.Type()) != T.Type() {
// skip embedded methods
continue
}
@ -273,8 +312,9 @@ func CheckReceiverNamesIdentical(pass *analysis.Pass) (interface{}, error) {
for name, count := range names {
seen = append(seen, fmt.Sprintf("%dx %q", count, name))
}
sort.Strings(seen)
pass.Reportf(firstFn.Pos(), "methods on the same type should have the same receiver name (seen %s)", strings.Join(seen, ", "))
report.Report(pass, firstFn, fmt.Sprintf("methods on the same type should have the same receiver name (seen %s)", strings.Join(seen, ", ")))
}
}
return nil, nil
@ -284,7 +324,7 @@ func CheckContextFirstArg(pass *analysis.Pass) (interface{}, error) {
// TODO(dh): this check doesn't apply to test helpers. Example from the stdlib:
// func helperCommandContext(t *testing.T, ctx context.Context, s ...string) (cmd *exec.Cmd) {
fnLoop:
for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
if fn.Synthetic != "" || fn.Parent() != nil {
continue
}
@ -298,7 +338,7 @@ fnLoop:
for i := 1; i < params.Len(); i++ {
param := params.At(i)
if types.TypeString(param.Type(), nil) == "context.Context" {
pass.Reportf(param.Pos(), "context.Context should be the first argument of a function")
report.Report(pass, param, "context.Context should be the first argument of a function", report.ShortRange())
continue fnLoop
}
}
@ -307,20 +347,20 @@ fnLoop:
}
func CheckErrorStrings(pass *analysis.Pass) (interface{}, error) {
objNames := map[*ssa.Package]map[string]bool{}
ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).Pkg
objNames[ssapkg] = map[string]bool{}
for _, m := range ssapkg.Members {
if typ, ok := m.(*ssa.Type); ok {
objNames[ssapkg][typ.Name()] = true
objNames := map[*ir.Package]map[string]bool{}
irpkg := pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg
objNames[irpkg] = map[string]bool{}
for _, m := range irpkg.Members {
if typ, ok := m.(*ir.Type); ok {
objNames[irpkg][typ.Name()] = true
}
}
for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
objNames[fn.Package()][fn.Name()] = true
}
for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs {
if IsInTest(pass, fn) {
for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs {
if code.IsInTest(pass, fn) {
// We don't care about malformed error messages in tests;
// they're usually for direct human consumption, not part
// of an API
@ -329,15 +369,15 @@ func CheckErrorStrings(pass *analysis.Pass) (interface{}, error) {
for _, block := range fn.Blocks {
instrLoop:
for _, ins := range block.Instrs {
call, ok := ins.(*ssa.Call)
call, ok := ins.(*ir.Call)
if !ok {
continue
}
if !IsCallTo(call.Common(), "errors.New") && !IsCallTo(call.Common(), "fmt.Errorf") {
if !code.IsCallToAny(call.Common(), "errors.New", "fmt.Errorf") {
continue
}
k, ok := call.Common().Args[0].(*ssa.Const)
k, ok := call.Common().Args[0].(*ir.Const)
if !ok {
continue
}
@ -348,7 +388,7 @@ func CheckErrorStrings(pass *analysis.Pass) (interface{}, error) {
}
switch s[len(s)-1] {
case '.', ':', '!', '\n':
pass.Reportf(call.Pos(), "error strings should not end with punctuation or a newline")
report.Report(pass, call, "error strings should not end with punctuation or a newline")
}
idx := strings.IndexByte(s, ' ')
if idx == -1 {
@ -382,7 +422,7 @@ func CheckErrorStrings(pass *analysis.Pass) (interface{}, error) {
//
// It could still be a proper noun, though.
pass.Reportf(call.Pos(), "error strings should not be capitalized")
report.Report(pass, call, "error strings should not be capitalized")
}
}
}
@ -397,34 +437,47 @@ func CheckTimeNames(pass *analysis.Pass) (interface{}, error) {
"Usec", "Usecs", "Microseconds",
"MS", "Ms",
}
fn := func(T types.Type, names []*ast.Ident) {
if !IsType(T, "time.Duration") && !IsType(T, "*time.Duration") {
return
}
fn := func(names []*ast.Ident) {
for _, name := range names {
if _, ok := pass.TypesInfo.Defs[name]; !ok {
continue
}
T := pass.TypesInfo.TypeOf(name)
if !code.IsType(T, "time.Duration") && !code.IsType(T, "*time.Duration") {
continue
}
for _, suffix := range suffixes {
if strings.HasSuffix(name.Name, suffix) {
ReportNodef(pass, name, "var %s is of type %v; don't use unit-specific suffix %q", name.Name, T, suffix)
report.Report(pass, name, fmt.Sprintf("var %s is of type %v; don't use unit-specific suffix %q", name.Name, T, suffix))
break
}
}
}
}
for _, f := range pass.Files {
ast.Inspect(f, func(node ast.Node) bool {
switch node := node.(type) {
case *ast.ValueSpec:
T := pass.TypesInfo.TypeOf(node.Type)
fn(T, node.Names)
case *ast.FieldList:
for _, field := range node.List {
T := pass.TypesInfo.TypeOf(field.Type)
fn(T, field.Names)
fn2 := func(node ast.Node) {
switch node := node.(type) {
case *ast.ValueSpec:
fn(node.Names)
case *ast.FieldList:
for _, field := range node.List {
fn(field.Names)
}
case *ast.AssignStmt:
if node.Tok != token.DEFINE {
break
}
var names []*ast.Ident
for _, lhs := range node.Lhs {
if lhs, ok := lhs.(*ast.Ident); ok {
names = append(names, lhs)
}
}
return true
})
fn(names)
}
}
code.Preorder(pass, fn2, (*ast.ValueSpec)(nil), (*ast.FieldList)(nil), (*ast.AssignStmt)(nil))
return nil, nil
}
@ -443,16 +496,21 @@ func CheckErrorVarNames(pass *analysis.Pass) (interface{}, error) {
for i, name := range spec.Names {
val := spec.Values[i]
if !IsCallToAST(pass, val, "errors.New") && !IsCallToAST(pass, val, "fmt.Errorf") {
if !code.IsCallToAnyAST(pass, val, "errors.New", "fmt.Errorf") {
continue
}
if pass.Pkg.Path() == "net/http" && strings.HasPrefix(name.Name, "http2err") {
// special case for internal variable names of
// bundled HTTP 2 code in net/http
continue
}
prefix := "err"
if name.IsExported() {
prefix = "Err"
}
if !strings.HasPrefix(name.Name, prefix) {
ReportNodef(pass, name, "error var %s should have name of the form %sFoo", name.Name, prefix)
report.Report(pass, name, fmt.Sprintf("error var %s should have name of the form %sFoo", name.Name, prefix))
}
}
}
@ -528,17 +586,11 @@ func CheckHTTPStatusCodes(pass *analysis.Pass) (interface{}, error) {
for _, code := range config.For(pass).HTTPStatusCodeWhitelist {
whitelist[code] = true
}
fn := func(node ast.Node) bool {
if node == nil {
return true
}
call, ok := node.(*ast.CallExpr)
if !ok {
return true
}
fn := func(node ast.Node) {
call := node.(*ast.CallExpr)
var arg int
switch CallNameAST(pass, call) {
switch code.CallNameAST(pass, call) {
case "net/http.Error":
arg = 2
case "net/http.Redirect":
@ -548,31 +600,29 @@ func CheckHTTPStatusCodes(pass *analysis.Pass) (interface{}, error) {
case "net/http.RedirectHandler":
arg = 1
default:
return true
return
}
lit, ok := call.Args[arg].(*ast.BasicLit)
if !ok {
return true
return
}
if whitelist[lit.Value] {
return true
return
}
n, err := strconv.Atoi(lit.Value)
if err != nil {
return true
return
}
s, ok := httpStatusCodes[n]
if !ok {
return true
return
}
ReportNodefFG(pass, lit, "should use constant http.%s instead of numeric literal %d", s, n)
return true
}
// OPT(dh): replace with inspector
for _, f := range pass.Files {
ast.Inspect(f, fn)
report.Report(pass, lit, fmt.Sprintf("should use constant http.%s instead of numeric literal %d", s, n),
report.FilterGenerated(),
report.Fixes(edit.Fix(fmt.Sprintf("use http.%s instead of %d", s, n), edit.ReplaceWithString(pass.Fset, lit, "http."+s))))
}
code.Preorder(pass, fn, (*ast.CallExpr)(nil))
return nil, nil
}
@ -582,31 +632,29 @@ func CheckDefaultCaseOrder(pass *analysis.Pass) (interface{}, error) {
list := stmt.Body.List
for i, c := range list {
if c.(*ast.CaseClause).List == nil && i != 0 && i != len(list)-1 {
ReportNodefFG(pass, c, "default case should be first or last in switch statement")
report.Report(pass, c, "default case should be first or last in switch statement", report.FilterGenerated())
break
}
}
}
pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.SwitchStmt)(nil)}, fn)
code.Preorder(pass, fn, (*ast.SwitchStmt)(nil))
return nil, nil
}
var (
checkYodaConditionsQ = pattern.MustParse(`(BinaryExpr left@(BasicLit _ _) tok@(Or "==" "!=") right@(Not (BasicLit _ _)))`)
checkYodaConditionsR = pattern.MustParse(`(BinaryExpr right tok left)`)
)
func CheckYodaConditions(pass *analysis.Pass) (interface{}, error) {
fn := func(node ast.Node) {
cond := node.(*ast.BinaryExpr)
if cond.Op != token.EQL && cond.Op != token.NEQ {
return
if _, edits, ok := MatchAndEdit(pass, checkYodaConditionsQ, checkYodaConditionsR, node); ok {
report.Report(pass, node, "don't use Yoda conditions",
report.FilterGenerated(),
report.Fixes(edit.Fix("un-Yoda-fy", edits...)))
}
if _, ok := cond.X.(*ast.BasicLit); !ok {
return
}
if _, ok := cond.Y.(*ast.BasicLit); ok {
// Don't flag lit == lit conditions, just in case
return
}
ReportNodefFG(pass, cond, "don't use Yoda conditions")
}
pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn)
code.Preorder(pass, fn, (*ast.BinaryExpr)(nil))
return nil, nil
}
@ -616,14 +664,251 @@ func CheckInvisibleCharacters(pass *analysis.Pass) (interface{}, error) {
if lit.Kind != token.STRING {
return
}
for _, r := range lit.Value {
type invalid struct {
r rune
off int
}
var invalids []invalid
hasFormat := false
hasControl := false
for off, r := range lit.Value {
if unicode.Is(unicode.Cf, r) {
ReportNodef(pass, lit, "string literal contains the Unicode format character %U, consider using the %q escape sequence", r, r)
invalids = append(invalids, invalid{r, off})
hasFormat = true
} else if unicode.Is(unicode.Cc, r) && r != '\n' && r != '\t' && r != '\r' {
ReportNodef(pass, lit, "string literal contains the Unicode control character %U, consider using the %q escape sequence", r, r)
invalids = append(invalids, invalid{r, off})
hasControl = true
}
}
switch len(invalids) {
case 0:
return
case 1:
var kind string
if hasFormat {
kind = "format"
} else if hasControl {
kind = "control"
} else {
panic("unreachable")
}
r := invalids[0]
msg := fmt.Sprintf("string literal contains the Unicode %s character %U, consider using the %q escape sequence instead", kind, r.r, r.r)
replacement := strconv.QuoteRune(r.r)
replacement = replacement[1 : len(replacement)-1]
edit := analysis.SuggestedFix{
Message: fmt.Sprintf("replace %s character %U with %q", kind, r.r, r.r),
TextEdits: []analysis.TextEdit{{
Pos: lit.Pos() + token.Pos(r.off),
End: lit.Pos() + token.Pos(r.off) + token.Pos(utf8.RuneLen(r.r)),
NewText: []byte(replacement),
}},
}
delete := analysis.SuggestedFix{
Message: fmt.Sprintf("delete %s character %U", kind, r),
TextEdits: []analysis.TextEdit{{
Pos: lit.Pos() + token.Pos(r.off),
End: lit.Pos() + token.Pos(r.off) + token.Pos(utf8.RuneLen(r.r)),
}},
}
report.Report(pass, lit, msg, report.Fixes(edit, delete))
default:
var kind string
if hasFormat && hasControl {
kind = "format and control"
} else if hasFormat {
kind = "format"
} else if hasControl {
kind = "control"
} else {
panic("unreachable")
}
msg := fmt.Sprintf("string literal contains Unicode %s characters, consider using escape sequences instead", kind)
var edits []analysis.TextEdit
var deletions []analysis.TextEdit
for _, r := range invalids {
replacement := strconv.QuoteRune(r.r)
replacement = replacement[1 : len(replacement)-1]
edits = append(edits, analysis.TextEdit{
Pos: lit.Pos() + token.Pos(r.off),
End: lit.Pos() + token.Pos(r.off) + token.Pos(utf8.RuneLen(r.r)),
NewText: []byte(replacement),
})
deletions = append(deletions, analysis.TextEdit{
Pos: lit.Pos() + token.Pos(r.off),
End: lit.Pos() + token.Pos(r.off) + token.Pos(utf8.RuneLen(r.r)),
})
}
edit := analysis.SuggestedFix{
Message: fmt.Sprintf("replace all %s characters with escape sequences", kind),
TextEdits: edits,
}
delete := analysis.SuggestedFix{
Message: fmt.Sprintf("delete all %s characters", kind),
TextEdits: deletions,
}
report.Report(pass, lit, msg, report.Fixes(edit, delete))
}
}
pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BasicLit)(nil)}, fn)
code.Preorder(pass, fn, (*ast.BasicLit)(nil))
return nil, nil
}
// CheckExportedFunctionDocs flags exported functions and methods whose
// doc comment does not begin with the declared name, following the Go
// documentation convention. Declarations without a doc comment, methods
// on unexported receiver types, and code in test files are all ignored.
func CheckExportedFunctionDocs(pass *analysis.Pass) (interface{}, error) {
	check := func(node ast.Node) {
		if code.IsInTest(pass, node) {
			return
		}
		fd := node.(*ast.FuncDecl)
		// Nothing to validate when there is no doc comment at all, or
		// when the declaration itself is unexported.
		if fd.Doc == nil || !ast.IsExported(fd.Name.Name) {
			return
		}
		kind := "function"
		if fd.Recv != nil {
			kind = "method"
			// Methods on unexported receiver types are effectively
			// unexported API; skip those.
			switch recv := fd.Recv.List[0].Type.(type) {
			case *ast.StarExpr:
				if !ast.IsExported(recv.X.(*ast.Ident).Name) {
					return
				}
			case *ast.Ident:
				if !ast.IsExported(recv.Name) {
					return
				}
			default:
				ExhaustiveTypeSwitch(recv)
			}
		}
		want := fd.Name.Name + " "
		if strings.HasPrefix(fd.Doc.Text(), want) {
			return
		}
		report.Report(pass, fd.Doc, fmt.Sprintf(`comment on exported %s %s should be of the form "%s..."`, kind, fd.Name.Name, want), report.FilterGenerated())
	}
	code.Preorder(pass, check, (*ast.FuncDecl)(nil))
	return nil, nil
}
// CheckExportedTypeDocs flags exported types whose doc comment does not
// start with the type's name, optionally preceded by one of the articles
// "A", "An" or "The".
func CheckExportedTypeDocs(pass *analysis.Pass) (interface{}, error) {
	// genDecl latches the GenDecl currently being descended into, so that a
	// TypeSpec without its own doc comment can fall back to the enclosing
	// declaration's comment. It is cleared again on every pop event.
	var genDecl *ast.GenDecl
	fn := func(node ast.Node, push bool) bool {
		if !push {
			// Leaving a node: forget the enclosing declaration.
			genDecl = nil
			return false
		}
		if code.IsInTest(pass, node) {
			return false
		}
		switch node := node.(type) {
		case *ast.GenDecl:
			if node.Tok == token.IMPORT {
				return false
			}
			genDecl = node
			return true
		case *ast.TypeSpec:
			if !ast.IsExported(node.Name.Name) {
				return false
			}
			doc := node.Doc
			if doc == nil {
				if len(genDecl.Specs) != 1 {
					// more than one spec in the GenDecl, don't validate the
					// docstring
					return false
				}
				if genDecl.Lparen.IsValid() {
					// 'type ( T )' is weird, don't guess the user's intention
					return false
				}
				doc = genDecl.Doc
				if doc == nil {
					return false
				}
			}
			// Strip one optional leading article before comparing the
			// comment's first word against the type name.
			s := doc.Text()
			articles := [...]string{"A", "An", "The"}
			for _, a := range articles {
				if strings.HasPrefix(s, a+" ") {
					s = s[len(a)+1:]
					break
				}
			}
			if !strings.HasPrefix(s, node.Name.Name+" ") {
				report.Report(pass, doc, fmt.Sprintf(`comment on exported type %s should be of the form "%s ..." (with optional leading article)`, node.Name.Name, node.Name.Name), report.FilterGenerated())
			}
			return false
		case *ast.FuncLit, *ast.FuncDecl:
			// Don't descend into function bodies: type declarations local
			// to a function are not exported API.
			return false
		default:
			ExhaustiveTypeSwitch(node)
			return false
		}
	}
	pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Nodes([]ast.Node{(*ast.GenDecl)(nil), (*ast.TypeSpec)(nil), (*ast.FuncLit)(nil), (*ast.FuncDecl)(nil)}, fn)
	return nil, nil
}
// CheckExportedVarDocs flags exported variables and constants whose doc
// comment (taken from the enclosing var/const declaration) does not start
// with the declared name.
func CheckExportedVarDocs(pass *analysis.Pass) (interface{}, error) {
	// genDecl latches the GenDecl currently being descended into so that a
	// ValueSpec can consult the declaration's doc comment. It is cleared
	// again on every pop event.
	var genDecl *ast.GenDecl
	fn := func(node ast.Node, push bool) bool {
		if !push {
			// Leaving a node: forget the enclosing declaration.
			genDecl = nil
			return false
		}
		if code.IsInTest(pass, node) {
			return false
		}
		switch node := node.(type) {
		case *ast.GenDecl:
			if node.Tok == token.IMPORT {
				return false
			}
			genDecl = node
			return true
		case *ast.ValueSpec:
			// Grouped declarations and multi-name specs are skipped: the
			// comment could refer to any of the names.
			if genDecl.Lparen.IsValid() || len(node.Names) > 1 {
				// Don't try to guess the user's intention
				return false
			}
			name := node.Names[0].Name
			if !ast.IsExported(name) {
				return false
			}
			if genDecl.Doc == nil {
				return false
			}
			prefix := name + " "
			if !strings.HasPrefix(genDecl.Doc.Text(), prefix) {
				kind := "var"
				if genDecl.Tok == token.CONST {
					kind = "const"
				}
				report.Report(pass, genDecl.Doc, fmt.Sprintf(`comment on exported %s %s should be of the form "%s..."`, kind, name, prefix), report.FilterGenerated())
			}
			return false
		case *ast.FuncLit, *ast.FuncDecl:
			// Don't descend into function bodies: declarations local to a
			// function are not exported API.
			return false
		default:
			ExhaustiveTypeSwitch(node)
			return false
		}
	}
	pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Nodes([]ast.Node{(*ast.GenDecl)(nil), (*ast.ValueSpec)(nil), (*ast.FuncLit)(nil), (*ast.FuncDecl)(nil)}, fn)
	return nil, nil
}

View File

@ -4,14 +4,16 @@
package stylecheck
import (
"fmt"
"go/ast"
"go/token"
"strings"
"unicode"
"golang.org/x/tools/go/analysis"
"honnef.co/go/tools/code"
"honnef.co/go/tools/config"
. "honnef.co/go/tools/lint/lintdsl"
"honnef.co/go/tools/report"
)
// knownNameExceptions is a set of names that are known to be exempt from naming checks.
@ -46,7 +48,7 @@ func CheckNames(pass *analysis.Pass) (interface{}, error) {
// Handle two common styles from other languages that don't belong in Go.
if len(id.Name) >= 5 && allCaps(id.Name) && strings.Contains(id.Name, "_") {
ReportfFG(pass, id.Pos(), "should not use ALL_CAPS in Go names; use CamelCase instead")
report.Report(pass, id, "should not use ALL_CAPS in Go names; use CamelCase instead", report.FilterGenerated())
return
}
@ -56,10 +58,10 @@ func CheckNames(pass *analysis.Pass) (interface{}, error) {
}
if len(id.Name) > 2 && strings.Contains(id.Name[1:len(id.Name)-1], "_") {
ReportfFG(pass, id.Pos(), "should not use underscores in Go names; %s %s should be %s", thing, id.Name, should)
report.Report(pass, id, fmt.Sprintf("should not use underscores in Go names; %s %s should be %s", thing, id.Name, should), report.FilterGenerated())
return
}
ReportfFG(pass, id.Pos(), "%s %s should be %s", thing, id.Name, should)
report.Report(pass, id, fmt.Sprintf("%s %s should be %s", thing, id.Name, should), report.FilterGenerated())
}
checkList := func(fl *ast.FieldList, thing string, initialisms map[string]bool) {
if fl == nil {
@ -80,101 +82,111 @@ func CheckNames(pass *analysis.Pass) (interface{}, error) {
for _, f := range pass.Files {
// Package names need slightly different handling than other names.
if !strings.HasSuffix(f.Name.Name, "_test") && strings.Contains(f.Name.Name, "_") {
ReportfFG(pass, f.Pos(), "should not use underscores in package names")
report.Report(pass, f, "should not use underscores in package names", report.FilterGenerated())
}
if strings.IndexFunc(f.Name.Name, unicode.IsUpper) != -1 {
ReportfFG(pass, f.Pos(), "should not use MixedCaps in package name; %s should be %s", f.Name.Name, strings.ToLower(f.Name.Name))
report.Report(pass, f, fmt.Sprintf("should not use MixedCaps in package name; %s should be %s", f.Name.Name, strings.ToLower(f.Name.Name)), report.FilterGenerated())
}
}
ast.Inspect(f, func(node ast.Node) bool {
switch v := node.(type) {
case *ast.AssignStmt:
if v.Tok != token.DEFINE {
return true
}
for _, exp := range v.Lhs {
if id, ok := exp.(*ast.Ident); ok {
check(id, "var", initialisms)
}
}
case *ast.FuncDecl:
// Functions with no body are defined elsewhere (in
// assembly, or via go:linkname). These are likely to
// be something very low level (such as the runtime),
// where our rules don't apply.
if v.Body == nil {
return true
fn := func(node ast.Node) {
switch v := node.(type) {
case *ast.AssignStmt:
if v.Tok != token.DEFINE {
return
}
for _, exp := range v.Lhs {
if id, ok := exp.(*ast.Ident); ok {
check(id, "var", initialisms)
}
}
case *ast.FuncDecl:
// Functions with no body are defined elsewhere (in
// assembly, or via go:linkname). These are likely to
// be something very low level (such as the runtime),
// where our rules don't apply.
if v.Body == nil {
return
}
if IsInTest(pass, v) && (strings.HasPrefix(v.Name.Name, "Example") || strings.HasPrefix(v.Name.Name, "Test") || strings.HasPrefix(v.Name.Name, "Benchmark")) {
return true
}
if code.IsInTest(pass, v) && (strings.HasPrefix(v.Name.Name, "Example") || strings.HasPrefix(v.Name.Name, "Test") || strings.HasPrefix(v.Name.Name, "Benchmark")) {
return
}
thing := "func"
if v.Recv != nil {
thing = "method"
}
thing := "func"
if v.Recv != nil {
thing = "method"
}
if !isTechnicallyExported(v) {
check(v.Name, thing, initialisms)
}
if !isTechnicallyExported(v) {
check(v.Name, thing, initialisms)
}
checkList(v.Type.Params, thing+" parameter", initialisms)
checkList(v.Type.Results, thing+" result", initialisms)
case *ast.GenDecl:
if v.Tok == token.IMPORT {
return true
}
var thing string
switch v.Tok {
case token.CONST:
thing = "const"
case token.TYPE:
thing = "type"
case token.VAR:
thing = "var"
}
for _, spec := range v.Specs {
switch s := spec.(type) {
case *ast.TypeSpec:
check(s.Name, thing, initialisms)
case *ast.ValueSpec:
for _, id := range s.Names {
check(id, thing, initialisms)
}
}
}
case *ast.InterfaceType:
// Do not check interface method names.
// They are often constrainted by the method names of concrete types.
for _, x := range v.Methods.List {
ft, ok := x.Type.(*ast.FuncType)
if !ok { // might be an embedded interface name
continue
}
checkList(ft.Params, "interface method parameter", initialisms)
checkList(ft.Results, "interface method result", initialisms)
}
case *ast.RangeStmt:
if v.Tok == token.ASSIGN {
return true
}
if id, ok := v.Key.(*ast.Ident); ok {
check(id, "range var", initialisms)
}
if id, ok := v.Value.(*ast.Ident); ok {
check(id, "range var", initialisms)
}
case *ast.StructType:
for _, f := range v.Fields.List {
for _, id := range f.Names {
check(id, "struct field", initialisms)
checkList(v.Type.Params, thing+" parameter", initialisms)
checkList(v.Type.Results, thing+" result", initialisms)
case *ast.GenDecl:
if v.Tok == token.IMPORT {
return
}
var thing string
switch v.Tok {
case token.CONST:
thing = "const"
case token.TYPE:
thing = "type"
case token.VAR:
thing = "var"
}
for _, spec := range v.Specs {
switch s := spec.(type) {
case *ast.TypeSpec:
check(s.Name, thing, initialisms)
case *ast.ValueSpec:
for _, id := range s.Names {
check(id, thing, initialisms)
}
}
}
return true
})
case *ast.InterfaceType:
// Do not check interface method names.
// They are often constrained by the method names of concrete types.
for _, x := range v.Methods.List {
ft, ok := x.Type.(*ast.FuncType)
if !ok { // might be an embedded interface name
continue
}
checkList(ft.Params, "interface method parameter", initialisms)
checkList(ft.Results, "interface method result", initialisms)
}
case *ast.RangeStmt:
if v.Tok == token.ASSIGN {
return
}
if id, ok := v.Key.(*ast.Ident); ok {
check(id, "range var", initialisms)
}
if id, ok := v.Value.(*ast.Ident); ok {
check(id, "range var", initialisms)
}
case *ast.StructType:
for _, f := range v.Fields.List {
for _, id := range f.Names {
check(id, "struct field", initialisms)
}
}
}
}
needle := []ast.Node{
(*ast.AssignStmt)(nil),
(*ast.FuncDecl)(nil),
(*ast.GenDecl)(nil),
(*ast.InterfaceType)(nil),
(*ast.RangeStmt)(nil),
(*ast.StructType)(nil),
}
code.Preorder(pass, fn, needle...)
return nil, nil
}

View File

@ -11,11 +11,11 @@ import (
"sync/atomic"
"golang.org/x/tools/go/analysis"
"honnef.co/go/tools/code"
"honnef.co/go/tools/go/types/typeutil"
"honnef.co/go/tools/internal/passes/buildssa"
"honnef.co/go/tools/internal/passes/buildir"
"honnef.co/go/tools/ir"
"honnef.co/go/tools/lint"
"honnef.co/go/tools/lint/lintdsl"
"honnef.co/go/tools/ssa"
)
// The graph we construct omits nodes along a path that do not
@ -103,7 +103,7 @@ import (
from method to type.
- (8.3) All interface methods are marked as used, even if they never get
called. This is to accomodate sum types (unexported interface
called. This is to accommodate sum types (unexported interface
method that must exist but never gets called.)
- (8.4) All embedded interfaces are marked as used. This is an
@ -419,8 +419,8 @@ type pkg struct {
Pkg *types.Package
TypesInfo *types.Info
TypesSizes types.Sizes
SSA *ssa.Package
SrcFuncs []*ssa.Function
IR *ir.Package
SrcFuncs []*ir.Function
}
type Checker struct {
@ -450,7 +450,7 @@ func (c *Checker) Analyzer() *analysis.Analyzer {
Name: name,
Doc: "Unused code",
Run: c.Run,
Requires: []*analysis.Analyzer{buildssa.Analyzer},
Requires: []*analysis.Analyzer{buildir.Analyzer},
}
}
@ -477,15 +477,15 @@ func (c *Checker) Run(pass *analysis.Pass) (interface{}, error) {
c.initialPackages[pass.Pkg] = struct{}{}
c.mu.Unlock()
ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA)
irpkg := pass.ResultOf[buildir.Analyzer].(*buildir.IR)
pkg := &pkg{
Fset: pass.Fset,
Files: pass.Files,
Pkg: pass.Pkg,
TypesInfo: pass.TypesInfo,
TypesSizes: pass.TypesSizes,
SSA: ssapkg.Pkg,
SrcFuncs: ssapkg.SrcFuncs,
IR: irpkg.Pkg,
SrcFuncs: irpkg.SrcFuncs,
}
c.processPkg(c.graph, pkg)
@ -638,10 +638,9 @@ func (c *Checker) results() []types.Object {
c.debugf("digraph{\n")
debugNode(c.graph.Root)
c.graph.Nodes.Range(func(k, v interface{}) bool {
debugNode(v.(*Node))
return true
})
for _, v := range c.graph.Nodes {
debugNode(v)
}
c.graph.TypeNodes.Iterate(func(key types.Type, value interface{}) {
debugNode(value.(*Node))
})
@ -655,10 +654,9 @@ func (c *Checker) results() []types.Object {
// don't flag its receiver. if a named type is unused, don't
// flag its methods.
c.graph.Nodes.Range(func(k, v interface{}) bool {
c.graph.quieten(v.(*Node))
return true
})
for _, v := range c.graph.Nodes {
c.graph.quieten(v)
}
c.graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) {
c.graph.quieten(value.(*Node))
})
@ -688,10 +686,9 @@ func (c *Checker) results() []types.Object {
}
c.debugf("n%d [color=gray];\n", node.id)
}
c.graph.Nodes.Range(func(k, v interface{}) bool {
report(v.(*Node))
return true
})
for _, v := range c.graph.Nodes {
report(v)
}
c.graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) {
report(value.(*Node))
})
@ -779,8 +776,6 @@ type Graph struct {
fset *token.FileSet
Root *Node
seenTypes typeutil.Map
Nodes sync.Map // map[interface{}]*Node
objNodes sync.Map // map[objNodeKey]*Node
// read-only
wholeProgram bool
@ -788,6 +783,8 @@ type Graph struct {
// need synchronisation
mu sync.Mutex
TypeNodes typeutil.Map
Nodes map[interface{}]*Node
objNodes map[objNodeKey]*Node
}
type context struct {
@ -796,13 +793,13 @@ type context struct {
seenFns map[string]struct{}
seenTypes *typeutil.Map
nodeCounter uint64
// local cache for the map in Graph
typeNodes typeutil.Map
}
func NewGraph() *Graph {
g := &Graph{}
g := &Graph{
Nodes: map[interface{}]*Node{},
objNodes: map[objNodeKey]*Node{},
}
g.Root = g.newNode(&context{}, nil)
return g
}
@ -844,49 +841,48 @@ type Node struct {
}
func (g *Graph) nodeMaybe(obj types.Object) (*Node, bool) {
if node, ok := g.Nodes.Load(obj); ok {
return node.(*Node), true
g.mu.Lock()
defer g.mu.Unlock()
if node, ok := g.Nodes[obj]; ok {
return node, true
}
return nil, false
}
func (g *Graph) node(ctx *context, obj interface{}) (node *Node, new bool) {
if t, ok := obj.(types.Type); ok {
if v := ctx.typeNodes.At(t); v != nil {
g.mu.Lock()
defer g.mu.Unlock()
switch obj := obj.(type) {
case types.Type:
if v := g.TypeNodes.At(obj); v != nil {
return v.(*Node), false
}
g.mu.Lock()
defer g.mu.Unlock()
if v := g.TypeNodes.At(t); v != nil {
return v.(*Node), false
}
node := g.newNode(ctx, t)
g.TypeNodes.Set(t, node)
ctx.typeNodes.Set(t, node)
node := g.newNode(ctx, obj)
g.TypeNodes.Set(obj, node)
return node, true
}
case types.Object:
if node, ok := g.Nodes[obj]; ok {
return node, false
}
if node, ok := g.Nodes.Load(obj); ok {
return node.(*Node), false
}
if obj, ok := obj.(types.Object); ok {
key := objNodeKeyFor(g.fset, obj)
if o, ok := g.objNodes.Load(key); ok {
onode := o.(*Node)
if onode, ok := g.objNodes[key]; ok {
return onode, false
}
node = g.newNode(ctx, obj)
g.Nodes.Store(obj, node)
g.objNodes.Store(key, node)
g.Nodes[obj] = node
g.objNodes[key] = node
return node, true
default:
if node, ok := g.Nodes[obj]; ok {
return node, false
}
node = g.newNode(ctx, obj)
g.Nodes[obj] = node
return node, true
}
node = g.newNode(ctx, obj)
g.Nodes.Store(obj, node)
return node, true
}
func (g *Graph) newNode(ctx *context, obj interface{}) *Node {
@ -1067,7 +1063,7 @@ func (g *Graph) entry(pkg *pkg) {
ctx.seenTypes = &typeutil.Map{}
}
scopes := map[*types.Scope]*ssa.Function{}
scopes := map[*types.Scope]*ir.Function{}
for _, fn := range pkg.SrcFuncs {
if fn.Object() != nil {
scope := fn.Object().(*types.Func).Scope()
@ -1087,12 +1083,12 @@ func (g *Graph) entry(pkg *pkg) {
// (1.8) packages use symbols linked via go:linkname
fields := strings.Fields(c.Text)
if len(fields) == 3 {
if m, ok := pkg.SSA.Members[fields[1]]; ok {
if m, ok := pkg.IR.Members[fields[1]]; ok {
var obj types.Object
switch m := m.(type) {
case *ssa.Global:
case *ir.Global:
obj = m.Object()
case *ssa.Function:
case *ir.Function:
obj = m.Object()
default:
panic(fmt.Sprintf("unhandled type: %T", m))
@ -1106,7 +1102,7 @@ func (g *Graph) entry(pkg *pkg) {
}
}
surroundingFunc := func(obj types.Object) *ssa.Function {
surroundingFunc := func(obj types.Object) *ir.Function {
scope := obj.Parent()
for scope != nil {
if fn := scopes[scope]; fn != nil {
@ -1117,10 +1113,10 @@ func (g *Graph) entry(pkg *pkg) {
return nil
}
// SSA form won't tell us about locally scoped types that aren't
// IR form won't tell us about locally scoped types that aren't
// being used. Walk the list of Defs to get all named types.
//
// SSA form also won't tell us about constants; use Defs and Uses
// IR form also won't tell us about constants; use Defs and Uses
// to determine which constants exist and which are being used.
for _, obj := range pkg.TypesInfo.Defs {
switch obj := obj.(type) {
@ -1143,7 +1139,7 @@ func (g *Graph) entry(pkg *pkg) {
if fn.Object() != nil {
ctx.see(fn.Object())
}
node := fn.Syntax()
node := fn.Source()
if node == nil {
continue
}
@ -1221,7 +1217,7 @@ func (g *Graph) entry(pkg *pkg) {
case *ast.GenDecl:
switch n.Tok {
case token.CONST:
groups := lintdsl.GroupSpecs(pkg.Fset, n.Specs)
groups := code.GroupSpecs(pkg.Fset, n.Specs)
for _, specs := range groups {
if len(specs) > 1 {
cg := &ConstGroup{}
@ -1293,11 +1289,11 @@ func (g *Graph) entry(pkg *pkg) {
})
}
for _, m := range pkg.SSA.Members {
for _, m := range pkg.IR.Members {
switch m := m.(type) {
case *ssa.NamedConst:
case *ir.NamedConst:
// nothing to do, we collect all constants from Defs
case *ssa.Global:
case *ir.Global:
if m.Object() != nil {
ctx.see(m.Object())
if g.trackExportedIdentifier(ctx, m.Object()) {
@ -1305,7 +1301,7 @@ func (g *Graph) entry(pkg *pkg) {
ctx.use(m.Object(), nil, edgeExportedVariable)
}
}
case *ssa.Function:
case *ir.Function:
mObj := owningObject(m)
if mObj != nil {
ctx.see(mObj)
@ -1331,8 +1327,8 @@ func (g *Graph) entry(pkg *pkg) {
// (9.8) runtime functions that may be called from user code via the compiler
ctx.use(mObj, nil, edgeRuntimeFunction)
}
if m.Syntax() != nil {
doc := m.Syntax().(*ast.FuncDecl).Doc
if m.Source() != nil {
doc := m.Source().(*ast.FuncDecl).Doc
if doc != nil {
for _, cmt := range doc.List {
if strings.HasPrefix(cmt.Text, "//go:cgo_export_") {
@ -1343,7 +1339,7 @@ func (g *Graph) entry(pkg *pkg) {
}
}
g.function(ctx, m)
case *ssa.Type:
case *ir.Type:
if m.Object() != nil {
ctx.see(m.Object())
if g.trackExportedIdentifier(ctx, m.Object()) {
@ -1383,7 +1379,7 @@ func (g *Graph) entry(pkg *pkg) {
// (8.0) handle interfaces
for _, t := range notIfaces {
ms := pkg.SSA.Prog.MethodSets.MethodSet(t)
ms := pkg.IR.Prog.MethodSets.MethodSet(t)
for _, iface := range ifaces {
if sels, ok := g.implements(t, iface, ms); ok {
for _, sel := range sels {
@ -1400,19 +1396,19 @@ func (g *Graph) useMethod(ctx *context, t types.Type, sel *types.Selection, by i
path := sel.Index()
assert(obj != nil)
if len(path) > 1 {
base := lintdsl.Dereference(t).Underlying().(*types.Struct)
base := code.Dereference(t).Underlying().(*types.Struct)
for _, idx := range path[:len(path)-1] {
next := base.Field(idx)
// (6.3) structs use embedded fields that help implement interfaces
ctx.see(base)
ctx.seeAndUse(next, base, edgeProvidesMethod)
base, _ = lintdsl.Dereference(next.Type()).Underlying().(*types.Struct)
base, _ = code.Dereference(next.Type()).Underlying().(*types.Struct)
}
}
ctx.seeAndUse(obj, by, kind)
}
func owningObject(fn *ssa.Function) types.Object {
func owningObject(fn *ir.Function) types.Object {
if fn.Object() != nil {
return fn.Object()
}
@ -1422,8 +1418,8 @@ func owningObject(fn *ssa.Function) types.Object {
return nil
}
func (g *Graph) function(ctx *context, fn *ssa.Function) {
if fn.Package() != nil && fn.Package() != ctx.pkg.SSA {
func (g *Graph) function(ctx *context, fn *ir.Function) {
if fn.Package() != nil && fn.Package() != ctx.pkg.IR {
return
}
@ -1503,7 +1499,7 @@ func (g *Graph) typ(ctx *context, t types.Type, parent types.Type) {
// the pointer type to get the full method set
T = types.NewPointer(T)
}
ms := ctx.pkg.SSA.Prog.MethodSets.MethodSet(T)
ms := ctx.pkg.IR.Prog.MethodSets.MethodSet(T)
for j := 0; j < ms.Len(); j++ {
if ms.At(j).Obj().Exported() {
// (6.4) structs use embedded fields that have exported methods (recursively)
@ -1516,7 +1512,7 @@ func (g *Graph) typ(ctx *context, t types.Type, parent types.Type) {
seen := map[*types.Struct]struct{}{}
var hasExportedField func(t types.Type) bool
hasExportedField = func(T types.Type) bool {
t, ok := lintdsl.Dereference(T).Underlying().(*types.Struct)
t, ok := code.Dereference(T).Underlying().(*types.Struct)
if !ok {
return false
}
@ -1565,7 +1561,7 @@ func (g *Graph) typ(ctx *context, t types.Type, parent types.Type) {
// (2.1) named types use exported methods
ctx.use(t.Method(i), t, edgeExportedMethod)
}
g.function(ctx, ctx.pkg.SSA.Prog.FuncValue(t.Method(i)))
g.function(ctx, ctx.pkg.IR.Prog.FuncValue(t.Method(i)))
}
g.typ(ctx, t.Underlying(), t)
@ -1646,22 +1642,22 @@ func (g *Graph) signature(ctx *context, sig *types.Signature, fn types.Object) {
}
}
func (g *Graph) instructions(ctx *context, fn *ssa.Function) {
func (g *Graph) instructions(ctx *context, fn *ir.Function) {
fnObj := owningObject(fn)
for _, b := range fn.Blocks {
for _, instr := range b.Instrs {
ops := instr.Operands(nil)
switch instr.(type) {
case *ssa.Store:
case *ir.Store:
// (9.7) variable _reads_ use variables, writes do not
ops = ops[1:]
case *ssa.DebugRef:
case *ir.DebugRef:
ops = nil
}
for _, arg := range ops {
walkPhi(*arg, func(v ssa.Value) {
walkPhi(*arg, func(v ir.Value) {
switch v := v.(type) {
case *ssa.Function:
case *ir.Function:
// (4.3) functions use closures and bound methods.
// (4.5) functions use functions they call
// (9.5) instructions use their operands
@ -1670,11 +1666,11 @@ func (g *Graph) instructions(ctx *context, fn *ssa.Function) {
ctx.seeAndUse(owningObject(v), fnObj, edgeInstructionOperand)
}
g.function(ctx, v)
case *ssa.Const:
case *ir.Const:
// (9.6) instructions use their operands' types
ctx.seeAndUse(v.Type(), fnObj, edgeType)
g.typ(ctx, v.Type(), nil)
case *ssa.Global:
case *ir.Global:
if v.Object() != nil {
// (9.5) instructions use their operands
ctx.seeAndUse(v.Object(), fnObj, edgeInstructionOperand)
@ -1682,8 +1678,8 @@ func (g *Graph) instructions(ctx *context, fn *ssa.Function) {
}
})
}
if v, ok := instr.(ssa.Value); ok {
if _, ok := v.(*ssa.Range); !ok {
if v, ok := instr.(ir.Value); ok {
if _, ok := v.(*ir.Range); !ok {
// See https://github.com/golang/go/issues/19670
// (4.8) instructions use their types
@ -1693,29 +1689,29 @@ func (g *Graph) instructions(ctx *context, fn *ssa.Function) {
}
}
switch instr := instr.(type) {
case *ssa.Field:
case *ir.Field:
st := instr.X.Type().Underlying().(*types.Struct)
field := st.Field(instr.Field)
// (4.7) functions use fields they access
ctx.seeAndUse(field, fnObj, edgeFieldAccess)
case *ssa.FieldAddr:
st := lintdsl.Dereference(instr.X.Type()).Underlying().(*types.Struct)
case *ir.FieldAddr:
st := code.Dereference(instr.X.Type()).Underlying().(*types.Struct)
field := st.Field(instr.Field)
// (4.7) functions use fields they access
ctx.seeAndUse(field, fnObj, edgeFieldAccess)
case *ssa.Store:
case *ir.Store:
// nothing to do, handled generically by operands
case *ssa.Call:
case *ir.Call:
c := instr.Common()
if !c.IsInvoke() {
// handled generically as an instruction operand
if g.wholeProgram {
// (e3) special case known reflection-based method callers
switch lintdsl.CallName(c) {
switch code.CallName(c) {
case "net/rpc.Register", "net/rpc.RegisterName", "(*net/rpc.Server).Register", "(*net/rpc.Server).RegisterName":
var arg ssa.Value
switch lintdsl.CallName(c) {
var arg ir.Value
switch code.CallName(c) {
case "net/rpc.Register":
arg = c.Args[0]
case "net/rpc.RegisterName":
@ -1725,10 +1721,10 @@ func (g *Graph) instructions(ctx *context, fn *ssa.Function) {
case "(*net/rpc.Server).RegisterName":
arg = c.Args[2]
}
walkPhi(arg, func(v ssa.Value) {
if v, ok := v.(*ssa.MakeInterface); ok {
walkPhi(v.X, func(vv ssa.Value) {
ms := ctx.pkg.SSA.Prog.MethodSets.MethodSet(vv.Type())
walkPhi(arg, func(v ir.Value) {
if v, ok := v.(*ir.MakeInterface); ok {
walkPhi(v.X, func(vv ir.Value) {
ms := ctx.pkg.IR.Prog.MethodSets.MethodSet(vv.Type())
for i := 0; i < ms.Len(); i++ {
if ms.At(i).Obj().Exported() {
g.useMethod(ctx, vv.Type(), ms.At(i), fnObj, edgeNetRPCRegister)
@ -1743,13 +1739,13 @@ func (g *Graph) instructions(ctx *context, fn *ssa.Function) {
// (4.5) functions use functions/interface methods they call
ctx.seeAndUse(c.Method, fnObj, edgeInterfaceCall)
}
case *ssa.Return:
case *ir.Return:
// nothing to do, handled generically by operands
case *ssa.ChangeType:
case *ir.ChangeType:
// conversion type handled generically
s1, ok1 := lintdsl.Dereference(instr.Type()).Underlying().(*types.Struct)
s2, ok2 := lintdsl.Dereference(instr.X.Type()).Underlying().(*types.Struct)
s1, ok1 := code.Dereference(instr.Type()).Underlying().(*types.Struct)
s2, ok2 := code.Dereference(instr.X.Type()).Underlying().(*types.Struct)
if ok1 && ok2 {
// Converting between two structs. The fields are
// relevant for the conversion, but only if the
@ -1768,13 +1764,13 @@ func (g *Graph) instructions(ctx *context, fn *ssa.Function) {
ctx.seeAndUse(s2.Field(i), s1.Field(i), edgeStructConversion)
}
}
case *ssa.MakeInterface:
case *ir.MakeInterface:
// nothing to do, handled generically by operands
case *ssa.Slice:
case *ir.Slice:
// nothing to do, handled generically by operands
case *ssa.RunDefers:
case *ir.RunDefers:
// nothing to do, the deferred functions are already marked as used by deferring them.
case *ssa.Convert:
case *ir.Convert:
// to unsafe.Pointer
if typ, ok := instr.Type().(*types.Basic); ok && typ.Kind() == types.UnsafePointer {
if ptr, ok := instr.X.Type().Underlying().(*types.Pointer); ok {
@ -1797,61 +1793,79 @@ func (g *Graph) instructions(ctx *context, fn *ssa.Function) {
}
}
}
case *ssa.TypeAssert:
case *ir.TypeAssert:
// nothing to do, handled generically by instruction
// type (possibly a tuple, which contains the asserted
// to type). redundantly handled by the type of
// ssa.Extract, too
case *ssa.MakeClosure:
// ir.Extract, too
case *ir.MakeClosure:
// nothing to do, handled generically by operands
case *ssa.Alloc:
case *ir.Alloc:
// nothing to do
case *ssa.UnOp:
case *ir.UnOp:
// nothing to do
case *ssa.BinOp:
case *ir.BinOp:
// nothing to do
case *ssa.If:
case *ir.If:
// nothing to do
case *ssa.Jump:
case *ir.Jump:
// nothing to do
case *ssa.IndexAddr:
case *ir.Unreachable:
// nothing to do
case *ssa.Extract:
case *ir.IndexAddr:
// nothing to do
case *ssa.Panic:
case *ir.Extract:
// nothing to do
case *ssa.DebugRef:
case *ir.Panic:
// nothing to do
case *ssa.BlankStore:
case *ir.DebugRef:
// nothing to do
case *ssa.Phi:
case *ir.BlankStore:
// nothing to do
case *ssa.MakeMap:
case *ir.Phi:
// nothing to do
case *ssa.MapUpdate:
case *ir.Sigma:
// nothing to do
case *ssa.Lookup:
case *ir.MakeMap:
// nothing to do
case *ssa.MakeSlice:
case *ir.MapUpdate:
// nothing to do
case *ssa.Send:
case *ir.MapLookup:
// nothing to do
case *ssa.MakeChan:
case *ir.StringLookup:
// nothing to do
case *ssa.Range:
case *ir.MakeSlice:
// nothing to do
case *ssa.Next:
case *ir.Send:
// nothing to do
case *ssa.Index:
case *ir.MakeChan:
// nothing to do
case *ssa.Select:
case *ir.Range:
// nothing to do
case *ssa.ChangeInterface:
case *ir.Next:
// nothing to do
case *ir.Index:
// nothing to do
case *ir.Select:
// nothing to do
case *ir.ChangeInterface:
// nothing to do
case *ir.Load:
// nothing to do
case *ir.Go:
// nothing to do
case *ir.Defer:
// nothing to do
case *ir.Parameter:
// nothing to do
case *ir.Const:
// nothing to do
case *ir.Recv:
// nothing to do
case *ir.TypeSwitch:
// nothing to do
case *ir.ConstantSwitch:
// nothing to do
case *ssa.Go:
// nothing to do, handled generically by operands
case *ssa.Defer:
// nothing to do, handled generically by operands
default:
panic(fmt.Sprintf("unreachable: %T", instr))
}
@ -1892,22 +1906,22 @@ func isNoCopyType(typ types.Type) bool {
return true
}
func walkPhi(v ssa.Value, fn func(v ssa.Value)) {
phi, ok := v.(*ssa.Phi)
func walkPhi(v ir.Value, fn func(v ir.Value)) {
phi, ok := v.(*ir.Phi)
if !ok {
fn(v)
return
}
seen := map[ssa.Value]struct{}{}
var impl func(v *ssa.Phi)
impl = func(v *ssa.Phi) {
seen := map[ir.Value]struct{}{}
var impl func(v *ir.Phi)
impl = func(v *ir.Phi) {
if _, ok := seen[v]; ok {
return
}
seen[v] = struct{}{}
for _, e := range v.Edges {
if ev, ok := e.(*ssa.Phi); ok {
if ev, ok := e.(*ir.Phi); ok {
impl(ev)
} else {
fn(e)

View File

@ -7,7 +7,7 @@ import (
"runtime"
)
const Version = "2019.2.3"
const Version = "2020.1.4"
// version returns a version descriptor and reports whether the
// version is a known release.

14
vendor/modules.txt vendored
View File

@ -379,29 +379,33 @@ gopkg.in/ini.v1
gopkg.in/yaml.v2
# gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c
gopkg.in/yaml.v3
# honnef.co/go/tools v0.0.1-2019.2.3
# honnef.co/go/tools v0.0.1-2020.1.4
honnef.co/go/tools/arg
honnef.co/go/tools/cmd/staticcheck
honnef.co/go/tools/code
honnef.co/go/tools/config
honnef.co/go/tools/deprecated
honnef.co/go/tools/edit
honnef.co/go/tools/facts
honnef.co/go/tools/functions
honnef.co/go/tools/go/types/typeutil
honnef.co/go/tools/internal/cache
honnef.co/go/tools/internal/passes/buildssa
honnef.co/go/tools/internal/passes/buildir
honnef.co/go/tools/internal/renameio
honnef.co/go/tools/internal/robustio
honnef.co/go/tools/internal/sharedcheck
honnef.co/go/tools/ir
honnef.co/go/tools/ir/irutil
honnef.co/go/tools/lint
honnef.co/go/tools/lint/lintdsl
honnef.co/go/tools/lint/lintutil
honnef.co/go/tools/lint/lintutil/format
honnef.co/go/tools/loader
honnef.co/go/tools/pattern
honnef.co/go/tools/printf
honnef.co/go/tools/report
honnef.co/go/tools/simple
honnef.co/go/tools/ssa
honnef.co/go/tools/ssautil
honnef.co/go/tools/staticcheck
honnef.co/go/tools/staticcheck/vrp
honnef.co/go/tools/stylecheck
honnef.co/go/tools/unused
honnef.co/go/tools/version