Added tools to go mod
The build was successful.

konrad 2018-11-29 01:18:32 +01:00
parent d25a7119d6
commit 3ff775b4aa
Signed by: konrad
GPG Key ID: F40E70337AB24C9B
68 changed files with 43915 additions and 3 deletions


@@ -13,12 +13,9 @@ pipeline:
image: vikunja/golang-build:latest
pull: true
group: build
environment:
TAGS: bindata sqlite
commands:
- make lint
- make fmt-check
# - make got-swag # Commented out until we figured out how to get this working on drone
- make ineffassign-check
- make misspell-check
- make build

9
go.mod

@@ -1 +1,10 @@
module git.kolaente.de/sofaraum/client
require (
github.com/client9/misspell v0.3.4
github.com/fzipp/gocyclo v0.0.0-20150627053110-6acd4345c835
github.com/gordonklaus/ineffassign v0.0.0-20180909121442-1003c8bd00dc
github.com/karalabe/xgo v0.0.0-20181007145344-72da7d1d3970
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3
golang.org/x/tools v0.0.0-20181128225727-c5b00d9557fd // indirect
)

12
go.sum Normal file

@@ -0,0 +1,12 @@
github.com/client9/misspell v0.3.4 h1:ta993UF76GwbvJcIo3Y68y/M3WxlpEHPWIGDkJYwzJI=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/fzipp/gocyclo v0.0.0-20150627053110-6acd4345c835 h1:roDmqJ4Qes7hrDOsWsMCce0vQHz3xiMPjJ9m4c2eeNs=
github.com/fzipp/gocyclo v0.0.0-20150627053110-6acd4345c835/go.mod h1:BjL/N0+C+j9uNX+1xcNuM9vdSIcXCZrQZUYbXOFbgN8=
github.com/gordonklaus/ineffassign v0.0.0-20180909121442-1003c8bd00dc h1:cJlkeAx1QYgO5N80aF5xRGstVsRQwgLR7uA2FnP1ZjY=
github.com/gordonklaus/ineffassign v0.0.0-20180909121442-1003c8bd00dc/go.mod h1:cuNKsD1zp2v6XfE/orVX2QE1LC+i254ceGcVeDT3pTU=
github.com/karalabe/xgo v0.0.0-20181007145344-72da7d1d3970 h1:0+1ZURVRim6FxA/jhWhJklsgoWc69q1sxlIu2Ztnhy0=
github.com/karalabe/xgo v0.0.0-20181007145344-72da7d1d3970/go.mod h1:iYGcTYIPUvEWhFo6aKUuLchs+AV4ssYdyuBbQJZGcBk=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3 h1:x/bBzNauLQAlE3fLku/xy92Y8QwKX5HZymrMz2IiKFc=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/tools v0.0.0-20181128225727-c5b00d9557fd h1:iGtqU5YFAjHfcmM6XLsTOpIm8HrEmDpt+XiZaxrJhyE=
golang.org/x/tools v0.0.0-20181128225727-c5b00d9557fd/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=

47
tools.go Normal file

@@ -0,0 +1,47 @@
// Sofaraum client is the client software which collects statistics about
// wifi devices nearby and then sends them to the Sofaraum Server.
// Copyright (c) 2018.
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

// +build tools

package tools

// This file is needed for go mod to recognize the tools we use.
import (
_ "github.com/client9/misspell/cmd/misspell"
_ "github.com/fzipp/gocyclo"
_ "github.com/gordonklaus/ineffassign"
_ "github.com/karalabe/xgo"
_ "golang.org/x/lint/golint"
)
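The import block above is the standard "tools file" pattern: blank imports pull the linters into go.mod so their versions are pinned in go.sum. A minimal sketch, assuming a hypothetical `lint_generate.go` in one of the module's packages (the file and package names are placeholders, not part of this commit), of how the pinned versions can then be invoked:

```go
// Hypothetical file, assumed name lint_generate.go; the package name is a
// placeholder. go run resolves the import paths through go.mod, so these
// directives always run the pinned tool versions.
package client

//go:generate go run github.com/client9/misspell/cmd/misspell -error README.md
//go:generate go run golang.org/x/lint/golint ./...
```

Running `go generate ./...` would then execute exactly the versions recorded in go.sum.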

34
vendor/github.com/client9/misspell/.gitignore generated vendored Normal file

@@ -0,0 +1,34 @@
dist/
bin/
vendor/
# editor turds
*~
*.gz
*.bz2
*.csv
# Compiled Object files, Static and Dynamic libs (Shared Objects)
*.o
*.a
*.so
# Folders
_obj
_test
# Architecture specific extensions/prefixes
*.[568vq]
[568vq].out
*.cgo1.go
*.cgo2.c
_cgo_defun.c
_cgo_gotypes.go
_cgo_export.*
_testmain.go
*.exe
*.test
*.prof

20
vendor/github.com/client9/misspell/.travis.yml generated vendored Normal file

@@ -0,0 +1,20 @@
sudo: required
dist: trusty
group: edge
language: go
go:
- "1.10"
git:
depth: 1
script:
- ./scripts/travis.sh
# calls goreleaser when a new tag is pushed
deploy:
- provider: script
skip_cleanup: true
script: curl -sL http://git.io/goreleaser | bash
on:
tags: true
condition: $TRAVIS_OS_NAME = linux

37
vendor/github.com/client9/misspell/Dockerfile generated vendored Normal file

@@ -0,0 +1,37 @@
FROM golang:1.10.0-alpine
# cache buster
RUN echo 4
# git is needed for "go get" below
RUN apk add --no-cache git make
# these are my standard testing / linting tools
RUN /bin/true \
&& go get -u github.com/golang/dep/cmd/dep \
&& go get -u github.com/alecthomas/gometalinter \
&& gometalinter --install \
&& rm -rf /go/src /go/pkg
#
# * SCOWL word list
#
# Downloads
# http://wordlist.aspell.net/dicts/
# --> http://app.aspell.net/create
#
# use en_US large size
# use regular size for others
ENV SOURCE_US_BIG http://app.aspell.net/create?max_size=70&spelling=US&max_variant=2&diacritic=both&special=hacker&special=roman-numerals&download=wordlist&encoding=utf-8&format=inline
# should be able to tell the difference between English variations using this
ENV SOURCE_US http://app.aspell.net/create?max_size=60&spelling=US&max_variant=1&diacritic=both&download=wordlist&encoding=utf-8&format=inline
ENV SOURCE_GB_ISE http://app.aspell.net/create?max_size=60&spelling=GBs&max_variant=2&diacritic=both&download=wordlist&encoding=utf-8&format=inline
ENV SOURCE_GB_IZE http://app.aspell.net/create?max_size=60&spelling=GBz&max_variant=2&diacritic=both&download=wordlist&encoding=utf-8&format=inline
ENV SOURCE_CA http://app.aspell.net/create?max_size=60&spelling=CA&max_variant=2&diacritic=both&download=wordlist&encoding=utf-8&format=inline
RUN /bin/true \
&& mkdir /scowl-wl \
&& wget -O /scowl-wl/words-US-60.txt ${SOURCE_US} \
&& wget -O /scowl-wl/words-GB-ise-60.txt ${SOURCE_GB_ISE}

24
vendor/github.com/client9/misspell/Gopkg.lock generated vendored Normal file

@@ -0,0 +1,24 @@
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
[[projects]]
name = "github.com/gobwas/glob"
packages = [
".",
"compiler",
"match",
"syntax",
"syntax/ast",
"syntax/lexer",
"util/runes",
"util/strings"
]
revision = "5ccd90ef52e1e632236f7326478d4faa74f99438"
version = "v0.2.3"
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
inputs-digest = "087ea4c49358ea8258ad9edfe514cd5ce9975c889c258e5ec7b5d2b720aae113"
solver-name = "gps-cdcl"
solver-version = 1

34
vendor/github.com/client9/misspell/Gopkg.toml generated vendored Normal file

@@ -0,0 +1,34 @@
# Gopkg.toml example
#
# Refer to https://golang.github.io/dep/docs/Gopkg.toml.html
# for detailed Gopkg.toml documentation.
#
# required = ["github.com/user/thing/cmd/thing"]
# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
#
# [[constraint]]
# name = "github.com/user/project"
# version = "1.0.0"
#
# [[constraint]]
# name = "github.com/user/project2"
# branch = "dev"
# source = "github.com/myfork/project2"
#
# [[override]]
# name = "github.com/x/y"
# version = "2.4.0"
#
# [prune]
# non-go = false
# go-tests = true
# unused-packages = true
[[constraint]]
name = "github.com/gobwas/glob"
version = "0.2.3"
[prune]
go-tests = true
unused-packages = true

22
vendor/github.com/client9/misspell/LICENSE generated vendored Normal file

@@ -0,0 +1,22 @@
The MIT License (MIT)
Copyright (c) 2015-2017 Nick Galbreath
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

74
vendor/github.com/client9/misspell/Makefile generated vendored Normal file

@@ -0,0 +1,74 @@
CONTAINER=nickg/misspell
install: ## install misspell into GOPATH/bin
go install ./cmd/misspell
build: hooks ## build and lint misspell
./scripts/build.sh
test: ## run all tests
go test .
# real publishing is done only by travis
publish: ## test goreleaser
./scripts/goreleaser-dryrun.sh
# the grep in line 2 is to remove misspellings in the spelling dictionary
# that trigger false positives!!
falsepositives: /scowl-wl
cat /scowl-wl/words-US-60.txt | \
grep -i -v -E "payed|Tyre|Euclidian|nonoccurence|dependancy|reenforced|accidently|surprize|dependance|idealogy|binominal|causalities|conquerer|withing|casette|analyse|analogue|dialogue|paralyse|catalogue|archaeolog|clarinettist|catalyses|cancell|chisell|ageing|cataloguing" | \
misspell -debug -error
cat /scowl-wl/words-GB-ise-60.txt | \
grep -v -E "payed|nonoccurence|withing" | \
misspell -locale=UK -debug -error
# cat /scowl-wl/words-GB-ize-60.txt | \
# grep -v -E "withing" | \
# misspell -debug -error
# cat /scowl-wl/words-CA-60.txt | \
# grep -v -E "withing" | \
# misspell -debug -error
bench: ## run benchmarks
go test -bench '.*'
clean: ## clean up time
rm -rf dist/ bin/
go clean ./...
git gc --aggressive
ci: ## run test like travis-ci does, requires docker
docker run --rm \
-v $(PWD):/go/src/github.com/client9/misspell \
-w /go/src/github.com/client9/misspell \
${CONTAINER} \
make build falsepositives
docker-build: ## build a docker test image
docker build -t ${CONTAINER} .
docker-pull: ## pull latest test image
docker pull ${CONTAINER}
docker-console: ## log into the test image
docker run --rm -it \
-v $(PWD):/go/src/github.com/client9/misspell \
-w /go/src/github.com/client9/misspell \
${CONTAINER} sh
.git/hooks/pre-commit: scripts/pre-commit.sh
cp -f scripts/pre-commit.sh .git/hooks/pre-commit
.git/hooks/commit-msg: scripts/commit-msg.sh
cp -f scripts/commit-msg.sh .git/hooks/commit-msg
hooks: .git/hooks/pre-commit .git/hooks/commit-msg ## install git precommit hooks
.PHONY: help ci console docker-build bench
# https://www.client9.com/self-documenting-makefiles/
help:
@awk -F ':|##' '/^[^\t].+?:.*?##/ {\
printf "\033[36m%-30s\033[0m %s\n", $$1, $$NF \
}' $(MAKEFILE_LIST)
.DEFAULT_GOAL=help
.PHONY=help

424
vendor/github.com/client9/misspell/README.md generated vendored Normal file

@@ -0,0 +1,424 @@
[![Build Status](https://travis-ci.org/client9/misspell.svg?branch=master)](https://travis-ci.org/client9/misspell) [![Go Report Card](https://goreportcard.com/badge/github.com/client9/misspell)](https://goreportcard.com/report/github.com/client9/misspell) [![GoDoc](https://godoc.org/github.com/client9/misspell?status.svg)](https://godoc.org/github.com/client9/misspell) [![Coverage](http://gocover.io/_badge/github.com/client9/misspell)](http://gocover.io/github.com/client9/misspell) [![license](https://img.shields.io/badge/license-MIT-blue.svg?style=flat)](https://raw.githubusercontent.com/client9/misspell/master/LICENSE)
Correct commonly misspelled English words... quickly.
### Install
If you just want a binary and to start using `misspell`:
```
curl -L -o ./install-misspell.sh https://git.io/misspell
sh ./install-misspell.sh
```
Both will install as `./bin/misspell`. You can adjust the download location using the `-b` flag. File a ticket if you want another platform supported.
If you use [Go](https://golang.org/), the best way to run `misspell` is by using [gometalinter](#gometalinter). Otherwise, install `misspell` the old-fashioned way:
```
go get -u github.com/client9/misspell/cmd/misspell
```
and misspell will be in your `GOPATH`
Also if you like to live dangerously, one could do
```bash
curl -L https://git.io/misspell | bash
```
### Usage
```bash
$ misspell all.html your.txt important.md files.go
your.txt:42:10 found "langauge" a misspelling of "language"
# ^ file, line, column
```
```
$ misspell -help
Usage of misspell:
-debug
Debug matching, very slow
-error
Exit with 2 if misspelling found
-f string
'csv', 'sqlite3' or custom Golang template for output
-i string
ignore the following corrections, comma separated
-j int
Number of workers, 0 = number of CPUs
-legal
Show legal information and exit
-locale string
Correct spellings using locale perferances for US or UK. Default is to use a neutral variety of English. Setting locale to US will correct the British spelling of 'colour' to 'color'
-o string
output file or [stderr|stdout|] (default "stdout")
-q Do not emit misspelling output
-source string
Source mode: auto=guess, go=golang source, text=plain or markdown-like text (default "auto")
-w Overwrite file with corrections (default is just to display)
```
## FAQ
* [Automatic Corrections](#correct)
* [Converting UK spellings to US](#locale)
* [Using pipes and stdin](#stdin)
* [Golang special support](#golang)
* [gometalinter support](#gometalinter)
* [CSV Output](#csv)
* [Using SQLite3](#sqlite)
* [Changing output format](#output)
* [Checking a folder recursively](#recursive)
* [Performance](#performance)
* [Known Issues](#issues)
* [Debugging](#debug)
* [False Negatives and missing words](#missing)
* [Origin of Word Lists](#words)
* [Software License](#license)
* [Problem statement](#problem)
* [Other spelling correctors](#others)
* [Other ideas](#otherideas)
<a name="correct"></a>
### How can I make the corrections automatically?
Just add the `-w` flag!
```
$ misspell -w all.html your.txt important.md files.go
your.txt:9:21:corrected "langauge" to "language"
# ^ File is rewritten only if a misspelling is found
```
<a name="locale"></a>
### How do I convert British spellings to American (or vice-versa)?
Add the `-locale US` flag!
```bash
$ misspell -locale US important.txt
important.txt:10:20 found "colour" a misspelling of "color"
```
Add the `-locale UK` flag!
```bash
$ echo "My favorite color is blue" | misspell -locale UK
stdin:1:3:found "favorite color" a misspelling of "favourite colour"
```
Help is appreciated as I'm neither British nor an
expert in the English language.
<a name="recursive"></a>
### How do you check an entire folder recursively?
Just list a directory you'd like to check
```bash
misspell .
misspell aDirectory anotherDirectory aFile
```
You can also run misspell recursively using the following shell tricks:
```bash
misspell directory/**/*
```
or
```bash
find . -type f | xargs misspell
```
You can select a type of file as well. The following example selects all `.txt` files that are *not* in the `vendor` directory:
```bash
find . -type f -name '*.txt' | grep -v vendor/ | xargs misspell -error
```
<a name="stdin"></a>
### Can I use pipes or `stdin` for input?
Yes!
Print messages to `stderr` only:
```bash
$ echo "zeebra" | misspell
stdin:1:0:found "zeebra" a misspelling of "zebra"
```
Print messages to `stderr`, and corrected text to `stdout`:
```bash
$ echo "zeebra" | misspell -w
stdin:1:0:corrected "zeebra" to "zebra"
zebra
```
Only print the corrected text to `stdout`:
```bash
$ echo "zeebra" | misspell -w -q
zebra
```
<a name="golang"></a>
### Are there special rules for golang source files?
Yes! If the file ends in `.go`, then misspell will only check spelling in
comments.
If you want to force a file to be checked as a golang source, use `-source=go`
on the command line. Conversely, you can check a golang source as if it were
pure text by using `-source=text`. You might want to do this since many
variable names have misspellings in them!
### Can I check only comments in other programming languages?
I'm told that using `-source=go` works well for ruby, javascript, java, c and
c++.
It doesn't work well for python and bash.
<a name="gometalinter"></a>
### Does this work with gometalinter?
[gometalinter](https://github.com/alecthomas/gometalinter) runs
multiple golang linters. Starting on [2016-06-12](https://github.com/alecthomas/gometalinter/pull/134)
gometalinter supports `misspell` natively but it is disabled by default.
```bash
# update your copy of gometalinter
go get -u github.com/alecthomas/gometalinter
# install updates and misspell
gometalinter --install --update
```
To use, just enable `misspell`
```
gometalinter --enable misspell ./...
```
Note that gometalinter only checks golang files, and uses the default options
of `misspell`.
You may wish to run this on your plaintext (.txt) and/or markdown files too.
<a name="csv"></a>
### How Can I Get CSV Output?
Using `-f csv`, the output is standard comma-separated values with headers in the first row.
```
misspell -f csv *
file,line,column,typo,corrected
"README.md",9,22,langauge,language
"README.md",47,25,langauge,language
```
<a name="sqlite"></a>
### How can I export to SQLite3?
Using `-f sqlite`, the output is a [sqlite3](https://www.sqlite.org/index.html) dump-file.
```bash
$ misspell -f sqlite * > /tmp/misspell.sql
$ cat /tmp/misspell.sql
PRAGMA foreign_keys=OFF;
BEGIN TRANSACTION;
CREATE TABLE misspell(
"file" TEXT,
"line" INTEGER,i
"column" INTEGER,i
"typo" TEXT,
"corrected" TEXT
);
INSERT INTO misspell VALUES("install.txt",202,31,"immediatly","immediately");
# etc...
COMMIT;
```
```bash
$ sqlite3 -init /tmp/misspell.sql :memory: 'select count(*) from misspell'
1
```
With some tricks you can directly pipe output to sqlite3 by using `-init /dev/stdin`:
```
misspell -f sqlite * | sqlite3 -init /dev/stdin -column -cmd '.width 60 15' ':memory' \
'select substr(file,35),typo,count(*) as count from misspell group by file, typo order by count desc;'
```
<a name="ignore"></a>
### How can I ignore rules?
Using the `-i "comma,separated,rules"` flag you can specify corrections to ignore.
For example, if you were to run `misspell -w -error -source=text` against a document that contains the string `Guy Finkelshteyn Braswell`, misspell would change the text to `Guy Finkelstheyn Bras well`. You can then
determine the rules to ignore by reverting the change and running with the `-debug` flag. You will then see
that the corrections were `htey -> they` and `aswell -> as well`. To ignore these two rules, add `-i "htey,aswell"` to
your command. With debug mode on, you can see it print the corrections, but it will no longer make them.
<a name="output"></a>
### How can I change the output format?
Using the `-f template` flag you can pass in a
[golang text template](https://golang.org/pkg/text/template/) to format the output.
One can use `printf "%q" VALUE` to safely quote a value.
The default template is compatible with [gometalinter](https://github.com/alecthomas/gometalinter)
```
{{ .Filename }}:{{ .Line }}:{{ .Column }}:corrected {{ printf "%q" .Original }} to "{{ printf "%q" .Corrected }}"
```
To just print probable misspellings:
```
-f '{{ .Original }}'
```
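A minimal sketch of how such a template is evaluated, assuming a stand-alone program and a local struct that mirrors the fields the templates reference (the real type is `misspell.Diff`, shown in replace.go further down in this commit):

```go
package main

import (
	"os"
	"text/template"
)

// diff mirrors the fields used by the output templates above.
type diff struct {
	Filename            string
	Line, Column        int
	Original, Corrected string
}

func main() {
	tmpl := template.Must(template.New("out").Parse(
		`{{ .Filename }}:{{ .Line }}:{{ .Column }}: corrected {{ printf "%q" .Original }} to {{ printf "%q" .Corrected }}` + "\n"))
	if err := tmpl.Execute(os.Stdout, diff{"your.txt", 42, 10, "langauge", "language"}); err != nil {
		panic(err)
	}
}
```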
<a name="problem"></a>
### What problem does this solve?
This corrects commonly misspelled English words in computer source
code, and other text-based formats (`.txt`, `.md`, etc).
It is designed to run quickly so it can be
used as a [pre-commit hook](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks)
with minimal burden on the developer.
It does not work with binary formats (e.g. Word, etc).
It is not a complete spell-checking program nor a grammar checker.
<a name="others"></a>
### What are other misspelling correctors and what's wrong with them?
Some other misspelling correctors:
* https://github.com/vlajos/misspell_fixer
* https://github.com/lyda/misspell-check
* https://github.com/lucasdemarchi/codespell
They all work but had problems that prevented me from using them at scale:
* slow, all of the above check one misspelling at a time (i.e. linear) using regexps
* not MIT/Apache2 licensed (or equivalent)
* have dependencies that don't work for me (python3, bash, linux sed, etc)
* don't understand American vs. British English and sometimes make unwelcome "corrections"
That said, they might be perfect for you and many have more features
than this project!
<a name="performance"></a>
### How fast is it?
Misspell is easily 100x to 1000x faster than other spelling correctors. You
should be able to check and correct 1000 files in under 250ms.
This uses the mighty power of golang's
[strings.Replacer](https://golang.org/pkg/strings/#Replacer) which is
an implementation or variation of the
[Aho-Corasick algorithm](https://en.wikipedia.org/wiki/AhoCorasick_algorithm).
This makes multiple substring matches *simultaneously*.
In addition this uses multiple CPU cores to work on multiple files.
<a name="issues"></a>
### What problems does it have?
Unlike the other projects, this doesn't know what a "word" is. There may be
more false positives and false negatives due to this. On the other hand, it
sometimes catches things others don't.
Either way, please file bugs and we'll fix them!
Since it operates in parallel to make corrections, it can be non-obvious to
determine exactly what word was corrected.
<a name="debug"></a>
### It's making mistakes. How can I debug?
Run using `-debug` flag on the file you want. It should then print what word
it is trying to correct. Then [file a
bug](https://github.com/client9/misspell/issues) describing the problem.
Thanks!
<a name="missing"></a>
### Why is it making mistakes or missing items in golang files?
The matching function is *case-sensitive*, so variable names that are multiple
words either in all-upper or all-lower case can sometimes cause false
positives. For instance, a variable named `bodyreader` could trigger a false
positive since the `yrea` in the middle could be corrected to `year`.
Other problems happen if the variable name uses an English contraction that
should use an apostrophe. The best way of fixing this is to use the
[Effective Go naming
conventions](https://golang.org/doc/effective_go.html#mixed-caps) and use
[camelCase](https://en.wikipedia.org/wiki/CamelCase) for variable names. You
can check your code using [golint](https://github.com/golang/lint)
<a name="license"></a>
### What license is this?
The main code is [MIT](https://github.com/client9/misspell/blob/master/LICENSE).
Misspell also makes uses of the Golang standard library and contains a modified version of Golang's [strings.Replacer](https://golang.org/pkg/strings/#Replacer)
which are covered under a [BSD License](https://github.com/golang/go/blob/master/LICENSE). Type `misspell -legal` for more details or see [legal.go](https://github.com/client9/misspell/blob/master/legal.go)
<a name="words"></a>
### Where do the word lists come from?
It started with a word list from
[Wikipedia](https://en.wikipedia.org/wiki/Wikipedia:Lists_of_common_misspellings/For_machines).
Unfortunately, this list had to be highly edited as many of the words are
obsolete or based on mistakes made on mechanical typewriters (I'm guessing).
Additional words were added based on actual mistakes seen in
the wild (meaning self-generated).
Variations of UK and US spellings are based on many sources including:
* http://www.tysto.com/uk-us-spelling-list.html (with heavy editing, many are incorrect)
* http://www.oxforddictionaries.com/us/words/american-and-british-spelling-american (excellent site but incomplete)
* Diffing US and UK [scowl dictionaries](http://wordlist.aspell.net)
American English is more accepting of spelling variations than is British
English, so "what is American or not" is subject to opinion. Corrections and help welcome.
<a name="otherideas"></a>
### What are some other enhancements that could be done?
Here's some ideas for enhancements:
*Capitalization of proper nouns* could be done (e.g. weekday and month names, country names, language names)
*Opinionated US spellings* US English has a number of words with alternate
spellings. Think [adviser vs.
advisor](http://grammarist.com/spelling/adviser-advisor/). While "advisor" is not wrong, the opinionated US
locale would correct "advisor" to "adviser".
*Versioning* Some type of versioning is needed so reporting mistakes and errors is easier.
*Feedback* Mistakes would be sent to some server for aggregation and feedback review.
*Contractions and Apostrophes* This would optionally correct "isnt" to
"isn't", etc.

38
vendor/github.com/client9/misspell/RELEASE-HOWTO.md generated vendored Normal file

@@ -0,0 +1,38 @@
# Release HOWTO
since I forget.
1. Review existing tags and pick new release number
```sh
git tag
```
2. Tag locally
```sh
git tag -a v0.1.0 -m "First release"
```
If things get screwed up, delete the tag with
```sh
git tag -d v0.1.0
```
3. Test goreleaser
TODO: how to install goreleaser
```sh
./scripts/goreleaser-dryrun.sh
```
4. Push
```bash
git push origin v0.1.0
```
5. Verify release and edit notes. See https://github.com/client9/misspell/releases

62
vendor/github.com/client9/misspell/ascii.go generated vendored Normal file

@@ -0,0 +1,62 @@
package misspell
// ByteToUpper converts an ascii byte to upper case
// Uses a branchless algorithm
func ByteToUpper(x byte) byte {
b := byte(0x80) | x
c := b - byte(0x61)
d := ^(b - byte(0x7b))
e := (c & d) & (^x & 0x7f)
return x - (e >> 2)
}
// ByteToLower converts an ascii byte to lower case
// uses a branchless algorithm
func ByteToLower(eax byte) byte {
ebx := eax&byte(0x7f) + byte(0x25)
ebx = ebx&byte(0x7f) + byte(0x1a)
ebx = ((ebx & ^eax) >> 2) & byte(0x20)
return eax + ebx
}
// ByteEqualFold does ascii compare, case insensitive
func ByteEqualFold(a, b byte) bool {
return a == b || ByteToLower(a) == ByteToLower(b)
}
// StringEqualFold ASCII case-insensitive comparison
// golang toUpper/toLower for both bytes and strings
// appears to be Unicode based which is super slow
// based from https://codereview.appspot.com/5180044/patch/14007/21002
func StringEqualFold(s1, s2 string) bool {
if len(s1) != len(s2) {
return false
}
for i := 0; i < len(s1); i++ {
c1 := s1[i]
c2 := s2[i]
// c1 & c2
if c1 != c2 {
c1 |= 'a' - 'A'
c2 |= 'a' - 'A'
if c1 != c2 || c1 < 'a' || c1 > 'z' {
return false
}
}
}
return true
}
// StringHasPrefixFold is similar to strings.HasPrefix but comparison
// is done ignoring ASCII case.
// /
func StringHasPrefixFold(s1, s2 string) bool {
// prefix is bigger than input --> false
if len(s1) < len(s2) {
return false
}
if len(s1) == len(s2) {
return StringEqualFold(s1, s2)
}
return StringEqualFold(s1[:len(s2)], s2)
}
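The case conversion above is branchless: the arithmetic folds the "is this an upper-case ASCII letter?" range check into a single bit that is shifted down to 0x20. A scratch check (an assumption, not part of this commit) that compares the ByteToLower arithmetic against a plain branching version over every byte value makes the trick concrete:

```go
package main

import "fmt"

// byteToLower copies the branchless arithmetic from ascii.go above.
func byteToLower(eax byte) byte {
	ebx := eax&byte(0x7f) + byte(0x25)
	ebx = ebx&byte(0x7f) + byte(0x1a)
	ebx = ((ebx & ^eax) >> 2) & byte(0x20)
	return eax + ebx
}

// lowerRef is the obvious branching version, used as a reference.
func lowerRef(b byte) byte {
	if b >= 'A' && b <= 'Z' {
		return b + ('a' - 'A')
	}
	return b
}

func main() {
	for i := 0; i < 256; i++ {
		b := byte(i)
		if byteToLower(b) != lowerRef(b) {
			fmt.Printf("mismatch at 0x%02x\n", b)
			return
		}
	}
	fmt.Println("branchless and branching versions agree for all 256 byte values")
}
```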

59
vendor/github.com/client9/misspell/case.go generated vendored Normal file

@@ -0,0 +1,59 @@
package misspell
import (
"strings"
)
// WordCase is an enum of various word casing styles
type WordCase int
// Various WordCase types.. likely to be not correct
const (
CaseUnknown WordCase = iota
CaseLower
CaseUpper
CaseTitle
)
// CaseStyle returns what case style a word is in
func CaseStyle(word string) WordCase {
upperCount := 0
lowerCount := 0
// this iterates over RUNES not BYTES
for i := 0; i < len(word); i++ {
ch := word[i]
switch {
case ch >= 'a' && ch <= 'z':
lowerCount++
case ch >= 'A' && ch <= 'Z':
upperCount++
}
}
switch {
case upperCount != 0 && lowerCount == 0:
return CaseUpper
case upperCount == 0 && lowerCount != 0:
return CaseLower
case upperCount == 1 && lowerCount > 0 && word[0] >= 'A' && word[0] <= 'Z':
return CaseTitle
}
return CaseUnknown
}
// CaseVariations returns
// If AllUpper or First-Letter-Only is upcased: add the all upper case version
// If AllLower, add the original, the title and upcase forms
// If Mixed, return the original, and the all upcase form
//
func CaseVariations(word string, style WordCase) []string {
switch style {
case CaseLower:
return []string{word, strings.ToUpper(word[0:1]) + word[1:], strings.ToUpper(word)}
case CaseUpper:
return []string{strings.ToUpper(word)}
default:
return []string{word, strings.ToUpper(word)}
}
}
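A scratch program (an assumption, not part of this commit) showing what CaseStyle and CaseVariations return; since go.mod already requires github.com/client9/misspell, the package can be imported directly:

```go
package main

import (
	"fmt"

	"github.com/client9/misspell"
)

func main() {
	// CaseStyle classifies a word; CaseVariations lists the spellings of a
	// correction that should be tried for that style.
	for _, w := range []string{"color", "Color", "COLOR", "cOlOr"} {
		style := misspell.CaseStyle(w)
		fmt.Printf("%-5s -> style %d, variations %v\n", w, style, misspell.CaseVariations(w, style))
	}
}
```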

326
vendor/github.com/client9/misspell/cmd/misspell/main.go generated vendored Normal file

@@ -0,0 +1,326 @@
// The misspell command corrects commonly misspelled English words in source files.
package main
import (
"bytes"
"flag"
"fmt"
"io"
"io/ioutil"
"log"
"os"
"path/filepath"
"runtime"
"strings"
"text/template"
"time"
"github.com/client9/misspell"
)
var (
defaultWrite *template.Template
defaultRead *template.Template
stdout *log.Logger
debug *log.Logger
version = "dev"
)
const (
// Note for gometalinter it must be "File:Line:Column: Msg"
// note space between ": Msg"
defaultWriteTmpl = `{{ .Filename }}:{{ .Line }}:{{ .Column }}: corrected "{{ .Original }}" to "{{ .Corrected }}"`
defaultReadTmpl = `{{ .Filename }}:{{ .Line }}:{{ .Column }}: "{{ .Original }}" is a misspelling of "{{ .Corrected }}"`
csvTmpl = `{{ printf "%q" .Filename }},{{ .Line }},{{ .Column }},{{ .Original }},{{ .Corrected }}`
csvHeader = `file,line,column,typo,corrected`
sqliteTmpl = `INSERT INTO misspell VALUES({{ printf "%q" .Filename }},{{ .Line }},{{ .Column }},{{ printf "%q" .Original }},{{ printf "%q" .Corrected }});`
sqliteHeader = `PRAGMA foreign_keys=OFF;
BEGIN TRANSACTION;
CREATE TABLE misspell(
"file" TEXT, "line" INTEGER, "column" INTEGER, "typo" TEXT, "corrected" TEXT
);`
sqliteFooter = "COMMIT;"
)
func worker(writeit bool, r *misspell.Replacer, mode string, files <-chan string, results chan<- int) {
count := 0
for filename := range files {
orig, err := misspell.ReadTextFile(filename)
if err != nil {
log.Println(err)
continue
}
if len(orig) == 0 {
continue
}
debug.Printf("Processing %s", filename)
var updated string
var changes []misspell.Diff
if mode == "go" {
updated, changes = r.ReplaceGo(orig)
} else {
updated, changes = r.Replace(orig)
}
if len(changes) == 0 {
continue
}
count += len(changes)
for _, diff := range changes {
// add in filename
diff.Filename = filename
// output can be done by doing multiple goroutines
// and can clobber os.Stdout.
//
// the log package can be used simultaneously from multiple goroutines
var output bytes.Buffer
if writeit {
defaultWrite.Execute(&output, diff)
} else {
defaultRead.Execute(&output, diff)
}
// goroutine-safe print to os.Stdout
stdout.Println(output.String())
}
if writeit {
ioutil.WriteFile(filename, []byte(updated), 0)
}
}
results <- count
}
func main() {
t := time.Now()
var (
workers = flag.Int("j", 0, "Number of workers, 0 = number of CPUs")
writeit = flag.Bool("w", false, "Overwrite file with corrections (default is just to display)")
quietFlag = flag.Bool("q", false, "Do not emit misspelling output")
outFlag = flag.String("o", "stdout", "output file or [stderr|stdout|]")
format = flag.String("f", "", "'csv', 'sqlite3' or custom Golang template for output")
ignores = flag.String("i", "", "ignore the following corrections, comma separated")
locale = flag.String("locale", "", "Correct spellings using locale perferances for US or UK. Default is to use a neutral variety of English. Setting locale to US will correct the British spelling of 'colour' to 'color'")
mode = flag.String("source", "auto", "Source mode: auto=guess, go=golang source, text=plain or markdown-like text")
debugFlag = flag.Bool("debug", false, "Debug matching, very slow")
exitError = flag.Bool("error", false, "Exit with 2 if misspelling found")
showVersion = flag.Bool("v", false, "Show version and exit")
showLegal = flag.Bool("legal", false, "Show legal information and exit")
)
flag.Parse()
if *showVersion {
fmt.Println(version)
return
}
if *showLegal {
fmt.Println(misspell.Legal)
return
}
if *debugFlag {
debug = log.New(os.Stderr, "DEBUG ", 0)
} else {
debug = log.New(ioutil.Discard, "", 0)
}
r := misspell.Replacer{
Replacements: misspell.DictMain,
Debug: *debugFlag,
}
//
// Figure out regional variations
//
switch strings.ToUpper(*locale) {
case "":
// nothing
case "US":
r.AddRuleList(misspell.DictAmerican)
case "UK", "GB":
r.AddRuleList(misspell.DictBritish)
case "NZ", "AU", "CA":
log.Fatalf("Help wanted. https://github.com/client9/misspell/issues/6")
default:
log.Fatalf("Unknown locale: %q", *locale)
}
//
// Stuff to ignore
//
if len(*ignores) > 0 {
r.RemoveRule(strings.Split(*ignores, ","))
}
//
// Source input mode
//
switch *mode {
case "auto":
case "go":
case "text":
default:
log.Fatalf("Mode must be one of auto=guess, go=golang source, text=plain or markdown-like text")
}
//
// Custom output
//
switch {
case *format == "csv":
tmpl := template.Must(template.New("csv").Parse(csvTmpl))
defaultWrite = tmpl
defaultRead = tmpl
stdout.Println(csvHeader)
case *format == "sqlite" || *format == "sqlite3":
tmpl := template.Must(template.New("sqlite3").Parse(sqliteTmpl))
defaultWrite = tmpl
defaultRead = tmpl
stdout.Println(sqliteHeader)
case len(*format) > 0:
t, err := template.New("custom").Parse(*format)
if err != nil {
log.Fatalf("Unable to compile log format: %s", err)
}
defaultWrite = t
defaultRead = t
default: // format == ""
defaultWrite = template.Must(template.New("defaultWrite").Parse(defaultWriteTmpl))
defaultRead = template.Must(template.New("defaultRead").Parse(defaultReadTmpl))
}
// we cant't just write to os.Stdout directly since we have multiple goroutine
// all writing at the same time causing broken output. Log is routine safe.
// we see it so it doesn't use a prefix or include a time stamp.
switch {
case *quietFlag || *outFlag == "/dev/null":
stdout = log.New(ioutil.Discard, "", 0)
case *outFlag == "/dev/stderr" || *outFlag == "stderr":
stdout = log.New(os.Stderr, "", 0)
case *outFlag == "/dev/stdout" || *outFlag == "stdout":
stdout = log.New(os.Stdout, "", 0)
case *outFlag == "" || *outFlag == "-":
stdout = log.New(os.Stdout, "", 0)
default:
fo, err := os.Create(*outFlag)
if err != nil {
log.Fatalf("unable to create outfile %q: %s", *outFlag, err)
}
defer fo.Close()
stdout = log.New(fo, "", 0)
}
//
// Number of Workers / CPU to use
//
if *workers < 0 {
log.Fatalf("-j must >= 0")
}
if *workers == 0 {
*workers = runtime.NumCPU()
}
if *debugFlag {
*workers = 1
}
//
// Done with Flags.
// Compile the Replacer and process files
//
r.Compile()
args := flag.Args()
debug.Printf("initialization complete in %v", time.Since(t))
// stdin/stdout
if len(args) == 0 {
// if we are working with pipes/stdin/stdout
// there is no concurrency, so we can directly
// send data to the writers
var fileout io.Writer
var errout io.Writer
switch *writeit {
case true:
// if we ARE writing the corrected stream
// the corrected stream goes to stdout
// and the misspelling errors goes to stderr
// so we can do something like this:
// curl something | misspell -w | gzip > afile.gz
fileout = os.Stdout
errout = os.Stderr
case false:
// if we are not writing out the corrected stream
// then work just like files. Misspelling errors
// are sent to stdout
fileout = ioutil.Discard
errout = os.Stdout
}
count := 0
next := func(diff misspell.Diff) {
count++
// don't even evaluate the output templates
if *quietFlag {
return
}
diff.Filename = "stdin"
if *writeit {
defaultWrite.Execute(errout, diff)
} else {
defaultRead.Execute(errout, diff)
}
errout.Write([]byte{'\n'})
}
err := r.ReplaceReader(os.Stdin, fileout, next)
if err != nil {
os.Exit(1)
}
switch *format {
case "sqlite", "sqlite3":
fileout.Write([]byte(sqliteFooter))
}
if count != 0 && *exitError {
// error
os.Exit(2)
}
return
}
c := make(chan string, 64)
results := make(chan int, *workers)
for i := 0; i < *workers; i++ {
go worker(*writeit, &r, *mode, c, results)
}
for _, filename := range args {
filepath.Walk(filename, func(path string, info os.FileInfo, err error) error {
if err == nil && !info.IsDir() {
c <- path
}
return nil
})
}
close(c)
count := 0
for i := 0; i < *workers; i++ {
changed := <-results
count += changed
}
switch *format {
case "sqlite", "sqlite3":
stdout.Println(sqliteFooter)
}
if count != 0 && *exitError {
os.Exit(2)
}
}
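The command above fans work out over a fixed number of goroutines: filenames flow through a buffered channel, each worker keeps its own correction count, and the per-worker counts are summed after the channel is closed. A minimal sketch of that pattern in isolation (illustrative only, with a stand-in for the actual file check):

```go
package main

import (
	"fmt"
	"runtime"
)

// worker drains the files channel and reports how many findings it made.
func worker(files <-chan string, results chan<- int) {
	count := 0
	for f := range files {
		count += len(f) % 3 // stand-in for "check one file, count misspellings"
	}
	results <- count
}

func main() {
	files := make(chan string, 64)
	workers := runtime.NumCPU()
	results := make(chan int, workers)

	for i := 0; i < workers; i++ {
		go worker(files, results)
	}
	for _, f := range []string{"README.md", "main.go", "docs/install.txt"} {
		files <- f
	}
	close(files) // lets every worker's range loop terminate

	total := 0
	for i := 0; i < workers; i++ {
		total += <-results
	}
	fmt.Println("total findings:", total)
}
```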

38
vendor/github.com/client9/misspell/goreleaser.yml generated vendored Normal file

@@ -0,0 +1,38 @@
# goreleaser.yml
# https://github.com/goreleaser/goreleaser
project_name: misspell
builds:
-
main: cmd/misspell/main.go
binary: misspell
ldflags: -s -w -X main.version={{.Version}}
goos:
- darwin
- linux
- windows
goarch:
- amd64
env:
- CGO_ENABLED=0
ignore:
- goos: darwin
goarch: 386
- goos: windows
goarch: 386
archive:
name_template: "{{ .Binary }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}"
replacements:
amd64: 64bit
386: 32bit
darwin: mac
files:
- none*
checksum:
name_template: "{{ .ProjectName }}_{{ .Version }}_checksums.txt"
snapshot:
name_template: "SNAPSHOT-{{.Commit}}"

318
vendor/github.com/client9/misspell/install-misspell.sh generated vendored Normal file

@@ -0,0 +1,318 @@
#!/bin/sh
set -e
# Code generated by godownloader. DO NOT EDIT.
#
usage() {
this=$1
cat <<EOF
$this: download go binaries for client9/misspell
Usage: $this [-b] bindir [version]
-b sets bindir or installation directory, default "./bin"
[version] is a version number from
https://github.com/client9/misspell/releases
If version is missing, then an attempt to find the latest will be found.
Generated by godownloader
https://github.com/goreleaser/godownloader
EOF
exit 2
}
parse_args() {
#BINDIR is ./bin unless set be ENV
# over-ridden by flag below
BINDIR=${BINDIR:-./bin}
while getopts "b:h?" arg; do
case "$arg" in
b) BINDIR="$OPTARG" ;;
h | \?) usage "$0" ;;
esac
done
shift $((OPTIND - 1))
VERSION=$1
}
# this function wraps all the destructive operations
# if a curl|bash cuts off the end of the script due to
# network, either nothing will happen or will syntax error
# out preventing half-done work
execute() {
TMPDIR=$(mktmpdir)
echo "$PREFIX: downloading ${TARBALL_URL}"
http_download "${TMPDIR}/${TARBALL}" "${TARBALL_URL}"
echo "$PREFIX: verifying checksums"
http_download "${TMPDIR}/${CHECKSUM}" "${CHECKSUM_URL}"
hash_sha256_verify "${TMPDIR}/${TARBALL}" "${TMPDIR}/${CHECKSUM}"
(cd "${TMPDIR}" && untar "${TARBALL}")
install -d "${BINDIR}"
install "${TMPDIR}/${BINARY}" "${BINDIR}/"
echo "$PREFIX: installed as ${BINDIR}/${BINARY}"
}
is_supported_platform() {
platform=$1
found=1
case "$platform" in
darwin/amd64) found=0 ;;
linux/amd64) found=0 ;;
esac
case "$platform" in
darwin/386) found=1 ;;
esac
return $found
}
check_platform() {
if is_supported_platform "$PLATFORM"; then
# optional logging goes here
true
else
echo "${PREFIX}: platform $PLATFORM is not supported. Make sure this script is up-to-date and file request at https://github.com/${PREFIX}/issues/new"
exit 1
fi
}
adjust_version() {
if [ -z "${VERSION}" ]; then
echo "$PREFIX: checking GitHub for latest version"
VERSION=$(github_last_release "$OWNER/$REPO")
fi
# if version starts with 'v', remove it
VERSION=${VERSION#v}
}
adjust_format() {
# change format (tar.gz or zip) based on ARCH
true
}
adjust_os() {
# adjust archive name based on OS
case ${OS} in
386) OS=32bit ;;
amd64) OS=64bit ;;
darwin) OS=mac ;;
esac
true
}
adjust_arch() {
# adjust archive name based on ARCH
case ${ARCH} in
386) ARCH=32bit ;;
amd64) ARCH=64bit ;;
darwin) ARCH=mac ;;
esac
true
}
cat /dev/null <<EOF
------------------------------------------------------------------------
https://github.com/client9/shlib - portable posix shell functions
Public domain - http://unlicense.org
https://github.com/client9/shlib/blob/master/LICENSE.md
but credit (and pull requests) appreciated.
------------------------------------------------------------------------
EOF
is_command() {
command -v "$1" >/dev/null
}
uname_os() {
os=$(uname -s | tr '[:upper:]' '[:lower:]')
echo "$os"
}
uname_arch() {
arch=$(uname -m)
case $arch in
x86_64) arch="amd64" ;;
x86) arch="386" ;;
i686) arch="386" ;;
i386) arch="386" ;;
aarch64) arch="arm64" ;;
armv5*) arch="arm5" ;;
armv6*) arch="arm6" ;;
armv7*) arch="arm7" ;;
esac
echo ${arch}
}
uname_os_check() {
os=$(uname_os)
case "$os" in
darwin) return 0 ;;
dragonfly) return 0 ;;
freebsd) return 0 ;;
linux) return 0 ;;
android) return 0 ;;
nacl) return 0 ;;
netbsd) return 0 ;;
openbsd) return 0 ;;
plan9) return 0 ;;
solaris) return 0 ;;
windows) return 0 ;;
esac
echo "$0: uname_os_check: internal error '$(uname -s)' got converted to '$os' which is not a GOOS value. Please file bug at https://github.com/client9/shlib"
return 1
}
uname_arch_check() {
arch=$(uname_arch)
case "$arch" in
386) return 0 ;;
amd64) return 0 ;;
arm64) return 0 ;;
armv5) return 0 ;;
armv6) return 0 ;;
armv7) return 0 ;;
ppc64) return 0 ;;
ppc64le) return 0 ;;
mips) return 0 ;;
mipsle) return 0 ;;
mips64) return 0 ;;
mips64le) return 0 ;;
s390x) return 0 ;;
amd64p32) return 0 ;;
esac
echo "$0: uname_arch_check: internal error '$(uname -m)' got converted to '$arch' which is not a GOARCH value. Please file bug report at https://github.com/client9/shlib"
return 1
}
untar() {
tarball=$1
case "${tarball}" in
*.tar.gz | *.tgz) tar -xzf "${tarball}" ;;
*.tar) tar -xf "${tarball}" ;;
*.zip) unzip "${tarball}" ;;
*)
echo "Unknown archive format for ${tarball}"
return 1
;;
esac
}
mktmpdir() {
test -z "$TMPDIR" && TMPDIR="$(mktemp -d)"
mkdir -p "${TMPDIR}"
echo "${TMPDIR}"
}
http_download() {
local_file=$1
source_url=$2
header=$3
headerflag=''
destflag=''
if is_command curl; then
cmd='curl --fail -sSL'
destflag='-o'
headerflag='-H'
elif is_command wget; then
cmd='wget -q'
destflag='-O'
headerflag='--header'
else
echo "http_download: unable to find wget or curl"
return 1
fi
if [ -z "$header" ]; then
$cmd $destflag "$local_file" "$source_url"
else
$cmd $headerflag "$header" $destflag "$local_file" "$source_url"
fi
}
github_api() {
local_file=$1
source_url=$2
header=""
case "$source_url" in
https://api.github.com*)
test -z "$GITHUB_TOKEN" || header="Authorization: token $GITHUB_TOKEN"
;;
esac
http_download "$local_file" "$source_url" "$header"
}
github_last_release() {
owner_repo=$1
giturl="https://api.github.com/repos/${owner_repo}/releases/latest"
html=$(github_api - "$giturl")
version=$(echo "$html" | grep -m 1 "\"tag_name\":" | cut -f4 -d'"')
test -z "$version" && return 1
echo "$version"
}
hash_sha256() {
TARGET=${1:-/dev/stdin}
if is_command gsha256sum; then
hash=$(gsha256sum "$TARGET") || return 1
echo "$hash" | cut -d ' ' -f 1
elif is_command sha256sum; then
hash=$(sha256sum "$TARGET") || return 1
echo "$hash" | cut -d ' ' -f 1
elif is_command shasum; then
hash=$(shasum -a 256 "$TARGET" 2>/dev/null) || return 1
echo "$hash" | cut -d ' ' -f 1
elif is_command openssl; then
hash=$(openssl -dst openssl dgst -sha256 "$TARGET") || return 1
echo "$hash" | cut -d ' ' -f a
else
echo "hash_sha256: unable to find command to compute sha-256 hash"
return 1
fi
}
hash_sha256_verify() {
TARGET=$1
checksums=$2
if [ -z "$checksums" ]; then
echo "hash_sha256_verify: checksum file not specified in arg2"
return 1
fi
BASENAME=${TARGET##*/}
want=$(grep "${BASENAME}" "${checksums}" 2>/dev/null | tr '\t' ' ' | cut -d ' ' -f 1)
if [ -z "$want" ]; then
echo "hash_sha256_verify: unable to find checksum for '${TARGET}' in '${checksums}'"
return 1
fi
got=$(hash_sha256 "$TARGET")
if [ "$want" != "$got" ]; then
echo "hash_sha256_verify: checksum for '$TARGET' did not verify ${want} vs $got"
return 1
fi
}
cat /dev/null <<EOF
------------------------------------------------------------------------
End of functions from https://github.com/client9/shlib
------------------------------------------------------------------------
EOF
OWNER=client9
REPO=misspell
BINARY=misspell
FORMAT=tar.gz
OS=$(uname_os)
ARCH=$(uname_arch)
PREFIX="$OWNER/$REPO"
PLATFORM="${OS}/${ARCH}"
GITHUB_DOWNLOAD=https://github.com/${OWNER}/${REPO}/releases/download
uname_os_check "$OS"
uname_arch_check "$ARCH"
parse_args "$@"
check_platform
adjust_version
adjust_format
adjust_os
adjust_arch
echo "$PREFIX: found version ${VERSION} for ${OS}/${ARCH}"
NAME=${BINARY}_${VERSION}_${OS}_${ARCH}
TARBALL=${NAME}.${FORMAT}
TARBALL_URL=${GITHUB_DOWNLOAD}/v${VERSION}/${TARBALL}
CHECKSUM=${REPO}_checksums.txt
CHECKSUM_URL=${GITHUB_DOWNLOAD}/v${VERSION}/${CHECKSUM}
# Adjust binary name if windows
if [ "$OS" = "windows" ]; then
BINARY="${BINARY}.exe"
fi
execute
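Before installing, the script downloads the checksums file and refuses to continue unless the tarball's SHA-256 matches (hash_sha256 / hash_sha256_verify above). A rough Go equivalent of that verification step, assuming a small stand-alone program that takes a file path and the expected hex digest:

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"io"
	"os"
)

// fileSHA256 returns the hex-encoded SHA-256 of a file, like hash_sha256 above.
func fileSHA256(path string) (string, error) {
	f, err := os.Open(path)
	if err != nil {
		return "", err
	}
	defer f.Close()
	h := sha256.New()
	if _, err := io.Copy(h, f); err != nil {
		return "", err
	}
	return hex.EncodeToString(h.Sum(nil)), nil
}

func main() {
	if len(os.Args) != 3 {
		fmt.Fprintln(os.Stderr, "usage: verify <file> <expected-sha256>")
		os.Exit(2)
	}
	got, err := fileSHA256(os.Args[1])
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	if got != os.Args[2] {
		fmt.Printf("checksum mismatch: want %s, got %s\n", os.Args[2], got)
		os.Exit(1)
	}
	fmt.Println("checksum OK")
}
```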

48
vendor/github.com/client9/misspell/legal.go generated vendored Normal file

@@ -0,0 +1,48 @@
// Package misspell corrects commonly misspelled English words in source files.
package misspell
// Legal provides licensing info.
const Legal = `
Execept where noted below, the source code for misspell is
copyright Nick Galbreath and distribution is allowed under a
MIT license. See the following for details:
* https://github.com/client9/misspell/blob/master/LICENSE
* https://tldrlegal.com/license/mit-license
Misspell makes uses of the Golang standard library and
contains a modified version of Golang's strings.Replacer
which are covered under a BSD License.
* https://golang.org/pkg/strings/#Replacer
* https://golang.org/src/strings/replace.go
* https://github.com/golang/go/blob/master/LICENSE
Copyright (c) 2009 The Go Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
`

210
vendor/github.com/client9/misspell/mime.go generated vendored Normal file

@@ -0,0 +1,210 @@
package misspell
import (
"bytes"
"fmt"
"io"
"io/ioutil"
"net/http"
"os"
"path/filepath"
"strings"
)
// The number of possible binary formats is very large
// items that might be checked into a repo or be an
// artifact of a build. Additions welcome.
//
// Golang's internal table is very small and can't be
// relied on. Even then things like ".js" have a mime
// type of "application/javascipt" which isn't very helpful.
// "[x]" means we have sniff test and suffix test should be eliminated
var binary = map[string]bool{
".a": true, // [ ] archive
".bin": true, // [ ] binary
".bz2": true, // [ ] compression
".class": true, // [x] Java class file
".dll": true, // [ ] shared library
".exe": true, // [ ] binary
".gif": true, // [ ] image
".gpg": true, // [x] text, but really all base64
".gz": true, // [ ] compression
".ico": true, // [ ] image
".jar": true, // [x] archive
".jpeg": true, // [ ] image
".jpg": true, // [ ] image
".mp3": true, // [ ] audio
".mp4": true, // [ ] video
".mpeg": true, // [ ] video
".o": true, // [ ] object file
".pdf": true, // [x] pdf
".png": true, // [x] image
".pyc": true, // [ ] Python bytecode
".pyo": true, // [ ] Python bytecode
".so": true, // [x] shared library
".swp": true, // [ ] vim swap file
".tar": true, // [ ] archive
".tiff": true, // [ ] image
".woff": true, // [ ] font
".woff2": true, // [ ] font
".xz": true, // [ ] compression
".z": true, // [ ] compression
".zip": true, // [x] archive
}
// isBinaryFilename returns true if the file is likely to be binary
//
// Better heuristics could be done here, in particular a binary
// file is unlikely to be UTF-8 encoded. However this is cheap
// and will solve the immediate need of making sure common
// binary formats are not corrupted by mistake.
func isBinaryFilename(s string) bool {
return binary[strings.ToLower(filepath.Ext(s))]
}
var scm = map[string]bool{
".bzr": true,
".git": true,
".hg": true,
".svn": true,
"CVS": true,
}
// isSCMPath returns true if the path is likely part of a (private) SCM
// directory. E.g. ./git/something = true
func isSCMPath(s string) bool {
// hack for .git/COMMIT_EDITMSG and .git/TAG_EDITMSG
// normally we don't look at anything in .git
// but COMMIT_EDITMSG and TAG_EDITMSG are used as
// temp files for git commits. Allowing misspell to inspect
// these files allows for commit-msg hooks
// https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks
if strings.Contains(filepath.Base(s), "EDITMSG") {
return false
}
parts := strings.Split(filepath.Clean(s), string(filepath.Separator))
for _, dir := range parts {
if scm[dir] {
return true
}
}
return false
}
var magicHeaders = [][]byte{
// Issue #68
// PGP messages and signatures are "text" but really just
// blobs of base64-text and should not be misspell-checked
[]byte("-----BEGIN PGP MESSAGE-----"),
[]byte("-----BEGIN PGP SIGNATURE-----"),
// ELF
{0x7f, 0x45, 0x4c, 0x46},
// Postscript
{0x25, 0x21, 0x50, 0x53},
// PDF
{0x25, 0x50, 0x44, 0x46},
// Java class file
// https://en.wikipedia.org/wiki/Java_class_file
{0xCA, 0xFE, 0xBA, 0xBE},
// PNG
// https://en.wikipedia.org/wiki/Portable_Network_Graphics
{0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a},
// ZIP, JAR, ODF, OOXML
{0x50, 0x4B, 0x03, 0x04},
{0x50, 0x4B, 0x05, 0x06},
{0x50, 0x4B, 0x07, 0x08},
}
func isTextFile(raw []byte) bool {
for _, magic := range magicHeaders {
if bytes.HasPrefix(raw, magic) {
return false
}
}
// allow any text/ type with utf-8 encoding
// DetectContentType sometimes returns charset=utf-16 for XML stuff
// in which case ignore.
mime := http.DetectContentType(raw)
return strings.HasPrefix(mime, "text/") && strings.HasSuffix(mime, "charset=utf-8")
}
// ReadTextFile returns the contents of a file, first testing if it is a text file
// returns ("", nil) if not a text file
// returns ("", error) if error
// returns (string, nil) if text
//
// unfortunately, in worse case, this does
// 1 stat
// 1 open,read,close of 512 bytes
// 1 more stat,open, read everything, close (via ioutil.ReadAll)
// This could be kinder to the filesystem.
//
// This uses some heuristics of the file's extension (e.g. .zip, .txt) and
// uses a sniffer to determine if the file is text or not.
// Using file extensions isn't great, but probably
// good enough for real-world use.
// Golang's built in sniffer is problematic for different reasons. It's
// optimized for HTML, and is very limited in detection. It would be good
// to explicitly add some tests for ELF/DWARF formats to make sure we never
// corrupt binary files.
func ReadTextFile(filename string) (string, error) {
if isBinaryFilename(filename) {
return "", nil
}
if isSCMPath(filename) {
return "", nil
}
fstat, err := os.Stat(filename)
if err != nil {
return "", fmt.Errorf("Unable to stat %q: %s", filename, err)
}
// directory: nothing to do.
if fstat.IsDir() {
return "", nil
}
// avoid reading in multi-gig files
// if input is large, read the first 512 bytes to sniff type
// if not-text, then exit
isText := false
if fstat.Size() > 50000 {
fin, err := os.Open(filename)
if err != nil {
return "", fmt.Errorf("Unable to open large file %q: %s", filename, err)
}
defer fin.Close()
buf := make([]byte, 512)
_, err = io.ReadFull(fin, buf)
if err != nil {
return "", fmt.Errorf("Unable to read 512 bytes from %q: %s", filename, err)
}
if !isTextFile(buf) {
return "", nil
}
// set so we don't double check this file
isText = true
}
// read in whole file
raw, err := ioutil.ReadFile(filename)
if err != nil {
return "", fmt.Errorf("Unable to read all %q: %s", filename, err)
}
if !isText && !isTextFile(raw) {
return "", nil
}
return string(raw), nil
}
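ReadTextFile deliberately returns an empty string (and no error) for anything it decides not to check: known binary extensions, SCM paths, directories, and files whose first bytes fail the text sniff. A scratch program (not part of this commit) that makes that behaviour visible:

```go
package main

import (
	"fmt"
	"log"
	"os"

	"github.com/client9/misspell"
)

func main() {
	for _, path := range os.Args[1:] {
		text, err := misspell.ReadTextFile(path)
		if err != nil {
			log.Println(err)
			continue
		}
		if text == "" {
			fmt.Printf("%s: skipped (binary, SCM path, directory, or empty)\n", path)
			continue
		}
		fmt.Printf("%s: %d bytes of text to check\n", path, len(text))
	}
}
```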

85
vendor/github.com/client9/misspell/notwords.go generated vendored Normal file

@@ -0,0 +1,85 @@
package misspell
import (
"bytes"
"regexp"
"strings"
)
var (
reEmail = regexp.MustCompile(`[a-zA-Z0-9_.%+-]+@[a-zA-Z0-9-.]+\.[a-zA-Z]{2,6}[^a-zA-Z]`)
reHost = regexp.MustCompile(`[a-zA-Z0-9-.]+\.[a-zA-Z]+`)
reBackslash = regexp.MustCompile(`\\[a-z]`)
)
// RemovePath attempts to strip away embedded file system paths, e.g.
// /foo/bar or /static/myimg.png
//
// TODO: windows style
//
func RemovePath(s string) string {
out := bytes.Buffer{}
var idx int
for len(s) > 0 {
if idx = strings.IndexByte(s, '/'); idx == -1 {
out.WriteString(s)
break
}
if idx > 0 {
idx--
}
var chclass string
switch s[idx] {
case '/', ' ', '\n', '\t', '\r':
chclass = " \n\r\t"
case '[':
chclass = "]\n"
case '(':
chclass = ")\n"
default:
out.WriteString(s[:idx+2])
s = s[idx+2:]
continue
}
endx := strings.IndexAny(s[idx+1:], chclass)
if endx != -1 {
out.WriteString(s[:idx+1])
out.Write(bytes.Repeat([]byte{' '}, endx))
s = s[idx+endx+1:]
} else {
out.WriteString(s)
break
}
}
return out.String()
}
// replaceWithBlanks returns a string with the same number of spaces as the input
func replaceWithBlanks(s string) string {
return strings.Repeat(" ", len(s))
}
// RemoveEmail remove email-like strings, e.g. "nickg+junk@xfoobar.com", "nickg@xyz.abc123.biz"
func RemoveEmail(s string) string {
return reEmail.ReplaceAllStringFunc(s, replaceWithBlanks)
}
// RemoveHost removes host-like strings "foobar.com" "abc123.fo1231.biz"
func RemoveHost(s string) string {
return reHost.ReplaceAllStringFunc(s, replaceWithBlanks)
}
// RemoveBackslashEscapes removes characters that are preceded by a backslash,
// commonly found in printf format strings such as "\nto"
func removeBackslashEscapes(s string) string {
return reBackslash.ReplaceAllStringFunc(s, replaceWithBlanks)
}
// RemoveNotWords blanks out all the not words
func RemoveNotWords(s string) string {
// do most selective/specific first
return removeBackslashEscapes(RemoveHost(RemoveEmail(RemovePath(StripURL(s)))))
}
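Everything this file removes is blanked out with spaces rather than deleted, so a line keeps its length and the column numbers reported later still point into the original text. A scratch program (not part of this commit) showing the effect of RemoveNotWords on paths, e-mail addresses and backslash escapes:

```go
package main

import (
	"fmt"

	"github.com/client9/misspell"
)

func main() {
	line := `see /usr/local/docs/langauge.txt or mail nickg@example.com about teh plan`
	fmt.Printf("before: %q\n", line)
	fmt.Printf("after:  %q\n", misspell.RemoveNotWords(line))
}
```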

246
vendor/github.com/client9/misspell/replace.go generated vendored Normal file

@@ -0,0 +1,246 @@
package misspell
import (
"bufio"
"bytes"
"io"
"regexp"
"strings"
"text/scanner"
)
func max(x, y int) int {
if x > y {
return x
}
return y
}
func inArray(haystack []string, needle string) bool {
for _, word := range haystack {
if needle == word {
return true
}
}
return false
}
var wordRegexp = regexp.MustCompile(`[a-zA-Z0-9']+`)
// Diff is datastructure showing what changed in a single line
type Diff struct {
Filename string
FullLine string
Line int
Column int
Original string
Corrected string
}
// Replacer is the main struct for spelling correction
type Replacer struct {
Replacements []string
Debug bool
engine *StringReplacer
corrected map[string]string
}
// New creates a new default Replacer using the main rule list
func New() *Replacer {
r := Replacer{
Replacements: DictMain,
}
r.Compile()
return &r
}
// RemoveRule deletes existing rules.
// TODO: make inplace to save memory
func (r *Replacer) RemoveRule(ignore []string) {
newwords := make([]string, 0, len(r.Replacements))
for i := 0; i < len(r.Replacements); i += 2 {
if inArray(ignore, r.Replacements[i]) {
continue
}
newwords = append(newwords, r.Replacements[i:i+2]...)
}
r.engine = nil
r.Replacements = newwords
}
// AddRuleList appends new rules.
// Input is in the same form as Strings.Replacer: [ old1, new1, old2, new2, ....]
// Note: does not check for duplicates
func (r *Replacer) AddRuleList(additions []string) {
r.engine = nil
r.Replacements = append(r.Replacements, additions...)
}
// Compile compiles the rules. Required before using the Replace functions
func (r *Replacer) Compile() {
r.corrected = make(map[string]string, len(r.Replacements)/2)
for i := 0; i < len(r.Replacements); i += 2 {
r.corrected[r.Replacements[i]] = r.Replacements[i+1]
}
r.engine = NewStringReplacer(r.Replacements...)
}
/*
recheckLine handles the case where the input and output lines differ:
	extract each word from the original line
	replace word -> newword
	if newword == word, nothing changed, skip it
	if newword is a known correction for word, keep it and report a Diff
	otherwise some substring got mixed up by the replacer, so undo the change
*/
func (r *Replacer) recheckLine(s string, lineNum int, buf io.Writer, next func(Diff)) {
first := 0
redacted := RemoveNotWords(s)
idx := wordRegexp.FindAllStringIndex(redacted, -1)
for _, ab := range idx {
word := s[ab[0]:ab[1]]
newword := r.engine.Replace(word)
if newword == word {
// no replacement done
continue
}
// ignore camelCase words
// https://github.com/client9/misspell/issues/113
if CaseStyle(word) == CaseUnknown {
continue
}
if StringEqualFold(r.corrected[strings.ToLower(word)], newword) {
// word got corrected into something we know
io.WriteString(buf, s[first:ab[0]])
io.WriteString(buf, newword)
first = ab[1]
next(Diff{
FullLine: s,
Line: lineNum,
Original: word,
Corrected: newword,
Column: ab[0],
})
continue
}
// Word got corrected into something unknown. Ignore it
}
io.WriteString(buf, s[first:])
}
// ReplaceGo is a specialized routine for correcting Golang source
// files. Currently only checks comments, not identifiers for
// spelling.
func (r *Replacer) ReplaceGo(input string) (string, []Diff) {
var s scanner.Scanner
s.Init(strings.NewReader(input))
s.Mode = scanner.ScanIdents | scanner.ScanFloats | scanner.ScanChars | scanner.ScanStrings | scanner.ScanRawStrings | scanner.ScanComments
lastPos := 0
output := ""
Loop:
for {
switch s.Scan() {
case scanner.Comment:
origComment := s.TokenText()
newComment := r.engine.Replace(origComment)
if origComment != newComment {
// s.Pos().Offset is the end of the current token
// subtract len(origComment) to get the start of the token
offset := s.Pos().Offset
output = output + input[lastPos:offset-len(origComment)] + newComment
lastPos = offset
}
case scanner.EOF:
break Loop
}
}
if lastPos == 0 {
// no changes, no copies
return input, nil
}
if lastPos < len(input) {
output = output + input[lastPos:]
}
diffs := make([]Diff, 0, 8)
buf := bytes.NewBuffer(make([]byte, 0, max(len(input), len(output))+100))
// faster than making a bytes.Buffer and using bufio.ReadString
outlines := strings.SplitAfter(output, "\n")
inlines := strings.SplitAfter(input, "\n")
for i := 0; i < len(inlines); i++ {
if inlines[i] == outlines[i] {
buf.WriteString(outlines[i])
continue
}
r.recheckLine(inlines[i], i+1, buf, func(d Diff) {
diffs = append(diffs, d)
})
}
return buf.String(), diffs
}
// Replace corrects misspellings in the input, returning the corrected version
// along with a list of diffs.
func (r *Replacer) Replace(input string) (string, []Diff) {
output := r.engine.Replace(input)
if input == output {
return input, nil
}
diffs := make([]Diff, 0, 8)
buf := bytes.NewBuffer(make([]byte, 0, max(len(input), len(output))+100))
// faster than making a bytes.Buffer and using bufio.ReadString
outlines := strings.SplitAfter(output, "\n")
inlines := strings.SplitAfter(input, "\n")
for i := 0; i < len(inlines); i++ {
if inlines[i] == outlines[i] {
buf.WriteString(outlines[i])
continue
}
r.recheckLine(inlines[i], i+1, buf, func(d Diff) {
diffs = append(diffs, d)
})
}
return buf.String(), diffs
}
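// Illustrative sketch only (not part of the vendored source): typical use of
// the Replacer API above, assuming this package is imported as "misspell".
//
//	r := misspell.New()
//	corrected, diffs := r.Replace(text)
//	for _, d := range diffs {
//		log.Printf("line %d col %d: %s -> %s", d.Line, d.Column, d.Original, d.Corrected)
//	}
//	_ = corrected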
// ReplaceReader applies spelling corrections to a reader stream. Diffs are
// emitted through a callback.
func (r *Replacer) ReplaceReader(raw io.Reader, w io.Writer, next func(Diff)) error {
var (
err error
line string
lineNum int
)
reader := bufio.NewReader(raw)
for err == nil {
lineNum++
line, err = reader.ReadString('\n')
// if it's EOF, then line still holds the last (unterminated) line
// checking err both here and in the loop condition is not pretty,
// but it works
if err != nil && err != io.EOF {
return err
}
// easily 5x faster than regexp+map
if line == r.engine.Replace(line) {
io.WriteString(w, line)
continue
}
// but it can be inaccurate, so we need to double check
r.recheckLine(line, lineNum, w, next)
}
return nil
}

336
vendor/github.com/client9/misspell/stringreplacer.go generated vendored Normal file
View File

@ -0,0 +1,336 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package misspell
import (
"io"
// "log"
"strings"
)
// StringReplacer replaces a list of strings with replacements.
// It is safe for concurrent use by multiple goroutines.
type StringReplacer struct {
r replacer
}
// replacer is the interface that a replacement algorithm needs to implement.
type replacer interface {
Replace(s string) string
WriteString(w io.Writer, s string) (n int, err error)
}
// NewStringReplacer returns a new Replacer from a list of old, new string pairs.
// Replacements are performed in order, without overlapping matches.
func NewStringReplacer(oldnew ...string) *StringReplacer {
if len(oldnew)%2 == 1 {
panic("strings.NewReplacer: odd argument count")
}
return &StringReplacer{r: makeGenericReplacer(oldnew)}
}
// Replace returns a copy of s with all replacements performed.
func (r *StringReplacer) Replace(s string) string {
return r.r.Replace(s)
}
// WriteString writes s to w with all replacements performed.
func (r *StringReplacer) WriteString(w io.Writer, s string) (n int, err error) {
return r.r.WriteString(w, s)
}
// trieNode is a node in a lookup trie for prioritized key/value pairs. Keys
// and values may be empty. For example, the trie containing keys "ax", "ay",
// "bcbc", "x" and "xy" could have eight nodes:
//
// n0 -
// n1 a-
// n2 .x+
// n3 .y+
// n4 b-
// n5 .cbc+
// n6 x+
// n7 .y+
//
// n0 is the root node, and its children are n1, n4 and n6; n1's children are
// n2 and n3; n4's child is n5; n6's child is n7. Nodes n0, n1 and n4 (marked
// with a trailing "-") are partial keys, and nodes n2, n3, n5, n6 and n7
// (marked with a trailing "+") are complete keys.
type trieNode struct {
// value is the value of the trie node's key/value pair. It is empty if
// this node is not a complete key.
value string
// priority is the priority (higher is more important) of the trie node's
// key/value pair; keys are not necessarily matched shortest- or longest-
// first. Priority is positive if this node is a complete key, and zero
// otherwise. In the example above, positive/zero priorities are marked
// with a trailing "+" or "-".
priority int
// A trie node may have zero, one or more child nodes:
// * if the remaining fields are zero, there are no children.
// * if prefix and next are non-zero, there is one child in next.
// * if table is non-zero, it defines all the children.
//
// Prefixes are preferred over tables when there is one child, but the
// root node always uses a table for lookup efficiency.
// prefix is the difference in keys between this trie node and the next.
// In the example above, node n4 has prefix "cbc" and n4's next node is n5.
// Node n5 has no children and so has zero prefix, next and table fields.
prefix string
next *trieNode
// table is a lookup table indexed by the next byte in the key, after
// remapping that byte through genericReplacer.mapping to create a dense
// index. In the example above, the keys only use 'a', 'b', 'c', 'x' and
// 'y', which remap to 0, 1, 2, 3 and 4. All other bytes remap to 5, and
// genericReplacer.tableSize will be 5. Node n0's table will be
// []*trieNode{ 0:n1, 1:n4, 3:n6 }, where the 0, 1 and 3 are the remapped
// 'a', 'b' and 'x'.
table []*trieNode
}
func (t *trieNode) add(key, val string, priority int, r *genericReplacer) {
if key == "" {
if t.priority == 0 {
t.value = val
t.priority = priority
}
return
}
if t.prefix != "" {
// Need to split the prefix among multiple nodes.
var n int // length of the longest common prefix
for ; n < len(t.prefix) && n < len(key); n++ {
if t.prefix[n] != key[n] {
break
}
}
if n == len(t.prefix) {
t.next.add(key[n:], val, priority, r)
} else if n == 0 {
// First byte differs, start a new lookup table here. Looking up
// what is currently t.prefix[0] will lead to prefixNode, and
// looking up key[0] will lead to keyNode.
var prefixNode *trieNode
if len(t.prefix) == 1 {
prefixNode = t.next
} else {
prefixNode = &trieNode{
prefix: t.prefix[1:],
next: t.next,
}
}
keyNode := new(trieNode)
t.table = make([]*trieNode, r.tableSize)
t.table[r.mapping[t.prefix[0]]] = prefixNode
t.table[r.mapping[key[0]]] = keyNode
t.prefix = ""
t.next = nil
keyNode.add(key[1:], val, priority, r)
} else {
// Insert new node after the common section of the prefix.
next := &trieNode{
prefix: t.prefix[n:],
next: t.next,
}
t.prefix = t.prefix[:n]
t.next = next
next.add(key[n:], val, priority, r)
}
} else if t.table != nil {
// Insert into existing table.
m := r.mapping[key[0]]
if t.table[m] == nil {
t.table[m] = new(trieNode)
}
t.table[m].add(key[1:], val, priority, r)
} else {
t.prefix = key
t.next = new(trieNode)
t.next.add("", val, priority, r)
}
}
func (r *genericReplacer) lookup(s string, ignoreRoot bool) (val string, keylen int, found bool) {
// Iterate down the trie to the end, and grab the value and keylen with
// the highest priority.
bestPriority := 0
node := &r.root
n := 0
for node != nil {
if node.priority > bestPriority && !(ignoreRoot && node == &r.root) {
bestPriority = node.priority
val = node.value
keylen = n
found = true
}
if s == "" {
break
}
if node.table != nil {
index := r.mapping[ByteToLower(s[0])]
if int(index) == r.tableSize {
break
}
node = node.table[index]
s = s[1:]
n++
} else if node.prefix != "" && StringHasPrefixFold(s, node.prefix) {
n += len(node.prefix)
s = s[len(node.prefix):]
node = node.next
} else {
break
}
}
return
}
// genericReplacer is the fully generic algorithm.
// It's used as a fallback when nothing faster can be used.
type genericReplacer struct {
root trieNode
// tableSize is the size of a trie node's lookup table. It is the number
// of unique key bytes.
tableSize int
// mapping maps from key bytes to a dense index for trieNode.table.
mapping [256]byte
}
func makeGenericReplacer(oldnew []string) *genericReplacer {
r := new(genericReplacer)
// Find each byte used, then assign them each an index.
for i := 0; i < len(oldnew); i += 2 {
key := strings.ToLower(oldnew[i])
for j := 0; j < len(key); j++ {
r.mapping[key[j]] = 1
}
}
for _, b := range r.mapping {
r.tableSize += int(b)
}
var index byte
for i, b := range r.mapping {
if b == 0 {
r.mapping[i] = byte(r.tableSize)
} else {
r.mapping[i] = index
index++
}
}
// Ensure root node uses a lookup table (for performance).
r.root.table = make([]*trieNode, r.tableSize)
for i := 0; i < len(oldnew); i += 2 {
r.root.add(strings.ToLower(oldnew[i]), oldnew[i+1], len(oldnew)-i, r)
}
return r
}
type appendSliceWriter []byte
// Write writes to the buffer to satisfy io.Writer.
func (w *appendSliceWriter) Write(p []byte) (int, error) {
*w = append(*w, p...)
return len(p), nil
}
// WriteString writes to the buffer without string->[]byte->string allocations.
func (w *appendSliceWriter) WriteString(s string) (int, error) {
*w = append(*w, s...)
return len(s), nil
}
type stringWriterIface interface {
WriteString(string) (int, error)
}
type stringWriter struct {
w io.Writer
}
func (w stringWriter) WriteString(s string) (int, error) {
return w.w.Write([]byte(s))
}
func getStringWriter(w io.Writer) stringWriterIface {
sw, ok := w.(stringWriterIface)
if !ok {
sw = stringWriter{w}
}
return sw
}
func (r *genericReplacer) Replace(s string) string {
buf := make(appendSliceWriter, 0, len(s))
r.WriteString(&buf, s)
return string(buf)
}
func (r *genericReplacer) WriteString(w io.Writer, s string) (n int, err error) {
sw := getStringWriter(w)
var last, wn int
var prevMatchEmpty bool
for i := 0; i <= len(s); {
// Fast path: s[i] is not a prefix of any pattern.
if i != len(s) && r.root.priority == 0 {
index := int(r.mapping[ByteToLower(s[i])])
if index == r.tableSize || r.root.table[index] == nil {
i++
continue
}
}
// Ignore the empty match iff the previous loop found the empty match.
val, keylen, match := r.lookup(s[i:], prevMatchEmpty)
prevMatchEmpty = match && keylen == 0
if match {
orig := s[i : i+keylen]
switch CaseStyle(orig) {
case CaseUnknown:
// pretend we didn't match
// i++
// continue
case CaseUpper:
val = strings.ToUpper(val)
case CaseLower:
val = strings.ToLower(val)
case CaseTitle:
if len(val) < 2 {
val = strings.ToUpper(val)
} else {
val = strings.ToUpper(val[:1]) + strings.ToLower(val[1:])
}
}
wn, err = sw.WriteString(s[last:i])
n += wn
if err != nil {
return
}
//log.Printf("%d: Going to correct %q with %q", i, s[i:i+keylen], val)
wn, err = sw.WriteString(val)
n += wn
if err != nil {
return
}
i += keylen
last = i
continue
}
i++
}
if last != len(s) {
wn, err = sw.WriteString(s[last:])
n += wn
}
return
}

View File

@ -0,0 +1,421 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package misspell_test
import (
"bytes"
"fmt"
"strings"
"testing"
. "github.com/client9/misspell"
)
var htmlEscaper = NewStringReplacer(
"&", "&amp;",
"<", "&lt;",
">", "&gt;",
`"`, "&quot;",
"'", "&apos;",
)
var htmlUnescaper = NewStringReplacer(
"&amp;", "&",
"&lt;", "<",
"&gt;", ">",
"&quot;", `"`,
"&apos;", "'",
)
// The http package's old HTML escaping function.
func oldHTMLEscape(s string) string {
s = strings.Replace(s, "&", "&amp;", -1)
s = strings.Replace(s, "<", "&lt;", -1)
s = strings.Replace(s, ">", "&gt;", -1)
s = strings.Replace(s, `"`, "&quot;", -1)
s = strings.Replace(s, "'", "&apos;", -1)
return s
}
var capitalLetters = NewStringReplacer("a", "A", "b", "B")
// TestReplacer tests the replacer implementations.
func TestReplacer(t *testing.T) {
type testCase struct {
r *StringReplacer
in, out string
}
var testCases []testCase
// str converts 0xff to "\xff". This isn't just string(b) since that converts to UTF-8.
str := func(b byte) string {
return string([]byte{b})
}
var s []string
// inc maps "\x00"->"\x01", ..., "a"->"b", "b"->"c", ..., "\xff"->"\x00".
for i := 0; i < 256; i++ {
s = append(s, str(byte(i)), str(byte(i+1)))
}
inc := NewStringReplacer(s...)
// Test cases with 1-byte old strings, 1-byte new strings.
testCases = append(testCases,
testCase{capitalLetters, "brad", "BrAd"},
testCase{capitalLetters, strings.Repeat("a", (32<<10)+123), strings.Repeat("A", (32<<10)+123)},
testCase{capitalLetters, "", ""},
testCase{inc, "brad", "csbe"},
testCase{inc, "\x00\xff", "\x01\x00"},
testCase{inc, "", ""},
testCase{NewStringReplacer("a", "1", "a", "2"), "brad", "br1d"},
)
// repeat maps "a"->"a", "b"->"bb", "c"->"ccc", ...
s = nil
for i := 0; i < 256; i++ {
n := i + 1 - 'a'
if n < 1 {
n = 1
}
s = append(s, str(byte(i)), strings.Repeat(str(byte(i)), n))
}
repeat := NewStringReplacer(s...)
// Test cases with 1-byte old strings, variable length new strings.
testCases = append(testCases,
testCase{htmlEscaper, "No changes", "No changes"},
testCase{htmlEscaper, "I <3 escaping & stuff", "I &lt;3 escaping &amp; stuff"},
testCase{htmlEscaper, "&&&", "&amp;&amp;&amp;"},
testCase{htmlEscaper, "", ""},
testCase{repeat, "brad", "bbrrrrrrrrrrrrrrrrrradddd"},
testCase{repeat, "abba", "abbbba"},
testCase{repeat, "", ""},
testCase{NewStringReplacer("a", "11", "a", "22"), "brad", "br11d"},
)
// The remaining test cases have variable length old strings.
testCases = append(testCases,
testCase{htmlUnescaper, "&amp;amp;", "&amp;"},
testCase{htmlUnescaper, "&lt;b&gt;HTML&apos;s neat&lt;/b&gt;", "<b>HTML's neat</b>"},
testCase{htmlUnescaper, "", ""},
testCase{NewStringReplacer("a", "1", "a", "2", "xxx", "xxx"), "brad", "br1d"},
testCase{NewStringReplacer("a", "1", "aa", "2", "aaa", "3"), "aaaa", "1111"},
testCase{NewStringReplacer("aaa", "3", "aa", "2", "a", "1"), "aaaa", "31"},
)
// gen1 has multiple old strings of variable length. There is no
// overall non-empty common prefix, but some pairwise common prefixes.
gen1 := NewStringReplacer(
"aaa", "3[aaa]",
"aa", "2[aa]",
"a", "1[a]",
"i", "i",
"longerst", "most long",
"longer", "medium",
"long", "short",
"xx", "xx",
"x", "X",
"X", "Y",
"Y", "Z",
)
testCases = append(testCases,
testCase{gen1, "fooaaabar", "foo3[aaa]b1[a]r"},
testCase{gen1, "long, longerst, longer", "short, most long, medium"},
testCase{gen1, "xxxxx", "xxxxX"},
testCase{gen1, "XiX", "YiY"},
testCase{gen1, "", ""},
)
// gen2 has multiple old strings with no pairwise common prefix.
gen2 := NewStringReplacer(
"roses", "red",
"violets", "blue",
"sugar", "sweet",
)
testCases = append(testCases,
testCase{gen2, "roses are red, violets are blue...", "red are red, blue are blue..."},
testCase{gen2, "", ""},
)
// gen3 has multiple old strings with an overall common prefix.
gen3 := NewStringReplacer(
"abracadabra", "poof",
"abracadabrakazam", "splat",
"abraham", "lincoln",
"abrasion", "scrape",
"abraham", "isaac",
)
testCases = append(testCases,
testCase{gen3, "abracadabrakazam abraham", "poofkazam lincoln"},
testCase{gen3, "abrasion abracad", "scrape abracad"},
testCase{gen3, "abba abram abrasive", "abba abram abrasive"},
testCase{gen3, "", ""},
)
// foo{1,2,3,4} have multiple old strings with an overall common prefix
// and 1- or 2- byte extensions from the common prefix.
foo1 := NewStringReplacer(
"foo1", "A",
"foo2", "B",
"foo3", "C",
)
foo2 := NewStringReplacer(
"foo1", "A",
"foo2", "B",
"foo31", "C",
"foo32", "D",
)
foo3 := NewStringReplacer(
"foo11", "A",
"foo12", "B",
"foo31", "C",
"foo32", "D",
)
foo4 := NewStringReplacer(
"foo12", "B",
"foo32", "D",
)
testCases = append(testCases,
testCase{foo1, "fofoofoo12foo32oo", "fofooA2C2oo"},
testCase{foo1, "", ""},
testCase{foo2, "fofoofoo12foo32oo", "fofooA2Doo"},
testCase{foo2, "", ""},
testCase{foo3, "fofoofoo12foo32oo", "fofooBDoo"},
testCase{foo3, "", ""},
testCase{foo4, "fofoofoo12foo32oo", "fofooBDoo"},
testCase{foo4, "", ""},
)
// genAll maps "\x00\x01\x02...\xfe\xff" to "[all]", amongst other things.
allBytes := make([]byte, 256)
for i := range allBytes {
allBytes[i] = byte(i)
}
allString := string(allBytes)
genAll := NewStringReplacer(
allString, "[all]",
"\xff", "[ff]",
"\x00", "[00]",
)
testCases = append(testCases,
testCase{genAll, allString, "[all]"},
testCase{genAll, "a\xff" + allString + "\x00", "a[ff][all][00]"},
testCase{genAll, "", ""},
)
// Test cases with empty old strings.
blankToX1 := NewStringReplacer("", "X")
blankToX2 := NewStringReplacer("", "X", "", "")
blankHighPriority := NewStringReplacer("", "X", "o", "O")
blankLowPriority := NewStringReplacer("o", "O", "", "X")
blankNoOp1 := NewStringReplacer("", "")
blankNoOp2 := NewStringReplacer("", "", "", "A")
blankFoo := NewStringReplacer("", "X", "foobar", "R", "foobaz", "Z")
testCases = append(testCases,
testCase{blankToX1, "foo", "XfXoXoX"},
testCase{blankToX1, "", "X"},
testCase{blankToX2, "foo", "XfXoXoX"},
testCase{blankToX2, "", "X"},
testCase{blankHighPriority, "oo", "XOXOX"},
testCase{blankHighPriority, "ii", "XiXiX"},
testCase{blankHighPriority, "oiio", "XOXiXiXOX"},
testCase{blankHighPriority, "iooi", "XiXOXOXiX"},
testCase{blankHighPriority, "", "X"},
testCase{blankLowPriority, "oo", "OOX"},
testCase{blankLowPriority, "ii", "XiXiX"},
testCase{blankLowPriority, "oiio", "OXiXiOX"},
testCase{blankLowPriority, "iooi", "XiOOXiX"},
testCase{blankLowPriority, "", "X"},
testCase{blankNoOp1, "foo", "foo"},
testCase{blankNoOp1, "", ""},
testCase{blankNoOp2, "foo", "foo"},
testCase{blankNoOp2, "", ""},
testCase{blankFoo, "foobarfoobaz", "XRXZX"},
testCase{blankFoo, "foobar-foobaz", "XRX-XZX"},
testCase{blankFoo, "", "X"},
)
// single string replacer
abcMatcher := NewStringReplacer("abc", "[match]")
testCases = append(testCases,
testCase{abcMatcher, "", ""},
testCase{abcMatcher, "ab", "ab"},
testCase{abcMatcher, "abc", "[match]"},
testCase{abcMatcher, "abcd", "[match]d"},
testCase{abcMatcher, "cabcabcdabca", "c[match][match]d[match]a"},
)
// Issue 6659 cases (more single string replacer)
noHello := NewStringReplacer("Hello", "")
testCases = append(testCases,
testCase{noHello, "Hello", ""},
testCase{noHello, "Hellox", "x"},
testCase{noHello, "xHello", "x"},
testCase{noHello, "xHellox", "xx"},
)
// No-arg test cases.
nop := NewStringReplacer()
testCases = append(testCases,
testCase{nop, "abc", "abc"},
testCase{nop, "", ""},
)
// Run the test cases.
for i, tc := range testCases {
if s := tc.r.Replace(tc.in); s != tc.out {
t.Errorf("%d. strings.Replace(%q) = %q, want %q", i, tc.in, s, tc.out)
}
var buf bytes.Buffer
n, err := tc.r.WriteString(&buf, tc.in)
if err != nil {
t.Errorf("%d. WriteString: %v", i, err)
continue
}
got := buf.String()
if got != tc.out {
t.Errorf("%d. WriteString(%q) wrote %q, want %q", i, tc.in, got, tc.out)
continue
}
if n != len(tc.out) {
t.Errorf("%d. WriteString(%q) wrote correct string but reported %d bytes; want %d (%q)",
i, tc.in, n, len(tc.out), tc.out)
}
}
}
type errWriter struct{}
func (errWriter) Write(p []byte) (n int, err error) {
return 0, fmt.Errorf("unwritable")
}
func BenchmarkGenericNoMatch(b *testing.B) {
str := strings.Repeat("A", 100) + strings.Repeat("B", 100)
generic := NewStringReplacer("a", "A", "b", "B", "12", "123") // varying lengths forces generic
for i := 0; i < b.N; i++ {
generic.Replace(str)
}
}
func BenchmarkGenericMatch1(b *testing.B) {
str := strings.Repeat("a", 100) + strings.Repeat("b", 100)
generic := NewStringReplacer("a", "A", "b", "B", "12", "123")
for i := 0; i < b.N; i++ {
generic.Replace(str)
}
}
func BenchmarkGenericMatch2(b *testing.B) {
str := strings.Repeat("It&apos;s &lt;b&gt;HTML&lt;/b&gt;!", 100)
for i := 0; i < b.N; i++ {
htmlUnescaper.Replace(str)
}
}
func benchmarkSingleString(b *testing.B, pattern, text string) {
r := NewStringReplacer(pattern, "[match]")
b.SetBytes(int64(len(text)))
b.ResetTimer()
for i := 0; i < b.N; i++ {
r.Replace(text)
}
}
func BenchmarkSingleMaxSkipping(b *testing.B) {
benchmarkSingleString(b, strings.Repeat("b", 25), strings.Repeat("a", 10000))
}
func BenchmarkSingleLongSuffixFail(b *testing.B) {
benchmarkSingleString(b, "b"+strings.Repeat("a", 500), strings.Repeat("a", 1002))
}
func BenchmarkSingleMatch(b *testing.B) {
benchmarkSingleString(b, "abcdef", strings.Repeat("abcdefghijklmno", 1000))
}
func BenchmarkByteByteNoMatch(b *testing.B) {
str := strings.Repeat("A", 100) + strings.Repeat("B", 100)
for i := 0; i < b.N; i++ {
capitalLetters.Replace(str)
}
}
func BenchmarkByteByteMatch(b *testing.B) {
str := strings.Repeat("a", 100) + strings.Repeat("b", 100)
for i := 0; i < b.N; i++ {
capitalLetters.Replace(str)
}
}
func BenchmarkByteStringMatch(b *testing.B) {
str := "<" + strings.Repeat("a", 99) + strings.Repeat("b", 99) + ">"
for i := 0; i < b.N; i++ {
htmlEscaper.Replace(str)
}
}
func BenchmarkHTMLEscapeNew(b *testing.B) {
str := "I <3 to escape HTML & other text too."
for i := 0; i < b.N; i++ {
htmlEscaper.Replace(str)
}
}
func BenchmarkHTMLEscapeOld(b *testing.B) {
str := "I <3 to escape HTML & other text too."
for i := 0; i < b.N; i++ {
oldHTMLEscape(str)
}
}
func BenchmarkByteStringReplacerWriteString(b *testing.B) {
str := strings.Repeat("I <3 to escape HTML & other text too.", 100)
buf := new(bytes.Buffer)
for i := 0; i < b.N; i++ {
htmlEscaper.WriteString(buf, str)
buf.Reset()
}
}
func BenchmarkByteReplacerWriteString(b *testing.B) {
str := strings.Repeat("abcdefghijklmnopqrstuvwxyz", 100)
buf := new(bytes.Buffer)
for i := 0; i < b.N; i++ {
capitalLetters.WriteString(buf, str)
buf.Reset()
}
}
// BenchmarkByteByteReplaces compares byteByteImpl against multiple Replaces.
func BenchmarkByteByteReplaces(b *testing.B) {
str := strings.Repeat("a", 100) + strings.Repeat("b", 100)
for i := 0; i < b.N; i++ {
strings.Replace(strings.Replace(str, "a", "A", -1), "b", "B", -1)
}
}

17
vendor/github.com/client9/misspell/url.go generated vendored Normal file
View File

@ -0,0 +1,17 @@
package misspell
import (
"regexp"
)
// Regexp for URL https://mathiasbynens.be/demo/url-regex
//
// original @imme_emosol (54 chars) has trouble with dashes in hostname
// @(https?|ftp)://(-\.)?([^\s/?\.#-]+\.?)+(/[^\s]*)?$@iS
var reURL = regexp.MustCompile(`(?i)(https?|ftp)://(-\.)?([^\s/?\.#]+\.?)+(/[^\s]*)?`)
// StripURL attempts to replace URLs with blank spaces, e.g.
// "xxx http://foo.com/ yyy" -> "xxx                 yyy"
func StripURL(s string) string {
return reURL.ReplaceAllStringFunc(s, replaceWithBlanks)
}

31158
vendor/github.com/client9/misspell/words.go generated vendored Normal file

File diff suppressed because it is too large Load Diff

7
vendor/github.com/fzipp/gocyclo/CONTRIBUTORS generated vendored Normal file
View File

@ -0,0 +1,7 @@
# Names should be added to this file like so:
# Name <email address>
# Please keep the list sorted.
Frederik Zipp <fzipp@gmx.de>
Harshavardhana <harsha@harshavardhana.net>

27
vendor/github.com/fzipp/gocyclo/LICENSE generated vendored Normal file
View File

@ -0,0 +1,27 @@
Copyright (c) 2013 Frederik Zipp. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of the copyright owner nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

31
vendor/github.com/fzipp/gocyclo/README.md generated vendored Normal file
View File

@ -0,0 +1,31 @@
Gocyclo calculates cyclomatic complexities of functions in Go source code.
The cyclomatic complexity of a function is calculated according to the
following rules:
1 is the base complexity of a function
+1 for each 'if', 'for', 'case', '&&' or '||'
To install, run
$ go get github.com/fzipp/gocyclo
and put the resulting binary in one of your PATH directories if
`$GOPATH/bin` isn't already in your PATH.
Usage:
$ gocyclo [<flag> ...] <Go file or directory> ...
Examples:
$ gocyclo .
$ gocyclo main.go
$ gocyclo -top 10 src/
$ gocyclo -over 25 docker
$ gocyclo -avg .
The output fields for each line are:
<complexity> <package> <function> <file:row:column>
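As a hedged illustration (not part of the vendored README), the following function would score a complexity of 3 under the rules above: 1 for the function itself, +1 for the 'for', +1 for the 'if':
    func countPositive(xs []int) (n int) {
        for _, x := range xs {
            if x > 0 {
                n++
            }
        }
        return n
    }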

225
vendor/github.com/fzipp/gocyclo/gocyclo.go generated vendored Normal file
View File

@ -0,0 +1,225 @@
// Copyright 2013 Frederik Zipp. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Gocyclo calculates the cyclomatic complexities of functions and
// methods in Go source code.
//
// Usage:
// gocyclo [<flag> ...] <Go file or directory> ...
//
// Flags:
// -over N show functions with complexity > N only and
// return exit code 1 if the output is non-empty
// -top N show the top N most complex functions only
// -avg show the average complexity
//
// The output fields for each line are:
// <complexity> <package> <function> <file:row:column>
package main
import (
"flag"
"fmt"
"go/ast"
"go/parser"
"go/token"
"io"
"log"
"os"
"path/filepath"
"sort"
"strings"
)
const usageDoc = `Calculate cyclomatic complexities of Go functions.
Usage:
gocyclo [flags] <Go file or directory> ...
Flags:
-over N show functions with complexity > N only and
return exit code 1 if the set is non-empty
-top N show the top N most complex functions only
-avg show the average complexity over all functions,
regardless of whether -over or -top are set
The output fields for each line are:
<complexity> <package> <function> <file:row:column>
`
func usage() {
fmt.Fprintf(os.Stderr, usageDoc)
os.Exit(2)
}
var (
over = flag.Int("over", 0, "show functions with complexity > N only")
top = flag.Int("top", -1, "show the top N most complex functions only")
avg = flag.Bool("avg", false, "show the average complexity")
)
func main() {
log.SetFlags(0)
log.SetPrefix("gocyclo: ")
flag.Usage = usage
flag.Parse()
args := flag.Args()
if len(args) == 0 {
usage()
}
stats := analyze(args)
sort.Sort(byComplexity(stats))
written := writeStats(os.Stdout, stats)
if *avg {
showAverage(stats)
}
if *over > 0 && written > 0 {
os.Exit(1)
}
}
func analyze(paths []string) []stat {
var stats []stat
for _, path := range paths {
if isDir(path) {
stats = analyzeDir(path, stats)
} else {
stats = analyzeFile(path, stats)
}
}
return stats
}
func isDir(filename string) bool {
fi, err := os.Stat(filename)
return err == nil && fi.IsDir()
}
func analyzeFile(fname string, stats []stat) []stat {
fset := token.NewFileSet()
f, err := parser.ParseFile(fset, fname, nil, 0)
if err != nil {
log.Fatal(err)
}
return buildStats(f, fset, stats)
}
func analyzeDir(dirname string, stats []stat) []stat {
filepath.Walk(dirname, func(path string, info os.FileInfo, err error) error {
if err == nil && !info.IsDir() && strings.HasSuffix(path, ".go") {
stats = analyzeFile(path, stats)
}
return err
})
return stats
}
func writeStats(w io.Writer, sortedStats []stat) int {
for i, stat := range sortedStats {
if i == *top {
return i
}
if stat.Complexity <= *over {
return i
}
fmt.Fprintln(w, stat)
}
return len(sortedStats)
}
func showAverage(stats []stat) {
fmt.Printf("Average: %.3g\n", average(stats))
}
func average(stats []stat) float64 {
total := 0
for _, s := range stats {
total += s.Complexity
}
return float64(total) / float64(len(stats))
}
type stat struct {
PkgName string
FuncName string
Complexity int
Pos token.Position
}
func (s stat) String() string {
return fmt.Sprintf("%d %s %s %s", s.Complexity, s.PkgName, s.FuncName, s.Pos)
}
type byComplexity []stat
func (s byComplexity) Len() int { return len(s) }
func (s byComplexity) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
func (s byComplexity) Less(i, j int) bool {
return s[i].Complexity >= s[j].Complexity
}
func buildStats(f *ast.File, fset *token.FileSet, stats []stat) []stat {
for _, decl := range f.Decls {
if fn, ok := decl.(*ast.FuncDecl); ok {
stats = append(stats, stat{
PkgName: f.Name.Name,
FuncName: funcName(fn),
Complexity: complexity(fn),
Pos: fset.Position(fn.Pos()),
})
}
}
return stats
}
// funcName returns the name representation of a function or method:
// "(Type).Name" for methods or simply "Name" for functions.
func funcName(fn *ast.FuncDecl) string {
if fn.Recv != nil {
if fn.Recv.NumFields() > 0 {
typ := fn.Recv.List[0].Type
return fmt.Sprintf("(%s).%s", recvString(typ), fn.Name)
}
}
return fn.Name.Name
}
// recvString returns a string representation of recv of the
// form "T", "*T", or "BADRECV" (if not a proper receiver type).
func recvString(recv ast.Expr) string {
switch t := recv.(type) {
case *ast.Ident:
return t.Name
case *ast.StarExpr:
return "*" + recvString(t.X)
}
return "BADRECV"
}
// complexity calculates the cyclomatic complexity of a function.
func complexity(fn *ast.FuncDecl) int {
v := complexityVisitor{}
ast.Walk(&v, fn)
return v.Complexity
}
type complexityVisitor struct {
// Complexity is the cyclomatic complexity
Complexity int
}
// Visit implements the ast.Visitor interface.
func (v *complexityVisitor) Visit(n ast.Node) ast.Visitor {
switch n := n.(type) {
case *ast.FuncDecl, *ast.IfStmt, *ast.ForStmt, *ast.RangeStmt, *ast.CaseClause, *ast.CommClause:
v.Complexity++
case *ast.BinaryExpr:
if n.Op == token.LAND || n.Op == token.LOR {
v.Complexity++
}
}
return v
}

30
vendor/github.com/gordonklaus/ineffassign/.gitignore generated vendored Normal file
View File

@ -0,0 +1,30 @@
ineffassign
# Created by https://www.gitignore.io/api/go
### Go ###
# Compiled Object files, Static and Dynamic libs (Shared Objects)
*.o
*.a
*.so
# Folders
_obj
_test
# Architecture specific extensions/prefixes
*.[568vq]
[568vq].out
*.cgo1.go
*.cgo2.c
_cgo_defun.c
_cgo_gotypes.go
_cgo_export.*
_testmain.go
*.exe
*.test
*.prof

21
vendor/github.com/gordonklaus/ineffassign/LICENSE generated vendored Normal file
View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2016 Gordon Klaus and contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

4
vendor/github.com/gordonklaus/ineffassign/README.md generated vendored Normal file
View File

@ -0,0 +1,4 @@
# ineffassign
Detect ineffectual assignments in Go code.
This tool misses some cases because it does not consider any type information in its analysis. (For example, assignments to struct fields are never marked as ineffectual.) It should, however, never give any false positives.
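For illustration (not part of the vendored README), ineffassign would flag the first assignment to err below, because its value is overwritten before it is ever read; loadConfig, applyDefaults and save are hypothetical helpers:
    func setup(path string) error {
        cfg, err := loadConfig(path) // flagged: err is reassigned below without being read
        cfg, err = applyDefaults(cfg)
        if err != nil {
            return err
        }
        return save(cfg)
    }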

7
vendor/github.com/gordonklaus/ineffassign/bugs generated vendored Normal file
View File

@ -0,0 +1,7 @@
cmd/compile/internal/big/floatconv.go:367:2 m
cmd/cover/cover_test.go:62:2 err
cmd/pprof/internal/profile/profile.go:131:10 err
math/big/ftoa.go:285:2 m
net/file_unix.go:66:7 err
golang.org/x/mobile/app/android.go:175:2 queue
golang.org/x/net/icmp/listen_posix.go:83:6 err

View File

@ -0,0 +1,623 @@
package main
import (
"flag"
"fmt"
"go/ast"
"go/parser"
"go/token"
"os"
"path/filepath"
"sort"
"strings"
)
const invalidArgumentExitCode = 3
var dontRecurseFlag = flag.Bool("n", false, "don't recursively check paths")
func main() {
flag.Parse()
if len(flag.Args()) == 0 {
fmt.Println("missing argument: filepath")
os.Exit(invalidArgumentExitCode)
}
lintFailed := false
for _, path := range flag.Args() {
root, err := filepath.Abs(path)
if err != nil {
fmt.Printf("Error finding absolute path: %s", err)
os.Exit(invalidArgumentExitCode)
}
if walkPath(root) {
lintFailed = true
}
}
if lintFailed {
os.Exit(1)
}
}
func walkPath(root string) bool {
lintFailed := false
filepath.Walk(root, func(path string, fi os.FileInfo, err error) error {
if err != nil {
fmt.Printf("Error during filesystem walk: %v\n", err)
return nil
}
if fi.IsDir() {
if path != root && (*dontRecurseFlag ||
filepath.Base(path) == "testdata" ||
filepath.Base(path) == "vendor") {
return filepath.SkipDir
}
return nil
}
if !strings.HasSuffix(path, ".go") {
return nil
}
fset, _, ineff := checkPath(path)
for _, id := range ineff {
fmt.Printf("%s: ineffectual assignment to %s\n", fset.Position(id.Pos()), id.Name)
lintFailed = true
}
return nil
})
return lintFailed
}
func checkPath(path string) (*token.FileSet, []*ast.CommentGroup, []*ast.Ident) {
fset := token.NewFileSet()
f, err := parser.ParseFile(fset, path, nil, parser.ParseComments)
if err != nil {
return nil, nil, nil
}
bld := &builder{vars: map[*ast.Object]*variable{}}
bld.walk(f)
chk := &checker{vars: bld.vars, seen: map[*block]bool{}}
for _, b := range bld.roots {
chk.check(b)
}
sort.Sort(chk.ineff)
return fset, f.Comments, chk.ineff
}
type builder struct {
roots []*block
block *block
vars map[*ast.Object]*variable
results []*ast.FieldList
breaks branchStack
continues branchStack
gotos branchStack
labelStmt *ast.LabeledStmt
}
type block struct {
children []*block
ops map[*ast.Object][]operation
}
func (b *block) addChild(c *block) {
b.children = append(b.children, c)
}
type operation struct {
id *ast.Ident
assign bool
}
type variable struct {
fundept int
escapes bool
}
func (bld *builder) walk(n ast.Node) {
if n != nil {
ast.Walk(bld, n)
}
}
func (bld *builder) Visit(n ast.Node) ast.Visitor {
switch n := n.(type) {
case *ast.FuncDecl:
if n.Body != nil {
bld.fun(n.Type, n.Body)
}
case *ast.FuncLit:
bld.fun(n.Type, n.Body)
case *ast.IfStmt:
bld.walk(n.Init)
bld.walk(n.Cond)
b0 := bld.block
bld.newBlock(b0)
bld.walk(n.Body)
b1 := bld.block
if n.Else != nil {
bld.newBlock(b0)
bld.walk(n.Else)
b0 = bld.block
}
bld.newBlock(b0, b1)
case *ast.ForStmt:
lbl := bld.stmtLabel(n)
brek := bld.breaks.push(lbl)
continu := bld.continues.push(lbl)
bld.walk(n.Init)
start := bld.newBlock(bld.block)
bld.walk(n.Cond)
cond := bld.block
bld.newBlock(cond)
bld.walk(n.Body)
continu.setDestination(bld.newBlock(bld.block))
bld.walk(n.Post)
bld.block.addChild(start)
brek.setDestination(bld.newBlock(cond))
bld.breaks.pop()
bld.continues.pop()
case *ast.RangeStmt:
lbl := bld.stmtLabel(n)
brek := bld.breaks.push(lbl)
continu := bld.continues.push(lbl)
bld.walk(n.X)
pre := bld.newBlock(bld.block)
start := bld.newBlock(pre)
if n.Key != nil {
lhs := []ast.Expr{n.Key}
if n.Value != nil {
lhs = append(lhs, n.Value)
}
bld.walk(&ast.AssignStmt{Lhs: lhs, Tok: n.Tok, TokPos: n.TokPos, Rhs: []ast.Expr{&ast.Ident{NamePos: n.X.End()}}})
}
bld.walk(n.Body)
bld.block.addChild(start)
continu.setDestination(pre)
brek.setDestination(bld.newBlock(pre, bld.block))
bld.breaks.pop()
bld.continues.pop()
case *ast.SwitchStmt:
bld.walk(n.Init)
bld.walk(n.Tag)
bld.swtch(n, n.Body.List)
case *ast.TypeSwitchStmt:
bld.walk(n.Init)
bld.walk(n.Assign)
bld.swtch(n, n.Body.List)
case *ast.SelectStmt:
brek := bld.breaks.push(bld.stmtLabel(n))
for _, c := range n.Body.List {
c := c.(*ast.CommClause).Comm
if s, ok := c.(*ast.AssignStmt); ok {
bld.walk(s.Rhs[0])
} else {
bld.walk(c)
}
}
b0 := bld.block
exits := make([]*block, len(n.Body.List))
dfault := false
for i, c := range n.Body.List {
c := c.(*ast.CommClause)
bld.newBlock(b0)
bld.walk(c)
exits[i] = bld.block
dfault = dfault || c.Comm == nil
}
if !dfault {
exits = append(exits, b0)
}
brek.setDestination(bld.newBlock(exits...))
bld.breaks.pop()
case *ast.LabeledStmt:
bld.gotos.get(n.Label).setDestination(bld.newBlock(bld.block))
bld.labelStmt = n
bld.walk(n.Stmt)
case *ast.BranchStmt:
switch n.Tok {
case token.BREAK:
bld.breaks.get(n.Label).addSource(bld.block)
bld.newBlock()
case token.CONTINUE:
bld.continues.get(n.Label).addSource(bld.block)
bld.newBlock()
case token.GOTO:
bld.gotos.get(n.Label).addSource(bld.block)
bld.newBlock()
}
case *ast.AssignStmt:
if n.Tok == token.QUO_ASSIGN || n.Tok == token.REM_ASSIGN {
bld.maybePanic()
}
for _, x := range n.Rhs {
bld.walk(x)
}
for i, x := range n.Lhs {
if id, ok := ident(x); ok {
if n.Tok >= token.ADD_ASSIGN && n.Tok <= token.AND_NOT_ASSIGN {
bld.use(id)
}
// Don't treat explicit initialization to zero as assignment; it is often used as shorthand for a bare declaration.
if n.Tok == token.DEFINE && i < len(n.Rhs) && isZeroInitializer(n.Rhs[i]) {
bld.use(id)
} else {
bld.assign(id)
}
} else {
bld.walk(x)
}
}
case *ast.GenDecl:
if n.Tok == token.VAR {
for _, s := range n.Specs {
s := s.(*ast.ValueSpec)
for _, x := range s.Values {
bld.walk(x)
}
for _, id := range s.Names {
if len(s.Values) > 0 {
bld.assign(id)
} else {
bld.use(id)
}
}
}
}
case *ast.IncDecStmt:
if id, ok := ident(n.X); ok {
bld.use(id)
bld.assign(id)
} else {
bld.walk(n.X)
}
case *ast.Ident:
bld.use(n)
case *ast.ReturnStmt:
for _, x := range n.Results {
bld.walk(x)
}
res := bld.results[len(bld.results)-1]
if res == nil {
break
}
for _, f := range res.List {
for _, id := range f.Names {
if n.Results != nil {
bld.assign(id)
}
bld.use(id)
}
}
case *ast.SendStmt:
bld.maybePanic()
return bld
case *ast.BinaryExpr:
if n.Op == token.EQL || n.Op == token.QUO || n.Op == token.REM {
bld.maybePanic()
}
return bld
case *ast.CallExpr:
bld.maybePanic()
return bld
case *ast.IndexExpr:
bld.maybePanic()
return bld
case *ast.UnaryExpr:
id, ok := ident(n.X)
if ix, isIx := n.X.(*ast.IndexExpr); isIx {
// We don't care about indexing into slices, but without type information we can do no better.
id, ok = ident(ix.X)
}
if ok && n.Op == token.AND {
if v, ok := bld.vars[id.Obj]; ok {
v.escapes = true
}
}
return bld
case *ast.SelectorExpr:
bld.maybePanic()
// A method call (possibly delayed via a method value) might implicitly take
// the address of its receiver, causing it to escape.
// We can't do any better here without knowing the variable's type.
if id, ok := ident(n.X); ok {
if v, ok := bld.vars[id.Obj]; ok {
v.escapes = true
}
}
return bld
case *ast.SliceExpr:
bld.maybePanic()
// We don't care about slicing into slices, but without type information we can do no better.
if id, ok := ident(n.X); ok {
if v, ok := bld.vars[id.Obj]; ok {
v.escapes = true
}
}
return bld
case *ast.StarExpr:
bld.maybePanic()
return bld
case *ast.TypeAssertExpr:
bld.maybePanic()
return bld
default:
return bld
}
return nil
}
func isZeroInitializer(x ast.Expr) bool {
// Assume that a call expression of a single argument is a conversion expression. We can't do better without type information.
if c, ok := x.(*ast.CallExpr); ok {
switch c.Fun.(type) {
case *ast.Ident, *ast.SelectorExpr:
default:
return false
}
if len(c.Args) != 1 {
return false
}
x = c.Args[0]
}
b, ok := x.(*ast.BasicLit)
if !ok {
return false
}
switch b.Value {
case "0", "0.0", "0.", ".0", `""`:
return true
}
return false
}
func (bld *builder) fun(typ *ast.FuncType, body *ast.BlockStmt) {
for _, v := range bld.vars {
v.fundept++
}
bld.results = append(bld.results, typ.Results)
b := bld.block
bld.newBlock()
bld.roots = append(bld.roots, bld.block)
bld.walk(typ)
bld.walk(body)
bld.block = b
bld.results = bld.results[:len(bld.results)-1]
for _, v := range bld.vars {
v.fundept--
}
}
func (bld *builder) swtch(stmt ast.Stmt, cases []ast.Stmt) {
brek := bld.breaks.push(bld.stmtLabel(stmt))
b0 := bld.block
list := b0
exits := make([]*block, 0, len(cases)+1)
var dfault, fallthru *block
for _, c := range cases {
c := c.(*ast.CaseClause)
if c.List != nil {
list = bld.newBlock(list)
for _, x := range c.List {
bld.walk(x)
}
}
parents := []*block{}
if c.List != nil {
parents = append(parents, list)
}
if fallthru != nil {
parents = append(parents, fallthru)
fallthru = nil
}
bld.newBlock(parents...)
if c.List == nil {
dfault = bld.block
}
for _, s := range c.Body {
bld.walk(s)
if s, ok := s.(*ast.BranchStmt); ok && s.Tok == token.FALLTHROUGH {
fallthru = bld.block
}
}
if fallthru == nil {
exits = append(exits, bld.block)
}
}
if dfault != nil {
list.addChild(dfault)
} else {
exits = append(exits, b0)
}
brek.setDestination(bld.newBlock(exits...))
bld.breaks.pop()
}
// An operation that might panic marks named function results as used.
func (bld *builder) maybePanic() {
if len(bld.results) == 0 {
return
}
res := bld.results[len(bld.results)-1]
if res == nil {
return
}
for _, f := range res.List {
for _, id := range f.Names {
bld.use(id)
}
}
}
func (bld *builder) newBlock(parents ...*block) *block {
bld.block = &block{ops: map[*ast.Object][]operation{}}
for _, b := range parents {
b.addChild(bld.block)
}
return bld.block
}
func (bld *builder) stmtLabel(s ast.Stmt) *ast.Object {
if ls := bld.labelStmt; ls != nil && ls.Stmt == s {
return ls.Label.Obj
}
return nil
}
func (bld *builder) assign(id *ast.Ident) {
bld.newOp(id, true)
}
func (bld *builder) use(id *ast.Ident) {
bld.newOp(id, false)
}
func (bld *builder) newOp(id *ast.Ident, assign bool) {
if id.Name == "_" || id.Obj == nil {
return
}
v, ok := bld.vars[id.Obj]
if !ok {
v = &variable{}
bld.vars[id.Obj] = v
}
v.escapes = v.escapes || v.fundept > 0 || bld.block == nil
if b := bld.block; b != nil {
b.ops[id.Obj] = append(b.ops[id.Obj], operation{id, assign})
}
}
type branchStack []*branch
type branch struct {
label *ast.Object
srcs []*block
dst *block
}
func (s *branchStack) push(lbl *ast.Object) *branch {
br := &branch{label: lbl}
*s = append(*s, br)
return br
}
func (s *branchStack) get(lbl *ast.Ident) *branch {
for i := len(*s) - 1; i >= 0; i-- {
if br := (*s)[i]; lbl == nil || br.label == lbl.Obj {
return br
}
}
// Guard against invalid code (break/continue outside of loop).
if lbl == nil {
return &branch{}
}
return s.push(lbl.Obj)
}
func (br *branch) addSource(src *block) {
br.srcs = append(br.srcs, src)
if br.dst != nil {
src.addChild(br.dst)
}
}
func (br *branch) setDestination(dst *block) {
br.dst = dst
for _, src := range br.srcs {
src.addChild(dst)
}
}
func (s *branchStack) pop() {
*s = (*s)[:len(*s)-1]
}
func ident(x ast.Expr) (*ast.Ident, bool) {
if p, ok := x.(*ast.ParenExpr); ok {
return ident(p.X)
}
id, ok := x.(*ast.Ident)
return id, ok
}
type checker struct {
vars map[*ast.Object]*variable
seen map[*block]bool
ineff idents
}
func (chk *checker) check(b *block) {
if chk.seen[b] {
return
}
chk.seen[b] = true
for obj, ops := range b.ops {
if chk.vars[obj].escapes {
continue
}
ops:
for i, op := range ops {
if !op.assign {
continue
}
if i+1 < len(ops) {
if ops[i+1].assign {
chk.ineff = append(chk.ineff, op.id)
}
continue
}
seen := map[*block]bool{}
for _, b := range b.children {
if used(obj, b, seen) {
continue ops
}
}
chk.ineff = append(chk.ineff, op.id)
}
}
for _, b := range b.children {
chk.check(b)
}
}
func used(obj *ast.Object, b *block, seen map[*block]bool) bool {
if seen[b] {
return false
}
seen[b] = true
if ops := b.ops[obj]; len(ops) > 0 {
return !ops[0].assign
}
for _, b := range b.children {
if used(obj, b, seen) {
return true
}
}
return false
}
type idents []*ast.Ident
func (ids idents) Len() int { return len(ids) }
func (ids idents) Less(i, j int) bool { return ids[i].Pos() < ids[j].Pos() }
func (ids idents) Swap(i, j int) { ids[i], ids[j] = ids[j], ids[i] }

25
vendor/github.com/gordonklaus/ineffassign/list generated vendored Normal file
View File

@ -0,0 +1,25 @@
/Users/gordon/go/src/code.google.com/p/freetype-go/freetype/truetype/truetype.go:493:5: offset assigned and not used
/Users/gordon/go/src/code.google.com/p/freetype-go/freetype/truetype/truetype.go:289:11: offset assigned and not used
/Users/gordon/go/src/code.google.com/p/freetype-go/freetype/truetype/truetype_test.go:224:2: prefix assigned and not used
/Users/gordon/go/src/code.google.com/p/freetype-go/freetype/truetype/truetype_test.go:239:3: s assigned and not used
/Users/gordon/go/src/github.com/gordonklaus/flux/go/types/resolver.go:372:2: seenPkgs assigned and not used
/Users/gordon/go/src/github.com/gopherjs/gopherjs/compiler/package.go:195:7: recvType assigned and not used
/Users/gordon/go/src/golang.org/x/crypto/ocsp/ocsp.go:340:2: rest assigned and not used
/Users/gordon/go/src/golang.org/x/crypto/openpgp/packet/opaque_test.go:35:6: err assigned and not used
/Users/gordon/go/src/golang.org/x/crypto/otr/otr.go:641:6: in assigned and not used
/Users/gordon/go/src/golang.org/x/crypto/otr/otr_test.go:198:17: err assigned and not used
/Users/gordon/go/src/golang.org/x/crypto/ssh/benchmark_test.go:94:17: err assigned and not used
/Users/gordon/go/src/golang.org/x/mobile/app/android.go:175:2: queue assigned and not used
/Users/gordon/go/src/golang.org/x/mobile/cmd/gomobile/bind.go:411:2: w assigned and not used
/Users/gordon/go/src/golang.org/x/mobile/cmd/gomobile/build.go:231:8: err assigned and not used
/Users/gordon/go/src/golang.org/x/net/icmp/listen_posix.go:83:6: err assigned and not used
/Users/gordon/go/src/golang.org/x/net/ipv4/control_unix.go:99:5: b assigned and not used
/Users/gordon/go/src/golang.org/x/net/ipv4/control_unix.go:148:4: b assigned and not used
/Users/gordon/go/src/golang.org/x/net/ipv6/control_unix.go:90:4: b assigned and not used
/Users/gordon/go/src/golang.org/x/net/ipv6/control_unix.go:162:4: b assigned and not used
/Users/gordon/go/src/golang.org/x/net/websocket/hybi.go:298:3: n assigned and not used
/Users/gordon/go/src/golang.org/x/tools/cmd/callgraph/main.go:164:2: args assigned and not used
/Users/gordon/go/src/golang.org/x/tools/cmd/cover/cover_test.go:52:2: err assigned and not used
/Users/gordon/go/src/golang.org/x/tools/go/gcimporter/exportdata.go:74:13: size assigned and not used
/Users/gordon/go/src/golang.org/x/tools/oracle/oracle.go:268:2: iprog assigned and not used
/Users/gordon/go/src/golang.org/x/tools/oracle/oracle_test.go:299:2: iprog assigned and not used

131
vendor/github.com/gordonklaus/ineffassign/liststd generated vendored Normal file
View File

@ -0,0 +1,131 @@
/usr/local/go/src/bufio/scan.go:388:6: ineffectual assignment to width
/usr/local/go/src/bufio/scan.go:396:6: ineffectual assignment to width
/usr/local/go/src/bytes/buffer_test.go:141:6: ineffectual assignment to err
/usr/local/go/src/bytes/buffer_test.go:164:3: ineffectual assignment to c
/usr/local/go/src/cmd/cgo/out.go:799:3: ineffectual assignment to gccResult
/usr/local/go/src/cmd/compile/internal/big/ratconv.go:170:4: ineffectual assignment to err
/usr/local/go/src/cmd/compile/internal/gc/bimport.go:330:2: ineffectual assignment to file
/usr/local/go/src/cmd/compile/internal/gc/cgen.go:3332:3: ineffectual assignment to max
/usr/local/go/src/cmd/compile/internal/gc/export.go:379:2: ineffectual assignment to size
/usr/local/go/src/cmd/compile/internal/gc/global_test.go:51:2: ineffectual assignment to out
/usr/local/go/src/cmd/compile/internal/gc/lex.go:281:4: ineffectual assignment to c1
/usr/local/go/src/cmd/compile/internal/gc/reg.go:1373:2: ineffectual assignment to firstf
/usr/local/go/src/cmd/compile/internal/gc/reg.go:1381:3: ineffectual assignment to firstf
/usr/local/go/src/cmd/compile/internal/s390x/peep.go:1048:3: ineffectual assignment to size
/usr/local/go/src/cmd/compile/internal/s390x/peep.go:1139:3: ineffectual assignment to size
/usr/local/go/src/cmd/compile/internal/ssa/loopbce.go:44:3: ineffectual assignment to entry
/usr/local/go/src/cmd/cover/html.go:64:8: ineffectual assignment to err
/usr/local/go/src/cmd/cover/html.go:66:8: ineffectual assignment to err
/usr/local/go/src/cmd/go/build.go:3355:3: ineffectual assignment to cgoLDFLAGS
/usr/local/go/src/cmd/internal/goobj/read.go:532:3: ineffectual assignment to data
/usr/local/go/src/cmd/internal/obj/arm64/obj7.go:600:2: ineffectual assignment to aoffset
/usr/local/go/src/cmd/internal/obj/mips/asm0.go:1049:3: ineffectual assignment to v
/usr/local/go/src/cmd/internal/obj/mips/asm0.go:1101:3: ineffectual assignment to v
/usr/local/go/src/cmd/internal/obj/s390x/objz.go:609:3: ineffectual assignment to pLast
/usr/local/go/src/cmd/internal/pprof/profile/encode.go:279:12: ineffectual assignment to err
/usr/local/go/src/cmd/link/internal/ld/dwarf.go:1426:2: ineffectual assignment to unitstart
/usr/local/go/src/cmd/link/internal/ld/dwarf.go:1427:2: ineffectual assignment to headerstart
/usr/local/go/src/cmd/link/internal/ld/dwarf.go:1428:2: ineffectual assignment to headerend
/usr/local/go/src/cmd/link/internal/ld/elf.go:2272:3: ineffectual assignment to resoff
/usr/local/go/src/cmd/vet/print.go:227:9: ineffectual assignment to w
/usr/local/go/src/cmd/yacc/yacc.go:770:2: ineffectual assignment to val
/usr/local/go/src/cmd/yacc/yacc.go:3127:2: ineffectual assignment to i
/usr/local/go/src/compress/bzip2/huffman.go:114:4: ineffectual assignment to length
/usr/local/go/src/compress/flate/reader_test.go:53:3: ineffectual assignment to buf0
/usr/local/go/src/compress/flate/writer_test.go:29:3: ineffectual assignment to buf0
/usr/local/go/src/compress/gzip/gzip_test.go:211:5: ineffectual assignment to err
/usr/local/go/src/compress/lzw/reader_test.go:148:4: ineffectual assignment to buf0
/usr/local/go/src/compress/lzw/writer_test.go:146:3: ineffectual assignment to buf0
/usr/local/go/src/container/list/list_test.go:286:2: ineffectual assignment to e1
/usr/local/go/src/container/list/list_test.go:286:6: ineffectual assignment to e2
/usr/local/go/src/container/list/list_test.go:286:10: ineffectual assignment to e3
/usr/local/go/src/container/list/list_test.go:286:14: ineffectual assignment to e4
/usr/local/go/src/crypto/elliptic/p224.go:722:10: ineffectual assignment to bytes
/usr/local/go/src/crypto/tls/handshake_messages.go:289:3: ineffectual assignment to z
/usr/local/go/src/crypto/x509/verify.go:110:5: ineffectual assignment to certName
/usr/local/go/src/database/sql/sql_test.go:1705:4: ineffectual assignment to numOpen
/usr/local/go/src/database/sql/sql_test.go:1839:5: ineffectual assignment to err
/usr/local/go/src/debug/dwarf/type.go:540:5: ineffectual assignment to haveBitOffset
/usr/local/go/src/debug/elf/file.go:1014:3: ineffectual assignment to suffix
/usr/local/go/src/debug/gosym/pclntab_test.go:256:2: ineffectual assignment to off
/usr/local/go/src/debug/pe/file_test.go:309:2: ineffectual assignment to err
/usr/local/go/src/encoding/base32/base32_test.go:120:4: ineffectual assignment to count
/usr/local/go/src/encoding/base64/base64_test.go:174:4: ineffectual assignment to count
/usr/local/go/src/encoding/gob/decgen.go:187:6: ineffectual assignment to err
/usr/local/go/src/encoding/gob/encgen.go:166:6: ineffectual assignment to err
/usr/local/go/src/encoding/json/encode.go:1071:2: ineffectual assignment to count
/usr/local/go/src/encoding/json/encode.go:1169:6: ineffectual assignment to advance
/usr/local/go/src/encoding/xml/xml.go:1030:6: ineffectual assignment to ok
/usr/local/go/src/fmt/print.go:936:2: ineffectual assignment to afterIndex
/usr/local/go/src/fmt/print.go:1051:15: ineffectual assignment to afterIndex
/usr/local/go/src/go/ast/filter.go:84:3: ineffectual assignment to keepField
/usr/local/go/src/go/internal/gcimporter/bimport.go:215:2: ineffectual assignment to file
/usr/local/go/src/go/printer/nodes.go:439:4: ineffectual assignment to extraTabs
/usr/local/go/src/go/printer/printer_test.go:155:8: ineffectual assignment to err
/usr/local/go/src/go/types/conversions.go:49:2: ineffectual assignment to final
/usr/local/go/src/html/template/css.go:160:2: ineffectual assignment to r
/usr/local/go/src/html/template/css.go:160:5: ineffectual assignment to w
/usr/local/go/src/html/template/html.go:141:2: ineffectual assignment to r
/usr/local/go/src/html/template/html.go:141:5: ineffectual assignment to w
/usr/local/go/src/html/template/js.go:249:2: ineffectual assignment to r
/usr/local/go/src/html/template/js.go:249:5: ineffectual assignment to w
/usr/local/go/src/image/decode_test.go:125:9: ineffectual assignment to err
/usr/local/go/src/image/png/reader.go:689:2: ineffectual assignment to n
/usr/local/go/src/image/png/writer.go:269:3: ineffectual assignment to best
/usr/local/go/src/io/io_test.go:245:2: ineffectual assignment to n
/usr/local/go/src/io/ioutil/ioutil.go:149:2: ineffectual assignment to readSize
/usr/local/go/src/io/ioutil/ioutil_test.go:24:2: ineffectual assignment to contents
/usr/local/go/src/log/syslog/syslog_test.go:236:5: ineffectual assignment to err
/usr/local/go/src/log/syslog/syslog_test.go:240:5: ineffectual assignment to err
/usr/local/go/src/math/big/ratconv.go:176:4: ineffectual assignment to err
/usr/local/go/src/mime/multipart/multipart_test.go:408:2: ineffectual assignment to p
/usr/local/go/src/net/dial_test.go:381:6: ineffectual assignment to err
/usr/local/go/src/net/dnsname_test.go:36:6: ineffectual assignment to char63
/usr/local/go/src/net/dnsname_test.go:37:6: ineffectual assignment to char64
/usr/local/go/src/net/fd_plan9.go:64:4: ineffectual assignment to err
/usr/local/go/src/net/fd_windows.go:166:3: ineffectual assignment to err
/usr/local/go/src/net/http/fs.go:413:5: ineffectual assignment to name
/usr/local/go/src/net/http/h2_bundle.go:6249:4: ineffectual assignment to n
/usr/local/go/src/net/http/request_test.go:155:13: ineffectual assignment to err
/usr/local/go/src/net/http/serve_test.go:4053:13: ineffectual assignment to err
/usr/local/go/src/net/http/transport_test.go:729:8: ineffectual assignment to err
/usr/local/go/src/net/http/transport_test.go:2345:3: ineffectual assignment to slurp
/usr/local/go/src/net/parse.go:27:2: ineffectual assignment to i
/usr/local/go/src/net/rpc/server.go:270:3: ineffectual assignment to str
/usr/local/go/src/net/udpsock_plan9.go:80:16: ineffectual assignment to i
/usr/local/go/src/os/env_test.go:109:2: ineffectual assignment to value
/usr/local/go/src/os/os_test.go:1080:5: ineffectual assignment to err
/usr/local/go/src/os/path_test.go:122:2: ineffectual assignment to testit
/usr/local/go/src/reflect/type.go:2379:3: ineffectual assignment to name
/usr/local/go/src/regexp/exec.go:123:2: ineffectual assignment to r
/usr/local/go/src/regexp/exec.go:124:2: ineffectual assignment to width
/usr/local/go/src/regexp/exec.go:321:2: ineffectual assignment to r
/usr/local/go/src/regexp/exec.go:322:2: ineffectual assignment to width
/usr/local/go/src/regexp/onepass.go:338:15: ineffectual assignment to matchArg
/usr/local/go/src/regexp/syntax/parse.go:577:2: ineffectual assignment to start
/usr/local/go/src/runtime/lfstack_test.go:48:2: ineffectual assignment to nodes
/usr/local/go/src/runtime/mbitmap.go:1458:3: ineffectual assignment to i
/usr/local/go/src/runtime/mfinal_test.go:60:4: ineffectual assignment to v
/usr/local/go/src/runtime/mfinal_test.go:98:3: ineffectual assignment to v
/usr/local/go/src/runtime/mgcmark.go:414:2: ineffectual assignment to stolen
/usr/local/go/src/runtime/mgcsweep.go:188:2: ineffectual assignment to nfree
/usr/local/go/src/runtime/os_plan9.go:307:2: ineffectual assignment to n
/usr/local/go/src/runtime/pprof/pprof.go:465:5: ineffectual assignment to ok
/usr/local/go/src/runtime/pprof/pprof.go:608:5: ineffectual assignment to ok
/usr/local/go/src/runtime/pprof/pprof.go:751:5: ineffectual assignment to ok
/usr/local/go/src/runtime/proc.go:4227:3: ineffectual assignment to xname
/usr/local/go/src/runtime/runtime1.go:360:3: ineffectual assignment to field
/usr/local/go/src/runtime/runtime_mmap_test.go:25:2: ineffectual assignment to p
/usr/local/go/src/runtime/softfloat64.go:228:3: ineffectual assignment to f
/usr/local/go/src/runtime/softfloat64.go:228:6: ineffectual assignment to g
/usr/local/go/src/runtime/stack_test.go:106:4: ineffectual assignment to s
/usr/local/go/src/strconv/quote.go:23:6: ineffectual assignment to width
/usr/local/go/src/sync/atomic/atomic_test.go:1122:2: ineffectual assignment to new
/usr/local/go/src/sync/atomic/atomic_test.go:1150:2: ineffectual assignment to new
/usr/local/go/src/syscall/dir_plan9.go:88:2: ineffectual assignment to b
/usr/local/go/src/syscall/dir_plan9.go:131:13: ineffectual assignment to b
/usr/local/go/src/syscall/exec_plan9.go:281:2: ineffectual assignment to r1
/usr/local/go/src/syscall/mksyscall_windows.go:310:2: ineffectual assignment to s
/usr/local/go/src/syscall/syscall_bsd_test.go:23:2: ineffectual assignment to n
/usr/local/go/src/syscall/syscall_unix_test.go:187:17: ineffectual assignment to err
/usr/local/go/src/text/template/multi_test.go:249:9: ineffectual assignment to err

21
vendor/github.com/karalabe/xgo/LICENSE generated vendored Normal file
View File

@ -0,0 +1,21 @@
The MIT License
Copyright (c) 2014 Péter Szilágyi <peterke@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

272
vendor/github.com/karalabe/xgo/README.md generated vendored Normal file
View File

@ -0,0 +1,272 @@
# xgo - Go CGO cross compiler
Although Go strives to be a cross platform language, cross compilation from one
platform to another is not as simple as it could be, as you need the Go sources
bootstrapped to each platform and architecture.
The first step towards cross compiling was Dave Cheney's [golang-crosscompile](https://github.com/davecheney/golang-crosscompile)
package, which automatically bootstrapped the necessary sources based on your
existing Go installation. Although this was enough for a lot of cases, certain
drawbacks became apparent where the official libraries used CGO internally: any
dependency on third party platform code is unavailable, hence those parts don't
cross compile nicely (native DNS resolution, system certificate access, etc).
A step forward in enabling cross compilation was Alan Shreve's [gonative](https://github.com/inconshreveable/gonative)
package, which instead of bootstrapping the different platforms based on the
existing Go installation, downloaded the official pre-compiled binaries from the
golang website and injected those into the local toolchain. Since the pre-built
binaries already contained the necessary platform specific code, the few missing
dependencies were resolved, and true cross compilation could commence... of pure
Go code.
However, there was still one feature missing: cross compiling Go code that used
CGO itself, which isn't trivial since you need access to OS specific headers and
libraries. This becomes very annoying when you need access only to some trivial
OS specific functionality (e.g. query the CPU load), but need to configure and
maintain separate build environments to do it.
## Enter xgo
My solution to the challenge of cross compiling Go code with embedded C/C++ snippets
(i.e. CGO_ENABLED=1) is based on the concept of [lightweight Linux containers](http://en.wikipedia.org/wiki/LXC).
All the necessary Go tool-chains, C cross compilers and platform headers/libraries
have been assembled into a single Docker container, which can then be invoked as if
it were a single command to compile a Go package to various platforms and architectures.
## Installation
Although you could build the container manually, it is available as an automatic
trusted build from Docker's container registry (not insignificant in size):
docker pull karalabe/xgo-latest
To prevent having to remember a potentially complex Docker command every time,
a lightweight Go wrapper was written on top of it.
go get github.com/karalabe/xgo
## Usage
Simply specify the import path you want to build, and xgo will do the rest:
$ xgo github.com/project-iris/iris
...
$ ls -al
-rwxr-xr-x 1 root root 9995000 Nov 24 16:44 iris-android-16-arm
-rwxr-xr-x 1 root root 6776500 Nov 24 16:44 iris-darwin-10.6-386
-rwxr-xr-x 1 root root 8755532 Nov 24 16:44 iris-darwin-10.6-amd64
-rwxr-xr-x 1 root root 7114176 Nov 24 16:45 iris-ios-5.0-arm
-rwxr-xr-x 1 root root 10135248 Nov 24 16:44 iris-linux-386
-rwxr-xr-x 1 root root 12598472 Nov 24 16:44 iris-linux-amd64
-rwxr-xr-x 1 root root 10040464 Nov 24 16:44 iris-linux-arm
-rwxr-xr-x 1 root root 7516368 Nov 24 16:44 iris-windows-4.0-386.exe
-rwxr-xr-x 1 root root 9549416 Nov 24 16:44 iris-windows-4.0-amd64.exe
If the path is not a canonical import path, but rather a local path (starts with
a dot `.` or a slash `/`), xgo will use the local GOPATH contents for the cross
compilation.
### Build flags
A handful of flags can be passed to `go build`. The currently supported ones are
- `-v`: prints the names of packages as they are compiled
- `-x`: prints the build commands as compilation progresses
- `-race`: enables data race detection (supported only on amd64, rest built without)
- `-tags='tag list'`: list of build tags to consider satisfied during the build
- `-ldflags='flag list'`: arguments to pass on each go tool link invocation
- `-buildmode=mode`: binary type to produce by the compiler
### Go releases
As newer versions of the language runtime, libraries and tools get released,
these will get incorporated into xgo too as extensions layers to the base cross
compilation image (only Go 1.3 and above will be supported).
You can select which Go release to work with through the `-go` command line flag
to xgo and if the specific release was already integrated, it will automatically
be retrieved and installed.
$ xgo -go 1.6.1 github.com/project-iris/iris
Additionally, a few wildcard release strings are also supported:
- `latest` will use the latest Go release (this is the default)
- `1.6.x` will use the latest point release of a specific Go version
- `1.6-develop` will use the develop branch of a specific Go version
- `develop` will use the develop branch of the entire Go repository
### Output prefixing
xgo by default uses the name of the package being cross compiled as the output
file prefix. This can be overridden with the `-out` flag.
$ xgo -out iris-v0.3.2 github.com/project-iris/iris
...
$ ls -al
-rwxr-xr-x 1 root root 9995000 Nov 24 16:44 iris-v0.3.2-android-16-arm
-rwxr-xr-x 1 root root 6776500 Nov 24 16:44 iris-v0.3.2-darwin-10.6-386
-rwxr-xr-x 1 root root 8755532 Nov 24 16:44 iris-v0.3.2-darwin-10.6-amd64
-rwxr-xr-x 1 root root 7114176 Nov 24 16:45 iris-v0.3.2-ios-5.0-arm
-rwxr-xr-x 1 root root 10135248 Nov 24 16:44 iris-v0.3.2-linux-386
-rwxr-xr-x 1 root root 12598472 Nov 24 16:44 iris-v0.3.2-linux-amd64
-rwxr-xr-x 1 root root 10040464 Nov 24 16:44 iris-v0.3.2-linux-arm
-rwxr-xr-x 1 root root 7516368 Nov 24 16:44 iris-v0.3.2-windows-4.0-386.exe
-rwxr-xr-x 1 root root 9549416 Nov 24 16:44 iris-v0.3.2-windows-4.0-amd64.exe
### Branch selection
Similarly to `go get`, xgo also uses the `master` branch of a repository during
source code retrieval. To switch to a different branch before compilation pass
the desired branch name through the `--branch` argument.
$ xgo --branch release-branch.go1.4 golang.org/x/tools/cmd/goimports
...
$ ls -al
-rwxr-xr-x 1 root root 4171248 Nov 24 16:40 goimports-android-16-arm
-rwxr-xr-x 1 root root 4139868 Nov 24 16:40 goimports-darwin-10.6-386
-rwxr-xr-x 1 root root 5186720 Nov 24 16:40 goimports-darwin-10.6-amd64
-rwxr-xr-x 1 root root 3202364 Nov 24 16:40 goimports-ios-5.0-arm
-rwxr-xr-x 1 root root 4189456 Nov 24 16:40 goimports-linux-386
-rwxr-xr-x 1 root root 5264136 Nov 24 16:40 goimports-linux-amd64
-rwxr-xr-x 1 root root 4209416 Nov 24 16:40 goimports-linux-arm
-rwxr-xr-x 1 root root 4348416 Nov 24 16:40 goimports-windows-4.0-386.exe
-rwxr-xr-x 1 root root 5415424 Nov 24 16:40 goimports-windows-4.0-amd64.exe
### Remote selection
Yet again similarly to `go get`, xgo uses the repository remote corresponding to
the import path being built. To switch to a different remote while preserving the
original import path, use the `--remote` argument.
$ xgo --remote github.com/golang/tools golang.org/x/tools/cmd/goimports
...
### Package selection
If you used the above *branch* or *remote* selection mechanisms, it may happen
that the path you are trying to build is only present in the specific branch and
not the default repository, causing Go to fail at locating it. To circumvent this,
you may specify only the repository root for xgo, and use an additional `--pkg`
parameter to select the exact package within, honoring any prior *branch* and
*remote* selections.
$ xgo --pkg cmd/goimports golang.org/x/tools
...
$ ls -al
-rwxr-xr-x 1 root root 4194956 Nov 24 16:38 goimports-android-16-arm
-rwxr-xr-x 1 root root 4164448 Nov 24 16:38 goimports-darwin-10.6-386
-rwxr-xr-x 1 root root 5223584 Nov 24 16:38 goimports-darwin-10.6-amd64
-rwxr-xr-x 1 root root 3222848 Nov 24 16:39 goimports-ios-5.0-arm
-rwxr-xr-x 1 root root 4217184 Nov 24 16:38 goimports-linux-386
-rwxr-xr-x 1 root root 5295768 Nov 24 16:38 goimports-linux-amd64
-rwxr-xr-x 1 root root 4233120 Nov 24 16:38 goimports-linux-arm
-rwxr-xr-x 1 root root 4373504 Nov 24 16:38 goimports-windows-4.0-386.exe
-rwxr-xr-x 1 root root 5450240 Nov 24 16:38 goimports-windows-4.0-amd64.exe
This argument may at some point be integrated into the import path itself, but for
now it exists as an independent build parameter. Also, it is currently not possible
to build multiple commands in one go.
### Limit build targets
By default `xgo` will try and build the specified package to all platforms and
architectures supported by the underlying Go runtime. If you wish to restrict
the build to only a few target systems, use the comma separated `--targets` CLI
argument:
* `--targets=linux/arm`: builds only the ARMv5 Linux binaries (`arm-6`/`arm-7` allowed)
* `--targets=windows/*,darwin/*`: builds all Windows and OSX binaries
* `--targets=*/arm`: builds ARM binaries for all platforms
* `--targets=*/*`: builds all supported targets (default)
The supported targets are:
* Platforms: `android`, `darwin`, `ios`, `linux`, `windows`
* Architectures: `386`, `amd64`, `arm-5`, `arm-6`, `arm-7`, `arm64`, `mips`, `mipsle`, `mips64`, `mips64le`
### Platform versions
By default `xgo` tries to cross compile to the lowest possible versions of every
supported platform, in order to produce binaries that are portable among various
versions of the same operating system. This however can lead to issues if a used
dependency is only supported by more recent systems. As such, `xgo` supports the
selection of specific platform versions by appending them to the OS target string.
* `--targets=ios-8.1/*`: cross compile to iOS 8.1
* `--targets=android-16/*`: cross compile to Android Jelly Bean
* `--targets=darwin-10.9/*`: cross compile to Mac OS X Mavericks
* `--targets=windows-6.0/*`: cross compile to Windows Vista
The supported platforms are:
* All Android APIs up to Android Lollipop 5.0 ([API level ids](https://source.android.com/source/build-numbers.html))
* All Windows APIs up to Windows 8.1 limited by `mingw-w64` ([API level ids](https://en.wikipedia.org/wiki/Windows_NT#Releases))
* OSX APIs in the range of 10.6 - 10.11
* All iOS APIs up to iOS 9.3
### Mobile libraries
Apart from the usual runnable binaries, `xgo` also supports building library
archives for Android (`android/aar`) and iOS (`ios/framework`). As opposed to
`gomobile`, however, `xgo` does not derive library APIs from the Go code, so
proper CGO C external methods must be defined within the package.
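As a rough sketch of what that means, a library package might export a method to C
along the following lines (the package layout and the `Add` function are invented
for this illustration, not taken from any real project):
```
// Minimal illustrative sketch of a package exposing a CGO-exported method.
package main

import "C"

// Add is exported to C so that the generated android/aar or ios/framework
// archive can expose it to the consuming mobile application.
//export Add
func Add(a, b C.int) C.int {
	return a + b
}

// main is required by the library build modes but is never called by the
// consuming application.
func main() {}
```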
In the case of Android archives, all architectures will be bundled that are
supported by the requested Android platform version. For iOS frameworks `xgo`
will bundle armv7 and arm64 by default, and also the x86_64 simulator builds
if the iPhoneSimulator.sdk was injected by the user:
* Create a new docker image based on xgo: `FROM karalabe/xgo-latest`
* Inject the simulator SDK: `ADD iPhoneSimulator9.3.sdk.tar.xz /iPhoneSimulator9.3.sdk.tar.xz`
* Bootstrap the simulator SDK: `$UPDATE_IOS /iPhoneSimulator9.3.sdk.tar.xz`
### CGO dependencies
The main differentiator of xgo versus other cross compilers is support for basic
embedded C/C++ code and target-platform specific OS SDK availability. The current
xgo release introduces an experimental CGO *dependency* cross compilation, enabling
building Go programs that require external C/C++ libraries.
It is assumed that the dependent C/C++ library is `configure/make` based, was
properly prepared for cross compilation and is available as a tarball download
(`.tar`, `.tar.gz` or `.tar.bz2`). Further plans include extending this to cmake
based projects, if need arises (please open an issue if it's important to you).
Such dependencies can be added via the `--deps` argument. They will be retrieved
prior to starting the cross compilation and the packages cached to save bandwidth
on subsequent calls.
A complex sample for such a scenario is building the Ethereum CLI node, which has
the GNU Multiple Precision Arithmetic Library as its dependency.
$ xgo --deps=https://gmplib.org/download/gmp/gmp-6.1.0.tar.bz2 \
--targets=windows/* github.com/ethereum/go-ethereum/cmd/geth
...
$ ls -al
-rwxr-xr-x 1 root root 16315679 Nov 24 16:39 geth-windows-4.0-386.exe
-rwxr-xr-x 1 root root 19452036 Nov 24 16:38 geth-windows-4.0-amd64.exe
Some trivial arguments may be passed to the dependencies' configure script via
`--depsargs`.
$ xgo --deps=https://gmplib.org/download/gmp/gmp-6.1.0.tar.bz2 \
--targets=ios/* --depsargs=--disable-assembly \
github.com/ethereum/go-ethereum/cmd/geth
...
$ ls -al
-rwxr-xr-x 1 root root 14804160 Nov 24 16:32 geth-ios-5.0-arm
Note that since xgo needs to cross compile the dependencies for each platform
and architecture separately, build time can increase significantly.

86
vendor/github.com/karalabe/xgo/testsuite.go generated vendored Normal file
View File

@ -0,0 +1,86 @@
// Go CGO cross compiler
// Copyright (c) 2016 Péter Szilágyi. All rights reserved.
//
// Released under the MIT license.
// This is a manual test suite to run the cross compiler against various known
// projects, codebases and repositories to ensure at least a baseline guarantee
// that things work as they are supposed to.
//
// Run as: go run testsuite.go
// +build ignore
package main
import (
"log"
"os"
"os/exec"
"path/filepath"
)
// layers defines all the docker layers needed for the final xgo image. The last
// one will be used to run the test suite against.
var layers = []struct {
tag string
dir string
}{
{"karalabe/xgo-base", "base"},
{"karalabe/xgo-1.6.2", "go-1.6.2"},
{"karalabe/xgo-1.6.x", "go-1.6.x"},
{"karalabe/xgo-latest", "go-latest"},
//{"karalabe/xgo-latest-ios", "go-latest-ios"}, // Non-public layer (XCode licensing)
}
// tests defines all the input test cases and the arguments the cross
// compiler should be run with for each of them.
var tests = []struct {
path string
args []string
}{
// Tiny test cases to smoke test cross compilations
{"github.com/karalabe/xgo/tests/embedded_c", nil},
{"github.com/karalabe/xgo/tests/embedded_cpp", nil},
// Baseline projects to ensure minimal requirements
//{"github.com/project-iris/iris", nil}, // Deps failed, disable
{"github.com/ethereum/go-ethereum/cmd/geth", []string{"--branch", "develop"}},
// Third party projects using xgo, smoke test that they don't break
{"github.com/rwcarlsen/cyan/cmd/cyan", nil},
{"github.com/cockroachdb/cockroach", []string{"--targets", "darwin-10.11/amd64"}},
}
func main() {
// Retrieve the current working directory to locate the dockerfiles
pwd, err := os.Getwd()
if err != nil {
log.Fatalf("Failed to retrieve local working directory: %v", err)
}
if _, err := os.Stat(filepath.Join(pwd, "docker", "base")); err != nil {
log.Fatalf("Failed to locate docker image: %v", err)
}
// Assemble the multi-layered xgo docker image
for _, layer := range layers {
cmd := exec.Command("docker", "build", "--tag", layer.tag, filepath.Join(pwd, "docker", layer.dir))
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
if err := cmd.Run(); err != nil {
log.Fatalf("Failed to build xgo layer: %v", err)
}
}
// Iterate over each of the test cases and run them
for i, test := range tests {
cmd := exec.Command("docker", append([]string{"run", "--entrypoint", "xgo", layers[len(layers)-1].tag, "-v"}, append(test.args, test.path)...)...)
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
if err := cmd.Run(); err != nil {
log.Fatalf("Test #%d: cross compilation failed: %v", i, err)
}
}
}

373
vendor/github.com/karalabe/xgo/xgo.go generated vendored Normal file
View File

@ -0,0 +1,373 @@
// Go CGO cross compiler
// Copyright (c) 2014 Péter Szilágyi. All rights reserved.
//
// Released under the MIT license.
// Wrapper around the CGO cross compiler docker container.
package main
import (
"bytes"
"flag"
"fmt"
"go/build"
"io"
"log"
"net/http"
"os"
"os/exec"
"os/user"
"path/filepath"
"strconv"
"strings"
)
// Path where to cache external dependencies
var depsCache string
func init() {
// Initialize the external dependency cache path to a few possible locations
if home := os.Getenv("HOME"); home != "" {
depsCache = filepath.Join(home, ".xgo-cache")
return
}
if user, err := user.Current(); user != nil && err == nil && user.HomeDir != "" {
depsCache = filepath.Join(user.HomeDir, ".xgo-cache")
return
}
depsCache = filepath.Join(os.TempDir(), "xgo-cache")
}
// Cross compilation docker containers
var dockerBase = "karalabe/xgo-base"
var dockerDist = "karalabe/xgo-"
// Command line arguments to fine tune the compilation
var (
goVersion = flag.String("go", "latest", "Go release to use for cross compilation")
srcPackage = flag.String("pkg", "", "Sub-package to build if not root import")
srcRemote = flag.String("remote", "", "Version control remote repository to build")
srcBranch = flag.String("branch", "", "Version control branch to build")
outPrefix = flag.String("out", "", "Prefix to use for output naming (empty = package name)")
outFolder = flag.String("dest", "", "Destination folder to put binaries in (empty = current)")
crossDeps = flag.String("deps", "", "CGO dependencies (configure/make based archives)")
crossArgs = flag.String("depsargs", "", "CGO dependency configure arguments")
targets = flag.String("targets", "*/*", "Comma separated targets to build for")
dockerImage = flag.String("image", "", "Use custom docker image instead of official distribution")
)
// ConfigFlags is a simple set of flags to define the environment and dependencies.
type ConfigFlags struct {
Repository string // Root import path to build
Package string // Sub-package to build if not root import
Prefix string // Prefix to use for output naming
Remote string // Version control remote repository to build
Branch string // Version control branch to build
Dependencies string // CGO dependencies (configure/make based archives)
Arguments string // CGO dependency configure arguments
Targets []string // Targets to build for
}
// Command line arguments to pass to go build
var (
buildVerbose = flag.Bool("v", false, "Print the names of packages as they are compiled")
buildSteps = flag.Bool("x", false, "Print the command as executing the builds")
buildRace = flag.Bool("race", false, "Enable data race detection (supported only on amd64)")
buildTags = flag.String("tags", "", "List of build tags to consider satisfied during the build")
buildLdFlags = flag.String("ldflags", "", "Arguments to pass on each go tool link invocation")
buildMode = flag.String("buildmode", "default", "Indicates which kind of object file to build")
)
// BuildFlags is a simple collection of flags to fine tune a build.
type BuildFlags struct {
Verbose bool // Print the names of packages as they are compiled
Steps bool // Print the command as executing the builds
Race bool // Enable data race detection (supported only on amd64)
Tags string // List of build tags to consider satisfied during the build
LdFlags string // Arguments to pass on each go tool link invocation
Mode string // Indicates which kind of object file to build
}
func main() {
// Retrieve the CLI flags and the execution environment
flag.Parse()
xgoInXgo := os.Getenv("XGO_IN_XGO") == "1"
if xgoInXgo {
depsCache = "/deps-cache"
}
// Only use docker images if we're not already inside our own image
image := ""
if !xgoInXgo {
// Ensure docker is available
if err := checkDocker(); err != nil {
log.Fatalf("Failed to check docker installation: %v.", err)
}
// Validate the command line arguments
if len(flag.Args()) != 1 {
log.Fatalf("Usage: %s [options] <go import path>", os.Args[0])
}
// Select the image to use, either official or custom
image = dockerDist + *goVersion
if *dockerImage != "" {
image = *dockerImage
}
// Check that all required images are available
found, err := checkDockerImage(image)
switch {
case err != nil:
log.Fatalf("Failed to check docker image availability: %v.", err)
case !found:
fmt.Println("not found!")
if err := pullDockerImage(image); err != nil {
log.Fatalf("Failed to pull docker image from the registry: %v.", err)
}
default:
fmt.Println("found.")
}
}
// Cache all external dependencies to prevent always hitting the internet
if *crossDeps != "" {
if err := os.MkdirAll(depsCache, 0751); err != nil {
log.Fatalf("Failed to create dependency cache: %v.", err)
}
// Download all missing dependencies
for _, dep := range strings.Split(*crossDeps, " ") {
if url := strings.TrimSpace(dep); len(url) > 0 {
path := filepath.Join(depsCache, filepath.Base(url))
if _, err := os.Stat(path); err != nil {
fmt.Printf("Downloading new dependency: %s...\n", url)
out, err := os.Create(path)
if err != nil {
log.Fatalf("Failed to create dependency file: %v.", err)
}
res, err := http.Get(url)
if err != nil {
log.Fatalf("Failed to retrieve dependency: %v.", err)
}
defer res.Body.Close()
if _, err := io.Copy(out, res.Body); err != nil {
log.Fatalf("Failed to download dependency: %v", err)
}
out.Close()
fmt.Printf("New dependency cached: %s.\n", path)
} else {
fmt.Printf("Dependency already cached: %s.\n", path)
}
}
}
}
// Assemble the cross compilation environment and build options
config := &ConfigFlags{
Repository: flag.Args()[0],
Package: *srcPackage,
Remote: *srcRemote,
Branch: *srcBranch,
Prefix: *outPrefix,
Dependencies: *crossDeps,
Arguments: *crossArgs,
Targets: strings.Split(*targets, ","),
}
flags := &BuildFlags{
Verbose: *buildVerbose,
Steps: *buildSteps,
Race: *buildRace,
Tags: *buildTags,
LdFlags: *buildLdFlags,
Mode: *buildMode,
}
folder, err := os.Getwd()
if err != nil {
log.Fatalf("Failed to retrieve the working directory: %v.", err)
}
if *outFolder != "" {
folder, err = filepath.Abs(*outFolder)
if err != nil {
log.Fatalf("Failed to resolve destination path (%s): %v.", *outFolder, err)
}
}
// Execute the cross compilation, either in a container or the current system
if !xgoInXgo {
err = compile(image, config, flags, folder)
} else {
err = compileContained(config, flags, folder)
}
if err != nil {
log.Fatalf("Failed to cross compile package: %v.", err)
}
}
// Checks whether a docker installation can be found and is functional.
func checkDocker() error {
fmt.Println("Checking docker installation...")
if err := run(exec.Command("docker", "version")); err != nil {
return err
}
fmt.Println()
return nil
}
// Checks whether a required docker image is available locally.
func checkDockerImage(image string) (bool, error) {
fmt.Printf("Checking for required docker image %s... ", image)
out, err := exec.Command("docker", "images", "--no-trunc").Output()
if err != nil {
return false, err
}
return bytes.Contains(out, []byte(image)), nil
}
// Pulls an image from the docker registry.
func pullDockerImage(image string) error {
fmt.Printf("Pulling %s from docker registry...\n", image)
return run(exec.Command("docker", "pull", image))
}
// compile cross builds a requested package according to the given build specs
// using a specific docker cross compilation image.
func compile(image string, config *ConfigFlags, flags *BuildFlags, folder string) error {
// If a local build was requested, find the import path and mount all GOPATH sources
locals, mounts, paths := []string{}, []string{}, []string{}
if strings.HasPrefix(config.Repository, string(filepath.Separator)) || strings.HasPrefix(config.Repository, ".") {
// Resolve the repository import path from the file path
config.Repository = resolveImportPath(config.Repository)
// Iterate over all the local libs and export the mount points
if os.Getenv("GOPATH") == "" {
log.Fatalf("No $GOPATH is set or forwarded to xgo")
}
for _, gopath := range strings.Split(os.Getenv("GOPATH"), string(os.PathListSeparator)) {
// Since docker sandboxes volumes, resolve any symlinks manually
sources := filepath.Join(gopath, "src")
filepath.Walk(sources, func(path string, info os.FileInfo, err error) error {
// Skip any folders that errored out
if err != nil {
log.Printf("Failed to access GOPATH element %s: %v", path, err)
return nil
}
// Skip anything that's not a symlink
if info.Mode()&os.ModeSymlink == 0 {
return nil
}
// Resolve the symlink and skip if it's not a folder
target, err := filepath.EvalSymlinks(path)
if err != nil {
return nil
}
if info, err = os.Stat(target); err != nil || !info.IsDir() {
return nil
}
// Skip if the symlink points within GOPATH
if filepath.HasPrefix(target, sources) {
return nil
}
// Folder needs explicit mounting due to docker symlink security
locals = append(locals, target)
mounts = append(mounts, filepath.Join("/ext-go", strconv.Itoa(len(locals)), "src", strings.TrimPrefix(path, sources)))
paths = append(paths, filepath.Join("/ext-go", strconv.Itoa(len(locals))))
return nil
})
// Export the main mount point for this GOPATH entry
locals = append(locals, sources)
mounts = append(mounts, filepath.Join("/ext-go", strconv.Itoa(len(locals)), "src"))
paths = append(paths, filepath.Join("/ext-go", strconv.Itoa(len(locals))))
}
}
// Assemble and run the cross compilation command
fmt.Printf("Cross compiling %s...\n", config.Repository)
args := []string{
"run", "--rm",
"-v", folder + ":/build",
"-v", depsCache + ":/deps-cache:ro",
"-e", "REPO_REMOTE=" + config.Remote,
"-e", "REPO_BRANCH=" + config.Branch,
"-e", "PACK=" + config.Package,
"-e", "DEPS=" + config.Dependencies,
"-e", "ARGS=" + config.Arguments,
"-e", "OUT=" + config.Prefix,
"-e", fmt.Sprintf("FLAG_V=%v", flags.Verbose),
"-e", fmt.Sprintf("FLAG_X=%v", flags.Steps),
"-e", fmt.Sprintf("FLAG_RACE=%v", flags.Race),
"-e", fmt.Sprintf("FLAG_TAGS=%s", flags.Tags),
"-e", fmt.Sprintf("FLAG_LDFLAGS=%s", flags.LdFlags),
"-e", fmt.Sprintf("FLAG_BUILDMODE=%s", flags.Mode),
"-e", "TARGETS=" + strings.Replace(strings.Join(config.Targets, " "), "*", ".", -1),
}
for i := 0; i < len(locals); i++ {
args = append(args, []string{"-v", fmt.Sprintf("%s:%s:ro", locals[i], mounts[i])}...)
}
args = append(args, []string{"-e", "EXT_GOPATH=" + strings.Join(paths, ":")}...)
args = append(args, []string{image, config.Repository}...)
return run(exec.Command("docker", args...))
}
// compileContained cross builds a requested package according to the given build
// specs using the current system opposed to running in a container. This is meant
// to be used for cross compilation already from within an xgo image, allowing the
// inheritance and bundling of the root xgo images.
func compileContained(config *ConfigFlags, flags *BuildFlags, folder string) error {
// If a local build was requested, resolve the import path
local := strings.HasPrefix(config.Repository, string(filepath.Separator)) || strings.HasPrefix(config.Repository, ".")
if local {
config.Repository = resolveImportPath(config.Repository)
}
// Fine tune the original environment variables with those required by the build script
env := []string{
"REPO_REMOTE=" + config.Remote,
"REPO_BRANCH=" + config.Branch,
"PACK=" + config.Package,
"DEPS=" + config.Dependencies,
"ARGS=" + config.Arguments,
"OUT=" + config.Prefix,
fmt.Sprintf("FLAG_V=%v", flags.Verbose),
fmt.Sprintf("FLAG_X=%v", flags.Steps),
fmt.Sprintf("FLAG_RACE=%v", flags.Race),
fmt.Sprintf("FLAG_TAGS=%s", flags.Tags),
fmt.Sprintf("FLAG_LDFLAGS=%s", flags.LdFlags),
fmt.Sprintf("FLAG_BUILDMODE=%s", flags.Mode),
"TARGETS=" + strings.Replace(strings.Join(config.Targets, " "), "*", ".", -1),
}
if local {
env = append(env, "EXT_GOPATH=/non-existent-path-to-signal-local-build")
}
// Assemble and run the local cross compilation command
fmt.Printf("Cross compiling %s...\n", config.Repository)
cmd := exec.Command("/build.sh", config.Repository)
cmd.Env = append(os.Environ(), env...)
return run(cmd)
}
// resolveImportPath converts a package given by a relative path to a Go import
// path using the local GOPATH environment.
func resolveImportPath(path string) string {
abs, err := filepath.Abs(path)
if err != nil {
log.Fatalf("Failed to locate requested package: %v.", err)
}
stat, err := os.Stat(abs)
if err != nil || !stat.IsDir() {
log.Fatalf("Requested path invalid.")
}
pack, err := build.ImportDir(abs, build.FindOnly)
if err != nil {
log.Fatalf("Failed to resolve import path: %v.", err)
}
return pack.ImportPath
}
// Executes a command synchronously, redirecting its output to stdout.
func run(cmd *exec.Cmd) error {
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
return cmd.Run()
}

19
vendor/golang.org/x/lint/.travis.yml generated vendored Normal file
View File

@ -0,0 +1,19 @@
sudo: false
language: go
go:
- 1.10.x
- 1.11.x
- master
go_import_path: golang.org/x/lint
install:
- go get -t -v ./...
script:
- go test -v -race ./...
matrix:
allow_failures:
- go: master
fast_finish: true

15
vendor/golang.org/x/lint/CONTRIBUTING.md generated vendored Normal file
View File

@ -0,0 +1,15 @@
# Contributing to Golint
## Before filing an issue:
### Are you having trouble building golint?
Check you have the latest version of its dependencies. Run
```
go get -u golang.org/x/lint/golint
```
If you still have problems, consider searching for existing issues before filing a new issue.
## Before sending a pull request:
Have you understood the purpose of golint? Make sure to carefully read `README`.

27
vendor/golang.org/x/lint/LICENSE generated vendored Normal file
View File

@ -0,0 +1,27 @@
Copyright (c) 2013 The Go Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

86
vendor/golang.org/x/lint/README.md generated vendored Normal file
View File

@ -0,0 +1,86 @@
Golint is a linter for Go source code.
[![Build Status](https://travis-ci.org/golang/lint.svg?branch=master)](https://travis-ci.org/golang/lint)
## Installation
Golint requires a
[supported release of Go](https://golang.org/doc/devel/release.html#policy).
go get -u golang.org/x/lint/golint
## Usage
Invoke `golint` with one or more filenames, directories, or packages named
by their import path. Golint uses the same
[import path syntax](https://golang.org/cmd/go/#hdr-Import_path_syntax) as
the `go` command and therefore
also supports relative import paths like `./...`. Additionally the `...`
wildcard can be used as suffix on relative and absolute file paths to recurse
into them.
The output of this tool is a list of suggestions in Vim quickfix format,
which is accepted by lots of different editors.
## Purpose
Golint differs from gofmt. Gofmt reformats Go source code, whereas
golint prints out style mistakes.
Golint differs from govet. Govet is concerned with correctness, whereas
golint is concerned with coding style. Golint is in use at Google, and it
seeks to match the accepted style of the open source Go project.
The suggestions made by golint are exactly that: suggestions.
Golint is not perfect, and has both false positives and false negatives.
Do not treat its output as a gold standard. We will not be adding pragmas
or other knobs to suppress specific warnings, so do not expect or require
code to be completely "lint-free".
In short, this tool is not, and will never be, trustworthy enough for its
suggestions to be enforced automatically, for example as part of a build process.
Golint makes suggestions for many of the mechanically checkable items listed in
[Effective Go](https://golang.org/doc/effective_go.html) and the
[CodeReviewComments wiki page](https://golang.org/wiki/CodeReviewComments).
## Scope
Golint is meant to carry out the stylistic conventions put forth in
[Effective Go](https://golang.org/doc/effective_go.html) and
[CodeReviewComments](https://golang.org/wiki/CodeReviewComments).
Changes that are not aligned with those documents will not be considered.
## Contributions
Contributions to this project are welcome provided they are [in scope](#scope),
though please send mail before starting work on anything major.
Contributors retain their copyright, so we need you to fill out
[a short form](https://developers.google.com/open-source/cla/individual)
before we can accept your contribution.
## Vim
Add this to your ~/.vimrc:
set rtp+=$GOPATH/src/golang.org/x/lint/misc/vim
If you have multiple entries in your GOPATH, replace `$GOPATH` with the right value.
Running `:Lint` will run golint on the current file and populate the quickfix list.
Optionally, add this to your `~/.vimrc` to automatically run `golint` on `:w`
autocmd BufWritePost,FileWritePost *.go execute 'Lint' | cwindow
## Emacs
Add this to your `.emacs` file:
(add-to-list 'load-path (concat (getenv "GOPATH") "/src/github.com/golang/lint/misc/emacs"))
(require 'golint)
If you have multiple entries in your GOPATH, replace `$GOPATH` with the right value.
Running M-x golint will run golint on the current file.
For more usage, see [Compilation-Mode](http://www.gnu.org/software/emacs/manual/html_node/emacs/Compilation-Mode.html).

159
vendor/golang.org/x/lint/golint/golint.go generated vendored Normal file
View File

@ -0,0 +1,159 @@
// Copyright (c) 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
// golint lints the Go source files named on its command line.
package main
import (
"flag"
"fmt"
"go/build"
"io/ioutil"
"os"
"path/filepath"
"strings"
"golang.org/x/lint"
)
var (
minConfidence = flag.Float64("min_confidence", 0.8, "minimum confidence of a problem to print it")
setExitStatus = flag.Bool("set_exit_status", false, "set exit status to 1 if any issues are found")
suggestions int
)
func usage() {
fmt.Fprintf(os.Stderr, "Usage of %s:\n", os.Args[0])
fmt.Fprintf(os.Stderr, "\tgolint [flags] # runs on package in current directory\n")
fmt.Fprintf(os.Stderr, "\tgolint [flags] [packages]\n")
fmt.Fprintf(os.Stderr, "\tgolint [flags] [directories] # where a '/...' suffix includes all sub-directories\n")
fmt.Fprintf(os.Stderr, "\tgolint [flags] [files] # all must belong to a single package\n")
fmt.Fprintf(os.Stderr, "Flags:\n")
flag.PrintDefaults()
}
func main() {
flag.Usage = usage
flag.Parse()
if flag.NArg() == 0 {
lintDir(".")
} else {
// dirsRun, filesRun, and pkgsRun indicate whether golint is applied to
// directory, file or package targets. The distinction affects which
// checks are run. It is not valid to mix target types.
var dirsRun, filesRun, pkgsRun int
var args []string
for _, arg := range flag.Args() {
if strings.HasSuffix(arg, "/...") && isDir(arg[:len(arg)-len("/...")]) {
dirsRun = 1
for _, dirname := range allPackagesInFS(arg) {
args = append(args, dirname)
}
} else if isDir(arg) {
dirsRun = 1
args = append(args, arg)
} else if exists(arg) {
filesRun = 1
args = append(args, arg)
} else {
pkgsRun = 1
args = append(args, arg)
}
}
if dirsRun+filesRun+pkgsRun != 1 {
usage()
os.Exit(2)
}
switch {
case dirsRun == 1:
for _, dir := range args {
lintDir(dir)
}
case filesRun == 1:
lintFiles(args...)
case pkgsRun == 1:
for _, pkg := range importPaths(args) {
lintPackage(pkg)
}
}
}
if *setExitStatus && suggestions > 0 {
fmt.Fprintf(os.Stderr, "Found %d lint suggestions; failing.\n", suggestions)
os.Exit(1)
}
}
func isDir(filename string) bool {
fi, err := os.Stat(filename)
return err == nil && fi.IsDir()
}
func exists(filename string) bool {
_, err := os.Stat(filename)
return err == nil
}
func lintFiles(filenames ...string) {
files := make(map[string][]byte)
for _, filename := range filenames {
src, err := ioutil.ReadFile(filename)
if err != nil {
fmt.Fprintln(os.Stderr, err)
continue
}
files[filename] = src
}
l := new(lint.Linter)
ps, err := l.LintFiles(files)
if err != nil {
fmt.Fprintf(os.Stderr, "%v\n", err)
return
}
for _, p := range ps {
if p.Confidence >= *minConfidence {
fmt.Printf("%v: %s\n", p.Position, p.Text)
suggestions++
}
}
}
func lintDir(dirname string) {
pkg, err := build.ImportDir(dirname, 0)
lintImportedPackage(pkg, err)
}
func lintPackage(pkgname string) {
pkg, err := build.Import(pkgname, ".", 0)
lintImportedPackage(pkg, err)
}
func lintImportedPackage(pkg *build.Package, err error) {
if err != nil {
if _, nogo := err.(*build.NoGoError); nogo {
// Don't complain if the failure is due to no Go source files.
return
}
fmt.Fprintln(os.Stderr, err)
return
}
var files []string
files = append(files, pkg.GoFiles...)
files = append(files, pkg.CgoFiles...)
files = append(files, pkg.TestGoFiles...)
if pkg.Dir != "." {
for i, f := range files {
files[i] = filepath.Join(pkg.Dir, f)
}
}
// TODO(dsymonds): Do foo_test too (pkg.XTestGoFiles)
lintFiles(files...)
}

309
vendor/golang.org/x/lint/golint/import.go generated vendored Normal file
View File

@ -0,0 +1,309 @@
package main
/*
This file holds a direct copy of the import path matching code of
https://github.com/golang/go/blob/master/src/cmd/go/main.go. It can be
replaced when https://golang.org/issue/8768 is resolved.
It has been updated to follow upstream changes in a few ways.
*/
import (
"fmt"
"go/build"
"log"
"os"
"path"
"path/filepath"
"regexp"
"runtime"
"strings"
)
var (
buildContext = build.Default
goroot = filepath.Clean(runtime.GOROOT())
gorootSrc = filepath.Join(goroot, "src")
)
// importPathsNoDotExpansion returns the import paths to use for the given
// command line, but it does no ... expansion.
func importPathsNoDotExpansion(args []string) []string {
if len(args) == 0 {
return []string{"."}
}
var out []string
for _, a := range args {
// Arguments are supposed to be import paths, but
// as a courtesy to Windows developers, rewrite \ to /
// in command-line arguments. Handles .\... and so on.
if filepath.Separator == '\\' {
a = strings.Replace(a, `\`, `/`, -1)
}
// Put argument in canonical form, but preserve leading ./.
if strings.HasPrefix(a, "./") {
a = "./" + path.Clean(a)
if a == "./." {
a = "."
}
} else {
a = path.Clean(a)
}
if a == "all" || a == "std" {
out = append(out, allPackages(a)...)
continue
}
out = append(out, a)
}
return out
}
// importPaths returns the import paths to use for the given command line.
func importPaths(args []string) []string {
args = importPathsNoDotExpansion(args)
var out []string
for _, a := range args {
if strings.Contains(a, "...") {
if build.IsLocalImport(a) {
out = append(out, allPackagesInFS(a)...)
} else {
out = append(out, allPackages(a)...)
}
continue
}
out = append(out, a)
}
return out
}
// matchPattern(pattern)(name) reports whether
// name matches pattern. Pattern is a limited glob
// pattern in which '...' means 'any string' and there
// is no other special syntax.
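// For example, matchPattern("net/...") matches both "net" and "net/http",
// while matchPattern("net") matches only "net".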
func matchPattern(pattern string) func(name string) bool {
re := regexp.QuoteMeta(pattern)
re = strings.Replace(re, `\.\.\.`, `.*`, -1)
// Special case: foo/... matches foo too.
if strings.HasSuffix(re, `/.*`) {
re = re[:len(re)-len(`/.*`)] + `(/.*)?`
}
reg := regexp.MustCompile(`^` + re + `$`)
return func(name string) bool {
return reg.MatchString(name)
}
}
// hasPathPrefix reports whether the path s begins with the
// elements in prefix.
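// For example, hasPathPrefix("net/http", "net") is true, but
// hasPathPrefix("network", "net") is false because "net" is not a complete
// path element of "network".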
func hasPathPrefix(s, prefix string) bool {
switch {
default:
return false
case len(s) == len(prefix):
return s == prefix
case len(s) > len(prefix):
if prefix != "" && prefix[len(prefix)-1] == '/' {
return strings.HasPrefix(s, prefix)
}
return s[len(prefix)] == '/' && s[:len(prefix)] == prefix
}
}
// treeCanMatchPattern(pattern)(name) reports whether
// name or children of name can possibly match pattern.
// Pattern is the same limited glob accepted by matchPattern.
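// For example, treeCanMatchPattern("net/http")("net") is true, since a
// descendant of "net" may match "net/http".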
func treeCanMatchPattern(pattern string) func(name string) bool {
wildCard := false
if i := strings.Index(pattern, "..."); i >= 0 {
wildCard = true
pattern = pattern[:i]
}
return func(name string) bool {
return len(name) <= len(pattern) && hasPathPrefix(pattern, name) ||
wildCard && strings.HasPrefix(name, pattern)
}
}
// allPackages returns all the packages that can be found
// under the $GOPATH directories and $GOROOT matching pattern.
// The pattern is either "all" (all packages), "std" (standard packages)
// or a path including "...".
func allPackages(pattern string) []string {
pkgs := matchPackages(pattern)
if len(pkgs) == 0 {
fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern)
}
return pkgs
}
func matchPackages(pattern string) []string {
match := func(string) bool { return true }
treeCanMatch := func(string) bool { return true }
if pattern != "all" && pattern != "std" {
match = matchPattern(pattern)
treeCanMatch = treeCanMatchPattern(pattern)
}
have := map[string]bool{
"builtin": true, // ignore pseudo-package that exists only for documentation
}
if !buildContext.CgoEnabled {
have["runtime/cgo"] = true // ignore during walk
}
var pkgs []string
// Commands
cmd := filepath.Join(goroot, "src/cmd") + string(filepath.Separator)
filepath.Walk(cmd, func(path string, fi os.FileInfo, err error) error {
if err != nil || !fi.IsDir() || path == cmd {
return nil
}
name := path[len(cmd):]
if !treeCanMatch(name) {
return filepath.SkipDir
}
// Commands are all in cmd/, not in subdirectories.
if strings.Contains(name, string(filepath.Separator)) {
return filepath.SkipDir
}
// We use, e.g., cmd/gofmt as the pseudo import path for gofmt.
name = "cmd/" + name
if have[name] {
return nil
}
have[name] = true
if !match(name) {
return nil
}
_, err = buildContext.ImportDir(path, 0)
if err != nil {
if _, noGo := err.(*build.NoGoError); !noGo {
log.Print(err)
}
return nil
}
pkgs = append(pkgs, name)
return nil
})
for _, src := range buildContext.SrcDirs() {
if (pattern == "std" || pattern == "cmd") && src != gorootSrc {
continue
}
src = filepath.Clean(src) + string(filepath.Separator)
root := src
if pattern == "cmd" {
root += "cmd" + string(filepath.Separator)
}
filepath.Walk(root, func(path string, fi os.FileInfo, err error) error {
if err != nil || !fi.IsDir() || path == src {
return nil
}
// Avoid .foo, _foo, and testdata directory trees.
_, elem := filepath.Split(path)
if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" {
return filepath.SkipDir
}
name := filepath.ToSlash(path[len(src):])
if pattern == "std" && (strings.Contains(name, ".") || name == "cmd") {
// The name "std" is only the standard library.
// If the name is cmd, it's the root of the command tree.
return filepath.SkipDir
}
if !treeCanMatch(name) {
return filepath.SkipDir
}
if have[name] {
return nil
}
have[name] = true
if !match(name) {
return nil
}
_, err = buildContext.ImportDir(path, 0)
if err != nil {
if _, noGo := err.(*build.NoGoError); noGo {
return nil
}
}
pkgs = append(pkgs, name)
return nil
})
}
return pkgs
}
// allPackagesInFS is like allPackages but is passed a pattern
// beginning ./ or ../, meaning it should scan the tree rooted
// at the given directory. There are ... in the pattern too.
func allPackagesInFS(pattern string) []string {
pkgs := matchPackagesInFS(pattern)
if len(pkgs) == 0 {
fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern)
}
return pkgs
}
func matchPackagesInFS(pattern string) []string {
// Find directory to begin the scan.
// Could be smarter but this one optimization
// is enough for now, since ... is usually at the
// end of a path.
i := strings.Index(pattern, "...")
dir, _ := path.Split(pattern[:i])
// pattern begins with ./ or ../.
// path.Clean will discard the ./ but not the ../.
// We need to preserve the ./ for pattern matching
// and in the returned import paths.
prefix := ""
if strings.HasPrefix(pattern, "./") {
prefix = "./"
}
match := matchPattern(pattern)
var pkgs []string
filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error {
if err != nil || !fi.IsDir() {
return nil
}
if path == dir {
// filepath.Walk starts at dir and recurses. For the recursive case,
// the path is the result of filepath.Join, which calls filepath.Clean.
// The initial case is not Cleaned, though, so we do this explicitly.
//
// This converts a path like "./io/" to "io". Without this step, running
// "cd $GOROOT/src/pkg; go list ./io/..." would incorrectly skip the io
// package, because prepending the prefix "./" to the unclean path would
// result in "././io", and match("././io") returns false.
path = filepath.Clean(path)
}
// Avoid .foo, _foo, and testdata directory trees, but do not avoid "." or "..".
_, elem := filepath.Split(path)
dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".."
if dot || strings.HasPrefix(elem, "_") || elem == "testdata" {
return filepath.SkipDir
}
name := prefix + filepath.ToSlash(path)
if !match(name) {
return nil
}
if _, err = build.ImportDir(path, 0); err != nil {
if _, noGo := err.(*build.NoGoError); !noGo {
log.Print(err)
}
return nil
}
pkgs = append(pkgs, name)
return nil
})
return pkgs
}

13
vendor/golang.org/x/lint/golint/importcomment.go generated vendored Normal file
View File

@ -0,0 +1,13 @@
// Copyright (c) 2018 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
// +build go1.12
// Require use of the correct import path only for Go 1.12+ users, so
// any breakages coincide with people updating their CI configs or
// whatnot.
package main // import "golang.org/x/lint/golint"

1671
vendor/golang.org/x/lint/lint.go generated vendored Normal file

File diff suppressed because it is too large Load Diff

3
vendor/golang.org/x/tools/AUTHORS generated vendored Normal file
View File

@ -0,0 +1,3 @@
# This source code refers to The Go Authors for copyright purposes.
# The master list of authors is in the main Go distribution,
# visible at http://tip.golang.org/AUTHORS.

3
vendor/golang.org/x/tools/CONTRIBUTORS generated vendored Normal file
View File

@ -0,0 +1,3 @@
# This source code was written by the Go contributors.
# The master list of contributors is in the main Go distribution,
# visible at http://tip.golang.org/CONTRIBUTORS.

27
vendor/golang.org/x/tools/LICENSE generated vendored Normal file
View File

@ -0,0 +1,27 @@
Copyright (c) 2009 The Go Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

22
vendor/golang.org/x/tools/PATENTS generated vendored Normal file
View File

@ -0,0 +1,22 @@
Additional IP Rights Grant (Patents)
"This implementation" means the copyrightable works distributed by
Google as part of the Go project.
Google hereby grants to You a perpetual, worldwide, non-exclusive,
no-charge, royalty-free, irrevocable (except as stated in this section)
patent license to make, have made, use, offer to sell, sell, import,
transfer and otherwise run, modify and propagate the contents of this
implementation of Go, where such license applies only to those patent
claims, both currently owned or controlled by Google and acquired in
the future, licensable by Google that are necessarily infringed by this
implementation of Go. This grant does not include claims that would be
infringed only as a consequence of further modification of this
implementation. If you or your agent or exclusive licensee institute or
order or agree to the institution of patent litigation against any
entity (including a cross-claim or counterclaim in a lawsuit) alleging
that this implementation of Go or any code incorporated within this
implementation of Go constitutes direct or contributory patent
infringement, or inducement of patent infringement, then any patent
rights granted to you under this License for this implementation of Go
shall terminate as of the date such litigation is filed.

627
vendor/golang.org/x/tools/go/ast/astutil/enclosing.go generated vendored Normal file
View File

@ -0,0 +1,627 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package astutil
// This file defines utilities for working with source positions.
import (
"fmt"
"go/ast"
"go/token"
"sort"
)
// PathEnclosingInterval returns the node that encloses the source
// interval [start, end), and all its ancestors up to the AST root.
//
// The definition of "enclosing" used by this function considers
// additional whitespace abutting a node to be enclosed by it.
// In this example:
//
// z := x + y // add them
// <-A->
// <----B----->
//
// the ast.BinaryExpr(+) node is considered to enclose interval B
// even though its [Pos()..End()) is actually only interval A.
// This behaviour makes user interfaces more tolerant of imperfect
// input.
//
// This function treats tokens as nodes, though they are not included
// in the result. e.g. PathEnclosingInterval("+") returns the
// enclosing ast.BinaryExpr("x + y").
//
// If start==end, the 1-char interval following start is used instead.
//
// The 'exact' result is true if the interval contains only path[0]
// and perhaps some adjacent whitespace. It is false if the interval
// overlaps multiple children of path[0], or if it contains only
// interior whitespace of path[0].
// In this example:
//
// z := x + y // add them
// <--C--> <---E-->
// ^
// D
//
// intervals C, D and E are inexact. C is contained by the
// z-assignment statement, because it spans three of its children (:=,
// x, +). So too is the 1-char interval D, because it contains only
// interior whitespace of the assignment. E is considered interior
// whitespace of the BlockStmt containing the assignment.
//
// Precondition: [start, end) both lie within the same file as root.
// TODO(adonovan): return (nil, false) in this case and remove precond.
// Requires FileSet; see loader.tokenFileContainsPos.
//
// Postcondition: path is never nil; it always contains at least 'root'.
//
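// Example (illustrative):
//
//	path, exact := PathEnclosingInterval(file, start, end)
//	innermost := path[0] // deepest enclosing node; path[len(path)-1] is the *ast.File
//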
func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) {
// fmt.Printf("EnclosingInterval %d %d\n", start, end) // debugging
// Precondition: node.[Pos..End) and adjoining whitespace contain [start, end).
var visit func(node ast.Node) bool
visit = func(node ast.Node) bool {
path = append(path, node)
nodePos := node.Pos()
nodeEnd := node.End()
// fmt.Printf("visit(%T, %d, %d)\n", node, nodePos, nodeEnd) // debugging
// Intersect [start, end) with interval of node.
if start < nodePos {
start = nodePos
}
if end > nodeEnd {
end = nodeEnd
}
// Find sole child that contains [start, end).
children := childrenOf(node)
l := len(children)
for i, child := range children {
// [childPos, childEnd) is unaugmented interval of child.
childPos := child.Pos()
childEnd := child.End()
// [augPos, augEnd) is whitespace-augmented interval of child.
augPos := childPos
augEnd := childEnd
if i > 0 {
augPos = children[i-1].End() // start of preceding whitespace
}
if i < l-1 {
nextChildPos := children[i+1].Pos()
// Does [start, end) lie between child and next child?
if start >= augEnd && end <= nextChildPos {
return false // inexact match
}
augEnd = nextChildPos // end of following whitespace
}
// fmt.Printf("\tchild %d: [%d..%d)\tcontains interval [%d..%d)?\n",
// i, augPos, augEnd, start, end) // debugging
// Does augmented child strictly contain [start, end)?
if augPos <= start && end <= augEnd {
_, isToken := child.(tokenNode)
return isToken || visit(child)
}
// Does [start, end) overlap multiple children?
// i.e. left-augmented child contains start
// but LR-augmented child does not contain end.
if start < childEnd && end > augEnd {
break
}
}
// No single child contained [start, end),
// so node is the result. Is it exact?
// (It's tempting to put this condition before the
// child loop, but it gives the wrong result in the
// case where a node (e.g. ExprStmt) and its sole
// child have equal intervals.)
if start == nodePos && end == nodeEnd {
return true // exact match
}
return false // inexact: overlaps multiple children
}
if start > end {
start, end = end, start
}
if start < root.End() && end > root.Pos() {
if start == end {
end = start + 1 // empty interval => interval of size 1
}
exact = visit(root)
// Reverse the path:
for i, l := 0, len(path); i < l/2; i++ {
path[i], path[l-1-i] = path[l-1-i], path[i]
}
} else {
// Selection lies within whitespace preceding the
// first (or following the last) declaration in the file.
// The result nonetheless always includes the ast.File.
path = append(path, root)
}
return
}
// tokenNode is a dummy implementation of ast.Node for a single token.
// Such nodes are used transiently by PathEnclosingInterval but never
// escape this package.
//
type tokenNode struct {
pos token.Pos
end token.Pos
}
func (n tokenNode) Pos() token.Pos {
return n.pos
}
func (n tokenNode) End() token.Pos {
return n.end
}
func tok(pos token.Pos, len int) ast.Node {
return tokenNode{pos, pos + token.Pos(len)}
}
// childrenOf returns the direct non-nil children of ast.Node n.
// It may include fake ast.Node implementations for bare tokens.
// It is not safe to call (e.g.) ast.Walk on such nodes.
//
func childrenOf(n ast.Node) []ast.Node {
var children []ast.Node
// First add nodes for all true subtrees.
ast.Inspect(n, func(node ast.Node) bool {
if node == n { // push n
return true // recur
}
if node != nil { // push child
children = append(children, node)
}
return false // no recursion
})
// Then add fake Nodes for bare tokens.
switch n := n.(type) {
case *ast.ArrayType:
children = append(children,
tok(n.Lbrack, len("[")),
tok(n.Elt.End(), len("]")))
case *ast.AssignStmt:
children = append(children,
tok(n.TokPos, len(n.Tok.String())))
case *ast.BasicLit:
children = append(children,
tok(n.ValuePos, len(n.Value)))
case *ast.BinaryExpr:
children = append(children, tok(n.OpPos, len(n.Op.String())))
case *ast.BlockStmt:
children = append(children,
tok(n.Lbrace, len("{")),
tok(n.Rbrace, len("}")))
case *ast.BranchStmt:
children = append(children,
tok(n.TokPos, len(n.Tok.String())))
case *ast.CallExpr:
children = append(children,
tok(n.Lparen, len("(")),
tok(n.Rparen, len(")")))
if n.Ellipsis != 0 {
children = append(children, tok(n.Ellipsis, len("...")))
}
case *ast.CaseClause:
if n.List == nil {
children = append(children,
tok(n.Case, len("default")))
} else {
children = append(children,
tok(n.Case, len("case")))
}
children = append(children, tok(n.Colon, len(":")))
case *ast.ChanType:
switch n.Dir {
case ast.RECV:
children = append(children, tok(n.Begin, len("<-chan")))
case ast.SEND:
children = append(children, tok(n.Begin, len("chan<-")))
case ast.RECV | ast.SEND:
children = append(children, tok(n.Begin, len("chan")))
}
case *ast.CommClause:
if n.Comm == nil {
children = append(children,
tok(n.Case, len("default")))
} else {
children = append(children,
tok(n.Case, len("case")))
}
children = append(children, tok(n.Colon, len(":")))
case *ast.Comment:
// nop
case *ast.CommentGroup:
// nop
case *ast.CompositeLit:
children = append(children,
tok(n.Lbrace, len("{")),
tok(n.Rbrace, len("}")))
case *ast.DeclStmt:
// nop
case *ast.DeferStmt:
children = append(children,
tok(n.Defer, len("defer")))
case *ast.Ellipsis:
children = append(children,
tok(n.Ellipsis, len("...")))
case *ast.EmptyStmt:
// nop
case *ast.ExprStmt:
// nop
case *ast.Field:
// TODO(adonovan): Field.{Doc,Comment,Tag}?
case *ast.FieldList:
children = append(children,
tok(n.Opening, len("(")),
tok(n.Closing, len(")")))
case *ast.File:
// TODO test: Doc
children = append(children,
tok(n.Package, len("package")))
case *ast.ForStmt:
children = append(children,
tok(n.For, len("for")))
case *ast.FuncDecl:
// TODO(adonovan): FuncDecl.Comment?
// Uniquely, FuncDecl breaks the invariant that
// preorder traversal yields tokens in lexical order:
// in fact, FuncDecl.Recv precedes FuncDecl.Type.Func.
//
// As a workaround, we inline the case for FuncType
// here and order things correctly.
//
children = nil // discard ast.Walk(FuncDecl) info subtrees
children = append(children, tok(n.Type.Func, len("func")))
if n.Recv != nil {
children = append(children, n.Recv)
}
children = append(children, n.Name)
if n.Type.Params != nil {
children = append(children, n.Type.Params)
}
if n.Type.Results != nil {
children = append(children, n.Type.Results)
}
if n.Body != nil {
children = append(children, n.Body)
}
case *ast.FuncLit:
// nop
case *ast.FuncType:
if n.Func != 0 {
children = append(children,
tok(n.Func, len("func")))
}
case *ast.GenDecl:
children = append(children,
tok(n.TokPos, len(n.Tok.String())))
if n.Lparen != 0 {
children = append(children,
tok(n.Lparen, len("(")),
tok(n.Rparen, len(")")))
}
case *ast.GoStmt:
children = append(children,
tok(n.Go, len("go")))
case *ast.Ident:
children = append(children,
tok(n.NamePos, len(n.Name)))
case *ast.IfStmt:
children = append(children,
tok(n.If, len("if")))
case *ast.ImportSpec:
// TODO(adonovan): ImportSpec.{Doc,EndPos}?
case *ast.IncDecStmt:
children = append(children,
tok(n.TokPos, len(n.Tok.String())))
case *ast.IndexExpr:
children = append(children,
tok(n.Lbrack, len("[")),
tok(n.Rbrack, len("]")))
case *ast.InterfaceType:
children = append(children,
tok(n.Interface, len("interface")))
case *ast.KeyValueExpr:
children = append(children,
tok(n.Colon, len(":")))
case *ast.LabeledStmt:
children = append(children,
tok(n.Colon, len(":")))
case *ast.MapType:
children = append(children,
tok(n.Map, len("map")))
case *ast.ParenExpr:
children = append(children,
tok(n.Lparen, len("(")),
tok(n.Rparen, len(")")))
case *ast.RangeStmt:
children = append(children,
tok(n.For, len("for")),
tok(n.TokPos, len(n.Tok.String())))
case *ast.ReturnStmt:
children = append(children,
tok(n.Return, len("return")))
case *ast.SelectStmt:
children = append(children,
tok(n.Select, len("select")))
case *ast.SelectorExpr:
// nop
case *ast.SendStmt:
children = append(children,
tok(n.Arrow, len("<-")))
case *ast.SliceExpr:
children = append(children,
tok(n.Lbrack, len("[")),
tok(n.Rbrack, len("]")))
case *ast.StarExpr:
children = append(children, tok(n.Star, len("*")))
case *ast.StructType:
children = append(children, tok(n.Struct, len("struct")))
case *ast.SwitchStmt:
children = append(children, tok(n.Switch, len("switch")))
case *ast.TypeAssertExpr:
children = append(children,
tok(n.Lparen-1, len(".")),
tok(n.Lparen, len("(")),
tok(n.Rparen, len(")")))
case *ast.TypeSpec:
// TODO(adonovan): TypeSpec.{Doc,Comment}?
case *ast.TypeSwitchStmt:
children = append(children, tok(n.Switch, len("switch")))
case *ast.UnaryExpr:
children = append(children, tok(n.OpPos, len(n.Op.String())))
case *ast.ValueSpec:
// TODO(adonovan): ValueSpec.{Doc,Comment}?
case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt:
// nop
}
// TODO(adonovan): opt: merge the logic of ast.Inspect() into
// the switch above so we can make interleaved callbacks for
// both Nodes and Tokens in the right order and avoid the need
// to sort.
sort.Sort(byPos(children))
return children
}
type byPos []ast.Node
func (sl byPos) Len() int {
return len(sl)
}
func (sl byPos) Less(i, j int) bool {
return sl[i].Pos() < sl[j].Pos()
}
func (sl byPos) Swap(i, j int) {
sl[i], sl[j] = sl[j], sl[i]
}
// NodeDescription returns a description of the concrete type of n suitable
// for a user interface.
//
// TODO(adonovan): in some cases (e.g. Field, FieldList, Ident,
// StarExpr) we could be much more specific given the path to the AST
// root. Perhaps we should do that.
//
func NodeDescription(n ast.Node) string {
switch n := n.(type) {
case *ast.ArrayType:
return "array type"
case *ast.AssignStmt:
return "assignment"
case *ast.BadDecl:
return "bad declaration"
case *ast.BadExpr:
return "bad expression"
case *ast.BadStmt:
return "bad statement"
case *ast.BasicLit:
return "basic literal"
case *ast.BinaryExpr:
return fmt.Sprintf("binary %s operation", n.Op)
case *ast.BlockStmt:
return "block"
case *ast.BranchStmt:
switch n.Tok {
case token.BREAK:
return "break statement"
case token.CONTINUE:
return "continue statement"
case token.GOTO:
return "goto statement"
case token.FALLTHROUGH:
return "fall-through statement"
}
case *ast.CallExpr:
if len(n.Args) == 1 && !n.Ellipsis.IsValid() {
return "function call (or conversion)"
}
return "function call"
case *ast.CaseClause:
return "case clause"
case *ast.ChanType:
return "channel type"
case *ast.CommClause:
return "communication clause"
case *ast.Comment:
return "comment"
case *ast.CommentGroup:
return "comment group"
case *ast.CompositeLit:
return "composite literal"
case *ast.DeclStmt:
return NodeDescription(n.Decl) + " statement"
case *ast.DeferStmt:
return "defer statement"
case *ast.Ellipsis:
return "ellipsis"
case *ast.EmptyStmt:
return "empty statement"
case *ast.ExprStmt:
return "expression statement"
case *ast.Field:
// Can be any of these:
// struct {x, y int} -- struct field(s)
// struct {T} -- anon struct field
// interface {I} -- interface embedding
// interface {f()} -- interface method
// func (A) func(B) C -- receiver, param(s), result(s)
return "field/method/parameter"
case *ast.FieldList:
return "field/method/parameter list"
case *ast.File:
return "source file"
case *ast.ForStmt:
return "for loop"
case *ast.FuncDecl:
return "function declaration"
case *ast.FuncLit:
return "function literal"
case *ast.FuncType:
return "function type"
case *ast.GenDecl:
switch n.Tok {
case token.IMPORT:
return "import declaration"
case token.CONST:
return "constant declaration"
case token.TYPE:
return "type declaration"
case token.VAR:
return "variable declaration"
}
case *ast.GoStmt:
return "go statement"
case *ast.Ident:
return "identifier"
case *ast.IfStmt:
return "if statement"
case *ast.ImportSpec:
return "import specification"
case *ast.IncDecStmt:
if n.Tok == token.INC {
return "increment statement"
}
return "decrement statement"
case *ast.IndexExpr:
return "index expression"
case *ast.InterfaceType:
return "interface type"
case *ast.KeyValueExpr:
return "key/value association"
case *ast.LabeledStmt:
return "statement label"
case *ast.MapType:
return "map type"
case *ast.Package:
return "package"
case *ast.ParenExpr:
return "parenthesized " + NodeDescription(n.X)
case *ast.RangeStmt:
return "range loop"
case *ast.ReturnStmt:
return "return statement"
case *ast.SelectStmt:
return "select statement"
case *ast.SelectorExpr:
return "selector"
case *ast.SendStmt:
return "channel send"
case *ast.SliceExpr:
return "slice expression"
case *ast.StarExpr:
return "*-operation" // load/store expr or pointer type
case *ast.StructType:
return "struct type"
case *ast.SwitchStmt:
return "switch statement"
case *ast.TypeAssertExpr:
return "type assertion"
case *ast.TypeSpec:
return "type specification"
case *ast.TypeSwitchStmt:
return "type switch"
case *ast.UnaryExpr:
return fmt.Sprintf("unary %s operation", n.Op)
case *ast.ValueSpec:
return "value specification"
}
panic(fmt.Sprintf("unexpected node type: %T", n))
}
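A minimal usage sketch for PathEnclosingInterval and NodeDescription (not part of the vendored file): it parses a small snippet, converts the byte offset of "x + y" into token.Pos values, and prints the enclosing path from the innermost node outwards. The snippet and the file name "p.go" are invented for illustration.

package main

import (
	"fmt"
	"go/parser"
	"go/token"
	"strings"

	"golang.org/x/tools/go/ast/astutil"
)

func main() {
	const src = "package p\n\nfunc add(x, y int) int { return x + y }\n"
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	// Convert the byte offset of "x + y" into token.Pos values.
	off := strings.Index(src, "x + y")
	tf := fset.File(f.Pos())
	start, end := tf.Pos(off), tf.Pos(off+len("x + y"))
	// path[0] is the innermost enclosing node (*ast.BinaryExpr here),
	// followed by its ancestors up to *ast.File.
	path, exact := astutil.PathEnclosingInterval(f, start, end)
	for _, n := range path {
		fmt.Printf("%T: %s\n", n, astutil.NodeDescription(n))
	}
	fmt.Println("exact:", exact)
}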

481
vendor/golang.org/x/tools/go/ast/astutil/imports.go generated vendored Normal file
View File

@ -0,0 +1,481 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package astutil contains common utilities for working with the Go AST.
package astutil // import "golang.org/x/tools/go/ast/astutil"
import (
"fmt"
"go/ast"
"go/token"
"strconv"
"strings"
)
// AddImport adds the import path to the file f, if absent.
func AddImport(fset *token.FileSet, f *ast.File, path string) (added bool) {
return AddNamedImport(fset, f, "", path)
}
// AddNamedImport adds the import with the given name and path to the file f, if absent.
// If name is not empty, it is used to rename the import.
//
// For example, calling
// AddNamedImport(fset, f, "pathpkg", "path")
// adds
// import pathpkg "path"
func AddNamedImport(fset *token.FileSet, f *ast.File, name, path string) (added bool) {
if imports(f, name, path) {
return false
}
newImport := &ast.ImportSpec{
Path: &ast.BasicLit{
Kind: token.STRING,
Value: strconv.Quote(path),
},
}
if name != "" {
newImport.Name = &ast.Ident{Name: name}
}
// Find an import decl to add to.
// The goal is to find an existing import
// whose import path has the longest shared
// prefix with path.
var (
bestMatch = -1 // length of longest shared prefix
lastImport = -1 // index in f.Decls of the file's final import decl
impDecl *ast.GenDecl // import decl containing the best match
impIndex = -1 // spec index in impDecl containing the best match
isThirdPartyPath = isThirdParty(path)
)
for i, decl := range f.Decls {
gen, ok := decl.(*ast.GenDecl)
if ok && gen.Tok == token.IMPORT {
lastImport = i
// Do not add to import "C", to avoid disrupting the
// association with its doc comment, breaking cgo.
if declImports(gen, "C") {
continue
}
// Match an empty import decl if that's all that is available.
if len(gen.Specs) == 0 && bestMatch == -1 {
impDecl = gen
}
// Compute longest shared prefix with imports in this group and find best
// matched import spec.
// 1. Always prefer import spec with longest shared prefix.
// 2. While match length is 0,
// - for stdlib package: prefer first import spec.
// - for third party package: prefer first third party import spec.
// We cannot use last import spec as best match for third party package
// because grouped imports are usually placed last by goimports -local
// flag.
// See issue #19190.
seenAnyThirdParty := false
for j, spec := range gen.Specs {
impspec := spec.(*ast.ImportSpec)
p := importPath(impspec)
n := matchLen(p, path)
if n > bestMatch || (bestMatch == 0 && !seenAnyThirdParty && isThirdPartyPath) {
bestMatch = n
impDecl = gen
impIndex = j
}
seenAnyThirdParty = seenAnyThirdParty || isThirdParty(p)
}
}
}
// If no import decl found, add one after the last import.
if impDecl == nil {
impDecl = &ast.GenDecl{
Tok: token.IMPORT,
}
if lastImport >= 0 {
impDecl.TokPos = f.Decls[lastImport].End()
} else {
// There are no existing imports.
// Our new import, preceded by a blank line, goes after the package declaration
// and after the comment, if any, that starts on the same line as the
// package declaration.
impDecl.TokPos = f.Package
file := fset.File(f.Package)
pkgLine := file.Line(f.Package)
for _, c := range f.Comments {
if file.Line(c.Pos()) > pkgLine {
break
}
// +2 for a blank line
impDecl.TokPos = c.End() + 2
}
}
f.Decls = append(f.Decls, nil)
copy(f.Decls[lastImport+2:], f.Decls[lastImport+1:])
f.Decls[lastImport+1] = impDecl
}
// Insert new import at insertAt.
insertAt := 0
if impIndex >= 0 {
// insert after the found import
insertAt = impIndex + 1
}
impDecl.Specs = append(impDecl.Specs, nil)
copy(impDecl.Specs[insertAt+1:], impDecl.Specs[insertAt:])
impDecl.Specs[insertAt] = newImport
pos := impDecl.Pos()
if insertAt > 0 {
// If there is a comment after an existing import, preserve the comment
// position by adding the new import after the comment.
if spec, ok := impDecl.Specs[insertAt-1].(*ast.ImportSpec); ok && spec.Comment != nil {
pos = spec.Comment.End()
} else {
// Assign same position as the previous import,
// so that the sorter sees it as being in the same block.
pos = impDecl.Specs[insertAt-1].Pos()
}
}
if newImport.Name != nil {
newImport.Name.NamePos = pos
}
newImport.Path.ValuePos = pos
newImport.EndPos = pos
// Clean up parens. impDecl contains at least one spec.
if len(impDecl.Specs) == 1 {
// Remove unneeded parens.
impDecl.Lparen = token.NoPos
} else if !impDecl.Lparen.IsValid() {
// impDecl needs parens added.
impDecl.Lparen = impDecl.Specs[0].Pos()
}
f.Imports = append(f.Imports, newImport)
if len(f.Decls) <= 1 {
return true
}
// Merge all the import declarations into the first one.
var first *ast.GenDecl
for i := 0; i < len(f.Decls); i++ {
decl := f.Decls[i]
gen, ok := decl.(*ast.GenDecl)
if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") {
continue
}
if first == nil {
first = gen
continue // Don't touch the first one.
}
// We now know there is more than one package in this import
// declaration. Ensure that it ends up parenthesized.
first.Lparen = first.Pos()
// Move the imports of the other import declaration to the first one.
for _, spec := range gen.Specs {
spec.(*ast.ImportSpec).Path.ValuePos = first.Pos()
first.Specs = append(first.Specs, spec)
}
f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
i--
}
return true
}
func isThirdParty(importPath string) bool {
// Third party package import path usually contains "." (".com", ".org", ...)
// This logic is taken from golang.org/x/tools/imports package.
return strings.Contains(importPath, ".")
}
// DeleteImport deletes the import path from the file f, if present.
// If there are duplicate import declarations, all matching ones are deleted.
func DeleteImport(fset *token.FileSet, f *ast.File, path string) (deleted bool) {
return DeleteNamedImport(fset, f, "", path)
}
// DeleteNamedImport deletes the import with the given name and path from the file f, if present.
// If there are duplicate import declarations, all matching ones are deleted.
func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (deleted bool) {
var delspecs []*ast.ImportSpec
var delcomments []*ast.CommentGroup
// Find the import nodes that import path, if any.
for i := 0; i < len(f.Decls); i++ {
decl := f.Decls[i]
gen, ok := decl.(*ast.GenDecl)
if !ok || gen.Tok != token.IMPORT {
continue
}
for j := 0; j < len(gen.Specs); j++ {
spec := gen.Specs[j]
impspec := spec.(*ast.ImportSpec)
if importName(impspec) != name || importPath(impspec) != path {
continue
}
// We found an import spec that imports path.
// Delete it.
delspecs = append(delspecs, impspec)
deleted = true
copy(gen.Specs[j:], gen.Specs[j+1:])
gen.Specs = gen.Specs[:len(gen.Specs)-1]
// If this was the last import spec in this decl,
// delete the decl, too.
if len(gen.Specs) == 0 {
copy(f.Decls[i:], f.Decls[i+1:])
f.Decls = f.Decls[:len(f.Decls)-1]
i--
break
} else if len(gen.Specs) == 1 {
if impspec.Doc != nil {
delcomments = append(delcomments, impspec.Doc)
}
if impspec.Comment != nil {
delcomments = append(delcomments, impspec.Comment)
}
for _, cg := range f.Comments {
// Found comment on the same line as the import spec.
if cg.End() < impspec.Pos() && fset.Position(cg.End()).Line == fset.Position(impspec.Pos()).Line {
delcomments = append(delcomments, cg)
break
}
}
spec := gen.Specs[0].(*ast.ImportSpec)
// Move the documentation right after the import decl.
if spec.Doc != nil {
for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Doc.Pos()).Line {
fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
}
}
for _, cg := range f.Comments {
if cg.End() < spec.Pos() && fset.Position(cg.End()).Line == fset.Position(spec.Pos()).Line {
for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Pos()).Line {
fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
}
break
}
}
}
if j > 0 {
lastImpspec := gen.Specs[j-1].(*ast.ImportSpec)
lastLine := fset.Position(lastImpspec.Path.ValuePos).Line
line := fset.Position(impspec.Path.ValuePos).Line
// We deleted an entry but now there may be
// a blank line-sized hole where the import was.
if line-lastLine > 1 {
// There was a blank line immediately preceding the deleted import,
// so there's no need to close the hole.
// Do nothing.
} else if line != fset.File(gen.Rparen).LineCount() {
// There was no blank line. Close the hole.
fset.File(gen.Rparen).MergeLine(line)
}
}
j--
}
}
// Delete imports from f.Imports.
for i := 0; i < len(f.Imports); i++ {
imp := f.Imports[i]
for j, del := range delspecs {
if imp == del {
copy(f.Imports[i:], f.Imports[i+1:])
f.Imports = f.Imports[:len(f.Imports)-1]
copy(delspecs[j:], delspecs[j+1:])
delspecs = delspecs[:len(delspecs)-1]
i--
break
}
}
}
// Delete comments from f.Comments.
for i := 0; i < len(f.Comments); i++ {
cg := f.Comments[i]
for j, del := range delcomments {
if cg == del {
copy(f.Comments[i:], f.Comments[i+1:])
f.Comments = f.Comments[:len(f.Comments)-1]
copy(delcomments[j:], delcomments[j+1:])
delcomments = delcomments[:len(delcomments)-1]
i--
break
}
}
}
if len(delspecs) > 0 {
panic(fmt.Sprintf("deleted specs from Decls but not Imports: %v", delspecs))
}
return
}
// RewriteImport rewrites any import of path oldPath to path newPath.
func RewriteImport(fset *token.FileSet, f *ast.File, oldPath, newPath string) (rewrote bool) {
for _, imp := range f.Imports {
if importPath(imp) == oldPath {
rewrote = true
// record old End, because the default is to compute
// it using the length of imp.Path.Value.
imp.EndPos = imp.End()
imp.Path.Value = strconv.Quote(newPath)
}
}
return
}
// UsesImport reports whether a given import is used.
func UsesImport(f *ast.File, path string) (used bool) {
spec := importSpec(f, path)
if spec == nil {
return
}
name := spec.Name.String()
switch name {
case "<nil>":
// If the package name is not explicitly specified,
// make an educated guess. This is not guaranteed to be correct.
lastSlash := strings.LastIndex(path, "/")
if lastSlash == -1 {
name = path
} else {
name = path[lastSlash+1:]
}
case "_", ".":
// Not sure if this import is used - err on the side of caution.
return true
}
ast.Walk(visitFn(func(n ast.Node) {
sel, ok := n.(*ast.SelectorExpr)
if ok && isTopName(sel.X, name) {
used = true
}
}), f)
return
}
type visitFn func(node ast.Node)
func (fn visitFn) Visit(node ast.Node) ast.Visitor {
fn(node)
return fn
}
// imports reports whether f has an import with the specified name and path.
func imports(f *ast.File, name, path string) bool {
for _, s := range f.Imports {
if importName(s) == name && importPath(s) == path {
return true
}
}
return false
}
// importSpec returns the import spec if f imports path,
// or nil otherwise.
func importSpec(f *ast.File, path string) *ast.ImportSpec {
for _, s := range f.Imports {
if importPath(s) == path {
return s
}
}
return nil
}
// importName returns the name of s,
// or "" if the import is not named.
func importName(s *ast.ImportSpec) string {
if s.Name == nil {
return ""
}
return s.Name.Name
}
// importPath returns the unquoted import path of s,
// or "" if the path is not properly quoted.
func importPath(s *ast.ImportSpec) string {
t, err := strconv.Unquote(s.Path.Value)
if err != nil {
return ""
}
return t
}
// declImports reports whether gen contains an import of path.
func declImports(gen *ast.GenDecl, path string) bool {
if gen.Tok != token.IMPORT {
return false
}
for _, spec := range gen.Specs {
impspec := spec.(*ast.ImportSpec)
if importPath(impspec) == path {
return true
}
}
return false
}
// matchLen returns the length of the longest path segment prefix shared by x and y.
func matchLen(x, y string) int {
n := 0
for i := 0; i < len(x) && i < len(y) && x[i] == y[i]; i++ {
if x[i] == '/' {
n++
}
}
return n
}
// isTopName returns true if n is a top-level unresolved identifier with the given name.
func isTopName(n ast.Expr, name string) bool {
id, ok := n.(*ast.Ident)
return ok && id.Name == name && id.Obj == nil
}
// Imports returns the file imports grouped by paragraph.
func Imports(fset *token.FileSet, f *ast.File) [][]*ast.ImportSpec {
var groups [][]*ast.ImportSpec
for _, decl := range f.Decls {
genDecl, ok := decl.(*ast.GenDecl)
if !ok || genDecl.Tok != token.IMPORT {
break
}
group := []*ast.ImportSpec{}
var lastLine int
for _, spec := range genDecl.Specs {
importSpec := spec.(*ast.ImportSpec)
pos := importSpec.Path.ValuePos
line := fset.Position(pos).Line
if lastLine > 0 && pos > 0 && line-lastLine > 1 {
groups = append(groups, group)
group = []*ast.ImportSpec{}
}
group = append(group, importSpec)
lastLine = line
}
groups = append(groups, group)
}
return groups
}
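A rough sketch of how the import helpers above are typically driven from a parsed *ast.File and printed back with go/printer (hypothetical; the source text and the "pathpkg"/"path" import are illustrative only):

package main

import (
	"fmt"
	"go/parser"
	"go/printer"
	"go/token"
	"os"

	"golang.org/x/tools/go/ast/astutil"
)

func main() {
	const src = "package p\n\nimport \"fmt\"\n\nfunc f() { fmt.Println(\"hi\") }\n"
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}
	// Add a renamed import; delete "fmt" only if nothing refers to it anymore.
	astutil.AddNamedImport(fset, f, "pathpkg", "path")
	if !astutil.UsesImport(f, "fmt") {
		astutil.DeleteImport(fset, f, "fmt")
	}
	// Imports reports the surviving import specs grouped by blank-line paragraphs.
	for i, group := range astutil.Imports(fset, f) {
		fmt.Printf("group %d: %d import(s)\n", i, len(group))
	}
	printer.Fprint(os.Stdout, fset, f)
}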

477
vendor/golang.org/x/tools/go/ast/astutil/rewrite.go generated vendored Normal file
View File

@ -0,0 +1,477 @@
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package astutil
import (
"fmt"
"go/ast"
"reflect"
"sort"
)
// An ApplyFunc is invoked by Apply for each node n, even if n is nil,
// before and/or after the node's children, using a Cursor describing
// the current node and providing operations on it.
//
// The return value of ApplyFunc controls the syntax tree traversal.
// See Apply for details.
type ApplyFunc func(*Cursor) bool
// Apply traverses a syntax tree recursively, starting with root,
// and calling pre and post for each node as described below.
// Apply returns the syntax tree, possibly modified.
//
// If pre is not nil, it is called for each node before the node's
// children are traversed (pre-order). If pre returns false, no
// children are traversed, and post is not called for that node.
//
// If post is not nil, and a prior call of pre didn't return false,
// post is called for each node after its children are traversed
// (post-order). If post returns false, traversal is terminated and
// Apply returns immediately.
//
// Only fields that refer to AST nodes are considered children;
// i.e., token.Pos, Scopes, Objects, and fields of basic types
// (strings, etc.) are ignored.
//
// Children are traversed in the order in which they appear in the
// respective node's struct definition. A package's files are
// traversed in the filenames' alphabetical order.
//
func Apply(root ast.Node, pre, post ApplyFunc) (result ast.Node) {
parent := &struct{ ast.Node }{root}
defer func() {
if r := recover(); r != nil && r != abort {
panic(r)
}
result = parent.Node
}()
a := &application{pre: pre, post: post}
a.apply(parent, "Node", nil, root)
return
}
var abort = new(int) // singleton, to signal termination of Apply
// A Cursor describes a node encountered during Apply.
// Information about the node and its parent is available
// from the Node, Parent, Name, and Index methods.
//
// If p is a variable of type and value of the current parent node
// c.Parent(), and f is the field identifier with name c.Name(),
// the following invariants hold:
//
// p.f == c.Node() if c.Index() < 0
// p.f[c.Index()] == c.Node() if c.Index() >= 0
//
// The methods Replace, Delete, InsertBefore, and InsertAfter
// can be used to change the AST without disrupting Apply.
type Cursor struct {
parent ast.Node
name string
iter *iterator // valid if non-nil
node ast.Node
}
// Node returns the current Node.
func (c *Cursor) Node() ast.Node { return c.node }
// Parent returns the parent of the current Node.
func (c *Cursor) Parent() ast.Node { return c.parent }
// Name returns the name of the parent Node field that contains the current Node.
// If the parent is a *ast.Package and the current Node is a *ast.File, Name returns
// the filename for the current Node.
func (c *Cursor) Name() string { return c.name }
// Index reports the index >= 0 of the current Node in the slice of Nodes that
// contains it, or a value < 0 if the current Node is not part of a slice.
// The index of the current node changes if InsertBefore is called while
// processing the current node.
func (c *Cursor) Index() int {
if c.iter != nil {
return c.iter.index
}
return -1
}
// field returns the current node's parent field value.
func (c *Cursor) field() reflect.Value {
return reflect.Indirect(reflect.ValueOf(c.parent)).FieldByName(c.name)
}
// Replace replaces the current Node with n.
// The replacement node is not walked by Apply.
func (c *Cursor) Replace(n ast.Node) {
if _, ok := c.node.(*ast.File); ok {
file, ok := n.(*ast.File)
if !ok {
panic("attempt to replace *ast.File with non-*ast.File")
}
c.parent.(*ast.Package).Files[c.name] = file
return
}
v := c.field()
if i := c.Index(); i >= 0 {
v = v.Index(i)
}
v.Set(reflect.ValueOf(n))
}
// Delete deletes the current Node from its containing slice.
// If the current Node is not part of a slice, Delete panics.
// As a special case, if the current node is a package file,
// Delete removes it from the package's Files map.
func (c *Cursor) Delete() {
if _, ok := c.node.(*ast.File); ok {
delete(c.parent.(*ast.Package).Files, c.name)
return
}
i := c.Index()
if i < 0 {
panic("Delete node not contained in slice")
}
v := c.field()
l := v.Len()
reflect.Copy(v.Slice(i, l), v.Slice(i+1, l))
v.Index(l - 1).Set(reflect.Zero(v.Type().Elem()))
v.SetLen(l - 1)
c.iter.step--
}
// InsertAfter inserts n after the current Node in its containing slice.
// If the current Node is not part of a slice, InsertAfter panics.
// Apply does not walk n.
func (c *Cursor) InsertAfter(n ast.Node) {
i := c.Index()
if i < 0 {
panic("InsertAfter node not contained in slice")
}
v := c.field()
v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
l := v.Len()
reflect.Copy(v.Slice(i+2, l), v.Slice(i+1, l))
v.Index(i + 1).Set(reflect.ValueOf(n))
c.iter.step++
}
// InsertBefore inserts n before the current Node in its containing slice.
// If the current Node is not part of a slice, InsertBefore panics.
// Apply will not walk n.
func (c *Cursor) InsertBefore(n ast.Node) {
i := c.Index()
if i < 0 {
panic("InsertBefore node not contained in slice")
}
v := c.field()
v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
l := v.Len()
reflect.Copy(v.Slice(i+1, l), v.Slice(i, l))
v.Index(i).Set(reflect.ValueOf(n))
c.iter.index++
}
// application carries all the shared data so we can pass it around cheaply.
type application struct {
pre, post ApplyFunc
cursor Cursor
iter iterator
}
func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.Node) {
// convert typed nil into untyped nil
if v := reflect.ValueOf(n); v.Kind() == reflect.Ptr && v.IsNil() {
n = nil
}
// avoid heap-allocating a new cursor for each apply call; reuse a.cursor instead
saved := a.cursor
a.cursor.parent = parent
a.cursor.name = name
a.cursor.iter = iter
a.cursor.node = n
if a.pre != nil && !a.pre(&a.cursor) {
a.cursor = saved
return
}
// walk children
// (the order of the cases matches the order of the corresponding node types in go/ast)
switch n := n.(type) {
case nil:
// nothing to do
// Comments and fields
case *ast.Comment:
// nothing to do
case *ast.CommentGroup:
if n != nil {
a.applyList(n, "List")
}
case *ast.Field:
a.apply(n, "Doc", nil, n.Doc)
a.applyList(n, "Names")
a.apply(n, "Type", nil, n.Type)
a.apply(n, "Tag", nil, n.Tag)
a.apply(n, "Comment", nil, n.Comment)
case *ast.FieldList:
a.applyList(n, "List")
// Expressions
case *ast.BadExpr, *ast.Ident, *ast.BasicLit:
// nothing to do
case *ast.Ellipsis:
a.apply(n, "Elt", nil, n.Elt)
case *ast.FuncLit:
a.apply(n, "Type", nil, n.Type)
a.apply(n, "Body", nil, n.Body)
case *ast.CompositeLit:
a.apply(n, "Type", nil, n.Type)
a.applyList(n, "Elts")
case *ast.ParenExpr:
a.apply(n, "X", nil, n.X)
case *ast.SelectorExpr:
a.apply(n, "X", nil, n.X)
a.apply(n, "Sel", nil, n.Sel)
case *ast.IndexExpr:
a.apply(n, "X", nil, n.X)
a.apply(n, "Index", nil, n.Index)
case *ast.SliceExpr:
a.apply(n, "X", nil, n.X)
a.apply(n, "Low", nil, n.Low)
a.apply(n, "High", nil, n.High)
a.apply(n, "Max", nil, n.Max)
case *ast.TypeAssertExpr:
a.apply(n, "X", nil, n.X)
a.apply(n, "Type", nil, n.Type)
case *ast.CallExpr:
a.apply(n, "Fun", nil, n.Fun)
a.applyList(n, "Args")
case *ast.StarExpr:
a.apply(n, "X", nil, n.X)
case *ast.UnaryExpr:
a.apply(n, "X", nil, n.X)
case *ast.BinaryExpr:
a.apply(n, "X", nil, n.X)
a.apply(n, "Y", nil, n.Y)
case *ast.KeyValueExpr:
a.apply(n, "Key", nil, n.Key)
a.apply(n, "Value", nil, n.Value)
// Types
case *ast.ArrayType:
a.apply(n, "Len", nil, n.Len)
a.apply(n, "Elt", nil, n.Elt)
case *ast.StructType:
a.apply(n, "Fields", nil, n.Fields)
case *ast.FuncType:
a.apply(n, "Params", nil, n.Params)
a.apply(n, "Results", nil, n.Results)
case *ast.InterfaceType:
a.apply(n, "Methods", nil, n.Methods)
case *ast.MapType:
a.apply(n, "Key", nil, n.Key)
a.apply(n, "Value", nil, n.Value)
case *ast.ChanType:
a.apply(n, "Value", nil, n.Value)
// Statements
case *ast.BadStmt:
// nothing to do
case *ast.DeclStmt:
a.apply(n, "Decl", nil, n.Decl)
case *ast.EmptyStmt:
// nothing to do
case *ast.LabeledStmt:
a.apply(n, "Label", nil, n.Label)
a.apply(n, "Stmt", nil, n.Stmt)
case *ast.ExprStmt:
a.apply(n, "X", nil, n.X)
case *ast.SendStmt:
a.apply(n, "Chan", nil, n.Chan)
a.apply(n, "Value", nil, n.Value)
case *ast.IncDecStmt:
a.apply(n, "X", nil, n.X)
case *ast.AssignStmt:
a.applyList(n, "Lhs")
a.applyList(n, "Rhs")
case *ast.GoStmt:
a.apply(n, "Call", nil, n.Call)
case *ast.DeferStmt:
a.apply(n, "Call", nil, n.Call)
case *ast.ReturnStmt:
a.applyList(n, "Results")
case *ast.BranchStmt:
a.apply(n, "Label", nil, n.Label)
case *ast.BlockStmt:
a.applyList(n, "List")
case *ast.IfStmt:
a.apply(n, "Init", nil, n.Init)
a.apply(n, "Cond", nil, n.Cond)
a.apply(n, "Body", nil, n.Body)
a.apply(n, "Else", nil, n.Else)
case *ast.CaseClause:
a.applyList(n, "List")
a.applyList(n, "Body")
case *ast.SwitchStmt:
a.apply(n, "Init", nil, n.Init)
a.apply(n, "Tag", nil, n.Tag)
a.apply(n, "Body", nil, n.Body)
case *ast.TypeSwitchStmt:
a.apply(n, "Init", nil, n.Init)
a.apply(n, "Assign", nil, n.Assign)
a.apply(n, "Body", nil, n.Body)
case *ast.CommClause:
a.apply(n, "Comm", nil, n.Comm)
a.applyList(n, "Body")
case *ast.SelectStmt:
a.apply(n, "Body", nil, n.Body)
case *ast.ForStmt:
a.apply(n, "Init", nil, n.Init)
a.apply(n, "Cond", nil, n.Cond)
a.apply(n, "Post", nil, n.Post)
a.apply(n, "Body", nil, n.Body)
case *ast.RangeStmt:
a.apply(n, "Key", nil, n.Key)
a.apply(n, "Value", nil, n.Value)
a.apply(n, "X", nil, n.X)
a.apply(n, "Body", nil, n.Body)
// Declarations
case *ast.ImportSpec:
a.apply(n, "Doc", nil, n.Doc)
a.apply(n, "Name", nil, n.Name)
a.apply(n, "Path", nil, n.Path)
a.apply(n, "Comment", nil, n.Comment)
case *ast.ValueSpec:
a.apply(n, "Doc", nil, n.Doc)
a.applyList(n, "Names")
a.apply(n, "Type", nil, n.Type)
a.applyList(n, "Values")
a.apply(n, "Comment", nil, n.Comment)
case *ast.TypeSpec:
a.apply(n, "Doc", nil, n.Doc)
a.apply(n, "Name", nil, n.Name)
a.apply(n, "Type", nil, n.Type)
a.apply(n, "Comment", nil, n.Comment)
case *ast.BadDecl:
// nothing to do
case *ast.GenDecl:
a.apply(n, "Doc", nil, n.Doc)
a.applyList(n, "Specs")
case *ast.FuncDecl:
a.apply(n, "Doc", nil, n.Doc)
a.apply(n, "Recv", nil, n.Recv)
a.apply(n, "Name", nil, n.Name)
a.apply(n, "Type", nil, n.Type)
a.apply(n, "Body", nil, n.Body)
// Files and packages
case *ast.File:
a.apply(n, "Doc", nil, n.Doc)
a.apply(n, "Name", nil, n.Name)
a.applyList(n, "Decls")
// Don't walk n.Comments; they have either been walked already if
// they are Doc comments, or they can be easily walked explicitly.
case *ast.Package:
// collect and sort names for reproducible behavior
var names []string
for name := range n.Files {
names = append(names, name)
}
sort.Strings(names)
for _, name := range names {
a.apply(n, name, nil, n.Files[name])
}
default:
panic(fmt.Sprintf("Apply: unexpected node type %T", n))
}
if a.post != nil && !a.post(&a.cursor) {
panic(abort)
}
a.cursor = saved
}
// An iterator controls iteration over a slice of nodes.
type iterator struct {
index, step int
}
func (a *application) applyList(parent ast.Node, name string) {
// avoid heap-allocating a new iterator for each applyList call; reuse a.iter instead
saved := a.iter
a.iter.index = 0
for {
// must reload parent.name each time, since cursor modifications might change it
v := reflect.Indirect(reflect.ValueOf(parent)).FieldByName(name)
if a.iter.index >= v.Len() {
break
}
// element x may be nil in a bad AST - be cautious
var x ast.Node
if e := v.Index(a.iter.index); e.IsValid() {
x = e.Interface().(ast.Node)
}
a.iter.step = 1
a.apply(parent, name, &a.iter, x)
a.iter.index += a.iter.step
}
a.iter = saved
}
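A hypothetical sketch of Apply with a Cursor-based rewrite; the identifier names "old" and "renamed" are invented for the example. The post callback runs after each node's children, and Replace swaps the current node in its parent field without disturbing the traversal.

package main

import (
	"go/ast"
	"go/parser"
	"go/printer"
	"go/token"
	"os"

	"golang.org/x/tools/go/ast/astutil"
)

func main() {
	const src = "package p\n\nvar x = old(1) + old(2)\n"
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	// Rename every identifier called "old"; the replacement node is not walked.
	result := astutil.Apply(f, nil, func(c *astutil.Cursor) bool {
		if id, ok := c.Node().(*ast.Ident); ok && id.Name == "old" {
			c.Replace(&ast.Ident{Name: "renamed", NamePos: id.NamePos})
		}
		return true
	})
	printer.Fprint(os.Stdout, fset, result)
}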

14
vendor/golang.org/x/tools/go/ast/astutil/util.go generated vendored Normal file
View File

@ -0,0 +1,14 @@
package astutil
import "go/ast"
// Unparen returns e with any enclosing parentheses stripped.
func Unparen(e ast.Expr) ast.Expr {
for {
p, ok := e.(*ast.ParenExpr)
if !ok {
return e
}
e = p.X
}
}
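A tiny illustrative sketch of Unparen (the doubly parenthesized expression is made up):

package main

import (
	"fmt"
	"go/parser"

	"golang.org/x/tools/go/ast/astutil"
)

func main() {
	expr, err := parser.ParseExpr("((x + 1))")
	if err != nil {
		panic(err)
	}
	// Both layers of parentheses are peeled off, leaving the *ast.BinaryExpr.
	fmt.Printf("%T\n", astutil.Unparen(expr))
}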

109
vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go generated vendored Normal file
View File

@ -0,0 +1,109 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package gcexportdata provides functions for locating, reading, and
// writing export data files containing type information produced by the
// gc compiler. This package supports go1.7 export data format and all
// later versions.
//
// Although it might seem convenient for this package to live alongside
// go/types in the standard library, this would cause version skew
// problems for developer tools that use it, since they must be able to
// consume the outputs of the gc compiler both before and after a Go
// update such as from Go 1.7 to Go 1.8. Because this package lives in
// golang.org/x/tools, sites can update their version of this repo some
// time before the Go 1.8 release and rebuild and redeploy their
// developer tools, which will then be able to consume both Go 1.7 and
// Go 1.8 export data files, so they will work before and after the
// Go update. (See discussion at https://github.com/golang/go/issues/15651.)
//
package gcexportdata // import "golang.org/x/tools/go/gcexportdata"
import (
"bufio"
"bytes"
"fmt"
"go/token"
"go/types"
"io"
"io/ioutil"
"golang.org/x/tools/go/internal/gcimporter"
)
// Find returns the name of an object (.o) or archive (.a) file
// containing type information for the specified import path,
// using the workspace layout conventions of go/build.
// If no file was found, an empty filename is returned.
//
// A relative srcDir is interpreted relative to the current working directory.
//
// Find also returns the package's resolved (canonical) import path,
// reflecting the effects of srcDir and vendoring on importPath.
func Find(importPath, srcDir string) (filename, path string) {
return gcimporter.FindPkg(importPath, srcDir)
}
// NewReader returns a reader for the export data section of an object
// (.o) or archive (.a) file read from r. The new reader may provide
// additional trailing data beyond the end of the export data.
func NewReader(r io.Reader) (io.Reader, error) {
buf := bufio.NewReader(r)
_, err := gcimporter.FindExportData(buf)
// If we ever switch to a zip-like archive format with the ToC
// at the end, we can return the correct portion of export data,
// but for now we must return the entire rest of the file.
return buf, err
}
// Read reads export data from in, decodes it, and returns type
// information for the package.
// The package name is specified by path.
// File position information is added to fset.
//
// Read may inspect and add to the imports map to ensure that references
// within the export data to other packages are consistent. The caller
// must ensure that imports[path] does not exist, or exists but is
// incomplete (see types.Package.Complete), and Read inserts the
// resulting package into this map entry.
//
// On return, the state of the reader is undefined.
func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, path string) (*types.Package, error) {
data, err := ioutil.ReadAll(in)
if err != nil {
return nil, fmt.Errorf("reading export data for %q: %v", path, err)
}
if bytes.HasPrefix(data, []byte("!<arch>")) {
return nil, fmt.Errorf("can't read export data for %q directly from an archive file (call gcexportdata.NewReader first to extract export data)", path)
}
// The App Engine Go runtime v1.6 uses the old export data format.
// TODO(adonovan): delete once v1.7 has been around for a while.
if bytes.HasPrefix(data, []byte("package ")) {
return gcimporter.ImportData(imports, path, path, bytes.NewReader(data))
}
// The indexed export format starts with an 'i'; the older
// binary export format starts with a 'c', 'd', or 'v'
// (from "version"). Select appropriate importer.
if len(data) > 0 && data[0] == 'i' {
_, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path)
return pkg, err
}
_, pkg, err := gcimporter.BImportData(fset, imports, data, path)
return pkg, err
}
// Write writes encoded type information for the specified package to out.
// The FileSet provides file position information for named objects.
func Write(out io.Writer, fset *token.FileSet, pkg *types.Package) error {
b, err := gcimporter.BExportData(fset, pkg)
if err != nil {
return err
}
_, err = out.Write(b)
return err
}
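A hypothetical sketch of the Find/NewReader/Read pipeline. Whether Find locates export data for "fmt" depends on the local workspace and toolchain layout, so treat this as an outline rather than a guaranteed-to-run program:

package main

import (
	"fmt"
	"go/token"
	"go/types"
	"log"
	"os"

	"golang.org/x/tools/go/gcexportdata"
)

func main() {
	// Locate the export data file using go/build workspace conventions.
	filename, path := gcexportdata.Find("fmt", ".")
	if filename == "" {
		log.Fatal("no export data found for fmt")
	}
	f, err := os.Open(filename)
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()
	// NewReader skips to the export data section of the object/archive file.
	r, err := gcexportdata.NewReader(f)
	if err != nil {
		log.Fatal(err)
	}
	fset := token.NewFileSet()
	imports := make(map[string]*types.Package)
	pkg, err := gcexportdata.Read(r, fset, imports, path)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(pkg.Path(), "exports", len(pkg.Scope().Names()), "names")
}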

73
vendor/golang.org/x/tools/go/gcexportdata/importer.go generated vendored Normal file
View File

@ -0,0 +1,73 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package gcexportdata
import (
"fmt"
"go/token"
"go/types"
"os"
)
// NewImporter returns a new instance of the types.Importer interface
// that reads type information from export data files written by gc.
// The Importer also satisfies types.ImporterFrom.
//
// Export data files are located using "go build" workspace conventions
// and the build.Default context.
//
// Use this importer instead of go/importer.For("gc", ...) to avoid the
// version-skew problems described in the documentation of this package,
// or to control the FileSet or access the imports map populated during
// package loading.
//
func NewImporter(fset *token.FileSet, imports map[string]*types.Package) types.ImporterFrom {
return importer{fset, imports}
}
type importer struct {
fset *token.FileSet
imports map[string]*types.Package
}
func (imp importer) Import(importPath string) (*types.Package, error) {
return imp.ImportFrom(importPath, "", 0)
}
func (imp importer) ImportFrom(importPath, srcDir string, mode types.ImportMode) (_ *types.Package, err error) {
filename, path := Find(importPath, srcDir)
if filename == "" {
if importPath == "unsafe" {
// Even for unsafe, call Find first in case
// the package was vendored.
return types.Unsafe, nil
}
return nil, fmt.Errorf("can't find import: %s", importPath)
}
if pkg, ok := imp.imports[path]; ok && pkg.Complete() {
return pkg, nil // cache hit
}
// open file
f, err := os.Open(filename)
if err != nil {
return nil, err
}
defer func() {
f.Close()
if err != nil {
// add file name to error
err = fmt.Errorf("reading export data: %s: %v", filename, err)
}
}()
r, err := NewReader(f)
if err != nil {
return nil, err
}
return Read(r, imp.fset, imp.imports, path)
}
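A sketch of NewImporter wired into go/types type-checking (hypothetical; the tiny source file is invented, and the import of "fmt" resolves only if its compiled export data can be located on the local toolchain):

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
	"log"

	"golang.org/x/tools/go/gcexportdata"
)

func main() {
	const src = "package p\n\nimport \"fmt\"\n\nvar _ = fmt.Sprint(1)\n"
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		log.Fatal(err)
	}
	// Resolve imports from export data rather than from source.
	imports := make(map[string]*types.Package)
	conf := types.Config{Importer: gcexportdata.NewImporter(fset, imports)}
	pkg, err := conf.Check("p", fset, []*ast.File{f}, nil)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("type-checked", pkg.Path())
}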

99
vendor/golang.org/x/tools/go/gcexportdata/main.go generated vendored Normal file
View File

@ -0,0 +1,99 @@
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build ignore
// The gcexportdata command is a diagnostic tool that displays the
// contents of gc export data files.
package main
import (
"flag"
"fmt"
"go/token"
"go/types"
"log"
"os"
"golang.org/x/tools/go/gcexportdata"
"golang.org/x/tools/go/types/typeutil"
)
var packageFlag = flag.String("package", "", "alternative package to print")
func main() {
log.SetPrefix("gcexportdata: ")
log.SetFlags(0)
flag.Usage = func() {
fmt.Fprintln(os.Stderr, "usage: gcexportdata [-package path] file.a")
}
flag.Parse()
if flag.NArg() != 1 {
flag.Usage()
os.Exit(2)
}
filename := flag.Args()[0]
f, err := os.Open(filename)
if err != nil {
log.Fatal(err)
}
r, err := gcexportdata.NewReader(f)
if err != nil {
log.Fatalf("%s: %s", filename, err)
}
// Decode the package.
const primary = "<primary>"
imports := make(map[string]*types.Package)
fset := token.NewFileSet()
pkg, err := gcexportdata.Read(r, fset, imports, primary)
if err != nil {
log.Fatalf("%s: %s", filename, err)
}
// Optionally select an indirectly mentioned package.
if *packageFlag != "" {
pkg = imports[*packageFlag]
if pkg == nil {
fmt.Fprintf(os.Stderr, "export data file %s does not mention %s; has:\n",
filename, *packageFlag)
for p := range imports {
if p != primary {
fmt.Fprintf(os.Stderr, "\t%s\n", p)
}
}
os.Exit(1)
}
}
// Print all package-level declarations, including non-exported ones.
fmt.Printf("package %s\n", pkg.Name())
for _, imp := range pkg.Imports() {
fmt.Printf("import %q\n", imp.Path())
}
qual := func(p *types.Package) string {
if pkg == p {
return ""
}
return p.Name()
}
scope := pkg.Scope()
for _, name := range scope.Names() {
obj := scope.Lookup(name)
fmt.Printf("%s: %s\n",
fset.Position(obj.Pos()),
types.ObjectString(obj, qual))
// For types, print each method.
if _, ok := obj.(*types.TypeName); ok {
for _, method := range typeutil.IntuitiveMethodSet(obj.Type(), nil) {
fmt.Printf("%s: %s\n",
fset.Position(method.Obj().Pos()),
types.SelectionString(method, qual))
}
}
}
}

852
vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go generated vendored Normal file
View File

@ -0,0 +1,852 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Binary package export.
// This file was derived from $GOROOT/src/cmd/compile/internal/gc/bexport.go;
// see that file for specification of the format.
package gcimporter
import (
"bytes"
"encoding/binary"
"fmt"
"go/ast"
"go/constant"
"go/token"
"go/types"
"math"
"math/big"
"sort"
"strings"
)
// If debugFormat is set, each integer and string value is preceded by a marker
// and position information in the encoding. This mechanism permits an importer
// to recognize immediately when it is out of sync. The importer recognizes this
// mode automatically (i.e., it can import export data produced with debugging
// support even if debugFormat is not set at the time of import). This mode will
// lead to massively larger export data (by a factor of 2 to 3) and should only
// be enabled during development and debugging.
//
// NOTE: This flag is the first flag to enable if importing dies because of
// (suspected) format errors, and whenever a change is made to the format.
const debugFormat = false // default: false
// If trace is set, debugging output is printed to std out.
const trace = false // default: false
// Current export format version. Increase with each format change.
// Note: The latest binary (non-indexed) export format is at version 6.
// This exporter is still at level 4, but it doesn't matter since
// the binary importer can handle older versions just fine.
// 6: package height (CL 105038) -- NOT IMPLEMENTED HERE
// 5: improved position encoding efficiency (issue 20080, CL 41619) -- NOT IMPLEMENTED HERE
// 4: type name objects support type aliases, uses aliasTag
// 3: Go1.8 encoding (same as version 2, aliasTag defined but never used)
// 2: removed unused bool in ODCL export (compiler only)
// 1: header format change (more regular), export package for _ struct fields
// 0: Go1.7 encoding
const exportVersion = 4
// trackAllTypes enables cycle tracking for all types, not just named
// types. The existing compiler invariants assume that unnamed types
// that are not completely set up are not used, or else there are spurious
// errors.
// If disabled, only named types are tracked, possibly leading to slightly
// less efficient encoding in rare cases. It also prevents the export of
// some corner-case type declarations (but those are not handled correctly
// with the textual export format either).
// TODO(gri) enable and remove once issues caused by it are fixed
const trackAllTypes = false
type exporter struct {
fset *token.FileSet
out bytes.Buffer
// object -> index maps, indexed in order of serialization
strIndex map[string]int
pkgIndex map[*types.Package]int
typIndex map[types.Type]int
// position encoding
posInfoFormat bool
prevFile string
prevLine int
// debugging support
written int // bytes written
indent int // for trace
}
// internalError represents an error generated inside this package.
type internalError string
func (e internalError) Error() string { return "gcimporter: " + string(e) }
func internalErrorf(format string, args ...interface{}) error {
return internalError(fmt.Sprintf(format, args...))
}
// BExportData returns binary export data for pkg.
// If no file set is provided, position info will be missing.
func BExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error) {
defer func() {
if e := recover(); e != nil {
if ierr, ok := e.(internalError); ok {
err = ierr
return
}
// Not an internal error; panic again.
panic(e)
}
}()
p := exporter{
fset: fset,
strIndex: map[string]int{"": 0}, // empty string is mapped to 0
pkgIndex: make(map[*types.Package]int),
typIndex: make(map[types.Type]int),
posInfoFormat: true, // TODO(gri) might become a flag, eventually
}
// write version info
// The version string must start with "version %d" where %d is the version
// number. Additional debugging information may follow after a blank; that
// text is ignored by the importer.
p.rawStringln(fmt.Sprintf("version %d", exportVersion))
var debug string
if debugFormat {
debug = "debug"
}
p.rawStringln(debug) // cannot use p.bool since it's affected by debugFormat; also want to see this clearly
p.bool(trackAllTypes)
p.bool(p.posInfoFormat)
// --- generic export data ---
// populate type map with predeclared "known" types
for index, typ := range predeclared {
p.typIndex[typ] = index
}
if len(p.typIndex) != len(predeclared) {
return nil, internalError("duplicate entries in type map?")
}
// write package data
p.pkg(pkg, true)
if trace {
p.tracef("\n")
}
// write objects
objcount := 0
scope := pkg.Scope()
for _, name := range scope.Names() {
if !ast.IsExported(name) {
continue
}
if trace {
p.tracef("\n")
}
p.obj(scope.Lookup(name))
objcount++
}
// indicate end of list
if trace {
p.tracef("\n")
}
p.tag(endTag)
// for self-verification only (redundant)
p.int(objcount)
if trace {
p.tracef("\n")
}
// --- end of export data ---
return p.out.Bytes(), nil
}
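Since gcimporter is an internal package, outside callers normally reach BExportData through gcexportdata.Write, shown earlier in this commit. A hypothetical round-trip sketch under that assumption, using the go/importer source importer only as a convenient way to obtain a *types.Package to encode:

package main

import (
	"bytes"
	"fmt"
	"go/importer"
	"go/token"
	"go/types"
	"log"

	"golang.org/x/tools/go/gcexportdata"
)

func main() {
	// Load some package to export (requires GOROOT sources to be present).
	pkg, err := importer.For("source", nil).Import("strings")
	if err != nil {
		log.Fatal(err)
	}
	// Write encodes the package via the binary exporter (BExportData);
	// a fresh FileSet means position info is not preserved, which is fine here.
	var buf bytes.Buffer
	if err := gcexportdata.Write(&buf, token.NewFileSet(), pkg); err != nil {
		log.Fatal(err)
	}
	// ...and Read decodes it again with the matching importer.
	imports := make(map[string]*types.Package)
	pkg2, err := gcexportdata.Read(&buf, token.NewFileSet(), imports, pkg.Path())
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(pkg2.Path(), "round-tripped with", pkg2.Scope().Len(), "objects")
}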
func (p *exporter) pkg(pkg *types.Package, emptypath bool) {
if pkg == nil {
panic(internalError("unexpected nil pkg"))
}
// if we saw the package before, write its index (>= 0)
if i, ok := p.pkgIndex[pkg]; ok {
p.index('P', i)
return
}
// otherwise, remember the package, write the package tag (< 0) and package data
if trace {
p.tracef("P%d = { ", len(p.pkgIndex))
defer p.tracef("} ")
}
p.pkgIndex[pkg] = len(p.pkgIndex)
p.tag(packageTag)
p.string(pkg.Name())
if emptypath {
p.string("")
} else {
p.string(pkg.Path())
}
}
func (p *exporter) obj(obj types.Object) {
switch obj := obj.(type) {
case *types.Const:
p.tag(constTag)
p.pos(obj)
p.qualifiedName(obj)
p.typ(obj.Type())
p.value(obj.Val())
case *types.TypeName:
if obj.IsAlias() {
p.tag(aliasTag)
p.pos(obj)
p.qualifiedName(obj)
} else {
p.tag(typeTag)
}
p.typ(obj.Type())
case *types.Var:
p.tag(varTag)
p.pos(obj)
p.qualifiedName(obj)
p.typ(obj.Type())
case *types.Func:
p.tag(funcTag)
p.pos(obj)
p.qualifiedName(obj)
sig := obj.Type().(*types.Signature)
p.paramList(sig.Params(), sig.Variadic())
p.paramList(sig.Results(), false)
default:
panic(internalErrorf("unexpected object %v (%T)", obj, obj))
}
}
func (p *exporter) pos(obj types.Object) {
if !p.posInfoFormat {
return
}
file, line := p.fileLine(obj)
if file == p.prevFile {
// common case: write line delta
// delta == 0 means different file or no line change
delta := line - p.prevLine
p.int(delta)
if delta == 0 {
p.int(-1) // -1 means no file change
}
} else {
// different file
p.int(0)
// Encode filename as length of common prefix with previous
// filename, followed by (possibly empty) suffix. Filenames
// frequently share path prefixes, so this can save a lot
// of space and make export data size less dependent on file
// path length. The suffix is unlikely to be empty because
// file names tend to end in ".go".
n := commonPrefixLen(p.prevFile, file)
p.int(n) // n >= 0
p.string(file[n:]) // write suffix only
p.prevFile = file
p.int(line)
}
p.prevLine = line
}
func (p *exporter) fileLine(obj types.Object) (file string, line int) {
if p.fset != nil {
pos := p.fset.Position(obj.Pos())
file = pos.Filename
line = pos.Line
}
return
}
func commonPrefixLen(a, b string) int {
if len(a) > len(b) {
a, b = b, a
}
// len(a) <= len(b)
i := 0
for i < len(a) && a[i] == b[i] {
i++
}
return i
}
func (p *exporter) qualifiedName(obj types.Object) {
p.string(obj.Name())
p.pkg(obj.Pkg(), false)
}
func (p *exporter) typ(t types.Type) {
if t == nil {
panic(internalError("nil type"))
}
// Possible optimization: Anonymous pointer types *T where
// T is a named type are common. We could canonicalize all
// such types *T to a single type PT = *T. This would lead
// to at most one *T entry in typIndex, and all future *T's
// would be encoded as the respective index directly. Would
// save 1 byte (pointerTag) per *T and reduce the typIndex
// size (at the cost of a canonicalization map). We can do
// this later, without encoding format change.
// if we saw the type before, write its index (>= 0)
if i, ok := p.typIndex[t]; ok {
p.index('T', i)
return
}
// otherwise, remember the type, write the type tag (< 0) and type data
if trackAllTypes {
if trace {
p.tracef("T%d = {>\n", len(p.typIndex))
defer p.tracef("<\n} ")
}
p.typIndex[t] = len(p.typIndex)
}
switch t := t.(type) {
case *types.Named:
if !trackAllTypes {
// if we don't track all types, track named types now
p.typIndex[t] = len(p.typIndex)
}
p.tag(namedTag)
p.pos(t.Obj())
p.qualifiedName(t.Obj())
p.typ(t.Underlying())
if !types.IsInterface(t) {
p.assocMethods(t)
}
case *types.Array:
p.tag(arrayTag)
p.int64(t.Len())
p.typ(t.Elem())
case *types.Slice:
p.tag(sliceTag)
p.typ(t.Elem())
case *dddSlice:
p.tag(dddTag)
p.typ(t.elem)
case *types.Struct:
p.tag(structTag)
p.fieldList(t)
case *types.Pointer:
p.tag(pointerTag)
p.typ(t.Elem())
case *types.Signature:
p.tag(signatureTag)
p.paramList(t.Params(), t.Variadic())
p.paramList(t.Results(), false)
case *types.Interface:
p.tag(interfaceTag)
p.iface(t)
case *types.Map:
p.tag(mapTag)
p.typ(t.Key())
p.typ(t.Elem())
case *types.Chan:
p.tag(chanTag)
p.int(int(3 - t.Dir())) // hack
p.typ(t.Elem())
default:
panic(internalErrorf("unexpected type %T: %s", t, t))
}
}
func (p *exporter) assocMethods(named *types.Named) {
// Sort methods (for determinism).
var methods []*types.Func
for i := 0; i < named.NumMethods(); i++ {
methods = append(methods, named.Method(i))
}
sort.Sort(methodsByName(methods))
p.int(len(methods))
if trace && methods != nil {
p.tracef("associated methods {>\n")
}
for i, m := range methods {
if trace && i > 0 {
p.tracef("\n")
}
p.pos(m)
name := m.Name()
p.string(name)
if !exported(name) {
p.pkg(m.Pkg(), false)
}
sig := m.Type().(*types.Signature)
p.paramList(types.NewTuple(sig.Recv()), false)
p.paramList(sig.Params(), sig.Variadic())
p.paramList(sig.Results(), false)
p.int(0) // dummy value for go:nointerface pragma - ignored by importer
}
if trace && methods != nil {
p.tracef("<\n} ")
}
}
type methodsByName []*types.Func
func (x methodsByName) Len() int { return len(x) }
func (x methodsByName) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
func (x methodsByName) Less(i, j int) bool { return x[i].Name() < x[j].Name() }
func (p *exporter) fieldList(t *types.Struct) {
if trace && t.NumFields() > 0 {
p.tracef("fields {>\n")
defer p.tracef("<\n} ")
}
p.int(t.NumFields())
for i := 0; i < t.NumFields(); i++ {
if trace && i > 0 {
p.tracef("\n")
}
p.field(t.Field(i))
p.string(t.Tag(i))
}
}
func (p *exporter) field(f *types.Var) {
if !f.IsField() {
panic(internalError("field expected"))
}
p.pos(f)
p.fieldName(f)
p.typ(f.Type())
}
func (p *exporter) iface(t *types.Interface) {
// TODO(gri): enable importer to load embedded interfaces,
// then emit Embeddeds and ExplicitMethods separately here.
p.int(0)
n := t.NumMethods()
if trace && n > 0 {
p.tracef("methods {>\n")
defer p.tracef("<\n} ")
}
p.int(n)
for i := 0; i < n; i++ {
if trace && i > 0 {
p.tracef("\n")
}
p.method(t.Method(i))
}
}
func (p *exporter) method(m *types.Func) {
sig := m.Type().(*types.Signature)
if sig.Recv() == nil {
panic(internalError("method expected"))
}
p.pos(m)
p.string(m.Name())
if m.Name() != "_" && !ast.IsExported(m.Name()) {
p.pkg(m.Pkg(), false)
}
// interface method; no need to encode receiver.
p.paramList(sig.Params(), sig.Variadic())
p.paramList(sig.Results(), false)
}
func (p *exporter) fieldName(f *types.Var) {
name := f.Name()
if f.Anonymous() {
// anonymous field - we distinguish between 3 cases:
// 1) field name matches base type name and is exported
// 2) field name matches base type name and is not exported
// 3) field name doesn't match base type name (alias name)
bname := basetypeName(f.Type())
if name == bname {
if ast.IsExported(name) {
name = "" // 1) we don't need to know the field name or package
} else {
name = "?" // 2) use unexported name "?" to force package export
}
} else {
// 3) indicate alias and export name as is
// (this requires an extra "@" but this is a rare case)
p.string("@")
}
}
p.string(name)
if name != "" && !ast.IsExported(name) {
p.pkg(f.Pkg(), false)
}
}
func basetypeName(typ types.Type) string {
switch typ := deref(typ).(type) {
case *types.Basic:
return typ.Name()
case *types.Named:
return typ.Obj().Name()
default:
return "" // unnamed type
}
}
func (p *exporter) paramList(params *types.Tuple, variadic bool) {
// use negative length to indicate unnamed parameters
// (look at the first parameter only since either all
// names are present or all are absent)
n := params.Len()
if n > 0 && params.At(0).Name() == "" {
n = -n
}
p.int(n)
for i := 0; i < params.Len(); i++ {
q := params.At(i)
t := q.Type()
if variadic && i == params.Len()-1 {
t = &dddSlice{t.(*types.Slice).Elem()}
}
p.typ(t)
if n > 0 {
name := q.Name()
p.string(name)
if name != "_" {
p.pkg(q.Pkg(), false)
}
}
p.string("") // no compiler-specific info
}
}
func (p *exporter) value(x constant.Value) {
if trace {
p.tracef("= ")
}
switch x.Kind() {
case constant.Bool:
tag := falseTag
if constant.BoolVal(x) {
tag = trueTag
}
p.tag(tag)
case constant.Int:
if v, exact := constant.Int64Val(x); exact {
// common case: x fits into an int64 - use compact encoding
p.tag(int64Tag)
p.int64(v)
return
}
// uncommon case: large x - use float encoding
// (powers of 2 will be encoded efficiently with exponent)
p.tag(floatTag)
p.float(constant.ToFloat(x))
case constant.Float:
p.tag(floatTag)
p.float(x)
case constant.Complex:
p.tag(complexTag)
p.float(constant.Real(x))
p.float(constant.Imag(x))
case constant.String:
p.tag(stringTag)
p.string(constant.StringVal(x))
case constant.Unknown:
// package contains type errors
p.tag(unknownTag)
default:
panic(internalErrorf("unexpected value %v (%T)", x, x))
}
}
func (p *exporter) float(x constant.Value) {
if x.Kind() != constant.Float {
panic(internalErrorf("unexpected constant %v, want float", x))
}
// extract sign (there is no -0)
sign := constant.Sign(x)
if sign == 0 {
// x == 0
p.int(0)
return
}
// x != 0
var f big.Float
if v, exact := constant.Float64Val(x); exact {
// float64
f.SetFloat64(v)
} else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int {
// TODO(gri): add big.Rat accessor to constant.Value.
r := valueToRat(num)
f.SetRat(r.Quo(r, valueToRat(denom)))
} else {
// Value too large to represent as a fraction => inaccessible.
// TODO(gri): add big.Float accessor to constant.Value.
f.SetFloat64(math.MaxFloat64) // FIXME
}
// extract exponent such that 0.5 <= m < 1.0
var m big.Float
exp := f.MantExp(&m)
// extract mantissa as *big.Int
// - set exponent large enough so mant satisfies mant.IsInt()
// - get *big.Int from mant
m.SetMantExp(&m, int(m.MinPrec()))
mant, acc := m.Int(nil)
if acc != big.Exact {
panic(internalError("internal error"))
}
p.int(sign)
p.int(exp)
p.string(string(mant.Bytes()))
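// As a worked example, 6.0 == 0.75 * 2**3: sign is 1, exp is 3, and the
// mantissa 0.75, scaled up by its 2-bit precision, becomes the integer 3,
// written as the single byte 0x03.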
}
func valueToRat(x constant.Value) *big.Rat {
// Convert little-endian to big-endian.
// I can't believe this is necessary.
bytes := constant.Bytes(x)
for i := 0; i < len(bytes)/2; i++ {
bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i]
}
return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes))
}
func (p *exporter) bool(b bool) bool {
if trace {
p.tracef("[")
defer p.tracef("= %v] ", b)
}
x := 0
if b {
x = 1
}
p.int(x)
return b
}
// ----------------------------------------------------------------------------
// Low-level encoders
func (p *exporter) index(marker byte, index int) {
if index < 0 {
panic(internalError("invalid index < 0"))
}
if debugFormat {
p.marker('t')
}
if trace {
p.tracef("%c%d ", marker, index)
}
p.rawInt64(int64(index))
}
func (p *exporter) tag(tag int) {
if tag >= 0 {
panic(internalError("invalid tag >= 0"))
}
if debugFormat {
p.marker('t')
}
if trace {
p.tracef("%s ", tagString[-tag])
}
p.rawInt64(int64(tag))
}
func (p *exporter) int(x int) {
p.int64(int64(x))
}
func (p *exporter) int64(x int64) {
if debugFormat {
p.marker('i')
}
if trace {
p.tracef("%d ", x)
}
p.rawInt64(x)
}
func (p *exporter) string(s string) {
if debugFormat {
p.marker('s')
}
if trace {
p.tracef("%q ", s)
}
// if we saw the string before, write its index (>= 0)
// (the empty string is mapped to 0)
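// For example, the first occurrence of "abc" is written as -3 followed by the
// bytes 'a', 'b', 'c'; later occurrences are written as the non-negative index
// assigned on first sight.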
if i, ok := p.strIndex[s]; ok {
p.rawInt64(int64(i))
return
}
// otherwise, remember string and write its negative length and bytes
p.strIndex[s] = len(p.strIndex)
p.rawInt64(-int64(len(s)))
for i := 0; i < len(s); i++ {
p.rawByte(s[i])
}
}
// marker emits a marker byte and position information which makes
// it easy for a reader to detect if it is "out of sync". Used only
// when debugFormat is enabled.
func (p *exporter) marker(m byte) {
p.rawByte(m)
// Enable this for help tracking down the location
// of an incorrect marker when running in debugFormat.
if false && trace {
p.tracef("#%d ", p.written)
}
p.rawInt64(int64(p.written))
}
// rawInt64 should only be used by low-level encoders.
func (p *exporter) rawInt64(x int64) {
var tmp [binary.MaxVarintLen64]byte
n := binary.PutVarint(tmp[:], x)
for i := 0; i < n; i++ {
p.rawByte(tmp[i])
}
}
// rawStringln should only be used to emit the initial version string.
func (p *exporter) rawStringln(s string) {
for i := 0; i < len(s); i++ {
p.rawByte(s[i])
}
p.rawByte('\n')
}
// rawByte is the bottleneck interface to write to p.out.
// rawByte escapes b as follows (any encoding that
// hides '$' would do):
//
// '$' => '|' 'S'
// '|' => '|' '|'
//
// Necessary so other tools can find the end of the
// export data by searching for "$$".
// rawByte should only be used by low-level encoders.
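// For example, the byte sequence "a$b" is emitted as "a|Sb" and "x|y" as
// "x||y", so an unescaped "$$" can only ever appear as the end marker.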
func (p *exporter) rawByte(b byte) {
switch b {
case '$':
// write '$' as '|' 'S'
b = 'S'
fallthrough
case '|':
// write '|' as '|' '|'
p.out.WriteByte('|')
p.written++
}
p.out.WriteByte(b)
p.written++
}
// tracef is like fmt.Printf but it rewrites the format string
// to take care of indentation.
func (p *exporter) tracef(format string, args ...interface{}) {
if strings.ContainsAny(format, "<>\n") {
var buf bytes.Buffer
for i := 0; i < len(format); i++ {
// no need to deal with runes
ch := format[i]
switch ch {
case '>':
p.indent++
continue
case '<':
p.indent--
continue
}
buf.WriteByte(ch)
if ch == '\n' {
for j := p.indent; j > 0; j-- {
buf.WriteString(". ")
}
}
}
format = buf.String()
}
fmt.Printf(format, args...)
}
// Debugging support.
// (tagString is only used when tracing is enabled)
var tagString = [...]string{
// Packages
-packageTag: "package",
// Types
-namedTag: "named type",
-arrayTag: "array",
-sliceTag: "slice",
-dddTag: "ddd",
-structTag: "struct",
-pointerTag: "pointer",
-signatureTag: "signature",
-interfaceTag: "interface",
-mapTag: "map",
-chanTag: "chan",
// Values
-falseTag: "false",
-trueTag: "true",
-int64Tag: "int64",
-floatTag: "float",
-fractionTag: "fraction",
-complexTag: "complex",
-stringTag: "string",
-unknownTag: "unknown",
// Type aliases
-aliasTag: "alias",
}

File diff suppressed because it is too large

View File

@ -0,0 +1,93 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file is a copy of $GOROOT/src/go/internal/gcimporter/exportdata.go.
// This file implements FindExportData.
package gcimporter
import (
"bufio"
"fmt"
"io"
"strconv"
"strings"
)
func readGopackHeader(r *bufio.Reader) (name string, size int, err error) {
// See $GOROOT/include/ar.h.
hdr := make([]byte, 16+12+6+6+8+10+2)
_, err = io.ReadFull(r, hdr)
if err != nil {
return
}
// leave for debugging
if false {
fmt.Printf("header: %s", hdr)
}
s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10]))
size, err = strconv.Atoi(s)
if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' {
err = fmt.Errorf("invalid archive header")
return
}
name = strings.TrimSpace(string(hdr[:16]))
return
}
// FindExportData positions the reader r at the beginning of the
// export data section of an underlying GC-created object/archive
// file by reading from it. The reader must be positioned at the
// start of the file before calling this function. The hdr result
// is the string before the export data, either "$$" or "$$B".
//
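// A minimal usage sketch (the archive name below is hypothetical):
//
//	f, err := os.Open("somepkg.a")
//	if err != nil {
//		// handle error
//	}
//	hdr, err := FindExportData(bufio.NewReader(f))
//	// on success the reader is positioned at the export data; hdr is "$$" or "$$B"
//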
func FindExportData(r *bufio.Reader) (hdr string, err error) {
// Read first line to make sure this is an object file.
line, err := r.ReadSlice('\n')
if err != nil {
err = fmt.Errorf("can't find export data (%v)", err)
return
}
if string(line) == "!<arch>\n" {
// Archive file. Scan to __.PKGDEF.
var name string
if name, _, err = readGopackHeader(r); err != nil {
return
}
// First entry should be __.PKGDEF.
if name != "__.PKGDEF" {
err = fmt.Errorf("go archive is missing __.PKGDEF")
return
}
// Read first line of __.PKGDEF data, so that line
// is once again the first line of the input.
if line, err = r.ReadSlice('\n'); err != nil {
err = fmt.Errorf("can't find export data (%v)", err)
return
}
}
// Now at __.PKGDEF in archive or still at beginning of file.
// Either way, line should begin with "go object ".
if !strings.HasPrefix(string(line), "go object ") {
err = fmt.Errorf("not a Go object file")
return
}
// Skip over object header to export data.
// Begins after first line starting with $$.
for line[0] != '$' {
if line, err = r.ReadSlice('\n'); err != nil {
err = fmt.Errorf("can't find export data (%v)", err)
return
}
}
hdr = string(line)
return
}

File diff suppressed because it is too large

View File

@ -0,0 +1,598 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Indexed package import.
// See cmd/compile/internal/gc/iexport.go for the export data format.
// This file is a copy of $GOROOT/src/go/internal/gcimporter/iimport.go.
package gcimporter
import (
"bytes"
"encoding/binary"
"fmt"
"go/constant"
"go/token"
"go/types"
"io"
"sort"
)
type intReader struct {
*bytes.Reader
path string
}
func (r *intReader) int64() int64 {
i, err := binary.ReadVarint(r.Reader)
if err != nil {
errorf("import %q: read varint error: %v", r.path, err)
}
return i
}
func (r *intReader) uint64() uint64 {
i, err := binary.ReadUvarint(r.Reader)
if err != nil {
errorf("import %q: read varint error: %v", r.path, err)
}
return i
}
const predeclReserved = 32
type itag uint64
const (
// Types
definedType itag = iota
pointerType
sliceType
arrayType
chanType
mapType
signatureType
structType
interfaceType
)
// IImportData imports a package from the serialized package data
// and returns the number of bytes consumed and a reference to the package.
// If the export data version is not recognized or the format is otherwise
// compromised, an error is returned.
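// A minimal calling sketch (assuming data already holds the indexed export
// payload, i.e. the bytes that follow the leading 'i' format byte):
//
//	fset := token.NewFileSet()
//	imports := make(map[string]*types.Package)
//	_, pkg, err := IImportData(fset, imports, data, "example.com/somepkg")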
func IImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) {
const currentVersion = 0
version := -1
defer func() {
if e := recover(); e != nil {
if version > currentVersion {
err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e)
} else {
err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e)
}
}
}()
r := &intReader{bytes.NewReader(data), path}
version = int(r.uint64())
switch version {
case currentVersion:
default:
errorf("unknown iexport format version %d", version)
}
sLen := int64(r.uint64())
dLen := int64(r.uint64())
whence, _ := r.Seek(0, io.SeekCurrent)
stringData := data[whence : whence+sLen]
declData := data[whence+sLen : whence+sLen+dLen]
r.Seek(sLen+dLen, io.SeekCurrent)
p := iimporter{
ipath: path,
stringData: stringData,
stringCache: make(map[uint64]string),
pkgCache: make(map[uint64]*types.Package),
declData: declData,
pkgIndex: make(map[*types.Package]map[string]uint64),
typCache: make(map[uint64]types.Type),
fake: fakeFileSet{
fset: fset,
files: make(map[string]*token.File),
},
}
for i, pt := range predeclared {
p.typCache[uint64(i)] = pt
}
pkgList := make([]*types.Package, r.uint64())
for i := range pkgList {
pkgPathOff := r.uint64()
pkgPath := p.stringAt(pkgPathOff)
pkgName := p.stringAt(r.uint64())
_ = r.uint64() // package height; unused by go/types
if pkgPath == "" {
pkgPath = path
}
pkg := imports[pkgPath]
if pkg == nil {
pkg = types.NewPackage(pkgPath, pkgName)
imports[pkgPath] = pkg
} else if pkg.Name() != pkgName {
errorf("conflicting names %s and %s for package %q", pkg.Name(), pkgName, path)
}
p.pkgCache[pkgPathOff] = pkg
nameIndex := make(map[string]uint64)
for nSyms := r.uint64(); nSyms > 0; nSyms-- {
name := p.stringAt(r.uint64())
nameIndex[name] = r.uint64()
}
p.pkgIndex[pkg] = nameIndex
pkgList[i] = pkg
}
localpkg := pkgList[0]
names := make([]string, 0, len(p.pkgIndex[localpkg]))
for name := range p.pkgIndex[localpkg] {
names = append(names, name)
}
sort.Strings(names)
for _, name := range names {
p.doDecl(localpkg, name)
}
for _, typ := range p.interfaceList {
typ.Complete()
}
// record all referenced packages as imports
list := append(([]*types.Package)(nil), pkgList[1:]...)
sort.Sort(byPath(list))
localpkg.SetImports(list)
// package was imported completely and without errors
localpkg.MarkComplete()
consumed, _ := r.Seek(0, io.SeekCurrent)
return int(consumed), localpkg, nil
}
type iimporter struct {
ipath string
stringData []byte
stringCache map[uint64]string
pkgCache map[uint64]*types.Package
declData []byte
pkgIndex map[*types.Package]map[string]uint64
typCache map[uint64]types.Type
fake fakeFileSet
interfaceList []*types.Interface
}
func (p *iimporter) doDecl(pkg *types.Package, name string) {
// See if we've already imported this declaration.
if obj := pkg.Scope().Lookup(name); obj != nil {
return
}
off, ok := p.pkgIndex[pkg][name]
if !ok {
errorf("%v.%v not in index", pkg, name)
}
r := &importReader{p: p, currPkg: pkg}
r.declReader.Reset(p.declData[off:])
r.obj(name)
}
func (p *iimporter) stringAt(off uint64) string {
if s, ok := p.stringCache[off]; ok {
return s
}
slen, n := binary.Uvarint(p.stringData[off:])
if n <= 0 {
errorf("varint failed")
}
spos := off + uint64(n)
s := string(p.stringData[spos : spos+slen])
p.stringCache[off] = s
return s
}
func (p *iimporter) pkgAt(off uint64) *types.Package {
if pkg, ok := p.pkgCache[off]; ok {
return pkg
}
path := p.stringAt(off)
errorf("missing package %q in %q", path, p.ipath)
return nil
}
func (p *iimporter) typAt(off uint64, base *types.Named) types.Type {
if t, ok := p.typCache[off]; ok && (base == nil || !isInterface(t)) {
return t
}
if off < predeclReserved {
errorf("predeclared type missing from cache: %v", off)
}
r := &importReader{p: p}
r.declReader.Reset(p.declData[off-predeclReserved:])
t := r.doType(base)
if base == nil || !isInterface(t) {
p.typCache[off] = t
}
return t
}
type importReader struct {
p *iimporter
declReader bytes.Reader
currPkg *types.Package
prevFile string
prevLine int64
}
func (r *importReader) obj(name string) {
tag := r.byte()
pos := r.pos()
switch tag {
case 'A':
typ := r.typ()
r.declare(types.NewTypeName(pos, r.currPkg, name, typ))
case 'C':
typ, val := r.value()
r.declare(types.NewConst(pos, r.currPkg, name, typ, val))
case 'F':
sig := r.signature(nil)
r.declare(types.NewFunc(pos, r.currPkg, name, sig))
case 'T':
// Types can be recursive. We need to set up a stub
// declaration before recursing.
obj := types.NewTypeName(pos, r.currPkg, name, nil)
named := types.NewNamed(obj, nil, nil)
r.declare(obj)
underlying := r.p.typAt(r.uint64(), named).Underlying()
named.SetUnderlying(underlying)
if !isInterface(underlying) {
for n := r.uint64(); n > 0; n-- {
mpos := r.pos()
mname := r.ident()
recv := r.param()
msig := r.signature(recv)
named.AddMethod(types.NewFunc(mpos, r.currPkg, mname, msig))
}
}
case 'V':
typ := r.typ()
r.declare(types.NewVar(pos, r.currPkg, name, typ))
default:
errorf("unexpected tag: %v", tag)
}
}
func (r *importReader) declare(obj types.Object) {
obj.Pkg().Scope().Insert(obj)
}
func (r *importReader) value() (typ types.Type, val constant.Value) {
typ = r.typ()
switch b := typ.Underlying().(*types.Basic); b.Info() & types.IsConstType {
case types.IsBoolean:
val = constant.MakeBool(r.bool())
case types.IsString:
val = constant.MakeString(r.string())
case types.IsInteger:
val = r.mpint(b)
case types.IsFloat:
val = r.mpfloat(b)
case types.IsComplex:
re := r.mpfloat(b)
im := r.mpfloat(b)
val = constant.BinaryOp(re, token.ADD, constant.MakeImag(im))
default:
errorf("unexpected type %v", typ) // panics
panic("unreachable")
}
return
}
func intSize(b *types.Basic) (signed bool, maxBytes uint) {
if (b.Info() & types.IsUntyped) != 0 {
return true, 64
}
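// The float cases presumably reflect the mantissa width of the encoded value
// (the mantissa travels as an integer, see mpint/mpfloat): about 24 bits
// (3 bytes) for float32/complex64 and 53 bits (7 bytes) for float64/complex128.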
switch b.Kind() {
case types.Float32, types.Complex64:
return true, 3
case types.Float64, types.Complex128:
return true, 7
}
signed = (b.Info() & types.IsUnsigned) == 0
switch b.Kind() {
case types.Int8, types.Uint8:
maxBytes = 1
case types.Int16, types.Uint16:
maxBytes = 2
case types.Int32, types.Uint32:
maxBytes = 4
default:
maxBytes = 8
}
return
}
func (r *importReader) mpint(b *types.Basic) constant.Value {
signed, maxBytes := intSize(b)
maxSmall := 256 - maxBytes
if signed {
maxSmall = 256 - 2*maxBytes
}
if maxBytes == 1 {
maxSmall = 256
}
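// Small values are encoded directly in a single byte; for a signed kind the
// byte 4, for instance, decodes to 2 and the byte 5 to -3 (value = n>>1,
// bitwise-complemented when the low bit of n is set).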
n, _ := r.declReader.ReadByte()
if uint(n) < maxSmall {
v := int64(n)
if signed {
v >>= 1
if n&1 != 0 {
v = ^v
}
}
return constant.MakeInt64(v)
}
v := -n
if signed {
v = -(n &^ 1) >> 1
}
if v < 1 || uint(v) > maxBytes {
errorf("weird decoding: %v, %v => %v", n, signed, v)
}
buf := make([]byte, v)
io.ReadFull(&r.declReader, buf)
// convert to little endian
// TODO(gri) go/constant should have a more direct conversion function
// (e.g., once it supports a big.Float based implementation)
for i, j := 0, len(buf)-1; i < j; i, j = i+1, j-1 {
buf[i], buf[j] = buf[j], buf[i]
}
x := constant.MakeFromBytes(buf)
if signed && n&1 != 0 {
x = constant.UnaryOp(token.SUB, x, 0)
}
return x
}
func (r *importReader) mpfloat(b *types.Basic) constant.Value {
x := r.mpint(b)
if constant.Sign(x) == 0 {
return x
}
exp := r.int64()
switch {
case exp > 0:
x = constant.Shift(x, token.SHL, uint(exp))
case exp < 0:
d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp))
x = constant.BinaryOp(x, token.QUO, d)
}
return x
}
func (r *importReader) ident() string {
return r.string()
}
func (r *importReader) qualifiedIdent() (*types.Package, string) {
name := r.string()
pkg := r.pkg()
return pkg, name
}
func (r *importReader) pos() token.Pos {
delta := r.int64()
if delta != deltaNewFile {
r.prevLine += delta
} else if l := r.int64(); l == -1 {
r.prevLine += deltaNewFile
} else {
r.prevFile = r.string()
r.prevLine = l
}
if r.prevFile == "" && r.prevLine == 0 {
return token.NoPos
}
return r.p.fake.pos(r.prevFile, int(r.prevLine))
}
func (r *importReader) typ() types.Type {
return r.p.typAt(r.uint64(), nil)
}
func isInterface(t types.Type) bool {
_, ok := t.(*types.Interface)
return ok
}
func (r *importReader) pkg() *types.Package { return r.p.pkgAt(r.uint64()) }
func (r *importReader) string() string { return r.p.stringAt(r.uint64()) }
func (r *importReader) doType(base *types.Named) types.Type {
switch k := r.kind(); k {
default:
errorf("unexpected kind tag in %q: %v", r.p.ipath, k)
return nil
case definedType:
pkg, name := r.qualifiedIdent()
r.p.doDecl(pkg, name)
return pkg.Scope().Lookup(name).(*types.TypeName).Type()
case pointerType:
return types.NewPointer(r.typ())
case sliceType:
return types.NewSlice(r.typ())
case arrayType:
n := r.uint64()
return types.NewArray(r.typ(), int64(n))
case chanType:
dir := chanDir(int(r.uint64()))
return types.NewChan(dir, r.typ())
case mapType:
return types.NewMap(r.typ(), r.typ())
case signatureType:
r.currPkg = r.pkg()
return r.signature(nil)
case structType:
r.currPkg = r.pkg()
fields := make([]*types.Var, r.uint64())
tags := make([]string, len(fields))
for i := range fields {
fpos := r.pos()
fname := r.ident()
ftyp := r.typ()
emb := r.bool()
tag := r.string()
fields[i] = types.NewField(fpos, r.currPkg, fname, ftyp, emb)
tags[i] = tag
}
return types.NewStruct(fields, tags)
case interfaceType:
r.currPkg = r.pkg()
embeddeds := make([]types.Type, r.uint64())
for i := range embeddeds {
_ = r.pos()
embeddeds[i] = r.typ()
}
methods := make([]*types.Func, r.uint64())
for i := range methods {
mpos := r.pos()
mname := r.ident()
// TODO(mdempsky): Matches bimport.go, but I
// don't agree with this.
var recv *types.Var
if base != nil {
recv = types.NewVar(token.NoPos, r.currPkg, "", base)
}
msig := r.signature(recv)
methods[i] = types.NewFunc(mpos, r.currPkg, mname, msig)
}
typ := newInterface(methods, embeddeds)
r.p.interfaceList = append(r.p.interfaceList, typ)
return typ
}
}
func (r *importReader) kind() itag {
return itag(r.uint64())
}
func (r *importReader) signature(recv *types.Var) *types.Signature {
params := r.paramList()
results := r.paramList()
variadic := params.Len() > 0 && r.bool()
return types.NewSignature(recv, params, results, variadic)
}
func (r *importReader) paramList() *types.Tuple {
xs := make([]*types.Var, r.uint64())
for i := range xs {
xs[i] = r.param()
}
return types.NewTuple(xs...)
}
func (r *importReader) param() *types.Var {
pos := r.pos()
name := r.ident()
typ := r.typ()
return types.NewParam(pos, r.currPkg, name, typ)
}
func (r *importReader) bool() bool {
return r.uint64() != 0
}
func (r *importReader) int64() int64 {
n, err := binary.ReadVarint(&r.declReader)
if err != nil {
errorf("readVarint: %v", err)
}
return n
}
func (r *importReader) uint64() uint64 {
n, err := binary.ReadUvarint(&r.declReader)
if err != nil {
errorf("readUvarint: %v", err)
}
return n
}
func (r *importReader) byte() byte {
x, err := r.declReader.ReadByte()
if err != nil {
errorf("declReader.ReadByte: %v", err)
}
return x
}

View File

@ -0,0 +1,21 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build !go1.11
package gcimporter
import "go/types"
func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface {
named := make([]*types.Named, len(embeddeds))
for i, e := range embeddeds {
var ok bool
named[i], ok = e.(*types.Named)
if !ok {
panic("embedding of non-defined interfaces in interfaces is not supported before Go 1.11")
}
}
return types.NewInterface(methods, named)
}

View File

@ -0,0 +1,13 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build go1.11
package gcimporter
import "go/types"
func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface {
return types.NewInterfaceType(methods, embeddeds)
}

16
vendor/modules.txt vendored Normal file
View File

@ -0,0 +1,16 @@
# github.com/client9/misspell v0.3.4
github.com/client9/misspell/cmd/misspell
github.com/client9/misspell
# github.com/fzipp/gocyclo v0.0.0-20150627053110-6acd4345c835
github.com/fzipp/gocyclo
# github.com/gordonklaus/ineffassign v0.0.0-20180909121442-1003c8bd00dc
github.com/gordonklaus/ineffassign
# github.com/karalabe/xgo v0.0.0-20181007145344-72da7d1d3970
github.com/karalabe/xgo
# golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3
golang.org/x/lint/golint
golang.org/x/lint
# golang.org/x/tools v0.0.0-20181128225727-c5b00d9557fd
golang.org/x/tools/go/ast/astutil
golang.org/x/tools/go/gcexportdata
golang.org/x/tools/go/internal/gcimporter