From 21694649838feb301841bf7d0cf3e4bf479e9a22 Mon Sep 17 00:00:00 2001 From: konrad Date: Wed, 16 Oct 2019 20:52:29 +0000 Subject: [PATCH] Task Attachments (#104) --- .gitignore | 4 +- Featurecreep.md | 9 +- Makefile | 2 +- config.yml.sample | 6 + docs/content/doc/setup/config.md | 6 + docs/content/doc/usage/errors.md | 2 + go.mod | 34 +- go.sum | 83 + pkg/cmd/cmd.go | 8 + pkg/config/config.go | 22 +- pkg/db/db.go | 60 + pkg/db/test_fixtures.go | 36 + pkg/files/db.go | 49 + pkg/files/error.go | 52 + pkg/files/filehandling.go | 97 + pkg/files/files.go | 104 ++ pkg/files/files_test.go | 131 ++ pkg/files/fixtures/files.yml | 5 + pkg/files/main_test.go | 29 + pkg/integrations/integrations.go | 11 +- pkg/integrations/task_test.go | 12 +- pkg/migration/20191008194238.go | 48 + pkg/migration/20191010131430.go | 50 + pkg/migration/migration.go | 8 +- pkg/models/error.go | 57 + pkg/models/fixtures/files.yml | 1 + pkg/models/fixtures/task_attachments.yml | 11 + pkg/models/main_test.go | 16 +- pkg/models/models.go | 19 +- pkg/models/task_attachment.go | 186 ++ pkg/models/task_attachment_rights.go | 46 + pkg/models/task_attachment_test.go | 152 ++ pkg/models/task_readall_test.go | 27 +- pkg/models/tasks.go | 33 + pkg/models/test_fixtures.go | 35 - pkg/models/unit_tests.go | 38 +- pkg/models/users_list_test.go | 3 +- pkg/routes/api/v1/info.go | 2 + pkg/routes/api/v1/list_by_namespace.go | 2 +- pkg/routes/api/v1/task_attachment.go | 143 ++ pkg/routes/routes.go | 12 +- pkg/swagger/docs.go | 559 +++--- pkg/swagger/swagger.json | 532 +++--- pkg/swagger/swagger.yaml | 427 +++-- vendor/github.com/KyleBanks/depth/.gitignore | 16 + vendor/github.com/KyleBanks/depth/.travis.yml | 9 + vendor/github.com/KyleBanks/depth/LICENSE | 21 + vendor/github.com/KyleBanks/depth/Makefile | 32 + vendor/github.com/KyleBanks/depth/README.md | 232 +++ vendor/github.com/KyleBanks/depth/depth.go | 129 ++ vendor/github.com/KyleBanks/depth/pkg.go | 184 ++ vendor/github.com/alecthomas/template/go.mod | 1 + .../cpuguy83/go-md2man/v2/LICENSE.md | 21 + .../cpuguy83/go-md2man/v2/md2man/md2man.go | 14 + .../cpuguy83/go-md2man/v2/md2man/roff.go | 345 ++++ .../go-openapi/jsonpointer/.travis.yml | 10 +- .../github.com/go-openapi/jsonpointer/go.mod | 11 +- .../github.com/go-openapi/jsonpointer/go.sum | 27 +- .../go-openapi/jsonpointer/pointer.go | 2 +- .../go-openapi/jsonreference/.travis.yml | 11 +- .../go-openapi/jsonreference/go.mod | 17 +- .../go-openapi/jsonreference/go.sum | 50 +- .../github.com/go-openapi/spec/.golangci.yml | 2 +- vendor/github.com/go-openapi/spec/.travis.yml | 13 +- vendor/github.com/go-openapi/spec/bindata.go | 8 +- vendor/github.com/go-openapi/spec/go.mod | 25 +- vendor/github.com/go-openapi/spec/go.sum | 64 +- vendor/github.com/go-openapi/spec/items.go | 7 + vendor/github.com/go-openapi/spec/schema.go | 7 + .../go-openapi/spec/schema_loader.go | 1 + vendor/github.com/go-openapi/spec/swagger.go | 124 ++ vendor/github.com/go-openapi/swag/.gitignore | 1 + vendor/github.com/go-openapi/swag/.travis.yml | 11 +- vendor/github.com/go-openapi/swag/README.md | 1 - vendor/github.com/go-openapi/swag/convert.go | 7 +- vendor/github.com/go-openapi/swag/doc.go | 1 - vendor/github.com/go-openapi/swag/go.mod | 13 +- vendor/github.com/go-openapi/swag/go.sum | 21 +- vendor/github.com/go-openapi/swag/json.go | 2 +- .../github.com/go-openapi/swag/name_lexem.go | 87 + vendor/github.com/go-openapi/swag/split.go | 262 +++ vendor/github.com/go-openapi/swag/util.go | 111 +- vendor/github.com/go-openapi/swag/yaml.go | 47 +- 
.../golang/protobuf/proto/properties.go | 5 +- vendor/github.com/labstack/echo/v4/go.sum | 1 + .../mailru/easyjson/jlexer/lexer.go | 2 +- vendor/github.com/mattn/go-colorable/go.sum | 2 + vendor/github.com/mattn/go-isatty/go.mod | 4 +- vendor/github.com/mattn/go-isatty/go.sum | 6 +- .../mattn/go-isatty/isatty_plan9.go | 22 + .../mattn/go-isatty/isatty_windows.go | 39 +- vendor/github.com/prometheus/procfs/go.mod | 2 + .../russross/blackfriday/v2/.gitignore | 8 + .../russross/blackfriday/v2/.travis.yml | 17 + .../russross/blackfriday/v2/LICENSE.txt | 29 + .../russross/blackfriday/v2/README.md | 291 +++ .../russross/blackfriday/v2/block.go | 1590 +++++++++++++++++ .../github.com/russross/blackfriday/v2/doc.go | 18 + .../github.com/russross/blackfriday/v2/esc.go | 34 + .../github.com/russross/blackfriday/v2/go.mod | 1 + .../russross/blackfriday/v2/html.go | 949 ++++++++++ .../russross/blackfriday/v2/inline.go | 1228 +++++++++++++ .../russross/blackfriday/v2/markdown.go | 950 ++++++++++ .../russross/blackfriday/v2/node.go | 354 ++++ .../russross/blackfriday/v2/smartypants.go | 457 +++++ .../sanitized_anchor_name/.travis.yml | 16 + .../shurcooL/sanitized_anchor_name/LICENSE | 21 + .../shurcooL/sanitized_anchor_name/README.md | 36 + .../shurcooL/sanitized_anchor_name/go.mod | 1 + .../shurcooL/sanitized_anchor_name/main.go | 29 + .../testify/assert/assertion_format.go | 82 + .../testify/assert/assertion_forward.go | 164 ++ .../testify/assert/assertion_order.go | 309 ++++ .../stretchr/testify/assert/assertions.go | 96 +- vendor/github.com/swaggo/swag/.gitignore | 6 + vendor/github.com/swaggo/swag/.travis.yml | 3 +- vendor/github.com/swaggo/swag/Makefile | 31 +- vendor/github.com/swaggo/swag/README.md | 110 +- .../github.com/swaggo/swag/cmd/swag/main.go | 35 +- vendor/github.com/swaggo/swag/debug.go | 4 +- vendor/github.com/swaggo/swag/gen/gen.go | 183 +- vendor/github.com/swaggo/swag/go.mod | 31 +- vendor/github.com/swaggo/swag/go.sum | 89 +- vendor/github.com/swaggo/swag/operation.go | 292 +-- vendor/github.com/swaggo/swag/parser.go | 911 ++++++---- vendor/github.com/swaggo/swag/property.go | 32 +- vendor/github.com/swaggo/swag/schema.go | 7 +- vendor/github.com/swaggo/swag/version.go | 2 +- vendor/github.com/urfave/cli/.gitignore | 1 + vendor/github.com/urfave/cli/.travis.yml | 40 +- vendor/github.com/urfave/cli/CHANGELOG.md | 73 +- .../github.com/urfave/cli/CODE_OF_CONDUCT.md | 74 + vendor/github.com/urfave/cli/CONTRIBUTING.md | 18 + vendor/github.com/urfave/cli/README.md | 316 +++- vendor/github.com/urfave/cli/app.go | 113 +- vendor/github.com/urfave/cli/appveyor.yml | 21 +- vendor/github.com/urfave/cli/build.go | 164 ++ vendor/github.com/urfave/cli/category.go | 2 +- vendor/github.com/urfave/cli/cli.go | 2 +- vendor/github.com/urfave/cli/command.go | 149 +- vendor/github.com/urfave/cli/context.go | 93 +- vendor/github.com/urfave/cli/docs.go | 148 ++ vendor/github.com/urfave/cli/fish.go | 194 ++ vendor/github.com/urfave/cli/flag-types.json | 93 - vendor/github.com/urfave/cli/flag.go | 659 ++----- vendor/github.com/urfave/cli/flag_bool.go | 109 ++ vendor/github.com/urfave/cli/flag_bool_t.go | 110 ++ vendor/github.com/urfave/cli/flag_duration.go | 106 ++ vendor/github.com/urfave/cli/flag_float64.go | 106 ++ .../github.com/urfave/cli/flag_generated.go | 627 ------- vendor/github.com/urfave/cli/flag_generic.go | 110 ++ vendor/github.com/urfave/cli/flag_int.go | 105 ++ vendor/github.com/urfave/cli/flag_int64.go | 106 ++ .../github.com/urfave/cli/flag_int64_slice.go | 141 ++ 
.../github.com/urfave/cli/flag_int_slice.go | 142 ++ vendor/github.com/urfave/cli/flag_string.go | 98 + .../urfave/cli/flag_string_slice.go | 138 ++ vendor/github.com/urfave/cli/flag_uint.go | 106 ++ vendor/github.com/urfave/cli/flag_uint64.go | 106 ++ vendor/github.com/urfave/cli/funcs.go | 16 + .../github.com/urfave/cli/generate-flag-types | 255 --- vendor/github.com/urfave/cli/go.mod | 9 + vendor/github.com/urfave/cli/go.sum | 14 + vendor/github.com/urfave/cli/help.go | 182 +- vendor/github.com/urfave/cli/parse.go | 80 + vendor/github.com/urfave/cli/runtests | 122 -- vendor/github.com/urfave/cli/sort.go | 29 + vendor/github.com/urfave/cli/template.go | 121 ++ vendor/golang.org/x/crypto/acme/acme.go | 317 +++- .../x/crypto/acme/autocert/autocert.go | 314 ++-- .../x/crypto/acme/autocert/cache.go | 8 +- vendor/golang.org/x/crypto/acme/http.go | 28 +- vendor/golang.org/x/crypto/acme/jws.go | 52 +- vendor/golang.org/x/crypto/acme/rfc8555.go | 392 ++++ vendor/golang.org/x/crypto/acme/types.go | 307 +++- .../golang.org/x/sys/unix/affinity_linux.go | 42 +- vendor/golang.org/x/sys/unix/dirent.go | 2 +- vendor/golang.org/x/sys/unix/endian_little.go | 2 +- vendor/golang.org/x/sys/unix/ioctl.go | 41 +- vendor/golang.org/x/sys/unix/mkall.sh | 4 +- vendor/golang.org/x/sys/unix/mkasm_darwin.go | 61 +- vendor/golang.org/x/sys/unix/mkerrors.sh | 53 +- vendor/golang.org/x/sys/unix/mksyscall.go | 7 +- vendor/golang.org/x/sys/unix/syscall_aix.go | 39 +- .../golang.org/x/sys/unix/syscall_aix_ppc.go | 4 + .../x/sys/unix/syscall_aix_ppc64.go | 4 + vendor/golang.org/x/sys/unix/syscall_bsd.go | 2 - .../x/sys/unix/syscall_darwin.1_12.go | 29 + .../x/sys/unix/syscall_darwin.1_13.go | 103 ++ .../golang.org/x/sys/unix/syscall_darwin.go | 40 +- .../x/sys/unix/syscall_darwin_386.1_11.go | 9 + .../x/sys/unix/syscall_darwin_386.go | 8 +- .../x/sys/unix/syscall_darwin_amd64.1_11.go | 9 + .../x/sys/unix/syscall_darwin_amd64.go | 8 +- .../x/sys/unix/syscall_darwin_arm.1_11.go | 11 + .../x/sys/unix/syscall_darwin_arm.go | 16 +- .../x/sys/unix/syscall_darwin_arm64.1_11.go | 11 + .../x/sys/unix/syscall_darwin_arm64.go | 16 +- .../x/sys/unix/syscall_darwin_libSystem.go | 2 + .../x/sys/unix/syscall_dragonfly.go | 41 +- .../x/sys/unix/syscall_dragonfly_amd64.go | 4 + .../golang.org/x/sys/unix/syscall_freebsd.go | 41 +- .../x/sys/unix/syscall_freebsd_386.go | 4 + .../x/sys/unix/syscall_freebsd_amd64.go | 4 + .../x/sys/unix/syscall_freebsd_arm.go | 4 + .../x/sys/unix/syscall_freebsd_arm64.go | 4 + vendor/golang.org/x/sys/unix/syscall_linux.go | 173 +- .../x/sys/unix/syscall_linux_386.go | 4 + .../x/sys/unix/syscall_linux_amd64.go | 4 + .../x/sys/unix/syscall_linux_arm.go | 4 + .../x/sys/unix/syscall_linux_arm64.go | 4 + .../x/sys/unix/syscall_linux_mips64x.go | 4 + .../x/sys/unix/syscall_linux_mipsx.go | 4 + .../x/sys/unix/syscall_linux_ppc64x.go | 4 + .../x/sys/unix/syscall_linux_riscv64.go | 4 + .../x/sys/unix/syscall_linux_s390x.go | 4 + .../x/sys/unix/syscall_linux_sparc64.go | 4 + .../golang.org/x/sys/unix/syscall_netbsd.go | 41 +- .../x/sys/unix/syscall_netbsd_386.go | 4 + .../x/sys/unix/syscall_netbsd_amd64.go | 4 + .../x/sys/unix/syscall_netbsd_arm.go | 4 + .../x/sys/unix/syscall_netbsd_arm64.go | 4 + .../golang.org/x/sys/unix/syscall_openbsd.go | 41 +- .../x/sys/unix/syscall_openbsd_386.go | 4 + .../x/sys/unix/syscall_openbsd_amd64.go | 4 + .../x/sys/unix/syscall_openbsd_arm.go | 4 + .../x/sys/unix/syscall_openbsd_arm64.go | 4 + .../golang.org/x/sys/unix/syscall_solaris.go | 32 +- .../x/sys/unix/syscall_solaris_amd64.go 
| 4 + .../x/sys/unix/zerrors_darwin_386.go | 3 +- .../x/sys/unix/zerrors_darwin_amd64.go | 3 +- .../x/sys/unix/zerrors_darwin_arm.go | 3 +- .../x/sys/unix/zerrors_darwin_arm64.go | 3 +- .../x/sys/unix/zerrors_dragonfly_amd64.go | 1 + .../x/sys/unix/zerrors_freebsd_386.go | 3 +- .../x/sys/unix/zerrors_freebsd_amd64.go | 3 +- .../x/sys/unix/zerrors_freebsd_arm.go | 3 +- .../x/sys/unix/zerrors_freebsd_arm64.go | 3 +- .../x/sys/unix/zerrors_linux_386.go | 151 +- .../x/sys/unix/zerrors_linux_amd64.go | 151 +- .../x/sys/unix/zerrors_linux_arm.go | 151 +- .../x/sys/unix/zerrors_linux_arm64.go | 153 +- .../x/sys/unix/zerrors_linux_mips.go | 151 +- .../x/sys/unix/zerrors_linux_mips64.go | 151 +- .../x/sys/unix/zerrors_linux_mips64le.go | 151 +- .../x/sys/unix/zerrors_linux_mipsle.go | 151 +- .../x/sys/unix/zerrors_linux_ppc64.go | 151 +- .../x/sys/unix/zerrors_linux_ppc64le.go | 151 +- .../x/sys/unix/zerrors_linux_riscv64.go | 151 +- .../x/sys/unix/zerrors_linux_s390x.go | 151 +- .../x/sys/unix/zerrors_linux_sparc64.go | 151 +- .../x/sys/unix/zerrors_netbsd_386.go | 3 +- .../x/sys/unix/zerrors_netbsd_amd64.go | 3 +- .../x/sys/unix/zerrors_netbsd_arm.go | 3 +- .../x/sys/unix/zerrors_netbsd_arm64.go | 3 +- .../x/sys/unix/zerrors_openbsd_386.go | 17 +- .../x/sys/unix/zerrors_openbsd_amd64.go | 6 +- .../x/sys/unix/zerrors_openbsd_arm.go | 11 +- .../x/sys/unix/zerrors_openbsd_arm64.go | 1 + .../x/sys/unix/zerrors_solaris_amd64.go | 3 +- .../x/sys/unix/zsyscall_darwin_386.1_11.go | 93 +- .../x/sys/unix/zsyscall_darwin_386.1_13.go | 41 + .../x/sys/unix/zsyscall_darwin_386.1_13.s | 12 + .../x/sys/unix/zsyscall_darwin_386.go | 114 +- .../x/sys/unix/zsyscall_darwin_386.s | 12 +- .../x/sys/unix/zsyscall_darwin_amd64.1_11.go | 93 +- .../x/sys/unix/zsyscall_darwin_amd64.1_13.go | 41 + .../x/sys/unix/zsyscall_darwin_amd64.1_13.s | 12 + .../x/sys/unix/zsyscall_darwin_amd64.go | 99 +- .../x/sys/unix/zsyscall_darwin_amd64.s | 10 +- .../x/sys/unix/zsyscall_darwin_arm.1_11.go | 33 +- .../x/sys/unix/zsyscall_darwin_arm.1_13.go | 41 + .../x/sys/unix/zsyscall_darwin_arm.1_13.s | 12 + .../x/sys/unix/zsyscall_darwin_arm.go | 56 +- .../x/sys/unix/zsyscall_darwin_arm.s | 6 +- .../x/sys/unix/zsyscall_darwin_arm64.1_11.go | 33 +- .../x/sys/unix/zsyscall_darwin_arm64.1_13.go | 41 + .../x/sys/unix/zsyscall_darwin_arm64.1_13.s | 12 + .../x/sys/unix/zsyscall_darwin_arm64.go | 56 +- .../x/sys/unix/zsyscall_darwin_arm64.s | 6 +- .../x/sys/unix/zsyscall_dragonfly_amd64.go | 5 +- .../x/sys/unix/zsyscall_freebsd_386.go | 5 +- .../x/sys/unix/zsyscall_freebsd_amd64.go | 45 +- .../x/sys/unix/zsyscall_freebsd_arm.go | 45 +- .../x/sys/unix/zsyscall_freebsd_arm64.go | 45 +- .../x/sys/unix/zsyscall_linux_386.go | 30 + .../x/sys/unix/zsyscall_linux_amd64.go | 30 + .../x/sys/unix/zsyscall_linux_arm.go | 30 + .../x/sys/unix/zsyscall_linux_arm64.go | 30 + .../x/sys/unix/zsyscall_linux_mips.go | 30 + .../x/sys/unix/zsyscall_linux_mips64.go | 30 + .../x/sys/unix/zsyscall_linux_mips64le.go | 30 + .../x/sys/unix/zsyscall_linux_mipsle.go | 30 + .../x/sys/unix/zsyscall_linux_ppc64.go | 30 + .../x/sys/unix/zsyscall_linux_ppc64le.go | 30 + .../x/sys/unix/zsyscall_linux_riscv64.go | 30 + .../x/sys/unix/zsyscall_linux_s390x.go | 30 + .../x/sys/unix/zsyscall_linux_sparc64.go | 30 + .../x/sys/unix/zsyscall_netbsd_386.go | 37 +- .../x/sys/unix/zsyscall_netbsd_amd64.go | 37 +- .../x/sys/unix/zsyscall_netbsd_arm.go | 37 +- .../x/sys/unix/zsyscall_netbsd_arm64.go | 37 +- .../x/sys/unix/zsyscall_openbsd_386.go | 37 +- .../x/sys/unix/zsyscall_openbsd_amd64.go | 
37 +- .../x/sys/unix/zsyscall_openbsd_arm.go | 37 +- .../x/sys/unix/zsyscall_openbsd_arm64.go | 37 +- .../x/sys/unix/zsyscall_solaris_amd64.go | 5 +- .../x/sys/unix/zsysnum_linux_386.go | 8 + .../x/sys/unix/zsysnum_linux_amd64.go | 8 + .../x/sys/unix/zsysnum_linux_arm.go | 8 + .../x/sys/unix/zsysnum_linux_arm64.go | 7 + .../x/sys/unix/zsysnum_linux_mips.go | 7 + .../x/sys/unix/zsysnum_linux_mips64.go | 7 + .../x/sys/unix/zsysnum_linux_mips64le.go | 7 + .../x/sys/unix/zsysnum_linux_mipsle.go | 7 + .../x/sys/unix/zsysnum_linux_ppc64.go | 8 + .../x/sys/unix/zsysnum_linux_ppc64le.go | 8 + .../x/sys/unix/zsysnum_linux_riscv64.go | 8 + .../x/sys/unix/zsysnum_linux_s390x.go | 8 + .../x/sys/unix/zsysnum_linux_sparc64.go | 7 + .../golang.org/x/sys/unix/ztypes_linux_386.go | 174 +- .../x/sys/unix/ztypes_linux_amd64.go | 175 +- .../golang.org/x/sys/unix/ztypes_linux_arm.go | 174 +- .../x/sys/unix/ztypes_linux_arm64.go | 175 +- .../x/sys/unix/ztypes_linux_mips.go | 174 +- .../x/sys/unix/ztypes_linux_mips64.go | 175 +- .../x/sys/unix/ztypes_linux_mips64le.go | 175 +- .../x/sys/unix/ztypes_linux_mipsle.go | 174 +- .../x/sys/unix/ztypes_linux_ppc64.go | 175 +- .../x/sys/unix/ztypes_linux_ppc64le.go | 175 +- .../x/sys/unix/ztypes_linux_riscv64.go | 176 +- .../x/sys/unix/ztypes_linux_s390x.go | 175 +- .../x/sys/unix/ztypes_linux_sparc64.go | 175 +- .../x/tools/go/buildutil/overlay.go | 2 +- .../x/tools/go/gcexportdata/gcexportdata.go | 2 +- .../x/tools/go/internal/gcimporter/bimport.go | 11 +- .../x/tools/go/internal/gcimporter/iexport.go | 9 +- .../x/tools/go/internal/gcimporter/iimport.go | 50 +- .../tools/go/internal/packagesdriver/sizes.go | 27 +- vendor/golang.org/x/tools/go/loader/loader.go | 10 +- .../x/tools/go/packages/external.go | 29 +- .../golang.org/x/tools/go/packages/golist.go | 187 +- .../x/tools/go/packages/golist_overlay.go | 42 +- .../x/tools/go/packages/packages.go | 118 +- .../x/tools/internal/gopathwalk/walk.go | 36 +- vendor/gopkg.in/yaml.v2/decode.go | 38 + vendor/gopkg.in/yaml.v2/resolve.go | 2 +- vendor/gopkg.in/yaml.v2/scannerc.go | 16 + vendor/modules.txt | 40 +- 349 files changed, 22540 insertions(+), 5267 deletions(-) create mode 100644 pkg/db/test_fixtures.go create mode 100644 pkg/files/db.go create mode 100644 pkg/files/error.go create mode 100644 pkg/files/filehandling.go create mode 100644 pkg/files/files.go create mode 100644 pkg/files/files_test.go create mode 100644 pkg/files/fixtures/files.yml create mode 100644 pkg/files/main_test.go create mode 100644 pkg/migration/20191008194238.go create mode 100644 pkg/migration/20191010131430.go create mode 120000 pkg/models/fixtures/files.yml create mode 100644 pkg/models/fixtures/task_attachments.yml create mode 100644 pkg/models/task_attachment.go create mode 100644 pkg/models/task_attachment_rights.go create mode 100644 pkg/models/task_attachment_test.go delete mode 100644 pkg/models/test_fixtures.go create mode 100644 pkg/routes/api/v1/task_attachment.go create mode 100644 vendor/github.com/KyleBanks/depth/.gitignore create mode 100644 vendor/github.com/KyleBanks/depth/.travis.yml create mode 100644 vendor/github.com/KyleBanks/depth/LICENSE create mode 100644 vendor/github.com/KyleBanks/depth/Makefile create mode 100644 vendor/github.com/KyleBanks/depth/README.md create mode 100644 vendor/github.com/KyleBanks/depth/depth.go create mode 100644 vendor/github.com/KyleBanks/depth/pkg.go create mode 100644 vendor/github.com/alecthomas/template/go.mod create mode 100644 vendor/github.com/cpuguy83/go-md2man/v2/LICENSE.md create 
mode 100644 vendor/github.com/cpuguy83/go-md2man/v2/md2man/md2man.go create mode 100644 vendor/github.com/cpuguy83/go-md2man/v2/md2man/roff.go create mode 100644 vendor/github.com/go-openapi/swag/name_lexem.go create mode 100644 vendor/github.com/go-openapi/swag/split.go create mode 100644 vendor/github.com/mattn/go-isatty/isatty_plan9.go create mode 100644 vendor/github.com/russross/blackfriday/v2/.gitignore create mode 100644 vendor/github.com/russross/blackfriday/v2/.travis.yml create mode 100644 vendor/github.com/russross/blackfriday/v2/LICENSE.txt create mode 100644 vendor/github.com/russross/blackfriday/v2/README.md create mode 100644 vendor/github.com/russross/blackfriday/v2/block.go create mode 100644 vendor/github.com/russross/blackfriday/v2/doc.go create mode 100644 vendor/github.com/russross/blackfriday/v2/esc.go create mode 100644 vendor/github.com/russross/blackfriday/v2/go.mod create mode 100644 vendor/github.com/russross/blackfriday/v2/html.go create mode 100644 vendor/github.com/russross/blackfriday/v2/inline.go create mode 100644 vendor/github.com/russross/blackfriday/v2/markdown.go create mode 100644 vendor/github.com/russross/blackfriday/v2/node.go create mode 100644 vendor/github.com/russross/blackfriday/v2/smartypants.go create mode 100644 vendor/github.com/shurcooL/sanitized_anchor_name/.travis.yml create mode 100644 vendor/github.com/shurcooL/sanitized_anchor_name/LICENSE create mode 100644 vendor/github.com/shurcooL/sanitized_anchor_name/README.md create mode 100644 vendor/github.com/shurcooL/sanitized_anchor_name/go.mod create mode 100644 vendor/github.com/shurcooL/sanitized_anchor_name/main.go create mode 100644 vendor/github.com/stretchr/testify/assert/assertion_order.go create mode 100644 vendor/github.com/urfave/cli/CODE_OF_CONDUCT.md create mode 100644 vendor/github.com/urfave/cli/CONTRIBUTING.md create mode 100644 vendor/github.com/urfave/cli/build.go create mode 100644 vendor/github.com/urfave/cli/docs.go create mode 100644 vendor/github.com/urfave/cli/fish.go delete mode 100644 vendor/github.com/urfave/cli/flag-types.json create mode 100644 vendor/github.com/urfave/cli/flag_bool.go create mode 100644 vendor/github.com/urfave/cli/flag_bool_t.go create mode 100644 vendor/github.com/urfave/cli/flag_duration.go create mode 100644 vendor/github.com/urfave/cli/flag_float64.go delete mode 100644 vendor/github.com/urfave/cli/flag_generated.go create mode 100644 vendor/github.com/urfave/cli/flag_generic.go create mode 100644 vendor/github.com/urfave/cli/flag_int.go create mode 100644 vendor/github.com/urfave/cli/flag_int64.go create mode 100644 vendor/github.com/urfave/cli/flag_int64_slice.go create mode 100644 vendor/github.com/urfave/cli/flag_int_slice.go create mode 100644 vendor/github.com/urfave/cli/flag_string.go create mode 100644 vendor/github.com/urfave/cli/flag_string_slice.go create mode 100644 vendor/github.com/urfave/cli/flag_uint.go create mode 100644 vendor/github.com/urfave/cli/flag_uint64.go delete mode 100644 vendor/github.com/urfave/cli/generate-flag-types create mode 100644 vendor/github.com/urfave/cli/go.mod create mode 100644 vendor/github.com/urfave/cli/go.sum create mode 100644 vendor/github.com/urfave/cli/parse.go delete mode 100644 vendor/github.com/urfave/cli/runtests create mode 100644 vendor/github.com/urfave/cli/sort.go create mode 100644 vendor/github.com/urfave/cli/template.go create mode 100644 vendor/golang.org/x/crypto/acme/rfc8555.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_darwin.1_12.go create mode 100644 
vendor/golang.org/x/sys/unix/syscall_darwin.1_13.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_darwin_386.1_11.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_darwin_amd64.1_11.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_darwin_arm.1_11.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_darwin_arm64.1_11.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_darwin_386.1_13.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_darwin_386.1_13.s create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_darwin_amd64.1_13.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_darwin_amd64.1_13.s create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_darwin_arm.1_13.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_darwin_arm.1_13.s create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_darwin_arm64.1_13.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_darwin_arm64.1_13.s diff --git a/.gitignore b/.gitignore index a4207dcba..5d2d20e22 100644 --- a/.gitignore +++ b/.gitignore @@ -17,4 +17,6 @@ debian/ logs/ docs/public/ docs/resources/ -pkg/static/templates_vfsdata.go \ No newline at end of file +pkg/static/templates_vfsdata.go +files/ +!pkg/files/ diff --git a/Featurecreep.md b/Featurecreep.md index ac4214124..1ab69174a 100644 --- a/Featurecreep.md +++ b/Featurecreep.md @@ -212,7 +212,13 @@ Sorry for some of them being in German, I'll tranlate them at some point. * [x] "One endpoint to rule them all" -> Array-addable * [x] Description should be longtext * [x] Percent done - For now just a float, may later depend on how many sub tasks are done or so -* [ ] Attachments +* [x] Attachments + * Extra file handling package + * Files are saved "somewhere" (configurable) using their ID -> now only fs, later other things, based on afero + * A files table which holds metadata like filename, created by etc + * The files sub package should have methods to return files as byte slice or so + * -> The concrete implementation (in this case task attachments) handles the rights for a file + * task attachments are just a n:m between a task and a file id * [x] Related tasks -> settable with a "kind" of relation like blocked, subtask, or just related or so * [x] Should be possible to relate tasks which are not in the same list * [x] Replace the old subtask implementation @@ -270,6 +276,7 @@ Sorry for some of them being in German, I'll tranlate them at some point. * [ ] IMAP-Integration -> Man schickt eine email an Vikunja und es macht daraus dann nen task -> Achtung missbrauchsmöglichkeiten * [ ] In und Out webhooks, mit Templates vom Payload * [ ] Some kind of milestones for tasks +* [ ] Global/Per Namespace labels * [ ] Get Weekly/Daily summary of the tasks to come per mail * [ ] Boards (Boards) * [ ] Sprints diff --git a/Makefile b/Makefile index 53b81a2d3..e05555d7b 100644 --- a/Makefile +++ b/Makefile @@ -214,7 +214,7 @@ gocyclo-check: go get -u github.com/fzipp/gocyclo; \ go install $(GOFLAGS) github.com/fzipp/gocyclo; \ fi - for S in $(GOFILES); do gocyclo -over 19 $$S || exit 1; done; + for S in $(GOFILES); do gocyclo -over 23 $$S || exit 1; done; .PHONY: static-check static-check: diff --git a/config.yml.sample b/config.yml.sample index 722d9a78d..78c2211e7 100644 --- a/config.yml.sample +++ b/config.yml.sample @@ -106,3 +106,9 @@ ratelimit: limit: 100 # The store where the limit counter for each user is stored. 
Possible values are "memory" or "redis" store: memory + +files: + # The path where files are stored + basepath: ./files # relative to the binary + # The maximum size of a file, in bytes. + maxsize: 21474836480 # 20 MB diff --git a/docs/content/doc/setup/config.md b/docs/content/doc/setup/config.md index 406dca818..615cf103c 100644 --- a/docs/content/doc/setup/config.md +++ b/docs/content/doc/setup/config.md @@ -149,4 +149,10 @@ ratelimit: limit: 100 # The store where the limit counter for each user is stored. Possible values are "memory" or "redis" store: memory + +files: + # The path where files are stored + basepath: ./files # relative to the binary + # The maximum size of a file, in bytes. + maxsize: 21474836480 # 20 MB {{< /highlight >}} diff --git a/docs/content/doc/usage/errors.md b/docs/content/doc/usage/errors.md index 498fe5022..6d9267845 100644 --- a/docs/content/doc/usage/errors.md +++ b/docs/content/doc/usage/errors.md @@ -43,6 +43,8 @@ This document describes the different errors Vikunja can return. | 4008 | 409 | The user tried to create a task relation which already exists. | | 4009 | 404 | The task relation does not exist. | | 4010 | 400 | Cannot relate a task with itself. | +| 4011 | 404 | The task attachment does not exist. | +| 4012 | 400 | The task attachment is too large. | | 5001 | 404 | The namspace does not exist. | | 5003 | 403 | The user does not have access to the specified namespace. | | 5006 | 400 | The namespace name cannot be empty. | diff --git a/go.mod b/go.mod index 9d3b12f3c..ff3e1d3ce 100644 --- a/go.mod +++ b/go.mod @@ -19,18 +19,18 @@ module code.vikunja.io/api require ( cloud.google.com/go v0.34.0 // indirect code.vikunja.io/web v0.0.0-20190628075253-b457b5a1a332 - github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc + github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a github.com/beevik/etree v1.1.0 // indirect github.com/client9/misspell v0.3.4 + github.com/cpuguy83/go-md2man/v2 v2.0.0 // indirect + github.com/creack/pty v1.1.9 // indirect github.com/d4l3k/messagediff v1.2.1 // indirect github.com/dgrijalva/jwt-go v3.2.0+incompatible github.com/fzipp/gocyclo v0.0.0-20150627053110-6acd4345c835 github.com/garyburd/redigo v1.6.0 // indirect - github.com/go-openapi/jsonpointer v0.19.0 // indirect - github.com/go-openapi/jsonreference v0.19.0 // indirect - github.com/go-openapi/spec v0.19.0 // indirect - github.com/go-openapi/swag v0.19.0 // indirect + github.com/go-openapi/jsonreference v0.19.3 // indirect + github.com/go-openapi/spec v0.19.3 // indirect github.com/go-redis/redis v6.15.2+incompatible github.com/go-sql-driver/mysql v1.4.1 github.com/go-xorm/builder v0.3.4 @@ -38,14 +38,18 @@ require ( github.com/go-xorm/tests v0.5.6 // indirect github.com/go-xorm/xorm v0.7.1 github.com/go-xorm/xorm-redis-cache v0.0.0-20180727005610-859b313566b2 + github.com/golang/protobuf v1.3.2 // indirect github.com/gordonklaus/ineffassign v0.0.0-20180909121442-1003c8bd00dc github.com/imdario/mergo v0.3.7 github.com/inconshreveable/mousetrap v1.0.0 // indirect github.com/jgautheron/goconst v0.0.0-20170703170152-9740945f5dcb + github.com/json-iterator/go v1.1.7 // indirect + github.com/kr/pty v1.1.8 // indirect github.com/labstack/echo/v4 v4.1.7-0.20190627175217-8fb7b5be270f github.com/labstack/gommon v0.2.9 github.com/laurent22/ical-go v0.1.1-0.20181107184520-7e5d6ade8eef - github.com/mailru/easyjson v0.0.0-20190403194419-1ea4449da983 // indirect + 
github.com/mailru/easyjson v0.7.0 // indirect + github.com/mattn/go-isatty v0.0.10 // indirect github.com/mattn/go-oci8 v0.0.0-20181130072307-052f5d97b9b6 // indirect github.com/mattn/go-runewidth v0.0.4 // indirect github.com/mattn/go-sqlite3 v1.10.0 @@ -54,24 +58,34 @@ require ( github.com/onsi/gomega v1.4.3 // indirect github.com/op/go-logging v0.0.0-20160315200505-970db520ece7 github.com/pelletier/go-toml v1.4.0 // indirect + github.com/pkg/errors v0.8.1 // indirect github.com/prometheus/client_golang v0.9.2 github.com/samedi/caldav-go v3.0.0+incompatible github.com/shurcooL/httpfs v0.0.0-20190527155220-6a4d4a70508b github.com/shurcooL/vfsgen v0.0.0-20181202132449-6a9ea43bcacd - github.com/spf13/afero v1.2.2 // indirect + github.com/spf13/afero v1.2.2 github.com/spf13/cobra v0.0.3 github.com/spf13/jwalterweatherman v1.1.0 // indirect github.com/spf13/viper v1.3.2 - github.com/stretchr/testify v1.3.0 - github.com/swaggo/swag v1.5.0 + github.com/stretchr/testify v1.4.0 + github.com/swaggo/swag v1.6.3 + github.com/ugorji/go v1.1.7 // indirect github.com/ulule/limiter/v3 v3.3.0 - golang.org/x/crypto v0.0.0-20190621222207-cc06ce4a13d4 + github.com/urfave/cli v1.22.1 // indirect + golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550 golang.org/x/lint v0.0.0-20190409202823-959b441ac422 + golang.org/x/net v0.0.0-20191011234655-491137f69257 // indirect + golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e // indirect + golang.org/x/sys v0.0.0-20191010194322-b09406accb47 // indirect + golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a // indirect + golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898 // indirect google.golang.org/appengine v1.5.0 // indirect gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect + gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 // indirect gopkg.in/d4l3k/messagediff.v1 v1.2.1 gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df gopkg.in/testfixtures.v2 v2.5.3 + gopkg.in/yaml.v2 v2.2.4 // indirect honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a src.techknowlogick.com/xgo v0.0.0-20190507142556-a5b29ecb0ff4 src.techknowlogick.com/xormigrate v0.0.0-20190321151057-24497c23c09c diff --git a/go.sum b/go.sum index 750d81d06..5fcd27bad 100644 --- a/go.sum +++ b/go.sum @@ -9,6 +9,8 @@ code.vikunja.io/web v0.0.0-20190628075253-b457b5a1a332 h1:gXxyLkjhgN+vqrLvPyqySc code.vikunja.io/web v0.0.0-20190628075253-b457b5a1a332/go.mod h1:cuP1/ieGWAZzgQGw+QPt6Y5F0fVb/8Ol5NV4QSezGdo= github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc= +github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE= github.com/PuerkitoBio/purell v1.1.0 h1:rmGxhojJlM0tuKtfdvliR84CFHljx9ag64t2xmVkjK4= github.com/PuerkitoBio/purell v1.1.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI= @@ -17,6 +19,8 @@ github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc h1:cAKDfWh5VpdgMhJosfJnn5/FoN2SRZ4p7fJNX58YPaU= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= 
+github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 h1:JYp7IbQjafoB+tBA3gMyHYHrpOtNuDiK/uB5uXxq5wM= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a h1:idn718Q4B6AGu/h5Sxe66HYVdqdGu2l9Iebqhi/AEoA= github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY= @@ -32,6 +36,11 @@ github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMe github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM= +github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/cweill/gotests v1.5.3 h1:k3t4wW/x/YNixWZJhUIn+mivmK5iV1tJVOwVYkx0UcU= github.com/d4l3k/messagediff v1.2.1 h1:ZcAIMYsUg0EAp9X+tt8/enBE/Q8Yd5kzPynLyKptt9U= github.com/d4l3k/messagediff v1.2.1/go.mod h1:Oozbb1TVXFac9FtSIxHBMnBCq2qeH/2KkEQxENCrlLo= @@ -50,27 +59,41 @@ github.com/garyburd/redigo v1.6.0 h1:0VruCpn7yAIIu7pWVClQC8wxCJEcG3nyzpMSHKi1PQc github.com/garyburd/redigo v1.6.0/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY= github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/gin-contrib/gzip v0.0.1/go.mod h1:fGBJBCdt6qCZuCAOwWuFhBB4OOq9EFqlo5dEaFhhu5w= github.com/gin-contrib/sse v0.0.0-20170109093832-22d885f9ecc7/go.mod h1:VJ0WA2NBN22VlZ2dKZQPAPnyWw5XTlK1KymzLKsr59s= github.com/gin-contrib/sse v0.0.0-20190301062529-5545eab6dad3/go.mod h1:VJ0WA2NBN22VlZ2dKZQPAPnyWw5XTlK1KymzLKsr59s= +github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= github.com/gin-gonic/gin v1.3.0/go.mod h1:7cKuhb5qV2ggCFctp2fJQ+ErvciLZrIeoOSOm6mUr7Y= +github.com/gin-gonic/gin v1.4.0/go.mod h1:OW2EZn3DO8Ln9oIKOvM++LBO+5UPHJJDH72/q/3rZdM= github.com/go-chi/chi v3.3.3+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ= github.com/go-openapi/jsonpointer v0.17.0 h1:nH6xp8XdXHx8dqveo0ZuJBluCO2qGrPbDNZ0dwoRHP0= github.com/go-openapi/jsonpointer v0.17.0/go.mod h1:cOnomiV+CVVwFLk0A/MExoFMjwdsUdVpsRhURCKh+3M= github.com/go-openapi/jsonpointer v0.18.0/go.mod h1:cOnomiV+CVVwFLk0A/MExoFMjwdsUdVpsRhURCKh+3M= github.com/go-openapi/jsonpointer v0.19.0 h1:FTUMcX77w5rQkClIzDtTxvn6Bsa894CcrzNj2MMfeg8= github.com/go-openapi/jsonpointer v0.19.0/go.mod h1:cOnomiV+CVVwFLk0A/MExoFMjwdsUdVpsRhURCKh+3M= +github.com/go-openapi/jsonpointer v0.19.2/go.mod h1:3akKfEdA7DF1sugOqz1dVQHBcuDBPKZGEoHC/NkiQRg= +github.com/go-openapi/jsonpointer v0.19.3 h1:gihV7YNZK1iK6Tgwwsxo2rJbD1GTbdm72325Bq8FI3w= +github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= github.com/go-openapi/jsonreference 
v0.17.0 h1:yJW3HCkTHg7NOA+gZ83IPHzUSnUzGXhGmsdiCcMexbA= github.com/go-openapi/jsonreference v0.17.0/go.mod h1:g4xxGn04lDIRh0GJb5QlpE3HfopLOL6uZrK/VgnsK9I= github.com/go-openapi/jsonreference v0.18.0/go.mod h1:g4xxGn04lDIRh0GJb5QlpE3HfopLOL6uZrK/VgnsK9I= github.com/go-openapi/jsonreference v0.19.0 h1:BqWKpV1dFd+AuiKlgtddwVIFQsuMpxfBDBHGfM2yNpk= github.com/go-openapi/jsonreference v0.19.0/go.mod h1:g4xxGn04lDIRh0GJb5QlpE3HfopLOL6uZrK/VgnsK9I= +github.com/go-openapi/jsonreference v0.19.2/go.mod h1:jMjeRr2HHw6nAVajTXJ4eiUwohSTlpa0o73RUL1owJc= +github.com/go-openapi/jsonreference v0.19.3 h1:5cxNfTy0UVC3X8JL5ymxzyoUZmo8iZb+jeTWn7tUa8o= +github.com/go-openapi/jsonreference v0.19.3/go.mod h1:rjx6GuL8TTa9VaixXglHmQmIL98+wF9xc8zWvFonSJ8= github.com/go-openapi/spec v0.18.0/go.mod h1:XkF/MOi14NmjsfZ8VtAKf8pIlbZzyoTvZsdfssdxcBI= github.com/go-openapi/spec v0.19.0 h1:A4SZ6IWh3lnjH0rG0Z5lkxazMGBECtrZcbyYQi+64k4= github.com/go-openapi/spec v0.19.0/go.mod h1:XkF/MOi14NmjsfZ8VtAKf8pIlbZzyoTvZsdfssdxcBI= +github.com/go-openapi/spec v0.19.3 h1:0XRyw8kguri6Yw4SxhsQA/atC88yqrk0+G4YhI2wabc= +github.com/go-openapi/spec v0.19.3/go.mod h1:FpwSN1ksY1eteniUU7X0N/BgJ7a4WvBFVA8Lj9mJglo= github.com/go-openapi/swag v0.17.0/go.mod h1:AByQ+nYG6gQg71GINrmuDXCPWdL640yX49/kXLo40Tg= github.com/go-openapi/swag v0.18.0/go.mod h1:AByQ+nYG6gQg71GINrmuDXCPWdL640yX49/kXLo40Tg= github.com/go-openapi/swag v0.19.0 h1:Kg7Wl7LkTPlmc393QZQ/5rQadPhi7pBVEMZxyTi0Ii8= github.com/go-openapi/swag v0.19.0/go.mod h1:AByQ+nYG6gQg71GINrmuDXCPWdL640yX49/kXLo40Tg= +github.com/go-openapi/swag v0.19.2/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= +github.com/go-openapi/swag v0.19.5 h1:lTz6Ys4CmqqCQmZPBlbQENR1/GucA2bzYTE12Pw4tFY= +github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= github.com/go-redis/redis v6.14.0+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA= github.com/go-redis/redis v6.15.2+incompatible h1:9SpNVG76gr6InJGxoZ6IuuxaCOQwDAhzyXg+Bs+0Sb4= github.com/go-redis/redis v6.15.2+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA= @@ -97,8 +120,11 @@ github.com/golang/protobuf v1.2.0 h1:P3YflyNX/ehuJFLhxviNdFxQPkGK5cDcApsge1SqnvM github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1 h1:YF8+flBXS5eO826T4nzqPrxfhQThhXl0YzfuUPu4SBg= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2 h1:6nsPYzhq5kReh6QImI3k5qWzO4PEbvbIW2cwSfR/6xs= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/gordonklaus/ineffassign v0.0.0-20180909121442-1003c8bd00dc h1:cJlkeAx1QYgO5N80aF5xRGstVsRQwgLR7uA2FnP1ZjY= github.com/gordonklaus/ineffassign v0.0.0-20180909121442-1003c8bd00dc/go.mod h1:cuNKsD1zp2v6XfE/orVX2QE1LC+i254ceGcVeDT3pTU= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= @@ -118,7 +144,9 @@ github.com/jgautheron/goconst v0.0.0-20170703170152-9740945f5dcb/go.mod h1:82Txj github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc= github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= github.com/json-iterator/go v0.0.0-20180806060727-1624edc4454b/go.mod 
h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.5/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/kolaente/caldav-go v3.0.1-0.20190524174923-9e5cd1688227+incompatible h1:PkEEpmbrFXlMul8cOplR8nkcIM/NDbx+H6fq2+vaKAA= github.com/kolaente/caldav-go v3.0.1-0.20190524174923-9e5cd1688227+incompatible/go.mod h1:y1UhTNI4g0hVymJrI6yJ5/ohy09hNBeU8iJEZjgdDOw= github.com/kolaente/echo/v4 v4.0.0-20190507190305-3725a216d803/go.mod h1:3LbYC6VkwmUnmLPZ8WFdHdQHG77e9GQbjyhWdb1QvC4= @@ -129,6 +157,8 @@ github.com/kolaente/echo/v4 v4.0.0-20190622213746-34000ea0f7d3/go.mod h1:kU/7Pwz github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA= +github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/labstack/echo v3.3.10+incompatible h1:pGRcYk231ExFAyoAjAfD85kQzRJCRI8bbnE7CX5OEgg= @@ -153,6 +183,10 @@ github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329/go.mod h1:C1wdFJiN github.com/mailru/easyjson v0.0.0-20190312143242-1de009706dbe/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.0.0-20190403194419-1ea4449da983 h1:wL11wNW7dhKIcRCHSm4sHKPWz0tt4mwBsVodG7+Xyqg= github.com/mailru/easyjson v0.0.0-20190403194419-1ea4449da983/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.7.0 h1:aizVhC/NAAcKWb+5QsU1iNOZb4Yws5UO2I+aIprQITM= +github.com/mailru/easyjson v0.7.0/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7ldAVICs= github.com/mattn/go-colorable v0.1.1 h1:G1f5SKeVxmagw/IyvzvtZE4Gybcc4Tr1tf7I8z0XgOg= github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= github.com/mattn/go-colorable v0.1.2 h1:/bC9yWikZXAL9uJdulbSfyVNIR3n3trXl+v8+1sx8mU= @@ -164,6 +198,8 @@ github.com/mattn/go-isatty v0.0.7 h1:UvyT9uN+3r7yLEYSlJsbQGdsaB/a0DlgWP3pql6iwOc github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.8 h1:HLtExJ+uU2HOZ+wI0Tt5DtUDrx8yhUqDcp7fYERX4CE= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.10 h1:qxFzApOv4WsAL965uUPIsXzAKCZxN2p9UqdhFS4ZW10= +github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= github.com/mattn/go-oci8 v0.0.0-20181115070430-6eefff3c767c/go.mod h1:/M9VLO+lUPmxvoOK2PfWRZ8mTtB4q1Hy9lEGijv9Nr8= github.com/mattn/go-oci8 v0.0.0-20181130072307-052f5d97b9b6 h1:gheNi9lnffYyVyqQzJqY7lo+M3bCDVw5fLU/jSuCMhc= github.com/mattn/go-oci8 v0.0.0-20181130072307-052f5d97b9b6/go.mod h1:/M9VLO+lUPmxvoOK2PfWRZ8mTtB4q1Hy9lEGijv9Nr8= @@ -176,6 +212,7 @@ github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0j 
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= @@ -207,12 +244,16 @@ github.com/prometheus/common v0.0.0-20181126121408-4724e9255275 h1:PnBWHBf+6L0jO github.com/prometheus/common v0.0.0-20181126121408-4724e9255275/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= github.com/prometheus/procfs v0.0.0-20181204211112-1dc9a6cbc91a h1:9a8MnZMP0X2nLJdBg+pBmGgkJlSaKC2KaQmTCk1XDtE= github.com/prometheus/procfs v0.0.0-20181204211112-1dc9a6cbc91a/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q= +github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/satori/go.uuid v1.2.0 h1:0uYX9dsZ2yD7q2RtLRtPSdGDWzjeM3TbMJP9utgA0ww= github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24 h1:pntxY8Ary0t43dCZ5dqY4YTJCObLY1kIXl0uzMv+7DE= github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= github.com/shurcooL/httpfs v0.0.0-20190527155220-6a4d4a70508b h1:4kg1wyftSKxLtnPAvcRWakIPpokB9w780/KwrNLnfPA= github.com/shurcooL/httpfs v0.0.0-20190527155220-6a4d4a70508b/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg= +github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo= +github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/shurcooL/vfsgen v0.0.0-20181202132449-6a9ea43bcacd h1:ug7PpSOB5RBPK1Kg6qskGBoP3Vnj/aNYFTznWvlkGo0= github.com/shurcooL/vfsgen v0.0.0-20181202132449-6a9ea43bcacd/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw= github.com/spf13/afero v1.1.2 h1:m8/z1t7/fwjysjQRYbP0RD+bUIF/8tJwPdEZsI83ACI= @@ -237,19 +278,34 @@ github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1 github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/swaggo/files v0.0.0-20190704085106-630677cd5c14/go.mod h1:gxQT6pBGRuIGunNf/+tSOB5OHvguWi8Tbt82WOkf35E= github.com/swaggo/gin-swagger v1.1.0/go.mod h1:FQlm07YuT1glfN3hQiO11UQ2m39vOCZ/aa3WWr5E+XU= +github.com/swaggo/gin-swagger v1.2.0/go.mod h1:qlH2+W7zXGZkczuL+r2nEBR2JTT+/lX05Nn6vPhc7OI= github.com/swaggo/swag v1.4.0/go.mod h1:hog2WgeMOrQ/LvQ+o1YGTeT+vWVrbi0SiIslBtxKTyM= github.com/swaggo/swag v1.5.0 h1:haK8VG3hj+v/c8hQ4f3U+oYpkdI/26m9LAUTXHOv+2U= 
github.com/swaggo/swag v1.5.0/go.mod h1:+xZrnu5Ut3GcUkKAJm9spnOooIS1WB1cUOkLNPrvrE0= +github.com/swaggo/swag v1.5.1/go.mod h1:1Bl9F/ZBpVWh22nY0zmYyASPO1lI/zIwRDrpZU+tv8Y= +github.com/swaggo/swag v1.6.3 h1:N+uVPGP4H2hXoss2pt5dctoSUPKKRInr6qcTMOm0usI= +github.com/swaggo/swag v1.6.3/go.mod h1:wcc83tB4Mb2aNiL/HP4MFeQdpHUrca+Rp/DRNgWAUio= github.com/ugorji/go v1.1.1/go.mod h1:hnLbHMwcvSihnDhEfx2/BzKp2xb0Y+ErdfYcrs9tkJQ= github.com/ugorji/go v1.1.2/go.mod h1:hnLbHMwcvSihnDhEfx2/BzKp2xb0Y+ErdfYcrs9tkJQ= +github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= +github.com/ugorji/go v1.1.5-pre/go.mod h1:FwP/aQVg39TXzItUBMwnWp9T9gPQnXw4Poh4/oBQZ/0= +github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw= +github.com/ugorji/go/codec v0.0.0-20181022190402-e5e69e061d4f/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= github.com/ugorji/go/codec v0.0.0-20181209151446-772ced7fd4c2/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= github.com/ugorji/go/codec v0.0.0-20190320090025-2dc34c0b8780/go.mod h1:iT03XoTwV7xq/+UGwKO3UbC1nNNlopQiY61beSdrtOA= +github.com/ugorji/go/codec v1.1.5-pre/go.mod h1:tULtS6Gy1AE1yCENaw4Vb//HLH5njI2tfCQDUqRd8fI= +github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY= github.com/ulule/limiter/v3 v3.3.0 h1:DuMRthpkl1wW9Em6xOVw5HMHnbDumSIDydiMqP0PTXs= github.com/ulule/limiter/v3 v3.3.0/go.mod h1:E6sfg3hfRgW+yFvkE/rZf6YLqXYFMWTmZaZKvdEiQsA= github.com/urfave/cli v1.20.0 h1:fDqGv3UG/4jbVl/QkFwEdddtEDjh/5Ov6X+0B/3bPaw= github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= +github.com/urfave/cli v1.22.1 h1:+mkCCcOFKPnCmVYVcURKps1Xe+3zP90gSYGNfRkjoIY= +github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/fasttemplate v1.0.1 h1:tY9CJiPnMXf1ERmG2EyK7gNUd+c6RKGD0IfU8WdUSz8= @@ -265,8 +321,11 @@ golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20190506204251-e1dfcc566284 h1:rlLehGeYg6jfoyz/eDqDU1iRXLKfR42nnNh57ytKEWo= golang.org/x/crypto v0.0.0-20190506204251-e1dfcc566284/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190621222207-cc06ce4a13d4 h1:ydJNl0ENAG67pFbB+9tfhiL2pYqLhfoaZFw/cjLhY4A= golang.org/x/crypto v0.0.0-20190621222207-cc06ce4a13d4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550 h1:ObdrDkeb4kJdCP557AjRjq69pTHfNouLtWZG7j9rPN8= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/lint v0.0.0-20190409202823-959b441ac422 h1:QzoH/1pFpZguR8NrRHLcO6jKqfv2zpuSqZLgdm7ZmjI= golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -276,6 +335,7 @@ golang.org/x/net v0.0.0-20181005035420-146acd28ed58/go.mod 
h1:mL1N/T3taQHkDXs73r golang.org/x/net v0.0.0-20181011144130-49bb7cea24b1/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181201002055-351d144fa1fc h1:a3CU5tJYVj92DY2LaA1kUkrsqD5/3mLDhx2NcNqyW+0= golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190322120337-addf6b3196f6/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= @@ -283,14 +343,22 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c h1:uOCk1iQW6Vc18bnC13MfzScl+wdKBmM9Y9kU7Z83/lw= golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190607181551-461777fb6f67/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190611141213-3f473d35a33a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859 h1:R/3boaszxrf1GEUWTVDzSKVwLmSJpwZ1yqXm8j0v2QI= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191011234655-491137f69257 h1:ry8e2D+cwaV6hk7lb3aRTjjZo24shrbK0e11QEOkTIg= +golang.org/x/net v0.0.0-20191011234655-491137f69257/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f h1:Bl/8QSvNqXvPGPGXa2z5xUTmV7VDcZyvRZ+QQXkXTZQ= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181228144115-9a3f9b0469bb/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190322080309-f49334f85ddc/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -299,10 +367,15 @@ golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b h1:ag/x1USPSsqHud38I9BAC88qd golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190602015325-4c4f7f33c9ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys 
v0.0.0-20190609082536-301114b31cce/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190610200419-93c9922d18ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190616124812-15dcb6c0061f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190621203818-d432491b9138 h1:t8BZD9RDjkm9/h7yYN6kE8oaeov5r9aztkB7zKA5Tkg= golang.org/x/sys v0.0.0-20190621203818-d432491b9138/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb h1:fgwFCsaw9buMuxNd6+DQfAuSFqbNiQZpcgJQAgJsK6k= golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191010194322-b09406accb47 h1:/XfQ9z7ib8eEJX2hdgFTZJ/ntt0swNk5oYBziWeTCvY= +golang.org/x/sys v0.0.0-20191010194322-b09406accb47/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs= @@ -311,11 +384,18 @@ golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGm golang.org/x/tools v0.0.0-20190110015856-aa033095749b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190322203728-c1a832b0ad89/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190606050223-4d9ae51c2468/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190608022120-eacb66d2a7c3/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190611222205-d73e1c7e250b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac h1:MQEvx39qSf8vyrx3XRaOe+j1UDIzKwkYOVObRgGPVqI= golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190628034336-212fb13d595e h1:ZlQjfVdpDxeqxRfmO30CdqWWzTvgRCj0MxaUVfxEG1k= golang.org/x/tools v0.0.0-20190628034336-212fb13d595e/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a h1:TwMENskLwU2NnWBzrJGEWHqSiGUkO/B4rfyhwqDxDYQ= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/appengine v1.3.0 h1:FBSsiFRMz3LBeXIomRnVzrQwSDj4ibvcRexLG0LZGQk= google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -327,6 +407,7 @@ gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+ gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 
h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/d4l3k/messagediff.v1 v1.2.1 h1:70AthpjunwzUiarMHyED52mj9UwtAnE89l1Gmrt3EU0= gopkg.in/d4l3k/messagediff.v1 v1.2.1/go.mod h1:EUzikiKadqXWcD1AzJLagx0j/BeeWGtn++04Xniyg44= gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4= @@ -345,6 +426,8 @@ gopkg.in/yaml.v2 v2.2.1 h1:mUhvW9EsL+naU5Q3cakzfE91YhliOondGd6ZrsDBHQE= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4 h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a h1:LJwr7TCTghdatWv40WobzlKXc9c4s8oGa7QKJUtHhWA= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= src.techknowlogick.com/xgo v0.0.0-20190507142556-a5b29ecb0ff4 h1:DZKMg4qnT7UIyB5ZaC6ZqltF2K5KhA1oQ2PdxOLZ3jg= diff --git a/pkg/cmd/cmd.go b/pkg/cmd/cmd.go index af411b13f..a01ab9d4d 100644 --- a/pkg/cmd/cmd.go +++ b/pkg/cmd/cmd.go @@ -18,6 +18,7 @@ package cmd import ( "code.vikunja.io/api/pkg/config" + "code.vikunja.io/api/pkg/files" "code.vikunja.io/api/pkg/log" "code.vikunja.io/api/pkg/mail" "code.vikunja.io/api/pkg/migration" @@ -74,6 +75,13 @@ func initialize() { if err != nil { log.Fatal(err.Error()) } + err = files.SetEngine() + if err != nil { + log.Fatal(err.Error()) + } + + // Initialize the files handler + files.InitFileHandler() // Start the mail daemon mail.StartMailDaemon() diff --git a/pkg/config/config.go b/pkg/config/config.go index e98f3d102..5ddad2440 100644 --- a/pkg/config/config.go +++ b/pkg/config/config.go @@ -85,6 +85,9 @@ const ( RateLimitPeriod Key = `ratelimit.period` RateLimitLimit Key = `ratelimit.limit` RateLimitStore Key = `ratelimit.store` + + FilesBasePath Key = `files.basepath` + FilesMaxSize Key = `files.maxsize` ) // GetString returns a string config value @@ -122,10 +125,9 @@ func (k Key) setDefault(i interface{}) { viper.SetDefault(string(k), i) } -// InitConfig initializes the config, sets defaults etc. -func InitConfig() { - - // Set defaults +// InitDefaultConfig sets default config values +// This is an extra function so we can call it when initializing tests without initializing the full config +func InitDefaultConfig() { // Service config random, err := random(32) if err != nil { @@ -193,6 +195,16 @@ func InitConfig() { RateLimitLimit.setDefault(100) RateLimitPeriod.setDefault(60) RateLimitStore.setDefault("memory") + // Files + FilesBasePath.setDefault("files") + FilesMaxSize.setDefault(21474836480) // 20 MB +} + +// InitConfig initializes the config, sets defaults etc. 
+func InitConfig() { + + // Set defaults + InitDefaultConfig() // Init checking for environment variables viper.SetEnvPrefix("vikunja") @@ -205,7 +217,7 @@ func InitConfig() { viper.AddConfigPath("~/.config/vikunja") viper.AddConfigPath(".") viper.SetConfigName("config") - err = viper.ReadInConfig() + err := viper.ReadInConfig() if err != nil { log.Println(err.Error()) log.Println("Using defaults.") diff --git a/pkg/db/db.go b/pkg/db/db.go index bd2b570cc..bfbf2a95f 100644 --- a/pkg/db/db.go +++ b/pkg/db/db.go @@ -19,18 +19,30 @@ package db import ( "code.vikunja.io/api/pkg/config" "code.vikunja.io/api/pkg/log" + "encoding/gob" "fmt" "github.com/go-xorm/core" "github.com/go-xorm/xorm" + "os" "strconv" "time" + xrc "github.com/go-xorm/xorm-redis-cache" + _ "github.com/go-sql-driver/mysql" // Because. _ "github.com/mattn/go-sqlite3" // Because. ) +// We only want one instance of the engine, so we can reate it once and reuse it +var x *xorm.Engine + // CreateDBEngine initializes a db engine from the config func CreateDBEngine() (engine *xorm.Engine, err error) { + + if x != nil { + return x, nil + } + // If the database type is not set, this likely means we need to initialize the config first if config.DatabaseType.GetString() == "" { config.InitConfig() @@ -54,9 +66,57 @@ func CreateDBEngine() (engine *xorm.Engine, err error) { engine.ShowSQL(config.LogDatabase.GetString() != "off") engine.SetLogger(xorm.NewSimpleLogger(log.GetLogWriter("database"))) + // Cache + // We have to initialize the cache here to avoid import cycles + if config.CacheEnabled.GetBool() { + switch config.CacheType.GetString() { + case "memory": + cacher := xorm.NewLRUCacher(xorm.NewMemoryStore(), config.CacheMaxElementSize.GetInt()) + engine.SetDefaultCacher(cacher) + case "redis": + cacher := xrc.NewRedisCacher(config.RedisEnabled.GetString(), config.RedisPassword.GetString(), xrc.DEFAULT_EXPIRATION, engine.Logger()) + engine.SetDefaultCacher(cacher) + default: + log.Info("Did not find a valid cache type. Caching disabled. Please refer to the docs for poosible cache types.") + } + } + + x = engine return } +// CreateTestEngine creates an instance of the db engine which lives in memory +func CreateTestEngine() (engine *xorm.Engine, err error) { + + if x != nil { + return x, nil + } + + if os.Getenv("VIKUNJA_TESTS_USE_CONFIG") == "1" { + config.InitConfig() + engine, err = CreateDBEngine() + if err != nil { + return nil, err + } + } else { + engine, err = xorm.NewEngine("sqlite3", "file::memory:?cache=shared") + if err != nil { + return nil, err + } + } + + engine.SetMapper(core.GonicMapper{}) + engine.ShowSQL(os.Getenv("UNIT_TESTS_VERBOSE") == "1") + engine.SetLogger(xorm.NewSimpleLogger(log.GetLogWriter("database"))) + x = engine + return +} + +// RegisterTableStructsForCache registers tables in gob encoding for redis cache +func RegisterTableStructsForCache(val interface{}) { + gob.Register(val) +} + func initMysqlEngine() (engine *xorm.Engine, err error) { connStr := fmt.Sprintf( "%s:%s@tcp(%s)/%s?charset=utf8&parseTime=true", diff --git a/pkg/db/test_fixtures.go b/pkg/db/test_fixtures.go new file mode 100644 index 000000000..8cb992f6f --- /dev/null +++ b/pkg/db/test_fixtures.go @@ -0,0 +1,36 @@ +// Copyright 2019 Vikunja and contriubtors. All rights reserved. +// +// This file is part of Vikunja. 
+// +// Vikunja is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// Vikunja is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with Vikunja. If not, see . + +package db + +import ( + "gopkg.in/testfixtures.v2" +) + +var fixtures *testfixtures.Context + +// InitFixtures initialize test fixtures for a test database +func InitFixtures(helper testfixtures.Helper, dir string) (err error) { + testfixtures.SkipDatabaseNameCheck(true) + fixtures, err = testfixtures.NewFolder(x.DB().DB, helper, dir) + return err +} + +// LoadFixtures load fixtures for a test database +func LoadFixtures() error { + return fixtures.Load() +} diff --git a/pkg/files/db.go b/pkg/files/db.go new file mode 100644 index 000000000..97123d4ee --- /dev/null +++ b/pkg/files/db.go @@ -0,0 +1,49 @@ +// Vikunja is a todo-list application to facilitate your life. +// Copyright 2019 Vikunja and contributors. All rights reserved. +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +package files + +import ( + "code.vikunja.io/api/pkg/config" + "code.vikunja.io/api/pkg/db" + "code.vikunja.io/api/pkg/log" + "github.com/go-xorm/xorm" +) + +var x *xorm.Engine + +// SetEngine sets the xorm.Engine +func SetEngine() (err error) { + x, err = db.CreateDBEngine() + if err != nil { + log.Criticalf("Could not connect to db: %v", err.Error()) + return + } + + // Cache + if config.CacheEnabled.GetBool() && config.CacheType.GetString() == "redis" { + db.RegisterTableStructsForCache(GetTables()) + } + + return nil +} + +// GetTables returns all structs which are also a table. +func GetTables() []interface{} { + return []interface{}{ + &File{}, + } +} diff --git a/pkg/files/error.go b/pkg/files/error.go new file mode 100644 index 000000000..980987b60 --- /dev/null +++ b/pkg/files/error.go @@ -0,0 +1,52 @@ +// Copyright 2019 Vikunja and contriubtors. All rights reserved. +// +// This file is part of Vikunja. +// +// Vikunja is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// Vikunja is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with Vikunja. If not, see . 
+ +package files + +import "fmt" + +// ErrFileDoesNotExist defines an error where a file does not exist in the db +type ErrFileDoesNotExist struct { + FileID int64 +} + +// Error is the error implementation of ErrFileDoesNotExist +func (err ErrFileDoesNotExist) Error() string { + return fmt.Sprintf("file %d does not exist", err.FileID) +} + +//IsErrFileDoesNotExist checks if an error is ErrFileDoesNotExist +func IsErrFileDoesNotExist(err error) bool { + _, ok := err.(ErrFileDoesNotExist) + return ok +} + +// ErrFileIsTooLarge defines an error where a file is larger than the configured limit +type ErrFileIsTooLarge struct { + Size int64 +} + +// Error is the error implementation of ErrFileIsTooLarge +func (err ErrFileIsTooLarge) Error() string { + return fmt.Sprintf("file is too large [Size: %d]", err.Size) +} + +//IsErrFileIsTooLarge checks if an error is ErrFileIsTooLarge +func IsErrFileIsTooLarge(err error) bool { + _, ok := err.(ErrFileIsTooLarge) + return ok +} diff --git a/pkg/files/filehandling.go b/pkg/files/filehandling.go new file mode 100644 index 000000000..3b5dc77f1 --- /dev/null +++ b/pkg/files/filehandling.go @@ -0,0 +1,97 @@ +// Vikunja is a todo-list application to facilitate your life. +// Copyright 2019 Vikunja and contributors. All rights reserved. +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +package files + +import ( + "code.vikunja.io/api/pkg/config" + "code.vikunja.io/api/pkg/db" + "code.vikunja.io/api/pkg/log" + "github.com/spf13/afero" + "github.com/stretchr/testify/assert" + "gopkg.in/testfixtures.v2" + "os" + "path/filepath" + "testing" +) + +// This file handles storing and retrieving a file for different backends +var fs afero.Fs +var afs *afero.Afero + +// InitFileHandler creates a new file handler for the file backend we want to use +func InitFileHandler() { + fs = afero.NewOsFs() + afs = &afero.Afero{Fs: fs} +} + +// InitTestFileHandler initializes a new memory file system for testing +func InitTestFileHandler() { + fs = afero.NewMemMapFs() + afs = &afero.Afero{Fs: fs} +} + +func initFixtures(t *testing.T) { + // Init db fixtures + err := db.LoadFixtures() + assert.NoError(t, err) + + InitTestFileFixtures(t) +} + +//InitTestFileFixtures initializes file fixtures +func InitTestFileFixtures(t *testing.T) { + // Init fixture files + filename := config.FilesBasePath.GetString() + "/1" + err := afero.WriteFile(afs, filename, []byte("testfile1"), 0644) + assert.NoError(t, err) +} + +// InitTests handles the actual bootstrapping of the test env +func InitTests() { + var err error + x, err = db.CreateTestEngine() + if err != nil { + log.Fatal(err) + } + + err = x.Sync2(GetTables()...) 
+ if err != nil { + log.Fatal(err) + } + + config.InitDefaultConfig() + // We need to set the root path even if we're not using the config, otherwise fixtures are not loaded correctly + config.ServiceRootpath.Set(os.Getenv("VIKUNJA_SERVICE_ROOTPATH")) + + // Sync fixtures + var fixturesHelper testfixtures.Helper = &testfixtures.SQLite{} + if config.DatabaseType.GetString() == "mysql" { + fixturesHelper = &testfixtures.MySQL{} + } + fixturesDir := filepath.Join(config.ServiceRootpath.GetString(), "pkg", "files", "fixtures") + err = db.InitFixtures(fixturesHelper, fixturesDir) + if err != nil { + log.Fatal(err) + } + + InitTestFileHandler() +} + +// FileStat stats a file. This is an exported function to be able to test this from outide of the package +func FileStat(filename string) (os.FileInfo, error) { + return afs.Stat(filename) +} diff --git a/pkg/files/files.go b/pkg/files/files.go new file mode 100644 index 000000000..8f4a63e97 --- /dev/null +++ b/pkg/files/files.go @@ -0,0 +1,104 @@ +// Vikunja is a todo-list application to facilitate your life. +// Copyright 2019 Vikunja and contributors. All rights reserved. +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +package files + +import ( + "code.vikunja.io/api/pkg/config" + "code.vikunja.io/web" + "github.com/spf13/afero" + "io" + "strconv" + "time" +) + +// File holds all information about a file +type File struct { + ID int64 `xorm:"int(11) autoincr not null unique pk" json:"id"` + Name string `xorm:"text not null" json:"name"` + Mime string `xorm:"text null" json:"mime"` + Size int64 `xorm:"int(11) not null" json:"size"` + + Created time.Time `xorm:"-" json:"created"` + + CreatedUnix int64 `xorm:"created" json:"-"` + CreatedByID int64 `xorm:"int(11) not null" json:"-"` + + File afero.File `xorm:"-" json:"-"` +} + +// TableName is the table name for the files table +func (File) TableName() string { + return "files" +} + +func (f *File) getFileName() string { + return config.FilesBasePath.GetString() + "/" + strconv.FormatInt(f.ID, 10) +} + +// LoadFileByID returns a file by its ID +func (f *File) LoadFileByID() (err error) { + f.File, err = afs.Open(f.getFileName()) + return +} + +// LoadFileMetaByID loads everything about a file without loading the actual file +func (f *File) LoadFileMetaByID() (err error) { + exists, err := x.Where("id = ?", f.ID).Get(f) + if !exists { + return ErrFileDoesNotExist{FileID: f.ID} + } + f.Created = time.Unix(f.CreatedUnix, 0) + return +} + +// Create creates a new file from an FileHeader +func Create(f io.ReadCloser, realname string, realsize int64, a web.Auth) (file *File, err error) { + + if realsize > config.FilesMaxSize.GetInt64() { + return nil, ErrFileIsTooLarge{Size: realsize} + } + + // We first insert the file into the db to get it's ID + file = &File{ + Name: realname, + Size: realsize, + CreatedByID: a.GetID(), + } + + _, err = x.Insert(file) + if err != nil { + return + } + + // Save the file to storage with its new ID 
as path + err = afs.WriteReader(file.getFileName(), f) + return +} + +// Delete removes a file from the DB and the file system +func (f *File) Delete() (err error) { + deleted, err := x.Where("id = ?", f.ID).Delete(f) + if err != nil { + return err + } + if deleted == 0 { + return ErrFileDoesNotExist{FileID: f.ID} + } + + err = afs.Remove(f.getFileName()) + return +} diff --git a/pkg/files/files_test.go b/pkg/files/files_test.go new file mode 100644 index 000000000..1b03b6055 --- /dev/null +++ b/pkg/files/files_test.go @@ -0,0 +1,131 @@ +// Copyright 2019 Vikunja and contriubtors. All rights reserved. +// +// This file is part of Vikunja. +// +// Vikunja is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// Vikunja is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with Vikunja. If not, see . + +package files + +import ( + "github.com/stretchr/testify/assert" + "io" + "os" + "testing" +) + +type testfile struct { + content []byte + done bool +} + +func (t *testfile) Read(p []byte) (n int, err error) { + if t.done { + return 0, io.EOF + } + copy(p, t.content) + t.done = true + return len(p), nil +} + +func (t *testfile) Close() error { + return nil +} + +type testauth struct { + id int64 +} + +func (a *testauth) GetID() int64 { + return a.id +} + +func TestCreate(t *testing.T) { + t.Run("Normal", func(t *testing.T) { + initFixtures(t) + tf := &testfile{ + content: []byte("testfile"), + } + ta := &testauth{id: 1} + _, err := Create(tf, "testfile", 100, ta) + assert.NoError(t, err) + + // Check the file was created correctly + file := &File{ID: 2} + err = file.LoadFileMetaByID() + assert.NoError(t, err) + assert.Equal(t, int64(1), file.CreatedByID) + assert.Equal(t, "testfile", file.Name) + assert.Equal(t, int64(100), file.Size) + + }) + t.Run("Too Large", func(t *testing.T) { + initFixtures(t) + tf := &testfile{ + content: []byte("testfile"), + } + ta := &testauth{id: 1} + _, err := Create(tf, "testfile", 99999999999, ta) + assert.Error(t, err) + assert.True(t, IsErrFileIsTooLarge(err)) + }) +} + +func TestFile_Delete(t *testing.T) { + t.Run("Normal", func(t *testing.T) { + initFixtures(t) + f := &File{ID: 1} + err := f.Delete() + assert.NoError(t, err) + }) + t.Run("Nonexisting", func(t *testing.T) { + initFixtures(t) + f := &File{ID: 9999} + err := f.Delete() + assert.Error(t, err) + assert.True(t, IsErrFileDoesNotExist(err)) + }) +} + +func TestFile_LoadFileByID(t *testing.T) { + t.Run("Normal", func(t *testing.T) { + initFixtures(t) + f := &File{ID: 1} + err := f.LoadFileByID() + assert.NoError(t, err) + }) + t.Run("Nonexisting", func(t *testing.T) { + initFixtures(t) + f := &File{ID: 9999} + err := f.LoadFileByID() + assert.Error(t, err) + assert.True(t, os.IsNotExist(err)) + }) +} + +func TestFile_LoadFileMetaByID(t *testing.T) { + t.Run("Normal", func(t *testing.T) { + initFixtures(t) + f := &File{ID: 1} + err := f.LoadFileMetaByID() + assert.NoError(t, err) + assert.Equal(t, "test", f.Name) + }) + t.Run("Nonexisting", func(t *testing.T) { + initFixtures(t) + f := &File{ID: 9999} + err := f.LoadFileMetaByID() + assert.Error(t, err) + 
assert.True(t, IsErrFileDoesNotExist(err)) + }) +} diff --git a/pkg/files/fixtures/files.yml b/pkg/files/fixtures/files.yml new file mode 100644 index 000000000..fe94f348a --- /dev/null +++ b/pkg/files/fixtures/files.yml @@ -0,0 +1,5 @@ +- id: 1 + name: test + size: 100 + created_unix: 1570998791 + created_by_id: 1 diff --git a/pkg/files/main_test.go b/pkg/files/main_test.go new file mode 100644 index 000000000..31505a2e0 --- /dev/null +++ b/pkg/files/main_test.go @@ -0,0 +1,29 @@ +// Copyright 2019 Vikunja and contriubtors. All rights reserved. +// +// This file is part of Vikunja. +// +// Vikunja is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// Vikunja is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with Vikunja. If not, see . + +package files + +import ( + "os" + "testing" +) + +// TestMain is the main test function used to bootstrap the test env +func TestMain(m *testing.M) { + InitTests() + os.Exit(m.Run()) +} diff --git a/pkg/integrations/integrations.go b/pkg/integrations/integrations.go index 98e304bd4..3785d6a4a 100644 --- a/pkg/integrations/integrations.go +++ b/pkg/integrations/integrations.go @@ -18,6 +18,8 @@ package integrations import ( "code.vikunja.io/api/pkg/config" + "code.vikunja.io/api/pkg/db" + "code.vikunja.io/api/pkg/files" "code.vikunja.io/api/pkg/models" "code.vikunja.io/api/pkg/routes" v1 "code.vikunja.io/api/pkg/routes/api/v1" @@ -29,6 +31,7 @@ import ( "net/http" "net/http/httptest" "net/url" + "os" "strings" "testing" ) @@ -73,10 +76,14 @@ var ( ) func setupTestEnv() (e *echo.Echo, err error) { - config.InitConfig() + config.InitDefaultConfig() + // We need to set the root path even if we're not using the config, otherwise fixtures are not loaded correctly + config.ServiceRootpath.Set(os.Getenv("VIKUNJA_SERVICE_ROOTPATH")) + // Some tests use the file engine, so we'll need to initialize that + files.InitTests() models.SetupTests(config.ServiceRootpath.GetString()) - err = models.LoadFixtures() + err = db.LoadFixtures() if err != nil { return } diff --git a/pkg/integrations/task_test.go b/pkg/integrations/task_test.go index 4481e3787..5b2b1f1f2 100644 --- a/pkg/integrations/task_test.go +++ b/pkg/integrations/task_test.go @@ -80,33 +80,33 @@ func TestTask(t *testing.T) { t.Run("by priority", func(t *testing.T) { rec, err := testHandler.testReadAllWithUser(url.Values{"sort": []string{"priority"}}, nil) assert.NoError(t, err) - assert.Contains(t, rec.Body.String(), `[{"id":3,"text":"task #3 high prio","description":"","done":false,"doneAt":0,"dueDate":0,"reminderDates":null,"listID":1,"repeatAfter":0,"priority":100,"startDate":0,"endDate":0,"assignees":null,"labels":null,"hexColor":"","percentDone":0,"related_tasks":{},"created":1543626724,"updated":1543626724,"createdBy":{"id":1,"username":"user1","avatarUrl":"111d68d06e2d317b5a59c2c6c5bad808","created":0,"updated":0}},{"id":4,"text":"task #4 low prio","description":"","done":false,"doneAt":0,"dueDate":0,"reminderDates":null,"listID":1,"repeatAfter":0,"priority":1`) + assert.Contains(t, rec.Body.String(), `[{"id":3,"text":"task #3 high 
prio","description":"","done":false,"doneAt":0,"dueDate":0,"reminderDates":null,"listID":1,"repeatAfter":0,"priority":100,"startDate":0,"endDate":0,"assignees":null,"labels":null,"hexColor":"","percentDone":0,"related_tasks":{},"attachments":null,"created":1543626724,"updated":1543626724,"createdBy":{"id":1,"username":"user1","avatarUrl":"111d68d06e2d317b5a59c2c6c5bad808","created":0,"updated":0}},{"id":4,"text":"task #4 low prio","description":"","done":false,"doneAt":0,"dueDate":0,"reminderDates":null,"listID":1,"repeatAfter":0,"priority":1,`) }) t.Run("by priority desc", func(t *testing.T) { rec, err := testHandler.testReadAllWithUser(url.Values{"sort": []string{"prioritydesc"}}, nil) assert.NoError(t, err) - assert.Contains(t, rec.Body.String(), `[{"id":3,"text":"task #3 high prio","description":"","done":false,"doneAt":0,"dueDate":0,"reminderDates":null,"listID":1,"repeatAfter":0,"priority":100,"startDate":0,"endDate":0,"assignees":null,"labels":null,"hexColor":"","percentDone":0,"related_tasks":{},"created":1543626724,"updated":1543626724,"createdBy":{"id":1,"username":"user1","avatarUrl":"111d68d06e2d317b5a59c2c6c5bad808","created":0,"updated":0}},{"id":4,"text":"task #4 low prio","description":"","done":false,"doneAt":0,"dueDate":0,"reminderDates":null,"listID":1,"repeatAfter":0,"priority":1`) + assert.Contains(t, rec.Body.String(), `[{"id":3,"text":"task #3 high prio","description":"","done":false,"doneAt":0,"dueDate":0,"reminderDates":null,"listID":1,"repeatAfter":0,"priority":100,"startDate":0,"endDate":0,"assignees":null,"labels":null,"hexColor":"","percentDone":0,"related_tasks":{},"attachments":null,"created":1543626724,"updated":1543626724,"createdBy":{"id":1,"username":"user1","avatarUrl":"111d68d06e2d317b5a59c2c6c5bad808","created":0,"updated":0}},{"id":4,"text":"task #4 low prio","description":"","done":false,"doneAt":0,"dueDate":0,"reminderDates":null,"listID":1,"repeatAfter":0,"priority":1,`) }) t.Run("by priority asc", func(t *testing.T) { rec, err := testHandler.testReadAllWithUser(url.Values{"sort": []string{"priorityasc"}}, nil) assert.NoError(t, err) - assert.Contains(t, rec.Body.String(), `{"id":33,"text":"task #33 with percent done","description":"","done":false,"doneAt":0,"dueDate":0,"reminderDates":null,"listID":1,"repeatAfter":0,"priority":0,"startDate":0,"endDate":0,"assignees":null,"labels":null,"hexColor":"","percentDone":0.5,"related_tasks":{},"created":1543626724,"updated":1543626724,"createdBy":{"id":1,"username":"user1","avatarUrl":"111d68d06e2d317b5a59c2c6c5bad808","created":0,"updated":0}},{"id":4,"text":"task #4 low prio","description":"","done":false,"doneAt":0,"dueDate":0,"reminderDates":null,"listID":1,"repeatAfter":0,"priority":1,"startDate":0,"endDate":0,"assignees":null,"labels":null,"hexColor":"","percentDone":0,"related_tasks":{},"created":1543626724,"updated":1543626724,"createdBy":{"id":1,"username":"user1","avatarUrl":"111d68d06e2d317b5a59c2c6c5bad808","created":0,"updated":0}},{"id":3,"text":"task #3 high prio","description":"","done":false,"doneAt":0,"dueDate":0,"reminderDates":null,"listID":1,"repeatAfter":0,"priority":100,"startDate":0,"endDate":0,"assignees":null,"labels":null,"hexColor":"","percentDone":0,"related_tasks":{},"created":1543626724,"updated":1543626724,"createdBy":{"id":1,"username":"user1","avatarUrl":"111d68d06e2d317b5a59c2c6c5bad808","created":0,"updated":0}}]`) + assert.Contains(t, rec.Body.String(), `{"id":33,"text":"task #33 with percent 
done","description":"","done":false,"doneAt":0,"dueDate":0,"reminderDates":null,"listID":1,"repeatAfter":0,"priority":0,"startDate":0,"endDate":0,"assignees":null,"labels":null,"hexColor":"","percentDone":0.5,"related_tasks":{},"attachments":null,"created":1543626724,"updated":1543626724,"createdBy":{"id":1,"username":"user1","avatarUrl":"111d68d06e2d317b5a59c2c6c5bad808","created":0,"updated":0}},{"id":4,"text":"task #4 low prio","description":"","done":false,"doneAt":0,"dueDate":0,"reminderDates":null,"listID":1,"repeatAfter":0,"priority":1,"startDate":0,"endDate":0,"assignees":null,"labels":null,"hexColor":"","percentDone":0,"related_tasks":{},"attachments":null,"created":1543626724,"updated":1543626724,"createdBy":{"id":1,"username":"user1","avatarUrl":"111d68d06e2d317b5a59c2c6c5bad808","created":0,"updated":0}},{"id":3,"text":"task #3 high prio","description":"","done":false,"doneAt":0,"dueDate":0,"reminderDates":null,"listID":1,"repeatAfter":0,"priority":100,"startDate":0,"endDate":0,"assignees":null,"labels":null,"hexColor":"","percentDone":0,"related_tasks":{},"attachments":null,"created":1543626724,"updated":1543626724,"createdBy":{"id":1,"username":"user1","avatarUrl":"111d68d06e2d317b5a59c2c6c5bad808","created":0,"updated":0}}]`) }) // should equal duedate desc t.Run("by duedate", func(t *testing.T) { rec, err := testHandler.testReadAllWithUser(url.Values{"sort": []string{"duedate"}}, nil) assert.NoError(t, err) - assert.Contains(t, rec.Body.String(), `[{"id":5,"text":"task #5 higher due date","description":"","done":false,"doneAt":0,"dueDate":1543636724,"reminderDates":null,"listID":1,"repeatAfter":0,"priority":0,"startDate":0,"endDate":0,"assignees":null,"labels":null,"hexColor":"","percentDone":0,"related_tasks":{},"created":1543626724,"updated":1543626724,"createdBy":{"id":1,"username":"user1","avatarUrl":"111d68d06e2d317b5a59c2c6c5bad808","created":0,"updated":0}},{"id":6,"text":"task #6 lower due date"`) + assert.Contains(t, rec.Body.String(), `[{"id":5,"text":"task #5 higher due date","description":"","done":false,"doneAt":0,"dueDate":1543636724,"reminderDates":null,"listID":1,"repeatAfter":0,"priority":0,"startDate":0,"endDate":0,"assignees":null,"labels":null,"hexColor":"","percentDone":0,"related_tasks":{},"attachments":null,"created":1543626724,"updated":1543626724,"createdBy":{"id":1,"username":"user1","avatarUrl":"111d68d06e2d317b5a59c2c6c5bad808","created":0,"updated":0}},{"id":6,"text":"task #6 lower due date"`) }) t.Run("by duedate desc", func(t *testing.T) { rec, err := testHandler.testReadAllWithUser(url.Values{"sort": []string{"duedatedesc"}}, nil) assert.NoError(t, err) - assert.Contains(t, rec.Body.String(), `[{"id":5,"text":"task #5 higher due date","description":"","done":false,"doneAt":0,"dueDate":1543636724,"reminderDates":null,"listID":1,"repeatAfter":0,"priority":0,"startDate":0,"endDate":0,"assignees":null,"labels":null,"hexColor":"","percentDone":0,"related_tasks":{},"created":1543626724,"updated":1543626724,"createdBy":{"id":1,"username":"user1","avatarUrl":"111d68d06e2d317b5a59c2c6c5bad808","created":0,"updated":0}},{"id":6,"text":"task #6 lower due date"`) + assert.Contains(t, rec.Body.String(), `[{"id":5,"text":"task #5 higher due 
date","description":"","done":false,"doneAt":0,"dueDate":1543636724,"reminderDates":null,"listID":1,"repeatAfter":0,"priority":0,"startDate":0,"endDate":0,"assignees":null,"labels":null,"hexColor":"","percentDone":0,"related_tasks":{},"attachments":null,"created":1543626724,"updated":1543626724,"createdBy":{"id":1,"username":"user1","avatarUrl":"111d68d06e2d317b5a59c2c6c5bad808","created":0,"updated":0}},{"id":6,"text":"task #6 lower due date"`) }) t.Run("by duedate asc", func(t *testing.T) { rec, err := testHandler.testReadAllWithUser(url.Values{"sort": []string{"duedateasc"}}, nil) assert.NoError(t, err) - assert.Contains(t, rec.Body.String(), `{"id":6,"text":"task #6 lower due date","description":"","done":false,"doneAt":0,"dueDate":1543616724,"reminderDates":null,"listID":1,"repeatAfter":0,"priority":0,"startDate":0,"endDate":0,"assignees":null,"labels":null,"hexColor":"","percentDone":0,"related_tasks":{},"created":1543626724,"updated":1543626724,"createdBy":{"id":1,"username":"user1","avatarUrl":"111d68d06e2d317b5a59c2c6c5bad808","created":0,"updated":0}},{"id":5,"text":"task #5 higher due date","description":"","done":false,"doneAt":0,"dueDate":1543636724,"reminderDates":null,"listID":1,"repeatAfter":0,"priority":0,"startDate":0,"endDate":0,"assignees":null,"labels":null,"hexColor":"","percentDone":0,"related_tasks":{},"created":1543626724,"updated":1543626724,"createdBy":{"id":1,"username":"user1","avatarUrl":"111d68d06e2d317b5a59c2c6c5bad808","created":0,"updated":0}}]`) + assert.Contains(t, rec.Body.String(), `{"id":6,"text":"task #6 lower due date","description":"","done":false,"doneAt":0,"dueDate":1543616724,"reminderDates":null,"listID":1,"repeatAfter":0,"priority":0,"startDate":0,"endDate":0,"assignees":null,"labels":null,"hexColor":"","percentDone":0,"related_tasks":{},"attachments":null,"created":1543626724,"updated":1543626724,"createdBy":{"id":1,"username":"user1","avatarUrl":"111d68d06e2d317b5a59c2c6c5bad808","created":0,"updated":0}},{"id":5,"text":"task #5 higher due date","description":"","done":false,"doneAt":0,"dueDate":1543636724,"reminderDates":null,"listID":1,"repeatAfter":0,"priority":0,"startDate":0,"endDate":0,"assignees":null,"labels":null,"hexColor":"","percentDone":0,"related_tasks":{},"attachments":null,"created":1543626724,"updated":1543626724,"createdBy":{"id":1,"username":"user1","avatarUrl":"111d68d06e2d317b5a59c2c6c5bad808","created":0,"updated":0}}]`) }) t.Run("invalid parameter", func(t *testing.T) { // Invalid parameter should not sort at all diff --git a/pkg/migration/20191008194238.go b/pkg/migration/20191008194238.go new file mode 100644 index 000000000..d8802e5c0 --- /dev/null +++ b/pkg/migration/20191008194238.go @@ -0,0 +1,48 @@ +// Vikunja is a todo-list application to facilitate your life. +// Copyright 2019 Vikunja and contributors. All rights reserved. +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . 
+ +package migration + +import ( + "github.com/go-xorm/xorm" + "src.techknowlogick.com/xormigrate" +) + +type file20191008194238 struct { + ID int64 `xorm:"int(11) autoincr not null unique pk" json:"id"` + Name string `xorm:"text not null" json:"name"` + Mime string `xorm:"text null" json:"mime"` + Size int64 `xorm:"int(11) not null default 0" json:"size"` + CreatedUnix int64 `xorm:"created" json:"inserted_unix"` + CreatedByID int64 `xorm:"int(11) not null" json:"-"` +} + +func (file20191008194238) TableName() string { + return "files" +} + +func init() { + migrations = append(migrations, &xormigrate.Migration{ + ID: "20191008194238", + Description: "Added files table", + Migrate: func(tx *xorm.Engine) error { + return tx.Sync2(file20191008194238{}) + }, + Rollback: func(tx *xorm.Engine) error { + return tx.DropTables(file20191008194238{}) + }, + }) +} diff --git a/pkg/migration/20191010131430.go b/pkg/migration/20191010131430.go new file mode 100644 index 000000000..d2e278210 --- /dev/null +++ b/pkg/migration/20191010131430.go @@ -0,0 +1,50 @@ +// Copyright 2019 Vikunja and contriubtors. All rights reserved. +// +// This file is part of Vikunja. +// +// Vikunja is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// Vikunja is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with Vikunja. If not, see . + +package migration + +import ( + "github.com/go-xorm/xorm" + "src.techknowlogick.com/xormigrate" +) + +type taskAttachment20191010131430 struct { + ID int64 `xorm:"int(11) autoincr not null unique pk" json:"id" param:"attachment"` + TaskID int64 `xorm:"int(11) not null" json:"task_id" param:"task"` + FileID int64 `xorm:"int(11) not null" json:"-"` + + CreatedByID int64 `xorm:"int(11) not null" json:"-"` + + Created int64 `xorm:"created"` +} + +func (taskAttachment20191010131430) TableName() string { + return "task_attachments" +} + +func init() { + migrations = append(migrations, &xormigrate.Migration{ + ID: "20191010131430", + Description: "Added task attachments table", + Migrate: func(tx *xorm.Engine) error { + return tx.Sync2(taskAttachment20191010131430{}) + }, + Rollback: func(tx *xorm.Engine) error { + return tx.DropTables(taskAttachment20191010131430{}) + }, + }) +} diff --git a/pkg/migration/migration.go b/pkg/migration/migration.go index 91a04baa1..1ef89b2b6 100644 --- a/pkg/migration/migration.go +++ b/pkg/migration/migration.go @@ -19,6 +19,7 @@ package migration import ( "code.vikunja.io/api/pkg/config" "code.vikunja.io/api/pkg/db" + "code.vikunja.io/api/pkg/files" "code.vikunja.io/api/pkg/log" "code.vikunja.io/api/pkg/models" "github.com/go-xorm/xorm" @@ -134,7 +135,8 @@ func modifyColumn(x *xorm.Engine, tableName, col, newDefinition string) error { } func initSchema(tx *xorm.Engine) error { - return tx.Sync2( - models.GetTables()..., - ) + schemeBeans := []interface{}{} + schemeBeans = append(schemeBeans, models.GetTables()...) + schemeBeans = append(schemeBeans, files.GetTables()...) + return tx.Sync2(schemeBeans...) 
} diff --git a/pkg/models/error.go b/pkg/models/error.go index 895c599f9..804099438 100644 --- a/pkg/models/error.go +++ b/pkg/models/error.go @@ -17,6 +17,7 @@ package models import ( + "code.vikunja.io/api/pkg/config" "code.vikunja.io/web" "fmt" "net/http" @@ -731,6 +732,62 @@ func (err ErrRelationTasksCannotBeTheSame) HTTPError() web.HTTPError { } } +// ErrTaskAttachmentDoesNotExist represents an error where the user tries to relate a task with itself +type ErrTaskAttachmentDoesNotExist struct { + TaskID int64 + AttachmentID int64 + FileID int64 +} + +// IsErrTaskAttachmentDoesNotExist checks if an error is ErrTaskAttachmentDoesNotExist. +func IsErrTaskAttachmentDoesNotExist(err error) bool { + _, ok := err.(ErrTaskAttachmentDoesNotExist) + return ok +} + +func (err ErrTaskAttachmentDoesNotExist) Error() string { + return fmt.Sprintf("Task attachment does not exist [TaskID: %d, AttachmentID: %d, FileID: %d]", err.TaskID, err.AttachmentID, err.FileID) +} + +// ErrCodeTaskAttachmentDoesNotExist holds the unique world-error code of this error +const ErrCodeTaskAttachmentDoesNotExist = 4011 + +// HTTPError holds the http error description +func (err ErrTaskAttachmentDoesNotExist) HTTPError() web.HTTPError { + return web.HTTPError{ + HTTPCode: http.StatusNotFound, + Code: ErrCodeTaskAttachmentDoesNotExist, + Message: "This task attachment does not exist.", + } +} + +// ErrTaskAttachmentIsTooLarge represents an error where the user tries to relate a task with itself +type ErrTaskAttachmentIsTooLarge struct { + Size int64 +} + +// IsErrTaskAttachmentIsTooLarge checks if an error is ErrTaskAttachmentIsTooLarge. +func IsErrTaskAttachmentIsTooLarge(err error) bool { + _, ok := err.(ErrTaskAttachmentIsTooLarge) + return ok +} + +func (err ErrTaskAttachmentIsTooLarge) Error() string { + return fmt.Sprintf("Task attachment is too large [Size: %d]", err.Size) +} + +// ErrCodeTaskAttachmentIsTooLarge holds the unique world-error code of this error +const ErrCodeTaskAttachmentIsTooLarge = 4012 + +// HTTPError holds the http error description +func (err ErrTaskAttachmentIsTooLarge) HTTPError() web.HTTPError { + return web.HTTPError{ + HTTPCode: http.StatusBadRequest, + Code: ErrCodeTaskAttachmentIsTooLarge, + Message: fmt.Sprintf("The task attachment exceeds the configured file size of %d bytes, filesize was %d", config.FilesMaxSize.GetInt64(), err.Size), + } +} + // ================= // Namespace errors // ================= diff --git a/pkg/models/fixtures/files.yml b/pkg/models/fixtures/files.yml new file mode 120000 index 000000000..aa8894413 --- /dev/null +++ b/pkg/models/fixtures/files.yml @@ -0,0 +1 @@ +../../files/fixtures/files.yml \ No newline at end of file diff --git a/pkg/models/fixtures/task_attachments.yml b/pkg/models/fixtures/task_attachments.yml new file mode 100644 index 000000000..0d9dd0f89 --- /dev/null +++ b/pkg/models/fixtures/task_attachments.yml @@ -0,0 +1,11 @@ +- id: 1 + task_id: 1 + file_id: 1 + created_by_id: 1 + created: 0 +# The file for this attachment does not exist +- id: 2 + task_id: 1 + file_id: 9999 + created_by_id: 1 + created: 0 diff --git a/pkg/models/main_test.go b/pkg/models/main_test.go index bd8ebd649..46e5e9381 100644 --- a/pkg/models/main_test.go +++ b/pkg/models/main_test.go @@ -18,10 +18,22 @@ package models import ( "code.vikunja.io/api/pkg/config" + "code.vikunja.io/api/pkg/files" + "os" "testing" ) func TestMain(m *testing.M) { - config.InitConfig() - MainTest(m, config.ServiceRootpath.GetString()) + + // Set default config + config.InitDefaultConfig() + 
// We need to set the root path even if we're not using the config, otherwise fixtures are not loaded correctly + config.ServiceRootpath.Set(os.Getenv("VIKUNJA_SERVICE_ROOTPATH")) + + // Some tests use the file engine, so we'll need to initialize that + files.InitTests() + + SetupTests(config.ServiceRootpath.GetString()) + + os.Exit(m.Run()) } diff --git a/pkg/models/models.go b/pkg/models/models.go index 37088c081..36d9387f6 100644 --- a/pkg/models/models.go +++ b/pkg/models/models.go @@ -20,10 +20,9 @@ import ( "code.vikunja.io/api/pkg/config" "code.vikunja.io/api/pkg/db" "code.vikunja.io/api/pkg/log" - "encoding/gob" _ "github.com/go-sql-driver/mysql" // Because. "github.com/go-xorm/xorm" - xrc "github.com/go-xorm/xorm-redis-cache" + _ "github.com/mattn/go-sqlite3" // Because. ) @@ -50,6 +49,7 @@ func GetTables() []interface{} { &TaskReminder{}, &LinkSharing{}, &TaskRelation{}, + &TaskAttachment{}, } } @@ -62,19 +62,8 @@ func SetEngine() (err error) { } // Cache - // We have to initialize the cache here to avoid import cycles - if config.CacheEnabled.GetBool() { - switch config.CacheType.GetString() { - case "memory": - cacher := xorm.NewLRUCacher(xorm.NewMemoryStore(), config.CacheMaxElementSize.GetInt()) - x.SetDefaultCacher(cacher) - case "redis": - cacher := xrc.NewRedisCacher(config.RedisEnabled.GetString(), config.RedisPassword.GetString(), xrc.DEFAULT_EXPIRATION, x.Logger()) - x.SetDefaultCacher(cacher) - gob.Register(GetTables()) - default: - log.Info("Did not find a valid cache type. Caching disabled. Please refer to the docs for poosible cache types.") - } + if config.CacheEnabled.GetBool() && config.CacheType.GetString() == "redis" { + db.RegisterTableStructsForCache(GetTables()) } return nil diff --git a/pkg/models/task_attachment.go b/pkg/models/task_attachment.go new file mode 100644 index 000000000..e285e76a7 --- /dev/null +++ b/pkg/models/task_attachment.go @@ -0,0 +1,186 @@ +// Vikunja is a todo-list application to facilitate your life. +// Copyright 2019 Vikunja and contributors. All rights reserved. +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . 
+ +package models + +import ( + "code.vikunja.io/api/pkg/files" + "code.vikunja.io/web" + "io" + "time" +) + +// TaskAttachment is the definition of a task attachment +type TaskAttachment struct { + ID int64 `xorm:"int(11) autoincr not null unique pk" json:"id" param:"attachment"` + TaskID int64 `xorm:"int(11) not null" json:"task_id" param:"task"` + FileID int64 `xorm:"int(11) not null" json:"-"` + + CreatedByID int64 `xorm:"int(11) not null" json:"-"` + CreatedBy *User `xorm:"-" json:"created_by"` + + File *files.File `xorm:"-" json:"file"` + + Created int64 `xorm:"created" json:"created"` + + web.CRUDable `xorm:"-" json:"-"` + web.Rights `xorm:"-" json:"-"` +} + +// TableName returns the table name for task attachments +func (TaskAttachment) TableName() string { + return "task_attachments" +} + +// NewAttachment creates a new task attachment +// Note: I'm not sure if only accepting an io.ReadCloser and not an afero.File or os.File instead is a good way of doing things. +func (ta *TaskAttachment) NewAttachment(f io.ReadCloser, realname string, realsize int64, a web.Auth) error { + + // Store the file + file, err := files.Create(f, realname, realsize, a) + if err != nil { + if files.IsErrFileIsTooLarge(err) { + return ErrTaskAttachmentIsTooLarge{Size: realsize} + } + return err + } + ta.File = file + + // Add an entry to the db + ta.FileID = file.ID + ta.CreatedByID = a.GetID() + _, err = x.Insert(ta) + if err != nil { + // remove the uploaded file if adding it to the db fails + if err2 := file.Delete(); err2 != nil { + return err2 + } + return err + } + + return nil +} + +// ReadOne returns a task attachment +func (ta *TaskAttachment) ReadOne() (err error) { + exists, err := x.Where("id = ?", ta.ID).Get(ta) + if err != nil { + return + } + if !exists { + return ErrTaskAttachmentDoesNotExist{ + TaskID: ta.TaskID, + AttachmentID: ta.ID, + } + } + + // Get the file + ta.File = &files.File{ID: ta.FileID} + err = ta.File.LoadFileMetaByID() + return +} + +// ReadAll returns a list with all attachments +// @Summary Get all attachments for one task. +// @Description Get all task attachments for one task. +// @tags task +// @Accept json +// @Produce json +// @Param id path int true "Task ID" +// @Security JWTKeyAuth +// @Success 200 {array} models.TaskAttachment "All attachments for this task" +// @Failure 403 {object} models.Message "No access to this task." +// @Failure 404 {object} models.Message "The task does not exist." +// @Failure 500 {object} models.Message "Internal error" +// @Router /tasks/{id}/attachments [get] +func (ta *TaskAttachment) ReadAll(s string, a web.Auth, page int) (interface{}, error) { + attachments := []*TaskAttachment{} + + err := x. + Limit(getLimitFromPageIndex(page)). + Where("task_id = ?", ta.TaskID). 
+ Find(&attachments) + if err != nil { + return nil, err + } + + fileIDs := make([]int64, 0, len(attachments)) + userIDs := make([]int64, 0, len(attachments)) + for _, r := range attachments { + fileIDs = append(fileIDs, r.FileID) + userIDs = append(userIDs, r.CreatedByID) + } + + fs := make(map[int64]*files.File) + err = x.In("id", fileIDs).Find(&fs) + if err != nil { + return nil, err + } + + us := make(map[int64]*User) + err = x.In("id", userIDs).Find(&us) + if err != nil { + return nil, err + } + + for _, r := range attachments { + // If the actual file does not exist, don't try to load it as that would fail with nil panic + if _, exists := fs[r.FileID]; !exists { + continue + } + r.File = fs[r.FileID] + r.File.Created = time.Unix(r.File.CreatedUnix, 0) + r.CreatedBy = us[r.CreatedByID] + } + + return attachments, err +} + +// Delete removes an attachment +// @Summary Delete an attachment +// @Description Delete an attachment. +// @tags task +// @Accept json +// @Produce json +// @Param id path int true "Task ID" +// @Param attachmentID path int true "Attachment ID" +// @Security JWTKeyAuth +// @Success 200 {object} models.Message "The attachment was deleted successfully." +// @Failure 403 {object} models.Message "No access to this task." +// @Failure 404 {object} models.Message "The task does not exist." +// @Failure 500 {object} models.Message "Internal error" +// @Router /tasks/{id}/attachments/{attachmentID} [delete] +func (ta *TaskAttachment) Delete() error { + // Load the attachment + err := ta.ReadOne() + if err != nil && !files.IsErrFileDoesNotExist(err) { + return err + } + + // Delete it + _, err = x.Where("task_id = ? AND id = ?", ta.TaskID, ta.ID).Delete(ta) + if err != nil { + return err + } + + // Delete the underlying file + err = ta.File.Delete() + // If the file does not exist, we don't want to error out + if err != nil && files.IsErrFileDoesNotExist(err) { + return nil + } + return err +} diff --git a/pkg/models/task_attachment_rights.go b/pkg/models/task_attachment_rights.go new file mode 100644 index 000000000..161538110 --- /dev/null +++ b/pkg/models/task_attachment_rights.go @@ -0,0 +1,46 @@ +// Vikunja is a todo-list application to facilitate your life. +// Copyright 2019 Vikunja and contributors. All rights reserved. +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . 
+ +package models + +import "code.vikunja.io/web" + +// CanRead checks if the user can see an attachment +func (ta *TaskAttachment) CanRead(a web.Auth) (bool, error) { + t, err := GetTaskByIDSimple(ta.TaskID) + if err != nil { + return false, err + } + return t.CanRead(a) +} + +// CanDelete checks if the user can delete an attachment +func (ta *TaskAttachment) CanDelete(a web.Auth) (bool, error) { + t, err := GetTaskByIDSimple(ta.TaskID) + if err != nil { + return false, err + } + return t.CanWrite(a) +} + +// CanCreate checks if the user can create an attachment +func (ta *TaskAttachment) CanCreate(a web.Auth) (bool, error) { + t, err := GetTaskByIDSimple(ta.TaskID) + if err != nil { + return false, err + } + return t.CanCreate(a) +} diff --git a/pkg/models/task_attachment_test.go b/pkg/models/task_attachment_test.go new file mode 100644 index 000000000..d81b58467 --- /dev/null +++ b/pkg/models/task_attachment_test.go @@ -0,0 +1,152 @@ +// Copyright 2019 Vikunja and contriubtors. All rights reserved. +// +// This file is part of Vikunja. +// +// Vikunja is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// Vikunja is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with Vikunja. If not, see . + +package models + +import ( + "code.vikunja.io/api/pkg/config" + "code.vikunja.io/api/pkg/files" + "github.com/stretchr/testify/assert" + "io" + "os" + "strconv" + "testing" +) + +func TestTaskAttachment_ReadOne(t *testing.T) { + t.Run("Normal File", func(t *testing.T) { + files.InitTestFileFixtures(t) + ta := &TaskAttachment{ + ID: 1, + } + err := ta.ReadOne() + assert.NoError(t, err) + assert.NotNil(t, ta.File) + assert.True(t, ta.File.ID == ta.FileID && ta.FileID != 0) + + // Load the actual attachment file and check its content + err = ta.File.LoadFileByID() + assert.NoError(t, err) + assert.Equal(t, config.FilesBasePath.GetString()+"/1", ta.File.File.Name()) + content := make([]byte, 9) + read, err := ta.File.File.Read(content) + assert.NoError(t, err) + assert.Equal(t, 9, read) + assert.Equal(t, []byte("testfile1"), content) + }) + t.Run("Nonexisting Attachment", func(t *testing.T) { + files.InitTestFileFixtures(t) + ta := &TaskAttachment{ + ID: 9999, + } + err := ta.ReadOne() + assert.Error(t, err) + assert.True(t, IsErrTaskAttachmentDoesNotExist(err)) + }) + t.Run("Existing Attachment, Nonexisting File", func(t *testing.T) { + files.InitTestFileFixtures(t) + ta := &TaskAttachment{ + ID: 2, + } + err := ta.ReadOne() + assert.Error(t, err) + assert.EqualError(t, err, "file 9999 does not exist") + }) +} + +type testfile struct { + content []byte + done bool +} + +func (t *testfile) Read(p []byte) (n int, err error) { + if t.done { + return 0, io.EOF + } + copy(p, t.content) + t.done = true + return len(p), nil +} + +func (t *testfile) Close() error { + return nil +} + +func TestTaskAttachment_NewAttachment(t *testing.T) { + files.InitTestFileFixtures(t) + // Assert the file is being stored correctly + ta := TaskAttachment{ + TaskID: 1, + } + tf := &testfile{ + content: []byte("testingstuff"), + } + testuser := &User{ID: 1} + + err := 
ta.NewAttachment(tf, "testfile", 100, testuser) + assert.NoError(t, err) + assert.NotEqual(t, 0, ta.FileID) + _, err = files.FileStat("files/" + strconv.FormatInt(ta.FileID, 10)) + assert.NoError(t, err) + assert.False(t, os.IsNotExist(err)) + assert.Equal(t, testuser.ID, ta.CreatedByID) + + // Check the file was inserted correctly + ta.File = &files.File{ID: ta.FileID} + err = ta.File.LoadFileMetaByID() + assert.NoError(t, err) + assert.Equal(t, testuser.ID, ta.File.CreatedByID) + assert.Equal(t, "testfile", ta.File.Name) + assert.Equal(t, int64(100), ta.File.Size) + + // Extra test for max size test +} + +func TestTaskAttachment_ReadAll(t *testing.T) { + files.InitTestFileFixtures(t) + ta := &TaskAttachment{TaskID: 1} + as, err := ta.ReadAll("", &User{ID: 1}, 0) + attachments, _ := as.([]*TaskAttachment) + assert.NoError(t, err) + assert.Len(t, attachments, 3) + assert.Equal(t, "test", attachments[0].File.Name) +} + +func TestTaskAttachment_Delete(t *testing.T) { + files.InitTestFileFixtures(t) + t.Run("Normal", func(t *testing.T) { + ta := &TaskAttachment{ID: 1} + err := ta.Delete() + assert.NoError(t, err) + // Check if the file itself was deleted + _, err = files.FileStat("/1") // The new file has the id 2 since it's the second attachment + assert.True(t, os.IsNotExist(err)) + }) + t.Run("Nonexisting", func(t *testing.T) { + files.InitTestFileFixtures(t) + ta := &TaskAttachment{ID: 9999} + err := ta.Delete() + assert.Error(t, err) + assert.True(t, IsErrTaskAttachmentDoesNotExist(err)) + }) + t.Run("Existing attachment, nonexisting file", func(t *testing.T) { + files.InitTestFileFixtures(t) + ta := &TaskAttachment{ID: 2} + err := ta.Delete() + assert.NoError(t, err) + }) +} diff --git a/pkg/models/task_readall_test.go b/pkg/models/task_readall_test.go index 51f57d0eb..e87ba29e3 100644 --- a/pkg/models/task_readall_test.go +++ b/pkg/models/task_readall_test.go @@ -7,6 +7,8 @@ package models import ( + "code.vikunja.io/api/pkg/db" + "code.vikunja.io/api/pkg/files" "github.com/stretchr/testify/assert" "gopkg.in/d4l3k/messagediff.v1" "sort" @@ -67,6 +69,29 @@ func sortTasksForTesting(by SortBy) (tasks []*Task) { }, }, }, + Attachments: []*TaskAttachment{ + { + ID: 1, + TaskID: 1, + FileID: 1, + CreatedByID: 1, + CreatedBy: user1, + File: &files.File{ + ID: 1, + Name: "test", + Size: 100, + CreatedUnix: 1570998791, + CreatedByID: 1, + }, + }, + { + ID: 2, + TaskID: 1, + FileID: 9999, + CreatedByID: 1, + CreatedBy: user1, + }, + }, Created: 1543626724, Updated: 1543626724, }, @@ -434,7 +459,7 @@ func sortTasksForTesting(by SortBy) (tasks []*Task) { } func TestTask_ReadAll(t *testing.T) { - assert.NoError(t, LoadFixtures()) + assert.NoError(t, db.LoadFixtures()) // Dummy users user1 := &User{ diff --git a/pkg/models/tasks.go b/pkg/models/tasks.go index 491363ebb..7584df8ab 100644 --- a/pkg/models/tasks.go +++ b/pkg/models/tasks.go @@ -17,6 +17,7 @@ package models import ( + "code.vikunja.io/api/pkg/files" "code.vikunja.io/api/pkg/metrics" "code.vikunja.io/api/pkg/utils" "code.vikunja.io/web" @@ -71,6 +72,9 @@ type Task struct { // All related tasks, grouped by their relation kind RelatedTasks RelatedTaskMap `xorm:"-" json:"related_tasks"` + // All attachments this task has + Attachments []*TaskAttachment `xorm:"-" json:"attachments"` + // A unix timestamp when this task was created. You cannot change this value. Created int64 `xorm:"created not null" json:"created"` // A unix timestamp when this task was last updated. You cannot change this value. 
@@ -409,6 +413,28 @@ func addMoreInfoToTasks(taskMap map[int64]*Task) (tasks []*Task, err error) { } } + // Get task attachments + attachments := []*TaskAttachment{} + err = x. + In("task_id", taskIDs). + Find(&attachments) + if err != nil { + return nil, err + } + + fileIDs := []int64{} + for _, a := range attachments { + userIDs = append(userIDs, a.CreatedByID) + fileIDs = append(fileIDs, a.FileID) + } + + // Get all files + fs := make(map[int64]*files.File) + err = x.In("id", fileIDs).Find(&fs) + if err != nil { + return + } + // Get all users of a task // aka the ones who created a task users := make(map[int64]*User) @@ -422,6 +448,13 @@ func addMoreInfoToTasks(taskMap map[int64]*Task) (tasks []*Task, err error) { u.Email = "" } + // Put the users and files in task attachments + for _, a := range attachments { + a.CreatedBy = users[a.CreatedByID] + a.File = fs[a.FileID] + taskMap[a.TaskID].Attachments = append(taskMap[a.TaskID].Attachments, a) + } + // Get all reminders and put them in a map to have it easier later reminders := []*TaskReminder{} err = x.Table("task_reminders").In("task_id", taskIDs).Find(&reminders) diff --git a/pkg/models/test_fixtures.go b/pkg/models/test_fixtures.go deleted file mode 100644 index ba56af6f3..000000000 --- a/pkg/models/test_fixtures.go +++ /dev/null @@ -1,35 +0,0 @@ -// Vikunja is a todo-list application to facilitate your life. -// Copyright 2018 Vikunja and contributors. All rights reserved. -// -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. -// -// You should have received a copy of the GNU General Public License -// along with this program. If not, see . - -package models - -import ( - "gopkg.in/testfixtures.v2" -) - -var fixtures *testfixtures.Context - -// InitFixtures initialize test fixtures for a test database -func InitFixtures(helper testfixtures.Helper, dir string) (err error) { - testfixtures.SkipDatabaseNameCheck(true) - fixtures, err = testfixtures.NewFolder(x.DB().DB, helper, dir) - return err -} - -// LoadFixtures load fixtures for a test database -func LoadFixtures() error { - return fixtures.Load() -} diff --git a/pkg/models/unit_tests.go b/pkg/models/unit_tests.go index dc41ab8a5..677bbd49f 100644 --- a/pkg/models/unit_tests.go +++ b/pkg/models/unit_tests.go @@ -19,23 +19,16 @@ package models import ( "code.vikunja.io/api/pkg/config" _ "code.vikunja.io/api/pkg/config" // To trigger its init() which initializes the config + "code.vikunja.io/api/pkg/db" "code.vikunja.io/api/pkg/log" "code.vikunja.io/api/pkg/mail" "fmt" - "github.com/go-xorm/core" "github.com/go-xorm/xorm" "gopkg.in/testfixtures.v2" "os" "path/filepath" - "testing" ) -// MainTest creates the test engine -func MainTest(m *testing.M, pathToRoot string) { - SetupTests(pathToRoot) - os.Exit(m.Run()) -} - // SetupTests takes care of seting up the db, fixtures etc. // This is an extra function to be able to call the fixtures setup from the integration tests. 
func SetupTests(pathToRoot string) { @@ -49,7 +42,7 @@ func SetupTests(pathToRoot string) { mail.StartMailDaemon() // Create test database - if err = LoadFixtures(); err != nil { + if err = db.LoadFixtures(); err != nil { log.Fatalf("Error preparing test database: %v", err.Error()) } } @@ -59,13 +52,12 @@ func createTestEngine(fixturesDir string) error { var fixturesHelper testfixtures.Helper = &testfixtures.SQLite{} // If set, use the config we provided instead of normal if os.Getenv("VIKUNJA_TESTS_USE_CONFIG") == "1" { - config.InitConfig() - err = SetEngine() + x, err = db.CreateTestEngine() if err != nil { - return err + return fmt.Errorf("error getting test engine: %v", err) } - err = x.Sync2(GetTables()...) + err = initSchema(x) if err != nil { return err } @@ -74,23 +66,21 @@ func createTestEngine(fixturesDir string) error { fixturesHelper = &testfixtures.MySQL{} } } else { - x, err = xorm.NewEngine("sqlite3", "file::memory:?cache=shared") + x, err = db.CreateTestEngine() if err != nil { - return err + return fmt.Errorf("error getting test engine: %v", err) } - x.SetMapper(core.GonicMapper{}) - // Sync dat shit - if err := x.Sync2(GetTables()...); err != nil { + err = initSchema(x) + if err != nil { return fmt.Errorf("sync database struct error: %v", err) } - - // Show SQL-Queries if necessary - if os.Getenv("UNIT_TESTS_VERBOSE") == "1" { - x.ShowSQL(true) - } } - return InitFixtures(fixturesHelper, fixturesDir) + return db.InitFixtures(fixturesHelper, fixturesDir) +} + +func initSchema(tx *xorm.Engine) error { + return tx.Sync2(GetTables()...) } diff --git a/pkg/models/users_list_test.go b/pkg/models/users_list_test.go index 62d096218..2512088d9 100644 --- a/pkg/models/users_list_test.go +++ b/pkg/models/users_list_test.go @@ -1,6 +1,7 @@ package models import ( + "code.vikunja.io/api/pkg/db" "github.com/stretchr/testify/assert" "gopkg.in/d4l3k/messagediff.v1" "testing" @@ -8,7 +9,7 @@ import ( func TestListUsersFromList(t *testing.T) { - err := LoadFixtures() + err := db.LoadFixtures() assert.NoError(t, err) testuser1 := &User{ diff --git a/pkg/routes/api/v1/info.go b/pkg/routes/api/v1/info.go index ee22985f5..ed4380919 100644 --- a/pkg/routes/api/v1/info.go +++ b/pkg/routes/api/v1/info.go @@ -28,6 +28,7 @@ type vikunjaInfos struct { FrontendURL string `json:"frontend_url"` Motd string `json:"motd"` LinkSharingEnabled bool `json:"link_sharing_enabled"` + MaxFileSize int64 `json:"max_file_size"` } // Info is the handler to get infos about this vikunja instance @@ -43,5 +44,6 @@ func Info(c echo.Context) error { FrontendURL: config.ServiceFrontendurl.GetString(), Motd: config.ServiceMotd.GetString(), LinkSharingEnabled: config.ServiceEnableLinkSharing.GetBool(), + MaxFileSize: config.FilesMaxSize.GetInt64(), }) } diff --git a/pkg/routes/api/v1/list_by_namespace.go b/pkg/routes/api/v1/list_by_namespace.go index 2b671ac6d..78dbc70ef 100644 --- a/pkg/routes/api/v1/list_by_namespace.go +++ b/pkg/routes/api/v1/list_by_namespace.go @@ -77,7 +77,7 @@ func getNamespace(c echo.Context) (namespace *models.Namespace, err error) { if err != nil { return } - namespace.ID = namespaceID + namespace = &models.Namespace{ID: namespaceID} canRead, err := namespace.CanRead(user) if err != nil { return namespace, err diff --git a/pkg/routes/api/v1/task_attachment.go b/pkg/routes/api/v1/task_attachment.go new file mode 100644 index 000000000..81ac7b34c --- /dev/null +++ b/pkg/routes/api/v1/task_attachment.go @@ -0,0 +1,143 @@ +// Vikunja is a todo-list application to facilitate your life. 
+// Copyright 2019 Vikunja and contributors. All rights reserved. +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +package v1 + +import ( + "code.vikunja.io/api/pkg/models" + "code.vikunja.io/web/handler" + "github.com/labstack/echo/v4" + "net/http" +) + +// UploadTaskAttachment handles everything needed for the upload of a task attachment +// @Summary Upload a task attachment +// @Description Upload a task attachment. You can pass multiple files with the files form param. +// @tags task +// @Accept mpfd +// @Produce json +// @Param id path int true "Task ID" +// @Param files formData string true "The file, as multipart form file. You can pass multiple." +// @Security JWTKeyAuth +// @Success 200 {object} models.Message "Attachments were uploaded successfully." +// @Failure 403 {object} models.Message "No access to the task." +// @Failure 404 {object} models.Message "The task does not exist." +// @Failure 500 {object} models.Message "Internal error" +// @Router /tasks/{id}/attachments [put] +func UploadTaskAttachment(c echo.Context) error { + + var taskAttachment models.TaskAttachment + if err := c.Bind(&taskAttachment); err != nil { + return echo.NewHTTPError(http.StatusBadRequest, "No task ID provided") + } + + // Rights check + user, err := models.GetCurrentUser(c) + if err != nil { + return handler.HandleHTTPError(err, c) + } + can, err := taskAttachment.CanCreate(user) + if err != nil { + return handler.HandleHTTPError(err, c) + } + if !can { + return echo.ErrForbidden + } + + // Multipart form + form, err := c.MultipartForm() + if err != nil { + return handler.HandleHTTPError(err, c) + } + + type result struct { + Errors []*echo.HTTPError `json:"errors"` + Success []*models.TaskAttachment `json:"success"` + } + r := &result{} + fileHeaders := form.File["files"] + for _, file := range fileHeaders { + // We create a new attachment object here to have a clean start + ta := &models.TaskAttachment{ + TaskID: taskAttachment.TaskID, + } + + f, err := file.Open() + if err != nil { + r.Errors = append(r.Errors, handler.HandleHTTPError(err, c)) + continue + } + defer f.Close() + + err = ta.NewAttachment(f, file.Filename, file.Size, user) + if err != nil { + r.Errors = append(r.Errors, handler.HandleHTTPError(err, c)) + continue + } + r.Success = append(r.Success, ta) + } + + return c.JSON(http.StatusOK, r) +} + +// GetTaskAttachment returns a task attachment to download for the user +// @Summary Get one attachment. +// @Description Get one attachment for download. **Returns json on error.** +// @tags task +// @Produce octet-stream +// @Param id path int true "Task ID" +// @Param attachmentID path int true "Attachment ID" +// @Security JWTKeyAuth +// @Success 200 {} string "The attachment file." +// @Failure 403 {object} models.Message "No access to this task." +// @Failure 404 {object} models.Message "The task does not exist." 
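Note: for reference, a hypothetical client-side upload against the PUT /tasks/{id}/attachments handler above, using only the standard library. The base URL, token and file path are placeholders; the multipart field name must be "files" as documented.

package main

import (
	"bytes"
	"fmt"
	"io"
	"mime/multipart"
	"net/http"
	"os"
)

// uploadAttachment sends one file as a "files" multipart form field.
func uploadAttachment(baseURL, token string, taskID int64, path string) error {
	f, err := os.Open(path)
	if err != nil {
		return err
	}
	defer f.Close()

	var body bytes.Buffer
	w := multipart.NewWriter(&body)
	part, err := w.CreateFormFile("files", path) // field name expected by the handler
	if err != nil {
		return err
	}
	if _, err := io.Copy(part, f); err != nil {
		return err
	}
	if err := w.Close(); err != nil {
		return err
	}

	req, err := http.NewRequest(http.MethodPut,
		fmt.Sprintf("%s/api/v1/tasks/%d/attachments", baseURL, taskID), &body)
	if err != nil {
		return err
	}
	req.Header.Set("Content-Type", w.FormDataContentType())
	req.Header.Set("Authorization", "Bearer "+token)

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
	return nil
}

func main() {
	if err := uploadAttachment("http://localhost:3456", os.Getenv("VIKUNJA_TOKEN"), 1, "notes.txt"); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}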
+// @Failure 500 {object} models.Message "Internal error" +// @Router /tasks/{id}/attachments/{attachmentID} [get] +func GetTaskAttachment(c echo.Context) error { + + var taskAttachment models.TaskAttachment + if err := c.Bind(&taskAttachment); err != nil { + return echo.NewHTTPError(http.StatusBadRequest, "No task ID provided") + } + + // Rights check + user, err := models.GetCurrentUser(c) + if err != nil { + return handler.HandleHTTPError(err, c) + } + can, err := taskAttachment.CanRead(user) + if err != nil { + return handler.HandleHTTPError(err, c) + } + if !can { + return echo.ErrForbidden + } + + // Get the attachment incl file + err = taskAttachment.ReadOne() + if err != nil { + return handler.HandleHTTPError(err, c) + } + + // Open an send the file to the client + err = taskAttachment.File.LoadFileByID() + if err != nil { + return handler.HandleHTTPError(err, c) + } + + http.ServeContent(c.Response(), c.Request(), taskAttachment.File.Name, taskAttachment.File.Created, taskAttachment.File.File) + return nil +} diff --git a/pkg/routes/routes.go b/pkg/routes/routes.go index 30f40f224..fa426ac33 100644 --- a/pkg/routes/routes.go +++ b/pkg/routes/routes.go @@ -17,7 +17,7 @@ // @title Vikunja API // @description This is the documentation for the [Vikunja](http://vikunja.io) API. Vikunja is a cross-plattform Todo-application with a lot of features, such as sharing lists with users or teams. // @description # Authorization -// @description **JWT-Auth:** Main authorization method, used for most of the requests. Needs ` + "`" + `Authorization: Bearer ` + "`" + `-header to authenticate successfully. +// @description **JWT-Auth:** Main authorization method, used for most of the requests. Needs `Authorization: Bearer `-header to authenticate successfully. // @description // @description **BasicAuth:** Only used when requesting tasks via caldav. // @description @@ -266,6 +266,16 @@ func registerAPIRoutes(a *echo.Group) { a.PUT("/tasks/:task/relations", taskRelationHandler.CreateWeb) a.DELETE("/tasks/:task/relations", taskRelationHandler.DeleteWeb) + taskAttachmentHandler := &handler.WebHandler{ + EmptyStruct: func() handler.CObject { + return &models.TaskAttachment{} + }, + } + a.GET("/tasks/:task/attachments", taskAttachmentHandler.ReadAllWeb) + a.DELETE("/tasks/:task/attachments/:attachment", taskAttachmentHandler.DeleteWeb) + a.PUT("/tasks/:task/attachments", apiv1.UploadTaskAttachment) + a.GET("/tasks/:task/attachments/:attachment", apiv1.GetTaskAttachment) + labelHandler := &handler.WebHandler{ EmptyStruct: func() handler.CObject { return &models.Label{} diff --git a/pkg/swagger/docs.go b/pkg/swagger/docs.go index 9da8dd62a..e3a043696 100644 --- a/pkg/swagger/docs.go +++ b/pkg/swagger/docs.go @@ -1,21 +1,24 @@ // GENERATED BY THE COMMAND ABOVE; DO NOT EDIT // This file was generated by swaggo/swag at -// 2019-09-25 20:06:34.002904022 +0200 CEST m=+0.196783505 +// 2019-10-14 22:46:05.9838707 +0200 CEST m=+0.157234506 package swagger import ( "bytes" + "encoding/json" + "strings" "github.com/alecthomas/template" "github.com/swaggo/swag" ) var doc = `{ + "schemes": {{ marshal .Schemes }}, "swagger": "2.0", "info": { - "description": "This is the documentation for the [Vikunja](http://vikunja.io) API. Vikunja is a cross-plattform Todo-application with a lot of features, such as sharing lists with users or teams. \u003c!-- ReDoc-Inject: \u003csecurity-definitions\u003e --\u003e\n# Authorization\n**JWT-Auth:** Main authorization method, used for most of the requests. 
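Note: the matching hypothetical download against the GET /tasks/{id}/attachments/{attachmentID} route registered above, saving the octet-stream body that http.ServeContent writes. URL, token and IDs are placeholders.

package main

import (
	"fmt"
	"io"
	"net/http"
	"os"
)

// downloadAttachment fetches one attachment and writes it to dest.
func downloadAttachment(baseURL, token string, taskID, attachmentID int64, dest string) error {
	url := fmt.Sprintf("%s/api/v1/tasks/%d/attachments/%d", baseURL, taskID, attachmentID)
	req, err := http.NewRequest(http.MethodGet, url, nil)
	if err != nil {
		return err
	}
	req.Header.Set("Authorization", "Bearer "+token)

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("unexpected status: %s", resp.Status)
	}

	out, err := os.Create(dest)
	if err != nil {
		return err
	}
	defer out.Close()

	// The body is the raw file content served by http.ServeContent.
	_, err = io.Copy(out, resp.Body)
	return err
}

func main() {
	if err := downloadAttachment("http://localhost:3456", os.Getenv("VIKUNJA_TOKEN"), 1, 1, "attachment.bin"); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}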
Needs ` + "`" + `Authorization: Bearer \u003cjwt-token\u003e` + "`" + `-header to authenticate successfully.\n\n**BasicAuth:** Only used when requesting tasks via caldav.\n\u003c!-- ReDoc-Inject: \u003csecurity-definitions\u003e --\u003e", - "title": "Vikunja API", + "description": "{{.Description}}", + "title": "{{.Title}}", "contact": { "name": "General Vikunja contact", "url": "http://vikunja.io/en/contact/", @@ -28,7 +31,7 @@ var doc = `{ "version": "{{.Version}}" }, "host": "{{.Host}}", - "basePath": "/api/v1", + "basePath": "{{.BasePath}}", "paths": { "/info": { "get": { @@ -44,7 +47,6 @@ var doc = `{ "200": { "description": "OK", "schema": { - "type": "object", "$ref": "#/definitions/v1.vikunjaInfos" } } @@ -96,7 +98,6 @@ var doc = `{ "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -135,21 +136,18 @@ var doc = `{ "200": { "description": "The created label object.", "schema": { - "type": "object", "$ref": "#/definitions/models.Label" } }, "400": { "description": "Invalid label object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -187,28 +185,24 @@ var doc = `{ "200": { "description": "The label", "schema": { - "type": "object", "$ref": "#/definitions/models.Label" } }, "403": { "description": "The user does not have access to the label", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "404": { "description": "Label not found", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -254,35 +248,30 @@ var doc = `{ "200": { "description": "The created label object.", "schema": { - "type": "object", "$ref": "#/definitions/models.Label" } }, "400": { "description": "Invalid label object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "403": { "description": "Not allowed to update the label.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "404": { "description": "Label not found.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -318,28 +307,24 @@ var doc = `{ "200": { "description": "The label was successfully deleted.", "schema": { - "type": "object", "$ref": "#/definitions/models.Label" } }, "403": { "description": "Not allowed to delete the label.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "404": { "description": "Label not found.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -391,14 +376,12 @@ var doc = `{ "403": { "description": "The user does not have access to the list", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -436,21 +419,18 @@ var doc = `{ "200": { "description": "The list", "schema": { - "type": 
"object", "$ref": "#/definitions/models.List" } }, "403": { "description": "The user does not have access to the list", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -496,28 +476,24 @@ var doc = `{ "200": { "description": "The created task object.", "schema": { - "type": "object", "$ref": "#/definitions/models.Task" } }, "400": { "description": "Invalid task object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "403": { "description": "The user does not have access to the list", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -563,28 +539,24 @@ var doc = `{ "200": { "description": "The updated list.", "schema": { - "type": "object", "$ref": "#/definitions/models.List" } }, "400": { "description": "Invalid list object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "403": { "description": "The user does not have access to the list", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -617,28 +589,24 @@ var doc = `{ "200": { "description": "The list was successfully deleted.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "400": { "description": "Invalid list object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "403": { "description": "The user does not have access to the list", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -691,21 +659,18 @@ var doc = `{ "400": { "description": "Something's invalid.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "401": { "description": "The user does not have the right to see the list.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal server error.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -764,14 +729,12 @@ var doc = `{ "403": { "description": "No right to see the list.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -817,35 +780,30 @@ var doc = `{ "200": { "description": "The created team\u003c-\u003elist relation.", "schema": { - "type": "object", "$ref": "#/definitions/models.TeamList" } }, "400": { "description": "Invalid team list object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "403": { "description": "The user does not have access to the list", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "404": { "description": "The team does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": 
"object", "$ref": "#/definitions/models.Message" } } @@ -904,14 +862,12 @@ var doc = `{ "403": { "description": "No right to see the list.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -957,35 +913,30 @@ var doc = `{ "200": { "description": "The created user\u003c-\u003elist relation.", "schema": { - "type": "object", "$ref": "#/definitions/models.ListUser" } }, "400": { "description": "Invalid user list object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "403": { "description": "The user does not have access to the list", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "404": { "description": "The user does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1040,28 +991,24 @@ var doc = `{ "200": { "description": "The updated team \u003c-\u003e list relation.", "schema": { - "type": "object", "$ref": "#/definitions/models.TeamList" } }, "403": { "description": "The user does not have admin-access to the list", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "404": { "description": "Team or list does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1101,28 +1048,24 @@ var doc = `{ "200": { "description": "The team was successfully deleted.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "403": { "description": "The user does not have access to the list", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "404": { "description": "Team or list does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1177,28 +1120,24 @@ var doc = `{ "200": { "description": "The updated user \u003c-\u003e list relation.", "schema": { - "type": "object", "$ref": "#/definitions/models.ListUser" } }, "403": { "description": "The user does not have admin-access to the list", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "404": { "description": "User or list does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1238,28 +1177,24 @@ var doc = `{ "200": { "description": "The user was successfully removed from the list.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "403": { "description": "The user does not have access to the list", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "404": { "description": "user or list does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ 
-1318,7 +1253,6 @@ var doc = `{ "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1364,35 +1298,30 @@ var doc = `{ "200": { "description": "The created link share object.", "schema": { - "type": "object", "$ref": "#/definitions/models.LinkSharing" } }, "400": { "description": "Invalid link share object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "403": { "description": "Not allowed to add the list share.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "404": { "description": "The list does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1437,28 +1366,24 @@ var doc = `{ "200": { "description": "The share links", "schema": { - "type": "object", "$ref": "#/definitions/models.LinkSharing" } }, "403": { "description": "No access to the list", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "404": { "description": "Share Link not found.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1501,28 +1426,24 @@ var doc = `{ "200": { "description": "The link was successfully removed.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "403": { "description": "Not allowed to remove the link.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "404": { "description": "Share Link not found.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1558,21 +1479,18 @@ var doc = `{ "200": { "description": "OK", "schema": { - "type": "object", "$ref": "#/definitions/v1.Token" } }, "400": { "description": "Invalid user password model.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "403": { "description": "Invalid username or password.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1620,28 +1538,24 @@ var doc = `{ "200": { "description": "The updated namespace.", "schema": { - "type": "object", "$ref": "#/definitions/models.Namespace" } }, "400": { "description": "Invalid namespace object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "403": { "description": "The user does not have access to the namespace", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1693,7 +1607,6 @@ var doc = `{ "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1732,28 +1645,24 @@ var doc = `{ "200": { "description": "The created namespace.", "schema": { - "type": "object", "$ref": "#/definitions/models.Namespace" } }, "400": { "description": "Invalid namespace object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "403": { "description": "The user does not have 
access to the namespace", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1791,21 +1700,18 @@ var doc = `{ "200": { "description": "The Namespace", "schema": { - "type": "object", "$ref": "#/definitions/models.Namespace" } }, "403": { "description": "The user does not have access to that namespace.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1838,28 +1744,24 @@ var doc = `{ "200": { "description": "The namespace was successfully deleted.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "400": { "description": "Invalid namespace object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "403": { "description": "The user does not have access to the namespace", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1906,21 +1808,18 @@ var doc = `{ "403": { "description": "No access to that namespace.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "404": { "description": "The namespace does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1979,14 +1878,12 @@ var doc = `{ "403": { "description": "No right to see the namespace.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2032,35 +1929,30 @@ var doc = `{ "200": { "description": "The created team\u003c-\u003enamespace relation.", "schema": { - "type": "object", "$ref": "#/definitions/models.TeamNamespace" } }, "400": { "description": "Invalid team namespace object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "403": { "description": "The team does not have access to the namespace", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "404": { "description": "The team does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2119,14 +2011,12 @@ var doc = `{ "403": { "description": "No right to see the namespace.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2172,35 +2062,30 @@ var doc = `{ "200": { "description": "The created user\u003c-\u003enamespace relation.", "schema": { - "type": "object", "$ref": "#/definitions/models.NamespaceUser" } }, "400": { "description": "Invalid user namespace object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "403": { "description": "The user does not have access to the namespace", "schema": { - "type": "object", 
"$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "404": { "description": "The user does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2248,28 +2133,24 @@ var doc = `{ "200": { "description": "The created list.", "schema": { - "type": "object", "$ref": "#/definitions/models.List" } }, "400": { "description": "Invalid list object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "403": { "description": "The user does not have access to the list", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2324,28 +2205,24 @@ var doc = `{ "200": { "description": "The updated team \u003c-\u003e namespace relation.", "schema": { - "type": "object", "$ref": "#/definitions/models.TeamNamespace" } }, "403": { "description": "The team does not have admin-access to the namespace", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "404": { "description": "Team or namespace does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2385,28 +2262,24 @@ var doc = `{ "200": { "description": "The team was successfully deleted.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "403": { "description": "The team does not have access to the namespace", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "404": { "description": "team or namespace does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2461,28 +2334,24 @@ var doc = `{ "200": { "description": "The updated user \u003c-\u003e namespace relation.", "schema": { - "type": "object", "$ref": "#/definitions/models.NamespaceUser" } }, "403": { "description": "The user does not have admin-access to the namespace", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "404": { "description": "User or namespace does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2522,28 +2391,24 @@ var doc = `{ "200": { "description": "The user was successfully deleted.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "403": { "description": "The user does not have access to the namespace", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "404": { "description": "user or namespace does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2579,21 +2444,18 @@ var doc = `{ "200": { "description": "OK", "schema": { - "type": "object", "$ref": "#/definitions/models.User" } }, "400": { "description": 
"No or invalid user register object provided / User already exists.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2626,21 +2488,18 @@ var doc = `{ "200": { "description": "The valid jwt auth token.", "schema": { - "type": "object", "$ref": "#/definitions/v1.Token" } }, "400": { "description": "Invalid link share object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2710,7 +2569,6 @@ var doc = `{ "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2751,28 +2609,24 @@ var doc = `{ "200": { "description": "The updated task object.", "schema": { - "type": "object", "$ref": "#/definitions/models.Task" } }, "400": { "description": "Invalid task object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "403": { "description": "The user does not have access to the task (aka its list)", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2820,28 +2674,24 @@ var doc = `{ "200": { "description": "The updated task object.", "schema": { - "type": "object", "$ref": "#/definitions/models.Task" } }, "400": { "description": "Invalid task object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "403": { "description": "The user does not have access to the task (aka its list)", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2874,28 +2724,261 @@ var doc = `{ "200": { "description": "The created task object.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "400": { "description": "Invalid task ID provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "403": { "description": "The user does not have access to the list", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", + "$ref": "#/definitions/models.Message" + } + } + } + } + }, + "/tasks/{id}/attachments": { + "get": { + "security": [ + { + "JWTKeyAuth": [] + } + ], + "description": "Get all task attachments for one task.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "task" + ], + "summary": "Get all attachments for one task.", + "parameters": [ + { + "type": "integer", + "description": "Task ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "All attachments for this task", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/models.TaskAttachment" + } + } + }, + "403": { + "description": "No access to this task.", + "schema": { + "$ref": "#/definitions/models.Message" + } + }, + "404": { + "description": "The task does not exist.", + "schema": { + "$ref": "#/definitions/models.Message" + } + }, + "500": { + 
"description": "Internal error", + "schema": { + "$ref": "#/definitions/models.Message" + } + } + } + }, + "put": { + "security": [ + { + "JWTKeyAuth": [] + } + ], + "description": "Upload a task attachment. You can pass multiple files with the files form param.", + "consumes": [ + "multipart/form-data" + ], + "produces": [ + "application/json" + ], + "tags": [ + "task" + ], + "summary": "Upload a task attachment", + "parameters": [ + { + "type": "integer", + "description": "Task ID", + "name": "id", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "The file, as multipart form file. You can pass multiple.", + "name": "files", + "in": "formData", + "required": true + } + ], + "responses": { + "200": { + "description": "Attachments were uploaded successfully.", + "schema": { + "$ref": "#/definitions/models.Message" + } + }, + "403": { + "description": "No access to the task.", + "schema": { + "$ref": "#/definitions/models.Message" + } + }, + "404": { + "description": "The task does not exist.", + "schema": { + "$ref": "#/definitions/models.Message" + } + }, + "500": { + "description": "Internal error", + "schema": { + "$ref": "#/definitions/models.Message" + } + } + } + } + }, + "/tasks/{id}/attachments/{attachmentID}": { + "get": { + "security": [ + { + "JWTKeyAuth": [] + } + ], + "description": "Get one attachment for download. **Returns json on error.**", + "produces": [ + "application/octet-stream" + ], + "tags": [ + "task" + ], + "summary": "Get one attachment.", + "parameters": [ + { + "type": "integer", + "description": "Task ID", + "name": "id", + "in": "path", + "required": true + }, + { + "type": "integer", + "description": "Attachment ID", + "name": "attachmentID", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "The attachment file.", + "schema": { + "type": "" + } + }, + "403": { + "description": "No access to this task.", + "schema": { + "$ref": "#/definitions/models.Message" + } + }, + "404": { + "description": "The task does not exist.", + "schema": { + "$ref": "#/definitions/models.Message" + } + }, + "500": { + "description": "Internal error", + "schema": { + "$ref": "#/definitions/models.Message" + } + } + } + }, + "delete": { + "security": [ + { + "JWTKeyAuth": [] + } + ], + "description": "Delete an attachment.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "task" + ], + "summary": "Delete an attachment", + "parameters": [ + { + "type": "integer", + "description": "Task ID", + "name": "id", + "in": "path", + "required": true + }, + { + "type": "integer", + "description": "Attachment ID", + "name": "attachmentID", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "The attachment was deleted successfully.", + "schema": { + "$ref": "#/definitions/models.Message" + } + }, + "403": { + "description": "No access to this task.", + "schema": { + "$ref": "#/definitions/models.Message" + } + }, + "404": { + "description": "The task does not exist.", + "schema": { + "$ref": "#/definitions/models.Message" + } + }, + "500": { + "description": "Internal error", + "schema": { "$ref": "#/definitions/models.Message" } } @@ -2954,7 +3037,6 @@ var doc = `{ "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3000,21 +3082,18 @@ var doc = `{ "200": { "description": "The created assingee object.", "schema": { - "type": "object", "$ref": 
"#/definitions/models.TaskAssginee" } }, "400": { "description": "Invalid assignee object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3062,21 +3141,18 @@ var doc = `{ "200": { "description": "The created assingees object.", "schema": { - "type": "object", "$ref": "#/definitions/models.TaskAssginee" } }, "400": { "description": "Invalid assignee object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3121,21 +3197,18 @@ var doc = `{ "200": { "description": "The assignee was successfully deleted.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "403": { "description": "Not allowed to delete the assignee.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3183,21 +3256,18 @@ var doc = `{ "200": { "description": "The updated labels object.", "schema": { - "type": "object", "$ref": "#/definitions/models.LabelTaskBulk" } }, "400": { "description": "Invalid label object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3211,7 +3281,7 @@ var doc = `{ "JWTKeyAuth": [] } ], - "description": "Creates a new relation between two tasks. The user needs to have update rights on the base task and at least read rights on the other task. Both tasks do not need to be on the same list.", + "description": "Creates a new relation between two tasks. The user needs to have update rights on the base task and at least read rights on the other task. Both tasks do not need to be on the same list. 
Take a look at the docs for available task relation kinds.", "consumes": [ "application/json" ], @@ -3245,21 +3315,18 @@ var doc = `{ "200": { "description": "The created task relation object.", "schema": { - "type": "object", "$ref": "#/definitions/models.TaskRelation" } }, "400": { "description": "Invalid task relation object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3304,28 +3371,24 @@ var doc = `{ "200": { "description": "The task relation was successfully deleted.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "400": { "description": "Invalid task relation object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "404": { "description": "The task relation was not found.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3384,7 +3447,6 @@ var doc = `{ "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3430,35 +3492,30 @@ var doc = `{ "200": { "description": "The created label relation object.", "schema": { - "type": "object", "$ref": "#/definitions/models.LabelTask" } }, "400": { "description": "Invalid label object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "403": { "description": "Not allowed to add the label.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "404": { "description": "The label does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3503,28 +3560,24 @@ var doc = `{ "200": { "description": "The label was successfully removed.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "403": { "description": "Not allowed to remove the label.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "404": { "description": "Label not found.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3576,7 +3629,6 @@ var doc = `{ "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3615,21 +3667,18 @@ var doc = `{ "200": { "description": "The created team.", "schema": { - "type": "object", "$ref": "#/definitions/models.Team" } }, "400": { "description": "Invalid team object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3677,21 +3726,18 @@ var doc = `{ "200": { "description": "The updated team.", "schema": { - "type": "object", "$ref": "#/definitions/models.Team" } }, "400": { "description": "Invalid team object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { 
- "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3724,21 +3770,18 @@ var doc = `{ "200": { "description": "The team was successfully deleted.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "400": { "description": "Invalid team object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3786,28 +3829,24 @@ var doc = `{ "200": { "description": "The newly created member object", "schema": { - "type": "object", "$ref": "#/definitions/models.TeamMember" } }, "400": { "description": "Invalid member object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "403": { "description": "The user does not have access to the team", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3849,14 +3888,12 @@ var doc = `{ "200": { "description": "The user was successfully removed from the team.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3885,21 +3922,18 @@ var doc = `{ "200": { "description": "OK", "schema": { - "type": "object", "$ref": "#/definitions/models.User" } }, "404": { "description": "User does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal server error.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3935,21 +3969,18 @@ var doc = `{ "200": { "description": "OK", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "412": { "description": "Bad token provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3990,28 +4021,24 @@ var doc = `{ "200": { "description": "OK", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "400": { "description": "Something's invalid.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "404": { "description": "User does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal server error.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -4047,21 +4074,18 @@ var doc = `{ "200": { "description": "OK", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "400": { "description": "Bad token provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -4097,21 +4121,18 @@ var doc = `{ "200": { "description": "OK", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "404": { "description": "The user does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": 
"#/definitions/models.Message" } } @@ -4157,14 +4178,12 @@ var doc = `{ "400": { "description": "Something's invalid.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io.web.HTTPError" } }, "500": { "description": "Internal server error.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -4174,6 +4193,26 @@ var doc = `{ }, "definitions": { "code.vikunja.io.web.HTTPError": {"type": "object","properties": {"code": {"type": "integer"},"message": {"type": "string"}}}, + "files.File": { + "type": "object", + "properties": { + "created": { + "type": "string" + }, + "id": { + "type": "integer" + }, + "mime": { + "type": "string" + }, + "name": { + "type": "string" + }, + "size": { + "type": "integer" + } + } + }, "models.APIUserPassword": { "type": "object", "properties": { @@ -4222,6 +4261,13 @@ var doc = `{ "$ref": "#/definitions/models.User" } }, + "attachments": { + "description": "All attachments this task has", + "type": "array", + "items": { + "$ref": "#/definitions/models.TaskAttachment" + } + }, "created": { "description": "A unix timestamp when this task was created. You cannot change this value.", "type": "integer" @@ -4635,6 +4681,13 @@ var doc = `{ "$ref": "#/definitions/models.User" } }, + "attachments": { + "description": "All attachments this task has", + "type": "array", + "items": { + "$ref": "#/definitions/models.TaskAttachment" + } + }, "created": { "description": "A unix timestamp when this task was created. You cannot change this value.", "type": "integer" @@ -4736,6 +4789,13 @@ var doc = `{ "$ref": "#/definitions/models.User" } }, + "attachments": { + "description": "All attachments this task has", + "type": "array", + "items": { + "$ref": "#/definitions/models.TaskAttachment" + } + }, "created": { "description": "A unix timestamp when this task was created. You cannot change this value.", "type": "integer" @@ -4836,6 +4896,28 @@ var doc = `{ } } }, + "models.TaskAttachment": { + "type": "object", + "properties": { + "created": { + "type": "integer" + }, + "created_by": { + "type": "object", + "$ref": "#/definitions/models.User" + }, + "file": { + "type": "object", + "$ref": "#/definitions/files.File" + }, + "id": { + "type": "integer" + }, + "task_id": { + "type": "integer" + } + } + }, "models.TaskRelation": { "type": "object", "properties": { @@ -5162,6 +5244,9 @@ var doc = `{ "link_sharing_enabled": { "type": "boolean" }, + "max_file_size": { + "type": "integer" + }, "motd": { "type": "string" }, @@ -5187,23 +5272,39 @@ type swaggerInfo struct { Version string Host string BasePath string + Schemes []string Title string Description string } // SwaggerInfo holds exported Swagger Info so clients can modify it -var SwaggerInfo swaggerInfo +var SwaggerInfo = swaggerInfo{ + Version: "", + Host: "", + BasePath: "/api/v1", + Schemes: []string{}, + Title: "Vikunja API", + Description: "This is the documentation for the [Vikunja](http://vikunja.io) API. Vikunja is a cross-plattform Todo-application with a lot of features, such as sharing lists with users or teams. \n# Authorization\n**JWT-Auth:** Main authorization method, used for most of the requests. 
Needs `Authorization: Bearer `-header to authenticate successfully.\n\n**BasicAuth:** Only used when requesting tasks via caldav.\n", +} type s struct{} func (s *s) ReadDoc() string { - t, err := template.New("swagger_info").Parse(doc) + sInfo := SwaggerInfo + sInfo.Description = strings.Replace(sInfo.Description, "\n", "\\n", -1) + + t, err := template.New("swagger_info").Funcs(template.FuncMap{ + "marshal": func(v interface{}) string { + a, _ := json.Marshal(v) + return string(a) + }, + }).Parse(doc) if err != nil { return doc } var tpl bytes.Buffer - if err := t.Execute(&tpl, SwaggerInfo); err != nil { + if err := t.Execute(&tpl, sInfo); err != nil { return doc } diff --git a/pkg/swagger/swagger.json b/pkg/swagger/swagger.json index c361f115c..cd7aa3984 100644 --- a/pkg/swagger/swagger.json +++ b/pkg/swagger/swagger.json @@ -1,7 +1,7 @@ { "swagger": "2.0", "info": { - "description": "This is the documentation for the [Vikunja](http://vikunja.io) API. Vikunja is a cross-plattform Todo-application with a lot of features, such as sharing lists with users or teams. \u003c!-- ReDoc-Inject: \u003csecurity-definitions\u003e --\u003e\n# Authorization\n**JWT-Auth:** Main authorization method, used for most of the requests. Needs ` + \"`\" + `Authorization: Bearer \u003cjwt-token\u003e` + \"`\" + `-header to authenticate successfully.\n\n**BasicAuth:** Only used when requesting tasks via caldav.\n\u003c!-- ReDoc-Inject: \u003csecurity-definitions\u003e --\u003e", + "description": "This is the documentation for the [Vikunja](http://vikunja.io) API. Vikunja is a cross-plattform Todo-application with a lot of features, such as sharing lists with users or teams. \u003c!-- ReDoc-Inject: \u003csecurity-definitions\u003e --\u003e\n# Authorization\n**JWT-Auth:** Main authorization method, used for most of the requests. 
Needs `Authorization: Bearer \u003cjwt-token\u003e`-header to authenticate successfully.\n\n**BasicAuth:** Only used when requesting tasks via caldav.\n\u003c!-- ReDoc-Inject: \u003csecurity-definitions\u003e --\u003e", "title": "Vikunja API", "contact": { "name": "General Vikunja contact", @@ -11,10 +11,8 @@ "license": { "name": "GPLv3", "url": "http://code.vikunja.io/api/src/branch/master/LICENSE" - }, - "version": "{{.Version}}" + } }, - "host": "{{.Host}}", "basePath": "/api/v1", "paths": { "/info": { @@ -31,7 +29,6 @@ "200": { "description": "OK", "schema": { - "type": "object", "$ref": "#/definitions/v1.vikunjaInfos" } } @@ -83,7 +80,6 @@ "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -122,21 +118,18 @@ "200": { "description": "The created label object.", "schema": { - "type": "object", "$ref": "#/definitions/models.Label" } }, "400": { "description": "Invalid label object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -174,28 +167,24 @@ "200": { "description": "The label", "schema": { - "type": "object", "$ref": "#/definitions/models.Label" } }, "403": { "description": "The user does not have access to the label", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "404": { "description": "Label not found", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -241,35 +230,30 @@ "200": { "description": "The created label object.", "schema": { - "type": "object", "$ref": "#/definitions/models.Label" } }, "400": { "description": "Invalid label object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "403": { "description": "Not allowed to update the label.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "404": { "description": "Label not found.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -305,28 +289,24 @@ "200": { "description": "The label was successfully deleted.", "schema": { - "type": "object", "$ref": "#/definitions/models.Label" } }, "403": { "description": "Not allowed to delete the label.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "404": { "description": "Label not found.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -378,14 +358,12 @@ "403": { "description": "The user does not have access to the list", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -423,21 +401,18 @@ "200": { "description": "The list", "schema": { - "type": "object", "$ref": "#/definitions/models.List" } }, "403": { "description": "The user does not have access to the list", "schema": { - "type": "object", "$ref": 
"#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -483,28 +458,24 @@ "200": { "description": "The created task object.", "schema": { - "type": "object", "$ref": "#/definitions/models.Task" } }, "400": { "description": "Invalid task object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "403": { "description": "The user does not have access to the list", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -550,28 +521,24 @@ "200": { "description": "The updated list.", "schema": { - "type": "object", "$ref": "#/definitions/models.List" } }, "400": { "description": "Invalid list object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "403": { "description": "The user does not have access to the list", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -604,28 +571,24 @@ "200": { "description": "The list was successfully deleted.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "400": { "description": "Invalid list object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "403": { "description": "The user does not have access to the list", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -678,21 +641,18 @@ "400": { "description": "Something's invalid.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "401": { "description": "The user does not have the right to see the list.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal server error.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -751,14 +711,12 @@ "403": { "description": "No right to see the list.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -804,35 +762,30 @@ "200": { "description": "The created team\u003c-\u003elist relation.", "schema": { - "type": "object", "$ref": "#/definitions/models.TeamList" } }, "400": { "description": "Invalid team list object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "403": { "description": "The user does not have access to the list", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "404": { "description": "The team does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -891,14 +844,12 @@ "403": { "description": "No right to see the list.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { 
"description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -944,35 +895,30 @@ "200": { "description": "The created user\u003c-\u003elist relation.", "schema": { - "type": "object", "$ref": "#/definitions/models.ListUser" } }, "400": { "description": "Invalid user list object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "403": { "description": "The user does not have access to the list", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "404": { "description": "The user does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1027,28 +973,24 @@ "200": { "description": "The updated team \u003c-\u003e list relation.", "schema": { - "type": "object", "$ref": "#/definitions/models.TeamList" } }, "403": { "description": "The user does not have admin-access to the list", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "404": { "description": "Team or list does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1088,28 +1030,24 @@ "200": { "description": "The team was successfully deleted.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "403": { "description": "The user does not have access to the list", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "404": { "description": "Team or list does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1164,28 +1102,24 @@ "200": { "description": "The updated user \u003c-\u003e list relation.", "schema": { - "type": "object", "$ref": "#/definitions/models.ListUser" } }, "403": { "description": "The user does not have admin-access to the list", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "404": { "description": "User or list does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1225,28 +1159,24 @@ "200": { "description": "The user was successfully removed from the list.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "403": { "description": "The user does not have access to the list", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "404": { "description": "user or list does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1305,7 +1235,6 @@ "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1351,35 +1280,30 @@ "200": { "description": "The created link share object.", "schema": { - "type": "object", "$ref": "#/definitions/models.LinkSharing" } }, "400": { 
"description": "Invalid link share object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "403": { "description": "Not allowed to add the list share.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "404": { "description": "The list does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1424,28 +1348,24 @@ "200": { "description": "The share links", "schema": { - "type": "object", "$ref": "#/definitions/models.LinkSharing" } }, "403": { "description": "No access to the list", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "404": { "description": "Share Link not found.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1488,28 +1408,24 @@ "200": { "description": "The link was successfully removed.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "403": { "description": "Not allowed to remove the link.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "404": { "description": "Share Link not found.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1545,21 +1461,18 @@ "200": { "description": "OK", "schema": { - "type": "object", "$ref": "#/definitions/v1.Token" } }, "400": { "description": "Invalid user password model.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "403": { "description": "Invalid username or password.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1607,28 +1520,24 @@ "200": { "description": "The updated namespace.", "schema": { - "type": "object", "$ref": "#/definitions/models.Namespace" } }, "400": { "description": "Invalid namespace object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "403": { "description": "The user does not have access to the namespace", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1680,7 +1589,6 @@ "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1719,28 +1627,24 @@ "200": { "description": "The created namespace.", "schema": { - "type": "object", "$ref": "#/definitions/models.Namespace" } }, "400": { "description": "Invalid namespace object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "403": { "description": "The user does not have access to the namespace", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1778,21 +1682,18 @@ "200": { "description": "The Namespace", "schema": { - "type": "object", "$ref": "#/definitions/models.Namespace" } }, "403": { "description": "The user does 
not have access to that namespace.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1825,28 +1726,24 @@ "200": { "description": "The namespace was successfully deleted.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "400": { "description": "Invalid namespace object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "403": { "description": "The user does not have access to the namespace", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1893,21 +1790,18 @@ "403": { "description": "No access to that namespace.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "404": { "description": "The namespace does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -1966,14 +1860,12 @@ "403": { "description": "No right to see the namespace.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2019,35 +1911,30 @@ "200": { "description": "The created team\u003c-\u003enamespace relation.", "schema": { - "type": "object", "$ref": "#/definitions/models.TeamNamespace" } }, "400": { "description": "Invalid team namespace object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "403": { "description": "The team does not have access to the namespace", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "404": { "description": "The team does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2106,14 +1993,12 @@ "403": { "description": "No right to see the namespace.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2159,35 +2044,30 @@ "200": { "description": "The created user\u003c-\u003enamespace relation.", "schema": { - "type": "object", "$ref": "#/definitions/models.NamespaceUser" } }, "400": { "description": "Invalid user namespace object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "403": { "description": "The user does not have access to the namespace", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "404": { "description": "The user does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2235,28 +2115,24 @@ "200": { "description": "The created list.", "schema": { - "type": "object", "$ref": "#/definitions/models.List" } }, "400": { "description": "Invalid list object 
provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "403": { "description": "The user does not have access to the list", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2311,28 +2187,24 @@ "200": { "description": "The updated team \u003c-\u003e namespace relation.", "schema": { - "type": "object", "$ref": "#/definitions/models.TeamNamespace" } }, "403": { "description": "The team does not have admin-access to the namespace", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "404": { "description": "Team or namespace does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2372,28 +2244,24 @@ "200": { "description": "The team was successfully deleted.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "403": { "description": "The team does not have access to the namespace", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "404": { "description": "team or namespace does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2448,28 +2316,24 @@ "200": { "description": "The updated user \u003c-\u003e namespace relation.", "schema": { - "type": "object", "$ref": "#/definitions/models.NamespaceUser" } }, "403": { "description": "The user does not have admin-access to the namespace", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "404": { "description": "User or namespace does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2509,28 +2373,24 @@ "200": { "description": "The user was successfully deleted.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "403": { "description": "The user does not have access to the namespace", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "404": { "description": "user or namespace does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2566,21 +2426,18 @@ "200": { "description": "OK", "schema": { - "type": "object", "$ref": "#/definitions/models.User" } }, "400": { "description": "No or invalid user register object provided / User already exists.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2613,21 +2470,18 @@ "200": { "description": "The valid jwt auth token.", "schema": { - "type": "object", "$ref": "#/definitions/v1.Token" } }, "400": { "description": "Invalid link share object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, 
"500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2697,7 +2551,6 @@ "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2738,28 +2591,24 @@ "200": { "description": "The updated task object.", "schema": { - "type": "object", "$ref": "#/definitions/models.Task" } }, "400": { "description": "Invalid task object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "403": { "description": "The user does not have access to the task (aka its list)", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2807,28 +2656,24 @@ "200": { "description": "The updated task object.", "schema": { - "type": "object", "$ref": "#/definitions/models.Task" } }, "400": { "description": "Invalid task object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "403": { "description": "The user does not have access to the task (aka its list)", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2861,28 +2706,261 @@ "200": { "description": "The created task object.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "400": { "description": "Invalid task ID provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "403": { "description": "The user does not have access to the list", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", + "$ref": "#/definitions/models.Message" + } + } + } + } + }, + "/tasks/{id}/attachments": { + "get": { + "security": [ + { + "JWTKeyAuth": [] + } + ], + "description": "Get all task attachments for one task.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "task" + ], + "summary": "Get all attachments for one task.", + "parameters": [ + { + "type": "integer", + "description": "Task ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "All attachments for this task", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/models.TaskAttachment" + } + } + }, + "403": { + "description": "No access to this task.", + "schema": { + "$ref": "#/definitions/models.Message" + } + }, + "404": { + "description": "The task does not exist.", + "schema": { + "$ref": "#/definitions/models.Message" + } + }, + "500": { + "description": "Internal error", + "schema": { + "$ref": "#/definitions/models.Message" + } + } + } + }, + "put": { + "security": [ + { + "JWTKeyAuth": [] + } + ], + "description": "Upload a task attachment. You can pass multiple files with the files form param.", + "consumes": [ + "multipart/form-data" + ], + "produces": [ + "application/json" + ], + "tags": [ + "task" + ], + "summary": "Upload a task attachment", + "parameters": [ + { + "type": "integer", + "description": "Task ID", + "name": "id", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "The file, as multipart form file. 
You can pass multiple.", + "name": "files", + "in": "formData", + "required": true + } + ], + "responses": { + "200": { + "description": "Attachments were uploaded successfully.", + "schema": { + "$ref": "#/definitions/models.Message" + } + }, + "403": { + "description": "No access to the task.", + "schema": { + "$ref": "#/definitions/models.Message" + } + }, + "404": { + "description": "The task does not exist.", + "schema": { + "$ref": "#/definitions/models.Message" + } + }, + "500": { + "description": "Internal error", + "schema": { + "$ref": "#/definitions/models.Message" + } + } + } + } + }, + "/tasks/{id}/attachments/{attachmentID}": { + "get": { + "security": [ + { + "JWTKeyAuth": [] + } + ], + "description": "Get one attachment for download. **Returns json on error.**", + "produces": [ + "application/octet-stream" + ], + "tags": [ + "task" + ], + "summary": "Get one attachment.", + "parameters": [ + { + "type": "integer", + "description": "Task ID", + "name": "id", + "in": "path", + "required": true + }, + { + "type": "integer", + "description": "Attachment ID", + "name": "attachmentID", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "The attachment file.", + "schema": { + "type": "" + } + }, + "403": { + "description": "No access to this task.", + "schema": { + "$ref": "#/definitions/models.Message" + } + }, + "404": { + "description": "The task does not exist.", + "schema": { + "$ref": "#/definitions/models.Message" + } + }, + "500": { + "description": "Internal error", + "schema": { + "$ref": "#/definitions/models.Message" + } + } + } + }, + "delete": { + "security": [ + { + "JWTKeyAuth": [] + } + ], + "description": "Delete an attachment.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "task" + ], + "summary": "Delete an attachment", + "parameters": [ + { + "type": "integer", + "description": "Task ID", + "name": "id", + "in": "path", + "required": true + }, + { + "type": "integer", + "description": "Attachment ID", + "name": "attachmentID", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "The attachment was deleted successfully.", + "schema": { + "$ref": "#/definitions/models.Message" + } + }, + "403": { + "description": "No access to this task.", + "schema": { + "$ref": "#/definitions/models.Message" + } + }, + "404": { + "description": "The task does not exist.", + "schema": { + "$ref": "#/definitions/models.Message" + } + }, + "500": { + "description": "Internal error", + "schema": { "$ref": "#/definitions/models.Message" } } @@ -2941,7 +3019,6 @@ "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -2987,21 +3064,18 @@ "200": { "description": "The created assingee object.", "schema": { - "type": "object", "$ref": "#/definitions/models.TaskAssginee" } }, "400": { "description": "Invalid assignee object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3049,21 +3123,18 @@ "200": { "description": "The created assingees object.", "schema": { - "type": "object", "$ref": "#/definitions/models.TaskAssginee" } }, "400": { "description": "Invalid assignee object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", 
"schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3108,21 +3179,18 @@ "200": { "description": "The assignee was successfully deleted.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "403": { "description": "Not allowed to delete the assignee.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3170,21 +3238,18 @@ "200": { "description": "The updated labels object.", "schema": { - "type": "object", "$ref": "#/definitions/models.LabelTaskBulk" } }, "400": { "description": "Invalid label object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3198,7 +3263,7 @@ "JWTKeyAuth": [] } ], - "description": "Creates a new relation between two tasks. The user needs to have update rights on the base task and at least read rights on the other task. Both tasks do not need to be on the same list.", + "description": "Creates a new relation between two tasks. The user needs to have update rights on the base task and at least read rights on the other task. Both tasks do not need to be on the same list. Take a look at the docs for available task relation kinds.", "consumes": [ "application/json" ], @@ -3232,21 +3297,18 @@ "200": { "description": "The created task relation object.", "schema": { - "type": "object", "$ref": "#/definitions/models.TaskRelation" } }, "400": { "description": "Invalid task relation object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3291,28 +3353,24 @@ "200": { "description": "The task relation was successfully deleted.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "400": { "description": "Invalid task relation object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "404": { "description": "The task relation was not found.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3371,7 +3429,6 @@ "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3417,35 +3474,30 @@ "200": { "description": "The created label relation object.", "schema": { - "type": "object", "$ref": "#/definitions/models.LabelTask" } }, "400": { "description": "Invalid label object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "403": { "description": "Not allowed to add the label.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "404": { "description": "The label does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3490,28 +3542,24 @@ "200": { "description": "The label was successfully removed.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, 
"403": { "description": "Not allowed to remove the label.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "404": { "description": "Label not found.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3563,7 +3611,6 @@ "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3602,21 +3649,18 @@ "200": { "description": "The created team.", "schema": { - "type": "object", "$ref": "#/definitions/models.Team" } }, "400": { "description": "Invalid team object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3664,21 +3708,18 @@ "200": { "description": "The updated team.", "schema": { - "type": "object", "$ref": "#/definitions/models.Team" } }, "400": { "description": "Invalid team object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3711,21 +3752,18 @@ "200": { "description": "The team was successfully deleted.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "400": { "description": "Invalid team object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3773,28 +3811,24 @@ "200": { "description": "The newly created member object", "schema": { - "type": "object", "$ref": "#/definitions/models.TeamMember" } }, "400": { "description": "Invalid member object provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "403": { "description": "The user does not have access to the team", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3836,14 +3870,12 @@ "200": { "description": "The user was successfully removed from the team.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3872,21 +3904,18 @@ "200": { "description": "OK", "schema": { - "type": "object", "$ref": "#/definitions/models.User" } }, "404": { "description": "User does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal server error.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3922,21 +3951,18 @@ "200": { "description": "OK", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "412": { "description": "Bad token provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -3977,28 +4003,24 @@ "200": { "description": "OK", "schema": { - "type": "object", "$ref": 
"#/definitions/models.Message" } }, "400": { "description": "Something's invalid.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "404": { "description": "User does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal server error.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -4034,21 +4056,18 @@ "200": { "description": "OK", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "400": { "description": "Bad token provided.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -4084,21 +4103,18 @@ "200": { "description": "OK", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } }, "404": { "description": "The user does not exist.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal error", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -4144,14 +4160,12 @@ "400": { "description": "Something's invalid.", "schema": { - "type": "object", "$ref": "#/definitions/code.vikunja.io/web.HTTPError" } }, "500": { "description": "Internal server error.", "schema": { - "type": "object", "$ref": "#/definitions/models.Message" } } @@ -4160,6 +4174,26 @@ } }, "definitions": { + "files.File": { + "type": "object", + "properties": { + "created": { + "type": "string" + }, + "id": { + "type": "integer" + }, + "mime": { + "type": "string" + }, + "name": { + "type": "string" + }, + "size": { + "type": "integer" + } + } + }, "models.APIUserPassword": { "type": "object", "properties": { @@ -4208,6 +4242,13 @@ "$ref": "#/definitions/models.User" } }, + "attachments": { + "description": "All attachments this task has", + "type": "array", + "items": { + "$ref": "#/definitions/models.TaskAttachment" + } + }, "created": { "description": "A unix timestamp when this task was created. You cannot change this value.", "type": "integer" @@ -4621,6 +4662,13 @@ "$ref": "#/definitions/models.User" } }, + "attachments": { + "description": "All attachments this task has", + "type": "array", + "items": { + "$ref": "#/definitions/models.TaskAttachment" + } + }, "created": { "description": "A unix timestamp when this task was created. You cannot change this value.", "type": "integer" @@ -4722,6 +4770,13 @@ "$ref": "#/definitions/models.User" } }, + "attachments": { + "description": "All attachments this task has", + "type": "array", + "items": { + "$ref": "#/definitions/models.TaskAttachment" + } + }, "created": { "description": "A unix timestamp when this task was created. 
You cannot change this value.", "type": "integer" @@ -4822,6 +4877,28 @@ } } }, + "models.TaskAttachment": { + "type": "object", + "properties": { + "created": { + "type": "integer" + }, + "created_by": { + "type": "object", + "$ref": "#/definitions/models.User" + }, + "file": { + "type": "object", + "$ref": "#/definitions/files.File" + }, + "id": { + "type": "integer" + }, + "task_id": { + "type": "integer" + } + } + }, "models.TaskRelation": { "type": "object", "properties": { @@ -5148,6 +5225,9 @@ "link_sharing_enabled": { "type": "boolean" }, + "max_file_size": { + "type": "integer" + }, "motd": { "type": "string" }, diff --git a/pkg/swagger/swagger.yaml b/pkg/swagger/swagger.yaml index 109d88c4a..827514afd 100644 --- a/pkg/swagger/swagger.yaml +++ b/pkg/swagger/swagger.yaml @@ -1,5 +1,18 @@ basePath: /api/v1 definitions: + files.File: + properties: + created: + type: string + id: + type: integer + mime: + type: string + name: + type: string + size: + type: integer + type: object models.APIUserPassword: properties: email: @@ -36,6 +49,11 @@ definitions: items: $ref: '#/definitions/models.User' type: array + attachments: + description: All attachments this task has + items: + $ref: '#/definitions/models.TaskAttachment' + type: array created: description: A unix timestamp when this task was created. You cannot change this value. @@ -365,6 +383,11 @@ definitions: items: $ref: '#/definitions/models.User' type: array + attachments: + description: All attachments this task has + items: + $ref: '#/definitions/models.TaskAttachment' + type: array created: description: A unix timestamp when this task was created. You cannot change this value. @@ -447,6 +470,11 @@ definitions: items: $ref: '#/definitions/models.User' type: array + attachments: + description: All attachments this task has + items: + $ref: '#/definitions/models.TaskAttachment' + type: array created: description: A unix timestamp when this task was created. You cannot change this value. @@ -527,6 +555,21 @@ definitions: user_id: type: integer type: object + models.TaskAttachment: + properties: + created: + type: integer + created_by: + $ref: '#/definitions/models.User' + type: object + file: + $ref: '#/definitions/files.File' + type: object + id: + type: integer + task_id: + type: integer + type: object models.TaskRelation: properties: created: @@ -789,12 +832,13 @@ definitions: type: string link_sharing_enabled: type: boolean + max_file_size: + type: integer motd: type: string version: type: string type: object -host: '{{.Host}}' info: contact: email: hello@vikunja.io @@ -803,7 +847,7 @@ info: description: |- This is the documentation for the [Vikunja](http://vikunja.io) API. Vikunja is a cross-plattform Todo-application with a lot of features, such as sharing lists with users or teams. # Authorization - **JWT-Auth:** Main authorization method, used for most of the requests. Needs ` + "`" + `Authorization: Bearer ` + "`" + `-header to authenticate successfully. + **JWT-Auth:** Main authorization method, used for most of the requests. Needs `Authorization: Bearer `-header to authenticate successfully. **BasicAuth:** Only used when requesting tasks via caldav. 
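
For orientation, a minimal client sketch (not part of the patch itself) exercising the new attachment endpoints documented in the swagger changes above: it uploads a file via PUT /tasks/{id}/attachments using the "files" multipart form field and fetches it back via GET /tasks/{id}/attachments/{attachmentID}. The base URL, port, task and attachment IDs, file names and JWT token below are placeholders/assumptions, not values defined by this patch.

// attachment_client_sketch.go - illustrative only; all constants are assumed.
package main

import (
	"bytes"
	"fmt"
	"io"
	"log"
	"mime/multipart"
	"net/http"
	"os"
)

const (
	baseURL = "http://localhost:3456/api/v1" // assumed local Vikunja instance
	token   = "<jwt token>"                  // assumed token obtained via /login
)

// uploadAttachment sends one file to PUT /tasks/{id}/attachments.
// The multipart form field is named "files", as documented above.
func uploadAttachment(taskID int64, path string) error {
	f, err := os.Open(path)
	if err != nil {
		return err
	}
	defer f.Close()

	var body bytes.Buffer
	w := multipart.NewWriter(&body)
	part, err := w.CreateFormFile("files", f.Name())
	if err != nil {
		return err
	}
	if _, err := io.Copy(part, f); err != nil {
		return err
	}
	if err := w.Close(); err != nil {
		return err
	}

	url := fmt.Sprintf("%s/tasks/%d/attachments", baseURL, taskID)
	req, err := http.NewRequest(http.MethodPut, url, &body)
	if err != nil {
		return err
	}
	req.Header.Set("Authorization", "Bearer "+token)
	req.Header.Set("Content-Type", w.FormDataContentType())

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	msg, _ := io.ReadAll(resp.Body)
	fmt.Printf("upload: %s %s\n", resp.Status, msg)
	return nil
}

// downloadAttachment reads GET /tasks/{id}/attachments/{attachmentID}.
// On success the endpoint streams the raw file (application/octet-stream);
// on error it returns JSON instead.
func downloadAttachment(taskID, attachmentID int64, dest string) error {
	url := fmt.Sprintf("%s/tasks/%d/attachments/%d", baseURL, taskID, attachmentID)
	req, err := http.NewRequest(http.MethodGet, url, nil)
	if err != nil {
		return err
	}
	req.Header.Set("Authorization", "Bearer "+token)

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	out, err := os.Create(dest)
	if err != nil {
		return err
	}
	defer out.Close()
	_, err = io.Copy(out, resp.Body)
	return err
}

func main() {
	// Task ID 1 and attachment ID 1 are assumed example values.
	if err := uploadAttachment(1, "example.txt"); err != nil {
		log.Fatal(err)
	}
	if err := downloadAttachment(1, 1, "example-downloaded.txt"); err != nil {
		log.Fatal(err)
	}
}
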
@@ -811,7 +855,6 @@ info: name: GPLv3 url: http://code.vikunja.io/api/src/branch/master/LICENSE title: Vikunja API - version: '{{.Version}}' paths: /info: get: @@ -824,7 +867,6 @@ paths: description: OK schema: $ref: '#/definitions/v1.vikunjaInfos' - type: object summary: Info tags: - service @@ -857,7 +899,6 @@ paths: description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Get all labels a user has access to @@ -882,17 +923,14 @@ paths: description: The created label object. schema: $ref: '#/definitions/models.Label' - type: object "400": description: Invalid label object provided. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Create a label @@ -917,22 +955,18 @@ paths: description: The label was successfully deleted. schema: $ref: '#/definitions/models.Label' - type: object "403": description: Not allowed to delete the label. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "404": description: Label not found. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Delete a label @@ -955,22 +989,18 @@ paths: description: The label schema: $ref: '#/definitions/models.Label' - type: object "403": description: The user does not have access to the label schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "404": description: Label not found schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Gets one label @@ -1001,27 +1031,22 @@ paths: description: The created label object. schema: $ref: '#/definitions/models.Label' - type: object "400": description: Invalid label object provided. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "403": description: Not allowed to update the label. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "404": description: Label not found. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Update a label @@ -1055,12 +1080,10 @@ paths: description: The user does not have access to the list schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Get all lists a user has access to @@ -1082,22 +1105,18 @@ paths: description: The list was successfully deleted. schema: $ref: '#/definitions/models.Message' - type: object "400": description: Invalid list object provided. 
schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "403": description: The user does not have access to the list schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Deletes a list @@ -1120,17 +1139,14 @@ paths: description: The list schema: $ref: '#/definitions/models.List' - type: object "403": description: The user does not have access to the list schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Gets one list @@ -1160,22 +1176,18 @@ paths: description: The updated list. schema: $ref: '#/definitions/models.List' - type: object "400": description: Invalid list object provided. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "403": description: The user does not have access to the list schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Updates a list @@ -1205,22 +1217,18 @@ paths: description: The created task object. schema: $ref: '#/definitions/models.Task' - type: object "400": description: Invalid task object provided. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "403": description: The user does not have access to the list schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Create a task @@ -1255,17 +1263,14 @@ paths: description: Something's invalid. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "401": description: The user does not have the right to see the list. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal server error. schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Get users @@ -1304,12 +1309,10 @@ paths: description: No right to see the list. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Get teams on a list @@ -1339,27 +1342,22 @@ paths: description: The created team<->list relation. schema: $ref: '#/definitions/models.TeamList' - type: object "400": description: Invalid team list object provided. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "403": description: The user does not have access to the list schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "404": description: The team does not exist. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Add a team to a list @@ -1398,12 +1396,10 @@ paths: description: No right to see the list. 
schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Get users on a list @@ -1433,27 +1429,22 @@ paths: description: The created user<->list relation. schema: $ref: '#/definitions/models.ListUser' - type: object "400": description: Invalid user list object provided. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "403": description: The user does not have access to the list schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "404": description: The user does not exist. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Add a user to a list @@ -1492,7 +1483,6 @@ paths: description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Get all link shares for a list @@ -1523,27 +1513,22 @@ paths: description: The created link share object. schema: $ref: '#/definitions/models.LinkSharing' - type: object "400": description: Invalid link share object provided. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "403": description: Not allowed to add the list share. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "404": description: The list does not exist. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Share a list via link @@ -1573,22 +1558,18 @@ paths: description: The link was successfully removed. schema: $ref: '#/definitions/models.Message' - type: object "403": description: Not allowed to remove the link. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "404": description: Share Link not found. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Remove a link share @@ -1616,22 +1597,18 @@ paths: description: The share links schema: $ref: '#/definitions/models.LinkSharing' - type: object "403": description: No access to the list schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "404": description: Share Link not found. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Get one link shares for a list @@ -1659,22 +1636,18 @@ paths: description: The team was successfully deleted. schema: $ref: '#/definitions/models.Message' - type: object "403": description: The user does not have access to the list schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "404": description: Team or list does not exist. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Delete a team from a list @@ -1710,22 +1683,18 @@ paths: description: The updated team <-> list relation. 
schema: $ref: '#/definitions/models.TeamList' - type: object "403": description: The user does not have admin-access to the list schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "404": description: Team or list does not exist. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Update a team <-> list relation @@ -1753,22 +1722,18 @@ paths: description: The user was successfully removed from the list. schema: $ref: '#/definitions/models.Message' - type: object "403": description: The user does not have access to the list schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "404": description: user or list does not exist. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Delete a user from a list @@ -1804,22 +1769,18 @@ paths: description: The updated user <-> list relation. schema: $ref: '#/definitions/models.ListUser' - type: object "403": description: The user does not have admin-access to the list schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "404": description: User or list does not exist. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Update a user <-> list relation @@ -1845,17 +1806,14 @@ paths: description: OK schema: $ref: '#/definitions/v1.Token' - type: object "400": description: Invalid user password model. schema: $ref: '#/definitions/models.Message' - type: object "403": description: Invalid username or password. schema: $ref: '#/definitions/models.Message' - type: object summary: Login tags: - user @@ -1884,22 +1842,18 @@ paths: description: The updated namespace. schema: $ref: '#/definitions/models.Namespace' - type: object "400": description: Invalid namespace object provided. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "403": description: The user does not have access to the namespace schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Updates a namespace @@ -1933,7 +1887,6 @@ paths: description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Get all namespaces a user has access to @@ -1958,22 +1911,18 @@ paths: description: The created namespace. schema: $ref: '#/definitions/models.Namespace' - type: object "400": description: Invalid namespace object provided. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "403": description: The user does not have access to the namespace schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Creates a new namespace @@ -1995,22 +1944,18 @@ paths: description: The namespace was successfully deleted. schema: $ref: '#/definitions/models.Message' - type: object "400": description: Invalid namespace object provided. 
schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "403": description: The user does not have access to the namespace schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Deletes a namespace @@ -2033,17 +1978,14 @@ paths: description: The Namespace schema: $ref: '#/definitions/models.Namespace' - type: object "403": description: The user does not have access to that namespace. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Gets one namespace @@ -2073,17 +2015,14 @@ paths: description: No access to that namespace. schema: $ref: '#/definitions/models.Message' - type: object "404": description: The namespace does not exist. schema: $ref: '#/definitions/models.Message' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Get all lists in a namespace @@ -2123,12 +2062,10 @@ paths: description: No right to see the namespace. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Get teams on a namespace @@ -2158,27 +2095,22 @@ paths: description: The created team<->namespace relation. schema: $ref: '#/definitions/models.TeamNamespace' - type: object "400": description: Invalid team namespace object provided. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "403": description: The team does not have access to the namespace schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "404": description: The team does not exist. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Add a team to a namespace @@ -2218,12 +2150,10 @@ paths: description: No right to see the namespace. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Get users on a namespace @@ -2253,27 +2183,22 @@ paths: description: The created user<->namespace relation. schema: $ref: '#/definitions/models.NamespaceUser' - type: object "400": description: Invalid user namespace object provided. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "403": description: The user does not have access to the namespace schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "404": description: The user does not exist. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Add a user to a namespace @@ -2305,22 +2230,18 @@ paths: description: The created list. schema: $ref: '#/definitions/models.List' - type: object "400": description: Invalid list object provided. 
schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "403": description: The user does not have access to the list schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Creates a new list @@ -2348,22 +2269,18 @@ paths: description: The team was successfully deleted. schema: $ref: '#/definitions/models.Message' - type: object "403": description: The team does not have access to the namespace schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "404": description: team or namespace does not exist. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Delete a team from a namespace @@ -2399,22 +2316,18 @@ paths: description: The updated team <-> namespace relation. schema: $ref: '#/definitions/models.TeamNamespace' - type: object "403": description: The team does not have admin-access to the namespace schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "404": description: Team or namespace does not exist. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Update a team <-> namespace relation @@ -2442,22 +2355,18 @@ paths: description: The user was successfully deleted. schema: $ref: '#/definitions/models.Message' - type: object "403": description: The user does not have access to the namespace schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "404": description: user or namespace does not exist. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Delete a user from a namespace @@ -2493,22 +2402,18 @@ paths: description: The updated user <-> namespace relation. schema: $ref: '#/definitions/models.NamespaceUser' - type: object "403": description: The user does not have admin-access to the namespace schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "404": description: User or namespace does not exist. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Update a user <-> namespace relation @@ -2534,18 +2439,15 @@ paths: description: OK schema: $ref: '#/definitions/models.User' - type: object "400": description: No or invalid user register object provided / User already exists. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object summary: Register tags: - user @@ -2567,17 +2469,14 @@ paths: description: The valid jwt auth token. schema: $ref: '#/definitions/v1.Token' - type: object "400": description: Invalid link share object provided. 
schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object summary: Get an auth token for a share tags: - sharing @@ -2597,22 +2496,18 @@ paths: description: The created task object. schema: $ref: '#/definitions/models.Message' - type: object "400": description: Invalid task ID provided. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "403": description: The user does not have access to the list schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Delete a task @@ -2644,27 +2539,177 @@ paths: description: The updated task object. schema: $ref: '#/definitions/models.Task' - type: object "400": description: Invalid task object provided. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "403": description: The user does not have access to the task (aka its list) schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Update a task tags: - task + /tasks/{id}/attachments: + get: + consumes: + - application/json + description: Get all task attachments for one task. + parameters: + - description: Task ID + in: path + name: id + required: true + type: integer + produces: + - application/json + responses: + "200": + description: All attachments for this task + schema: + items: + $ref: '#/definitions/models.TaskAttachment' + type: array + "403": + description: No access to this task. + schema: + $ref: '#/definitions/models.Message' + "404": + description: The task does not exist. + schema: + $ref: '#/definitions/models.Message' + "500": + description: Internal error + schema: + $ref: '#/definitions/models.Message' + security: + - JWTKeyAuth: [] + summary: Get all attachments for one task. + tags: + - task + put: + consumes: + - multipart/form-data + description: Upload a task attachment. You can pass multiple files with the + files form param. + parameters: + - description: Task ID + in: path + name: id + required: true + type: integer + - description: The file, as multipart form file. You can pass multiple. + in: formData + name: files + required: true + type: string + produces: + - application/json + responses: + "200": + description: Attachments were uploaded successfully. + schema: + $ref: '#/definitions/models.Message' + "403": + description: No access to the task. + schema: + $ref: '#/definitions/models.Message' + "404": + description: The task does not exist. + schema: + $ref: '#/definitions/models.Message' + "500": + description: Internal error + schema: + $ref: '#/definitions/models.Message' + security: + - JWTKeyAuth: [] + summary: Upload a task attachment + tags: + - task + /tasks/{id}/attachments/{attachmentID}: + delete: + consumes: + - application/json + description: Delete an attachment. + parameters: + - description: Task ID + in: path + name: id + required: true + type: integer + - description: Attachment ID + in: path + name: attachmentID + required: true + type: integer + produces: + - application/json + responses: + "200": + description: The attachment was deleted successfully. + schema: + $ref: '#/definitions/models.Message' + "403": + description: No access to this task. 
+ schema: + $ref: '#/definitions/models.Message' + "404": + description: The task does not exist. + schema: + $ref: '#/definitions/models.Message' + "500": + description: Internal error + schema: + $ref: '#/definitions/models.Message' + security: + - JWTKeyAuth: [] + summary: Delete an attachment + tags: + - task + get: + description: Get one attachment for download. **Returns json on error.** + parameters: + - description: Task ID + in: path + name: id + required: true + type: integer + - description: Attachment ID + in: path + name: attachmentID + required: true + type: integer + produces: + - application/octet-stream + responses: + "200": + description: The attachment file. + schema: + type: "" + "403": + description: No access to this task. + schema: + $ref: '#/definitions/models.Message' + "404": + description: The task does not exist. + schema: + $ref: '#/definitions/models.Message' + "500": + description: Internal error + schema: + $ref: '#/definitions/models.Message' + security: + - JWTKeyAuth: [] + summary: Get one attachment. + tags: + - task /tasks/{task}/labels: get: consumes: @@ -2698,7 +2743,6 @@ paths: description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Get all labels on a task @@ -2729,27 +2773,22 @@ paths: description: The created label relation object. schema: $ref: '#/definitions/models.LabelTask' - type: object "400": description: Invalid label object provided. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "403": description: Not allowed to add the label. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "404": description: The label does not exist. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Add a label to a task @@ -2779,22 +2818,18 @@ paths: description: The label was successfully removed. schema: $ref: '#/definitions/models.Message' - type: object "403": description: Not allowed to remove the label. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "404": description: Label not found. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Remove a label from a task @@ -2833,7 +2868,6 @@ paths: description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Get all assignees for a task @@ -2864,17 +2898,14 @@ paths: description: The created assingee object. schema: $ref: '#/definitions/models.TaskAssginee' - type: object "400": description: Invalid assignee object provided. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Add a new assignee to a task @@ -2903,17 +2934,14 @@ paths: description: The assignee was successfully deleted. schema: $ref: '#/definitions/models.Message' - type: object "403": description: Not allowed to delete the assignee. 
schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Delete an assignee @@ -2947,17 +2975,14 @@ paths: description: The created assingees object. schema: $ref: '#/definitions/models.TaskAssginee' - type: object "400": description: Invalid assignee object provided. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Add multiple new assignees to a task @@ -2991,17 +3016,14 @@ paths: description: The updated labels object. schema: $ref: '#/definitions/models.LabelTaskBulk' - type: object "400": description: Invalid label object provided. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Update all labels on a task. @@ -3031,22 +3053,18 @@ paths: description: The task relation was successfully deleted. schema: $ref: '#/definitions/models.Message' - type: object "400": description: Invalid task relation object provided. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "404": description: The task relation was not found. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Remove a task relation @@ -3057,7 +3075,8 @@ paths: - application/json description: Creates a new relation between two tasks. The user needs to have update rights on the base task and at least read rights on the other task. - Both tasks do not need to be on the same list. + Both tasks do not need to be on the same list. Take a look at the docs for + available task relation kinds. parameters: - description: The relation object in: body @@ -3078,17 +3097,14 @@ paths: description: The created task relation object. schema: $ref: '#/definitions/models.TaskRelation' - type: object "400": description: Invalid task relation object provided. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Create a new relation between two tasks @@ -3139,7 +3155,6 @@ paths: description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Get tasks @@ -3168,22 +3183,18 @@ paths: description: The updated task object. schema: $ref: '#/definitions/models.Task' - type: object "400": description: Invalid task object provided. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "403": description: The user does not have access to the task (aka its list) schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Update a bunch of tasks at once @@ -3217,7 +3228,6 @@ paths: description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Get teams @@ -3243,17 +3253,14 @@ paths: description: The created team. 
schema: $ref: '#/definitions/models.Team' - type: object "400": description: Invalid team object provided. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Creates a new team @@ -3276,17 +3283,14 @@ paths: description: The team was successfully deleted. schema: $ref: '#/definitions/models.Message' - type: object "400": description: Invalid team object provided. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Deletes a team @@ -3316,17 +3320,14 @@ paths: description: The updated team. schema: $ref: '#/definitions/models.Team' - type: object "400": description: Invalid team object provided. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Updates a team @@ -3357,22 +3358,18 @@ paths: description: The newly created member object schema: $ref: '#/definitions/models.TeamMember' - type: object "400": description: Invalid member object provided. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "403": description: The user does not have access to the team schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Add a user to a team @@ -3400,12 +3397,10 @@ paths: description: The user was successfully removed from the team. schema: $ref: '#/definitions/models.Message' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Remove a user from a team @@ -3423,17 +3418,14 @@ paths: description: OK schema: $ref: '#/definitions/models.User' - type: object "404": description: User does not exist. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal server error. schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Get user information @@ -3459,17 +3451,14 @@ paths: description: OK schema: $ref: '#/definitions/models.Message' - type: object "412": description: Bad token provided. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object summary: Confirm the email of a new user tags: - user @@ -3493,22 +3482,18 @@ paths: description: OK schema: $ref: '#/definitions/models.Message' - type: object "400": description: Something's invalid. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "404": description: User does not exist. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal server error. schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Change password @@ -3534,17 +3519,14 @@ paths: description: OK schema: $ref: '#/definitions/models.Message' - type: object "400": description: Bad token provided. 
schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object summary: Resets a password tags: - user @@ -3569,17 +3551,14 @@ paths: description: OK schema: $ref: '#/definitions/models.Message' - type: object "404": description: The user does not exist. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal error schema: $ref: '#/definitions/models.Message' - type: object summary: Request password reset token tags: - user @@ -3607,12 +3586,10 @@ paths: description: Something's invalid. schema: $ref: '#/definitions/code.vikunja.io/web.HTTPError' - type: object "500": description: Internal server error. schema: $ref: '#/definitions/models.Message' - type: object security: - JWTKeyAuth: [] summary: Get users diff --git a/vendor/github.com/KyleBanks/depth/.gitignore b/vendor/github.com/KyleBanks/depth/.gitignore new file mode 100644 index 000000000..8c2e171ea --- /dev/null +++ b/vendor/github.com/KyleBanks/depth/.gitignore @@ -0,0 +1,16 @@ +# Binaries for programs and plugins +*.exe +*.dll +*.so +*.dylib + +# Test binary, build with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736 +.glide/ + +bin/ diff --git a/vendor/github.com/KyleBanks/depth/.travis.yml b/vendor/github.com/KyleBanks/depth/.travis.yml new file mode 100644 index 000000000..ab506f939 --- /dev/null +++ b/vendor/github.com/KyleBanks/depth/.travis.yml @@ -0,0 +1,9 @@ +language: go +sudo: false +go: + - 1.9.x +before_install: + - go get github.com/mattn/goveralls +script: + - $HOME/gopath/bin/goveralls -service=travis-ci +#script: go test $(go list ./... | grep -v vendor/) diff --git a/vendor/github.com/KyleBanks/depth/LICENSE b/vendor/github.com/KyleBanks/depth/LICENSE new file mode 100644 index 000000000..070b426b8 --- /dev/null +++ b/vendor/github.com/KyleBanks/depth/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Kyle Banks + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
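The swagger definitions earlier in this file describe the new task attachment routes: a `PUT` to `/tasks/{id}/attachments` that accepts one or more files in the multipart `files` form field, a `GET` on the same path to list attachments, and `GET`/`DELETE` on `/tasks/{id}/attachments/{attachmentID}` for download and removal, all behind JWT auth. As a rough client-side sketch only (not part of this patch), an upload could look like the following; the `/api/v1` prefix, the environment variables, and the helper names are assumptions for illustration, while the method, path, `files` field, and bearer auth come from the spec above.

```go
// Illustrative client sketch (not part of this patch): upload files to the
// new task attachment endpoint described in the swagger changes above.
// Assumptions: the API is served under /api/v1 and VIKUNJA_URL / VIKUNJA_TOKEN
// are provided by the caller; only the method, path, "files" form field and
// the JWT bearer auth come from the spec itself.
package main

import (
	"bytes"
	"fmt"
	"log"
	"mime/multipart"
	"net/http"
	"os"
	"path/filepath"
)

func uploadAttachments(baseURL, token string, taskID int64, paths ...string) error {
	body := &bytes.Buffer{}
	w := multipart.NewWriter(body)
	for _, p := range paths {
		data, err := os.ReadFile(p)
		if err != nil {
			return err
		}
		// The spec allows passing the "files" field multiple times.
		part, err := w.CreateFormFile("files", filepath.Base(p))
		if err != nil {
			return err
		}
		if _, err := part.Write(data); err != nil {
			return err
		}
	}
	if err := w.Close(); err != nil {
		return err
	}

	req, err := http.NewRequest(http.MethodPut,
		fmt.Sprintf("%s/tasks/%d/attachments", baseURL, taskID), body)
	if err != nil {
		return err
	}
	req.Header.Set("Content-Type", w.FormDataContentType())
	req.Header.Set("Authorization", "Bearer "+token)

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("upload failed: %s", resp.Status)
	}
	return nil
}

func main() {
	// Hypothetical values for illustration only.
	err := uploadAttachments(os.Getenv("VIKUNJA_URL")+"/api/v1",
		os.Getenv("VIKUNJA_TOKEN"), 42, "screenshot.png")
	if err != nil {
		log.Fatal(err)
	}
}
```

Downloading uses the matching `GET /tasks/{id}/attachments/{attachmentID}` route, which per the definitions above streams the file as `application/octet-stream` on success and returns JSON error objects otherwise.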
diff --git a/vendor/github.com/KyleBanks/depth/Makefile b/vendor/github.com/KyleBanks/depth/Makefile new file mode 100644 index 000000000..acd35bb41 --- /dev/null +++ b/vendor/github.com/KyleBanks/depth/Makefile @@ -0,0 +1,32 @@ +VERSION = 1.2.1 + +RELEASE_PKG = ./cmd/depth +INSTALL_PKG = $(RELEASE_PKG) + + +# Remote includes require 'mmake' +# github.com/tj/mmake +include github.com/KyleBanks/make/go/install +include github.com/KyleBanks/make/go/sanity +include github.com/KyleBanks/make/go/release +include github.com/KyleBanks/make/go/bench +include github.com/KyleBanks/make/git/precommit + +# Runs a number of depth commands as examples of what's possible. +example: | install + depth github.com/KyleBanks/depth/cmd/depth strings ./ + + depth -internal strings + + depth -json github.com/KyleBanks/depth/cmd/depth + + depth -test github.com/KyleBanks/depth/cmd/depth + + depth -test -internal strings + + depth -test -internal -max 3 strings + + depth . + + depth ./cmd/depth +.PHONY: example diff --git a/vendor/github.com/KyleBanks/depth/README.md b/vendor/github.com/KyleBanks/depth/README.md new file mode 100644 index 000000000..0de954fd3 --- /dev/null +++ b/vendor/github.com/KyleBanks/depth/README.md @@ -0,0 +1,232 @@ +# depth + +[![GoDoc](https://godoc.org/github.com/KyleBanks/depth?status.svg)](https://godoc.org/github.com/KyleBanks/depth)  +[![Build Status](https://travis-ci.org/KyleBanks/depth.svg?branch=master)](https://travis-ci.org/KyleBanks/depth)  +[![Go Report Card](https://goreportcard.com/badge/github.com/KyleBanks/depth)](https://goreportcard.com/report/github.com/KyleBanks/depth)  +[![Coverage Status](https://coveralls.io/repos/github/KyleBanks/depth/badge.svg?branch=master)](https://coveralls.io/github/KyleBanks/depth?branch=master) + +`depth` is tool to retrieve and visualize Go source code dependency trees. + +## Install + +Download the appropriate binary for your platform from the [Releases](https://github.com/KyleBanks/depth/releases) page, or: + +```sh +go get github.com/KyleBanks/depth/cmd/depth +``` + +## Usage + +`depth` can be used as a standalone command-line application, or as a package within your own project. + +### Command-Line + +Simply execute `depth` with one or more package names to visualize. You can use the fully qualified import path of the package, like so: + +```sh +$ depth github.com/KyleBanks/depth/cmd/depth +github.com/KyleBanks/depth/cmd/depth + ├ encoding/json + ├ flag + ├ fmt + ├ io + ├ log + ├ os + ├ strings + └ github.com/KyleBanks/depth + ├ fmt + ├ go/build + ├ path + ├ sort + └ strings +12 dependencies (11 internal, 1 external, 0 testing). +``` + +Or you can use a relative path, for example: + +```sh +$ depth . +$ depth ./cmd/depth +$ depth ../ +``` + +You can also use `depth` on the Go standard library: + +```sh +$ depth strings +strings + ├ errors + ├ io + ├ unicode + └ unicode/utf8 +5 dependencies (5 internal, 0 external, 0 testing). +``` + +Visualizing multiple packages at a time is supported by simply naming the packages you'd like to visualize: + +```sh +$ depth strings github.com/KyleBanks/depth +strings + ├ errors + ├ io + ├ unicode + └ unicode/utf8 +5 dependencies (5 internal, 0 external, 0 testing). +github.com/KyleBanks/depth + ├ fmt + ├ go/build + ├ path + ├ sort + └ strings +7 dependencies (7 internal, 0 external, 0 testing). 
+``` + +#### `-internal` + +By default, `depth` only resolves the top level of dependencies for standard library packages, however you can use the `-internal` flag to visualize all internal dependencies: + +```sh +$ depth -internal strings +strings + ├ errors + ├ io + ├ errors + └ sync + ├ internal/race + └ unsafe + ├ runtime + ├ runtime/internal/atomic + └ unsafe + ├ runtime/internal/sys + └ unsafe + ├ sync/atomic + └ unsafe + └ unsafe + ├ unicode + └ unicode/utf8 +12 dependencies (12 internal, 0 external, 0 testing). +``` + +#### `-max` + +The `-max` flag limits the dependency tree to the maximum depth provided. For example, if you supply `-max 1` on the `depth` package, your output would look like so: + +``` +$ depth -max 1 github.com/KyleBanks/depth/cmd/depth +github.com/KyleBanks/depth/cmd/depth + ├ encoding/json + ├ flag + ├ fmt + ├ io + ├ log + ├ os + ├ strings + └ github.com/KyleBanks/depth +7 dependencies (6 internal, 1 external, 0 testing). +``` + +The `-max` flag is particularly useful in conjunction with the `-internal` flag which can lead to very deep dependency trees. + +#### `-test` + +By default, `depth` ignores dependencies that are only required for testing. However, you can view test dependencies using the `-test` flag: + +```sh +$ depth -test strings +strings + ├ bytes + ├ errors + ├ fmt + ├ io + ├ io/ioutil + ├ math/rand + ├ reflect + ├ sync + ├ testing + ├ unicode + ├ unicode/utf8 + └ unsafe +13 dependencies (13 internal, 0 external, 8 testing). +``` + +#### `-explain target-package` + +The `-explain` flag instructs `depth` to print import chains in which the +`target-package` is found: + +```sh +$ depth -explain strings github.com/KyleBanks/depth/cmd/depth +github.com/KyleBanks/depth/cmd/depth -> strings +github.com/KyleBanks/depth/cmd/depth -> github.com/KyleBanks/depth -> strings +``` + +#### `-json` + +The `-json` flag instructs `depth` to output dependencies in JSON format: + +```sh +$ depth -json github.com/KyleBanks/depth/cmd/depth +{ + "name": "github.com/KyleBanks/depth/cmd/depth", + "deps": [ + { + "name": "encoding/json", + "internal": true, + "deps": null + }, + ... + { + "name": "github.com/KyleBanks/depth", + "internal": false, + "deps": [ + { + "name": "go/build", + "internal": true, + "deps": null + }, + ... + ] + } + ] +} +``` + +### Integrating With Your Project + +The `depth` package can easily be used to retrieve the dependency tree for a particular package in your own project. For example, here's how you would retrieve the dependency tree for the `strings` package: + +```go +import "github.com/KyleBanks/depth" + +var t depth.Tree +err := t.Resolve("strings") +if err != nil { + log.Fatal(err) +} + +// Output: "'strings' has 4 dependencies." +log.Printf("'%v' has %v dependencies.", t.Root.Name, len(t.Root.Deps)) +``` + +For additional customization, simply set the appropriate flags on the `Tree` before resolving: + +```go +import "github.com/KyleBanks/depth" + +t := depth.Tree { + ResolveInternal: true, + ResolveTest: true, + MaxDepth: 10, +} + + +err := t.Resolve("strings") +``` + +## Author + +`depth` was developed by [Kyle Banks](https://twitter.com/kylewbanks). + +## License + +`depth` is available under the [MIT](./LICENSE) license. 
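The README's "Integrating With Your Project" section above stops after setting flags on the `Tree`. As a small hedged addition (not part of the vendored file), the sketch below shows one way the resolved tree can then be walked; it relies only on the `Tree` and `Pkg` fields defined in `depth.go` and `pkg.go` further down in this patch.

```go
// Minimal sketch (not part of the vendored package): resolve a package with
// the depth library and print its direct dependencies, marking which ones
// are standard-library packages. Uses only the exported Tree and Pkg fields
// shown in depth.go / pkg.go below.
package main

import (
	"fmt"
	"log"

	"github.com/KyleBanks/depth"
)

func main() {
	t := depth.Tree{
		ResolveInternal: true,
		MaxDepth:        2,
	}
	if err := t.Resolve("strings"); err != nil {
		log.Fatal(err)
	}
	for _, d := range t.Root.Deps {
		kind := "external"
		if d.Internal {
			kind = "stdlib"
		}
		fmt.Printf("%-20s %s\n", d.Name, kind)
	}
}
```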
diff --git a/vendor/github.com/KyleBanks/depth/depth.go b/vendor/github.com/KyleBanks/depth/depth.go new file mode 100644 index 000000000..115c28ac9 --- /dev/null +++ b/vendor/github.com/KyleBanks/depth/depth.go @@ -0,0 +1,129 @@ +// Package depth provides the ability to traverse and retrieve Go source code dependencies in the form of +// internal and external packages. +// +// For example, the dependencies of the stdlib `strings` package can be resolved like so: +// +// import "github.com/KyleBanks/depth" +// +// var t depth.Tree +// err := t.Resolve("strings") +// if err != nil { +// log.Fatal(err) +// } +// +// // Output: "strings has 4 dependencies." +// log.Printf("%v has %v dependencies.", t.Root.Name, len(t.Root.Deps)) +// +// For additional customization, simply set the appropriate flags on the `Tree` before resolving: +// +// import "github.com/KyleBanks/depth" +// +// t := depth.Tree { +// ResolveInternal: true, +// ResolveTest: true, +// MaxDepth: 10, +// } +// err := t.Resolve("strings") +package depth + +import ( + "errors" + "go/build" + "os" +) + +// ErrRootPkgNotResolved is returned when the root Pkg of the Tree cannot be resolved, +// typically because it does not exist. +var ErrRootPkgNotResolved = errors.New("unable to resolve root package") + +// Importer defines a type that can import a package and return its details. +type Importer interface { + Import(name, srcDir string, im build.ImportMode) (*build.Package, error) +} + +// Tree represents the top level of a Pkg and the configuration used to +// initialize and represent its contents. +type Tree struct { + Root *Pkg + + ResolveInternal bool + ResolveTest bool + MaxDepth int + + Importer Importer + + importCache map[string]struct{} +} + +// Resolve recursively finds all dependencies for the root Pkg name provided, +// and the packages it depends on. +func (t *Tree) Resolve(name string) error { + pwd, err := os.Getwd() + if err != nil { + return err + } + + t.Root = &Pkg{ + Name: name, + Tree: t, + SrcDir: pwd, + Test: false, + } + + // Reset the import cache each time to ensure a reused Tree doesn't + // reuse the same cache. + t.importCache = nil + + // Allow custom importers, but use build.Default if none is provided. + if t.Importer == nil { + t.Importer = &build.Default + } + + t.Root.Resolve(t.Importer) + if !t.Root.Resolved { + return ErrRootPkgNotResolved + } + + return nil +} + +// shouldResolveInternal determines if internal packages should be further resolved beyond the +// current parent. +// +// For example, if the parent Pkg is `github.com/foo/bar` and true is returned, all the +// internal dependencies it relies on will be resolved. If for example `strings` is one of those +// dependencies, and it is passed as the parent here, false may be returned and its internal +// dependencies will not be resolved. +func (t *Tree) shouldResolveInternal(parent *Pkg) bool { + if t.ResolveInternal { + return true + } + + return parent == t.Root +} + +// isAtMaxDepth returns true when the depth of the Pkg provided is at or beyond the maximum +// depth allowed by the tree. +// +// If the Tree has a MaxDepth of zero, true is never returned. +func (t *Tree) isAtMaxDepth(p *Pkg) bool { + if t.MaxDepth == 0 { + return false + } + + return p.depth() >= t.MaxDepth +} + +// hasSeenImport returns true if the import name provided has already been seen within the tree. +// This function only returns false for a name once. 
+func (t *Tree) hasSeenImport(name string) bool { + if t.importCache == nil { + t.importCache = make(map[string]struct{}) + } + + if _, ok := t.importCache[name]; ok { + return true + } + t.importCache[name] = struct{}{} + return false +} diff --git a/vendor/github.com/KyleBanks/depth/pkg.go b/vendor/github.com/KyleBanks/depth/pkg.go new file mode 100644 index 000000000..1d54d717d --- /dev/null +++ b/vendor/github.com/KyleBanks/depth/pkg.go @@ -0,0 +1,184 @@ +package depth + +import ( + "bytes" + "go/build" + "path" + "sort" + "strings" +) + +// Pkg represents a Go source package, and its dependencies. +type Pkg struct { + Name string `json:"name"` + SrcDir string `json:"-"` + + Internal bool `json:"internal"` + Resolved bool `json:"resolved"` + Test bool `json:"-"` + + Tree *Tree `json:"-"` + Parent *Pkg `json:"-"` + Deps []Pkg `json:"deps"` + + Raw *build.Package `json:"-"` +} + +// Resolve recursively finds all dependencies for the Pkg and the packages it depends on. +func (p *Pkg) Resolve(i Importer) { + // Resolved is always true, regardless of if we skip the import, + // it is only false if there is an error while importing. + p.Resolved = true + + name := p.cleanName() + if name == "" { + return + } + + // Stop resolving imports if we've reached max depth or found a duplicate. + var importMode build.ImportMode + if p.Tree.hasSeenImport(name) || p.Tree.isAtMaxDepth(p) { + importMode = build.FindOnly + } + + pkg, err := i.Import(name, p.SrcDir, importMode) + if err != nil { + // TODO: Check the error type? + p.Resolved = false + return + } + p.Raw = pkg + + // Update the name with the fully qualified import path. + p.Name = pkg.ImportPath + + // If this is an internal dependency, we may need to skip it. + if pkg.Goroot { + p.Internal = true + if !p.Tree.shouldResolveInternal(p) { + return + } + } + + //first we set the regular dependencies, then we add the test dependencies + //sharing the same set. This allows us to mark all test-only deps linearly + unique := make(map[string]struct{}) + p.setDeps(i, pkg.Imports, pkg.Dir, unique, false) + if p.Tree.ResolveTest { + p.setDeps(i, append(pkg.TestImports, pkg.XTestImports...), pkg.Dir, unique, true) + } +} + +// setDeps takes a slice of import paths and the source directory they are relative to, +// and creates the Deps of the Pkg. Each dependency is also further resolved prior to being added +// to the Pkg. +func (p *Pkg) setDeps(i Importer, imports []string, srcDir string, unique map[string]struct{}, isTest bool) { + for _, imp := range imports { + // Mostly for testing files where cyclic imports are allowed. + if imp == p.Name { + continue + } + + // Skip duplicates. + if _, ok := unique[imp]; ok { + continue + } + unique[imp] = struct{}{} + + p.addDep(i, imp, srcDir, isTest) + } + + sort.Sort(byInternalAndName(p.Deps)) +} + +// addDep creates a Pkg and it's dependencies from an imported package name. +func (p *Pkg) addDep(i Importer, name string, srcDir string, isTest bool) { + dep := Pkg{ + Name: name, + SrcDir: srcDir, + Tree: p.Tree, + Parent: p, + Test: isTest, + } + dep.Resolve(i) + + p.Deps = append(p.Deps, dep) +} + +// isParent goes recursively up the chain of Pkgs to determine if the name provided is ever a +// parent of the current Pkg. +func (p *Pkg) isParent(name string) bool { + if p.Parent == nil { + return false + } + + if p.Parent.Name == name { + return true + } + + return p.Parent.isParent(name) +} + +// depth returns the depth of the Pkg within the Tree. 
+func (p *Pkg) depth() int { + if p.Parent == nil { + return 0 + } + + return p.Parent.depth() + 1 +} + +// cleanName returns a cleaned version of the Pkg name used for resolving dependencies. +// +// If an empty string is returned, dependencies should not be resolved. +func (p *Pkg) cleanName() string { + name := p.Name + + // C 'package' cannot be resolved. + if name == "C" { + return "" + } + + // Internal golang_org/* packages must be prefixed with vendor/ + // + // Thanks to @davecheney for this: + // https://github.com/davecheney/graphpkg/blob/master/main.go#L46 + if strings.HasPrefix(name, "golang_org") { + name = path.Join("vendor", name) + } + + return name +} + +// String returns a string representation of the Pkg containing the Pkg name and status. +func (p *Pkg) String() string { + b := bytes.NewBufferString(p.Name) + + if !p.Resolved { + b.Write([]byte(" (unresolved)")) + } + + return b.String() +} + +// byInternalAndName ensures a slice of Pkgs are sorted such that the internal stdlib +// packages are always above external packages (ie. github.com/whatever). +type byInternalAndName []Pkg + +func (b byInternalAndName) Len() int { + return len(b) +} + +func (b byInternalAndName) Swap(i, j int) { + b[i], b[j] = b[j], b[i] +} + +func (b byInternalAndName) Less(i, j int) bool { + if b[i].Internal && !b[j].Internal { + return true + } else if !b[i].Internal && b[j].Internal { + return false + } + + return b[i].Name < b[j].Name +} diff --git a/vendor/github.com/alecthomas/template/go.mod b/vendor/github.com/alecthomas/template/go.mod new file mode 100644 index 000000000..a70670ae2 --- /dev/null +++ b/vendor/github.com/alecthomas/template/go.mod @@ -0,0 +1 @@ +module github.com/alecthomas/template diff --git a/vendor/github.com/cpuguy83/go-md2man/v2/LICENSE.md b/vendor/github.com/cpuguy83/go-md2man/v2/LICENSE.md new file mode 100644 index 000000000..1cade6cef --- /dev/null +++ b/vendor/github.com/cpuguy83/go-md2man/v2/LICENSE.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Brian Goff + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/cpuguy83/go-md2man/v2/md2man/md2man.go b/vendor/github.com/cpuguy83/go-md2man/v2/md2man/md2man.go new file mode 100644 index 000000000..b48005673 --- /dev/null +++ b/vendor/github.com/cpuguy83/go-md2man/v2/md2man/md2man.go @@ -0,0 +1,14 @@ +package md2man + +import ( + "github.com/russross/blackfriday/v2" +) + +// Render converts a markdown document into a roff formatted document. 
+func Render(doc []byte) []byte { + renderer := NewRoffRenderer() + + return blackfriday.Run(doc, + []blackfriday.Option{blackfriday.WithRenderer(renderer), + blackfriday.WithExtensions(renderer.GetExtensions())}...) +} diff --git a/vendor/github.com/cpuguy83/go-md2man/v2/md2man/roff.go b/vendor/github.com/cpuguy83/go-md2man/v2/md2man/roff.go new file mode 100644 index 000000000..0668a66cf --- /dev/null +++ b/vendor/github.com/cpuguy83/go-md2man/v2/md2man/roff.go @@ -0,0 +1,345 @@ +package md2man + +import ( + "fmt" + "io" + "os" + "strings" + + "github.com/russross/blackfriday/v2" +) + +// roffRenderer implements the blackfriday.Renderer interface for creating +// roff format (manpages) from markdown text +type roffRenderer struct { + extensions blackfriday.Extensions + listCounters []int + firstHeader bool + defineTerm bool + listDepth int +} + +const ( + titleHeader = ".TH " + topLevelHeader = "\n\n.SH " + secondLevelHdr = "\n.SH " + otherHeader = "\n.SS " + crTag = "\n" + emphTag = "\\fI" + emphCloseTag = "\\fP" + strongTag = "\\fB" + strongCloseTag = "\\fP" + breakTag = "\n.br\n" + paraTag = "\n.PP\n" + hruleTag = "\n.ti 0\n\\l'\\n(.lu'\n" + linkTag = "\n\\[la]" + linkCloseTag = "\\[ra]" + codespanTag = "\\fB\\fC" + codespanCloseTag = "\\fR" + codeTag = "\n.PP\n.RS\n\n.nf\n" + codeCloseTag = "\n.fi\n.RE\n" + quoteTag = "\n.PP\n.RS\n" + quoteCloseTag = "\n.RE\n" + listTag = "\n.RS\n" + listCloseTag = "\n.RE\n" + arglistTag = "\n.TP\n" + tableStart = "\n.TS\nallbox;\n" + tableEnd = ".TE\n" + tableCellStart = "T{\n" + tableCellEnd = "\nT}\n" +) + +// NewRoffRenderer creates a new blackfriday Renderer for generating roff documents +// from markdown +func NewRoffRenderer() *roffRenderer { // nolint: golint + var extensions blackfriday.Extensions + + extensions |= blackfriday.NoIntraEmphasis + extensions |= blackfriday.Tables + extensions |= blackfriday.FencedCode + extensions |= blackfriday.SpaceHeadings + extensions |= blackfriday.Footnotes + extensions |= blackfriday.Titleblock + extensions |= blackfriday.DefinitionLists + return &roffRenderer{ + extensions: extensions, + } +} + +// GetExtensions returns the list of extensions used by this renderer implementation +func (r *roffRenderer) GetExtensions() blackfriday.Extensions { + return r.extensions +} + +// RenderHeader handles outputting the header at document start +func (r *roffRenderer) RenderHeader(w io.Writer, ast *blackfriday.Node) { + // disable hyphenation + out(w, ".nh\n") +} + +// RenderFooter handles outputting the footer at the document end; the roff +// renderer has no footer information +func (r *roffRenderer) RenderFooter(w io.Writer, ast *blackfriday.Node) { +} + +// RenderNode is called for each node in a markdown document; based on the node +// type the equivalent roff output is sent to the writer +func (r *roffRenderer) RenderNode(w io.Writer, node *blackfriday.Node, entering bool) blackfriday.WalkStatus { + + var walkAction = blackfriday.GoToNext + + switch node.Type { + case blackfriday.Text: + r.handleText(w, node, entering) + case blackfriday.Softbreak: + out(w, crTag) + case blackfriday.Hardbreak: + out(w, breakTag) + case blackfriday.Emph: + if entering { + out(w, emphTag) + } else { + out(w, emphCloseTag) + } + case blackfriday.Strong: + if entering { + out(w, strongTag) + } else { + out(w, strongCloseTag) + } + case blackfriday.Link: + if !entering { + out(w, linkTag+string(node.LinkData.Destination)+linkCloseTag) + } + case blackfriday.Image: + // ignore images + walkAction = blackfriday.SkipChildren + case 
blackfriday.Code: + out(w, codespanTag) + escapeSpecialChars(w, node.Literal) + out(w, codespanCloseTag) + case blackfriday.Document: + break + case blackfriday.Paragraph: + // roff .PP markers break lists + if r.listDepth > 0 { + return blackfriday.GoToNext + } + if entering { + out(w, paraTag) + } else { + out(w, crTag) + } + case blackfriday.BlockQuote: + if entering { + out(w, quoteTag) + } else { + out(w, quoteCloseTag) + } + case blackfriday.Heading: + r.handleHeading(w, node, entering) + case blackfriday.HorizontalRule: + out(w, hruleTag) + case blackfriday.List: + r.handleList(w, node, entering) + case blackfriday.Item: + r.handleItem(w, node, entering) + case blackfriday.CodeBlock: + out(w, codeTag) + escapeSpecialChars(w, node.Literal) + out(w, codeCloseTag) + case blackfriday.Table: + r.handleTable(w, node, entering) + case blackfriday.TableCell: + r.handleTableCell(w, node, entering) + case blackfriday.TableHead: + case blackfriday.TableBody: + case blackfriday.TableRow: + // no action as cell entries do all the nroff formatting + return blackfriday.GoToNext + default: + fmt.Fprintln(os.Stderr, "WARNING: go-md2man does not handle node type "+node.Type.String()) + } + return walkAction +} + +func (r *roffRenderer) handleText(w io.Writer, node *blackfriday.Node, entering bool) { + var ( + start, end string + ) + // handle special roff table cell text encapsulation + if node.Parent.Type == blackfriday.TableCell { + if len(node.Literal) > 30 { + start = tableCellStart + end = tableCellEnd + } else { + // end rows that aren't terminated by "tableCellEnd" with a cr if end of row + if node.Parent.Next == nil && !node.Parent.IsHeader { + end = crTag + } + } + } + out(w, start) + escapeSpecialChars(w, node.Literal) + out(w, end) +} + +func (r *roffRenderer) handleHeading(w io.Writer, node *blackfriday.Node, entering bool) { + if entering { + switch node.Level { + case 1: + if !r.firstHeader { + out(w, titleHeader) + r.firstHeader = true + break + } + out(w, topLevelHeader) + case 2: + out(w, secondLevelHdr) + default: + out(w, otherHeader) + } + } +} + +func (r *roffRenderer) handleList(w io.Writer, node *blackfriday.Node, entering bool) { + openTag := listTag + closeTag := listCloseTag + if node.ListFlags&blackfriday.ListTypeDefinition != 0 { + // tags for definition lists handled within Item node + openTag = "" + closeTag = "" + } + if entering { + r.listDepth++ + if node.ListFlags&blackfriday.ListTypeOrdered != 0 { + r.listCounters = append(r.listCounters, 1) + } + out(w, openTag) + } else { + if node.ListFlags&blackfriday.ListTypeOrdered != 0 { + r.listCounters = r.listCounters[:len(r.listCounters)-1] + } + out(w, closeTag) + r.listDepth-- + } +} + +func (r *roffRenderer) handleItem(w io.Writer, node *blackfriday.Node, entering bool) { + if entering { + if node.ListFlags&blackfriday.ListTypeOrdered != 0 { + out(w, fmt.Sprintf(".IP \"%3d.\" 5\n", r.listCounters[len(r.listCounters)-1])) + r.listCounters[len(r.listCounters)-1]++ + } else if node.ListFlags&blackfriday.ListTypeDefinition != 0 { + // state machine for handling terms and following definitions + // since blackfriday does not distinguish them properly, nor + // does it seperate them into separate lists as it should + if !r.defineTerm { + out(w, arglistTag) + r.defineTerm = true + } else { + r.defineTerm = false + } + } else { + out(w, ".IP \\(bu 2\n") + } + } else { + out(w, "\n") + } +} + +func (r *roffRenderer) handleTable(w io.Writer, node *blackfriday.Node, entering bool) { + if entering { + out(w, tableStart) + //call 
walker to count cells (and rows?) so format section can be produced + columns := countColumns(node) + out(w, strings.Repeat("l ", columns)+"\n") + out(w, strings.Repeat("l ", columns)+".\n") + } else { + out(w, tableEnd) + } +} + +func (r *roffRenderer) handleTableCell(w io.Writer, node *blackfriday.Node, entering bool) { + var ( + start, end string + ) + if node.IsHeader { + start = codespanTag + end = codespanCloseTag + } + if entering { + if node.Prev != nil && node.Prev.Type == blackfriday.TableCell { + out(w, "\t"+start) + } else { + out(w, start) + } + } else { + // need to carriage return if we are at the end of the header row + if node.IsHeader && node.Next == nil { + end = end + crTag + } + out(w, end) + } +} + +// because roff format requires knowing the column count before outputting any table +// data we need to walk a table tree and count the columns +func countColumns(node *blackfriday.Node) int { + var columns int + + node.Walk(func(node *blackfriday.Node, entering bool) blackfriday.WalkStatus { + switch node.Type { + case blackfriday.TableRow: + if !entering { + return blackfriday.Terminate + } + case blackfriday.TableCell: + if entering { + columns++ + } + default: + } + return blackfriday.GoToNext + }) + return columns +} + +func out(w io.Writer, output string) { + io.WriteString(w, output) // nolint: errcheck +} + +func needsBackslash(c byte) bool { + for _, r := range []byte("-_&\\~") { + if c == r { + return true + } + } + return false +} + +func escapeSpecialChars(w io.Writer, text []byte) { + for i := 0; i < len(text); i++ { + // escape initial apostrophe or period + if len(text) >= 1 && (text[0] == '\'' || text[0] == '.') { + out(w, "\\&") + } + + // directly copy normal characters + org := i + + for i < len(text) && !needsBackslash(text[i]) { + i++ + } + if i > org { + w.Write(text[org:i]) // nolint: errcheck + } + + // escape a character + if i >= len(text) { + break + } + + w.Write([]byte{'\\', text[i]}) // nolint: errcheck + } +} diff --git a/vendor/github.com/go-openapi/jsonpointer/.travis.yml b/vendor/github.com/go-openapi/jsonpointer/.travis.yml index 3436c4590..9aef9184e 100644 --- a/vendor/github.com/go-openapi/jsonpointer/.travis.yml +++ b/vendor/github.com/go-openapi/jsonpointer/.travis.yml @@ -1,15 +1,15 @@ after_success: - bash <(curl -s https://codecov.io/bash) go: -- '1.9' -- 1.10.x - 1.11.x +- 1.12.x install: -- go get -u github.com/stretchr/testify/assert -- go get -u github.com/go-openapi/swag +- GO111MODULE=off go get -u gotest.tools/gotestsum +env: +- GO111MODULE=on language: go notifications: slack: secure: a5VgoiwB1G/AZqzmephPZIhEB9avMlsWSlVnM1dSAtYAwdrQHGTQxAmpOxYIoSPDhWNN5bfZmjd29++UlTwLcHSR+e0kJhH6IfDlsHj/HplNCJ9tyI0zYc7XchtdKgeMxMzBKCzgwFXGSbQGydXTliDNBo0HOzmY3cou/daMFTP60K+offcjS+3LRAYb1EroSRXZqrk1nuF/xDL3792DZUdPMiFR/L/Df6y74D6/QP4sTkTDFQitz4Wy/7jbsfj8dG6qK2zivgV6/l+w4OVjFkxVpPXogDWY10vVXNVynqxfJ7to2d1I9lNCHE2ilBCkWMIPdyJF7hjF8pKW+82yP4EzRh0vu8Xn0HT5MZpQxdRY/YMxNrWaG7SxsoEaO4q5uhgdzAqLYY3TRa7MjIK+7Ur+aqOeTXn6OKwVi0CjvZ6mIU3WUKSwiwkFZMbjRAkSb5CYwMEfGFO/z964xz83qGt6WAtBXNotqCQpTIiKtDHQeLOMfksHImCg6JLhQcWBVxamVgu0G3Pdh8Y6DyPnxraXY95+QDavbjqv7TeYT9T/FNnrkXaTTK0s4iWE5H4ACU0Qvz0wUYgfQrZv0/Hp7V17+rabUwnzYySHCy9SWX/7OV9Cfh31iMp9ZIffr76xmmThtOEqs8TrTtU6BWI3rWwvA9cXQipZTVtL0oswrGw= script: -- go test -v -race -cover -coverprofile=coverage.txt -covermode=atomic ./... +- gotestsum -f short-verbose -- -race -coverprofile=coverage.txt -covermode=atomic ./... 
diff --git a/vendor/github.com/go-openapi/jsonpointer/go.mod b/vendor/github.com/go-openapi/jsonpointer/go.mod index eb4d623c5..3e45e225b 100644 --- a/vendor/github.com/go-openapi/jsonpointer/go.mod +++ b/vendor/github.com/go-openapi/jsonpointer/go.mod @@ -1,10 +1,9 @@ module github.com/go-openapi/jsonpointer require ( - github.com/davecgh/go-spew v1.1.1 // indirect - github.com/go-openapi/swag v0.17.0 - github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329 // indirect - github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/stretchr/testify v1.2.2 - gopkg.in/yaml.v2 v2.2.1 // indirect + github.com/go-openapi/swag v0.19.5 + github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e // indirect + github.com/stretchr/testify v1.3.0 ) + +go 1.13 diff --git a/vendor/github.com/go-openapi/jsonpointer/go.sum b/vendor/github.com/go-openapi/jsonpointer/go.sum index c71f4d7a2..953d4f354 100644 --- a/vendor/github.com/go-openapi/jsonpointer/go.sum +++ b/vendor/github.com/go-openapi/jsonpointer/go.sum @@ -1,11 +1,24 @@ +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/go-openapi/swag v0.17.0 h1:7wu+dZ5k83kvUWeAb+WUkFiUhDzwGqzTR/NhWzeo1JU= -github.com/go-openapi/swag v0.17.0/go.mod h1:DXUve3Dpr1UfpPtxFw+EFuQ41HhCWZfha5jSVRG7C7I= -github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329 h1:2gxZ0XQIU/5z3Z3bUBu+FXuk2pFbkN6tcwi/pjyaDic= -github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/go-openapi/swag v0.19.5 h1:lTz6Ys4CmqqCQmZPBlbQENR1/GucA2bzYTE12Pw4tFY= +github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= +github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63 h1:nTT4s92Dgz2HlrB2NaMgvlfqHH39OgMhA7z3PK7PGD4= +github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e h1:hB2xlXdHp/pmPZq0y3QnmWAArdw9PqbmotexnWx/FU8= +github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w= -github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= 
+gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/vendor/github.com/go-openapi/jsonpointer/pointer.go b/vendor/github.com/go-openapi/jsonpointer/pointer.go index fe2d6ee57..b284eb77a 100644 --- a/vendor/github.com/go-openapi/jsonpointer/pointer.go +++ b/vendor/github.com/go-openapi/jsonpointer/pointer.go @@ -135,7 +135,7 @@ func getSingleImpl(node interface{}, decodedToken string, nameProvider *swag.Nam kv := reflect.ValueOf(decodedToken) mv := rValue.MapIndex(kv) - if mv.IsValid() && !swag.IsZero(mv) { + if mv.IsValid() { return mv.Interface(), kind, nil } return nil, kind, fmt.Errorf("object has no key %q", decodedToken) diff --git a/vendor/github.com/go-openapi/jsonreference/.travis.yml b/vendor/github.com/go-openapi/jsonreference/.travis.yml index 40034d28d..40b90757d 100644 --- a/vendor/github.com/go-openapi/jsonreference/.travis.yml +++ b/vendor/github.com/go-openapi/jsonreference/.travis.yml @@ -1,16 +1,15 @@ after_success: - bash <(curl -s https://codecov.io/bash) go: -- '1.9' -- 1.10.x - 1.11.x +- 1.12.x install: -- go get -u github.com/stretchr/testify/assert -- go get -u github.com/PuerkitoBio/purell -- go get -u github.com/go-openapi/jsonpointer +- GO111MODULE=off go get -u gotest.tools/gotestsum +env: +- GO111MODULE=on language: go notifications: slack: secure: OpQG/36F7DSF00HLm9WZMhyqFCYYyYTsVDObW226cWiR8PWYiNfLZiSEvIzT1Gx4dDjhigKTIqcLhG34CkL5iNXDjm9Yyo2RYhQPlK8NErNqUEXuBqn4RqYHW48VGhEhOyDd4Ei0E2FN5ZbgpvHgtpkdZ6XDi64r3Ac89isP9aPHXQTuv2Jog6b4/OKKiUTftLcTIst0p4Cp3gqOJWf1wnoj+IadWiECNVQT6zb47IYjtyw6+uV8iUjTzdKcRB6Zc6b4Dq7JAg1Zd7Jfxkql3hlKp4PNlRf9Cy7y5iA3G7MLyg3FcPX5z2kmcyPt2jOTRMBWUJ5zIQpOxizAcN8WsT3WWBL5KbuYK6k0PzujrIDLqdxGpNmjkkMfDBT9cKmZpm2FdW+oZgPFJP+oKmAo4u4KJz/vjiPTXgQlN5bmrLuRMCp+AwC5wkIohTqWZVPE2TK6ZSnMYcg/W39s+RP/9mJoyryAvPSpBOLTI+biCgaUCTOAZxNTWpMFc3tPYntc41WWkdKcooZ9JA5DwfcaVFyTGQ3YXz+HvX6G1z/gW0Q/A4dBi9mj2iE1xm7tRTT+4VQ2AXFvSEI1HJpfPgYnwAtwOD1v3Qm2EUHk9sCdtEDR4wVGEPIVn44GnwFMnGKx9JWppMPYwFu3SVDdHt+E+LOlhZUply11Aa+IVrT2KUQ= script: -- go test -v -race -cover -coverprofile=coverage.txt -covermode=atomic ./... +- gotestsum -f short-verbose -- -race -coverprofile=coverage.txt -covermode=atomic ./... 
diff --git a/vendor/github.com/go-openapi/jsonreference/go.mod b/vendor/github.com/go-openapi/jsonreference/go.mod index 6d15a7050..aff1d0163 100644 --- a/vendor/github.com/go-openapi/jsonreference/go.mod +++ b/vendor/github.com/go-openapi/jsonreference/go.mod @@ -1,15 +1,12 @@ module github.com/go-openapi/jsonreference require ( - github.com/PuerkitoBio/purell v1.1.0 + github.com/PuerkitoBio/purell v1.1.1 github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect - github.com/davecgh/go-spew v1.1.1 // indirect - github.com/go-openapi/jsonpointer v0.17.0 - github.com/go-openapi/swag v0.17.0 // indirect - github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329 // indirect - github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/stretchr/testify v1.2.2 - golang.org/x/net v0.0.0-20181005035420-146acd28ed58 // indirect - golang.org/x/text v0.3.0 // indirect - gopkg.in/yaml.v2 v2.2.1 // indirect + github.com/go-openapi/jsonpointer v0.19.3 + github.com/stretchr/testify v1.3.0 + golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297 // indirect + golang.org/x/text v0.3.2 // indirect ) + +go 1.13 diff --git a/vendor/github.com/go-openapi/jsonreference/go.sum b/vendor/github.com/go-openapi/jsonreference/go.sum index ec9bdbc28..c7ceab580 100644 --- a/vendor/github.com/go-openapi/jsonreference/go.sum +++ b/vendor/github.com/go-openapi/jsonreference/go.sum @@ -1,20 +1,44 @@ -github.com/PuerkitoBio/purell v1.1.0 h1:rmGxhojJlM0tuKtfdvliR84CFHljx9ag64t2xmVkjK4= -github.com/PuerkitoBio/purell v1.1.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= +github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI= +github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M= github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/go-openapi/jsonpointer v0.17.0 h1:Bpl2DtZ6k7wKqfFs7e+4P08+M9I3FQgn09a1UsRUQbk= -github.com/go-openapi/jsonpointer v0.17.0/go.mod h1:+35s3my2LFTysnkMfxsJBAMHj/DoqoB9knIWoYG/Vk0= -github.com/go-openapi/swag v0.17.0 h1:7wu+dZ5k83kvUWeAb+WUkFiUhDzwGqzTR/NhWzeo1JU= -github.com/go-openapi/swag v0.17.0/go.mod h1:DXUve3Dpr1UfpPtxFw+EFuQ41HhCWZfha5jSVRG7C7I= -github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329 h1:2gxZ0XQIU/5z3Z3bUBu+FXuk2pFbkN6tcwi/pjyaDic= -github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/go-openapi/jsonpointer v0.19.2 h1:A9+F4Dc/MCNB5jibxf6rRvOvR/iFgQdyNx9eIhnGqq0= +github.com/go-openapi/jsonpointer v0.19.2/go.mod h1:3akKfEdA7DF1sugOqz1dVQHBcuDBPKZGEoHC/NkiQRg= +github.com/go-openapi/jsonpointer v0.19.3 h1:gihV7YNZK1iK6Tgwwsxo2rJbD1GTbdm72325Bq8FI3w= +github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/swag v0.19.2 h1:jvO6bCMBEilGwMfHhrd61zIID4oIFdwb76V17SM88dE= +github.com/go-openapi/swag v0.19.2/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= +github.com/go-openapi/swag v0.19.5 h1:lTz6Ys4CmqqCQmZPBlbQENR1/GucA2bzYTE12Pw4tFY= +github.com/go-openapi/swag v0.19.5/go.mod 
h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= +github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63 h1:nTT4s92Dgz2HlrB2NaMgvlfqHH39OgMhA7z3PK7PGD4= +github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e h1:hB2xlXdHp/pmPZq0y3QnmWAArdw9PqbmotexnWx/FU8= +github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w= -github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -golang.org/x/net v0.0.0-20181005035420-146acd28ed58 h1:otZG8yDCO4LVps5+9bxOeNiCvgmOyt96J3roHTYs7oE= -golang.org/x/net v0.0.0-20181005035420-146acd28ed58/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980 h1:dfGZHvZk057jK2MCeWus/TowKpJ8y4AmooUzdBSR9GU= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297 h1:k7pJ2yAPLPgbskkFdhRCsA77k2fySZ1zf2zCjvQCiIM= +golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/vendor/github.com/go-openapi/spec/.golangci.yml b/vendor/github.com/go-openapi/spec/.golangci.yml index 00277082f..3e33f9f2e 100644 --- a/vendor/github.com/go-openapi/spec/.golangci.yml +++ b/vendor/github.com/go-openapi/spec/.golangci.yml @@ -8,7 +8,7 @@ linters-settings: 
maligned: suggest-new: true dupl: - threshold: 100 + threshold: 200 goconst: min-len: 2 min-occurrences: 2 diff --git a/vendor/github.com/go-openapi/spec/.travis.yml b/vendor/github.com/go-openapi/spec/.travis.yml index a4f03484b..aa26d8763 100644 --- a/vendor/github.com/go-openapi/spec/.travis.yml +++ b/vendor/github.com/go-openapi/spec/.travis.yml @@ -1,18 +1,15 @@ after_success: - bash <(curl -s https://codecov.io/bash) go: -- '1.9' -- 1.10.x - 1.11.x +- 1.12.x install: -- go get -u github.com/stretchr/testify -- go get -u github.com/go-openapi/swag -- go get -u gopkg.in/yaml.v2 -- go get -u github.com/go-openapi/jsonpointer -- go get -u github.com/go-openapi/jsonreference +- GO111MODULE=off go get -u gotest.tools/gotestsum +env: +- GO111MODULE=on language: go notifications: slack: secure: QUWvCkBBK09GF7YtEvHHVt70JOkdlNBG0nIKu/5qc4/nW5HP8I2w0SEf/XR2je0eED1Qe3L/AfMCWwrEj+IUZc3l4v+ju8X8R3Lomhme0Eb0jd1MTMCuPcBT47YCj0M7RON7vXtbFfm1hFJ/jLe5+9FXz0hpXsR24PJc5ZIi/ogNwkaPqG4BmndzecpSh0vc2FJPZUD9LT0I09REY/vXR0oQAalLkW0asGD5taHZTUZq/kBpsNxaAFrLM23i4mUcf33M5fjLpvx5LRICrX/57XpBrDh2TooBU6Qj3CgoY0uPRYUmSNxbVx1czNzl2JtEpb5yjoxfVPQeg0BvQM00G8LJINISR+ohrjhkZmAqchDupAX+yFrxTtORa78CtnIL6z/aTNlgwwVD8kvL/1pFA/JWYmKDmz93mV/+6wubGzNSQCstzjkFA4/iZEKewKUoRIAi/fxyscP6L/rCpmY/4llZZvrnyTqVbt6URWpopUpH4rwYqreXAtJxJsfBJIeSmUIiDIOMGkCTvyTEW3fWGmGoqWtSHLoaWDyAIGb7azb+KvfpWtEcoPFWfSWU+LGee0A/YsUhBl7ADB9A0CJEuR8q4BPpKpfLwPKSiKSAXL7zDkyjExyhtgqbSl2jS+rKIHOZNL8JkCcTP2MKMVd563C5rC5FMKqu3S9m2b6380E= script: -- go test -v -race -cover -coverprofile=coverage.txt -covermode=atomic ./... +- gotestsum -f short-verbose -- -race -coverprofile=coverage.txt -covermode=atomic ./... diff --git a/vendor/github.com/go-openapi/spec/bindata.go b/vendor/github.com/go-openapi/spec/bindata.go index d5ec7b900..c67e2d877 100644 --- a/vendor/github.com/go-openapi/spec/bindata.go +++ b/vendor/github.com/go-openapi/spec/bindata.go @@ -21,7 +21,7 @@ import ( func bindataRead(data []byte, name string) ([]byte, error) { gz, err := gzip.NewReader(bytes.NewBuffer(data)) if err != nil { - return nil, fmt.Errorf("Read %q: %v", name, err) + return nil, fmt.Errorf("read %q: %v", name, err) } var buf bytes.Buffer @@ -29,7 +29,7 @@ func bindataRead(data []byte, name string) ([]byte, error) { clErr := gz.Close() if err != nil { - return nil, fmt.Errorf("Read %q: %v", name, err) + return nil, fmt.Errorf("read %q: %v", name, err) } if clErr != nil { return nil, err @@ -85,7 +85,7 @@ func jsonschemaDraft04JSON() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "jsonschema-draft-04.json", size: 4357, mode: os.FileMode(436), modTime: time.Unix(1540282154, 0)} + info := bindataFileInfo{name: "jsonschema-draft-04.json", size: 4357, mode: os.FileMode(0644), modTime: time.Unix(1567900649, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xe1, 0x48, 0x9d, 0xb, 0x47, 0x55, 0xf0, 0x27, 0x93, 0x30, 0x25, 0x91, 0xd3, 0xfc, 0xb8, 0xf0, 0x7b, 0x68, 0x93, 0xa8, 0x2a, 0x94, 0xf2, 0x48, 0x95, 0xf8, 0xe4, 0xed, 0xf1, 0x1b, 0x82, 0xe2}} return a, nil } @@ -105,7 +105,7 @@ func v2SchemaJSON() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "v2/schema.json", size: 40249, mode: os.FileMode(436), modTime: time.Unix(1540282154, 0)} + info := bindataFileInfo{name: "v2/schema.json", size: 40249, mode: os.FileMode(0644), modTime: time.Unix(1567900649, 0)} a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xcb, 0x25, 0x27, 0xe8, 0x46, 0xae, 0x22, 0xc4, 0xf4, 0x8b, 0x1, 0x32, 0x4d, 0x1f, 0xf8, 0xdf, 0x75, 0x15, 0xc8, 0x2d, 0xc7, 0xed, 
0xe, 0x7e, 0x0, 0x75, 0xc0, 0xf9, 0xd2, 0x1f, 0x75, 0x57}} return a, nil } diff --git a/vendor/github.com/go-openapi/spec/go.mod b/vendor/github.com/go-openapi/spec/go.mod index 5af64c10b..02a142c03 100644 --- a/vendor/github.com/go-openapi/spec/go.mod +++ b/vendor/github.com/go-openapi/spec/go.mod @@ -1,16 +1,17 @@ module github.com/go-openapi/spec require ( - github.com/PuerkitoBio/purell v1.1.0 // indirect - github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect - github.com/davecgh/go-spew v1.1.1 // indirect - github.com/go-openapi/jsonpointer v0.17.0 - github.com/go-openapi/jsonreference v0.17.0 - github.com/go-openapi/swag v0.17.0 - github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329 // indirect - github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/stretchr/testify v1.2.2 - golang.org/x/net v0.0.0-20181005035420-146acd28ed58 // indirect - golang.org/x/text v0.3.0 // indirect - gopkg.in/yaml.v2 v2.2.1 + github.com/go-openapi/jsonpointer v0.19.3 + github.com/go-openapi/jsonreference v0.19.2 + github.com/go-openapi/swag v0.19.5 + github.com/kr/pty v1.1.5 // indirect + github.com/stretchr/objx v0.2.0 // indirect + github.com/stretchr/testify v1.3.0 + golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8 // indirect + golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297 // indirect + golang.org/x/sys v0.0.0-20190616124812-15dcb6c0061f // indirect + golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59 // indirect + gopkg.in/yaml.v2 v2.2.2 ) + +go 1.13 diff --git a/vendor/github.com/go-openapi/spec/go.sum b/vendor/github.com/go-openapi/spec/go.sum index ab6bfb608..86db601c9 100644 --- a/vendor/github.com/go-openapi/spec/go.sum +++ b/vendor/github.com/go-openapi/spec/go.sum @@ -1,22 +1,74 @@ github.com/PuerkitoBio/purell v1.1.0 h1:rmGxhojJlM0tuKtfdvliR84CFHljx9ag64t2xmVkjK4= github.com/PuerkitoBio/purell v1.1.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= +github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI= +github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M= github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/go-openapi/jsonpointer v0.17.0 h1:Bpl2DtZ6k7wKqfFs7e+4P08+M9I3FQgn09a1UsRUQbk= -github.com/go-openapi/jsonpointer v0.17.0/go.mod h1:+35s3my2LFTysnkMfxsJBAMHj/DoqoB9knIWoYG/Vk0= -github.com/go-openapi/jsonreference v0.17.0 h1:d/o7/fsLWWQZACbihvZxcyLQ59jfUVs7WOJv/ak7T7A= -github.com/go-openapi/jsonreference v0.17.0/go.mod h1:W3Z9FmVs9qj+KR4zFKmDPGiLdk1D9Rlm7cyMvf57TTg= -github.com/go-openapi/swag v0.17.0 h1:7wu+dZ5k83kvUWeAb+WUkFiUhDzwGqzTR/NhWzeo1JU= -github.com/go-openapi/swag v0.17.0/go.mod h1:DXUve3Dpr1UfpPtxFw+EFuQ41HhCWZfha5jSVRG7C7I= +github.com/go-openapi/jsonpointer v0.17.0 h1:nH6xp8XdXHx8dqveo0ZuJBluCO2qGrPbDNZ0dwoRHP0= +github.com/go-openapi/jsonpointer v0.17.0/go.mod h1:cOnomiV+CVVwFLk0A/MExoFMjwdsUdVpsRhURCKh+3M= +github.com/go-openapi/jsonpointer v0.19.0 h1:FTUMcX77w5rQkClIzDtTxvn6Bsa894CcrzNj2MMfeg8= +github.com/go-openapi/jsonpointer v0.19.0/go.mod h1:cOnomiV+CVVwFLk0A/MExoFMjwdsUdVpsRhURCKh+3M= 
+github.com/go-openapi/jsonpointer v0.19.2 h1:A9+F4Dc/MCNB5jibxf6rRvOvR/iFgQdyNx9eIhnGqq0= +github.com/go-openapi/jsonpointer v0.19.2/go.mod h1:3akKfEdA7DF1sugOqz1dVQHBcuDBPKZGEoHC/NkiQRg= +github.com/go-openapi/jsonpointer v0.19.3 h1:gihV7YNZK1iK6Tgwwsxo2rJbD1GTbdm72325Bq8FI3w= +github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonreference v0.19.0 h1:BqWKpV1dFd+AuiKlgtddwVIFQsuMpxfBDBHGfM2yNpk= +github.com/go-openapi/jsonreference v0.19.0/go.mod h1:g4xxGn04lDIRh0GJb5QlpE3HfopLOL6uZrK/VgnsK9I= +github.com/go-openapi/jsonreference v0.19.2 h1:o20suLFB4Ri0tuzpWtyHlh7E7HnkqTNLq6aR6WVNS1w= +github.com/go-openapi/jsonreference v0.19.2/go.mod h1:jMjeRr2HHw6nAVajTXJ4eiUwohSTlpa0o73RUL1owJc= +github.com/go-openapi/swag v0.17.0 h1:iqrgMg7Q7SvtbWLlltPrkMs0UBJI6oTSs79JFRUi880= +github.com/go-openapi/swag v0.17.0/go.mod h1:AByQ+nYG6gQg71GINrmuDXCPWdL640yX49/kXLo40Tg= +github.com/go-openapi/swag v0.19.2 h1:jvO6bCMBEilGwMfHhrd61zIID4oIFdwb76V17SM88dE= +github.com/go-openapi/swag v0.19.2/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= +github.com/go-openapi/swag v0.19.5 h1:lTz6Ys4CmqqCQmZPBlbQENR1/GucA2bzYTE12Pw4tFY= +github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= +github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA= +github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329 h1:2gxZ0XQIU/5z3Z3bUBu+FXuk2pFbkN6tcwi/pjyaDic= github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63 h1:nTT4s92Dgz2HlrB2NaMgvlfqHH39OgMhA7z3PK7PGD4= +github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e h1:hB2xlXdHp/pmPZq0y3QnmWAArdw9PqbmotexnWx/FU8= +github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/net v0.0.0-20181005035420-146acd28ed58 h1:otZG8yDCO4LVps5+9bxOeNiCvgmOyt96J3roHTYs7oE= golang.org/x/net v0.0.0-20181005035420-146acd28ed58/go.mod 
h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980 h1:dfGZHvZk057jK2MCeWus/TowKpJ8y4AmooUzdBSR9GU= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297 h1:k7pJ2yAPLPgbskkFdhRCsA77k2fySZ1zf2zCjvQCiIM= +golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190616124812-15dcb6c0061f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.2.1 h1:mUhvW9EsL+naU5Q3cakzfE91YhliOondGd6ZrsDBHQE= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/vendor/github.com/go-openapi/spec/items.go b/vendor/github.com/go-openapi/spec/items.go index 78389317e..365d16315 100644 --- a/vendor/github.com/go-openapi/spec/items.go +++ b/vendor/github.com/go-openapi/spec/items.go @@ -29,6 +29,7 @@ const ( // SimpleSchema describe swagger simple schemas for parameters and headers type SimpleSchema struct { Type string `json:"type,omitempty"` + Nullable bool `json:"nullable,omitempty"` Format string `json:"format,omitempty"` Items *Items `json:"items,omitempty"` CollectionFormat string `json:"collectionFormat,omitempty"` @@ -91,6 +92,12 @@ func (i *Items) Typed(tpe, format string) *Items { return i } +// AsNullable flags this schema as nullable. 
+func (i *Items) AsNullable() *Items { + i.Nullable = true + return i +} + // CollectionOf a fluent builder method for an array item func (i *Items) CollectionOf(items *Items, format string) *Items { i.Type = jsonArray diff --git a/vendor/github.com/go-openapi/spec/schema.go b/vendor/github.com/go-openapi/spec/schema.go index ce30d26e3..37858ece9 100644 --- a/vendor/github.com/go-openapi/spec/schema.go +++ b/vendor/github.com/go-openapi/spec/schema.go @@ -163,6 +163,7 @@ type SchemaProps struct { Schema SchemaURL `json:"-"` Description string `json:"description,omitempty"` Type StringOrArray `json:"type,omitempty"` + Nullable bool `json:"nullable,omitempty"` Format string `json:"format,omitempty"` Title string `json:"title,omitempty"` Default interface{} `json:"default,omitempty"` @@ -302,6 +303,12 @@ func (s *Schema) AddType(tpe, format string) *Schema { return s } +// AsNullable flags this schema as nullable. +func (s *Schema) AsNullable() *Schema { + s.Nullable = true + return s +} + // CollectionOf a fluent builder method for an array parameter func (s *Schema) CollectionOf(items Schema) *Schema { s.Type = []string{jsonArray} diff --git a/vendor/github.com/go-openapi/spec/schema_loader.go b/vendor/github.com/go-openapi/spec/schema_loader.go index c34a96fa0..9e20e96c2 100644 --- a/vendor/github.com/go-openapi/spec/schema_loader.go +++ b/vendor/github.com/go-openapi/spec/schema_loader.go @@ -160,6 +160,7 @@ func (r *schemaLoader) load(refURL *url.URL) (interface{}, url.URL, bool, error) if !fromCache { b, err := r.loadDoc(normalized) if err != nil { + debugLog("unable to load the document: %v", err) return nil, url.URL{}, false, err } diff --git a/vendor/github.com/go-openapi/spec/swagger.go b/vendor/github.com/go-openapi/spec/swagger.go index 454617e58..44722ffd5 100644 --- a/vendor/github.com/go-openapi/spec/swagger.go +++ b/vendor/github.com/go-openapi/spec/swagger.go @@ -15,6 +15,8 @@ package spec import ( + "bytes" + "encoding/gob" "encoding/json" "fmt" "strconv" @@ -68,6 +70,36 @@ func (s *Swagger) UnmarshalJSON(data []byte) error { return nil } +// GobEncode provides a safe gob encoder for Swagger, including extensions +func (s Swagger) GobEncode() ([]byte, error) { + var b bytes.Buffer + raw := struct { + Props SwaggerProps + Ext VendorExtensible + }{ + Props: s.SwaggerProps, + Ext: s.VendorExtensible, + } + err := gob.NewEncoder(&b).Encode(raw) + return b.Bytes(), err +} + +// GobDecode provides a safe gob decoder for Swagger, including extensions +func (s *Swagger) GobDecode(b []byte) error { + var raw struct { + Props SwaggerProps + Ext VendorExtensible + } + buf := bytes.NewBuffer(b) + err := gob.NewDecoder(buf).Decode(&raw) + if err != nil { + return err + } + s.SwaggerProps = raw.Props + s.VendorExtensible = raw.Ext + return nil +} + // SwaggerProps captures the top-level properties of an Api specification // // NOTE: validation rules @@ -93,6 +125,98 @@ type SwaggerProps struct { ExternalDocs *ExternalDocumentation `json:"externalDocs,omitempty"` } +type swaggerPropsAlias SwaggerProps + +type gobSwaggerPropsAlias struct { + Security []map[string]struct { + List []string + Pad bool + } + Alias *swaggerPropsAlias + SecurityIsEmpty bool +} + +// GobEncode provides a safe gob encoder for SwaggerProps, including empty security requirements +func (o SwaggerProps) GobEncode() ([]byte, error) { + raw := gobSwaggerPropsAlias{ + Alias: (*swaggerPropsAlias)(&o), + } + + var b bytes.Buffer + if o.Security == nil { + // nil security requirement + err := gob.NewEncoder(&b).Encode(raw) + 
return b.Bytes(), err + } + + if len(o.Security) == 0 { + // empty, but non-nil security requirement + raw.SecurityIsEmpty = true + raw.Alias.Security = nil + err := gob.NewEncoder(&b).Encode(raw) + return b.Bytes(), err + } + + raw.Security = make([]map[string]struct { + List []string + Pad bool + }, 0, len(o.Security)) + for _, req := range o.Security { + v := make(map[string]struct { + List []string + Pad bool + }, len(req)) + for k, val := range req { + v[k] = struct { + List []string + Pad bool + }{ + List: val, + } + } + raw.Security = append(raw.Security, v) + } + + err := gob.NewEncoder(&b).Encode(raw) + return b.Bytes(), err +} + +// GobDecode provides a safe gob decoder for SwaggerProps, including empty security requirements +func (o *SwaggerProps) GobDecode(b []byte) error { + var raw gobSwaggerPropsAlias + + buf := bytes.NewBuffer(b) + err := gob.NewDecoder(buf).Decode(&raw) + if err != nil { + return err + } + if raw.Alias == nil { + return nil + } + + switch { + case raw.SecurityIsEmpty: + // empty, but non-nil security requirement + raw.Alias.Security = []map[string][]string{} + case len(raw.Alias.Security) == 0: + // nil security requirement + raw.Alias.Security = nil + default: + raw.Alias.Security = make([]map[string][]string, 0, len(raw.Security)) + for _, req := range raw.Security { + v := make(map[string][]string, len(req)) + for k, val := range req { + v[k] = make([]string, 0, len(val.List)) + v[k] = append(v[k], val.List...) + } + raw.Alias.Security = append(raw.Alias.Security, v) + } + } + + *o = *(*SwaggerProps)(raw.Alias) + return nil +} + // Dependencies represent a dependencies property type Dependencies map[string]SchemaOrStringArray diff --git a/vendor/github.com/go-openapi/swag/.gitignore b/vendor/github.com/go-openapi/swag/.gitignore index 5862205ee..d69b53acc 100644 --- a/vendor/github.com/go-openapi/swag/.gitignore +++ b/vendor/github.com/go-openapi/swag/.gitignore @@ -1,3 +1,4 @@ secrets.yml vendor Godeps +.idea diff --git a/vendor/github.com/go-openapi/swag/.travis.yml b/vendor/github.com/go-openapi/swag/.travis.yml index bd3a2e527..aa26d8763 100644 --- a/vendor/github.com/go-openapi/swag/.travis.yml +++ b/vendor/github.com/go-openapi/swag/.travis.yml @@ -1,16 +1,15 @@ after_success: - bash <(curl -s https://codecov.io/bash) go: -- '1.9' -- 1.10.x - 1.11.x +- 1.12.x install: -- go get -u github.com/stretchr/testify -- go get -u github.com/mailru/easyjson -- go get -u gopkg.in/yaml.v2 +- GO111MODULE=off go get -u gotest.tools/gotestsum +env: +- GO111MODULE=on language: go notifications: slack: secure: QUWvCkBBK09GF7YtEvHHVt70JOkdlNBG0nIKu/5qc4/nW5HP8I2w0SEf/XR2je0eED1Qe3L/AfMCWwrEj+IUZc3l4v+ju8X8R3Lomhme0Eb0jd1MTMCuPcBT47YCj0M7RON7vXtbFfm1hFJ/jLe5+9FXz0hpXsR24PJc5ZIi/ogNwkaPqG4BmndzecpSh0vc2FJPZUD9LT0I09REY/vXR0oQAalLkW0asGD5taHZTUZq/kBpsNxaAFrLM23i4mUcf33M5fjLpvx5LRICrX/57XpBrDh2TooBU6Qj3CgoY0uPRYUmSNxbVx1czNzl2JtEpb5yjoxfVPQeg0BvQM00G8LJINISR+ohrjhkZmAqchDupAX+yFrxTtORa78CtnIL6z/aTNlgwwVD8kvL/1pFA/JWYmKDmz93mV/+6wubGzNSQCstzjkFA4/iZEKewKUoRIAi/fxyscP6L/rCpmY/4llZZvrnyTqVbt6URWpopUpH4rwYqreXAtJxJsfBJIeSmUIiDIOMGkCTvyTEW3fWGmGoqWtSHLoaWDyAIGb7azb+KvfpWtEcoPFWfSWU+LGee0A/YsUhBl7ADB9A0CJEuR8q4BPpKpfLwPKSiKSAXL7zDkyjExyhtgqbSl2jS+rKIHOZNL8JkCcTP2MKMVd563C5rC5FMKqu3S9m2b6380E= script: -- go test -v -race -cover -coverprofile=coverage.txt -covermode=atomic ./... +- gotestsum -f short-verbose -- -race -coverprofile=coverage.txt -covermode=atomic ./... 
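For context, the go-openapi/spec hunks above add a fluent `AsNullable()` builder to both `Items` and `Schema` (backed by the new `Nullable` field). A minimal sketch of how that builder can be used follows; it is illustrative rather than part of this patch, and it assumes the vendored `github.com/go-openapi/spec` package plus its existing `spec.StringProperty()` helper:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/go-openapi/spec"
)

func main() {
	// Build a string schema and mark it as nullable via the new fluent builder.
	s := spec.StringProperty().AsNullable()

	// The added Nullable field serializes as "nullable": true.
	b, err := json.Marshal(s)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b)) // prints something like {"type":"string","nullable":true}
}
```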
diff --git a/vendor/github.com/go-openapi/swag/README.md b/vendor/github.com/go-openapi/swag/README.md index 459a3e18d..eb60ae80a 100644 --- a/vendor/github.com/go-openapi/swag/README.md +++ b/vendor/github.com/go-openapi/swag/README.md @@ -19,5 +19,4 @@ You may also use it standalone for your projects. This repo has only few dependencies outside of the standard library: -* JSON utilities depend on github.com/mailru/easyjson * YAML utilities depend on gopkg.in/yaml.v2 diff --git a/vendor/github.com/go-openapi/swag/convert.go b/vendor/github.com/go-openapi/swag/convert.go index 4e446ff70..7da35c316 100644 --- a/vendor/github.com/go-openapi/swag/convert.go +++ b/vendor/github.com/go-openapi/swag/convert.go @@ -39,11 +39,12 @@ func IsFloat64AJSONInteger(f float64) bool { diff := math.Abs(f - g) // more info: https://floating-point-gui.de/errors/comparison/#look-out-for-edge-cases - if f == g { // best case + switch { + case f == g: // best case return true - } else if f == float64(int64(f)) || f == float64(uint64(f)) { // optimistic case + case f == float64(int64(f)) || f == float64(uint64(f)): // optimistic case return true - } else if f == 0 || g == 0 || diff < math.SmallestNonzeroFloat64 { // very close to 0 values + case f == 0 || g == 0 || diff < math.SmallestNonzeroFloat64: // very close to 0 values return diff < (epsilon * math.SmallestNonzeroFloat64) } // check the relative error diff --git a/vendor/github.com/go-openapi/swag/doc.go b/vendor/github.com/go-openapi/swag/doc.go index e01e1a023..8d2c8c501 100644 --- a/vendor/github.com/go-openapi/swag/doc.go +++ b/vendor/github.com/go-openapi/swag/doc.go @@ -27,7 +27,6 @@ You may also use it standalone for your projects. This repo has only few dependencies outside of the standard library: - * JSON utilities depend on github.com/mailru/easyjson * YAML utilities depend on gopkg.in/yaml.v2 */ package swag diff --git a/vendor/github.com/go-openapi/swag/go.mod b/vendor/github.com/go-openapi/swag/go.mod index 9eb936a19..15bbb0822 100644 --- a/vendor/github.com/go-openapi/swag/go.mod +++ b/vendor/github.com/go-openapi/swag/go.mod @@ -2,8 +2,13 @@ module github.com/go-openapi/swag require ( github.com/davecgh/go-spew v1.1.1 // indirect - github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329 - github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/stretchr/testify v1.2.2 - gopkg.in/yaml.v2 v2.2.1 + github.com/kr/pretty v0.1.0 // indirect + github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63 + github.com/stretchr/testify v1.3.0 + gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 // indirect + gopkg.in/yaml.v2 v2.2.2 ) + +replace github.com/golang/lint => golang.org/x/lint v0.0.0-20190409202823-959b441ac422 + +replace sourcegraph.com/sourcegraph/go-diff => github.com/sourcegraph/go-diff v0.5.1 diff --git a/vendor/github.com/go-openapi/swag/go.sum b/vendor/github.com/go-openapi/swag/go.sum index d6e717bd4..33469f54a 100644 --- a/vendor/github.com/go-openapi/swag/go.sum +++ b/vendor/github.com/go-openapi/swag/go.sum @@ -1,9 +1,20 @@ +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329 h1:2gxZ0XQIU/5z3Z3bUBu+FXuk2pFbkN6tcwi/pjyaDic= -github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= 
+github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63 h1:nTT4s92Dgz2HlrB2NaMgvlfqHH39OgMhA7z3PK7PGD4= +github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w= -github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/vendor/github.com/go-openapi/swag/json.go b/vendor/github.com/go-openapi/swag/json.go index 62ab15e54..edf93d84c 100644 --- a/vendor/github.com/go-openapi/swag/json.go +++ b/vendor/github.com/go-openapi/swag/json.go @@ -99,7 +99,7 @@ func ConcatJSON(blobs ...[]byte) []byte { last := len(blobs) - 1 for blobs[last] == nil || bytes.Equal(blobs[last], nullJSON) { // strips trailing null objects - last = last - 1 + last-- if last < 0 { // there was nothing but "null"s or nil... return nil diff --git a/vendor/github.com/go-openapi/swag/name_lexem.go b/vendor/github.com/go-openapi/swag/name_lexem.go new file mode 100644 index 000000000..aa7f6a9bb --- /dev/null +++ b/vendor/github.com/go-openapi/swag/name_lexem.go @@ -0,0 +1,87 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package swag + +import "unicode" + +type ( + nameLexem interface { + GetUnsafeGoName() string + GetOriginal() string + IsInitialism() bool + } + + initialismNameLexem struct { + original string + matchedInitialism string + } + + casualNameLexem struct { + original string + } +) + +func newInitialismNameLexem(original, matchedInitialism string) *initialismNameLexem { + return &initialismNameLexem{ + original: original, + matchedInitialism: matchedInitialism, + } +} + +func newCasualNameLexem(original string) *casualNameLexem { + return &casualNameLexem{ + original: original, + } +} + +func (l *initialismNameLexem) GetUnsafeGoName() string { + return l.matchedInitialism +} + +func (l *casualNameLexem) GetUnsafeGoName() string { + var first rune + var rest string + for i, orig := range l.original { + if i == 0 { + first = orig + continue + } + if i > 0 { + rest = l.original[i:] + break + } + } + if len(l.original) > 1 { + return string(unicode.ToUpper(first)) + lower(rest) + } + + return l.original +} + +func (l *initialismNameLexem) GetOriginal() string { + return l.original +} + +func (l *casualNameLexem) GetOriginal() string { + return l.original +} + +func (l *initialismNameLexem) IsInitialism() bool { + return true +} + +func (l *casualNameLexem) IsInitialism() bool { + return false +} diff --git a/vendor/github.com/go-openapi/swag/split.go b/vendor/github.com/go-openapi/swag/split.go new file mode 100644 index 000000000..a1825fb7d --- /dev/null +++ b/vendor/github.com/go-openapi/swag/split.go @@ -0,0 +1,262 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package swag + +import ( + "unicode" +) + +var nameReplaceTable = map[rune]string{ + '@': "At ", + '&': "And ", + '|': "Pipe ", + '$': "Dollar ", + '!': "Bang ", + '-': "", + '_': "", +} + +type ( + splitter struct { + postSplitInitialismCheck bool + initialisms []string + } + + splitterOption func(*splitter) *splitter +) + +// split calls the splitter; splitter provides more control and post options +func split(str string) []string { + lexems := newSplitter().split(str) + result := make([]string, 0, len(lexems)) + + for _, lexem := range lexems { + result = append(result, lexem.GetOriginal()) + } + + return result + +} + +func (s *splitter) split(str string) []nameLexem { + return s.toNameLexems(str) +} + +func newSplitter(options ...splitterOption) *splitter { + splitter := &splitter{ + postSplitInitialismCheck: false, + initialisms: initialisms, + } + + for _, option := range options { + splitter = option(splitter) + } + + return splitter +} + +// withPostSplitInitialismCheck allows to catch initialisms after main split process +func withPostSplitInitialismCheck(s *splitter) *splitter { + s.postSplitInitialismCheck = true + return s +} + +type ( + initialismMatch struct { + start, end int + body []rune + complete bool + } + initialismMatches []*initialismMatch +) + +func (s *splitter) toNameLexems(name string) []nameLexem { + nameRunes := []rune(name) + matches := s.gatherInitialismMatches(nameRunes) + return s.mapMatchesToNameLexems(nameRunes, matches) +} + +func (s *splitter) gatherInitialismMatches(nameRunes []rune) initialismMatches { + matches := make(initialismMatches, 0) + + for currentRunePosition, currentRune := range nameRunes { + newMatches := make(initialismMatches, 0, len(matches)) + + // check current initialism matches + for _, match := range matches { + if keepCompleteMatch := match.complete; keepCompleteMatch { + newMatches = append(newMatches, match) + continue + } + + // drop failed match + currentMatchRune := match.body[currentRunePosition-match.start] + if !s.initialismRuneEqual(currentMatchRune, currentRune) { + continue + } + + // try to complete ongoing match + if currentRunePosition-match.start == len(match.body)-1 { + // we are close; the next step is to check the symbol ahead + // if it is a small letter, then it is not the end of match + // but beginning of the next word + + if currentRunePosition < len(nameRunes)-1 { + nextRune := nameRunes[currentRunePosition+1] + if newWord := unicode.IsLower(nextRune); newWord { + // oh ok, it was the start of a new word + continue + } + } + + match.complete = true + match.end = currentRunePosition + } + + newMatches = append(newMatches, match) + } + + // check for new initialism matches + for _, initialism := range s.initialisms { + initialismRunes := []rune(initialism) + if s.initialismRuneEqual(initialismRunes[0], currentRune) { + newMatches = append(newMatches, &initialismMatch{ + start: currentRunePosition, + body: initialismRunes, + complete: false, + }) + } + } + + matches = newMatches + } + + return matches +} + +func (s *splitter) mapMatchesToNameLexems(nameRunes []rune, matches initialismMatches) []nameLexem { + nameLexems := make([]nameLexem, 0) + + var lastAcceptedMatch *initialismMatch + for _, match := range matches { + if !match.complete { + continue + } + + if firstMatch := lastAcceptedMatch == nil; firstMatch { + nameLexems = append(nameLexems, s.breakCasualString(nameRunes[:match.start])...) 
+ nameLexems = append(nameLexems, s.breakInitialism(string(match.body))) + + lastAcceptedMatch = match + + continue + } + + if overlappedMatch := match.start <= lastAcceptedMatch.end; overlappedMatch { + continue + } + + middle := nameRunes[lastAcceptedMatch.end+1 : match.start] + nameLexems = append(nameLexems, s.breakCasualString(middle)...) + nameLexems = append(nameLexems, s.breakInitialism(string(match.body))) + + lastAcceptedMatch = match + } + + // we have not found any accepted matches + if lastAcceptedMatch == nil { + return s.breakCasualString(nameRunes) + } + + if lastAcceptedMatch.end+1 != len(nameRunes) { + rest := nameRunes[lastAcceptedMatch.end+1:] + nameLexems = append(nameLexems, s.breakCasualString(rest)...) + } + + return nameLexems +} + +func (s *splitter) initialismRuneEqual(a, b rune) bool { + return a == b +} + +func (s *splitter) breakInitialism(original string) nameLexem { + return newInitialismNameLexem(original, original) +} + +func (s *splitter) breakCasualString(str []rune) []nameLexem { + segments := make([]nameLexem, 0) + currentSegment := "" + + addCasualNameLexem := func(original string) { + segments = append(segments, newCasualNameLexem(original)) + } + + addInitialismNameLexem := func(original, match string) { + segments = append(segments, newInitialismNameLexem(original, match)) + } + + addNameLexem := func(original string) { + if s.postSplitInitialismCheck { + for _, initialism := range s.initialisms { + if upper(initialism) == upper(original) { + addInitialismNameLexem(original, initialism) + return + } + } + } + + addCasualNameLexem(original) + } + + for _, rn := range string(str) { + if replace, found := nameReplaceTable[rn]; found { + if currentSegment != "" { + addNameLexem(currentSegment) + currentSegment = "" + } + + if replace != "" { + addNameLexem(replace) + } + + continue + } + + if !unicode.In(rn, unicode.L, unicode.M, unicode.N, unicode.Pc) { + if currentSegment != "" { + addNameLexem(currentSegment) + currentSegment = "" + } + + continue + } + + if unicode.IsUpper(rn) { + if currentSegment != "" { + addNameLexem(currentSegment) + } + currentSegment = "" + } + + currentSegment += string(rn) + } + + if currentSegment != "" { + addNameLexem(currentSegment) + } + + return segments +} diff --git a/vendor/github.com/go-openapi/swag/util.go b/vendor/github.com/go-openapi/swag/util.go index 87c54df80..9eac16afb 100644 --- a/vendor/github.com/go-openapi/swag/util.go +++ b/vendor/github.com/go-openapi/swag/util.go @@ -15,11 +15,8 @@ package swag import ( - "math" "reflect" - "regexp" "strings" - "sync" "unicode" ) @@ -29,15 +26,15 @@ var commonInitialisms *indexOfInitialisms // initialisms is a slice of sorted initialisms var initialisms []string -var once sync.Once - var isInitialism func(string) bool -var ( - splitRex1 *regexp.Regexp - splitRex2 *regexp.Regexp - splitReplacer *strings.Replacer -) +// GoNamePrefixFunc sets an optional rule to prefix go names +// which do not start with a letter. +// +// e.g. 
to help converting "123" into "{prefix}123" +// +// The default is to prefix with "X" +var GoNamePrefixFunc func(string) string func init() { // Taken from https://github.com/golang/lint/blob/3390df4df2787994aea98de825b964ac7944b817/lint.go#L732-L769 @@ -55,6 +52,8 @@ func init() { "HTTP": true, "ID": true, "IP": true, + "IPv4": true, + "IPv6": true, "JSON": true, "LHS": true, "OAI": true, @@ -85,15 +84,12 @@ func init() { // a thread-safe index of initialisms commonInitialisms = newIndexOfInitialisms().load(configuredInitialisms) + initialisms = commonInitialisms.sorted() // a test function isInitialism = commonInitialisms.isInitialism } -func ensureSorted() { - initialisms = commonInitialisms.sorted() -} - const ( //collectionFormatComma = "csv" collectionFormatSpace = "ssv" @@ -175,40 +171,6 @@ func (s byInitialism) Less(i, j int) bool { return strings.Compare(s[i], s[j]) > 0 } -// Prepares strings by splitting by caps, spaces, dashes, and underscore -func split(str string) []string { - // check if consecutive single char things make up an initialism - once.Do(func() { - splitRex1 = regexp.MustCompile(`(\p{Lu})`) - splitRex2 = regexp.MustCompile(`(\pL|\pM|\pN|\p{Pc})+`) - splitReplacer = strings.NewReplacer( - "@", "At ", - "&", "And ", - "|", "Pipe ", - "$", "Dollar ", - "!", "Bang ", - "-", " ", - "_", " ", - ) - ensureSorted() - }) - - str = trim(str) - - // Convert dash and underscore to spaces - str = splitReplacer.Replace(str) - - // Split when uppercase is found (needed for Snake) - str = splitRex1.ReplaceAllString(str, " $1") - - for _, k := range initialisms { - str = strings.Replace(str, splitRex1.ReplaceAllString(k, " $1"), " "+k, -1) - } - // Get the final list of words - //words = rex2.FindAllString(str, -1) - return splitRex2.FindAllString(str, -1) -} - // Removes leading whitespaces func trim(str string) string { return strings.Trim(str, " ") @@ -261,30 +223,31 @@ func ToCommandName(name string) string { // ToHumanNameLower represents a code name as a human series of words func ToHumanNameLower(name string) string { - in := split(name) + in := newSplitter(withPostSplitInitialismCheck).split(name) out := make([]string, 0, len(in)) for _, w := range in { - if !isInitialism(upper(w)) { - out = append(out, lower(w)) + if !w.IsInitialism() { + out = append(out, lower(w.GetOriginal())) } else { - out = append(out, w) + out = append(out, w.GetOriginal()) } } + return strings.Join(out, " ") } // ToHumanNameTitle represents a code name as a human series of words with the first letters titleized func ToHumanNameTitle(name string) string { - in := split(name) - out := make([]string, 0, len(in)) + in := newSplitter(withPostSplitInitialismCheck).split(name) + out := make([]string, 0, len(in)) for _, w := range in { - uw := upper(w) - if !isInitialism(uw) { - out = append(out, Camelize(w)) + original := w.GetOriginal() + if !w.IsInitialism() { + out = append(out, Camelize(original)) } else { - out = append(out, w) + out = append(out, original) } } return strings.Join(out, " ") @@ -319,24 +282,34 @@ func ToVarName(name string) string { // ToGoName translates a swagger name which can be underscored or camel cased to a name that golint likes func ToGoName(name string) string { - in := split(name) - out := make([]string, 0, len(in)) + lexems := newSplitter(withPostSplitInitialismCheck).split(name) - for _, w := range in { - uw := upper(w) - mod := int(math.Min(float64(len(uw)), 2)) - if !isInitialism(uw) && !isInitialism(uw[:len(uw)-mod]) { - uw = Camelize(w) + result := "" + for _, lexem 
:= range lexems { + goName := lexem.GetUnsafeGoName() + + // to support old behavior + if lexem.IsInitialism() { + goName = upper(goName) } - out = append(out, uw) + result += goName } - result := strings.Join(out, "") if len(result) > 0 { - if !unicode.IsUpper([]rune(result)[0]) { - result = "X" + result + // Only prefix with X when the first character isn't an ascii letter + first := []rune(result)[0] + if !unicode.IsLetter(first) || (first > unicode.MaxASCII && !unicode.IsUpper(first)) { + if GoNamePrefixFunc == nil { + return "X" + result + } + result = GoNamePrefixFunc(name) + result + } + first = []rune(result)[0] + if unicode.IsLetter(first) && !unicode.IsUpper(first) { + result = string(append([]rune{unicode.ToUpper(first)}, []rune(result)[1:]...)) } } + return result } diff --git a/vendor/github.com/go-openapi/swag/yaml.go b/vendor/github.com/go-openapi/swag/yaml.go index 435e2948e..ec9691440 100644 --- a/vendor/github.com/go-openapi/swag/yaml.go +++ b/vendor/github.com/go-openapi/swag/yaml.go @@ -143,19 +143,43 @@ func (s *JSONMapItem) UnmarshalEasyJSON(in *jlexer.Lexer) { } func transformData(input interface{}) (out interface{}, err error) { + format := func(t interface{}) (string, error) { + switch k := t.(type) { + case string: + return k, nil + case uint: + return strconv.FormatUint(uint64(k), 10), nil + case uint8: + return strconv.FormatUint(uint64(k), 10), nil + case uint16: + return strconv.FormatUint(uint64(k), 10), nil + case uint32: + return strconv.FormatUint(uint64(k), 10), nil + case uint64: + return strconv.FormatUint(k, 10), nil + case int: + return strconv.Itoa(k), nil + case int8: + return strconv.FormatInt(int64(k), 10), nil + case int16: + return strconv.FormatInt(int64(k), 10), nil + case int32: + return strconv.FormatInt(int64(k), 10), nil + case int64: + return strconv.FormatInt(k, 10), nil + default: + return "", fmt.Errorf("unexpected map key type, got: %T", k) + } + } + switch in := input.(type) { case yaml.MapSlice: o := make(JSONMapSlice, len(in)) for i, mi := range in { var nmi JSONMapItem - switch k := mi.Key.(type) { - case string: - nmi.Key = k - case int: - nmi.Key = strconv.Itoa(k) - default: - return nil, fmt.Errorf("types don't match expect map key string or int got: %T", mi.Key) + if nmi.Key, err = format(mi.Key); err != nil { + return nil, err } v, ert := transformData(mi.Value) @@ -170,13 +194,8 @@ func transformData(input interface{}) (out interface{}, err error) { o := make(JSONMapSlice, 0, len(in)) for ke, va := range in { var nmi JSONMapItem - switch k := ke.(type) { - case string: - nmi.Key = k - case int: - nmi.Key = strconv.Itoa(k) - default: - return nil, fmt.Errorf("types don't match expect map key string or int got: %T", ke) + if nmi.Key, err = format(ke); err != nil { + return nil, err } v, ert := transformData(va) diff --git a/vendor/github.com/golang/protobuf/proto/properties.go b/vendor/github.com/golang/protobuf/proto/properties.go index 79668ff5c..a4b8c0cd3 100644 --- a/vendor/github.com/golang/protobuf/proto/properties.go +++ b/vendor/github.com/golang/protobuf/proto/properties.go @@ -38,7 +38,6 @@ package proto import ( "fmt" "log" - "os" "reflect" "sort" "strconv" @@ -194,7 +193,7 @@ func (p *Properties) Parse(s string) { // "bytes,49,opt,name=foo,def=hello!" fields := strings.Split(s, ",") // breaks def=, but handled below. 
if len(fields) < 2 { - fmt.Fprintf(os.Stderr, "proto: tag has too few fields: %q\n", s) + log.Printf("proto: tag has too few fields: %q", s) return } @@ -214,7 +213,7 @@ func (p *Properties) Parse(s string) { p.WireType = WireBytes // no numeric converter for non-numeric types default: - fmt.Fprintf(os.Stderr, "proto: tag has unknown wire type: %q\n", s) + log.Printf("proto: tag has unknown wire type: %q", s) return } diff --git a/vendor/github.com/labstack/echo/v4/go.sum b/vendor/github.com/labstack/echo/v4/go.sum index 987bc9ec8..695f92764 100644 --- a/vendor/github.com/labstack/echo/v4/go.sum +++ b/vendor/github.com/labstack/echo/v4/go.sum @@ -45,6 +45,7 @@ golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20190531175056-4c3a928424d2 h1:T5DasATyLQfmbTpfEXx/IOL9vfjzW6up+ZDkmHvIf2s= golang.org/x/sys v0.0.0-20190531175056-4c3a928424d2/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190602015325-4c4f7f33c9ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190609082536-301114b31cce h1:CQakrGkKbydnUmt7cFIlmQ4lNQiqdTPt6xzXij4nYCc= golang.org/x/sys v0.0.0-20190609082536-301114b31cce/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= diff --git a/vendor/github.com/mailru/easyjson/jlexer/lexer.go b/vendor/github.com/mailru/easyjson/jlexer/lexer.go index ef640751e..ddd376b84 100644 --- a/vendor/github.com/mailru/easyjson/jlexer/lexer.go +++ b/vendor/github.com/mailru/easyjson/jlexer/lexer.go @@ -1152,7 +1152,7 @@ func (r *Lexer) Interface() interface{} { } else if r.token.delimValue == '[' { r.consume() - var ret []interface{} + ret := []interface{}{} for !r.IsDelim(']') { ret = append(ret, r.Interface()) r.WantComma() diff --git a/vendor/github.com/mattn/go-colorable/go.sum b/vendor/github.com/mattn/go-colorable/go.sum index 2c12960ec..a53c2ca7d 100644 --- a/vendor/github.com/mattn/go-colorable/go.sum +++ b/vendor/github.com/mattn/go-colorable/go.sum @@ -1,4 +1,6 @@ github.com/mattn/go-isatty v0.0.5 h1:tHXDdz1cpzGaovsTB+TVB8q90WEokoVmfMqoVcrLUgw= github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.8 h1:HLtExJ+uU2HOZ+wI0Tt5DtUDrx8yhUqDcp7fYERX4CE= +github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223 h1:DH4skfRX4EBpamg7iV4ZlCpblAHI6s6TDM39bFZumv8= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= diff --git a/vendor/github.com/mattn/go-isatty/go.mod b/vendor/github.com/mattn/go-isatty/go.mod index f310320c3..a8ddf404f 100644 --- a/vendor/github.com/mattn/go-isatty/go.mod +++ b/vendor/github.com/mattn/go-isatty/go.mod @@ -1,3 +1,5 @@ module github.com/mattn/go-isatty -require golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223 +require golang.org/x/sys v0.0.0-20191008105621-543471e840be + +go 1.14 diff --git a/vendor/github.com/mattn/go-isatty/go.sum b/vendor/github.com/mattn/go-isatty/go.sum index 426c8973c..c141fc53a 100644 --- a/vendor/github.com/mattn/go-isatty/go.sum +++ b/vendor/github.com/mattn/go-isatty/go.sum @@ -1,2 +1,4 @@ -golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223 h1:DH4skfRX4EBpamg7iV4ZlCpblAHI6s6TDM39bFZumv8= -golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod 
h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a h1:aYOabOQFp6Vj6W1F80affTUvO9UxmJRx8K0gsfABByQ= +golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191008105621-543471e840be h1:QAcqgptGM8IQBC9K/RC4o+O9YmqEm0diQn9QmZw/0mU= +golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= diff --git a/vendor/github.com/mattn/go-isatty/isatty_plan9.go b/vendor/github.com/mattn/go-isatty/isatty_plan9.go new file mode 100644 index 000000000..bc0a70920 --- /dev/null +++ b/vendor/github.com/mattn/go-isatty/isatty_plan9.go @@ -0,0 +1,22 @@ +// +build plan9 + +package isatty + +import ( + "syscall" +) + +// IsTerminal returns true if the given file descriptor is a terminal. +func IsTerminal(fd uintptr) bool { + path, err := syscall.Fd2path(fd) + if err != nil { + return false + } + return path == "/dev/cons" || path == "/mnt/term/dev/cons" +} + +// IsCygwinTerminal return true if the file descriptor is a cygwin or msys2 +// terminal. This is also always false on this environment. +func IsCygwinTerminal(fd uintptr) bool { + return false +} diff --git a/vendor/github.com/mattn/go-isatty/isatty_windows.go b/vendor/github.com/mattn/go-isatty/isatty_windows.go index af51cbcaa..1fa869154 100644 --- a/vendor/github.com/mattn/go-isatty/isatty_windows.go +++ b/vendor/github.com/mattn/go-isatty/isatty_windows.go @@ -4,6 +4,7 @@ package isatty import ( + "errors" "strings" "syscall" "unicode/utf16" @@ -11,15 +12,18 @@ import ( ) const ( - fileNameInfo uintptr = 2 - fileTypePipe = 3 + objectNameInfo uintptr = 1 + fileNameInfo = 2 + fileTypePipe = 3 ) var ( kernel32 = syscall.NewLazyDLL("kernel32.dll") + ntdll = syscall.NewLazyDLL("ntdll.dll") procGetConsoleMode = kernel32.NewProc("GetConsoleMode") procGetFileInformationByHandleEx = kernel32.NewProc("GetFileInformationByHandleEx") procGetFileType = kernel32.NewProc("GetFileType") + procNtQueryObject = ntdll.NewProc("NtQueryObject") ) func init() { @@ -45,7 +49,10 @@ func isCygwinPipeName(name string) bool { return false } - if token[0] != `\msys` && token[0] != `\cygwin` { + if token[0] != `\msys` && + token[0] != `\cygwin` && + token[0] != `\Device\NamedPipe\msys` && + token[0] != `\Device\NamedPipe\cygwin` { return false } @@ -68,11 +75,35 @@ func isCygwinPipeName(name string) bool { return true } +// getFileNameByHandle use the undocomented ntdll NtQueryObject to get file full name from file handler +// since GetFileInformationByHandleEx is not avilable under windows Vista and still some old fashion +// guys are using Windows XP, this is a workaround for those guys, it will also work on system from +// Windows vista to 10 +// see https://stackoverflow.com/a/18792477 for details +func getFileNameByHandle(fd uintptr) (string, error) { + if procNtQueryObject == nil { + return "", errors.New("ntdll.dll: NtQueryObject not supported") + } + + var buf [4 + syscall.MAX_PATH]uint16 + var result int + r, _, e := syscall.Syscall6(procNtQueryObject.Addr(), 5, + fd, objectNameInfo, uintptr(unsafe.Pointer(&buf)), uintptr(2*len(buf)), uintptr(unsafe.Pointer(&result)), 0) + if r != 0 { + return "", e + } + return string(utf16.Decode(buf[4 : 4+buf[0]/2])), nil +} + // IsCygwinTerminal() return true if the file descriptor is a cygwin or msys2 // terminal. 
func IsCygwinTerminal(fd uintptr) bool { if procGetFileInformationByHandleEx == nil { - return false + name, err := getFileNameByHandle(fd) + if err != nil { + return false + } + return isCygwinPipeName(name) } // Cygwin/msys's pty is a pipe. diff --git a/vendor/github.com/prometheus/procfs/go.mod b/vendor/github.com/prometheus/procfs/go.mod index e89ee6c90..14631543a 100644 --- a/vendor/github.com/prometheus/procfs/go.mod +++ b/vendor/github.com/prometheus/procfs/go.mod @@ -1 +1,3 @@ module github.com/prometheus/procfs + +go 1.12 diff --git a/vendor/github.com/russross/blackfriday/v2/.gitignore b/vendor/github.com/russross/blackfriday/v2/.gitignore new file mode 100644 index 000000000..75623dccc --- /dev/null +++ b/vendor/github.com/russross/blackfriday/v2/.gitignore @@ -0,0 +1,8 @@ +*.out +*.swp +*.8 +*.6 +_obj +_test* +markdown +tags diff --git a/vendor/github.com/russross/blackfriday/v2/.travis.yml b/vendor/github.com/russross/blackfriday/v2/.travis.yml new file mode 100644 index 000000000..b0b525a5a --- /dev/null +++ b/vendor/github.com/russross/blackfriday/v2/.travis.yml @@ -0,0 +1,17 @@ +sudo: false +language: go +go: + - "1.10.x" + - "1.11.x" + - tip +matrix: + fast_finish: true + allow_failures: + - go: tip +install: + - # Do nothing. This is needed to prevent default install action "go get -t -v ./..." from happening here (we want it to happen inside script step). +script: + - go get -t -v ./... + - diff -u <(echo -n) <(gofmt -d -s .) + - go tool vet . + - go test -v ./... diff --git a/vendor/github.com/russross/blackfriday/v2/LICENSE.txt b/vendor/github.com/russross/blackfriday/v2/LICENSE.txt new file mode 100644 index 000000000..2885af360 --- /dev/null +++ b/vendor/github.com/russross/blackfriday/v2/LICENSE.txt @@ -0,0 +1,29 @@ +Blackfriday is distributed under the Simplified BSD License: + +> Copyright © 2011 Russ Ross +> All rights reserved. +> +> Redistribution and use in source and binary forms, with or without +> modification, are permitted provided that the following conditions +> are met: +> +> 1. Redistributions of source code must retain the above copyright +> notice, this list of conditions and the following disclaimer. +> +> 2. Redistributions in binary form must reproduce the above +> copyright notice, this list of conditions and the following +> disclaimer in the documentation and/or other materials provided with +> the distribution. +> +> THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +> "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +> LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +> FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +> COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +> INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +> BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +> LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +> CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +> LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +> ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +> POSSIBILITY OF SUCH DAMAGE. 
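The go-isatty hunks above add a Plan 9 implementation and, on Windows, an `NtQueryObject`-based fallback so Cygwin/MSYS2 pipes are still detected when `GetFileInformationByHandleEx` is unavailable. A small caller-side sketch of the public API those changes feed into; it is illustrative only and simply assumes the vendored `github.com/mattn/go-isatty` import path:

```go
package main

import (
	"fmt"
	"os"

	"github.com/mattn/go-isatty"
)

func main() {
	fd := os.Stdout.Fd()
	switch {
	case isatty.IsTerminal(fd):
		fmt.Println("stdout is a terminal")
	case isatty.IsCygwinTerminal(fd):
		// On Windows this path now also covers the NtQueryObject fallback
		// added above for systems without GetFileInformationByHandleEx.
		fmt.Println("stdout is a Cygwin/MSYS2 pseudo terminal")
	default:
		fmt.Println("stdout is redirected or piped")
	}
}
```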
diff --git a/vendor/github.com/russross/blackfriday/v2/README.md b/vendor/github.com/russross/blackfriday/v2/README.md new file mode 100644 index 000000000..d5a8649bd --- /dev/null +++ b/vendor/github.com/russross/blackfriday/v2/README.md @@ -0,0 +1,291 @@ +Blackfriday [![Build Status](https://travis-ci.org/russross/blackfriday.svg?branch=master)](https://travis-ci.org/russross/blackfriday) +=========== + +Blackfriday is a [Markdown][1] processor implemented in [Go][2]. It +is paranoid about its input (so you can safely feed it user-supplied +data), it is fast, it supports common extensions (tables, smart +punctuation substitutions, etc.), and it is safe for all utf-8 +(unicode) input. + +HTML output is currently supported, along with Smartypants +extensions. + +It started as a translation from C of [Sundown][3]. + + +Installation +------------ + +Blackfriday is compatible with any modern Go release. With Go 1.7 and git +installed: + + go get gopkg.in/russross/blackfriday.v2 + +will download, compile, and install the package into your `$GOPATH` +directory hierarchy. Alternatively, you can achieve the same if you +import it into a project: + + import "gopkg.in/russross/blackfriday.v2" + +and `go get` without parameters. + + +Versions +-------- + +Currently maintained and recommended version of Blackfriday is `v2`. It's being +developed on its own branch: https://github.com/russross/blackfriday/tree/v2 and the +documentation is available at +https://godoc.org/gopkg.in/russross/blackfriday.v2. + +It is `go get`-able via via [gopkg.in][6] at `gopkg.in/russross/blackfriday.v2`, +but we highly recommend using package management tool like [dep][7] or +[Glide][8] and make use of semantic versioning. With package management you +should import `github.com/russross/blackfriday` and specify that you're using +version 2.0.0. + +Version 2 offers a number of improvements over v1: + +* Cleaned up API +* A separate call to [`Parse`][4], which produces an abstract syntax tree for + the document +* Latest bug fixes +* Flexibility to easily add your own rendering extensions + +Potential drawbacks: + +* Our benchmarks show v2 to be slightly slower than v1. Currently in the + ballpark of around 15%. +* API breakage. If you can't afford modifying your code to adhere to the new API + and don't care too much about the new features, v2 is probably not for you. +* Several bug fixes are trailing behind and still need to be forward-ported to + v2. See issue [#348](https://github.com/russross/blackfriday/issues/348) for + tracking. + +Usage +----- + +For the most sensible markdown processing, it is as simple as getting your input +into a byte slice and calling: + +```go +output := blackfriday.Run(input) +``` + +Your input will be parsed and the output rendered with a set of most popular +extensions enabled. If you want the most basic feature set, corresponding with +the bare Markdown specification, use: + +```go +output := blackfriday.Run(input, blackfriday.WithNoExtensions()) +``` + +### Sanitize untrusted content + +Blackfriday itself does nothing to protect against malicious content. If you are +dealing with user-supplied markdown, we recommend running Blackfriday's output +through HTML sanitizer such as [Bluemonday][5]. + +Here's an example of simple usage of Blackfriday together with Bluemonday: + +```go +import ( + "github.com/microcosm-cc/bluemonday" + "github.com/russross/blackfriday" +) + +// ... 
+unsafe := blackfriday.Run(input) +html := bluemonday.UGCPolicy().SanitizeBytes(unsafe) +``` + +### Custom options + +If you want to customize the set of options, use `blackfriday.WithExtensions`, +`blackfriday.WithRenderer` and `blackfriday.WithRefOverride`. + +You can also check out `blackfriday-tool` for a more complete example +of how to use it. Download and install it using: + + go get github.com/russross/blackfriday-tool + +This is a simple command-line tool that allows you to process a +markdown file using a standalone program. You can also browse the +source directly on github if you are just looking for some example +code: + +* + +Note that if you have not already done so, installing +`blackfriday-tool` will be sufficient to download and install +blackfriday in addition to the tool itself. The tool binary will be +installed in `$GOPATH/bin`. This is a statically-linked binary that +can be copied to wherever you need it without worrying about +dependencies and library versions. + + +Features +-------- + +All features of Sundown are supported, including: + +* **Compatibility**. The Markdown v1.0.3 test suite passes with + the `--tidy` option. Without `--tidy`, the differences are + mostly in whitespace and entity escaping, where blackfriday is + more consistent and cleaner. + +* **Common extensions**, including table support, fenced code + blocks, autolinks, strikethroughs, non-strict emphasis, etc. + +* **Safety**. Blackfriday is paranoid when parsing, making it safe + to feed untrusted user input without fear of bad things + happening. The test suite stress tests this and there are no + known inputs that make it crash. If you find one, please let me + know and send me the input that does it. + + NOTE: "safety" in this context means *runtime safety only*. In order to + protect yourself against JavaScript injection in untrusted content, see + [this example](https://github.com/russross/blackfriday#sanitize-untrusted-content). + +* **Fast processing**. It is fast enough to render on-demand in + most web applications without having to cache the output. + +* **Thread safety**. You can run multiple parsers in different + goroutines without ill effect. There is no dependence on global + shared state. + +* **Minimal dependencies**. Blackfriday only depends on standard + library packages in Go. The source code is pretty + self-contained, so it is easy to add to any project, including + Google App Engine projects. + +* **Standards compliant**. Output successfully validates using the + W3C validation tool for HTML 4.01 and XHTML 1.0 Transitional. + + +Extensions +---------- + +In addition to the standard markdown syntax, this package +implements the following extensions: + +* **Intra-word emphasis supression**. The `_` character is + commonly used inside words when discussing code, so having + markdown interpret it as an emphasis command is usually the + wrong thing. Blackfriday lets you treat all emphasis markers as + normal characters when they occur inside a word. + +* **Tables**. Tables can be created by drawing them in the input + using a simple syntax: + + ``` + Name | Age + --------|------ + Bob | 27 + Alice | 23 + ``` + +* **Fenced code blocks**. In addition to the normal 4-space + indentation to mark code blocks, you can explicitly mark them + and supply a language (to make syntax highlighting simple). 
Just + mark it like this: + + ```go + func getTrue() bool { + return true + } + ``` + + You can use 3 or more backticks to mark the beginning of the + block, and the same number to mark the end of the block. + +* **Definition lists**. A simple definition list is made of a single-line + term followed by a colon and the definition for that term. + + Cat + : Fluffy animal everyone likes + + Internet + : Vector of transmission for pictures of cats + + Terms must be separated from the previous definition by a blank line. + +* **Footnotes**. A marker in the text that will become a superscript number; + a footnote definition that will be placed in a list of footnotes at the + end of the document. A footnote looks like this: + + This is a footnote.[^1] + + [^1]: the footnote text. + +* **Autolinking**. Blackfriday can find URLs that have not been + explicitly marked as links and turn them into links. + +* **Strikethrough**. Use two tildes (`~~`) to mark text that + should be crossed out. + +* **Hard line breaks**. With this extension enabled newlines in the input + translate into line breaks in the output. This extension is off by default. + +* **Smart quotes**. Smartypants-style punctuation substitution is + supported, turning normal double- and single-quote marks into + curly quotes, etc. + +* **LaTeX-style dash parsing** is an additional option, where `--` + is translated into `–`, and `---` is translated into + `—`. This differs from most smartypants processors, which + turn a single hyphen into an ndash and a double hyphen into an + mdash. + +* **Smart fractions**, where anything that looks like a fraction + is translated into suitable HTML (instead of just a few special + cases like most smartypant processors). For example, `4/5` + becomes `45`, which renders as + 45. + + +Other renderers +--------------- + +Blackfriday is structured to allow alternative rendering engines. Here +are a few of note: + +* [github_flavored_markdown](https://godoc.org/github.com/shurcooL/github_flavored_markdown): + provides a GitHub Flavored Markdown renderer with fenced code block + highlighting, clickable heading anchor links. + + It's not customizable, and its goal is to produce HTML output + equivalent to the [GitHub Markdown API endpoint](https://developer.github.com/v3/markdown/#render-a-markdown-document-in-raw-mode), + except the rendering is performed locally. + +* [markdownfmt](https://github.com/shurcooL/markdownfmt): like gofmt, + but for markdown. + +* [LaTeX output](https://github.com/Ambrevar/Blackfriday-LaTeX): + renders output as LaTeX. + +* [Blackfriday-Confluence](https://github.com/kentaro-m/blackfriday-confluence): provides a [Confluence Wiki Markup](https://confluence.atlassian.com/doc/confluence-wiki-markup-251003035.html) renderer. + + +Todo +---- + +* More unit testing +* Improve unicode support. It does not understand all unicode + rules (about what constitutes a letter, a punctuation symbol, + etc.), so it may fail to detect word boundaries correctly in + some instances. It is safe on all utf-8 input. 
+ + +License +------- + +[Blackfriday is distributed under the Simplified BSD License](LICENSE.txt) + + + [1]: https://daringfireball.net/projects/markdown/ "Markdown" + [2]: https://golang.org/ "Go Language" + [3]: https://github.com/vmg/sundown "Sundown" + [4]: https://godoc.org/gopkg.in/russross/blackfriday.v2#Parse "Parse func" + [5]: https://github.com/microcosm-cc/bluemonday "Bluemonday" + [6]: https://labix.org/gopkg.in "gopkg.in" diff --git a/vendor/github.com/russross/blackfriday/v2/block.go b/vendor/github.com/russross/blackfriday/v2/block.go new file mode 100644 index 000000000..b8607474e --- /dev/null +++ b/vendor/github.com/russross/blackfriday/v2/block.go @@ -0,0 +1,1590 @@ +// +// Blackfriday Markdown Processor +// Available at http://github.com/russross/blackfriday +// +// Copyright © 2011 Russ Ross . +// Distributed under the Simplified BSD License. +// See README.md for details. +// + +// +// Functions to parse block-level elements. +// + +package blackfriday + +import ( + "bytes" + "html" + "regexp" + "strings" + + "github.com/shurcooL/sanitized_anchor_name" +) + +const ( + charEntity = "&(?:#x[a-f0-9]{1,8}|#[0-9]{1,8}|[a-z][a-z0-9]{1,31});" + escapable = "[!\"#$%&'()*+,./:;<=>?@[\\\\\\]^_`{|}~-]" +) + +var ( + reBackslashOrAmp = regexp.MustCompile("[\\&]") + reEntityOrEscapedChar = regexp.MustCompile("(?i)\\\\" + escapable + "|" + charEntity) +) + +// Parse block-level data. +// Note: this function and many that it calls assume that +// the input buffer ends with a newline. +func (p *Markdown) block(data []byte) { + // this is called recursively: enforce a maximum depth + if p.nesting >= p.maxNesting { + return + } + p.nesting++ + + // parse out one block-level construct at a time + for len(data) > 0 { + // prefixed heading: + // + // # Heading 1 + // ## Heading 2 + // ... + // ###### Heading 6 + if p.isPrefixHeading(data) { + data = data[p.prefixHeading(data):] + continue + } + + // block of preformatted HTML: + // + //
<div>
+		//        ...
+		//    </div>
+ if data[0] == '<' { + if i := p.html(data, true); i > 0 { + data = data[i:] + continue + } + } + + // title block + // + // % stuff + // % more stuff + // % even more stuff + if p.extensions&Titleblock != 0 { + if data[0] == '%' { + if i := p.titleBlock(data, true); i > 0 { + data = data[i:] + continue + } + } + } + + // blank lines. note: returns the # of bytes to skip + if i := p.isEmpty(data); i > 0 { + data = data[i:] + continue + } + + // indented code block: + // + // func max(a, b int) int { + // if a > b { + // return a + // } + // return b + // } + if p.codePrefix(data) > 0 { + data = data[p.code(data):] + continue + } + + // fenced code block: + // + // ``` go + // func fact(n int) int { + // if n <= 1 { + // return n + // } + // return n * fact(n-1) + // } + // ``` + if p.extensions&FencedCode != 0 { + if i := p.fencedCodeBlock(data, true); i > 0 { + data = data[i:] + continue + } + } + + // horizontal rule: + // + // ------ + // or + // ****** + // or + // ______ + if p.isHRule(data) { + p.addBlock(HorizontalRule, nil) + var i int + for i = 0; i < len(data) && data[i] != '\n'; i++ { + } + data = data[i:] + continue + } + + // block quote: + // + // > A big quote I found somewhere + // > on the web + if p.quotePrefix(data) > 0 { + data = data[p.quote(data):] + continue + } + + // table: + // + // Name | Age | Phone + // ------|-----|--------- + // Bob | 31 | 555-1234 + // Alice | 27 | 555-4321 + if p.extensions&Tables != 0 { + if i := p.table(data); i > 0 { + data = data[i:] + continue + } + } + + // an itemized/unordered list: + // + // * Item 1 + // * Item 2 + // + // also works with + or - + if p.uliPrefix(data) > 0 { + data = data[p.list(data, 0):] + continue + } + + // a numbered/ordered list: + // + // 1. Item 1 + // 2. Item 2 + if p.oliPrefix(data) > 0 { + data = data[p.list(data, ListTypeOrdered):] + continue + } + + // definition lists: + // + // Term 1 + // : Definition a + // : Definition b + // + // Term 2 + // : Definition c + if p.extensions&DefinitionLists != 0 { + if p.dliPrefix(data) > 0 { + data = data[p.list(data, ListTypeDefinition):] + continue + } + } + + // anything else must look like a normal paragraph + // note: this finds underlined headings, too + data = data[p.paragraph(data):] + } + + p.nesting-- +} + +func (p *Markdown) addBlock(typ NodeType, content []byte) *Node { + p.closeUnmatchedBlocks() + container := p.addChild(typ, 0) + container.content = content + return container +} + +func (p *Markdown) isPrefixHeading(data []byte) bool { + if data[0] != '#' { + return false + } + + if p.extensions&SpaceHeadings != 0 { + level := 0 + for level < 6 && level < len(data) && data[level] == '#' { + level++ + } + if level == len(data) || data[level] != ' ' { + return false + } + } + return true +} + +func (p *Markdown) prefixHeading(data []byte) int { + level := 0 + for level < 6 && level < len(data) && data[level] == '#' { + level++ + } + i := skipChar(data, level, ' ') + end := skipUntilChar(data, i, '\n') + skip := end + id := "" + if p.extensions&HeadingIDs != 0 { + j, k := 0, 0 + // find start/end of heading id + for j = i; j < end-1 && (data[j] != '{' || data[j+1] != '#'); j++ { + } + for k = j + 1; k < end && data[k] != '}'; k++ { + } + // extract heading id iff found + if j < end && k < end { + id = string(data[j+2 : k]) + end = j + skip = k + 1 + for end > 0 && data[end-1] == ' ' { + end-- + } + } + } + for end > 0 && data[end-1] == '#' { + if isBackslashEscaped(data, end-1) { + break + } + end-- + } + for end > 0 && data[end-1] == ' ' { + end-- + 
} + if end > i { + if id == "" && p.extensions&AutoHeadingIDs != 0 { + id = sanitized_anchor_name.Create(string(data[i:end])) + } + block := p.addBlock(Heading, data[i:end]) + block.HeadingID = id + block.Level = level + } + return skip +} + +func (p *Markdown) isUnderlinedHeading(data []byte) int { + // test of level 1 heading + if data[0] == '=' { + i := skipChar(data, 1, '=') + i = skipChar(data, i, ' ') + if i < len(data) && data[i] == '\n' { + return 1 + } + return 0 + } + + // test of level 2 heading + if data[0] == '-' { + i := skipChar(data, 1, '-') + i = skipChar(data, i, ' ') + if i < len(data) && data[i] == '\n' { + return 2 + } + return 0 + } + + return 0 +} + +func (p *Markdown) titleBlock(data []byte, doRender bool) int { + if data[0] != '%' { + return 0 + } + splitData := bytes.Split(data, []byte("\n")) + var i int + for idx, b := range splitData { + if !bytes.HasPrefix(b, []byte("%")) { + i = idx // - 1 + break + } + } + + data = bytes.Join(splitData[0:i], []byte("\n")) + consumed := len(data) + data = bytes.TrimPrefix(data, []byte("% ")) + data = bytes.Replace(data, []byte("\n% "), []byte("\n"), -1) + block := p.addBlock(Heading, data) + block.Level = 1 + block.IsTitleblock = true + + return consumed +} + +func (p *Markdown) html(data []byte, doRender bool) int { + var i, j int + + // identify the opening tag + if data[0] != '<' { + return 0 + } + curtag, tagfound := p.htmlFindTag(data[1:]) + + // handle special cases + if !tagfound { + // check for an HTML comment + if size := p.htmlComment(data, doRender); size > 0 { + return size + } + + // check for an
tag + if size := p.htmlHr(data, doRender); size > 0 { + return size + } + + // no special case recognized + return 0 + } + + // look for an unindented matching closing tag + // followed by a blank line + found := false + /* + closetag := []byte("\n") + j = len(curtag) + 1 + for !found { + // scan for a closing tag at the beginning of a line + if skip := bytes.Index(data[j:], closetag); skip >= 0 { + j += skip + len(closetag) + } else { + break + } + + // see if it is the only thing on the line + if skip := p.isEmpty(data[j:]); skip > 0 { + // see if it is followed by a blank line/eof + j += skip + if j >= len(data) { + found = true + i = j + } else { + if skip := p.isEmpty(data[j:]); skip > 0 { + j += skip + found = true + i = j + } + } + } + } + */ + + // if not found, try a second pass looking for indented match + // but not if tag is "ins" or "del" (following original Markdown.pl) + if !found && curtag != "ins" && curtag != "del" { + i = 1 + for i < len(data) { + i++ + for i < len(data) && !(data[i-1] == '<' && data[i] == '/') { + i++ + } + + if i+2+len(curtag) >= len(data) { + break + } + + j = p.htmlFindEnd(curtag, data[i-1:]) + + if j > 0 { + i += j - 1 + found = true + break + } + } + } + + if !found { + return 0 + } + + // the end of the block has been found + if doRender { + // trim newlines + end := i + for end > 0 && data[end-1] == '\n' { + end-- + } + finalizeHTMLBlock(p.addBlock(HTMLBlock, data[:end])) + } + + return i +} + +func finalizeHTMLBlock(block *Node) { + block.Literal = block.content + block.content = nil +} + +// HTML comment, lax form +func (p *Markdown) htmlComment(data []byte, doRender bool) int { + i := p.inlineHTMLComment(data) + // needs to end with a blank line + if j := p.isEmpty(data[i:]); j > 0 { + size := i + j + if doRender { + // trim trailing newlines + end := size + for end > 0 && data[end-1] == '\n' { + end-- + } + block := p.addBlock(HTMLBlock, data[:end]) + finalizeHTMLBlock(block) + } + return size + } + return 0 +} + +// HR, which is the only self-closing block tag considered +func (p *Markdown) htmlHr(data []byte, doRender bool) int { + if len(data) < 4 { + return 0 + } + if data[0] != '<' || (data[1] != 'h' && data[1] != 'H') || (data[2] != 'r' && data[2] != 'R') { + return 0 + } + if data[3] != ' ' && data[3] != '/' && data[3] != '>' { + // not an
tag after all; at least not a valid one + return 0 + } + i := 3 + for i < len(data) && data[i] != '>' && data[i] != '\n' { + i++ + } + if i < len(data) && data[i] == '>' { + i++ + if j := p.isEmpty(data[i:]); j > 0 { + size := i + j + if doRender { + // trim newlines + end := size + for end > 0 && data[end-1] == '\n' { + end-- + } + finalizeHTMLBlock(p.addBlock(HTMLBlock, data[:end])) + } + return size + } + } + return 0 +} + +func (p *Markdown) htmlFindTag(data []byte) (string, bool) { + i := 0 + for i < len(data) && isalnum(data[i]) { + i++ + } + key := string(data[:i]) + if _, ok := blockTags[key]; ok { + return key, true + } + return "", false +} + +func (p *Markdown) htmlFindEnd(tag string, data []byte) int { + // assume data[0] == '<' && data[1] == '/' already tested + if tag == "hr" { + return 2 + } + // check if tag is a match + closetag := []byte("") + if !bytes.HasPrefix(data, closetag) { + return 0 + } + i := len(closetag) + + // check that the rest of the line is blank + skip := 0 + if skip = p.isEmpty(data[i:]); skip == 0 { + return 0 + } + i += skip + skip = 0 + + if i >= len(data) { + return i + } + + if p.extensions&LaxHTMLBlocks != 0 { + return i + } + if skip = p.isEmpty(data[i:]); skip == 0 { + // following line must be blank + return 0 + } + + return i + skip +} + +func (*Markdown) isEmpty(data []byte) int { + // it is okay to call isEmpty on an empty buffer + if len(data) == 0 { + return 0 + } + + var i int + for i = 0; i < len(data) && data[i] != '\n'; i++ { + if data[i] != ' ' && data[i] != '\t' { + return 0 + } + } + if i < len(data) && data[i] == '\n' { + i++ + } + return i +} + +func (*Markdown) isHRule(data []byte) bool { + i := 0 + + // skip up to three spaces + for i < 3 && data[i] == ' ' { + i++ + } + + // look at the hrule char + if data[i] != '*' && data[i] != '-' && data[i] != '_' { + return false + } + c := data[i] + + // the whole line must be the char or whitespace + n := 0 + for i < len(data) && data[i] != '\n' { + switch { + case data[i] == c: + n++ + case data[i] != ' ': + return false + } + i++ + } + + return n >= 3 +} + +// isFenceLine checks if there's a fence line (e.g., ``` or ``` go) at the beginning of data, +// and returns the end index if so, or 0 otherwise. It also returns the marker found. +// If info is not nil, it gets set to the syntax specified in the fence line. +func isFenceLine(data []byte, info *string, oldmarker string) (end int, marker string) { + i, size := 0, 0 + + // skip up to three spaces + for i < len(data) && i < 3 && data[i] == ' ' { + i++ + } + + // check for the marker characters: ~ or ` + if i >= len(data) { + return 0, "" + } + if data[i] != '~' && data[i] != '`' { + return 0, "" + } + + c := data[i] + + // the whole line must be the same char or whitespace + for i < len(data) && data[i] == c { + size++ + i++ + } + + // the marker char must occur at least 3 times + if size < 3 { + return 0, "" + } + marker = string(data[i-size : i]) + + // if this is the end marker, it must match the beginning marker + if oldmarker != "" && marker != oldmarker { + return 0, "" + } + + // TODO(shurcooL): It's probably a good idea to simplify the 2 code paths here + // into one, always get the info string, and discard it if the caller doesn't care. 
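+	// What follows reads the optional info string after the fence marker:
+	// either a bare word such as "go" or a "{...}" group, with surrounding
+	// whitespace trimmed before it is stored in *info.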
+ if info != nil { + infoLength := 0 + i = skipChar(data, i, ' ') + + if i >= len(data) { + if i == len(data) { + return i, marker + } + return 0, "" + } + + infoStart := i + + if data[i] == '{' { + i++ + infoStart++ + + for i < len(data) && data[i] != '}' && data[i] != '\n' { + infoLength++ + i++ + } + + if i >= len(data) || data[i] != '}' { + return 0, "" + } + + // strip all whitespace at the beginning and the end + // of the {} block + for infoLength > 0 && isspace(data[infoStart]) { + infoStart++ + infoLength-- + } + + for infoLength > 0 && isspace(data[infoStart+infoLength-1]) { + infoLength-- + } + i++ + i = skipChar(data, i, ' ') + } else { + for i < len(data) && !isverticalspace(data[i]) { + infoLength++ + i++ + } + } + + *info = strings.TrimSpace(string(data[infoStart : infoStart+infoLength])) + } + + if i == len(data) { + return i, marker + } + if i > len(data) || data[i] != '\n' { + return 0, "" + } + return i + 1, marker // Take newline into account. +} + +// fencedCodeBlock returns the end index if data contains a fenced code block at the beginning, +// or 0 otherwise. It writes to out if doRender is true, otherwise it has no side effects. +// If doRender is true, a final newline is mandatory to recognize the fenced code block. +func (p *Markdown) fencedCodeBlock(data []byte, doRender bool) int { + var info string + beg, marker := isFenceLine(data, &info, "") + if beg == 0 || beg >= len(data) { + return 0 + } + + var work bytes.Buffer + work.Write([]byte(info)) + work.WriteByte('\n') + + for { + // safe to assume beg < len(data) + + // check for the end of the code block + fenceEnd, _ := isFenceLine(data[beg:], nil, marker) + if fenceEnd != 0 { + beg += fenceEnd + break + } + + // copy the current line + end := skipUntilChar(data, beg, '\n') + 1 + + // did we reach the end of the buffer without a closing marker? 
+ if end >= len(data) { + return 0 + } + + // verbatim copy to the working buffer + if doRender { + work.Write(data[beg:end]) + } + beg = end + } + + if doRender { + block := p.addBlock(CodeBlock, work.Bytes()) // TODO: get rid of temp buffer + block.IsFenced = true + finalizeCodeBlock(block) + } + + return beg +} + +func unescapeChar(str []byte) []byte { + if str[0] == '\\' { + return []byte{str[1]} + } + return []byte(html.UnescapeString(string(str))) +} + +func unescapeString(str []byte) []byte { + if reBackslashOrAmp.Match(str) { + return reEntityOrEscapedChar.ReplaceAllFunc(str, unescapeChar) + } + return str +} + +func finalizeCodeBlock(block *Node) { + if block.IsFenced { + newlinePos := bytes.IndexByte(block.content, '\n') + firstLine := block.content[:newlinePos] + rest := block.content[newlinePos+1:] + block.Info = unescapeString(bytes.Trim(firstLine, "\n")) + block.Literal = rest + } else { + block.Literal = block.content + } + block.content = nil +} + +func (p *Markdown) table(data []byte) int { + table := p.addBlock(Table, nil) + i, columns := p.tableHeader(data) + if i == 0 { + p.tip = table.Parent + table.Unlink() + return 0 + } + + p.addBlock(TableBody, nil) + + for i < len(data) { + pipes, rowStart := 0, i + for ; i < len(data) && data[i] != '\n'; i++ { + if data[i] == '|' { + pipes++ + } + } + + if pipes == 0 { + i = rowStart + break + } + + // include the newline in data sent to tableRow + if i < len(data) && data[i] == '\n' { + i++ + } + p.tableRow(data[rowStart:i], columns, false) + } + + return i +} + +// check if the specified position is preceded by an odd number of backslashes +func isBackslashEscaped(data []byte, i int) bool { + backslashes := 0 + for i-backslashes-1 >= 0 && data[i-backslashes-1] == '\\' { + backslashes++ + } + return backslashes&1 == 1 +} + +func (p *Markdown) tableHeader(data []byte) (size int, columns []CellAlignFlags) { + i := 0 + colCount := 1 + for i = 0; i < len(data) && data[i] != '\n'; i++ { + if data[i] == '|' && !isBackslashEscaped(data, i) { + colCount++ + } + } + + // doesn't look like a table header + if colCount == 1 { + return + } + + // include the newline in the data sent to tableRow + j := i + if j < len(data) && data[j] == '\n' { + j++ + } + header := data[:j] + + // column count ignores pipes at beginning or end of line + if data[0] == '|' { + colCount-- + } + if i > 2 && data[i-1] == '|' && !isBackslashEscaped(data, i-1) { + colCount-- + } + + columns = make([]CellAlignFlags, colCount) + + // move on to the header underline + i++ + if i >= len(data) { + return + } + + if data[i] == '|' && !isBackslashEscaped(data, i) { + i++ + } + i = skipChar(data, i, ' ') + + // each column header is of form: / *:?-+:? 
*|/ with # dashes + # colons >= 3 + // and trailing | optional on last column + col := 0 + for i < len(data) && data[i] != '\n' { + dashes := 0 + + if data[i] == ':' { + i++ + columns[col] |= TableAlignmentLeft + dashes++ + } + for i < len(data) && data[i] == '-' { + i++ + dashes++ + } + if i < len(data) && data[i] == ':' { + i++ + columns[col] |= TableAlignmentRight + dashes++ + } + for i < len(data) && data[i] == ' ' { + i++ + } + if i == len(data) { + return + } + // end of column test is messy + switch { + case dashes < 3: + // not a valid column + return + + case data[i] == '|' && !isBackslashEscaped(data, i): + // marker found, now skip past trailing whitespace + col++ + i++ + for i < len(data) && data[i] == ' ' { + i++ + } + + // trailing junk found after last column + if col >= colCount && i < len(data) && data[i] != '\n' { + return + } + + case (data[i] != '|' || isBackslashEscaped(data, i)) && col+1 < colCount: + // something else found where marker was required + return + + case data[i] == '\n': + // marker is optional for the last column + col++ + + default: + // trailing junk found after last column + return + } + } + if col != colCount { + return + } + + p.addBlock(TableHead, nil) + p.tableRow(header, columns, true) + size = i + if size < len(data) && data[size] == '\n' { + size++ + } + return +} + +func (p *Markdown) tableRow(data []byte, columns []CellAlignFlags, header bool) { + p.addBlock(TableRow, nil) + i, col := 0, 0 + + if data[i] == '|' && !isBackslashEscaped(data, i) { + i++ + } + + for col = 0; col < len(columns) && i < len(data); col++ { + for i < len(data) && data[i] == ' ' { + i++ + } + + cellStart := i + + for i < len(data) && (data[i] != '|' || isBackslashEscaped(data, i)) && data[i] != '\n' { + i++ + } + + cellEnd := i + + // skip the end-of-cell marker, possibly taking us past end of buffer + i++ + + for cellEnd > cellStart && cellEnd-1 < len(data) && data[cellEnd-1] == ' ' { + cellEnd-- + } + + cell := p.addBlock(TableCell, data[cellStart:cellEnd]) + cell.IsHeader = header + cell.Align = columns[col] + } + + // pad it out with empty columns to get the right number + for ; col < len(columns); col++ { + cell := p.addBlock(TableCell, nil) + cell.IsHeader = header + cell.Align = columns[col] + } + + // silently ignore rows with too many cells +} + +// returns blockquote prefix length +func (p *Markdown) quotePrefix(data []byte) int { + i := 0 + for i < 3 && i < len(data) && data[i] == ' ' { + i++ + } + if i < len(data) && data[i] == '>' { + if i+1 < len(data) && data[i+1] == ' ' { + return i + 2 + } + return i + 1 + } + return 0 +} + +// blockquote ends with at least one blank line +// followed by something without a blockquote prefix +func (p *Markdown) terminateBlockquote(data []byte, beg, end int) bool { + if p.isEmpty(data[beg:]) <= 0 { + return false + } + if end >= len(data) { + return true + } + return p.quotePrefix(data[end:]) == 0 && p.isEmpty(data[end:]) == 0 +} + +// parse a blockquote fragment +func (p *Markdown) quote(data []byte) int { + block := p.addBlock(BlockQuote, nil) + var raw bytes.Buffer + beg, end := 0, 0 + for beg < len(data) { + end = beg + // Step over whole lines, collecting them. 
While doing that, check for + // fenced code and if one's found, incorporate it altogether, + // irregardless of any contents inside it + for end < len(data) && data[end] != '\n' { + if p.extensions&FencedCode != 0 { + if i := p.fencedCodeBlock(data[end:], false); i > 0 { + // -1 to compensate for the extra end++ after the loop: + end += i - 1 + break + } + } + end++ + } + if end < len(data) && data[end] == '\n' { + end++ + } + if pre := p.quotePrefix(data[beg:]); pre > 0 { + // skip the prefix + beg += pre + } else if p.terminateBlockquote(data, beg, end) { + break + } + // this line is part of the blockquote + raw.Write(data[beg:end]) + beg = end + } + p.block(raw.Bytes()) + p.finalize(block) + return end +} + +// returns prefix length for block code +func (p *Markdown) codePrefix(data []byte) int { + if len(data) >= 1 && data[0] == '\t' { + return 1 + } + if len(data) >= 4 && data[0] == ' ' && data[1] == ' ' && data[2] == ' ' && data[3] == ' ' { + return 4 + } + return 0 +} + +func (p *Markdown) code(data []byte) int { + var work bytes.Buffer + + i := 0 + for i < len(data) { + beg := i + for i < len(data) && data[i] != '\n' { + i++ + } + if i < len(data) && data[i] == '\n' { + i++ + } + + blankline := p.isEmpty(data[beg:i]) > 0 + if pre := p.codePrefix(data[beg:i]); pre > 0 { + beg += pre + } else if !blankline { + // non-empty, non-prefixed line breaks the pre + i = beg + break + } + + // verbatim copy to the working buffer + if blankline { + work.WriteByte('\n') + } else { + work.Write(data[beg:i]) + } + } + + // trim all the \n off the end of work + workbytes := work.Bytes() + eol := len(workbytes) + for eol > 0 && workbytes[eol-1] == '\n' { + eol-- + } + if eol != len(workbytes) { + work.Truncate(eol) + } + + work.WriteByte('\n') + + block := p.addBlock(CodeBlock, work.Bytes()) // TODO: get rid of temp buffer + block.IsFenced = false + finalizeCodeBlock(block) + + return i +} + +// returns unordered list item prefix +func (p *Markdown) uliPrefix(data []byte) int { + i := 0 + // start with up to 3 spaces + for i < len(data) && i < 3 && data[i] == ' ' { + i++ + } + if i >= len(data)-1 { + return 0 + } + // need one of {'*', '+', '-'} followed by a space or a tab + if (data[i] != '*' && data[i] != '+' && data[i] != '-') || + (data[i+1] != ' ' && data[i+1] != '\t') { + return 0 + } + return i + 2 +} + +// returns ordered list item prefix +func (p *Markdown) oliPrefix(data []byte) int { + i := 0 + + // start with up to 3 spaces + for i < 3 && i < len(data) && data[i] == ' ' { + i++ + } + + // count the digits + start := i + for i < len(data) && data[i] >= '0' && data[i] <= '9' { + i++ + } + if start == i || i >= len(data)-1 { + return 0 + } + + // we need >= 1 digits followed by a dot and a space or a tab + if data[i] != '.' 
|| !(data[i+1] == ' ' || data[i+1] == '\t') { + return 0 + } + return i + 2 +} + +// returns definition list item prefix +func (p *Markdown) dliPrefix(data []byte) int { + if len(data) < 2 { + return 0 + } + i := 0 + // need a ':' followed by a space or a tab + if data[i] != ':' || !(data[i+1] == ' ' || data[i+1] == '\t') { + return 0 + } + for i < len(data) && data[i] == ' ' { + i++ + } + return i + 2 +} + +// parse ordered or unordered list block +func (p *Markdown) list(data []byte, flags ListType) int { + i := 0 + flags |= ListItemBeginningOfList + block := p.addBlock(List, nil) + block.ListFlags = flags + block.Tight = true + + for i < len(data) { + skip := p.listItem(data[i:], &flags) + if flags&ListItemContainsBlock != 0 { + block.ListData.Tight = false + } + i += skip + if skip == 0 || flags&ListItemEndOfList != 0 { + break + } + flags &= ^ListItemBeginningOfList + } + + above := block.Parent + finalizeList(block) + p.tip = above + return i +} + +// Returns true if the list item is not the same type as its parent list +func (p *Markdown) listTypeChanged(data []byte, flags *ListType) bool { + if p.dliPrefix(data) > 0 && *flags&ListTypeDefinition == 0 { + return true + } else if p.oliPrefix(data) > 0 && *flags&ListTypeOrdered == 0 { + return true + } else if p.uliPrefix(data) > 0 && (*flags&ListTypeOrdered != 0 || *flags&ListTypeDefinition != 0) { + return true + } + return false +} + +// Returns true if block ends with a blank line, descending if needed +// into lists and sublists. +func endsWithBlankLine(block *Node) bool { + // TODO: figure this out. Always false now. + for block != nil { + //if block.lastLineBlank { + //return true + //} + t := block.Type + if t == List || t == Item { + block = block.LastChild + } else { + break + } + } + return false +} + +func finalizeList(block *Node) { + block.open = false + item := block.FirstChild + for item != nil { + // check for non-final list item ending with blank line: + if endsWithBlankLine(item) && item.Next != nil { + block.ListData.Tight = false + break + } + // recurse into children of list item, to see if there are spaces + // between any of them: + subItem := item.FirstChild + for subItem != nil { + if endsWithBlankLine(subItem) && (item.Next != nil || subItem.Next != nil) { + block.ListData.Tight = false + break + } + subItem = subItem.Next + } + item = item.Next + } +} + +// Parse a single list item. +// Assumes initial prefix is already removed if this is a sublist. 
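+// It returns the number of bytes of data consumed by the item, or 0 when data
+// does not start with a list item.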
+func (p *Markdown) listItem(data []byte, flags *ListType) int { + // keep track of the indentation of the first line + itemIndent := 0 + if data[0] == '\t' { + itemIndent += 4 + } else { + for itemIndent < 3 && data[itemIndent] == ' ' { + itemIndent++ + } + } + + var bulletChar byte = '*' + i := p.uliPrefix(data) + if i == 0 { + i = p.oliPrefix(data) + } else { + bulletChar = data[i-2] + } + if i == 0 { + i = p.dliPrefix(data) + // reset definition term flag + if i > 0 { + *flags &= ^ListTypeTerm + } + } + if i == 0 { + // if in definition list, set term flag and continue + if *flags&ListTypeDefinition != 0 { + *flags |= ListTypeTerm + } else { + return 0 + } + } + + // skip leading whitespace on first line + for i < len(data) && data[i] == ' ' { + i++ + } + + // find the end of the line + line := i + for i > 0 && i < len(data) && data[i-1] != '\n' { + i++ + } + + // get working buffer + var raw bytes.Buffer + + // put the first line into the working buffer + raw.Write(data[line:i]) + line = i + + // process the following lines + containsBlankLine := false + sublist := 0 + codeBlockMarker := "" + +gatherlines: + for line < len(data) { + i++ + + // find the end of this line + for i < len(data) && data[i-1] != '\n' { + i++ + } + + // if it is an empty line, guess that it is part of this item + // and move on to the next line + if p.isEmpty(data[line:i]) > 0 { + containsBlankLine = true + line = i + continue + } + + // calculate the indentation + indent := 0 + indentIndex := 0 + if data[line] == '\t' { + indentIndex++ + indent += 4 + } else { + for indent < 4 && line+indent < i && data[line+indent] == ' ' { + indent++ + indentIndex++ + } + } + + chunk := data[line+indentIndex : i] + + if p.extensions&FencedCode != 0 { + // determine if in or out of codeblock + // if in codeblock, ignore normal list processing + _, marker := isFenceLine(chunk, nil, codeBlockMarker) + if marker != "" { + if codeBlockMarker == "" { + // start of codeblock + codeBlockMarker = marker + } else { + // end of codeblock. + codeBlockMarker = "" + } + } + // we are in a codeblock, write line, and continue + if codeBlockMarker != "" || marker != "" { + raw.Write(data[line+indentIndex : i]) + line = i + continue gatherlines + } + } + + // evaluate how this line fits in + switch { + // is this a nested list item? + case (p.uliPrefix(chunk) > 0 && !p.isHRule(chunk)) || + p.oliPrefix(chunk) > 0 || + p.dliPrefix(chunk) > 0: + + // to be a nested list, it must be indented more + // if not, it is either a different kind of list + // or the next item in the same list + if indent <= itemIndent { + if p.listTypeChanged(chunk, flags) { + *flags |= ListItemEndOfList + } else if containsBlankLine { + *flags |= ListItemContainsBlock + } + + break gatherlines + } + + if containsBlankLine { + *flags |= ListItemContainsBlock + } + + // is this the first item in the nested list? + if sublist == 0 { + sublist = raw.Len() + } + + // is this a nested prefix heading? + case p.isPrefixHeading(chunk): + // if the heading is not indented, it is not nested in the list + // and thus ends the list + if containsBlankLine && indent < 4 { + *flags |= ListItemEndOfList + break gatherlines + } + *flags |= ListItemContainsBlock + + // anything following an empty line is only part + // of this item if it is indented 4 spaces + // (regardless of the indentation of the beginning of the item) + case containsBlankLine && indent < 4: + if *flags&ListTypeDefinition != 0 && i < len(data)-1 { + // is the next item still a part of this list? 
+ next := i + for next < len(data) && data[next] != '\n' { + next++ + } + for next < len(data)-1 && data[next] == '\n' { + next++ + } + if i < len(data)-1 && data[i] != ':' && data[next] != ':' { + *flags |= ListItemEndOfList + } + } else { + *flags |= ListItemEndOfList + } + break gatherlines + + // a blank line means this should be parsed as a block + case containsBlankLine: + raw.WriteByte('\n') + *flags |= ListItemContainsBlock + } + + // if this line was preceded by one or more blanks, + // re-introduce the blank into the buffer + if containsBlankLine { + containsBlankLine = false + raw.WriteByte('\n') + } + + // add the line into the working buffer without prefix + raw.Write(data[line+indentIndex : i]) + + line = i + } + + rawBytes := raw.Bytes() + + block := p.addBlock(Item, nil) + block.ListFlags = *flags + block.Tight = false + block.BulletChar = bulletChar + block.Delimiter = '.' // Only '.' is possible in Markdown, but ')' will also be possible in CommonMark + + // render the contents of the list item + if *flags&ListItemContainsBlock != 0 && *flags&ListTypeTerm == 0 { + // intermediate render of block item, except for definition term + if sublist > 0 { + p.block(rawBytes[:sublist]) + p.block(rawBytes[sublist:]) + } else { + p.block(rawBytes) + } + } else { + // intermediate render of inline item + if sublist > 0 { + child := p.addChild(Paragraph, 0) + child.content = rawBytes[:sublist] + p.block(rawBytes[sublist:]) + } else { + child := p.addChild(Paragraph, 0) + child.content = rawBytes + } + } + return line +} + +// render a single paragraph that has already been parsed out +func (p *Markdown) renderParagraph(data []byte) { + if len(data) == 0 { + return + } + + // trim leading spaces + beg := 0 + for data[beg] == ' ' { + beg++ + } + + end := len(data) + // trim trailing newline + if data[len(data)-1] == '\n' { + end-- + } + + // trim trailing spaces + for end > beg && data[end-1] == ' ' { + end-- + } + + p.addBlock(Paragraph, data[beg:end]) +} + +func (p *Markdown) paragraph(data []byte) int { + // prev: index of 1st char of previous line + // line: index of 1st char of current line + // i: index of cursor/end of current line + var prev, line, i int + tabSize := TabSizeDefault + if p.extensions&TabSizeEight != 0 { + tabSize = TabSizeDouble + } + // keep going until we find something to mark the end of the paragraph + for i < len(data) { + // mark the beginning of the current line + prev = line + current := data[i:] + line = i + + // did we find a reference or a footnote? If so, end a paragraph + // preceding it and report that we have consumed up to the end of that + // reference: + if refEnd := isReference(p, current, tabSize); refEnd > 0 { + p.renderParagraph(data[:i]) + return i + refEnd + } + + // did we find a blank line marking the end of the paragraph? + if n := p.isEmpty(current); n > 0 { + // did this blank line followed by a definition list item? 
+ if p.extensions&DefinitionLists != 0 { + if i < len(data)-1 && data[i+1] == ':' { + return p.list(data[prev:], ListTypeDefinition) + } + } + + p.renderParagraph(data[:i]) + return i + n + } + + // an underline under some text marks a heading, so our paragraph ended on prev line + if i > 0 { + if level := p.isUnderlinedHeading(current); level > 0 { + // render the paragraph + p.renderParagraph(data[:prev]) + + // ignore leading and trailing whitespace + eol := i - 1 + for prev < eol && data[prev] == ' ' { + prev++ + } + for eol > prev && data[eol-1] == ' ' { + eol-- + } + + id := "" + if p.extensions&AutoHeadingIDs != 0 { + id = sanitized_anchor_name.Create(string(data[prev:eol])) + } + + block := p.addBlock(Heading, data[prev:eol]) + block.Level = level + block.HeadingID = id + + // find the end of the underline + for i < len(data) && data[i] != '\n' { + i++ + } + return i + } + } + + // if the next line starts a block of HTML, then the paragraph ends here + if p.extensions&LaxHTMLBlocks != 0 { + if data[i] == '<' && p.html(current, false) > 0 { + // rewind to before the HTML block + p.renderParagraph(data[:i]) + return i + } + } + + // if there's a prefixed heading or a horizontal rule after this, paragraph is over + if p.isPrefixHeading(current) || p.isHRule(current) { + p.renderParagraph(data[:i]) + return i + } + + // if there's a fenced code block, paragraph is over + if p.extensions&FencedCode != 0 { + if p.fencedCodeBlock(current, false) > 0 { + p.renderParagraph(data[:i]) + return i + } + } + + // if there's a definition list item, prev line is a definition term + if p.extensions&DefinitionLists != 0 { + if p.dliPrefix(current) != 0 { + ret := p.list(data[prev:], ListTypeDefinition) + return ret + } + } + + // if there's a list after this, paragraph is over + if p.extensions&NoEmptyLineBeforeBlock != 0 { + if p.uliPrefix(current) != 0 || + p.oliPrefix(current) != 0 || + p.quotePrefix(current) != 0 || + p.codePrefix(current) != 0 { + p.renderParagraph(data[:i]) + return i + } + } + + // otherwise, scan to the beginning of the next line + nl := bytes.IndexByte(data[i:], '\n') + if nl >= 0 { + i += nl + 1 + } else { + i += len(data[i:]) + } + } + + p.renderParagraph(data[:i]) + return i +} + +func skipChar(data []byte, start int, char byte) int { + i := start + for i < len(data) && data[i] == char { + i++ + } + return i +} + +func skipUntilChar(text []byte, start int, char byte) int { + i := start + for i < len(text) && text[i] != char { + i++ + } + return i +} diff --git a/vendor/github.com/russross/blackfriday/v2/doc.go b/vendor/github.com/russross/blackfriday/v2/doc.go new file mode 100644 index 000000000..5b3fa9876 --- /dev/null +++ b/vendor/github.com/russross/blackfriday/v2/doc.go @@ -0,0 +1,18 @@ +// Package blackfriday is a markdown processor. +// +// It translates plain text with simple formatting rules into an AST, which can +// then be further processed to HTML (provided by Blackfriday itself) or other +// formats (provided by the community). +// +// The simplest way to invoke Blackfriday is to call the Run function. It will +// take a text input and produce a text output in HTML (or other format). +// +// A slightly more sophisticated way to use Blackfriday is to create a Markdown +// processor and to call Parse, which returns a syntax tree for the input +// document. You can leverage Blackfriday's parsing for content extraction from +// markdown documents. You can assign a custom renderer and set various options +// to the Markdown processor. 
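+//
+// A minimal sketch of that second approach (names as in this package's v2
+// API):
+//
+//	md := blackfriday.New(blackfriday.WithExtensions(blackfriday.CommonExtensions))
+//	ast := md.Parse([]byte("# Title\n\nSome *markdown* text.\n"))
+//	// ast is the root *Node; walk it or hand it to a custom Renderer.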
+// +// If you're interested in calling Blackfriday from command line, see +// https://github.com/russross/blackfriday-tool. +package blackfriday diff --git a/vendor/github.com/russross/blackfriday/v2/esc.go b/vendor/github.com/russross/blackfriday/v2/esc.go new file mode 100644 index 000000000..6385f27cb --- /dev/null +++ b/vendor/github.com/russross/blackfriday/v2/esc.go @@ -0,0 +1,34 @@ +package blackfriday + +import ( + "html" + "io" +) + +var htmlEscaper = [256][]byte{ + '&': []byte("&"), + '<': []byte("<"), + '>': []byte(">"), + '"': []byte("""), +} + +func escapeHTML(w io.Writer, s []byte) { + var start, end int + for end < len(s) { + escSeq := htmlEscaper[s[end]] + if escSeq != nil { + w.Write(s[start:end]) + w.Write(escSeq) + start = end + 1 + } + end++ + } + if start < len(s) && end <= len(s) { + w.Write(s[start:end]) + } +} + +func escLink(w io.Writer, text []byte) { + unesc := html.UnescapeString(string(text)) + escapeHTML(w, []byte(unesc)) +} diff --git a/vendor/github.com/russross/blackfriday/v2/go.mod b/vendor/github.com/russross/blackfriday/v2/go.mod new file mode 100644 index 000000000..620b74e0a --- /dev/null +++ b/vendor/github.com/russross/blackfriday/v2/go.mod @@ -0,0 +1 @@ +module github.com/russross/blackfriday/v2 diff --git a/vendor/github.com/russross/blackfriday/v2/html.go b/vendor/github.com/russross/blackfriday/v2/html.go new file mode 100644 index 000000000..284c87184 --- /dev/null +++ b/vendor/github.com/russross/blackfriday/v2/html.go @@ -0,0 +1,949 @@ +// +// Blackfriday Markdown Processor +// Available at http://github.com/russross/blackfriday +// +// Copyright © 2011 Russ Ross . +// Distributed under the Simplified BSD License. +// See README.md for details. +// + +// +// +// HTML rendering backend +// +// + +package blackfriday + +import ( + "bytes" + "fmt" + "io" + "regexp" + "strings" +) + +// HTMLFlags control optional behavior of HTML renderer. +type HTMLFlags int + +// HTML renderer configuration options. 
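+// They are combined with bitwise OR and reach the renderer through
+// HTMLRendererParameters.Flags.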
+const ( + HTMLFlagsNone HTMLFlags = 0 + SkipHTML HTMLFlags = 1 << iota // Skip preformatted HTML blocks + SkipImages // Skip embedded images + SkipLinks // Skip all links + Safelink // Only link to trusted protocols + NofollowLinks // Only link with rel="nofollow" + NoreferrerLinks // Only link with rel="noreferrer" + NoopenerLinks // Only link with rel="noopener" + HrefTargetBlank // Add a blank target + CompletePage // Generate a complete HTML page + UseXHTML // Generate XHTML output instead of HTML + FootnoteReturnLinks // Generate a link at the end of a footnote to return to the source + Smartypants // Enable smart punctuation substitutions + SmartypantsFractions // Enable smart fractions (with Smartypants) + SmartypantsDashes // Enable smart dashes (with Smartypants) + SmartypantsLatexDashes // Enable LaTeX-style dashes (with Smartypants) + SmartypantsAngledQuotes // Enable angled double quotes (with Smartypants) for double quotes rendering + SmartypantsQuotesNBSP // Enable « French guillemets » (with Smartypants) + TOC // Generate a table of contents +) + +var ( + htmlTagRe = regexp.MustCompile("(?i)^" + htmlTag) +) + +const ( + htmlTag = "(?:" + openTag + "|" + closeTag + "|" + htmlComment + "|" + + processingInstruction + "|" + declaration + "|" + cdata + ")" + closeTag = "]" + openTag = "<" + tagName + attribute + "*" + "\\s*/?>" + attribute = "(?:" + "\\s+" + attributeName + attributeValueSpec + "?)" + attributeValue = "(?:" + unquotedValue + "|" + singleQuotedValue + "|" + doubleQuotedValue + ")" + attributeValueSpec = "(?:" + "\\s*=" + "\\s*" + attributeValue + ")" + attributeName = "[a-zA-Z_:][a-zA-Z0-9:._-]*" + cdata = "" + declaration = "]*>" + doubleQuotedValue = "\"[^\"]*\"" + htmlComment = "|" + processingInstruction = "[<][?].*?[?][>]" + singleQuotedValue = "'[^']*'" + tagName = "[A-Za-z][A-Za-z0-9-]*" + unquotedValue = "[^\"'=<>`\\x00-\\x20]+" +) + +// HTMLRendererParameters is a collection of supplementary parameters tweaking +// the behavior of various parts of HTML renderer. +type HTMLRendererParameters struct { + // Prepend this text to each relative URL. + AbsolutePrefix string + // Add this text to each footnote anchor, to ensure uniqueness. + FootnoteAnchorPrefix string + // Show this text inside the tag for a footnote return link, if the + // HTML_FOOTNOTE_RETURN_LINKS flag is enabled. If blank, the string + // [return] is used. + FootnoteReturnLinkContents string + // If set, add this text to the front of each Heading ID, to ensure + // uniqueness. + HeadingIDPrefix string + // If set, add this text to the back of each Heading ID, to ensure uniqueness. + HeadingIDSuffix string + // Increase heading levels: if the offset is 1,

<h1> becomes <h2>
etc. + // Negative offset is also valid. + // Resulting levels are clipped between 1 and 6. + HeadingLevelOffset int + + Title string // Document title (used if CompletePage is set) + CSS string // Optional CSS file URL (used if CompletePage is set) + Icon string // Optional icon file URL (used if CompletePage is set) + + Flags HTMLFlags // Flags allow customizing this renderer's behavior +} + +// HTMLRenderer is a type that implements the Renderer interface for HTML output. +// +// Do not create this directly, instead use the NewHTMLRenderer function. +type HTMLRenderer struct { + HTMLRendererParameters + + closeTag string // how to end singleton tags: either " />" or ">" + + // Track heading IDs to prevent ID collision in a single generation. + headingIDs map[string]int + + lastOutputLen int + disableTags int + + sr *SPRenderer +} + +const ( + xhtmlClose = " />" + htmlClose = ">" +) + +// NewHTMLRenderer creates and configures an HTMLRenderer object, which +// satisfies the Renderer interface. +func NewHTMLRenderer(params HTMLRendererParameters) *HTMLRenderer { + // configure the rendering engine + closeTag := htmlClose + if params.Flags&UseXHTML != 0 { + closeTag = xhtmlClose + } + + if params.FootnoteReturnLinkContents == "" { + params.FootnoteReturnLinkContents = `[return]` + } + + return &HTMLRenderer{ + HTMLRendererParameters: params, + + closeTag: closeTag, + headingIDs: make(map[string]int), + + sr: NewSmartypantsRenderer(params.Flags), + } +} + +func isHTMLTag(tag []byte, tagname string) bool { + found, _ := findHTMLTagPos(tag, tagname) + return found +} + +// Look for a character, but ignore it when it's in any kind of quotes, it +// might be JavaScript +func skipUntilCharIgnoreQuotes(html []byte, start int, char byte) int { + inSingleQuote := false + inDoubleQuote := false + inGraveQuote := false + i := start + for i < len(html) { + switch { + case html[i] == char && !inSingleQuote && !inDoubleQuote && !inGraveQuote: + return i + case html[i] == '\'': + inSingleQuote = !inSingleQuote + case html[i] == '"': + inDoubleQuote = !inDoubleQuote + case html[i] == '`': + inGraveQuote = !inGraveQuote + } + i++ + } + return start +} + +func findHTMLTagPos(tag []byte, tagname string) (bool, int) { + i := 0 + if i < len(tag) && tag[0] != '<' { + return false, -1 + } + i++ + i = skipSpace(tag, i) + + if i < len(tag) && tag[i] == '/' { + i++ + } + + i = skipSpace(tag, i) + j := 0 + for ; i < len(tag); i, j = i+1, j+1 { + if j >= len(tagname) { + break + } + + if strings.ToLower(string(tag[i]))[0] != tagname[j] { + return false, -1 + } + } + + if i == len(tag) { + return false, -1 + } + + rightAngle := skipUntilCharIgnoreQuotes(tag, i, '>') + if rightAngle >= i { + return true, rightAngle + } + + return false, -1 +} + +func skipSpace(tag []byte, i int) int { + for i < len(tag) && isspace(tag[i]) { + i++ + } + return i +} + +func isRelativeLink(link []byte) (yes bool) { + // a tag begin with '#' + if link[0] == '#' { + return true + } + + // link begin with '/' but not '//', the second maybe a protocol relative link + if len(link) >= 2 && link[0] == '/' && link[1] != '/' { + return true + } + + // only the root '/' + if len(link) == 1 && link[0] == '/' { + return true + } + + // current directory : begin with "./" + if bytes.HasPrefix(link, []byte("./")) { + return true + } + + // parent directory : begin with "../" + if bytes.HasPrefix(link, []byte("../")) { + return true + } + + return false +} + +func (r *HTMLRenderer) ensureUniqueHeadingID(id string) string { + for count, found := 
r.headingIDs[id]; found; count, found = r.headingIDs[id] { + tmp := fmt.Sprintf("%s-%d", id, count+1) + + if _, tmpFound := r.headingIDs[tmp]; !tmpFound { + r.headingIDs[id] = count + 1 + id = tmp + } else { + id = id + "-1" + } + } + + if _, found := r.headingIDs[id]; !found { + r.headingIDs[id] = 0 + } + + return id +} + +func (r *HTMLRenderer) addAbsPrefix(link []byte) []byte { + if r.AbsolutePrefix != "" && isRelativeLink(link) && link[0] != '.' { + newDest := r.AbsolutePrefix + if link[0] != '/' { + newDest += "/" + } + newDest += string(link) + return []byte(newDest) + } + return link +} + +func appendLinkAttrs(attrs []string, flags HTMLFlags, link []byte) []string { + if isRelativeLink(link) { + return attrs + } + val := []string{} + if flags&NofollowLinks != 0 { + val = append(val, "nofollow") + } + if flags&NoreferrerLinks != 0 { + val = append(val, "noreferrer") + } + if flags&NoopenerLinks != 0 { + val = append(val, "noopener") + } + if flags&HrefTargetBlank != 0 { + attrs = append(attrs, "target=\"_blank\"") + } + if len(val) == 0 { + return attrs + } + attr := fmt.Sprintf("rel=%q", strings.Join(val, " ")) + return append(attrs, attr) +} + +func isMailto(link []byte) bool { + return bytes.HasPrefix(link, []byte("mailto:")) +} + +func needSkipLink(flags HTMLFlags, dest []byte) bool { + if flags&SkipLinks != 0 { + return true + } + return flags&Safelink != 0 && !isSafeLink(dest) && !isMailto(dest) +} + +func isSmartypantable(node *Node) bool { + pt := node.Parent.Type + return pt != Link && pt != CodeBlock && pt != Code +} + +func appendLanguageAttr(attrs []string, info []byte) []string { + if len(info) == 0 { + return attrs + } + endOfLang := bytes.IndexAny(info, "\t ") + if endOfLang < 0 { + endOfLang = len(info) + } + return append(attrs, fmt.Sprintf("class=\"language-%s\"", info[:endOfLang])) +} + +func (r *HTMLRenderer) tag(w io.Writer, name []byte, attrs []string) { + w.Write(name) + if len(attrs) > 0 { + w.Write(spaceBytes) + w.Write([]byte(strings.Join(attrs, " "))) + } + w.Write(gtBytes) + r.lastOutputLen = 1 +} + +func footnoteRef(prefix string, node *Node) []byte { + urlFrag := prefix + string(slugify(node.Destination)) + anchor := fmt.Sprintf(`%d`, urlFrag, node.NoteID) + return []byte(fmt.Sprintf(`%s`, urlFrag, anchor)) +} + +func footnoteItem(prefix string, slug []byte) []byte { + return []byte(fmt.Sprintf(`
<li id="%sfn:%s">`, prefix, slug))
+}
+
+func footnoteReturnLink(prefix, returnLink string, slug []byte) []byte {
+	const format = ` <a class="footnote-return" href="#%sfnref:%s">%s</a>`
+	return []byte(fmt.Sprintf(format, prefix, slug, returnLink))
+}
+
+func itemOpenCR(node *Node) bool {
+	if node.Prev == nil {
+		return false
+	}
+	ld := node.Parent.ListData
+	return !ld.Tight && ld.ListFlags&ListTypeDefinition == 0
+}
+
+func skipParagraphTags(node *Node) bool {
+	grandparent := node.Parent.Parent
+	if grandparent == nil || grandparent.Type != List {
+		return false
+	}
+	tightOrTerm := grandparent.Tight || node.Parent.ListFlags&ListTypeTerm != 0
+	return grandparent.Type == List && tightOrTerm
+}
+
+func cellAlignment(align CellAlignFlags) string {
+	switch align {
+	case TableAlignmentLeft:
+		return "left"
+	case TableAlignmentRight:
+		return "right"
+	case TableAlignmentCenter:
+		return "center"
+	default:
+		return ""
+	}
+}
+
+func (r *HTMLRenderer) out(w io.Writer, text []byte) {
+	if r.disableTags > 0 {
+		w.Write(htmlTagRe.ReplaceAll(text, []byte{}))
+	} else {
+		w.Write(text)
+	}
+	r.lastOutputLen = len(text)
+}
+
+func (r *HTMLRenderer) cr(w io.Writer) {
+	if r.lastOutputLen > 0 {
+		r.out(w, nlBytes)
+	}
+}
+
+var (
+	nlBytes    = []byte{'\n'}
+	gtBytes    = []byte{'>'}
+	spaceBytes = []byte{' '}
+)
+
+var (
+	brTag              = []byte("<br>")
+	brXHTMLTag         = []byte("<br />")
+	emTag              = []byte("<em>")
+	emCloseTag         = []byte("</em>")
+	strongTag          = []byte("<strong>")
+	strongCloseTag     = []byte("</strong>")
+	delTag             = []byte("<del>")
+	delCloseTag        = []byte("</del>")
+	ttTag              = []byte("<tt>")
+	ttCloseTag         = []byte("</tt>")
+	aTag               = []byte("<a")
+	aCloseTag          = []byte("</a>")
+	preTag             = []byte("<pre>")
+	preCloseTag        = []byte("</pre>")
+	codeTag            = []byte("<code>")
+	codeCloseTag       = []byte("</code>")
+	pTag               = []byte("<p>")
+	pCloseTag          = []byte("</p>")
+	blockquoteTag      = []byte("<blockquote>")
+	blockquoteCloseTag = []byte("</blockquote>")
+	hrTag              = []byte("<hr>")
+	hrXHTMLTag         = []byte("<hr />")
+	ulTag              = []byte("<ul>")
+	ulCloseTag         = []byte("</ul>")
+	olTag              = []byte("<ol>")
+	olCloseTag         = []byte("</ol>")
+	dlTag              = []byte("<dl>")
+	dlCloseTag         = []byte("</dl>")
+	liTag              = []byte("<li>")
+	liCloseTag         = []byte("</li>")
+	ddTag              = []byte("<dd>")
+	ddCloseTag         = []byte("</dd>")
+	dtTag              = []byte("<dt>")
+	dtCloseTag         = []byte("</dt>")
+	tableTag           = []byte("<table>")
+	tableCloseTag      = []byte("</table>")
+	tdTag              = []byte("<td")
+	tdCloseTag         = []byte("</td>")
+	thTag              = []byte("<th")
+	thCloseTag         = []byte("</th>")
+	theadTag           = []byte("<thead>")
+	theadCloseTag      = []byte("</thead>")
+	tbodyTag           = []byte("<tbody>")
+	tbodyCloseTag      = []byte("</tbody>")
+	trTag              = []byte("<tr>")
+	trCloseTag         = []byte("</tr>")
+	h1Tag              = []byte("<h1")
+	h1CloseTag         = []byte("</h1>")
+	h2Tag              = []byte("<h2")
+	h2CloseTag         = []byte("</h2>")
+	h3Tag              = []byte("<h3")
+	h3CloseTag         = []byte("</h3>")
+	h4Tag              = []byte("<h4")
+	h4CloseTag         = []byte("</h4>")
+	h5Tag              = []byte("<h5")
+	h5CloseTag         = []byte("</h5>")
+	h6Tag              = []byte("<h6")
+	h6CloseTag         = []byte("</h6>")
+
+	footnotesDivBytes      = []byte("\n<div class=\"footnotes\">\n\n")
+	footnotesCloseDivBytes = []byte("\n</div>
    \n") +) + +func headingTagsFromLevel(level int) ([]byte, []byte) { + if level <= 1 { + return h1Tag, h1CloseTag + } + switch level { + case 2: + return h2Tag, h2CloseTag + case 3: + return h3Tag, h3CloseTag + case 4: + return h4Tag, h4CloseTag + case 5: + return h5Tag, h5CloseTag + } + return h6Tag, h6CloseTag +} + +func (r *HTMLRenderer) outHRTag(w io.Writer) { + if r.Flags&UseXHTML == 0 { + r.out(w, hrTag) + } else { + r.out(w, hrXHTMLTag) + } +} + +// RenderNode is a default renderer of a single node of a syntax tree. For +// block nodes it will be called twice: first time with entering=true, second +// time with entering=false, so that it could know when it's working on an open +// tag and when on close. It writes the result to w. +// +// The return value is a way to tell the calling walker to adjust its walk +// pattern: e.g. it can terminate the traversal by returning Terminate. Or it +// can ask the walker to skip a subtree of this node by returning SkipChildren. +// The typical behavior is to return GoToNext, which asks for the usual +// traversal to the next node. +func (r *HTMLRenderer) RenderNode(w io.Writer, node *Node, entering bool) WalkStatus { + attrs := []string{} + switch node.Type { + case Text: + if r.Flags&Smartypants != 0 { + var tmp bytes.Buffer + escapeHTML(&tmp, node.Literal) + r.sr.Process(w, tmp.Bytes()) + } else { + if node.Parent.Type == Link { + escLink(w, node.Literal) + } else { + escapeHTML(w, node.Literal) + } + } + case Softbreak: + r.cr(w) + // TODO: make it configurable via out(renderer.softbreak) + case Hardbreak: + if r.Flags&UseXHTML == 0 { + r.out(w, brTag) + } else { + r.out(w, brXHTMLTag) + } + r.cr(w) + case Emph: + if entering { + r.out(w, emTag) + } else { + r.out(w, emCloseTag) + } + case Strong: + if entering { + r.out(w, strongTag) + } else { + r.out(w, strongCloseTag) + } + case Del: + if entering { + r.out(w, delTag) + } else { + r.out(w, delCloseTag) + } + case HTMLSpan: + if r.Flags&SkipHTML != 0 { + break + } + r.out(w, node.Literal) + case Link: + // mark it but don't link it if it is not a safe link: no smartypants + dest := node.LinkData.Destination + if needSkipLink(r.Flags, dest) { + if entering { + r.out(w, ttTag) + } else { + r.out(w, ttCloseTag) + } + } else { + if entering { + dest = r.addAbsPrefix(dest) + var hrefBuf bytes.Buffer + hrefBuf.WriteString("href=\"") + escLink(&hrefBuf, dest) + hrefBuf.WriteByte('"') + attrs = append(attrs, hrefBuf.String()) + if node.NoteID != 0 { + r.out(w, footnoteRef(r.FootnoteAnchorPrefix, node)) + break + } + attrs = appendLinkAttrs(attrs, r.Flags, dest) + if len(node.LinkData.Title) > 0 { + var titleBuff bytes.Buffer + titleBuff.WriteString("title=\"") + escapeHTML(&titleBuff, node.LinkData.Title) + titleBuff.WriteByte('"') + attrs = append(attrs, titleBuff.String()) + } + r.tag(w, aTag, attrs) + } else { + if node.NoteID != 0 { + break + } + r.out(w, aCloseTag) + } + } + case Image: + if r.Flags&SkipImages != 0 { + return SkipChildren + } + if entering { + dest := node.LinkData.Destination + dest = r.addAbsPrefix(dest) + if r.disableTags == 0 { + //if options.safe && potentiallyUnsafe(dest) { + //out(w, ``)
+				//} else {
+				r.out(w, []byte(`<img src=`)) + } + } + case Code: + r.out(w, codeTag) + escapeHTML(w, node.Literal) + r.out(w, codeCloseTag) + case Document: + break + case Paragraph: + if skipParagraphTags(node) { + break + } + if entering { + // TODO: untangle this clusterfuck about when the newlines need + // to be added and when not. + if node.Prev != nil { + switch node.Prev.Type { + case HTMLBlock, List, Paragraph, Heading, CodeBlock, BlockQuote, HorizontalRule: + r.cr(w) + } + } + if node.Parent.Type == BlockQuote && node.Prev == nil { + r.cr(w) + } + r.out(w, pTag) + } else { + r.out(w, pCloseTag) + if !(node.Parent.Type == Item && node.Next == nil) { + r.cr(w) + } + } + case BlockQuote: + if entering { + r.cr(w) + r.out(w, blockquoteTag) + } else { + r.out(w, blockquoteCloseTag) + r.cr(w) + } + case HTMLBlock: + if r.Flags&SkipHTML != 0 { + break + } + r.cr(w) + r.out(w, node.Literal) + r.cr(w) + case Heading: + headingLevel := r.HTMLRendererParameters.HeadingLevelOffset + node.Level + openTag, closeTag := headingTagsFromLevel(headingLevel) + if entering { + if node.IsTitleblock { + attrs = append(attrs, `class="title"`) + } + if node.HeadingID != "" { + id := r.ensureUniqueHeadingID(node.HeadingID) + if r.HeadingIDPrefix != "" { + id = r.HeadingIDPrefix + id + } + if r.HeadingIDSuffix != "" { + id = id + r.HeadingIDSuffix + } + attrs = append(attrs, fmt.Sprintf(`id="%s"`, id)) + } + r.cr(w) + r.tag(w, openTag, attrs) + } else { + r.out(w, closeTag) + if !(node.Parent.Type == Item && node.Next == nil) { + r.cr(w) + } + } + case HorizontalRule: + r.cr(w) + r.outHRTag(w) + r.cr(w) + case List: + openTag := ulTag + closeTag := ulCloseTag + if node.ListFlags&ListTypeOrdered != 0 { + openTag = olTag + closeTag = olCloseTag + } + if node.ListFlags&ListTypeDefinition != 0 { + openTag = dlTag + closeTag = dlCloseTag + } + if entering { + if node.IsFootnotesList { + r.out(w, footnotesDivBytes) + r.outHRTag(w) + r.cr(w) + } + r.cr(w) + if node.Parent.Type == Item && node.Parent.Parent.Tight { + r.cr(w) + } + r.tag(w, openTag[:len(openTag)-1], attrs) + r.cr(w) + } else { + r.out(w, closeTag) + //cr(w) + //if node.parent.Type != Item { + // cr(w) + //} + if node.Parent.Type == Item && node.Next != nil { + r.cr(w) + } + if node.Parent.Type == Document || node.Parent.Type == BlockQuote { + r.cr(w) + } + if node.IsFootnotesList { + r.out(w, footnotesCloseDivBytes) + } + } + case Item: + openTag := liTag + closeTag := liCloseTag + if node.ListFlags&ListTypeDefinition != 0 { + openTag = ddTag + closeTag = ddCloseTag + } + if node.ListFlags&ListTypeTerm != 0 { + openTag = dtTag + closeTag = dtCloseTag + } + if entering { + if itemOpenCR(node) { + r.cr(w) + } + if node.ListData.RefLink != nil { + slug := slugify(node.ListData.RefLink) + r.out(w, footnoteItem(r.FootnoteAnchorPrefix, slug)) + break + } + r.out(w, openTag) + } else { + if node.ListData.RefLink != nil { + slug := slugify(node.ListData.RefLink) + if r.Flags&FootnoteReturnLinks != 0 { + r.out(w, footnoteReturnLink(r.FootnoteAnchorPrefix, r.FootnoteReturnLinkContents, slug)) + } + } + r.out(w, closeTag) + r.cr(w) + } + case CodeBlock: + attrs = appendLanguageAttr(attrs, node.Info) + r.cr(w) + r.out(w, preTag) + r.tag(w, codeTag[:len(codeTag)-1], attrs) + escapeHTML(w, node.Literal) + r.out(w, codeCloseTag) + r.out(w, preCloseTag) + if node.Parent.Type != Item { + r.cr(w) + } + case Table: + if entering { + r.cr(w) + r.out(w, tableTag) + } else { + r.out(w, tableCloseTag) + r.cr(w) + } + case TableCell: + openTag := tdTag + closeTag := tdCloseTag + 
if node.IsHeader { + openTag = thTag + closeTag = thCloseTag + } + if entering { + align := cellAlignment(node.Align) + if align != "" { + attrs = append(attrs, fmt.Sprintf(`align="%s"`, align)) + } + if node.Prev == nil { + r.cr(w) + } + r.tag(w, openTag, attrs) + } else { + r.out(w, closeTag) + r.cr(w) + } + case TableHead: + if entering { + r.cr(w) + r.out(w, theadTag) + } else { + r.out(w, theadCloseTag) + r.cr(w) + } + case TableBody: + if entering { + r.cr(w) + r.out(w, tbodyTag) + // XXX: this is to adhere to a rather silly test. Should fix test. + if node.FirstChild == nil { + r.cr(w) + } + } else { + r.out(w, tbodyCloseTag) + r.cr(w) + } + case TableRow: + if entering { + r.cr(w) + r.out(w, trTag) + } else { + r.out(w, trCloseTag) + r.cr(w) + } + default: + panic("Unknown node type " + node.Type.String()) + } + return GoToNext +} + +// RenderHeader writes HTML document preamble and TOC if requested. +func (r *HTMLRenderer) RenderHeader(w io.Writer, ast *Node) { + r.writeDocumentHeader(w) + if r.Flags&TOC != 0 { + r.writeTOC(w, ast) + } +} + +// RenderFooter writes HTML document footer. +func (r *HTMLRenderer) RenderFooter(w io.Writer, ast *Node) { + if r.Flags&CompletePage == 0 { + return + } + io.WriteString(w, "\n\n\n") +} + +func (r *HTMLRenderer) writeDocumentHeader(w io.Writer) { + if r.Flags&CompletePage == 0 { + return + } + ending := "" + if r.Flags&UseXHTML != 0 { + io.WriteString(w, "\n") + io.WriteString(w, "\n") + ending = " /" + } else { + io.WriteString(w, "\n") + io.WriteString(w, "\n") + } + io.WriteString(w, "\n") + io.WriteString(w, " ") + if r.Flags&Smartypants != 0 { + r.sr.Process(w, []byte(r.Title)) + } else { + escapeHTML(w, []byte(r.Title)) + } + io.WriteString(w, "\n") + io.WriteString(w, " \n") + io.WriteString(w, " \n") + if r.CSS != "" { + io.WriteString(w, " \n") + } + if r.Icon != "" { + io.WriteString(w, " \n") + } + io.WriteString(w, "\n") + io.WriteString(w, "\n\n") +} + +func (r *HTMLRenderer) writeTOC(w io.Writer, ast *Node) { + buf := bytes.Buffer{} + + inHeading := false + tocLevel := 0 + headingCount := 0 + + ast.Walk(func(node *Node, entering bool) WalkStatus { + if node.Type == Heading && !node.HeadingData.IsTitleblock { + inHeading = entering + if entering { + node.HeadingID = fmt.Sprintf("toc_%d", headingCount) + if node.Level == tocLevel { + buf.WriteString("\n\n
  • ") + } else if node.Level < tocLevel { + for node.Level < tocLevel { + tocLevel-- + buf.WriteString("
  • \n") + } + buf.WriteString("\n\n
  • ") + } else { + for node.Level > tocLevel { + tocLevel++ + buf.WriteString("\n") + } + + if buf.Len() > 0 { + io.WriteString(w, "\n") + } + r.lastOutputLen = buf.Len() +} diff --git a/vendor/github.com/russross/blackfriday/v2/inline.go b/vendor/github.com/russross/blackfriday/v2/inline.go new file mode 100644 index 000000000..4ed290792 --- /dev/null +++ b/vendor/github.com/russross/blackfriday/v2/inline.go @@ -0,0 +1,1228 @@ +// +// Blackfriday Markdown Processor +// Available at http://github.com/russross/blackfriday +// +// Copyright © 2011 Russ Ross . +// Distributed under the Simplified BSD License. +// See README.md for details. +// + +// +// Functions to parse inline elements. +// + +package blackfriday + +import ( + "bytes" + "regexp" + "strconv" +) + +var ( + urlRe = `((https?|ftp):\/\/|\/)[-A-Za-z0-9+&@#\/%?=~_|!:,.;\(\)]+` + anchorRe = regexp.MustCompile(`^(]+")?\s?>` + urlRe + `<\/a>)`) + + // https://www.w3.org/TR/html5/syntax.html#character-references + // highest unicode code point in 17 planes (2^20): 1,114,112d = + // 7 dec digits or 6 hex digits + // named entity references can be 2-31 characters with stuff like < + // at one end and ∳ at the other. There + // are also sometimes numbers at the end, although this isn't inherent + // in the specification; there are never numbers anywhere else in + // current character references, though; see ¾ and ▒, etc. + // https://www.w3.org/TR/html5/syntax.html#named-character-references + // + // entity := "&" (named group | number ref) ";" + // named group := [a-zA-Z]{2,31}[0-9]{0,2} + // number ref := "#" (dec ref | hex ref) + // dec ref := [0-9]{1,7} + // hex ref := ("x" | "X") [0-9a-fA-F]{1,6} + htmlEntityRe = regexp.MustCompile(`&([a-zA-Z]{2,31}[0-9]{0,2}|#([0-9]{1,7}|[xX][0-9a-fA-F]{1,6}));`) +) + +// Functions to parse text within a block +// Each function returns the number of chars taken care of +// data is the complete block being rendered +// offset is the number of valid chars before the current cursor + +func (p *Markdown) inline(currBlock *Node, data []byte) { + // handlers might call us recursively: enforce a maximum depth + if p.nesting >= p.maxNesting || len(data) == 0 { + return + } + p.nesting++ + beg, end := 0, 0 + for end < len(data) { + handler := p.inlineCallback[data[end]] + if handler != nil { + if consumed, node := handler(p, data, end); consumed == 0 { + // No action from the callback. + end++ + } else { + // Copy inactive chars into the output. + currBlock.AppendChild(text(data[beg:end])) + if node != nil { + currBlock.AppendChild(node) + } + // Skip past whatever the callback used. 
+ beg = end + consumed + end = beg + } + } else { + end++ + } + } + if beg < len(data) { + if data[end-1] == '\n' { + end-- + } + currBlock.AppendChild(text(data[beg:end])) + } + p.nesting-- +} + +// single and double emphasis parsing +func emphasis(p *Markdown, data []byte, offset int) (int, *Node) { + data = data[offset:] + c := data[0] + + if len(data) > 2 && data[1] != c { + // whitespace cannot follow an opening emphasis; + // strikethrough only takes two characters '~~' + if c == '~' || isspace(data[1]) { + return 0, nil + } + ret, node := helperEmphasis(p, data[1:], c) + if ret == 0 { + return 0, nil + } + + return ret + 1, node + } + + if len(data) > 3 && data[1] == c && data[2] != c { + if isspace(data[2]) { + return 0, nil + } + ret, node := helperDoubleEmphasis(p, data[2:], c) + if ret == 0 { + return 0, nil + } + + return ret + 2, node + } + + if len(data) > 4 && data[1] == c && data[2] == c && data[3] != c { + if c == '~' || isspace(data[3]) { + return 0, nil + } + ret, node := helperTripleEmphasis(p, data, 3, c) + if ret == 0 { + return 0, nil + } + + return ret + 3, node + } + + return 0, nil +} + +func codeSpan(p *Markdown, data []byte, offset int) (int, *Node) { + data = data[offset:] + + nb := 0 + + // count the number of backticks in the delimiter + for nb < len(data) && data[nb] == '`' { + nb++ + } + + // find the next delimiter + i, end := 0, 0 + for end = nb; end < len(data) && i < nb; end++ { + if data[end] == '`' { + i++ + } else { + i = 0 + } + } + + // no matching delimiter? + if i < nb && end >= len(data) { + return 0, nil + } + + // trim outside whitespace + fBegin := nb + for fBegin < end && data[fBegin] == ' ' { + fBegin++ + } + + fEnd := end - nb + for fEnd > fBegin && data[fEnd-1] == ' ' { + fEnd-- + } + + // render the code span + if fBegin != fEnd { + code := NewNode(Code) + code.Literal = data[fBegin:fEnd] + return end, code + } + + return end, nil +} + +// newline preceded by two spaces becomes
    +func maybeLineBreak(p *Markdown, data []byte, offset int) (int, *Node) { + origOffset := offset + for offset < len(data) && data[offset] == ' ' { + offset++ + } + + if offset < len(data) && data[offset] == '\n' { + if offset-origOffset >= 2 { + return offset - origOffset + 1, NewNode(Hardbreak) + } + return offset - origOffset, nil + } + return 0, nil +} + +// newline without two spaces works when HardLineBreak is enabled +func lineBreak(p *Markdown, data []byte, offset int) (int, *Node) { + if p.extensions&HardLineBreak != 0 { + return 1, NewNode(Hardbreak) + } + return 0, nil +} + +type linkType int + +const ( + linkNormal linkType = iota + linkImg + linkDeferredFootnote + linkInlineFootnote +) + +func isReferenceStyleLink(data []byte, pos int, t linkType) bool { + if t == linkDeferredFootnote { + return false + } + return pos < len(data)-1 && data[pos] == '[' && data[pos+1] != '^' +} + +func maybeImage(p *Markdown, data []byte, offset int) (int, *Node) { + if offset < len(data)-1 && data[offset+1] == '[' { + return link(p, data, offset) + } + return 0, nil +} + +func maybeInlineFootnote(p *Markdown, data []byte, offset int) (int, *Node) { + if offset < len(data)-1 && data[offset+1] == '[' { + return link(p, data, offset) + } + return 0, nil +} + +// '[': parse a link or an image or a footnote +func link(p *Markdown, data []byte, offset int) (int, *Node) { + // no links allowed inside regular links, footnote, and deferred footnotes + if p.insideLink && (offset > 0 && data[offset-1] == '[' || len(data)-1 > offset && data[offset+1] == '^') { + return 0, nil + } + + var t linkType + switch { + // special case: ![^text] == deferred footnote (that follows something with + // an exclamation point) + case p.extensions&Footnotes != 0 && len(data)-1 > offset && data[offset+1] == '^': + t = linkDeferredFootnote + // ![alt] == image + case offset >= 0 && data[offset] == '!': + t = linkImg + offset++ + // ^[text] == inline footnote + // [^refId] == deferred footnote + case p.extensions&Footnotes != 0: + if offset >= 0 && data[offset] == '^' { + t = linkInlineFootnote + offset++ + } else if len(data)-1 > offset && data[offset+1] == '^' { + t = linkDeferredFootnote + } + // [text] == regular link + default: + t = linkNormal + } + + data = data[offset:] + + var ( + i = 1 + noteID int + title, link, altContent []byte + textHasNl = false + ) + + if t == linkDeferredFootnote { + i++ + } + + // look for the matching closing bracket + for level := 1; level > 0 && i < len(data); i++ { + switch { + case data[i] == '\n': + textHasNl = true + + case data[i-1] == '\\': + continue + + case data[i] == '[': + level++ + + case data[i] == ']': + level-- + if level <= 0 { + i-- // compensate for extra i++ in for loop + } + } + } + + if i >= len(data) { + return 0, nil + } + + txtE := i + i++ + var footnoteNode *Node + + // skip any amount of whitespace or newline + // (this is much more lax than original markdown syntax) + for i < len(data) && isspace(data[i]) { + i++ + } + + // inline style link + switch { + case i < len(data) && data[i] == '(': + // skip initial whitespace + i++ + + for i < len(data) && isspace(data[i]) { + i++ + } + + linkB := i + + // look for link end: ' " ) + findlinkend: + for i < len(data) { + switch { + case data[i] == '\\': + i += 2 + + case data[i] == ')' || data[i] == '\'' || data[i] == '"': + break findlinkend + + default: + i++ + } + } + + if i >= len(data) { + return 0, nil + } + linkE := i + + // look for title end if present + titleB, titleE := 0, 0 + if data[i] == '\'' || 
data[i] == '"' { + i++ + titleB = i + + findtitleend: + for i < len(data) { + switch { + case data[i] == '\\': + i += 2 + + case data[i] == ')': + break findtitleend + + default: + i++ + } + } + + if i >= len(data) { + return 0, nil + } + + // skip whitespace after title + titleE = i - 1 + for titleE > titleB && isspace(data[titleE]) { + titleE-- + } + + // check for closing quote presence + if data[titleE] != '\'' && data[titleE] != '"' { + titleB, titleE = 0, 0 + linkE = i + } + } + + // remove whitespace at the end of the link + for linkE > linkB && isspace(data[linkE-1]) { + linkE-- + } + + // remove optional angle brackets around the link + if data[linkB] == '<' { + linkB++ + } + if data[linkE-1] == '>' { + linkE-- + } + + // build escaped link and title + if linkE > linkB { + link = data[linkB:linkE] + } + + if titleE > titleB { + title = data[titleB:titleE] + } + + i++ + + // reference style link + case isReferenceStyleLink(data, i, t): + var id []byte + altContentConsidered := false + + // look for the id + i++ + linkB := i + for i < len(data) && data[i] != ']' { + i++ + } + if i >= len(data) { + return 0, nil + } + linkE := i + + // find the reference + if linkB == linkE { + if textHasNl { + var b bytes.Buffer + + for j := 1; j < txtE; j++ { + switch { + case data[j] != '\n': + b.WriteByte(data[j]) + case data[j-1] != ' ': + b.WriteByte(' ') + } + } + + id = b.Bytes() + } else { + id = data[1:txtE] + altContentConsidered = true + } + } else { + id = data[linkB:linkE] + } + + // find the reference with matching id + lr, ok := p.getRef(string(id)) + if !ok { + return 0, nil + } + + // keep link and title from reference + link = lr.link + title = lr.title + if altContentConsidered { + altContent = lr.text + } + i++ + + // shortcut reference style link or reference or inline footnote + default: + var id []byte + + // craft the id + if textHasNl { + var b bytes.Buffer + + for j := 1; j < txtE; j++ { + switch { + case data[j] != '\n': + b.WriteByte(data[j]) + case data[j-1] != ' ': + b.WriteByte(' ') + } + } + + id = b.Bytes() + } else { + if t == linkDeferredFootnote { + id = data[2:txtE] // get rid of the ^ + } else { + id = data[1:txtE] + } + } + + footnoteNode = NewNode(Item) + if t == linkInlineFootnote { + // create a new reference + noteID = len(p.notes) + 1 + + var fragment []byte + if len(id) > 0 { + if len(id) < 16 { + fragment = make([]byte, len(id)) + } else { + fragment = make([]byte, 16) + } + copy(fragment, slugify(id)) + } else { + fragment = append([]byte("footnote-"), []byte(strconv.Itoa(noteID))...) 
+ } + + ref := &reference{ + noteID: noteID, + hasBlock: false, + link: fragment, + title: id, + footnote: footnoteNode, + } + + p.notes = append(p.notes, ref) + + link = ref.link + title = ref.title + } else { + // find the reference with matching id + lr, ok := p.getRef(string(id)) + if !ok { + return 0, nil + } + + if t == linkDeferredFootnote { + lr.noteID = len(p.notes) + 1 + lr.footnote = footnoteNode + p.notes = append(p.notes, lr) + } + + // keep link and title from reference + link = lr.link + // if inline footnote, title == footnote contents + title = lr.title + noteID = lr.noteID + } + + // rewind the whitespace + i = txtE + 1 + } + + var uLink []byte + if t == linkNormal || t == linkImg { + if len(link) > 0 { + var uLinkBuf bytes.Buffer + unescapeText(&uLinkBuf, link) + uLink = uLinkBuf.Bytes() + } + + // links need something to click on and somewhere to go + if len(uLink) == 0 || (t == linkNormal && txtE <= 1) { + return 0, nil + } + } + + // call the relevant rendering function + var linkNode *Node + switch t { + case linkNormal: + linkNode = NewNode(Link) + linkNode.Destination = normalizeURI(uLink) + linkNode.Title = title + if len(altContent) > 0 { + linkNode.AppendChild(text(altContent)) + } else { + // links cannot contain other links, so turn off link parsing + // temporarily and recurse + insideLink := p.insideLink + p.insideLink = true + p.inline(linkNode, data[1:txtE]) + p.insideLink = insideLink + } + + case linkImg: + linkNode = NewNode(Image) + linkNode.Destination = uLink + linkNode.Title = title + linkNode.AppendChild(text(data[1:txtE])) + i++ + + case linkInlineFootnote, linkDeferredFootnote: + linkNode = NewNode(Link) + linkNode.Destination = link + linkNode.Title = title + linkNode.NoteID = noteID + linkNode.Footnote = footnoteNode + if t == linkInlineFootnote { + i++ + } + + default: + return 0, nil + } + + return i, linkNode +} + +func (p *Markdown) inlineHTMLComment(data []byte) int { + if len(data) < 5 { + return 0 + } + if data[0] != '<' || data[1] != '!' || data[2] != '-' || data[3] != '-' { + return 0 + } + i := 5 + // scan for an end-of-comment marker, across lines if necessary + for i < len(data) && !(data[i-2] == '-' && data[i-1] == '-' && data[i] == '>') { + i++ + } + // no end-of-comment marker + if i >= len(data) { + return 0 + } + return i + 1 +} + +func stripMailto(link []byte) []byte { + if bytes.HasPrefix(link, []byte("mailto://")) { + return link[9:] + } else if bytes.HasPrefix(link, []byte("mailto:")) { + return link[7:] + } else { + return link + } +} + +// autolinkType specifies a kind of autolink that gets detected. +type autolinkType int + +// These are the possible flag values for the autolink renderer. +const ( + notAutolink autolinkType = iota + normalAutolink + emailAutolink +) + +// '<' when tags or autolinks are allowed +func leftAngle(p *Markdown, data []byte, offset int) (int, *Node) { + data = data[offset:] + altype, end := tagLength(data) + if size := p.inlineHTMLComment(data); size > 0 { + end = size + } + if end > 2 { + if altype != notAutolink { + var uLink bytes.Buffer + unescapeText(&uLink, data[1:end+1-2]) + if uLink.Len() > 0 { + link := uLink.Bytes() + node := NewNode(Link) + node.Destination = link + if altype == emailAutolink { + node.Destination = append([]byte("mailto:"), link...) 
+ } + node.AppendChild(text(stripMailto(link))) + return end, node + } + } else { + htmlTag := NewNode(HTMLSpan) + htmlTag.Literal = data[:end] + return end, htmlTag + } + } + + return end, nil +} + +// '\\' backslash escape +var escapeChars = []byte("\\`*_{}[]()#+-.!:|&<>~") + +func escape(p *Markdown, data []byte, offset int) (int, *Node) { + data = data[offset:] + + if len(data) > 1 { + if p.extensions&BackslashLineBreak != 0 && data[1] == '\n' { + return 2, NewNode(Hardbreak) + } + if bytes.IndexByte(escapeChars, data[1]) < 0 { + return 0, nil + } + + return 2, text(data[1:2]) + } + + return 2, nil +} + +func unescapeText(ob *bytes.Buffer, src []byte) { + i := 0 + for i < len(src) { + org := i + for i < len(src) && src[i] != '\\' { + i++ + } + + if i > org { + ob.Write(src[org:i]) + } + + if i+1 >= len(src) { + break + } + + ob.WriteByte(src[i+1]) + i += 2 + } +} + +// '&' escaped when it doesn't belong to an entity +// valid entities are assumed to be anything matching &#?[A-Za-z0-9]+; +func entity(p *Markdown, data []byte, offset int) (int, *Node) { + data = data[offset:] + + end := 1 + + if end < len(data) && data[end] == '#' { + end++ + } + + for end < len(data) && isalnum(data[end]) { + end++ + } + + if end < len(data) && data[end] == ';' { + end++ // real entity + } else { + return 0, nil // lone '&' + } + + ent := data[:end] + // undo & escaping or it will be converted to &amp; by another + // escaper in the renderer + if bytes.Equal(ent, []byte("&")) { + ent = []byte{'&'} + } + + return end, text(ent) +} + +func linkEndsWithEntity(data []byte, linkEnd int) bool { + entityRanges := htmlEntityRe.FindAllIndex(data[:linkEnd], -1) + return entityRanges != nil && entityRanges[len(entityRanges)-1][1] == linkEnd +} + +// hasPrefixCaseInsensitive is a custom implementation of +// strings.HasPrefix(strings.ToLower(s), prefix) +// we rolled our own because ToLower pulls in a huge machinery of lowercasing +// anything from Unicode and that's very slow. Since this func will only be +// used on ASCII protocol prefixes, we can take shortcuts. 
+func hasPrefixCaseInsensitive(s, prefix []byte) bool { + if len(s) < len(prefix) { + return false + } + delta := byte('a' - 'A') + for i, b := range prefix { + if b != s[i] && b != s[i]+delta { + return false + } + } + return true +} + +var protocolPrefixes = [][]byte{ + []byte("http://"), + []byte("https://"), + []byte("ftp://"), + []byte("file://"), + []byte("mailto:"), +} + +const shortestPrefix = 6 // len("ftp://"), the shortest of the above + +func maybeAutoLink(p *Markdown, data []byte, offset int) (int, *Node) { + // quick check to rule out most false hits + if p.insideLink || len(data) < offset+shortestPrefix { + return 0, nil + } + for _, prefix := range protocolPrefixes { + endOfHead := offset + 8 // 8 is the len() of the longest prefix + if endOfHead > len(data) { + endOfHead = len(data) + } + if hasPrefixCaseInsensitive(data[offset:endOfHead], prefix) { + return autoLink(p, data, offset) + } + } + return 0, nil +} + +func autoLink(p *Markdown, data []byte, offset int) (int, *Node) { + // Now a more expensive check to see if we're not inside an anchor element + anchorStart := offset + offsetFromAnchor := 0 + for anchorStart > 0 && data[anchorStart] != '<' { + anchorStart-- + offsetFromAnchor++ + } + + anchorStr := anchorRe.Find(data[anchorStart:]) + if anchorStr != nil { + anchorClose := NewNode(HTMLSpan) + anchorClose.Literal = anchorStr[offsetFromAnchor:] + return len(anchorStr) - offsetFromAnchor, anchorClose + } + + // scan backward for a word boundary + rewind := 0 + for offset-rewind > 0 && rewind <= 7 && isletter(data[offset-rewind-1]) { + rewind++ + } + if rewind > 6 { // longest supported protocol is "mailto" which has 6 letters + return 0, nil + } + + origData := data + data = data[offset-rewind:] + + if !isSafeLink(data) { + return 0, nil + } + + linkEnd := 0 + for linkEnd < len(data) && !isEndOfLink(data[linkEnd]) { + linkEnd++ + } + + // Skip punctuation at the end of the link + if (data[linkEnd-1] == '.' || data[linkEnd-1] == ',') && data[linkEnd-2] != '\\' { + linkEnd-- + } + + // But don't skip semicolon if it's a part of escaped entity: + if data[linkEnd-1] == ';' && data[linkEnd-2] != '\\' && !linkEndsWithEntity(data, linkEnd) { + linkEnd-- + } + + // See if the link finishes with a punctuation sign that can be closed. + var copen byte + switch data[linkEnd-1] { + case '"': + copen = '"' + case '\'': + copen = '\'' + case ')': + copen = '(' + case ']': + copen = '[' + case '}': + copen = '{' + default: + copen = 0 + } + + if copen != 0 { + bufEnd := offset - rewind + linkEnd - 2 + + openDelim := 1 + + /* Try to close the final punctuation sign in this same line; + * if we managed to close it outside of the URL, that means that it's + * not part of the URL. If it closes inside the URL, that means it + * is part of the URL. 
+ * + * Examples: + * + * foo http://www.pokemon.com/Pikachu_(Electric) bar + * => http://www.pokemon.com/Pikachu_(Electric) + * + * foo (http://www.pokemon.com/Pikachu_(Electric)) bar + * => http://www.pokemon.com/Pikachu_(Electric) + * + * foo http://www.pokemon.com/Pikachu_(Electric)) bar + * => http://www.pokemon.com/Pikachu_(Electric)) + * + * (foo http://www.pokemon.com/Pikachu_(Electric)) bar + * => foo http://www.pokemon.com/Pikachu_(Electric) + */ + + for bufEnd >= 0 && origData[bufEnd] != '\n' && openDelim != 0 { + if origData[bufEnd] == data[linkEnd-1] { + openDelim++ + } + + if origData[bufEnd] == copen { + openDelim-- + } + + bufEnd-- + } + + if openDelim == 0 { + linkEnd-- + } + } + + var uLink bytes.Buffer + unescapeText(&uLink, data[:linkEnd]) + + if uLink.Len() > 0 { + node := NewNode(Link) + node.Destination = uLink.Bytes() + node.AppendChild(text(uLink.Bytes())) + return linkEnd, node + } + + return linkEnd, nil +} + +func isEndOfLink(char byte) bool { + return isspace(char) || char == '<' +} + +var validUris = [][]byte{[]byte("http://"), []byte("https://"), []byte("ftp://"), []byte("mailto://")} +var validPaths = [][]byte{[]byte("/"), []byte("./"), []byte("../")} + +func isSafeLink(link []byte) bool { + for _, path := range validPaths { + if len(link) >= len(path) && bytes.Equal(link[:len(path)], path) { + if len(link) == len(path) { + return true + } else if isalnum(link[len(path)]) { + return true + } + } + } + + for _, prefix := range validUris { + // TODO: handle unicode here + // case-insensitive prefix test + if len(link) > len(prefix) && bytes.Equal(bytes.ToLower(link[:len(prefix)]), prefix) && isalnum(link[len(prefix)]) { + return true + } + } + + return false +} + +// return the length of the given tag, or 0 is it's not valid +func tagLength(data []byte) (autolink autolinkType, end int) { + var i, j int + + // a valid tag can't be shorter than 3 chars + if len(data) < 3 { + return notAutolink, 0 + } + + // begins with a '<' optionally followed by '/', followed by letter or number + if data[0] != '<' { + return notAutolink, 0 + } + if data[1] == '/' { + i = 2 + } else { + i = 1 + } + + if !isalnum(data[i]) { + return notAutolink, 0 + } + + // scheme test + autolink = notAutolink + + // try to find the beginning of an URI + for i < len(data) && (isalnum(data[i]) || data[i] == '.' 
|| data[i] == '+' || data[i] == '-') { + i++ + } + + if i > 1 && i < len(data) && data[i] == '@' { + if j = isMailtoAutoLink(data[i:]); j != 0 { + return emailAutolink, i + j + } + } + + if i > 2 && i < len(data) && data[i] == ':' { + autolink = normalAutolink + i++ + } + + // complete autolink test: no whitespace or ' or " + switch { + case i >= len(data): + autolink = notAutolink + case autolink != notAutolink: + j = i + + for i < len(data) { + if data[i] == '\\' { + i += 2 + } else if data[i] == '>' || data[i] == '\'' || data[i] == '"' || isspace(data[i]) { + break + } else { + i++ + } + + } + + if i >= len(data) { + return autolink, 0 + } + if i > j && data[i] == '>' { + return autolink, i + 1 + } + + // one of the forbidden chars has been found + autolink = notAutolink + } + i += bytes.IndexByte(data[i:], '>') + if i < 0 { + return autolink, 0 + } + return autolink, i + 1 +} + +// look for the address part of a mail autolink and '>' +// this is less strict than the original markdown e-mail address matching +func isMailtoAutoLink(data []byte) int { + nb := 0 + + // address is assumed to be: [-@._a-zA-Z0-9]+ with exactly one '@' + for i := 0; i < len(data); i++ { + if isalnum(data[i]) { + continue + } + + switch data[i] { + case '@': + nb++ + + case '-', '.', '_': + break + + case '>': + if nb == 1 { + return i + 1 + } + return 0 + default: + return 0 + } + } + + return 0 +} + +// look for the next emph char, skipping other constructs +func helperFindEmphChar(data []byte, c byte) int { + i := 0 + + for i < len(data) { + for i < len(data) && data[i] != c && data[i] != '`' && data[i] != '[' { + i++ + } + if i >= len(data) { + return 0 + } + // do not count escaped chars + if i != 0 && data[i-1] == '\\' { + i++ + continue + } + if data[i] == c { + return i + } + + if data[i] == '`' { + // skip a code span + tmpI := 0 + i++ + for i < len(data) && data[i] != '`' { + if tmpI == 0 && data[i] == c { + tmpI = i + } + i++ + } + if i >= len(data) { + return tmpI + } + i++ + } else if data[i] == '[' { + // skip a link + tmpI := 0 + i++ + for i < len(data) && data[i] != ']' { + if tmpI == 0 && data[i] == c { + tmpI = i + } + i++ + } + i++ + for i < len(data) && (data[i] == ' ' || data[i] == '\n') { + i++ + } + if i >= len(data) { + return tmpI + } + if data[i] != '[' && data[i] != '(' { // not a link + if tmpI > 0 { + return tmpI + } + continue + } + cc := data[i] + i++ + for i < len(data) && data[i] != cc { + if tmpI == 0 && data[i] == c { + return i + } + i++ + } + if i >= len(data) { + return tmpI + } + i++ + } + } + return 0 +} + +func helperEmphasis(p *Markdown, data []byte, c byte) (int, *Node) { + i := 0 + + // skip one symbol if coming from emph3 + if len(data) > 1 && data[0] == c && data[1] == c { + i = 1 + } + + for i < len(data) { + length := helperFindEmphChar(data[i:], c) + if length == 0 { + return 0, nil + } + i += length + if i >= len(data) { + return 0, nil + } + + if i+1 < len(data) && data[i+1] == c { + i++ + continue + } + + if data[i] == c && !isspace(data[i-1]) { + + if p.extensions&NoIntraEmphasis != 0 { + if !(i+1 == len(data) || isspace(data[i+1]) || ispunct(data[i+1])) { + continue + } + } + + emph := NewNode(Emph) + p.inline(emph, data[:i]) + return i + 1, emph + } + } + + return 0, nil +} + +func helperDoubleEmphasis(p *Markdown, data []byte, c byte) (int, *Node) { + i := 0 + + for i < len(data) { + length := helperFindEmphChar(data[i:], c) + if length == 0 { + return 0, nil + } + i += length + + if i+1 < len(data) && data[i] == c && data[i+1] == c && i > 0 && 
!isspace(data[i-1]) { + nodeType := Strong + if c == '~' { + nodeType = Del + } + node := NewNode(nodeType) + p.inline(node, data[:i]) + return i + 2, node + } + i++ + } + return 0, nil +} + +func helperTripleEmphasis(p *Markdown, data []byte, offset int, c byte) (int, *Node) { + i := 0 + origData := data + data = data[offset:] + + for i < len(data) { + length := helperFindEmphChar(data[i:], c) + if length == 0 { + return 0, nil + } + i += length + + // skip whitespace preceded symbols + if data[i] != c || isspace(data[i-1]) { + continue + } + + switch { + case i+2 < len(data) && data[i+1] == c && data[i+2] == c: + // triple symbol found + strong := NewNode(Strong) + em := NewNode(Emph) + strong.AppendChild(em) + p.inline(em, data[:i]) + return i + 3, strong + case (i+1 < len(data) && data[i+1] == c): + // double symbol found, hand over to emph1 + length, node := helperEmphasis(p, origData[offset-2:], c) + if length == 0 { + return 0, nil + } + return length - 2, node + default: + // single symbol found, hand over to emph2 + length, node := helperDoubleEmphasis(p, origData[offset-1:], c) + if length == 0 { + return 0, nil + } + return length - 1, node + } + } + return 0, nil +} + +func text(s []byte) *Node { + node := NewNode(Text) + node.Literal = s + return node +} + +func normalizeURI(s []byte) []byte { + return s // TODO: implement +} diff --git a/vendor/github.com/russross/blackfriday/v2/markdown.go b/vendor/github.com/russross/blackfriday/v2/markdown.go new file mode 100644 index 000000000..58d2e4538 --- /dev/null +++ b/vendor/github.com/russross/blackfriday/v2/markdown.go @@ -0,0 +1,950 @@ +// Blackfriday Markdown Processor +// Available at http://github.com/russross/blackfriday +// +// Copyright © 2011 Russ Ross . +// Distributed under the Simplified BSD License. +// See README.md for details. + +package blackfriday + +import ( + "bytes" + "fmt" + "io" + "strings" + "unicode/utf8" +) + +// +// Markdown parsing and processing +// + +// Version string of the package. Appears in the rendered document when +// CompletePage flag is on. +const Version = "2.0" + +// Extensions is a bitwise or'ed collection of enabled Blackfriday's +// extensions. +type Extensions int + +// These are the supported markdown parsing extensions. +// OR these values together to select multiple extensions. 
+const ( + NoExtensions Extensions = 0 + NoIntraEmphasis Extensions = 1 << iota // Ignore emphasis markers inside words + Tables // Render tables + FencedCode // Render fenced code blocks + Autolink // Detect embedded URLs that are not explicitly marked + Strikethrough // Strikethrough text using ~~test~~ + LaxHTMLBlocks // Loosen up HTML block parsing rules + SpaceHeadings // Be strict about prefix heading rules + HardLineBreak // Translate newlines into line breaks + TabSizeEight // Expand tabs to eight spaces instead of four + Footnotes // Pandoc-style footnotes + NoEmptyLineBeforeBlock // No need to insert an empty line to start a (code, quote, ordered list, unordered list) block + HeadingIDs // specify heading IDs with {#id} + Titleblock // Titleblock ala pandoc + AutoHeadingIDs // Create the heading ID from the text + BackslashLineBreak // Translate trailing backslashes into line breaks + DefinitionLists // Render definition lists + + CommonHTMLFlags HTMLFlags = UseXHTML | Smartypants | + SmartypantsFractions | SmartypantsDashes | SmartypantsLatexDashes + + CommonExtensions Extensions = NoIntraEmphasis | Tables | FencedCode | + Autolink | Strikethrough | SpaceHeadings | HeadingIDs | + BackslashLineBreak | DefinitionLists +) + +// ListType contains bitwise or'ed flags for list and list item objects. +type ListType int + +// These are the possible flag values for the ListItem renderer. +// Multiple flag values may be ORed together. +// These are mostly of interest if you are writing a new output format. +const ( + ListTypeOrdered ListType = 1 << iota + ListTypeDefinition + ListTypeTerm + + ListItemContainsBlock + ListItemBeginningOfList // TODO: figure out if this is of any use now + ListItemEndOfList +) + +// CellAlignFlags holds a type of alignment in a table cell. +type CellAlignFlags int + +// These are the possible flag values for the table cell renderer. +// Only a single one of these values will be used; they are not ORed together. +// These are mostly of interest if you are writing a new output format. +const ( + TableAlignmentLeft CellAlignFlags = 1 << iota + TableAlignmentRight + TableAlignmentCenter = (TableAlignmentLeft | TableAlignmentRight) +) + +// The size of a tab stop. +const ( + TabSizeDefault = 4 + TabSizeDouble = 8 +) + +// blockTags is a set of tags that are recognized as HTML block tags. +// Any of these can be included in markdown text without special escaping. +var blockTags = map[string]struct{}{ + "blockquote": {}, + "del": {}, + "div": {}, + "dl": {}, + "fieldset": {}, + "form": {}, + "h1": {}, + "h2": {}, + "h3": {}, + "h4": {}, + "h5": {}, + "h6": {}, + "iframe": {}, + "ins": {}, + "math": {}, + "noscript": {}, + "ol": {}, + "pre": {}, + "p": {}, + "script": {}, + "style": {}, + "table": {}, + "ul": {}, + + // HTML5 + "address": {}, + "article": {}, + "aside": {}, + "canvas": {}, + "figcaption": {}, + "figure": {}, + "footer": {}, + "header": {}, + "hgroup": {}, + "main": {}, + "nav": {}, + "output": {}, + "progress": {}, + "section": {}, + "video": {}, +} + +// Renderer is the rendering interface. This is mostly of interest if you are +// implementing a new rendering format. +// +// Only an HTML implementation is provided in this repository, see the README +// for external implementations. +type Renderer interface { + // RenderNode is the main rendering method. It will be called once for + // every leaf node and twice for every non-leaf node (first with + // entering=true, then with entering=false). 
The method should write its + // rendition of the node to the supplied writer w. + RenderNode(w io.Writer, node *Node, entering bool) WalkStatus + + // RenderHeader is a method that allows the renderer to produce some + // content preceding the main body of the output document. The header is + // understood in the broad sense here. For example, the default HTML + // renderer will write not only the HTML document preamble, but also the + // table of contents if it was requested. + // + // The method will be passed an entire document tree, in case a particular + // implementation needs to inspect it to produce output. + // + // The output should be written to the supplied writer w. If your + // implementation has no header to write, supply an empty implementation. + RenderHeader(w io.Writer, ast *Node) + + // RenderFooter is a symmetric counterpart of RenderHeader. + RenderFooter(w io.Writer, ast *Node) +} + +// Callback functions for inline parsing. One such function is defined +// for each character that triggers a response when parsing inline data. +type inlineParser func(p *Markdown, data []byte, offset int) (int, *Node) + +// Markdown is a type that holds extensions and the runtime state used by +// Parse, and the renderer. You can not use it directly, construct it with New. +type Markdown struct { + renderer Renderer + referenceOverride ReferenceOverrideFunc + refs map[string]*reference + inlineCallback [256]inlineParser + extensions Extensions + nesting int + maxNesting int + insideLink bool + + // Footnotes need to be ordered as well as available to quickly check for + // presence. If a ref is also a footnote, it's stored both in refs and here + // in notes. Slice is nil if footnotes not enabled. + notes []*reference + + doc *Node + tip *Node // = doc + oldTip *Node + lastMatchedContainer *Node // = doc + allClosed bool +} + +func (p *Markdown) getRef(refid string) (ref *reference, found bool) { + if p.referenceOverride != nil { + r, overridden := p.referenceOverride(refid) + if overridden { + if r == nil { + return nil, false + } + return &reference{ + link: []byte(r.Link), + title: []byte(r.Title), + noteID: 0, + hasBlock: false, + text: []byte(r.Text)}, true + } + } + // refs are case insensitive + ref, found = p.refs[strings.ToLower(refid)] + return ref, found +} + +func (p *Markdown) finalize(block *Node) { + above := block.Parent + block.open = false + p.tip = above +} + +func (p *Markdown) addChild(node NodeType, offset uint32) *Node { + return p.addExistingChild(NewNode(node), offset) +} + +func (p *Markdown) addExistingChild(node *Node, offset uint32) *Node { + for !p.tip.canContain(node.Type) { + p.finalize(p.tip) + } + p.tip.AppendChild(node) + p.tip = node + return node +} + +func (p *Markdown) closeUnmatchedBlocks() { + if !p.allClosed { + for p.oldTip != p.lastMatchedContainer { + parent := p.oldTip.Parent + p.finalize(p.oldTip) + p.oldTip = parent + } + p.allClosed = true + } +} + +// +// +// Public interface +// +// + +// Reference represents the details of a link. +// See the documentation in Options for more details on use-case. +type Reference struct { + // Link is usually the URL the reference points to. + Link string + // Title is the alternate text describing the link in more detail. 
+ Title string + // Text is the optional text to override the ref with if the syntax used was + // [refid][] + Text string +} + +// ReferenceOverrideFunc is expected to be called with a reference string and +// return either a valid Reference type that the reference string maps to or +// nil. If overridden is false, the default reference logic will be executed. +// See the documentation in Options for more details on use-case. +type ReferenceOverrideFunc func(reference string) (ref *Reference, overridden bool) + +// New constructs a Markdown processor. You can use the same With* functions as +// for Run() to customize parser's behavior and the renderer. +func New(opts ...Option) *Markdown { + var p Markdown + for _, opt := range opts { + opt(&p) + } + p.refs = make(map[string]*reference) + p.maxNesting = 16 + p.insideLink = false + docNode := NewNode(Document) + p.doc = docNode + p.tip = docNode + p.oldTip = docNode + p.lastMatchedContainer = docNode + p.allClosed = true + // register inline parsers + p.inlineCallback[' '] = maybeLineBreak + p.inlineCallback['*'] = emphasis + p.inlineCallback['_'] = emphasis + if p.extensions&Strikethrough != 0 { + p.inlineCallback['~'] = emphasis + } + p.inlineCallback['`'] = codeSpan + p.inlineCallback['\n'] = lineBreak + p.inlineCallback['['] = link + p.inlineCallback['<'] = leftAngle + p.inlineCallback['\\'] = escape + p.inlineCallback['&'] = entity + p.inlineCallback['!'] = maybeImage + p.inlineCallback['^'] = maybeInlineFootnote + if p.extensions&Autolink != 0 { + p.inlineCallback['h'] = maybeAutoLink + p.inlineCallback['m'] = maybeAutoLink + p.inlineCallback['f'] = maybeAutoLink + p.inlineCallback['H'] = maybeAutoLink + p.inlineCallback['M'] = maybeAutoLink + p.inlineCallback['F'] = maybeAutoLink + } + if p.extensions&Footnotes != 0 { + p.notes = make([]*reference, 0) + } + return &p +} + +// Option customizes the Markdown processor's default behavior. +type Option func(*Markdown) + +// WithRenderer allows you to override the default renderer. +func WithRenderer(r Renderer) Option { + return func(p *Markdown) { + p.renderer = r + } +} + +// WithExtensions allows you to pick some of the many extensions provided by +// Blackfriday. You can bitwise OR them. +func WithExtensions(e Extensions) Option { + return func(p *Markdown) { + p.extensions = e + } +} + +// WithNoExtensions turns off all extensions and custom behavior. +func WithNoExtensions() Option { + return func(p *Markdown) { + p.extensions = NoExtensions + p.renderer = NewHTMLRenderer(HTMLRendererParameters{ + Flags: HTMLFlagsNone, + }) + } +} + +// WithRefOverride sets an optional function callback that is called every +// time a reference is resolved. +// +// In Markdown, the link reference syntax can be made to resolve a link to +// a reference instead of an inline URL, in one of the following ways: +// +// * [link text][refid] +// * [refid][] +// +// Usually, the refid is defined at the bottom of the Markdown document. If +// this override function is provided, the refid is passed to the override +// function first, before consulting the defined refids at the bottom. If +// the override function indicates an override did not occur, the refids at +// the bottom will be used to fill in the link details. +func WithRefOverride(o ReferenceOverrideFunc) Option { + return func(p *Markdown) { + p.referenceOverride = o + } +} + +// Run is the main entry point to Blackfriday. It parses and renders a +// block of markdown-encoded text. 
+// +// The simplest invocation of Run takes one argument, input: +// output := Run(input) +// This will parse the input with CommonExtensions enabled and render it with +// the default HTMLRenderer (with CommonHTMLFlags). +// +// Variadic arguments opts can customize the default behavior. Since Markdown +// type does not contain exported fields, you can not use it directly. Instead, +// use the With* functions. For example, this will call the most basic +// functionality, with no extensions: +// output := Run(input, WithNoExtensions()) +// +// You can use any number of With* arguments, even contradicting ones. They +// will be applied in order of appearance and the latter will override the +// former: +// output := Run(input, WithNoExtensions(), WithExtensions(exts), +// WithRenderer(yourRenderer)) +func Run(input []byte, opts ...Option) []byte { + r := NewHTMLRenderer(HTMLRendererParameters{ + Flags: CommonHTMLFlags, + }) + optList := []Option{WithRenderer(r), WithExtensions(CommonExtensions)} + optList = append(optList, opts...) + parser := New(optList...) + ast := parser.Parse(input) + var buf bytes.Buffer + parser.renderer.RenderHeader(&buf, ast) + ast.Walk(func(node *Node, entering bool) WalkStatus { + return parser.renderer.RenderNode(&buf, node, entering) + }) + parser.renderer.RenderFooter(&buf, ast) + return buf.Bytes() +} + +// Parse is an entry point to the parsing part of Blackfriday. It takes an +// input markdown document and produces a syntax tree for its contents. This +// tree can then be rendered with a default or custom renderer, or +// analyzed/transformed by the caller to whatever non-standard needs they have. +// The return value is the root node of the syntax tree. +func (p *Markdown) Parse(input []byte) *Node { + p.block(input) + // Walk the tree and finish up some of unfinished blocks + for p.tip != nil { + p.finalize(p.tip) + } + // Walk the tree again and process inline markdown in each block + p.doc.Walk(func(node *Node, entering bool) WalkStatus { + if node.Type == Paragraph || node.Type == Heading || node.Type == TableCell { + p.inline(node, node.content) + node.content = nil + } + return GoToNext + }) + p.parseRefsToAST() + return p.doc +} + +func (p *Markdown) parseRefsToAST() { + if p.extensions&Footnotes == 0 || len(p.notes) == 0 { + return + } + p.tip = p.doc + block := p.addBlock(List, nil) + block.IsFootnotesList = true + block.ListFlags = ListTypeOrdered + flags := ListItemBeginningOfList + // Note: this loop is intentionally explicit, not range-form. This is + // because the body of the loop will append nested footnotes to p.notes and + // we need to process those late additions. Range form would only walk over + // the fixed initial set. + for i := 0; i < len(p.notes); i++ { + ref := p.notes[i] + p.addExistingChild(ref.footnote, 0) + block := ref.footnote + block.ListFlags = flags | ListTypeOrdered + block.RefLink = ref.link + if ref.hasBlock { + flags |= ListItemContainsBlock + p.block(ref.title) + } else { + p.inline(block, ref.title) + } + flags &^= ListItemBeginningOfList | ListItemContainsBlock + } + above := block.Parent + finalizeList(block) + p.tip = above + block.Walk(func(node *Node, entering bool) WalkStatus { + if node.Type == Paragraph || node.Type == Heading { + p.inline(node, node.content) + node.content = nil + } + return GoToNext + }) +} + +// +// Link references +// +// This section implements support for references that (usually) appear +// as footnotes in a document, and can be referenced anywhere in the document. 
+// The basic format is: +// +// [1]: http://www.google.com/ "Google" +// [2]: http://www.github.com/ "Github" +// +// Anywhere in the document, the reference can be linked by referring to its +// label, i.e., 1 and 2 in this example, as in: +// +// This library is hosted on [Github][2], a git hosting site. +// +// Actual footnotes as specified in Pandoc and supported by some other Markdown +// libraries such as php-markdown are also taken care of. They look like this: +// +// This sentence needs a bit of further explanation.[^note] +// +// [^note]: This is the explanation. +// +// Footnotes should be placed at the end of the document in an ordered list. +// Finally, there are inline footnotes such as: +// +// Inline footnotes^[Also supported.] provide a quick inline explanation, +// but are rendered at the bottom of the document. +// + +// reference holds all information necessary for a reference-style links or +// footnotes. +// +// Consider this markdown with reference-style links: +// +// [link][ref] +// +// [ref]: /url/ "tooltip title" +// +// It will be ultimately converted to this HTML: +// +//

+//	<p><a href="/url/" title="tooltip title">link</a></p>

    +// +// And a reference structure will be populated as follows: +// +// p.refs["ref"] = &reference{ +// link: "/url/", +// title: "tooltip title", +// } +// +// Alternatively, reference can contain information about a footnote. Consider +// this markdown: +// +// Text needing a footnote.[^a] +// +// [^a]: This is the note +// +// A reference structure will be populated as follows: +// +// p.refs["a"] = &reference{ +// link: "a", +// title: "This is the note", +// noteID: , +// } +// +// TODO: As you can see, it begs for splitting into two dedicated structures +// for refs and for footnotes. +type reference struct { + link []byte + title []byte + noteID int // 0 if not a footnote ref + hasBlock bool + footnote *Node // a link to the Item node within a list of footnotes + + text []byte // only gets populated by refOverride feature with Reference.Text +} + +func (r *reference) String() string { + return fmt.Sprintf("{link: %q, title: %q, text: %q, noteID: %d, hasBlock: %v}", + r.link, r.title, r.text, r.noteID, r.hasBlock) +} + +// Check whether or not data starts with a reference link. +// If so, it is parsed and stored in the list of references +// (in the render struct). +// Returns the number of bytes to skip to move past it, +// or zero if the first line is not a reference. +func isReference(p *Markdown, data []byte, tabSize int) int { + // up to 3 optional leading spaces + if len(data) < 4 { + return 0 + } + i := 0 + for i < 3 && data[i] == ' ' { + i++ + } + + noteID := 0 + + // id part: anything but a newline between brackets + if data[i] != '[' { + return 0 + } + i++ + if p.extensions&Footnotes != 0 { + if i < len(data) && data[i] == '^' { + // we can set it to anything here because the proper noteIds will + // be assigned later during the second pass. It just has to be != 0 + noteID = 1 + i++ + } + } + idOffset := i + for i < len(data) && data[i] != '\n' && data[i] != '\r' && data[i] != ']' { + i++ + } + if i >= len(data) || data[i] != ']' { + return 0 + } + idEnd := i + // footnotes can have empty ID, like this: [^], but a reference can not be + // empty like this: []. Break early if it's not a footnote and there's no ID + if noteID == 0 && idOffset == idEnd { + return 0 + } + // spacer: colon (space | tab)* newline? 
(space | tab)* + i++ + if i >= len(data) || data[i] != ':' { + return 0 + } + i++ + for i < len(data) && (data[i] == ' ' || data[i] == '\t') { + i++ + } + if i < len(data) && (data[i] == '\n' || data[i] == '\r') { + i++ + if i < len(data) && data[i] == '\n' && data[i-1] == '\r' { + i++ + } + } + for i < len(data) && (data[i] == ' ' || data[i] == '\t') { + i++ + } + if i >= len(data) { + return 0 + } + + var ( + linkOffset, linkEnd int + titleOffset, titleEnd int + lineEnd int + raw []byte + hasBlock bool + ) + + if p.extensions&Footnotes != 0 && noteID != 0 { + linkOffset, linkEnd, raw, hasBlock = scanFootnote(p, data, i, tabSize) + lineEnd = linkEnd + } else { + linkOffset, linkEnd, titleOffset, titleEnd, lineEnd = scanLinkRef(p, data, i) + } + if lineEnd == 0 { + return 0 + } + + // a valid ref has been found + + ref := &reference{ + noteID: noteID, + hasBlock: hasBlock, + } + + if noteID > 0 { + // reusing the link field for the id since footnotes don't have links + ref.link = data[idOffset:idEnd] + // if footnote, it's not really a title, it's the contained text + ref.title = raw + } else { + ref.link = data[linkOffset:linkEnd] + ref.title = data[titleOffset:titleEnd] + } + + // id matches are case-insensitive + id := string(bytes.ToLower(data[idOffset:idEnd])) + + p.refs[id] = ref + + return lineEnd +} + +func scanLinkRef(p *Markdown, data []byte, i int) (linkOffset, linkEnd, titleOffset, titleEnd, lineEnd int) { + // link: whitespace-free sequence, optionally between angle brackets + if data[i] == '<' { + i++ + } + linkOffset = i + for i < len(data) && data[i] != ' ' && data[i] != '\t' && data[i] != '\n' && data[i] != '\r' { + i++ + } + linkEnd = i + if data[linkOffset] == '<' && data[linkEnd-1] == '>' { + linkOffset++ + linkEnd-- + } + + // optional spacer: (space | tab)* (newline | '\'' | '"' | '(' ) + for i < len(data) && (data[i] == ' ' || data[i] == '\t') { + i++ + } + if i < len(data) && data[i] != '\n' && data[i] != '\r' && data[i] != '\'' && data[i] != '"' && data[i] != '(' { + return + } + + // compute end-of-line + if i >= len(data) || data[i] == '\r' || data[i] == '\n' { + lineEnd = i + } + if i+1 < len(data) && data[i] == '\r' && data[i+1] == '\n' { + lineEnd++ + } + + // optional (space|tab)* spacer after a newline + if lineEnd > 0 { + i = lineEnd + 1 + for i < len(data) && (data[i] == ' ' || data[i] == '\t') { + i++ + } + } + + // optional title: any non-newline sequence enclosed in '"() alone on its line + if i+1 < len(data) && (data[i] == '\'' || data[i] == '"' || data[i] == '(') { + i++ + titleOffset = i + + // look for EOL + for i < len(data) && data[i] != '\n' && data[i] != '\r' { + i++ + } + if i+1 < len(data) && data[i] == '\n' && data[i+1] == '\r' { + titleEnd = i + 1 + } else { + titleEnd = i + } + + // step back + i-- + for i > titleOffset && (data[i] == ' ' || data[i] == '\t') { + i-- + } + if i > titleOffset && (data[i] == '\'' || data[i] == '"' || data[i] == ')') { + lineEnd = titleEnd + titleEnd = i + } + } + + return +} + +// The first bit of this logic is the same as Parser.listItem, but the rest +// is much simpler. This function simply finds the entire block and shifts it +// over by one tab if it is indeed a block (just returns the line if it's not). +// blockEnd is the end of the section in the input buffer, and contents is the +// extracted text that was shifted over one tab. It will need to be rendered at +// the end of the document. 
+func scanFootnote(p *Markdown, data []byte, i, indentSize int) (blockStart, blockEnd int, contents []byte, hasBlock bool) { + if i == 0 || len(data) == 0 { + return + } + + // skip leading whitespace on first line + for i < len(data) && data[i] == ' ' { + i++ + } + + blockStart = i + + // find the end of the line + blockEnd = i + for i < len(data) && data[i-1] != '\n' { + i++ + } + + // get working buffer + var raw bytes.Buffer + + // put the first line into the working buffer + raw.Write(data[blockEnd:i]) + blockEnd = i + + // process the following lines + containsBlankLine := false + +gatherLines: + for blockEnd < len(data) { + i++ + + // find the end of this line + for i < len(data) && data[i-1] != '\n' { + i++ + } + + // if it is an empty line, guess that it is part of this item + // and move on to the next line + if p.isEmpty(data[blockEnd:i]) > 0 { + containsBlankLine = true + blockEnd = i + continue + } + + n := 0 + if n = isIndented(data[blockEnd:i], indentSize); n == 0 { + // this is the end of the block. + // we don't want to include this last line in the index. + break gatherLines + } + + // if there were blank lines before this one, insert a new one now + if containsBlankLine { + raw.WriteByte('\n') + containsBlankLine = false + } + + // get rid of that first tab, write to buffer + raw.Write(data[blockEnd+n : i]) + hasBlock = true + + blockEnd = i + } + + if data[blockEnd-1] != '\n' { + raw.WriteByte('\n') + } + + contents = raw.Bytes() + + return +} + +// +// +// Miscellaneous helper functions +// +// + +// Test if a character is a punctuation symbol. +// Taken from a private function in regexp in the stdlib. +func ispunct(c byte) bool { + for _, r := range []byte("!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~") { + if c == r { + return true + } + } + return false +} + +// Test if a character is a whitespace character. +func isspace(c byte) bool { + return ishorizontalspace(c) || isverticalspace(c) +} + +// Test if a character is a horizontal whitespace character. +func ishorizontalspace(c byte) bool { + return c == ' ' || c == '\t' +} + +// Test if a character is a vertical character. +func isverticalspace(c byte) bool { + return c == '\n' || c == '\r' || c == '\f' || c == '\v' +} + +// Test if a character is letter. +func isletter(c byte) bool { + return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') +} + +// Test if a character is a letter or a digit. +// TODO: check when this is looking for ASCII alnum and when it should use unicode +func isalnum(c byte) bool { + return (c >= '0' && c <= '9') || isletter(c) +} + +// Replace tab characters with spaces, aligning to the next TAB_SIZE column. 
+// always ends output with a newline +func expandTabs(out *bytes.Buffer, line []byte, tabSize int) { + // first, check for common cases: no tabs, or only tabs at beginning of line + i, prefix := 0, 0 + slowcase := false + for i = 0; i < len(line); i++ { + if line[i] == '\t' { + if prefix == i { + prefix++ + } else { + slowcase = true + break + } + } + } + + // no need to decode runes if all tabs are at the beginning of the line + if !slowcase { + for i = 0; i < prefix*tabSize; i++ { + out.WriteByte(' ') + } + out.Write(line[prefix:]) + return + } + + // the slow case: we need to count runes to figure out how + // many spaces to insert for each tab + column := 0 + i = 0 + for i < len(line) { + start := i + for i < len(line) && line[i] != '\t' { + _, size := utf8.DecodeRune(line[i:]) + i += size + column++ + } + + if i > start { + out.Write(line[start:i]) + } + + if i >= len(line) { + break + } + + for { + out.WriteByte(' ') + column++ + if column%tabSize == 0 { + break + } + } + + i++ + } +} + +// Find if a line counts as indented or not. +// Returns number of characters the indent is (0 = not indented). +func isIndented(data []byte, indentSize int) int { + if len(data) == 0 { + return 0 + } + if data[0] == '\t' { + return 1 + } + if len(data) < indentSize { + return 0 + } + for i := 0; i < indentSize; i++ { + if data[i] != ' ' { + return 0 + } + } + return indentSize +} + +// Create a url-safe slug for fragments +func slugify(in []byte) []byte { + if len(in) == 0 { + return in + } + out := make([]byte, 0, len(in)) + sym := false + + for _, ch := range in { + if isalnum(ch) { + sym = false + out = append(out, ch) + } else if sym { + continue + } else { + out = append(out, '-') + sym = true + } + } + var a, b int + var ch byte + for a, ch = range out { + if ch != '-' { + break + } + } + for b = len(out) - 1; b > 0; b-- { + if out[b] != '-' { + break + } + } + return out[a : b+1] +} diff --git a/vendor/github.com/russross/blackfriday/v2/node.go b/vendor/github.com/russross/blackfriday/v2/node.go new file mode 100644 index 000000000..51b9e8c1b --- /dev/null +++ b/vendor/github.com/russross/blackfriday/v2/node.go @@ -0,0 +1,354 @@ +package blackfriday + +import ( + "bytes" + "fmt" +) + +// NodeType specifies a type of a single node of a syntax tree. Usually one +// node (and its type) corresponds to a single markdown feature, e.g. emphasis +// or code block. +type NodeType int + +// Constants for identifying different types of nodes. See NodeType. +const ( + Document NodeType = iota + BlockQuote + List + Item + Paragraph + Heading + HorizontalRule + Emph + Strong + Del + Link + Image + Text + HTMLBlock + CodeBlock + Softbreak + Hardbreak + Code + HTMLSpan + Table + TableCell + TableHead + TableBody + TableRow +) + +var nodeTypeNames = []string{ + Document: "Document", + BlockQuote: "BlockQuote", + List: "List", + Item: "Item", + Paragraph: "Paragraph", + Heading: "Heading", + HorizontalRule: "HorizontalRule", + Emph: "Emph", + Strong: "Strong", + Del: "Del", + Link: "Link", + Image: "Image", + Text: "Text", + HTMLBlock: "HTMLBlock", + CodeBlock: "CodeBlock", + Softbreak: "Softbreak", + Hardbreak: "Hardbreak", + Code: "Code", + HTMLSpan: "HTMLSpan", + Table: "Table", + TableCell: "TableCell", + TableHead: "TableHead", + TableBody: "TableBody", + TableRow: "TableRow", +} + +func (t NodeType) String() string { + return nodeTypeNames[t] +} + +// ListData contains fields relevant to a List and Item node type. +type ListData struct { + ListFlags ListType + Tight bool // Skip
<p>
    s around list item data if true + BulletChar byte // '*', '+' or '-' in bullet lists + Delimiter byte // '.' or ')' after the number in ordered lists + RefLink []byte // If not nil, turns this list item into a footnote item and triggers different rendering + IsFootnotesList bool // This is a list of footnotes +} + +// LinkData contains fields relevant to a Link node type. +type LinkData struct { + Destination []byte // Destination is what goes into a href + Title []byte // Title is the tooltip thing that goes in a title attribute + NoteID int // NoteID contains a serial number of a footnote, zero if it's not a footnote + Footnote *Node // If it's a footnote, this is a direct link to the footnote Node. Otherwise nil. +} + +// CodeBlockData contains fields relevant to a CodeBlock node type. +type CodeBlockData struct { + IsFenced bool // Specifies whether it's a fenced code block or an indented one + Info []byte // This holds the info string + FenceChar byte + FenceLength int + FenceOffset int +} + +// TableCellData contains fields relevant to a TableCell node type. +type TableCellData struct { + IsHeader bool // This tells if it's under the header row + Align CellAlignFlags // This holds the value for align attribute +} + +// HeadingData contains fields relevant to a Heading node type. +type HeadingData struct { + Level int // This holds the heading level number + HeadingID string // This might hold heading ID, if present + IsTitleblock bool // Specifies whether it's a title block +} + +// Node is a single element in the abstract syntax tree of the parsed document. +// It holds connections to the structurally neighboring nodes and, for certain +// types of nodes, additional information that might be needed when rendering. +type Node struct { + Type NodeType // Determines the type of the node + Parent *Node // Points to the parent + FirstChild *Node // Points to the first child, if any + LastChild *Node // Points to the last child, if any + Prev *Node // Previous sibling; nil if it's the first child + Next *Node // Next sibling; nil if it's the last child + + Literal []byte // Text contents of the leaf nodes + + HeadingData // Populated if Type is Heading + ListData // Populated if Type is List + CodeBlockData // Populated if Type is CodeBlock + LinkData // Populated if Type is Link + TableCellData // Populated if Type is TableCell + + content []byte // Markdown content of the block nodes + open bool // Specifies an open block node that has not been finished to process yet +} + +// NewNode allocates a node of a specified type. +func NewNode(typ NodeType) *Node { + return &Node{ + Type: typ, + open: true, + } +} + +func (n *Node) String() string { + ellipsis := "" + snippet := n.Literal + if len(snippet) > 16 { + snippet = snippet[:16] + ellipsis = "..." + } + return fmt.Sprintf("%s: '%s%s'", n.Type, snippet, ellipsis) +} + +// Unlink removes node 'n' from the tree. +// It panics if the node is nil. +func (n *Node) Unlink() { + if n.Prev != nil { + n.Prev.Next = n.Next + } else if n.Parent != nil { + n.Parent.FirstChild = n.Next + } + if n.Next != nil { + n.Next.Prev = n.Prev + } else if n.Parent != nil { + n.Parent.LastChild = n.Prev + } + n.Parent = nil + n.Next = nil + n.Prev = nil +} + +// AppendChild adds a node 'child' as a child of 'n'. +// It panics if either node is nil. 
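A minimal sketch (not part of the upstream source) of how NewNode and AppendChild compose when assembling a tree by hand; the node types and the Literal field are the ones declared above.

    package main

    import (
        "fmt"

        "github.com/russross/blackfriday/v2"
    )

    func main() {
        // Assemble Document -> Paragraph -> Text manually.
        doc := blackfriday.NewNode(blackfriday.Document)
        para := blackfriday.NewNode(blackfriday.Paragraph)
        doc.AppendChild(para)

        txt := blackfriday.NewNode(blackfriday.Text)
        txt.Literal = []byte("hello")
        para.AppendChild(txt)

        // Prints the Text node via its String method.
        fmt.Println(doc.FirstChild.FirstChild)
    }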
+func (n *Node) AppendChild(child *Node) { + child.Unlink() + child.Parent = n + if n.LastChild != nil { + n.LastChild.Next = child + child.Prev = n.LastChild + n.LastChild = child + } else { + n.FirstChild = child + n.LastChild = child + } +} + +// InsertBefore inserts 'sibling' immediately before 'n'. +// It panics if either node is nil. +func (n *Node) InsertBefore(sibling *Node) { + sibling.Unlink() + sibling.Prev = n.Prev + if sibling.Prev != nil { + sibling.Prev.Next = sibling + } + sibling.Next = n + n.Prev = sibling + sibling.Parent = n.Parent + if sibling.Prev == nil { + sibling.Parent.FirstChild = sibling + } +} + +func (n *Node) isContainer() bool { + switch n.Type { + case Document: + fallthrough + case BlockQuote: + fallthrough + case List: + fallthrough + case Item: + fallthrough + case Paragraph: + fallthrough + case Heading: + fallthrough + case Emph: + fallthrough + case Strong: + fallthrough + case Del: + fallthrough + case Link: + fallthrough + case Image: + fallthrough + case Table: + fallthrough + case TableHead: + fallthrough + case TableBody: + fallthrough + case TableRow: + fallthrough + case TableCell: + return true + default: + return false + } +} + +func (n *Node) canContain(t NodeType) bool { + if n.Type == List { + return t == Item + } + if n.Type == Document || n.Type == BlockQuote || n.Type == Item { + return t != Item + } + if n.Type == Table { + return t == TableHead || t == TableBody + } + if n.Type == TableHead || n.Type == TableBody { + return t == TableRow + } + if n.Type == TableRow { + return t == TableCell + } + return false +} + +// WalkStatus allows NodeVisitor to have some control over the tree traversal. +// It is returned from NodeVisitor and different values allow Node.Walk to +// decide which node to go to next. +type WalkStatus int + +const ( + // GoToNext is the default traversal of every node. + GoToNext WalkStatus = iota + // SkipChildren tells walker to skip all children of current node. + SkipChildren + // Terminate tells walker to terminate the traversal. + Terminate +) + +// NodeVisitor is a callback to be called when traversing the syntax tree. +// Called twice for every node: once with entering=true when the branch is +// first visited, then with entering=false after all the children are done. +type NodeVisitor func(node *Node, entering bool) WalkStatus + +// Walk is a convenience method that instantiates a walker and starts a +// traversal of subtree rooted at n. 
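A minimal sketch (not part of the upstream source) of driving this traversal from calling code: Parse produces the syntax tree and the visitor collects every Text leaf, returning GoToNext to keep walking.

    package main

    import (
        "fmt"

        "github.com/russross/blackfriday/v2"
    )

    func main() {
        md := blackfriday.New(blackfriday.WithExtensions(blackfriday.CommonExtensions))
        ast := md.Parse([]byte("# Title\n\nSome *emphasised* text."))

        ast.Walk(func(node *blackfriday.Node, entering bool) blackfriday.WalkStatus {
            // Leaf Text nodes are visited once, with entering == true.
            if entering && node.Type == blackfriday.Text {
                fmt.Printf("%s\n", node.Literal)
            }
            return blackfriday.GoToNext
        })
    }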
+func (n *Node) Walk(visitor NodeVisitor) { + w := newNodeWalker(n) + for w.current != nil { + status := visitor(w.current, w.entering) + switch status { + case GoToNext: + w.next() + case SkipChildren: + w.entering = false + w.next() + case Terminate: + return + } + } +} + +type nodeWalker struct { + current *Node + root *Node + entering bool +} + +func newNodeWalker(root *Node) *nodeWalker { + return &nodeWalker{ + current: root, + root: root, + entering: true, + } +} + +func (nw *nodeWalker) next() { + if (!nw.current.isContainer() || !nw.entering) && nw.current == nw.root { + nw.current = nil + return + } + if nw.entering && nw.current.isContainer() { + if nw.current.FirstChild != nil { + nw.current = nw.current.FirstChild + nw.entering = true + } else { + nw.entering = false + } + } else if nw.current.Next == nil { + nw.current = nw.current.Parent + nw.entering = false + } else { + nw.current = nw.current.Next + nw.entering = true + } +} + +func dump(ast *Node) { + fmt.Println(dumpString(ast)) +} + +func dumpR(ast *Node, depth int) string { + if ast == nil { + return "" + } + indent := bytes.Repeat([]byte("\t"), depth) + content := ast.Literal + if content == nil { + content = ast.content + } + result := fmt.Sprintf("%s%s(%q)\n", indent, ast.Type, content) + for n := ast.FirstChild; n != nil; n = n.Next { + result += dumpR(n, depth+1) + } + return result +} + +func dumpString(ast *Node) string { + return dumpR(ast, 0) +} diff --git a/vendor/github.com/russross/blackfriday/v2/smartypants.go b/vendor/github.com/russross/blackfriday/v2/smartypants.go new file mode 100644 index 000000000..3a220e942 --- /dev/null +++ b/vendor/github.com/russross/blackfriday/v2/smartypants.go @@ -0,0 +1,457 @@ +// +// Blackfriday Markdown Processor +// Available at http://github.com/russross/blackfriday +// +// Copyright © 2011 Russ Ross . +// Distributed under the Simplified BSD License. +// See README.md for details. +// + +// +// +// SmartyPants rendering +// +// + +package blackfriday + +import ( + "bytes" + "io" +) + +// SPRenderer is a struct containing state of a Smartypants renderer. +type SPRenderer struct { + inSingleQuote bool + inDoubleQuote bool + callbacks [256]smartCallback +} + +func wordBoundary(c byte) bool { + return c == 0 || isspace(c) || ispunct(c) +} + +func tolower(c byte) byte { + if c >= 'A' && c <= 'Z' { + return c - 'A' + 'a' + } + return c +} + +func isdigit(c byte) bool { + return c >= '0' && c <= '9' +} + +func smartQuoteHelper(out *bytes.Buffer, previousChar byte, nextChar byte, quote byte, isOpen *bool, addNBSP bool) bool { + // edge of the buffer is likely to be a tag that we don't get to see, + // so we treat it like text sometimes + + // enumerate all sixteen possibilities for (previousChar, nextChar) + // each can be one of {0, space, punct, other} + switch { + case previousChar == 0 && nextChar == 0: + // context is not any help here, so toggle + *isOpen = !*isOpen + case isspace(previousChar) && nextChar == 0: + // [ "] might be [ "foo...] + *isOpen = true + case ispunct(previousChar) && nextChar == 0: + // [!"] hmm... could be [Run!"] or [("...] + *isOpen = false + case /* isnormal(previousChar) && */ nextChar == 0: + // [a"] is probably a close + *isOpen = false + case previousChar == 0 && isspace(nextChar): + // [" ] might be [...foo" ] + *isOpen = false + case isspace(previousChar) && isspace(nextChar): + // [ " ] context is not any help here, so toggle + *isOpen = !*isOpen + case ispunct(previousChar) && isspace(nextChar): + // [!" 
] is probably a close + *isOpen = false + case /* isnormal(previousChar) && */ isspace(nextChar): + // [a" ] this is one of the easy cases + *isOpen = false + case previousChar == 0 && ispunct(nextChar): + // ["!] hmm... could be ["$1.95] or ["!...] + *isOpen = false + case isspace(previousChar) && ispunct(nextChar): + // [ "!] looks more like [ "$1.95] + *isOpen = true + case ispunct(previousChar) && ispunct(nextChar): + // [!"!] context is not any help here, so toggle + *isOpen = !*isOpen + case /* isnormal(previousChar) && */ ispunct(nextChar): + // [a"!] is probably a close + *isOpen = false + case previousChar == 0 /* && isnormal(nextChar) */ : + // ["a] is probably an open + *isOpen = true + case isspace(previousChar) /* && isnormal(nextChar) */ : + // [ "a] this is one of the easy cases + *isOpen = true + case ispunct(previousChar) /* && isnormal(nextChar) */ : + // [!"a] is probably an open + *isOpen = true + default: + // [a'b] maybe a contraction? + *isOpen = false + } + + // Note that with the limited lookahead, this non-breaking + // space will also be appended to single double quotes. + if addNBSP && !*isOpen { + out.WriteString(" ") + } + + out.WriteByte('&') + if *isOpen { + out.WriteByte('l') + } else { + out.WriteByte('r') + } + out.WriteByte(quote) + out.WriteString("quo;") + + if addNBSP && *isOpen { + out.WriteString(" ") + } + + return true +} + +func (r *SPRenderer) smartSingleQuote(out *bytes.Buffer, previousChar byte, text []byte) int { + if len(text) >= 2 { + t1 := tolower(text[1]) + + if t1 == '\'' { + nextChar := byte(0) + if len(text) >= 3 { + nextChar = text[2] + } + if smartQuoteHelper(out, previousChar, nextChar, 'd', &r.inDoubleQuote, false) { + return 1 + } + } + + if (t1 == 's' || t1 == 't' || t1 == 'm' || t1 == 'd') && (len(text) < 3 || wordBoundary(text[2])) { + out.WriteString("’") + return 0 + } + + if len(text) >= 3 { + t2 := tolower(text[2]) + + if ((t1 == 'r' && t2 == 'e') || (t1 == 'l' && t2 == 'l') || (t1 == 'v' && t2 == 'e')) && + (len(text) < 4 || wordBoundary(text[3])) { + out.WriteString("’") + return 0 + } + } + } + + nextChar := byte(0) + if len(text) > 1 { + nextChar = text[1] + } + if smartQuoteHelper(out, previousChar, nextChar, 's', &r.inSingleQuote, false) { + return 0 + } + + out.WriteByte(text[0]) + return 0 +} + +func (r *SPRenderer) smartParens(out *bytes.Buffer, previousChar byte, text []byte) int { + if len(text) >= 3 { + t1 := tolower(text[1]) + t2 := tolower(text[2]) + + if t1 == 'c' && t2 == ')' { + out.WriteString("©") + return 2 + } + + if t1 == 'r' && t2 == ')' { + out.WriteString("®") + return 2 + } + + if len(text) >= 4 && t1 == 't' && t2 == 'm' && text[3] == ')' { + out.WriteString("™") + return 3 + } + } + + out.WriteByte(text[0]) + return 0 +} + +func (r *SPRenderer) smartDash(out *bytes.Buffer, previousChar byte, text []byte) int { + if len(text) >= 2 { + if text[1] == '-' { + out.WriteString("—") + return 1 + } + + if wordBoundary(previousChar) && wordBoundary(text[1]) { + out.WriteString("–") + return 0 + } + } + + out.WriteByte(text[0]) + return 0 +} + +func (r *SPRenderer) smartDashLatex(out *bytes.Buffer, previousChar byte, text []byte) int { + if len(text) >= 3 && text[1] == '-' && text[2] == '-' { + out.WriteString("—") + return 2 + } + if len(text) >= 2 && text[1] == '-' { + out.WriteString("–") + return 1 + } + + out.WriteByte(text[0]) + return 0 +} + +func (r *SPRenderer) smartAmpVariant(out *bytes.Buffer, previousChar byte, text []byte, quote byte, addNBSP bool) int { + if bytes.HasPrefix(text, []byte(""")) 
{ + nextChar := byte(0) + if len(text) >= 7 { + nextChar = text[6] + } + if smartQuoteHelper(out, previousChar, nextChar, quote, &r.inDoubleQuote, addNBSP) { + return 5 + } + } + + if bytes.HasPrefix(text, []byte("�")) { + return 3 + } + + out.WriteByte('&') + return 0 +} + +func (r *SPRenderer) smartAmp(angledQuotes, addNBSP bool) func(*bytes.Buffer, byte, []byte) int { + var quote byte = 'd' + if angledQuotes { + quote = 'a' + } + + return func(out *bytes.Buffer, previousChar byte, text []byte) int { + return r.smartAmpVariant(out, previousChar, text, quote, addNBSP) + } +} + +func (r *SPRenderer) smartPeriod(out *bytes.Buffer, previousChar byte, text []byte) int { + if len(text) >= 3 && text[1] == '.' && text[2] == '.' { + out.WriteString("…") + return 2 + } + + if len(text) >= 5 && text[1] == ' ' && text[2] == '.' && text[3] == ' ' && text[4] == '.' { + out.WriteString("…") + return 4 + } + + out.WriteByte(text[0]) + return 0 +} + +func (r *SPRenderer) smartBacktick(out *bytes.Buffer, previousChar byte, text []byte) int { + if len(text) >= 2 && text[1] == '`' { + nextChar := byte(0) + if len(text) >= 3 { + nextChar = text[2] + } + if smartQuoteHelper(out, previousChar, nextChar, 'd', &r.inDoubleQuote, false) { + return 1 + } + } + + out.WriteByte(text[0]) + return 0 +} + +func (r *SPRenderer) smartNumberGeneric(out *bytes.Buffer, previousChar byte, text []byte) int { + if wordBoundary(previousChar) && previousChar != '/' && len(text) >= 3 { + // is it of the form digits/digits(word boundary)?, i.e., \d+/\d+\b + // note: check for regular slash (/) or fraction slash (⁄, 0x2044, or 0xe2 81 84 in utf-8) + // and avoid changing dates like 1/23/2005 into fractions. + numEnd := 0 + for len(text) > numEnd && isdigit(text[numEnd]) { + numEnd++ + } + if numEnd == 0 { + out.WriteByte(text[0]) + return 0 + } + denStart := numEnd + 1 + if len(text) > numEnd+3 && text[numEnd] == 0xe2 && text[numEnd+1] == 0x81 && text[numEnd+2] == 0x84 { + denStart = numEnd + 3 + } else if len(text) < numEnd+2 || text[numEnd] != '/' { + out.WriteByte(text[0]) + return 0 + } + denEnd := denStart + for len(text) > denEnd && isdigit(text[denEnd]) { + denEnd++ + } + if denEnd == denStart { + out.WriteByte(text[0]) + return 0 + } + if len(text) == denEnd || wordBoundary(text[denEnd]) && text[denEnd] != '/' { + out.WriteString("") + out.Write(text[:numEnd]) + out.WriteString("") + out.Write(text[denStart:denEnd]) + out.WriteString("") + return denEnd - 1 + } + } + + out.WriteByte(text[0]) + return 0 +} + +func (r *SPRenderer) smartNumber(out *bytes.Buffer, previousChar byte, text []byte) int { + if wordBoundary(previousChar) && previousChar != '/' && len(text) >= 3 { + if text[0] == '1' && text[1] == '/' && text[2] == '2' { + if len(text) < 4 || wordBoundary(text[3]) && text[3] != '/' { + out.WriteString("½") + return 2 + } + } + + if text[0] == '1' && text[1] == '/' && text[2] == '4' { + if len(text) < 4 || wordBoundary(text[3]) && text[3] != '/' || (len(text) >= 5 && tolower(text[3]) == 't' && tolower(text[4]) == 'h') { + out.WriteString("¼") + return 2 + } + } + + if text[0] == '3' && text[1] == '/' && text[2] == '4' { + if len(text) < 4 || wordBoundary(text[3]) && text[3] != '/' || (len(text) >= 6 && tolower(text[3]) == 't' && tolower(text[4]) == 'h' && tolower(text[5]) == 's') { + out.WriteString("¾") + return 2 + } + } + } + + out.WriteByte(text[0]) + return 0 +} + +func (r *SPRenderer) smartDoubleQuoteVariant(out *bytes.Buffer, previousChar byte, text []byte, quote byte) int { + nextChar := byte(0) + if len(text) > 
1 { + nextChar = text[1] + } + if !smartQuoteHelper(out, previousChar, nextChar, quote, &r.inDoubleQuote, false) { + out.WriteString(""") + } + + return 0 +} + +func (r *SPRenderer) smartDoubleQuote(out *bytes.Buffer, previousChar byte, text []byte) int { + return r.smartDoubleQuoteVariant(out, previousChar, text, 'd') +} + +func (r *SPRenderer) smartAngledDoubleQuote(out *bytes.Buffer, previousChar byte, text []byte) int { + return r.smartDoubleQuoteVariant(out, previousChar, text, 'a') +} + +func (r *SPRenderer) smartLeftAngle(out *bytes.Buffer, previousChar byte, text []byte) int { + i := 0 + + for i < len(text) && text[i] != '>' { + i++ + } + + out.Write(text[:i+1]) + return i +} + +type smartCallback func(out *bytes.Buffer, previousChar byte, text []byte) int + +// NewSmartypantsRenderer constructs a Smartypants renderer object. +func NewSmartypantsRenderer(flags HTMLFlags) *SPRenderer { + var ( + r SPRenderer + + smartAmpAngled = r.smartAmp(true, false) + smartAmpAngledNBSP = r.smartAmp(true, true) + smartAmpRegular = r.smartAmp(false, false) + smartAmpRegularNBSP = r.smartAmp(false, true) + + addNBSP = flags&SmartypantsQuotesNBSP != 0 + ) + + if flags&SmartypantsAngledQuotes == 0 { + r.callbacks['"'] = r.smartDoubleQuote + if !addNBSP { + r.callbacks['&'] = smartAmpRegular + } else { + r.callbacks['&'] = smartAmpRegularNBSP + } + } else { + r.callbacks['"'] = r.smartAngledDoubleQuote + if !addNBSP { + r.callbacks['&'] = smartAmpAngled + } else { + r.callbacks['&'] = smartAmpAngledNBSP + } + } + r.callbacks['\''] = r.smartSingleQuote + r.callbacks['('] = r.smartParens + if flags&SmartypantsDashes != 0 { + if flags&SmartypantsLatexDashes == 0 { + r.callbacks['-'] = r.smartDash + } else { + r.callbacks['-'] = r.smartDashLatex + } + } + r.callbacks['.'] = r.smartPeriod + if flags&SmartypantsFractions == 0 { + r.callbacks['1'] = r.smartNumber + r.callbacks['3'] = r.smartNumber + } else { + for ch := '1'; ch <= '9'; ch++ { + r.callbacks[ch] = r.smartNumberGeneric + } + } + r.callbacks['<'] = r.smartLeftAngle + r.callbacks['`'] = r.smartBacktick + return &r +} + +// Process is the entry point of the Smartypants renderer. +func (r *SPRenderer) Process(w io.Writer, text []byte) { + mark := 0 + for i := 0; i < len(text); i++ { + if action := r.callbacks[text[i]]; action != nil { + if i > mark { + w.Write(text[mark:i]) + } + previousChar := byte(0) + if i > 0 { + previousChar = text[i-1] + } + var tmp bytes.Buffer + i += action(&tmp, previousChar, text[i:]) + w.Write(tmp.Bytes()) + mark = i + 1 + } + } + if mark < len(text) { + w.Write(text[mark:]) + } +} diff --git a/vendor/github.com/shurcooL/sanitized_anchor_name/.travis.yml b/vendor/github.com/shurcooL/sanitized_anchor_name/.travis.yml new file mode 100644 index 000000000..93b1fcdb3 --- /dev/null +++ b/vendor/github.com/shurcooL/sanitized_anchor_name/.travis.yml @@ -0,0 +1,16 @@ +sudo: false +language: go +go: + - 1.x + - master +matrix: + allow_failures: + - go: master + fast_finish: true +install: + - # Do nothing. This is needed to prevent default install action "go get -t -v ./..." from happening here (we want it to happen inside script step). +script: + - go get -t -v ./... + - diff -u <(echo -n) <(gofmt -d -s .) + - go tool vet . + - go test -v -race ./... 
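// Illustrative sketch, not part of this patch: the Node, NewNode, AppendChild
// and Walk additions in blackfriday/v2/node.go above form the vendored AST
// API. Assuming the package is importable as github.com/russross/blackfriday/v2,
// application code could exercise it roughly like this:
package main

import (
	"fmt"

	blackfriday "github.com/russross/blackfriday/v2"
)

func main() {
	// Build a tiny Document -> Paragraph -> Text tree by hand.
	doc := blackfriday.NewNode(blackfriday.Document)
	para := blackfriday.NewNode(blackfriday.Paragraph)
	text := blackfriday.NewNode(blackfriday.Text)
	text.Literal = []byte("hello")
	para.AppendChild(text)
	doc.AppendChild(para)

	// Walk calls the visitor twice per container node: once with
	// entering=true before its children and once with entering=false after;
	// leaf nodes such as Text are visited once.
	doc.Walk(func(node *blackfriday.Node, entering bool) blackfriday.WalkStatus {
		if entering {
			fmt.Println(node.String())
		}
		return blackfriday.GoToNext
	})
}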
diff --git a/vendor/github.com/shurcooL/sanitized_anchor_name/LICENSE b/vendor/github.com/shurcooL/sanitized_anchor_name/LICENSE new file mode 100644 index 000000000..c35c17af9 --- /dev/null +++ b/vendor/github.com/shurcooL/sanitized_anchor_name/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2015 Dmitri Shuralyov + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/shurcooL/sanitized_anchor_name/README.md b/vendor/github.com/shurcooL/sanitized_anchor_name/README.md new file mode 100644 index 000000000..670bf0fe6 --- /dev/null +++ b/vendor/github.com/shurcooL/sanitized_anchor_name/README.md @@ -0,0 +1,36 @@ +sanitized_anchor_name +===================== + +[![Build Status](https://travis-ci.org/shurcooL/sanitized_anchor_name.svg?branch=master)](https://travis-ci.org/shurcooL/sanitized_anchor_name) [![GoDoc](https://godoc.org/github.com/shurcooL/sanitized_anchor_name?status.svg)](https://godoc.org/github.com/shurcooL/sanitized_anchor_name) + +Package sanitized_anchor_name provides a func to create sanitized anchor names. + +Its logic can be reused by multiple packages to create interoperable anchor names +and links to those anchors. + +At this time, it does not try to ensure that generated anchor names +are unique, that responsibility falls on the caller. + +Installation +------------ + +```bash +go get -u github.com/shurcooL/sanitized_anchor_name +``` + +Example +------- + +```Go +anchorName := sanitized_anchor_name.Create("This is a header") + +fmt.Println(anchorName) + +// Output: +// this-is-a-header +``` + +License +------- + +- [MIT License](LICENSE) diff --git a/vendor/github.com/shurcooL/sanitized_anchor_name/go.mod b/vendor/github.com/shurcooL/sanitized_anchor_name/go.mod new file mode 100644 index 000000000..1e2553475 --- /dev/null +++ b/vendor/github.com/shurcooL/sanitized_anchor_name/go.mod @@ -0,0 +1 @@ +module github.com/shurcooL/sanitized_anchor_name diff --git a/vendor/github.com/shurcooL/sanitized_anchor_name/main.go b/vendor/github.com/shurcooL/sanitized_anchor_name/main.go new file mode 100644 index 000000000..6a77d1243 --- /dev/null +++ b/vendor/github.com/shurcooL/sanitized_anchor_name/main.go @@ -0,0 +1,29 @@ +// Package sanitized_anchor_name provides a func to create sanitized anchor names. +// +// Its logic can be reused by multiple packages to create interoperable anchor names +// and links to those anchors. +// +// At this time, it does not try to ensure that generated anchor names +// are unique, that responsibility falls on the caller. 
+package sanitized_anchor_name // import "github.com/shurcooL/sanitized_anchor_name" + +import "unicode" + +// Create returns a sanitized anchor name for the given text. +func Create(text string) string { + var anchorName []rune + var futureDash = false + for _, r := range text { + switch { + case unicode.IsLetter(r) || unicode.IsNumber(r): + if futureDash && len(anchorName) > 0 { + anchorName = append(anchorName, '-') + } + futureDash = false + anchorName = append(anchorName, unicode.ToLower(r)) + default: + futureDash = true + } + } + return string(anchorName) +} diff --git a/vendor/github.com/stretchr/testify/assert/assertion_format.go b/vendor/github.com/stretchr/testify/assert/assertion_format.go index aa1c2b95c..e0364e9e7 100644 --- a/vendor/github.com/stretchr/testify/assert/assertion_format.go +++ b/vendor/github.com/stretchr/testify/assert/assertion_format.go @@ -113,6 +113,17 @@ func Errorf(t TestingT, err error, msg string, args ...interface{}) bool { return Error(t, err, append([]interface{}{msg}, args...)...) } +// Eventuallyf asserts that given condition will be met in waitFor time, +// periodically checking target function each tick. +// +// assert.Eventuallyf(t, func() bool { return true; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") +func Eventuallyf(t TestingT, condition func() bool, waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return Eventually(t, condition, waitFor, tick, append([]interface{}{msg}, args...)...) +} + // Exactlyf asserts that two objects are equal in value and type. // // assert.Exactlyf(t, int32(123, "error message %s", "formatted"), int64(123)) @@ -157,6 +168,31 @@ func FileExistsf(t TestingT, path string, msg string, args ...interface{}) bool return FileExists(t, path, append([]interface{}{msg}, args...)...) } +// Greaterf asserts that the first element is greater than the second +// +// assert.Greaterf(t, 2, 1, "error message %s", "formatted") +// assert.Greaterf(t, float64(2, "error message %s", "formatted"), float64(1)) +// assert.Greaterf(t, "b", "a", "error message %s", "formatted") +func Greaterf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return Greater(t, e1, e2, append([]interface{}{msg}, args...)...) +} + +// GreaterOrEqualf asserts that the first element is greater than or equal to the second +// +// assert.GreaterOrEqualf(t, 2, 1, "error message %s", "formatted") +// assert.GreaterOrEqualf(t, 2, 2, "error message %s", "formatted") +// assert.GreaterOrEqualf(t, "b", "a", "error message %s", "formatted") +// assert.GreaterOrEqualf(t, "b", "b", "error message %s", "formatted") +func GreaterOrEqualf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return GreaterOrEqual(t, e1, e2, append([]interface{}{msg}, args...)...) +} + // HTTPBodyContainsf asserts that a specified handler returns a // body that contains a string. // @@ -289,6 +325,14 @@ func JSONEqf(t TestingT, expected string, actual string, msg string, args ...int return JSONEq(t, expected, actual, append([]interface{}{msg}, args...)...) } +// YAMLEqf asserts that two YAML strings are equivalent. 
+func YAMLEqf(t TestingT, expected string, actual string, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return YAMLEq(t, expected, actual, append([]interface{}{msg}, args...)...) +} + // Lenf asserts that the specified object has specific length. // Lenf also fails if the object has a type that len() not accept. // @@ -300,6 +344,31 @@ func Lenf(t TestingT, object interface{}, length int, msg string, args ...interf return Len(t, object, length, append([]interface{}{msg}, args...)...) } +// Lessf asserts that the first element is less than the second +// +// assert.Lessf(t, 1, 2, "error message %s", "formatted") +// assert.Lessf(t, float64(1, "error message %s", "formatted"), float64(2)) +// assert.Lessf(t, "a", "b", "error message %s", "formatted") +func Lessf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return Less(t, e1, e2, append([]interface{}{msg}, args...)...) +} + +// LessOrEqualf asserts that the first element is less than or equal to the second +// +// assert.LessOrEqualf(t, 1, 2, "error message %s", "formatted") +// assert.LessOrEqualf(t, 2, 2, "error message %s", "formatted") +// assert.LessOrEqualf(t, "a", "b", "error message %s", "formatted") +// assert.LessOrEqualf(t, "b", "b", "error message %s", "formatted") +func LessOrEqualf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return LessOrEqual(t, e1, e2, append([]interface{}{msg}, args...)...) +} + // Nilf asserts that the specified object is nil. // // assert.Nilf(t, err, "error message %s", "formatted") @@ -444,6 +513,19 @@ func Regexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...in return Regexp(t, rx, str, append([]interface{}{msg}, args...)...) } +// Samef asserts that two pointers reference the same object. +// +// assert.Samef(t, ptr1, ptr2, "error message %s", "formatted") +// +// Both arguments must be pointer variables. Pointer variable sameness is +// determined based on the equality of both type and value. +func Samef(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return Same(t, expected, actual, append([]interface{}{msg}, args...)...) +} + // Subsetf asserts that the specified list(array, slice...) contains all // elements given in the specified subset(array, slice...). // diff --git a/vendor/github.com/stretchr/testify/assert/assertion_forward.go b/vendor/github.com/stretchr/testify/assert/assertion_forward.go index de39f794e..26830403a 100644 --- a/vendor/github.com/stretchr/testify/assert/assertion_forward.go +++ b/vendor/github.com/stretchr/testify/assert/assertion_forward.go @@ -215,6 +215,28 @@ func (a *Assertions) Errorf(err error, msg string, args ...interface{}) bool { return Errorf(a.t, err, msg, args...) } +// Eventually asserts that given condition will be met in waitFor time, +// periodically checking target function each tick. +// +// a.Eventually(func() bool { return true; }, time.Second, 10*time.Millisecond) +func (a *Assertions) Eventually(condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return Eventually(a.t, condition, waitFor, tick, msgAndArgs...) 
+} + +// Eventuallyf asserts that given condition will be met in waitFor time, +// periodically checking target function each tick. +// +// a.Eventuallyf(func() bool { return true; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") +func (a *Assertions) Eventuallyf(condition func() bool, waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return Eventuallyf(a.t, condition, waitFor, tick, msg, args...) +} + // Exactly asserts that two objects are equal in value and type. // // a.Exactly(int32(123), int64(123)) @@ -303,6 +325,56 @@ func (a *Assertions) FileExistsf(path string, msg string, args ...interface{}) b return FileExistsf(a.t, path, msg, args...) } +// Greater asserts that the first element is greater than the second +// +// a.Greater(2, 1) +// a.Greater(float64(2), float64(1)) +// a.Greater("b", "a") +func (a *Assertions) Greater(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return Greater(a.t, e1, e2, msgAndArgs...) +} + +// GreaterOrEqual asserts that the first element is greater than or equal to the second +// +// a.GreaterOrEqual(2, 1) +// a.GreaterOrEqual(2, 2) +// a.GreaterOrEqual("b", "a") +// a.GreaterOrEqual("b", "b") +func (a *Assertions) GreaterOrEqual(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return GreaterOrEqual(a.t, e1, e2, msgAndArgs...) +} + +// GreaterOrEqualf asserts that the first element is greater than or equal to the second +// +// a.GreaterOrEqualf(2, 1, "error message %s", "formatted") +// a.GreaterOrEqualf(2, 2, "error message %s", "formatted") +// a.GreaterOrEqualf("b", "a", "error message %s", "formatted") +// a.GreaterOrEqualf("b", "b", "error message %s", "formatted") +func (a *Assertions) GreaterOrEqualf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return GreaterOrEqualf(a.t, e1, e2, msg, args...) +} + +// Greaterf asserts that the first element is greater than the second +// +// a.Greaterf(2, 1, "error message %s", "formatted") +// a.Greaterf(float64(2, "error message %s", "formatted"), float64(1)) +// a.Greaterf("b", "a", "error message %s", "formatted") +func (a *Assertions) Greaterf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return Greaterf(a.t, e1, e2, msg, args...) +} + // HTTPBodyContains asserts that a specified handler returns a // body that contains a string. // @@ -567,6 +639,22 @@ func (a *Assertions) JSONEqf(expected string, actual string, msg string, args .. return JSONEqf(a.t, expected, actual, msg, args...) } +// YAMLEq asserts that two YAML strings are equivalent. +func (a *Assertions) YAMLEq(expected string, actual string, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return YAMLEq(a.t, expected, actual, msgAndArgs...) +} + +// YAMLEqf asserts that two YAML strings are equivalent. +func (a *Assertions) YAMLEqf(expected string, actual string, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return YAMLEqf(a.t, expected, actual, msg, args...) +} + // Len asserts that the specified object has specific length. // Len also fails if the object has a type that len() not accept. 
// @@ -589,6 +677,56 @@ func (a *Assertions) Lenf(object interface{}, length int, msg string, args ...in return Lenf(a.t, object, length, msg, args...) } +// Less asserts that the first element is less than the second +// +// a.Less(1, 2) +// a.Less(float64(1), float64(2)) +// a.Less("a", "b") +func (a *Assertions) Less(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return Less(a.t, e1, e2, msgAndArgs...) +} + +// LessOrEqual asserts that the first element is less than or equal to the second +// +// a.LessOrEqual(1, 2) +// a.LessOrEqual(2, 2) +// a.LessOrEqual("a", "b") +// a.LessOrEqual("b", "b") +func (a *Assertions) LessOrEqual(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return LessOrEqual(a.t, e1, e2, msgAndArgs...) +} + +// LessOrEqualf asserts that the first element is less than or equal to the second +// +// a.LessOrEqualf(1, 2, "error message %s", "formatted") +// a.LessOrEqualf(2, 2, "error message %s", "formatted") +// a.LessOrEqualf("a", "b", "error message %s", "formatted") +// a.LessOrEqualf("b", "b", "error message %s", "formatted") +func (a *Assertions) LessOrEqualf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return LessOrEqualf(a.t, e1, e2, msg, args...) +} + +// Lessf asserts that the first element is less than the second +// +// a.Lessf(1, 2, "error message %s", "formatted") +// a.Lessf(float64(1, "error message %s", "formatted"), float64(2)) +// a.Lessf("a", "b", "error message %s", "formatted") +func (a *Assertions) Lessf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return Lessf(a.t, e1, e2, msg, args...) +} + // Nil asserts that the specified object is nil. // // a.Nil(err) @@ -877,6 +1015,32 @@ func (a *Assertions) Regexpf(rx interface{}, str interface{}, msg string, args . return Regexpf(a.t, rx, str, msg, args...) } +// Same asserts that two pointers reference the same object. +// +// a.Same(ptr1, ptr2) +// +// Both arguments must be pointer variables. Pointer variable sameness is +// determined based on the equality of both type and value. +func (a *Assertions) Same(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return Same(a.t, expected, actual, msgAndArgs...) +} + +// Samef asserts that two pointers reference the same object. +// +// a.Samef(ptr1, ptr2, "error message %s", "formatted") +// +// Both arguments must be pointer variables. Pointer variable sameness is +// determined based on the equality of both type and value. +func (a *Assertions) Samef(expected interface{}, actual interface{}, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return Samef(a.t, expected, actual, msg, args...) +} + // Subset asserts that the specified list(array, slice...) contains all // elements given in the specified subset(array, slice...). 
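// Illustrative sketch, not part of this patch: how the forwarded assertions
// added to assertion_forward.go above (Greater, LessOrEqual, YAMLEq,
// Eventually) are used from a test via assert.New. The test name below is
// hypothetical.
package example

import (
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
)

func TestForwardedAssertions(t *testing.T) {
	a := assert.New(t)

	// Ordered comparisons require both arguments to be of the same kind.
	a.Greater(2, 1)
	a.LessOrEqual("a", "b")

	// YAMLEq compares the parsed YAML documents, not the raw strings.
	a.YAMLEq("name: swag\nversion: 1", "version: 1\nname: swag")

	// Eventually polls the condition every tick until waitFor elapses.
	a.Eventually(func() bool { return true }, time.Second, 10*time.Millisecond)
}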
// diff --git a/vendor/github.com/stretchr/testify/assert/assertion_order.go b/vendor/github.com/stretchr/testify/assert/assertion_order.go new file mode 100644 index 000000000..15a486ca6 --- /dev/null +++ b/vendor/github.com/stretchr/testify/assert/assertion_order.go @@ -0,0 +1,309 @@ +package assert + +import ( + "fmt" + "reflect" +) + +func compare(obj1, obj2 interface{}, kind reflect.Kind) (int, bool) { + switch kind { + case reflect.Int: + { + intobj1 := obj1.(int) + intobj2 := obj2.(int) + if intobj1 > intobj2 { + return -1, true + } + if intobj1 == intobj2 { + return 0, true + } + if intobj1 < intobj2 { + return 1, true + } + } + case reflect.Int8: + { + int8obj1 := obj1.(int8) + int8obj2 := obj2.(int8) + if int8obj1 > int8obj2 { + return -1, true + } + if int8obj1 == int8obj2 { + return 0, true + } + if int8obj1 < int8obj2 { + return 1, true + } + } + case reflect.Int16: + { + int16obj1 := obj1.(int16) + int16obj2 := obj2.(int16) + if int16obj1 > int16obj2 { + return -1, true + } + if int16obj1 == int16obj2 { + return 0, true + } + if int16obj1 < int16obj2 { + return 1, true + } + } + case reflect.Int32: + { + int32obj1 := obj1.(int32) + int32obj2 := obj2.(int32) + if int32obj1 > int32obj2 { + return -1, true + } + if int32obj1 == int32obj2 { + return 0, true + } + if int32obj1 < int32obj2 { + return 1, true + } + } + case reflect.Int64: + { + int64obj1 := obj1.(int64) + int64obj2 := obj2.(int64) + if int64obj1 > int64obj2 { + return -1, true + } + if int64obj1 == int64obj2 { + return 0, true + } + if int64obj1 < int64obj2 { + return 1, true + } + } + case reflect.Uint: + { + uintobj1 := obj1.(uint) + uintobj2 := obj2.(uint) + if uintobj1 > uintobj2 { + return -1, true + } + if uintobj1 == uintobj2 { + return 0, true + } + if uintobj1 < uintobj2 { + return 1, true + } + } + case reflect.Uint8: + { + uint8obj1 := obj1.(uint8) + uint8obj2 := obj2.(uint8) + if uint8obj1 > uint8obj2 { + return -1, true + } + if uint8obj1 == uint8obj2 { + return 0, true + } + if uint8obj1 < uint8obj2 { + return 1, true + } + } + case reflect.Uint16: + { + uint16obj1 := obj1.(uint16) + uint16obj2 := obj2.(uint16) + if uint16obj1 > uint16obj2 { + return -1, true + } + if uint16obj1 == uint16obj2 { + return 0, true + } + if uint16obj1 < uint16obj2 { + return 1, true + } + } + case reflect.Uint32: + { + uint32obj1 := obj1.(uint32) + uint32obj2 := obj2.(uint32) + if uint32obj1 > uint32obj2 { + return -1, true + } + if uint32obj1 == uint32obj2 { + return 0, true + } + if uint32obj1 < uint32obj2 { + return 1, true + } + } + case reflect.Uint64: + { + uint64obj1 := obj1.(uint64) + uint64obj2 := obj2.(uint64) + if uint64obj1 > uint64obj2 { + return -1, true + } + if uint64obj1 == uint64obj2 { + return 0, true + } + if uint64obj1 < uint64obj2 { + return 1, true + } + } + case reflect.Float32: + { + float32obj1 := obj1.(float32) + float32obj2 := obj2.(float32) + if float32obj1 > float32obj2 { + return -1, true + } + if float32obj1 == float32obj2 { + return 0, true + } + if float32obj1 < float32obj2 { + return 1, true + } + } + case reflect.Float64: + { + float64obj1 := obj1.(float64) + float64obj2 := obj2.(float64) + if float64obj1 > float64obj2 { + return -1, true + } + if float64obj1 == float64obj2 { + return 0, true + } + if float64obj1 < float64obj2 { + return 1, true + } + } + case reflect.String: + { + stringobj1 := obj1.(string) + stringobj2 := obj2.(string) + if stringobj1 > stringobj2 { + return -1, true + } + if stringobj1 == stringobj2 { + return 0, true + } + if stringobj1 < stringobj2 { + return 1, 
true + } + } + } + + return 0, false +} + +// Greater asserts that the first element is greater than the second +// +// assert.Greater(t, 2, 1) +// assert.Greater(t, float64(2), float64(1)) +// assert.Greater(t, "b", "a") +func Greater(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + + e1Kind := reflect.ValueOf(e1).Kind() + e2Kind := reflect.ValueOf(e2).Kind() + if e1Kind != e2Kind { + return Fail(t, "Elements should be the same type", msgAndArgs...) + } + + res, isComparable := compare(e1, e2, e1Kind) + if !isComparable { + return Fail(t, fmt.Sprintf("Can not compare type \"%s\"", reflect.TypeOf(e1)), msgAndArgs...) + } + + if res != -1 { + return Fail(t, fmt.Sprintf("\"%v\" is not greater than \"%v\"", e1, e2), msgAndArgs...) + } + + return true +} + +// GreaterOrEqual asserts that the first element is greater than or equal to the second +// +// assert.GreaterOrEqual(t, 2, 1) +// assert.GreaterOrEqual(t, 2, 2) +// assert.GreaterOrEqual(t, "b", "a") +// assert.GreaterOrEqual(t, "b", "b") +func GreaterOrEqual(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + + e1Kind := reflect.ValueOf(e1).Kind() + e2Kind := reflect.ValueOf(e2).Kind() + if e1Kind != e2Kind { + return Fail(t, "Elements should be the same type", msgAndArgs...) + } + + res, isComparable := compare(e1, e2, e1Kind) + if !isComparable { + return Fail(t, fmt.Sprintf("Can not compare type \"%s\"", reflect.TypeOf(e1)), msgAndArgs...) + } + + if res != -1 && res != 0 { + return Fail(t, fmt.Sprintf("\"%v\" is not greater than or equal to \"%v\"", e1, e2), msgAndArgs...) + } + + return true +} + +// Less asserts that the first element is less than the second +// +// assert.Less(t, 1, 2) +// assert.Less(t, float64(1), float64(2)) +// assert.Less(t, "a", "b") +func Less(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + + e1Kind := reflect.ValueOf(e1).Kind() + e2Kind := reflect.ValueOf(e2).Kind() + if e1Kind != e2Kind { + return Fail(t, "Elements should be the same type", msgAndArgs...) + } + + res, isComparable := compare(e1, e2, e1Kind) + if !isComparable { + return Fail(t, fmt.Sprintf("Can not compare type \"%s\"", reflect.TypeOf(e1)), msgAndArgs...) + } + + if res != 1 { + return Fail(t, fmt.Sprintf("\"%v\" is not less than \"%v\"", e1, e2), msgAndArgs...) + } + + return true +} + +// LessOrEqual asserts that the first element is less than or equal to the second +// +// assert.LessOrEqual(t, 1, 2) +// assert.LessOrEqual(t, 2, 2) +// assert.LessOrEqual(t, "a", "b") +// assert.LessOrEqual(t, "b", "b") +func LessOrEqual(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + + e1Kind := reflect.ValueOf(e1).Kind() + e2Kind := reflect.ValueOf(e2).Kind() + if e1Kind != e2Kind { + return Fail(t, "Elements should be the same type", msgAndArgs...) + } + + res, isComparable := compare(e1, e2, e1Kind) + if !isComparable { + return Fail(t, fmt.Sprintf("Can not compare type \"%s\"", reflect.TypeOf(e1)), msgAndArgs...) + } + + if res != 1 && res != 0 { + return Fail(t, fmt.Sprintf("\"%v\" is not less than or equal to \"%v\"", e1, e2), msgAndArgs...) 
+ } + + return true +} diff --git a/vendor/github.com/stretchr/testify/assert/assertions.go b/vendor/github.com/stretchr/testify/assert/assertions.go index 9bd4a80e4..044da8b01 100644 --- a/vendor/github.com/stretchr/testify/assert/assertions.go +++ b/vendor/github.com/stretchr/testify/assert/assertions.go @@ -18,6 +18,7 @@ import ( "github.com/davecgh/go-spew/spew" "github.com/pmezard/go-difflib/difflib" + yaml "gopkg.in/yaml.v2" ) //go:generate go run ../_codegen/main.go -output-package=assert -template=assertion_format.go.tmpl @@ -350,6 +351,37 @@ func Equal(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) } +// Same asserts that two pointers reference the same object. +// +// assert.Same(t, ptr1, ptr2) +// +// Both arguments must be pointer variables. Pointer variable sameness is +// determined based on the equality of both type and value. +func Same(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + + expectedPtr, actualPtr := reflect.ValueOf(expected), reflect.ValueOf(actual) + if expectedPtr.Kind() != reflect.Ptr || actualPtr.Kind() != reflect.Ptr { + return Fail(t, "Invalid operation: both arguments must be pointers", msgAndArgs...) + } + + expectedType, actualType := reflect.TypeOf(expected), reflect.TypeOf(actual) + if expectedType != actualType { + return Fail(t, fmt.Sprintf("Pointer expected to be of type %v, but was %v", + expectedType, actualType), msgAndArgs...) + } + + if expected != actual { + return Fail(t, fmt.Sprintf("Not same: \n"+ + "expected: %p %#v\n"+ + "actual : %p %#v", expected, expected, actual, actual), msgAndArgs...) + } + + return true +} + // formatUnequalValues takes two values of arbitrary types and returns string // representations appropriate to be presented to the user. 
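// Illustrative sketch, not part of this patch: the new assert.Same added to
// assertions.go above checks pointer identity, while assert.Equal checks
// value equality. The test name below is hypothetical.
package example

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestSameVersusEqual(t *testing.T) {
	first, second := 1, 1
	p := &first

	// Equal passes: two distinct ints holding the same value are equal.
	assert.Equal(t, first, second)

	// Same requires both arguments to be pointers of the same type that
	// reference the same variable; p and &first do, while &first and
	// &second would not.
	assert.Same(t, p, &first)
}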
// @@ -479,14 +511,14 @@ func isEmpty(object interface{}) bool { // collection types are empty when they have no element case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice: return objValue.Len() == 0 - // pointers are empty if nil or if the value they point to is empty + // pointers are empty if nil or if the value they point to is empty case reflect.Ptr: if objValue.IsNil() { return true } deref := objValue.Elem().Interface() return isEmpty(deref) - // for all other types, compare against the zero value + // for all other types, compare against the zero value default: zero := reflect.Zero(objValue.Type()) return reflect.DeepEqual(object, zero.Interface()) @@ -629,7 +661,7 @@ func NotEqual(t TestingT, expected, actual interface{}, msgAndArgs ...interface{ func includeElement(list interface{}, element interface{}) (ok, found bool) { listValue := reflect.ValueOf(list) - elementValue := reflect.ValueOf(element) + listKind := reflect.TypeOf(list).Kind() defer func() { if e := recover(); e != nil { ok = false @@ -637,11 +669,12 @@ func includeElement(list interface{}, element interface{}) (ok, found bool) { } }() - if reflect.TypeOf(list).Kind() == reflect.String { + if listKind == reflect.String { + elementValue := reflect.ValueOf(element) return true, strings.Contains(listValue.String(), elementValue.String()) } - if reflect.TypeOf(list).Kind() == reflect.Map { + if listKind == reflect.Map { mapKeys := listValue.MapKeys() for i := 0; i < len(mapKeys); i++ { if ObjectsAreEqual(mapKeys[i].Interface(), element) { @@ -1337,6 +1370,24 @@ func JSONEq(t TestingT, expected string, actual string, msgAndArgs ...interface{ return Equal(t, expectedJSONAsInterface, actualJSONAsInterface, msgAndArgs...) } +// YAMLEq asserts that two YAML strings are equivalent. +func YAMLEq(t TestingT, expected string, actual string, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + var expectedYAMLAsInterface, actualYAMLAsInterface interface{} + + if err := yaml.Unmarshal([]byte(expected), &expectedYAMLAsInterface); err != nil { + return Fail(t, fmt.Sprintf("Expected value ('%s') is not valid yaml.\nYAML parsing error: '%s'", expected, err.Error()), msgAndArgs...) + } + + if err := yaml.Unmarshal([]byte(actual), &actualYAMLAsInterface); err != nil { + return Fail(t, fmt.Sprintf("Input ('%s') needs to be valid yaml.\nYAML error: '%s'", actual, err.Error()), msgAndArgs...) + } + + return Equal(t, expectedYAMLAsInterface, actualYAMLAsInterface, msgAndArgs...) +} + func typeAndKind(v interface{}) (reflect.Type, reflect.Kind) { t := reflect.TypeOf(v) k := t.Kind() @@ -1371,8 +1422,8 @@ func diff(expected interface{}, actual interface{}) string { e = spewConfig.Sdump(expected) a = spewConfig.Sdump(actual) } else { - e = expected.(string) - a = actual.(string) + e = reflect.ValueOf(expected).String() + a = reflect.ValueOf(actual).String() } diff, _ := difflib.GetUnifiedDiffString(difflib.UnifiedDiff{ @@ -1414,3 +1465,34 @@ var spewConfig = spew.ConfigState{ type tHelper interface { Helper() } + +// Eventually asserts that given condition will be met in waitFor time, +// periodically checking target function each tick. 
+// +// assert.Eventually(t, func() bool { return true; }, time.Second, 10*time.Millisecond) +func Eventually(t TestingT, condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + + timer := time.NewTimer(waitFor) + ticker := time.NewTicker(tick) + checkPassed := make(chan bool) + defer timer.Stop() + defer ticker.Stop() + defer close(checkPassed) + for { + select { + case <-timer.C: + return Fail(t, "Condition never satisfied", msgAndArgs...) + case result := <-checkPassed: + if result { + return true + } + case <-ticker.C: + go func() { + checkPassed <- condition() + }() + } + } +} diff --git a/vendor/github.com/swaggo/swag/.gitignore b/vendor/github.com/swaggo/swag/.gitignore index e09612418..bea8db681 100644 --- a/vendor/github.com/swaggo/swag/.gitignore +++ b/vendor/github.com/swaggo/swag/.gitignore @@ -1,7 +1,10 @@ dist testdata/simple*/docs +example/basic/docs/* +example/celler/docs/* cover.out + # Test binary, build with `go test -c` *.test @@ -12,3 +15,6 @@ cover.out # Etc .DS_Store + +swag +swag.exe diff --git a/vendor/github.com/swaggo/swag/.travis.yml b/vendor/github.com/swaggo/swag/.travis.yml index 00fd37646..560bdf389 100644 --- a/vendor/github.com/swaggo/swag/.travis.yml +++ b/vendor/github.com/swaggo/swag/.travis.yml @@ -2,13 +2,12 @@ language: go sudo: false go: - - 1.9.x - 1.10.x - 1.11.x - 1.12.x install: - - make install + - make deps script: - make fmt-check diff --git a/vendor/github.com/swaggo/swag/Makefile b/vendor/github.com/swaggo/swag/Makefile index dcdd8d853..75f950268 100644 --- a/vendor/github.com/swaggo/swag/Makefile +++ b/vendor/github.com/swaggo/swag/Makefile @@ -3,14 +3,16 @@ GOLINT:=$(shell which golint) GOIMPORT:=$(shell which goimports) GOFMT:=$(shell which gofmt) GOBUILD:=$(GOCMD) build +GOINSTALL:=$(GOCMD) install GOCLEAN:=$(GOCMD) clean GOTEST:=$(GOCMD) test GOGET:=$(GOCMD) get GOLIST:=$(GOCMD) list GOVET:=$(GOCMD) vet +u := $(if $(update),-u) BINARY_NAME:=swag -PACKAGES:=$(shell $(GOLIST) ./...) +PACKAGES:=$(shell $(GOLIST) github.com/swaggo/swag github.com/swaggo/swag/cmd/swag github.com/swaggo/swag/gen) GOFILES:=$(shell find . -name "*.go" -type f) export GO111MODULE := on @@ -18,8 +20,12 @@ export GO111MODULE := on all: test build .PHONY: build -build: - $(GOBUILD) -o $(BINARY_NAME) -v ./cmd/... +build: deps + $(GOBUILD) -o $(BINARY_NAME) ./cmd/swag + +.PHONY: install +install: deps + $(GOINSTALL) ./cmd/swag .PHONY: test test: @@ -45,18 +51,25 @@ clean: $(GOCLEAN) rm -f $(BINARY_NAME) -.PHONY: install -install: - $(GOGET) -v ./... 
+.PHONY: deps +deps: + $(GOGET) ${u} -d $(GOGET) github.com/stretchr/testify/assert + $(GOGET) github.com/alecthomas/template + +.PHONY: devel-deps +devel-deps: + GO111MODULE=off $(GOGET) -v -u \ + golang.org/x/lint/golint \ + github.com/swaggo/swag/cmd/swag \ + github.com/swaggo/swag/gen .PHONY: lint -lint: - which golint || $(GOGET) -u golang.org/x/lint/golint +lint: devel-deps for PKG in $(PACKAGES); do golint -set_exit_status $$PKG || exit 1; done; .PHONY: vet -vet: +vet: deps devel-deps $(GOVET) $(PACKAGES) .PHONY: fmt diff --git a/vendor/github.com/swaggo/swag/README.md b/vendor/github.com/swaggo/swag/README.md index b69ca61e2..4d24e6c8c 100644 --- a/vendor/github.com/swaggo/swag/README.md +++ b/vendor/github.com/swaggo/swag/README.md @@ -9,6 +9,7 @@ [![Go Doc](https://godoc.org/github.com/swaggo/swagg?status.svg)](https://godoc.org/github.com/swaggo/swag) [![Backers on Open Collective](https://opencollective.com/swag/backers/badge.svg)](#backers) [![Sponsors on Open Collective](https://opencollective.com/swag/sponsors/badge.svg)](#sponsors) [![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fswaggo%2Fswag.svg?type=shield)](https://app.fossa.io/projects/git%2Bgithub.com%2Fswaggo%2Fswag?ref=badge_shield) +[![Release](https://img.shields.io/github/release/swaggo/swag.svg?style=flat-square)](https://github.com/swaggo/swag/releases) Swag converts Go annotations to Swagger Documentation 2.0. We've created a variety of plugins for popular [Go web frameworks](#supported-web-frameworks). This allows you to quickly integrate with an existing Go project (using Swagger UI). @@ -19,7 +20,7 @@ Swag converts Go annotations to Swagger Documentation 2.0. We've created a varie - [How to use it with Gin](#how-to-use-it-with-gin) - [Implementation Status](#implementation-status) - [Declarative Comments Format](#declarative-comments-format) - - [General API Info](##general-api-info) + - [General API Info](#general-api-info) - [API Operation](#api-operation) - [Security](#security) - [Examples](#examples) @@ -29,7 +30,7 @@ Swag converts Go annotations to Swagger Documentation 2.0. We've created a varie - [Use multiple path params](#use-multiple-path-params) - [Example value of struct](#example-value-of-struct) - [Description of struct](#description-of-struct) - - [Override swagger type of a struct field](#Override-swagger-type-of-a-struct-field) + - [Use swaggertype tag to supported custom type](#use-swaggertype-tag-to-supported-custom-type) - [Add extension info to struct field](#add-extension-info-to-struct-field) - [How to using security annotations](#how-to-using-security-annotations) - [About the Project](#about-the-project) @@ -42,6 +43,8 @@ Swag converts Go annotations to Swagger Documentation 2.0. We've created a varie ```sh $ go get -u github.com/swaggo/swag/cmd/swag ``` +To build from source you need [Go](https://golang.org/dl/) (1.9 or newer). + Or download the pre-compiled binaries binray form [release page](https://github.com/swaggo/swag/releases). 3. Run `swag init` in the project's root folder which contains the `main.go` file. This will parse your comments and generate the required files (`docs` folder and `docs/docs.go`). 
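As an illustrative sketch (not taken from this patch), a handler annotated with the declarative comment format that `swag init` parses might look like the following; the handler, route and messages are hypothetical:

```go
package main

import (
	"fmt"
	"net/http"
)

// getTask is a hypothetical handler used only to illustrate the declarative
// comment format picked up by `swag init`.
//
// @Summary Get a task
// @Description Returns a single task by its ID.
// @Tags tasks
// @Accept json
// @Produce json
// @Param id path int true "Task ID"
// @Success 200 {string} string "the task as JSON"
// @Failure 404 {string} string "task not found"
// @Router /tasks/{id} [get]
func getTask(w http.ResponseWriter, r *http.Request) {
	fmt.Fprint(w, `{"id": 1}`)
}

func main() {
	http.HandleFunc("/tasks/", getTask)
	http.ListenAndServe(":8080", nil)
}
```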
@@ -69,7 +72,8 @@ OPTIONS: --dir value, -d value Directory you want to parse (default: "./") --propertyStrategy value, -p value Property Naming Strategy like snakecase,camelcase,pascalcase (default: "camelcase") --output value, -o value Output directory for al the generated files(swagger.json, swagger.yaml and doc.go) (default: "./docs") - --parseVendor Parse go files in 'vendor' folder, disabled by default --output value, -o value Output directory for al the generated files(swagger.json, swagger.yaml and doc.go) (default: "./docs") + --parseVendor Parse go files in 'vendor' folder, disabled by default + --parseDependency Parse go files in outside dependency folder, disabled by default ``` ## Supported Web Frameworks @@ -86,7 +90,7 @@ Find the example source code [here](https://github.com/swaggo/swag/tree/master/e 1. After using `swag init` to generate Swagger 2.0 docs, import the following packages: ```go import "github.com/swaggo/gin-swagger" // gin-swagger middleware -import "github.com/swaggo/gin-swagger/swaggerFiles" // swagger embed files +import "github.com/swaggo/files" // swagger embed files ``` 2. Add [General API](#general-api-info) annotations in `main.go` code: @@ -134,6 +138,8 @@ import "github.com/swaggo/gin-swagger/swaggerFiles" // swagger embed files // @authorizationurl https://example.com/oauth/authorize // @scope.admin Grants read and write access to administrative information +// @x-extension-openapi {"example": "value on a json format"} + func main() { r := gin.Default() @@ -165,9 +171,9 @@ package main import ( "github.com/gin-gonic/gin" + "github.com/swaggo/files" "github.com/swaggo/gin-swagger" - "github.com/swaggo/gin-swagger/swaggerFiles" - + "./docs" // docs is generated by Swag CLI, you have to import it. ) @@ -188,7 +194,8 @@ func main() { docs.SwaggerInfo.Version = "1.0" docs.SwaggerInfo.Host = "petstore.swagger.io" docs.SwaggerInfo.BasePath = "/v2" - + docs.SwaggerInfo.Schemes = []string{"http", "https"} + r := gin.New() // use ginSwagger middleware to serve the API docs @@ -309,7 +316,7 @@ $ swag init | tag.name | Name of a tag.| // @tag.name This is the name of the tag | | tag.description | Description of the tag | // @tag.description Cool Description | | tag.docs.url | Url of the external Documentation of the tag | // @tag.docs.url https://example.com| -| tag.docs.descripiton | Description of the external Documentation of the tag| // @tag.docs.descirption Best example documentation | +| tag.docs.description | Description of the external Documentation of the tag| // @tag.docs.description Best example documentation | | termsOfService | The Terms of Service for the API.| // @termsOfService http://swagger.io/terms/ | | contact.name | The contact information for the exposed API.| // @contact.name API Support | | contact.url | The URL pointing to the contact information. MUST be in the format of a URL. | // @contact.url http://www.swagger.io/support| @@ -319,6 +326,21 @@ $ swag init | host | The host (name or ip) serving the API. | // @host localhost:8080 | | BasePath | The base path on which the API is served. | // @BasePath /api/v1 | | schemes | The transfer protocol for the operation that separated by spaces. | // @schemes http https | +| x-name | The extension key, must be start by x- and take only json value | // @x-example-key {"key": "value"} | + +### Using markdown descriptions +When a short string in your documentation is insufficient, or you need images, code examples and things like that you may want to use markdown descriptions. 
In order to use markdown descriptions use the following annotations. + + +| annotation | description | example | +|-------------|--------------------------------------------|---------------------------------| +| title | **Required.** The title of the application.| // @title Swagger Example API | +| version | **Required.** Provides the version of the application API.| // @version 1.0 | +| description.markdown | A short description of the application. Parsed from the api.md file. This is an alternative to @description |// @description.markdown No value needed, this parses the description from api.md | +| tag.name | Name of a tag.| // @tag.name This is the name of the tag | +| tag.description.markdown | Description of the tag this is an alternative to tag.description. The description will be read from a file named like tagname.md | // @tag.description.markdown | + + ## API Operation @@ -326,20 +348,21 @@ $ swag init [celler/controller](https://github.com/swaggo/swag/tree/master/example/celler/controller) -| annotation | description | -|--------------------|----------------------------------------------------------------------------------------------------------------------------| -| description | A verbose explanation of the operation behavior. | -| id | A unique string used to identify the operation. Must be unique among all API operations. | -| tags | A list of tags to each API operation that separated by commas. | -| summary | A short summary of what the operation does. | -| accept | A list of MIME types the APIs can consume. Value MUST be as described under [Mime Types](#mime-types). | -| produce | A list of MIME types the APIs can produce. Value MUST be as described under [Mime Types](#mime-types). | -| param | Parameters that separated by spaces. `param name`,`param type`,`data type`,`is mandatory?`,`comment` `attribute(optional)` | -| security | [Security](#security) to each API operation. | -| success | Success response that separated by spaces. `return code`,`{param type}`,`data type`,`comment` | -| failure | Failure response that separated by spaces. `return code`,`{param type}`,`data type`,`comment` | -| header | Header in response that separated by spaces. `return code`,`{param type}`,`data type`,`comment` | -| router | Path definition that separated by spaces. `path`,`[httpMethod]` | +| annotation | description | +|-------------|----------------------------------------------------------------------------------------------------------------------------| +| description | A verbose explanation of the operation behavior. | +| id | A unique string used to identify the operation. Must be unique among all API operations. | +| tags | A list of tags to each API operation that separated by commas. | +| summary | A short summary of what the operation does. | +| accept | A list of MIME types the APIs can consume. Value MUST be as described under [Mime Types](#mime-types). | +| produce | A list of MIME types the APIs can produce. Value MUST be as described under [Mime Types](#mime-types). | +| param | Parameters that separated by spaces. `param name`,`param type`,`data type`,`is mandatory?`,`comment` `attribute(optional)` | +| security | [Security](#security) to each API operation. | +| success | Success response that separated by spaces. `return code`,`{param type}`,`data type`,`comment` | +| failure | Failure response that separated by spaces. `return code`,`{param type}`,`data type`,`comment` | +| header | Header in response that separated by spaces. 
`return code`,`{param type}`,`data type`,`comment` | +| router | Path definition that separated by spaces. `path`,`[httpMethod]` | +| x-name | The extension key, must be start by x- and take only json value. | ## Mime Types @@ -365,12 +388,11 @@ Besides that, `swag` also accepts aliases for some MIME Types as follows: ## Param Type -- object (struct) -- string (string) -- integer (int, uint, uint32, uint64) -- number (float32) -- boolean (bool) -- array +- query +- path +- header +- body +- formData ## Data Type @@ -508,7 +530,8 @@ type Account struct { } ``` -### Override swagger type of a struct field +### Use swaggertype tag to supported custom type +[#201](https://github.com/swaggo/swag/issues/201#issuecomment-475479409) ```go type TimestampTime struct { @@ -544,6 +567,33 @@ type Account struct { } ``` +[#379](https://github.com/swaggo/swag/issues/379) +```go +type CerticateKeyPair struct { + Crt []byte `json:"crt" swaggertype:"string" format:"base64" example:"U3dhZ2dlciByb2Nrcw=="` + Key []byte `json:"key" swaggertype:"string" format:"base64" example:"U3dhZ2dlciByb2Nrcw=="` +} +``` +generated swagger doc as follows: +```go +"api.MyBinding": { + "type":"object", + "properties":{ + "crt":{ + "type":"string", + "format":"base64", + "example":"U3dhZ2dlciByb2Nrcw==" + }, + "key":{ + "type":"string", + "format":"base64", + "example":"U3dhZ2dlciByb2Nrcw==" + } + } +} + +``` + ### Add extension info to struct field ```go @@ -627,4 +677,4 @@ Support this project by becoming a sponsor. Your logo will show up here with a l ## License -[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fswaggo%2Fswag.svg?type=large)](https://app.fossa.io/projects/git%2Bgithub.com%2Fswaggo%2Fswag?ref=badge_large) \ No newline at end of file +[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fswaggo%2Fswag.svg?type=large)](https://app.fossa.io/projects/git%2Bgithub.com%2Fswaggo%2Fswag?ref=badge_large) diff --git a/vendor/github.com/swaggo/swag/cmd/swag/main.go b/vendor/github.com/swaggo/swag/cmd/swag/main.go index 61f2ce04c..fd3a67c76 100644 --- a/vendor/github.com/swaggo/swag/cmd/swag/main.go +++ b/vendor/github.com/swaggo/swag/cmd/swag/main.go @@ -1,15 +1,23 @@ package main import ( + "fmt" "log" "os" - "github.com/pkg/errors" "github.com/swaggo/swag" "github.com/swaggo/swag/gen" "github.com/urfave/cli" ) +const searchDirFlag = "dir" +const generalInfoFlag = "generalInfo" +const propertyStrategyFlag = "propertyStrategy" +const outputFlag = "output" +const parseVendorFlag = "parseVendor" +const parseDependency = "parseDependency" +const markdownFilesDirFlag = "markdownFiles" + func main() { app := cli.NewApp() app.Version = swag.Version @@ -20,16 +28,18 @@ func main() { Aliases: []string{"i"}, Usage: "Create docs.go", Action: func(c *cli.Context) error { - searchDir := c.String("dir") - mainAPIFile := c.String("generalInfo") - strategy := c.String("propertyStrategy") - outputDir := c.String("output") - parseVendor := c.Bool("parseVendor") + searchDir := c.String(searchDirFlag) + mainAPIFile := c.String(generalInfoFlag) + strategy := c.String(propertyStrategyFlag) + outputDir := c.String(outputFlag) + parseVendor := c.Bool(parseVendorFlag) + parseDependency := c.Bool(parseDependency) + markdownFilesDir := c.String(markdownFilesDirFlag) switch strategy { case swag.CamelCase, swag.SnakeCase, swag.PascalCase: default: - return errors.Errorf("not supported %s propertyStrategy", strategy) + return fmt.Errorf("not supported %s propertyStrategy", strategy) } return 
gen.New().Build(&gen.Config{ @@ -38,6 +48,8 @@ func main() { PropNamingStrategy: strategy, OutputDir: outputDir, ParseVendor: parseVendor, + ParseDependency: parseDependency, + MarkdownFilesDir: markdownFilesDir, }) }, Flags: []cli.Flag{ @@ -65,6 +77,15 @@ func main() { Name: "parseVendor", Usage: "Parse go files in 'vendor' folder, disabled by default", }, + cli.BoolFlag{ + Name: "parseDependency", + Usage: "Parse go files in outside dependency folder, disabled by default", + }, + cli.StringFlag{ + Name: "markdownFiles, md", + Value: "", + Usage: "Parse folder containing markdown files to use as description, disabled by default", + }, }, }, } diff --git a/vendor/github.com/swaggo/swag/debug.go b/vendor/github.com/swaggo/swag/debug.go index 68025eb5c..f28a8e653 100644 --- a/vendor/github.com/swaggo/swag/debug.go +++ b/vendor/github.com/swaggo/swag/debug.go @@ -15,14 +15,14 @@ func isRelease() bool { return swagMode == release } -//Println calls Output to print to the standard logger when release mode. +// Println calls Output to print to the standard logger when release mode. func Println(v ...interface{}) { if isRelease() { log.Println(v...) } } -//Printf calls Output to print to the standard logger when release mode. +// Printf calls Output to print to the standard logger when release mode. func Printf(format string, v ...interface{}) { if isRelease() { log.Printf(format, v...) diff --git a/vendor/github.com/swaggo/swag/gen/gen.go b/vendor/github.com/swaggo/swag/gen/gen.go index c2fc54b1e..7fed9d041 100644 --- a/vendor/github.com/swaggo/swag/gen/gen.go +++ b/vendor/github.com/swaggo/swag/gen/gen.go @@ -1,22 +1,26 @@ package gen import ( + "bytes" "encoding/json" "fmt" + "go/format" + "io" "log" "os" "path" + "strings" "text/template" "time" + "github.com/go-openapi/spec" + "github.com/ghodss/yaml" - "github.com/pkg/errors" "github.com/swaggo/swag" ) // Gen presents a generate tool for swag. -type Gen struct { -} +type Gen struct{} // New creates a new Gen. func New() *Gen { @@ -28,41 +32,51 @@ type Config struct { // SearchDir the swag would be parse SearchDir string - //OutputDir represents the output directory for al the generated files + // OutputDir represents the output directory for al the generated files OutputDir string - //MainAPIFile the Go file path in which 'swagger general API Info' is written + // MainAPIFile the Go file path in which 'swagger general API Info' is written MainAPIFile string - //PropNamingStrategy represents property naming strategy like snakecase,camelcase,pascalcase + // PropNamingStrategy represents property naming strategy like snakecase,camelcase,pascalcase PropNamingStrategy string - //ParseVendor whether swag should be parse vendor folder + // ParseVendor whether swag should be parse vendor folder ParseVendor bool + + // ParseDependencies whether swag should be parse outside dependency folder + ParseDependency bool + + // MarkdownFilesDir used to find markdownfiles, which can be used for tag descriptions + MarkdownFilesDir string } -// Build builds swagger json file for gived searchDir and mainAPIFile. Returns json +// Build builds swagger json file for given searchDir and mainAPIFile. 
Returns json func (g *Gen) Build(config *Config) error { if _, err := os.Stat(config.SearchDir); os.IsNotExist(err) { return fmt.Errorf("dir: %s is not exist", config.SearchDir) } log.Println("Generate swagger docs....") - p := swag.New() + p := swag.New(swag.SetMarkdownFileDirectory(config.MarkdownFilesDir)) p.PropNamingStrategy = config.PropNamingStrategy p.ParseVendor = config.ParseVendor + p.ParseDependency = config.ParseDependency if err := p.ParseAPI(config.SearchDir, config.MainAPIFile); err != nil { return err } swagger := p.GetSwagger() - b, err := json.MarshalIndent(swagger, "", " ") + b, err := g.jsonIndent(swagger) if err != nil { return err } - os.MkdirAll(config.OutputDir, os.ModePerm) + if err := os.MkdirAll(config.OutputDir, os.ModePerm); err != nil { + return err + } + docs, err := os.Create(path.Join(config.OutputDir, "docs.go")) if err != nil { return err @@ -73,9 +87,11 @@ func (g *Gen) Build(config *Config) error { if err != nil { return err } - defer swaggerJSON.Close() - swaggerJSON.Write(b) + + if _, err := swaggerJSON.Write(b); err != nil { + return err + } swaggerYAML, err := os.Create(path.Join(config.OutputDir, "swagger.yaml")) if err != nil { @@ -85,18 +101,16 @@ func (g *Gen) Build(config *Config) error { defer swaggerYAML.Close() y, err := yaml.JSONToYAML(b) if err != nil { - return errors.Wrap(err, "cannot covert json to yaml") + return fmt.Errorf("cannot covert json to yaml error: %s", err) } - swaggerYAML.Write(y) + if _, err := swaggerYAML.Write(y); err != nil { + return err + } - if err := packageTemplate.Execute(docs, struct { - Timestamp time.Time - Doc string - }{ - Timestamp: time.Now(), - Doc: "`" + string(b) + "`", - }); err != nil { + // Write doc + err = g.writeGoDoc(docs, swagger) + if err != nil { return err } @@ -107,7 +121,102 @@ func (g *Gen) Build(config *Config) error { return nil } -var packageTemplate = template.Must(template.New("").Parse(`// GENERATED BY THE COMMAND ABOVE; DO NOT EDIT +func (g *Gen) jsonIndent(data interface{}) ([]byte, error) { + return json.MarshalIndent(data, "", " ") +} + +func (g *Gen) formatSource(src []byte) []byte { + code, err := format.Source(src) + if err != nil { + code = src // Output the unformated code anyway + } + return code +} + +func (g *Gen) writeGoDoc(output io.Writer, swagger *spec.Swagger) error { + + generator, err := template.New("swagger_info").Funcs(template.FuncMap{ + "printDoc": func(v string) string { + // Add schemes + v = "{\n \"schemes\": {{ marshal .Schemes }}," + v[1:] + // Sanitize backticks + return strings.Replace(v, "`", "`+\"`\"+`", -1) + }, + }).Parse(packageTemplate) + if err != nil { + return err + } + + swaggerSpec := &spec.Swagger{ + VendorExtensible: swagger.VendorExtensible, + SwaggerProps: spec.SwaggerProps{ + ID: swagger.ID, + Consumes: swagger.Consumes, + Produces: swagger.Produces, + Swagger: swagger.Swagger, + Info: &spec.Info{ + VendorExtensible: swagger.Info.VendorExtensible, + InfoProps: spec.InfoProps{ + Description: "{{.Description}}", + Title: "{{.Title}}", + TermsOfService: swagger.Info.TermsOfService, + Contact: swagger.Info.Contact, + License: swagger.Info.License, + Version: "{{.Version}}", + }, + }, + Host: "{{.Host}}", + BasePath: "{{.BasePath}}", + Paths: swagger.Paths, + Definitions: swagger.Definitions, + Parameters: swagger.Parameters, + Responses: swagger.Responses, + SecurityDefinitions: swagger.SecurityDefinitions, + Security: swagger.Security, + Tags: swagger.Tags, + ExternalDocs: swagger.ExternalDocs, + }, + } + + // crafted docs.json + buf, err 
:= g.jsonIndent(swaggerSpec) + if err != nil { + return err + } + + buffer := &bytes.Buffer{} + err = generator.Execute(buffer, struct { + Timestamp time.Time + Doc string + Host string + BasePath string + Schemes []string + Title string + Description string + Version string + }{ + Timestamp: time.Now(), + Doc: string(buf), + Host: swagger.Host, + BasePath: swagger.BasePath, + Schemes: swagger.Schemes, + Title: swagger.Info.Title, + Description: swagger.Info.Description, + Version: swagger.Info.Version, + }) + if err != nil { + return err + } + + code := g.formatSource(buffer.Bytes()) + + // write + _, err = output.Write(code) + return err + +} + +var packageTemplate = `// GENERATED BY THE COMMAND ABOVE; DO NOT EDIT // This file was generated by swaggo/swag at // {{ .Timestamp }} @@ -115,34 +224,52 @@ package docs import ( "bytes" + "encoding/json" + "strings" "github.com/alecthomas/template" "github.com/swaggo/swag" ) -var doc = {{.Doc}} +var doc = ` + "`{{ printDoc .Doc}}`" + ` type swaggerInfo struct { Version string Host string BasePath string + Schemes []string Title string Description string } // SwaggerInfo holds exported Swagger Info so clients can modify it -var SwaggerInfo swaggerInfo +var SwaggerInfo = swaggerInfo{ + Version: {{ printf "%q" .Version}}, + Host: {{ printf "%q" .Host}}, + BasePath: {{ printf "%q" .BasePath}}, + Schemes: []string{ {{ range $index, $schema := .Schemes}}{{if gt $index 0}},{{end}}{{printf "%q" $schema}}{{end}} }, + Title: {{ printf "%q" .Title}}, + Description: {{ printf "%q" .Description}}, +} type s struct{} func (s *s) ReadDoc() string { - t, err := template.New("swagger_info").Parse(doc) + sInfo := SwaggerInfo + sInfo.Description = strings.Replace(sInfo.Description, "\n", "\\n", -1) + + t, err := template.New("swagger_info").Funcs(template.FuncMap{ + "marshal": func(v interface{}) string { + a, _ := json.Marshal(v) + return string(a) + }, + }).Parse(doc) if err != nil { return doc } var tpl bytes.Buffer - if err := t.Execute(&tpl, SwaggerInfo); err != nil { + if err := t.Execute(&tpl, sInfo); err != nil { return doc } @@ -152,4 +279,4 @@ func (s *s) ReadDoc() string { func init() { swag.Register(swag.Name, &s{}) } -`)) +` diff --git a/vendor/github.com/swaggo/swag/go.mod b/vendor/github.com/swaggo/swag/go.mod index d09198e23..9b9c0628c 100644 --- a/vendor/github.com/swaggo/swag/go.mod +++ b/vendor/github.com/swaggo/swag/go.mod @@ -1,31 +1,14 @@ module github.com/swaggo/swag require ( - github.com/PuerkitoBio/purell v1.1.1 // indirect + github.com/KyleBanks/depth v1.2.1 + github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 github.com/ghodss/yaml v1.0.0 - github.com/gin-contrib/sse v0.0.0-20190301062529-5545eab6dad3 // indirect - github.com/gin-gonic/gin v1.3.0 - github.com/go-openapi/jsonpointer v0.18.0 // indirect - github.com/go-openapi/jsonreference v0.18.0 - github.com/go-openapi/spec v0.18.0 - github.com/go-openapi/swag v0.18.0 // indirect - github.com/golang/protobuf v1.3.1 // indirect - github.com/json-iterator/go v1.1.6 // indirect - github.com/kr/pretty v0.1.0 // indirect - github.com/mailru/easyjson v0.0.0-20190312143242-1de009706dbe // indirect - github.com/mattn/go-isatty v0.0.7 // indirect - github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect - github.com/modern-go/reflect2 v1.0.1 // indirect - github.com/pkg/errors v0.8.1 - github.com/satori/go.uuid v1.2.0 - github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24 // indirect + github.com/go-openapi/jsonreference v0.19.0 + 
github.com/go-openapi/spec v0.19.0 github.com/stretchr/testify v1.3.0 - github.com/swaggo/gin-swagger v1.1.0 - github.com/ugorji/go/codec v0.0.0-20190320090025-2dc34c0b8780 // indirect + github.com/swaggo/files v0.0.0-20190704085106-630677cd5c14 + github.com/swaggo/gin-swagger v1.2.0 github.com/urfave/cli v1.20.0 - golang.org/x/net v0.0.0-20190322120337-addf6b3196f6 // indirect - golang.org/x/sys v0.0.0-20190322080309-f49334f85ddc // indirect - golang.org/x/tools v0.0.0-20190322203728-c1a832b0ad89 - gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 // indirect - gopkg.in/go-playground/assert.v1 v1.2.1 // indirect + golang.org/x/tools v0.0.0-20190611222205-d73e1c7e250b ) diff --git a/vendor/github.com/swaggo/swag/go.sum b/vendor/github.com/swaggo/swag/go.sum index eeeab59a0..b5aa48327 100644 --- a/vendor/github.com/swaggo/swag/go.sum +++ b/vendor/github.com/swaggo/swag/go.sum @@ -1,106 +1,85 @@ +github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc= +github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE= github.com/PuerkitoBio/purell v1.1.0 h1:rmGxhojJlM0tuKtfdvliR84CFHljx9ag64t2xmVkjK4= github.com/PuerkitoBio/purell v1.1.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= -github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI= -github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M= github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 h1:JYp7IbQjafoB+tBA3gMyHYHrpOtNuDiK/uB5uXxq5wM= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= -github.com/gin-contrib/sse v0.0.0-20170109093832-22d885f9ecc7 h1:AzN37oI0cOS+cougNAV9szl6CVoj2RYwzS3DpUQNtlY= +github.com/gin-contrib/gzip v0.0.1/go.mod h1:fGBJBCdt6qCZuCAOwWuFhBB4OOq9EFqlo5dEaFhhu5w= github.com/gin-contrib/sse v0.0.0-20170109093832-22d885f9ecc7/go.mod h1:VJ0WA2NBN22VlZ2dKZQPAPnyWw5XTlK1KymzLKsr59s= -github.com/gin-contrib/sse v0.0.0-20190301062529-5545eab6dad3 h1:t8FVkw33L+wilf2QiWkw0UV77qRpcH/JHPKGpKa2E8g= github.com/gin-contrib/sse v0.0.0-20190301062529-5545eab6dad3/go.mod h1:VJ0WA2NBN22VlZ2dKZQPAPnyWw5XTlK1KymzLKsr59s= -github.com/gin-gonic/gin v1.3.0 h1:kCmZyPklC0gVdL728E6Aj20uYBJV93nj/TkwBTKhFbs= +github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= github.com/gin-gonic/gin v1.3.0/go.mod h1:7cKuhb5qV2ggCFctp2fJQ+ErvciLZrIeoOSOm6mUr7Y= +github.com/gin-gonic/gin v1.4.0/go.mod h1:OW2EZn3DO8Ln9oIKOvM++LBO+5UPHJJDH72/q/3rZdM= github.com/go-openapi/jsonpointer v0.17.0 h1:nH6xp8XdXHx8dqveo0ZuJBluCO2qGrPbDNZ0dwoRHP0= github.com/go-openapi/jsonpointer v0.17.0/go.mod h1:cOnomiV+CVVwFLk0A/MExoFMjwdsUdVpsRhURCKh+3M= -github.com/go-openapi/jsonpointer v0.18.0 h1:KVRzjXpMzgdM4GEMDmDTnGcY5yBwGWreJwmmk4k35yU= -github.com/go-openapi/jsonpointer v0.18.0/go.mod 
h1:cOnomiV+CVVwFLk0A/MExoFMjwdsUdVpsRhURCKh+3M= github.com/go-openapi/jsonreference v0.17.0/go.mod h1:g4xxGn04lDIRh0GJb5QlpE3HfopLOL6uZrK/VgnsK9I= -github.com/go-openapi/jsonreference v0.18.0 h1:oP2OUNdG1l2r5kYhrfVMXO54gWmzcfAwP/GFuHpNTkE= -github.com/go-openapi/jsonreference v0.18.0/go.mod h1:g4xxGn04lDIRh0GJb5QlpE3HfopLOL6uZrK/VgnsK9I= -github.com/go-openapi/spec v0.18.0 h1:aIjeyG5mo5/FrvDkpKKEGZPmF9MPHahS72mzfVqeQXQ= -github.com/go-openapi/spec v0.18.0/go.mod h1:XkF/MOi14NmjsfZ8VtAKf8pIlbZzyoTvZsdfssdxcBI= +github.com/go-openapi/jsonreference v0.19.0 h1:BqWKpV1dFd+AuiKlgtddwVIFQsuMpxfBDBHGfM2yNpk= +github.com/go-openapi/jsonreference v0.19.0/go.mod h1:g4xxGn04lDIRh0GJb5QlpE3HfopLOL6uZrK/VgnsK9I= +github.com/go-openapi/spec v0.19.0 h1:A4SZ6IWh3lnjH0rG0Z5lkxazMGBECtrZcbyYQi+64k4= +github.com/go-openapi/spec v0.19.0/go.mod h1:XkF/MOi14NmjsfZ8VtAKf8pIlbZzyoTvZsdfssdxcBI= github.com/go-openapi/swag v0.17.0 h1:iqrgMg7Q7SvtbWLlltPrkMs0UBJI6oTSs79JFRUi880= github.com/go-openapi/swag v0.17.0/go.mod h1:AByQ+nYG6gQg71GINrmuDXCPWdL640yX49/kXLo40Tg= -github.com/go-openapi/swag v0.18.0 h1:1DU8Km1MRGv9Pj7BNLmkA+umwTStwDHttXvx3NhJA70= -github.com/go-openapi/swag v0.18.0/go.mod h1:AByQ+nYG6gQg71GINrmuDXCPWdL640yX49/kXLo40Tg= -github.com/golang/protobuf v1.2.0 h1:P3YflyNX/ehuJFLhxviNdFxQPkGK5cDcApsge1SqnvM= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.1 h1:YF8+flBXS5eO826T4nzqPrxfhQThhXl0YzfuUPu4SBg= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/json-iterator/go v1.1.6 h1:MrUvLMLTMxbqFJ9kzlvat/rYZqZnW3u4wkLzWTaFwKs= +github.com/json-iterator/go v1.1.5/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= -github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329 h1:2gxZ0XQIU/5z3Z3bUBu+FXuk2pFbkN6tcwi/pjyaDic= github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= -github.com/mailru/easyjson v0.0.0-20190312143242-1de009706dbe h1:W/GaMY0y69G4cFlmsC6B9sbuo2fP8OFP1ABjt4kPz+w= -github.com/mailru/easyjson v0.0.0-20190312143242-1de009706dbe/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= -github.com/mattn/go-isatty v0.0.4 h1:bnP0vzxcAdeI1zdubAl5PjU6zsERjGZb7raWodagDYs= github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= -github.com/mattn/go-isatty v0.0.7 h1:UvyT9uN+3r7yLEYSlJsbQGdsaB/a0DlgWP3pql6iwOc= github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= 
-github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/satori/go.uuid v1.2.0 h1:0uYX9dsZ2yD7q2RtLRtPSdGDWzjeM3TbMJP9utgA0ww= -github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= -github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24 h1:pntxY8Ary0t43dCZ5dqY4YTJCObLY1kIXl0uzMv+7DE= -github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/swaggo/gin-swagger v1.1.0 h1:ZI6/82S07DkkrMfGKbJhKj1R+QNTICkeAJP06pU36pU= -github.com/swaggo/gin-swagger v1.1.0/go.mod h1:FQlm07YuT1glfN3hQiO11UQ2m39vOCZ/aa3WWr5E+XU= -github.com/swaggo/swag v1.4.0/go.mod h1:hog2WgeMOrQ/LvQ+o1YGTeT+vWVrbi0SiIslBtxKTyM= -github.com/ugorji/go v1.1.2 h1:JON3E2/GPW2iDNGoSAusl1KDf5TRQ8k8q7Tp097pZGs= -github.com/ugorji/go v1.1.2/go.mod h1:hnLbHMwcvSihnDhEfx2/BzKp2xb0Y+ErdfYcrs9tkJQ= -github.com/ugorji/go/codec v0.0.0-20181209151446-772ced7fd4c2 h1:EICbibRW4JNKMcY+LsWmuwob+CRS1BmdRdjphAm9mH4= -github.com/ugorji/go/codec v0.0.0-20181209151446-772ced7fd4c2/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= -github.com/ugorji/go/codec v0.0.0-20190320090025-2dc34c0b8780 h1:vG/gY/PxA3v3l04qxe3tDjXyu3bozii8ulSlIPOYKhI= -github.com/ugorji/go/codec v0.0.0-20190320090025-2dc34c0b8780/go.mod h1:iT03XoTwV7xq/+UGwKO3UbC1nNNlopQiY61beSdrtOA= +github.com/swaggo/files v0.0.0-20190704085106-630677cd5c14/go.mod h1:gxQT6pBGRuIGunNf/+tSOB5OHvguWi8Tbt82WOkf35E= +github.com/swaggo/gin-swagger v1.2.0/go.mod h1:qlH2+W7zXGZkczuL+r2nEBR2JTT+/lX05Nn6vPhc7OI= +github.com/swaggo/swag v1.5.1/go.mod h1:1Bl9F/ZBpVWh22nY0zmYyASPO1lI/zIwRDrpZU+tv8Y= +github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= +github.com/ugorji/go v1.1.5-pre/go.mod h1:FwP/aQVg39TXzItUBMwnWp9T9gPQnXw4Poh4/oBQZ/0= +github.com/ugorji/go/codec v0.0.0-20181022190402-e5e69e061d4f/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= +github.com/ugorji/go/codec v1.1.5-pre/go.mod h1:tULtS6Gy1AE1yCENaw4Vb//HLH5njI2tfCQDUqRd8fI= github.com/urfave/cli v1.20.0 h1:fDqGv3UG/4jbVl/QkFwEdddtEDjh/5Ov6X+0B/3bPaw= github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/net v0.0.0-20181005035420-146acd28ed58 h1:otZG8yDCO4LVps5+9bxOeNiCvgmOyt96J3roHTYs7oE= golang.org/x/net v0.0.0-20181005035420-146acd28ed58/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190311183353-d8887717615a h1:oWX7TPOiFAMXLq8o0ikBYfCJVlRHBcsciT5bXOrH628= +golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= 
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190322120337-addf6b3196f6 h1:78jEq2G3J16aXneH23HSnTQQTCwMHoyO8VEiUH+bpPM= -golang.org/x/net v0.0.0-20190322120337-addf6b3196f6/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190611141213-3f473d35a33a h1:+KkCgOMgnKSgenxTBoiwkMqTiouMIy/3o8RLdmSbGoY= +golang.org/x/net v0.0.0-20190611141213-3f473d35a33a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20181228144115-9a3f9b0469bb/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190322080309-f49334f85ddc h1:4gbWbmmPFp4ySWICouJl6emP0MyS31yy9SrTlAGFT+g= -golang.org/x/sys v0.0.0-20190322080309-f49334f85ddc/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= +golang.org/x/sys v0.0.0-20190610200419-93c9922d18ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/tools v0.0.0-20190110015856-aa033095749b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190322203728-c1a832b0ad89 h1:iWXXYN3edZ3Nd/7I6Rt1sXrWVmhF9bgVtlEJ7BbH124= -golang.org/x/tools v0.0.0-20190322203728-c1a832b0ad89/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190606050223-4d9ae51c2468/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190611222205-d73e1c7e250b h1:/mJ+GKieZA6hFDQGdWZrjj4AXPl5ylY+5HusG80roy0= +golang.org/x/tools v0.0.0-20190611222205-d73e1c7e250b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/go-playground/assert.v1 v1.2.1 h1:xoYuJVE7KT85PYWrN730RguIQO0ePzVRfFMXadIrXTM= gopkg.in/go-playground/assert.v1 v1.2.1/go.mod h1:9RXL0bg/zibRAgZUYszZSwO/z8Y/a8bDuhia5mkpMnE= -gopkg.in/go-playground/validator.v8 v8.18.2 h1:lFB4DoMU6B626w8ny76MV7VX6W2VHct2GVOI3xgiMrQ= gopkg.in/go-playground/validator.v8 v8.18.2/go.mod h1:RX2a/7Ha8BgOhfk7j780h4/u/RRjR0eouCJSH80/M2Y= -gopkg.in/yaml.v2 v2.2.1 h1:mUhvW9EsL+naU5Q3cakzfE91YhliOondGd6ZrsDBHQE= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2 
h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/vendor/github.com/swaggo/swag/operation.go b/vendor/github.com/swaggo/swag/operation.go index 9f8388a58..ba5184360 100644 --- a/vendor/github.com/swaggo/swag/operation.go +++ b/vendor/github.com/swaggo/swag/operation.go @@ -1,6 +1,7 @@ package swag import ( + "encoding/json" "fmt" "go/ast" goparser "go/parser" @@ -13,7 +14,6 @@ import ( "github.com/go-openapi/jsonreference" "github.com/go-openapi/spec" - "github.com/pkg/errors" "golang.org/x/tools/go/loader" ) @@ -61,16 +61,14 @@ func (operation *Operation) ParseComment(comment string, astFile *ast.File) erro if len(commentLine) == 0 { return nil } - attribute := strings.Fields(commentLine)[0] lineRemainder := strings.TrimSpace(commentLine[len(attribute):]) - switch strings.ToLower(attribute) { + lowerAttribute := strings.ToLower(attribute) + + var err error + switch lowerAttribute { case "@description": - if operation.Description == "" { - operation.Description = lineRemainder - } else { - operation.Description += "\n" + lineRemainder - } + operation.ParseDescriptionComment(lineRemainder) case "@summary": operation.Summary = lineRemainder case "@id": @@ -78,37 +76,50 @@ func (operation *Operation) ParseComment(comment string, astFile *ast.File) erro case "@tags": operation.ParseTagsComment(lineRemainder) case "@accept": - if err := operation.ParseAcceptComment(lineRemainder); err != nil { - return err - } + err = operation.ParseAcceptComment(lineRemainder) case "@produce": - if err := operation.ParseProduceComment(lineRemainder); err != nil { - return err - } + err = operation.ParseProduceComment(lineRemainder) case "@param": - if err := operation.ParseParamComment(lineRemainder, astFile); err != nil { - return err - } + err = operation.ParseParamComment(lineRemainder, astFile) case "@success", "@failure": - if err := operation.ParseResponseComment(lineRemainder, astFile); err != nil { - if err := operation.ParseEmptyResponseComment(lineRemainder); err != nil { - if err := operation.ParseEmptyResponseOnly(lineRemainder); err != nil { - return err - } - } - } + err = operation.ParseResponseComment(lineRemainder, astFile) case "@header": - if err := operation.ParseResponseHeaderComment(lineRemainder, astFile); err != nil { - return err - } + err = operation.ParseResponseHeaderComment(lineRemainder, astFile) case "@router": - if err := operation.ParseRouterComment(lineRemainder); err != nil { - return err - } + err = operation.ParseRouterComment(lineRemainder) case "@security": - if err := operation.ParseSecurityComment(lineRemainder); err != nil { - return err + err = operation.ParseSecurityComment(lineRemainder) + case "@deprecated": + operation.Deprecate() + default: + err = operation.ParseMetadata(attribute, lowerAttribute, lineRemainder) + } + + return err +} + +// ParseDescriptionComment godoc +func (operation *Operation) ParseDescriptionComment(lineRemainder string) { + if operation.Description == "" { + operation.Description = lineRemainder + return + } + operation.Description += "\n" + lineRemainder +} + +// ParseMetadata godoc +func (operation *Operation) ParseMetadata(attribute, lowerAttribute, lineRemainder string) error { + // parsing specific meta data extensions + if strings.HasPrefix(lowerAttribute, "@x-") { + if len(lineRemainder) == 0 { + return fmt.Errorf("annotation %s need a value", attribute) } + + var valueJSON interface{} + if err := json.Unmarshal([]byte(lineRemainder), 
&valueJSON); err != nil { + return fmt.Errorf("annotation %s need a valid json value", attribute) + } + operation.Operation.AddExtension(attribute[1:], valueJSON) // Trim "@" at head } return nil } @@ -116,7 +127,7 @@ func (operation *Operation) ParseComment(comment string, astFile *ast.File) erro var paramPattern = regexp.MustCompile(`(\S+)[\s]+([\w]+)[\s]+([\S.]+)[\s]+([\w]+)[\s]+"([^"]+)"`) // ParseParamComment parses params return []string of param properties -// E.g. @Param queryText form string true "The email for login" +// E.g. @Param queryText formData string true "The email for login" // [param name] [paramType] [data type] [is mandatory?] [Comment] // E.g. @Param some_id path int true "Some ID" func (operation *Operation) ParseParamComment(commentLine string, astFile *ast.File) error { @@ -126,34 +137,78 @@ func (operation *Operation) ParseParamComment(commentLine string, astFile *ast.F } name := matches[1] paramType := matches[2] + refType := TransToValidSchemeType(matches[3]) - schemaType := matches[3] + // Detect refType + objectType := "object" + if strings.HasPrefix(refType, "[]") == true { + objectType = "array" + refType = strings.TrimPrefix(refType, "[]") + } else if IsPrimitiveType(refType) || + paramType == "formData" && refType == "file" { + objectType = "primitive" + } requiredText := strings.ToLower(matches[4]) required := requiredText == "true" || requiredText == "required" description := matches[5] - var param spec.Parameter + param := createParameter(paramType, description, name, refType, required) - //five possible parameter types. switch paramType { - case "query", "path", "header": - param = createParameter(paramType, description, name, TransToValidSchemeType(schemaType), required) + case "path", "header", "formData": + switch objectType { + case "array", "object": + return fmt.Errorf("%s is not supported type for %s", refType, paramType) + } + case "query": + switch objectType { + case "array": + if !IsPrimitiveType(refType) { + return fmt.Errorf("%s is not supported array type for %s", refType, paramType) + } + param.SimpleSchema.Type = "array" + param.SimpleSchema.Items = &spec.Items{ + SimpleSchema: spec.SimpleSchema{ + Type: refType, + }, + } + case "object": + return fmt.Errorf("%s is not supported type for %s", refType, paramType) + } case "body": - param = createParameter(paramType, description, name, "object", required) // TODO: if Parameter types can be objects, but also primitives and arrays - if err := operation.registerSchemaType(schemaType, astFile); err != nil { - return err + switch objectType { + case "primitive": + param.Schema.Type = spec.StringOrArray{refType} + case "array": + param.Schema.Items = &spec.SchemaOrArray{ + Schema: &spec.Schema{ + SchemaProps: spec.SchemaProps{}, + }, + } + // Arrau of Primitive or Object + if IsPrimitiveType(refType) { + param.Schema.Items.Schema.Type = spec.StringOrArray{refType} + } else { + if err := operation.registerSchemaType(refType, astFile); err != nil { + return err + } + param.Schema.Items.Schema.Ref = spec.Ref{Ref: jsonreference.MustCreateRef("#/definitions/" + refType)} + } + case "object": + if err := operation.registerSchemaType(refType, astFile); err != nil { + return err + } + param.Schema.Type = spec.StringOrArray{objectType} + param.Schema.Ref = spec.Ref{ + Ref: jsonreference.MustCreateRef("#/definitions/" + refType), + } } - param.Schema.Ref = spec.Ref{ - Ref: jsonreference.MustCreateRef("#/definitions/" + schemaType), - } - case "formData": - param = createParameter(paramType, 
description, name, TransToValidSchemeType(schemaType), required) default: return fmt.Errorf("%s is not supported paramType", paramType) } - if err := operation.parseAndExtractionParamAttribute(commentLine, schemaType, ¶m); err != nil { + if err := operation.parseAndExtractionParamAttribute(commentLine, refType, ¶m); err != nil { return err } operation.Operation.Parameters = append(operation.Operation.Parameters, param) @@ -184,7 +239,7 @@ func (operation *Operation) registerSchemaType(schemaType string, astFile *ast.F var err error typeSpec, err = findTypeDef(impPath, typeName) if err != nil { - return errors.Wrapf(err, "can not find type def: %q", schemaType) + return fmt.Errorf("can not find type def: %q error: %s", schemaType, err) } break } @@ -223,81 +278,50 @@ var regexAttributes = map[string]*regexp.Regexp{ func (operation *Operation) parseAndExtractionParamAttribute(commentLine, schemaType string, param *spec.Parameter) error { schemaType = TransToValidSchemeType(schemaType) for attrKey, re := range regexAttributes { + attr, err := findAttr(re, commentLine) + if err != nil { + continue + } switch attrKey { case "enums": - enums, err := findAttrList(re, commentLine) + err := setEnumParam(attr, schemaType, param) if err != nil { - break - } - for _, e := range enums { - e = strings.TrimSpace(e) - param.Enum = append(param.Enum, defineType(schemaType, e)) + return err } case "maxinum": - attr, err := findAttr(re, commentLine) + n, err := setNumberParam(attrKey, schemaType, attr, commentLine) if err != nil { - break - } - if schemaType != "integer" && schemaType != "number" { - return fmt.Errorf("maxinum is attribute to set to a number. comment=%s got=%s", commentLine, schemaType) - } - n, err := strconv.ParseFloat(attr, 64) - if err != nil { - return fmt.Errorf("maximum is allow only a number. comment=%s got=%s", commentLine, attr) + return err } param.Maximum = &n case "mininum": - attr, err := findAttr(re, commentLine) + n, err := setNumberParam(attrKey, schemaType, attr, commentLine) if err != nil { - break - } - if schemaType != "integer" && schemaType != "number" { - return fmt.Errorf("mininum is attribute to set to a number. comment=%s got=%s", commentLine, schemaType) - } - n, err := strconv.ParseFloat(attr, 64) - if err != nil { - return fmt.Errorf("mininum is allow only a number got=%s", attr) + return err } param.Minimum = &n case "default": - attr, err := findAttr(re, commentLine) + value, err := defineType(schemaType, attr) if err != nil { - break + return nil } - param.Default = defineType(schemaType, attr) + param.Default = value case "maxlength": - attr, err := findAttr(re, commentLine) + n, err := setStringParam(attrKey, schemaType, attr, commentLine) if err != nil { - break - } - if schemaType != "string" { - return fmt.Errorf("maxlength is attribute to set to a number. comment=%s got=%s", commentLine, schemaType) - } - n, err := strconv.ParseInt(attr, 10, 64) - if err != nil { - return fmt.Errorf("maxlength is allow only a number got=%s", attr) + return err } param.MaxLength = &n case "minlength": - attr, err := findAttr(re, commentLine) + n, err := setStringParam(attrKey, schemaType, attr, commentLine) if err != nil { - break - } - if schemaType != "string" { - return fmt.Errorf("maxlength is attribute to set to a number. 
comment=%s got=%s", commentLine, schemaType) - } - n, err := strconv.ParseInt(attr, 10, 64) - if err != nil { - return fmt.Errorf("minlength is allow only a number got=%s", attr) + return err } param.MinLength = &n case "format": - attr, err := findAttr(re, commentLine) - if err != nil { - break - } param.Format = attr } + } return nil } @@ -312,40 +336,67 @@ func findAttr(re *regexp.Regexp, commentLine string) (string, error) { return strings.TrimSpace(attr[l+1 : r]), nil } -func findAttrList(re *regexp.Regexp, commentLine string) ([]string, error) { - attr, err := findAttr(re, commentLine) - if err != nil { - return []string{""}, err +func setStringParam(name, schemaType, attr, commentLine string) (int64, error) { + if schemaType != "string" { + return 0, fmt.Errorf("%s is attribute to set to a number. comment=%s got=%s", name, commentLine, schemaType) } - return strings.Split(attr, ","), nil + n, err := strconv.ParseInt(attr, 10, 64) + if err != nil { + return 0, fmt.Errorf("%s is allow only a number got=%s", name, attr) + } + return n, nil +} + +func setNumberParam(name, schemaType, attr, commentLine string) (float64, error) { + if schemaType != "integer" && schemaType != "number" { + return 0, fmt.Errorf("%s is attribute to set to a number. comment=%s got=%s", name, commentLine, schemaType) + } + n, err := strconv.ParseFloat(attr, 64) + if err != nil { + return 0, fmt.Errorf("maximum is allow only a number. comment=%s got=%s", commentLine, attr) + } + return n, nil +} + +func setEnumParam(attr, schemaType string, param *spec.Parameter) error { + for _, e := range strings.Split(attr, ",") { + e = strings.TrimSpace(e) + + value, err := defineType(schemaType, e) + if err != nil { + return err + } + param.Enum = append(param.Enum, value) + } + return nil } // defineType enum value define the type (object and array unsupported) -func defineType(schemaType string, value string) interface{} { +func defineType(schemaType string, value string) (interface{}, error) { schemaType = TransToValidSchemeType(schemaType) switch schemaType { case "string": - return value + return value, nil case "number": v, err := strconv.ParseFloat(value, 64) if err != nil { - panic(fmt.Errorf("enum value %s can't convert to %s err: %s", value, schemaType, err)) + return nil, fmt.Errorf("enum value %s can't convert to %s err: %s", value, schemaType, err) } - return v + return v, nil case "integer": v, err := strconv.Atoi(value) if err != nil { - panic(fmt.Errorf("enum value %s can't convert to %s err: %s", value, schemaType, err)) + return nil, fmt.Errorf("enum value %s can't convert to %s err: %s", value, schemaType, err) } - return v + return v, nil case "boolean": v, err := strconv.ParseBool(value) if err != nil { - panic(fmt.Errorf("enum value %s can't convert to %s err: %s", value, schemaType, err)) + return nil, fmt.Errorf("enum value %s can't convert to %s err: %s", value, schemaType, err) } - return v + return v, nil default: - panic(fmt.Errorf("%s is unsupported type in enum value", schemaType)) + return nil, fmt.Errorf("%s is unsupported type in enum value", schemaType) } } @@ -464,7 +515,7 @@ func findTypeDef(importPath, typeName string) (*ast.TypeSpec, error) { pkgInfo := lprog.Package(importPath) if pkgInfo == nil { - return nil, errors.New("package was nil") + return nil, fmt.Errorf("package was nil") } // TODO: possibly cache pkgInfo since it's an expensive operation @@ -482,17 +533,21 @@ func findTypeDef(importPath, typeName string) (*ast.TypeSpec, error) { } } } - return nil, errors.New("type spec 
not found") + return nil, fmt.Errorf("type spec not found") } var responsePattern = regexp.MustCompile(`([\d]+)[\s]+([\w\{\}]+)[\s]+([\w\-\.\/]+)[^"]*(.*)?`) -// ParseResponseComment parses comment for gived `response` comment string. +// ParseResponseComment parses comment for given `response` comment string. func (operation *Operation) ParseResponseComment(commentLine string, astFile *ast.File) error { var matches []string if matches = responsePattern.FindStringSubmatch(commentLine); len(matches) != 5 { - return fmt.Errorf("can not parse response comment \"%s\"", commentLine) + err := operation.ParseEmptyResponseComment(commentLine) + if err != nil { + return operation.ParseEmptyResponseOnly(commentLine) + } + return err } response := spec.Response{} @@ -508,6 +563,11 @@ func (operation *Operation) ParseResponseComment(commentLine string, astFile *as schemaType := strings.Trim(matches[2], "{}") refType := matches[3] + if !IsGolangPrimitiveType(refType) && !strings.Contains(refType, ".") { + currentPkgName := astFile.Name.String() + refType = currentPkgName + "." + refType + } + if operation.parser != nil { // checking refType has existing in 'TypeDefinitions' if err := operation.registerSchemaType(refType, astFile); err != nil { return err @@ -515,10 +575,10 @@ func (operation *Operation) ParseResponseComment(commentLine string, astFile *as } // so we have to know all type in app - //TODO: we might omitted schema.type if schemaType equals 'object' response.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Type: []string{schemaType}}} if schemaType == "object" { + response.Schema.SchemaProps = spec.SchemaProps{} response.Schema.Ref = spec.Ref{ Ref: jsonreference.MustCreateRef("#/definitions/" + refType), } diff --git a/vendor/github.com/swaggo/swag/parser.go b/vendor/github.com/swaggo/swag/parser.go index 6957c4db8..11618acf0 100644 --- a/vendor/github.com/swaggo/swag/parser.go +++ b/vendor/github.com/swaggo/swag/parser.go @@ -1,12 +1,16 @@ package swag import ( + "encoding/json" "fmt" "go/ast" + "go/build" goparser "go/parser" "go/token" + "io/ioutil" "net/http" "os" + "os/exec" "path" "path/filepath" "reflect" @@ -15,9 +19,9 @@ import ( "strings" "unicode" + "github.com/KyleBanks/depth" "github.com/go-openapi/jsonreference" "github.com/go-openapi/spec" - "github.com/pkg/errors" ) const ( @@ -36,7 +40,7 @@ type Parser struct { // swagger represents the root document object for the API specification swagger *spec.Swagger - //files is a map that stores map[real_go_file_path][astFile] + // files is a map that stores map[real_go_file_path][astFile] files map[string]*ast.File // TypeDefinitions is a map that stores [package name][type name][*ast.TypeSpec] @@ -45,19 +49,25 @@ type Parser struct { // CustomPrimitiveTypes is a map that stores custom primitive types to actual golang types [type name][string] CustomPrimitiveTypes map[string]string - //registerTypes is a map that stores [refTypeName][*ast.TypeSpec] + // registerTypes is a map that stores [refTypeName][*ast.TypeSpec] registerTypes map[string]*ast.TypeSpec PropNamingStrategy string ParseVendor bool + // ParseDependencies whether swag should be parse outside dependency folder + ParseDependency bool + // structStack stores full names of the structures that were already parsed or are being parsed now structStack []string + + // markdownFileDir holds the path to the folder, where markdown files are stored + markdownFileDir string } // New creates a new Parser with default properties. 
-func New() *Parser { +func New(options ...func(*Parser)) *Parser { parser := &Parser{ swagger: &spec.Swagger{ SwaggerProps: spec.SwaggerProps{ @@ -78,16 +88,54 @@ func New() *Parser { CustomPrimitiveTypes: make(map[string]string), registerTypes: make(map[string]*ast.TypeSpec), } + + for _, option := range options { + option(parser) + } + return parser } -// ParseAPI parses general api info for gived searchDir and mainAPIFile +// SetMarkdownFileDirectory sets the directory to search for markdownfiles +func SetMarkdownFileDirectory(directoryPath string) func(*Parser) { + return func(p *Parser) { + p.markdownFileDir = directoryPath + } +} + +// ParseAPI parses general api info for given searchDir and mainAPIFile func (parser *Parser) ParseAPI(searchDir string, mainAPIFile string) error { - Println("Generate general API Info") + Printf("Generate general API Info, search dir:%s", searchDir) + if err := parser.getAllGoFileInfo(searchDir); err != nil { return err } - parser.ParseGeneralAPIInfo(path.Join(searchDir, mainAPIFile)) + + var t depth.Tree + + absMainAPIFilePath, err := filepath.Abs(filepath.Join(searchDir, mainAPIFile)) + if err != nil { + return err + } + + if parser.ParseDependency { + pkgName, err := getPkgName(path.Dir(absMainAPIFilePath)) + if err != nil { + return err + } + if err := t.Resolve(pkgName); err != nil { + return fmt.Errorf("pkg %s cannot find all dependencies, %s", pkgName, err) + } + for i := 0; i < len(t.Root.Deps); i++ { + if err := parser.getAllGoFileInfoFromDeps(&t.Root.Deps[i]); err != nil { + return err + } + } + } + + if err := parser.ParseGeneralAPIInfo(absMainAPIFilePath); err != nil { + return err + } for _, astFile := range parser.files { parser.ParseType(astFile) @@ -99,257 +147,176 @@ func (parser *Parser) ParseAPI(searchDir string, mainAPIFile string) error { } } - parser.ParseDefinitions() - - return nil + return parser.parseDefinitions() } -// ParseGeneralAPIInfo parses general api info for gived mainAPIFile path +func getPkgName(searchDir string) (string, error) { + cmd := exec.Command("go", "list", "-f={{.ImportPath}}") + cmd.Dir = searchDir + var stdout, stderr strings.Builder + cmd.Stdout = &stdout + cmd.Stderr = &stderr + + if err := cmd.Run(); err != nil { + return "", fmt.Errorf("execute go list command, %s, stdout:%s, stderr:%s", err, stdout.String(), stderr.String()) + } + + outStr, _ := stdout.String(), stderr.String() + + if outStr[0] == '_' { // will shown like _/{GOPATH}/src/{YOUR_PACKAGE} when NOT enable GO MODULE. 
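For context on the `go list` invocation used by `getPkgName` here, a hedged standalone sketch of the same template; the printed import path depends on where the program is run and is shown only as an example:

```go
package main

import (
	"fmt"
	"os/exec"
	"strings"
)

func main() {
	// Same template as getPkgName: prints the import path of the package
	// in the current directory, e.g. "github.com/swaggo/swag".
	out, err := exec.Command("go", "list", "-f={{.ImportPath}}").Output()
	if err != nil {
		panic(err)
	}
	fmt.Println(strings.TrimSpace(string(out)))
}
```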
+ outStr = strings.TrimPrefix(outStr, "_"+build.Default.GOPATH+"/src/") + } + f := strings.Split(outStr, "\n") + outStr = f[0] + + return outStr, nil +} + +// ParseGeneralAPIInfo parses general api info for given mainAPIFile path func (parser *Parser) ParseGeneralAPIInfo(mainAPIFile string) error { fileSet := token.NewFileSet() fileTree, err := goparser.ParseFile(fileSet, mainAPIFile, nil, goparser.ParseComments) if err != nil { - return errors.Wrap(err, "cannot parse soure files") + return fmt.Errorf("cannot parse source files %s: %s", mainAPIFile, err) } parser.swagger.Swagger = "2.0" securityMap := map[string]*spec.SecurityScheme{} - // templated defaults - parser.swagger.Info.Version = "{{.Version}}" - parser.swagger.Info.Title = "{{.Title}}" - parser.swagger.Info.Description = "{{.Description}}" - parser.swagger.Host = "{{.Host}}" - parser.swagger.BasePath = "{{.BasePath}}" - - if fileTree.Comments != nil { - for _, comment := range fileTree.Comments { - comments := strings.Split(comment.Text(), "\n") - previousAttribute := "" - for _, commentLine := range comments { - attribute := strings.ToLower(strings.Split(commentLine, " ")[0]) - multilineBlock := false - if previousAttribute == attribute { - multilineBlock = true - } - switch attribute { - case "@version": - parser.swagger.Info.Version = strings.TrimSpace(commentLine[len(attribute):]) - case "@title": - parser.swagger.Info.Title = strings.TrimSpace(commentLine[len(attribute):]) - case "@description": - if parser.swagger.Info.Description == "{{.Description}}" { - parser.swagger.Info.Description = strings.TrimSpace(commentLine[len(attribute):]) - } else if multilineBlock { - parser.swagger.Info.Description += "\n" + strings.TrimSpace(commentLine[len(attribute):]) - } - case "@termsofservice": - parser.swagger.Info.TermsOfService = strings.TrimSpace(commentLine[len(attribute):]) - case "@contact.name": - parser.swagger.Info.Contact.Name = strings.TrimSpace(commentLine[len(attribute):]) - case "@contact.email": - parser.swagger.Info.Contact.Email = strings.TrimSpace(commentLine[len(attribute):]) - case "@contact.url": - parser.swagger.Info.Contact.URL = strings.TrimSpace(commentLine[len(attribute):]) - case "@license.name": - parser.swagger.Info.License.Name = strings.TrimSpace(commentLine[len(attribute):]) - case "@license.url": - parser.swagger.Info.License.URL = strings.TrimSpace(commentLine[len(attribute):]) - case "@host": - parser.swagger.Host = strings.TrimSpace(commentLine[len(attribute):]) - case "@basepath": - parser.swagger.BasePath = strings.TrimSpace(commentLine[len(attribute):]) - case "@schemes": - parser.swagger.Schemes = getSchemes(commentLine) - case "@tag.name": - commentInfo := strings.TrimSpace(commentLine[len(attribute):]) - parser.swagger.Tags = append(parser.swagger.Tags, spec.Tag{ - TagProps: spec.TagProps{ - Name: strings.TrimSpace(commentInfo), - }, - }) - case "@tag.description": - commentInfo := strings.TrimSpace(commentLine[len(attribute):]) - tag := parser.swagger.Tags[len(parser.swagger.Tags)-1] - tag.TagProps.Description = commentInfo - replaceLastTag(parser.swagger.Tags, tag) - case "@tag.docs.url": - commentInfo := strings.TrimSpace(commentLine[len(attribute):]) - tag := parser.swagger.Tags[len(parser.swagger.Tags)-1] - tag.TagProps.ExternalDocs = &spec.ExternalDocumentation{ - URL: commentInfo, - } - replaceLastTag(parser.swagger.Tags, tag) - - case "@tag.docs.description": - commentInfo := strings.TrimSpace(commentLine[len(attribute):]) - tag := parser.swagger.Tags[len(parser.swagger.Tags)-1] - 
if tag.TagProps.ExternalDocs == nil { - return errors.New("@tag.docs.description needs to come after a @tags.docs.url") - } - tag.TagProps.ExternalDocs.Description = commentInfo - replaceLastTag(parser.swagger.Tags, tag) - } - previousAttribute = attribute + for _, comment := range fileTree.Comments { + if !isGeneralAPIComment(comment) { + continue + } + comments := strings.Split(comment.Text(), "\n") + previousAttribute := "" + // parsing classic meta data model + for i, commentLine := range comments { + attribute := strings.ToLower(strings.Split(commentLine, " ")[0]) + value := strings.TrimSpace(commentLine[len(attribute):]) + multilineBlock := false + if previousAttribute == attribute { + multilineBlock = true } + switch attribute { + case "@version": + parser.swagger.Info.Version = value + case "@title": + parser.swagger.Info.Title = value + case "@description": + if multilineBlock { + parser.swagger.Info.Description += "\n" + value + continue + } + parser.swagger.Info.Description = value + case "@description.markdown": + commentInfo, err := getMarkdownForTag("api", parser.markdownFileDir) + if err != nil { + return err + } + parser.swagger.Info.Description = string(commentInfo) + case "@termsofservice": + parser.swagger.Info.TermsOfService = value + case "@contact.name": + parser.swagger.Info.Contact.Name = value + case "@contact.email": + parser.swagger.Info.Contact.Email = value + case "@contact.url": + parser.swagger.Info.Contact.URL = value + case "@license.name": + parser.swagger.Info.License.Name = value + case "@license.url": + parser.swagger.Info.License.URL = value + case "@host": + parser.swagger.Host = value + case "@basepath": + parser.swagger.BasePath = value + case "@schemes": + parser.swagger.Schemes = getSchemes(commentLine) + case "@tag.name": + parser.swagger.Tags = append(parser.swagger.Tags, spec.Tag{ + TagProps: spec.TagProps{ + Name: value, + }, + }) + case "@tag.description": + tag := parser.swagger.Tags[len(parser.swagger.Tags)-1] + tag.TagProps.Description = value + replaceLastTag(parser.swagger.Tags, tag) + case "@tag.description.markdown": + tag := parser.swagger.Tags[len(parser.swagger.Tags)-1] + commentInfo, err := getMarkdownForTag(tag.TagProps.Name, parser.markdownFileDir) + if err != nil { + return err + } + tag.TagProps.Description = string(commentInfo) + replaceLastTag(parser.swagger.Tags, tag) + case "@tag.docs.url": + tag := parser.swagger.Tags[len(parser.swagger.Tags)-1] + tag.TagProps.ExternalDocs = &spec.ExternalDocumentation{ + URL: value, + } + replaceLastTag(parser.swagger.Tags, tag) + case "@tag.docs.description": + tag := parser.swagger.Tags[len(parser.swagger.Tags)-1] + if tag.TagProps.ExternalDocs == nil { + return fmt.Errorf("%s needs to come after a @tags.docs.url", attribute) + } + tag.TagProps.ExternalDocs.Description = value + replaceLastTag(parser.swagger.Tags, tag) + case "@securitydefinitions.basic": + securityMap[value] = spec.BasicAuth() + case "@securitydefinitions.apikey": + attrMap, _, err := extractSecurityAttribute(attribute, []string{"@in", "@name"}, comments[i+1:]) + if err != nil { + return err + } + securityMap[value] = spec.APIKeyAuth(attrMap["@name"], attrMap["@in"]) + case "@securitydefinitions.oauth2.application": + attrMap, scopes, err := extractSecurityAttribute(attribute, []string{"@tokenurl"}, comments[i+1:]) + if err != nil { + return err + } + securityMap[value] = securitySchemeOAuth2Application(attrMap["@tokenurl"], scopes) + case "@securitydefinitions.oauth2.implicit": + attrMap, scopes, err := 
extractSecurityAttribute(attribute, []string{"@authorizationurl"}, comments[i+1:]) + if err != nil { + return err + } + securityMap[value] = securitySchemeOAuth2Implicit(attrMap["@authorizationurl"], scopes) + case "@securitydefinitions.oauth2.password": + attrMap, scopes, err := extractSecurityAttribute(attribute, []string{"@tokenurl"}, comments[i+1:]) + if err != nil { + return err + } + securityMap[value] = securitySchemeOAuth2Password(attrMap["@tokenurl"], scopes) + case "@securitydefinitions.oauth2.accesscode": + attrMap, scopes, err := extractSecurityAttribute(attribute, []string{"@tokenurl", "@authorizationurl"}, comments[i+1:]) + if err != nil { + return err + } + securityMap[value] = securitySchemeOAuth2AccessToken(attrMap["@authorizationurl"], attrMap["@tokenurl"], scopes) - for i := 0; i < len(comments); i++ { - attribute := strings.ToLower(strings.Split(comments[i], " ")[0]) - switch attribute { - case "@securitydefinitions.basic": - securityMap[strings.TrimSpace(comments[i][len(attribute):])] = spec.BasicAuth() - case "@securitydefinitions.apikey": - attrMap := map[string]string{} - for _, v := range comments[i+1:] { - securityAttr := strings.ToLower(strings.Split(v, " ")[0]) - if securityAttr == "@in" || securityAttr == "@name" { - attrMap[securityAttr] = strings.TrimSpace(v[len(securityAttr):]) + default: + prefixExtension := "@x-" + if len(attribute) > 5 { // Prefix extension + 1 char + 1 space + 1 char + if attribute[:len(prefixExtension)] == prefixExtension { + var valueJSON interface{} + split := strings.SplitAfter(commentLine, attribute+" ") + if len(split) < 2 { + return fmt.Errorf("annotation %s need a value", attribute) } - // next securityDefinitions - if strings.Index(securityAttr, "@securitydefinitions.") == 0 { - break + extensionName := "x-" + strings.SplitAfter(attribute, prefixExtension)[1] + if err := json.Unmarshal([]byte(split[1]), &valueJSON); err != nil { + return fmt.Errorf("annotation %s need a valid json value", attribute) } + parser.swagger.AddExtension(extensionName, valueJSON) } - if len(attrMap) != 2 { - return errors.New("@securitydefinitions.apikey is @name and @in required") - } - securityMap[strings.TrimSpace(comments[i][len(attribute):])] = spec.APIKeyAuth(attrMap["@name"], attrMap["@in"]) - case "@securitydefinitions.oauth2.application": - attrMap := map[string]string{} - scopes := map[string]string{} - for _, v := range comments[i+1:] { - securityAttr := strings.ToLower(strings.Split(v, " ")[0]) - if securityAttr == "@tokenurl" { - attrMap[securityAttr] = strings.TrimSpace(v[len(securityAttr):]) - } else { - isExists, err := isExistsScope(securityAttr) - if err != nil { - return err - } - if isExists { - scopScheme, err := getScopeScheme(securityAttr) - if err != nil { - return err - } - scopes[scopScheme] = v[len(securityAttr):] - } - } - // next securityDefinitions - if strings.Index(securityAttr, "@securitydefinitions.") == 0 { - break - } - } - if len(attrMap) != 1 { - return errors.New("@securitydefinitions.oauth2.application is @tokenUrl required") - } - securityScheme := spec.OAuth2Application(attrMap["@tokenurl"]) - for scope, description := range scopes { - securityScheme.AddScope(scope, description) - } - securityMap[strings.TrimSpace(comments[i][len(attribute):])] = securityScheme - case "@securitydefinitions.oauth2.implicit": - attrMap := map[string]string{} - scopes := map[string]string{} - for _, v := range comments[i+1:] { - securityAttr := strings.ToLower(strings.Split(v, " ")[0]) - if securityAttr == "@authorizationurl" { - 
attrMap[securityAttr] = strings.TrimSpace(v[len(securityAttr):]) - } else { - isExists, err := isExistsScope(securityAttr) - if err != nil { - return err - } - if isExists { - scopScheme, err := getScopeScheme(securityAttr) - if err != nil { - return err - } - scopes[scopScheme] = v[len(securityAttr):] - } - } - // next securityDefinitions - if strings.Index(securityAttr, "@securitydefinitions.") == 0 { - break - } - } - if len(attrMap) != 1 { - return errors.New("@securitydefinitions.oauth2.implicit is @authorizationUrl required") - } - securityScheme := spec.OAuth2Implicit(attrMap["@authorizationurl"]) - for scope, description := range scopes { - securityScheme.AddScope(scope, description) - } - securityMap[strings.TrimSpace(comments[i][len(attribute):])] = securityScheme - case "@securitydefinitions.oauth2.password": - attrMap := map[string]string{} - scopes := map[string]string{} - for _, v := range comments[i+1:] { - securityAttr := strings.ToLower(strings.Split(v, " ")[0]) - if securityAttr == "@tokenurl" { - attrMap[securityAttr] = strings.TrimSpace(v[len(securityAttr):]) - } else { - isExists, err := isExistsScope(securityAttr) - if err != nil { - return err - } - if isExists { - scopScheme, err := getScopeScheme(securityAttr) - if err != nil { - return err - } - scopes[scopScheme] = v[len(securityAttr):] - } - } - // next securityDefinitions - if strings.Index(securityAttr, "@securitydefinitions.") == 0 { - break - } - } - if len(attrMap) != 1 { - return errors.New("@securitydefinitions.oauth2.password is @tokenUrl required") - } - securityScheme := spec.OAuth2Password(attrMap["@tokenurl"]) - for scope, description := range scopes { - securityScheme.AddScope(scope, description) - } - securityMap[strings.TrimSpace(comments[i][len(attribute):])] = securityScheme - case "@securitydefinitions.oauth2.accesscode": - attrMap := map[string]string{} - scopes := map[string]string{} - for _, v := range comments[i+1:] { - securityAttr := strings.ToLower(strings.Split(v, " ")[0]) - if securityAttr == "@tokenurl" || securityAttr == "@authorizationurl" { - attrMap[securityAttr] = strings.TrimSpace(v[len(securityAttr):]) - } else { - isExists, err := isExistsScope(securityAttr) - if err != nil { - return err - } - if isExists { - scopScheme, err := getScopeScheme(securityAttr) - if err != nil { - return err - } - scopes[scopScheme] = v[len(securityAttr):] - } - } - // next securityDefinitions - if strings.Index(securityAttr, "@securitydefinitions.") == 0 { - break - } - } - if len(attrMap) != 2 { - return errors.New("@securitydefinitions.oauth2.accessCode is @tokenUrl and @authorizationUrl required") - } - securityScheme := spec.OAuth2AccessToken(attrMap["@authorizationurl"], attrMap["@tokenurl"]) - for scope, description := range scopes { - securityScheme.AddScope(scope, description) - } - securityMap[strings.TrimSpace(comments[i][len(attribute):])] = securityScheme } } + previousAttribute = attribute } } + if len(securityMap) > 0 { parser.swagger.SecurityDefinitions = securityMap } @@ -357,10 +324,115 @@ func (parser *Parser) ParseGeneralAPIInfo(mainAPIFile string) error { return nil } +func isGeneralAPIComment(comment *ast.CommentGroup) bool { + for _, commentLine := range strings.Split(comment.Text(), "\n") { + attribute := strings.ToLower(strings.Split(commentLine, " ")[0]) + switch attribute { + // The @summary, @router, @success,@failure annotation belongs to Operation + case "@summary", "@router", "@success", "@failure": + return false + } + } + return true +} + +func 
extractSecurityAttribute(context string, search []string, lines []string) (map[string]string, map[string]string, error) { + attrMap := map[string]string{} + scopes := map[string]string{} + for _, v := range lines { + securityAttr := strings.ToLower(strings.Split(v, " ")[0]) + for _, findterm := range search { + if securityAttr == findterm { + attrMap[securityAttr] = strings.TrimSpace(v[len(securityAttr):]) + continue + } + } + isExists, err := isExistsScope(securityAttr) + if err != nil { + return nil, nil, err + } + if isExists { + scopScheme, err := getScopeScheme(securityAttr) + if err != nil { + return nil, nil, err + } + scopes[scopScheme] = v[len(securityAttr):] + } + // next securityDefinitions + if strings.Index(securityAttr, "@securitydefinitions.") == 0 { + break + } + } + if len(attrMap) != len(search) { + return nil, nil, fmt.Errorf("%s is %v required", context, search) + } + return attrMap, scopes, nil +} + +func securitySchemeOAuth2Application(tokenurl string, scopes map[string]string) *spec.SecurityScheme { + securityScheme := spec.OAuth2Application(tokenurl) + for scope, description := range scopes { + securityScheme.AddScope(scope, description) + } + return securityScheme +} + +func securitySchemeOAuth2Implicit(authorizationurl string, scopes map[string]string) *spec.SecurityScheme { + securityScheme := spec.OAuth2Implicit(authorizationurl) + for scope, description := range scopes { + securityScheme.AddScope(scope, description) + } + return securityScheme +} + +func securitySchemeOAuth2Password(tokenurl string, scopes map[string]string) *spec.SecurityScheme { + securityScheme := spec.OAuth2Password(tokenurl) + for scope, description := range scopes { + securityScheme.AddScope(scope, description) + } + return securityScheme +} + +func securitySchemeOAuth2AccessToken(authorizationurl, tokenurl string, scopes map[string]string) *spec.SecurityScheme { + securityScheme := spec.OAuth2AccessToken(authorizationurl, tokenurl) + for scope, description := range scopes { + securityScheme.AddScope(scope, description) + } + return securityScheme +} + +func getMarkdownForTag(tagName string, dirPath string) ([]byte, error) { + filesInfos, err := ioutil.ReadDir(dirPath) + if err != nil { + return nil, err + } + + for _, fileInfo := range filesInfos { + if fileInfo.IsDir() { + continue + } + fileName := fileInfo.Name() + + if !strings.Contains(fileName, ".md") { + continue + } + + if strings.Contains(fileName, tagName) { + fullPath := filepath.Join(dirPath, fileName) + commentInfo, err := ioutil.ReadFile(fullPath) + if err != nil { + return nil, fmt.Errorf("Failed to read markdown file %s error: %s ", fullPath, err) + } + return commentInfo, nil + } + } + return nil, fmt.Errorf("Unable to find markdown file for tag %s in the given directory", tagName) +} + func getScopeScheme(scope string) (string, error) { scopeValue := scope[strings.Index(scope, "@scope."):] if scopeValue == "" { - return "", errors.New("@scope is empty") + return "", fmt.Errorf("@scope is empty") } return scope[len("@scope."):], nil } @@ -368,13 +440,13 @@ func getScopeScheme(scope string) (string, error) { func isExistsScope(scope string) (bool, error) { s := strings.Fields(scope) for _, v := range s { - if strings.Index(v, "@scope.") != -1 { - if strings.Index(v, ",") != -1 { + if strings.Contains(v, "@scope.") { + if strings.Contains(v, ",") { return false, fmt.Errorf("@scope can't use comma(,) get=" + v) } } } - return strings.Index(scope, "@scope.") != -1, nil + return strings.Contains(scope, "@scope."), nil } // 
getSchemes parses swagger schemes for given commentLine @@ -460,8 +532,8 @@ func (parser *Parser) isInStructStack(refTypeName string) bool { return false } -// ParseDefinitions parses Swagger Api definitions. -func (parser *Parser) ParseDefinitions() { +// parseDefinitions parses Swagger Api definitions. +func (parser *Parser) parseDefinitions() error { // sort the typeNames so that parsing definitions is deterministic typeNames := make([]string, 0, len(parser.registerTypes)) for refTypeName := range parser.registerTypes { @@ -474,8 +546,11 @@ func (parser *Parser) ParseDefinitions() { ss := strings.Split(refTypeName, ".") pkgName := ss[0] parser.structStack = nil - parser.ParseDefinition(pkgName, typeSpec.Name.Name, typeSpec) + if err := parser.ParseDefinition(pkgName, typeSpec.Name.Name, typeSpec); err != nil { + return err + } } + return nil } // ParseDefinition parses given type spec that corresponds to the type under @@ -483,6 +558,12 @@ func (parser *Parser) ParseDefinitions() { // with a schema for the given type func (parser *Parser) ParseDefinition(pkgName, typeName string, typeSpec *ast.TypeSpec) error { refTypeName := fullTypeName(pkgName, typeName) + + if typeSpec == nil { + Println("Skipping '" + refTypeName + "', pkg '" + pkgName + "' not found, try add flag --parseDependency or --parseVendor.") + return nil + } + if _, isParsed := parser.swagger.Definitions[refTypeName]; isParsed { Println("Skipping '" + refTypeName + "', already parsed.") return nil @@ -559,49 +640,7 @@ func (parser *Parser) parseTypeExpr(pkgName, typeName string, typeExpr ast.Expr) return schema, nil } - extraRequired := make([]string, 0) - properties := make(map[string]spec.Schema) - for _, field := range expr.Fields.List { - var fieldProps map[string]spec.Schema - var requiredFromAnon []string - if field.Names == nil { - var err error - fieldProps, requiredFromAnon, err = parser.parseAnonymousField(pkgName, field) - if err != nil { - return spec.Schema{}, err - } - extraRequired = append(extraRequired, requiredFromAnon...) - } else { - var err error - fieldProps, err = parser.parseStruct(pkgName, field) - if err != nil { - return spec.Schema{}, err - } - } - - for k, v := range fieldProps { - properties[k] = v - } - } - - // collect requireds from our properties and anonymous fields - required := parser.collectRequiredFields(pkgName, properties, extraRequired) - - // unset required from properties because we've collected them - for k, prop := range properties { - tname := prop.SchemaProps.Type[0] - if tname != "object" { - prop.SchemaProps.Required = make([]string, 0) - } - properties[k] = prop - } - - return spec.Schema{ - SchemaProps: spec.SchemaProps{ - Type: []string{"object"}, - Properties: properties, - Required: required, - }}, nil + return parser.parseStruct(pkgName, expr.Fields) // type Foo Baz case *ast.Ident: @@ -671,12 +710,48 @@ func (parser *Parser) parseTypeExpr(pkgName, typeName string, typeExpr ast.Expr) }, nil } +func (parser *Parser) parseStruct(pkgName string, fields *ast.FieldList) (spec.Schema, error) { + + extraRequired := make([]string, 0) + properties := make(map[string]spec.Schema) + for _, field := range fields.List { + fieldProps, requiredFromAnon, err := parser.parseStructField(pkgName, field) + if err != nil { + return spec.Schema{}, err + } + extraRequired = append(extraRequired, requiredFromAnon...) 
+ for k, v := range fieldProps { + properties[k] = v + } + } + + // collect requireds from our properties and anonymous fields + required := parser.collectRequiredFields(pkgName, properties, extraRequired) + + // unset required from properties because we've collected them + for k, prop := range properties { + tname := prop.SchemaProps.Type[0] + if tname != "object" { + prop.SchemaProps.Required = make([]string, 0) + } + properties[k] = prop + } + + return spec.Schema{ + SchemaProps: spec.SchemaProps{ + Type: []string{"object"}, + Properties: properties, + Required: required, + }}, nil +} + type structField struct { name string schemaType string arrayType string formatType string isRequired bool + readOnly bool crossPkg string exampleValue interface{} maximum *float64 @@ -688,14 +763,55 @@ type structField struct { extensions map[string]interface{} } -func (parser *Parser) parseStruct(pkgName string, field *ast.Field) (map[string]spec.Schema, error) { +func (parser *Parser) parseStructField(pkgName string, field *ast.Field) (map[string]spec.Schema, []string, error) { properties := map[string]spec.Schema{} + + if field.Names == nil { + fullTypeName, err := getFieldType(field.Type) + if err != nil { + return properties, []string{}, nil + } + + typeName := fullTypeName + + if splits := strings.Split(fullTypeName, "."); len(splits) > 1 { + pkgName = splits[0] + typeName = splits[1] + } + + typeSpec := parser.TypeDefinitions[pkgName][typeName] + if typeSpec != nil { + schema, err := parser.parseTypeExpr(pkgName, typeName, typeSpec.Type) + if err != nil { + return properties, []string{}, err + } + schemaType := "unknown" + if len(schema.SchemaProps.Type) > 0 { + schemaType = schema.SchemaProps.Type[0] + } + + switch schemaType { + case "object": + for k, v := range schema.SchemaProps.Properties { + properties[k] = v + } + case "array": + properties[typeName] = schema + default: + Printf("Can't extract properties from a schema of type '%s'", schemaType) + } + return properties, schema.SchemaProps.Required, nil + } + + return properties, nil, nil + } + structField, err := parser.parseField(field) if err != nil { - return properties, nil + return properties, nil, nil } if structField.name == "" { - return properties, nil + return properties, nil, nil } var desc string if field.Doc != nil { @@ -705,7 +821,6 @@ func (parser *Parser) parseStruct(pkgName string, field *ast.Field) (map[string] desc = strings.TrimSpace(field.Comment.Text()) } // TODO: find package of schemaType and/or arrayType - if structField.crossPkg != "" { pkgName = structField.crossPkg } @@ -721,6 +836,9 @@ func (parser *Parser) parseStruct(pkgName string, field *ast.Field) (map[string] Ref: jsonreference.MustCreateRef("#/definitions/" + pkgName + "." 
+ structField.schemaType), }, }, + SwaggerSchemaProps: spec.SwaggerSchemaProps{ + ReadOnly: structField.readOnly, + }, } } else if structField.schemaType == "array" { // array field type // if defined -- ref it @@ -741,8 +859,46 @@ func (parser *Parser) parseStruct(pkgName string, field *ast.Field) (map[string] }, }, }, + SwaggerSchemaProps: spec.SwaggerSchemaProps{ + ReadOnly: structField.readOnly, + }, } - } else { // standard type in array + } else if structField.arrayType == "object" { + // Anonymous struct + if astTypeArray, ok := field.Type.(*ast.ArrayType); ok { // if array + props := make(map[string]spec.Schema) + if expr, ok := astTypeArray.Elt.(*ast.StructType); ok { + for _, field := range expr.Fields.List { + var fieldProps map[string]spec.Schema + fieldProps, _, err = parser.parseStructField(pkgName, field) + if err != nil { + return properties, nil, err + } + for k, v := range fieldProps { + props[k] = v + } + } + properties[structField.name] = spec.Schema{ + SchemaProps: spec.SchemaProps{ + Type: []string{structField.schemaType}, + Description: desc, + Items: &spec.SchemaOrArray{ + Schema: &spec.Schema{ + SchemaProps: spec.SchemaProps{ + Type: []string{"object"}, + Properties: props, + }, + }, + }, + }, + SwaggerSchemaProps: spec.SwaggerSchemaProps{ + ReadOnly: structField.readOnly, + }, + } + } + } + } else { + // standard type in array required := make([]string, 0) if structField.isRequired { required = append(required, structField.name) @@ -769,7 +925,8 @@ func (parser *Parser) parseStruct(pkgName string, field *ast.Field) (map[string] }, }, SwaggerSchemaProps: spec.SwaggerSchemaProps{ - Example: structField.exampleValue, + Example: structField.exampleValue, + ReadOnly: structField.readOnly, }, } } @@ -792,21 +949,34 @@ func (parser *Parser) parseStruct(pkgName string, field *ast.Field) (map[string] Default: structField.defaultValue, }, SwaggerSchemaProps: spec.SwaggerSchemaProps{ - Example: structField.exampleValue, + Example: structField.exampleValue, + ReadOnly: structField.readOnly, }, VendorExtensible: spec.VendorExtensible{ Extensions: structField.extensions, }, } + if nestStruct, ok := field.Type.(*ast.StarExpr); ok { + schema, err := parser.parseTypeExpr(pkgName, structField.schemaType, nestStruct.X) + if err != nil { + return nil, nil, err + } + + if len(schema.SchemaProps.Type) > 0 { + properties[structField.name] = schema + return properties, nil, nil + } + } + nestStruct, ok := field.Type.(*ast.StructType) if ok { props := map[string]spec.Schema{} nestRequired := make([]string, 0) for _, v := range nestStruct.Fields.List { - p, err := parser.parseStruct(pkgName, v) + p, _, err := parser.parseStructField(pkgName, v) if err != nil { - return properties, err + return properties, nil, err } for k, v := range p { if v.SchemaProps.Type[0] != "object" { @@ -832,74 +1002,55 @@ func (parser *Parser) parseStruct(pkgName string, field *ast.Field) (map[string] Default: structField.defaultValue, }, SwaggerSchemaProps: spec.SwaggerSchemaProps{ - Example: structField.exampleValue, + Example: structField.exampleValue, + ReadOnly: structField.readOnly, }, } } } - return properties, nil + return properties, nil, nil } -func (parser *Parser) parseAnonymousField(pkgName string, field *ast.Field) (map[string]spec.Schema, []string, error) { - properties := make(map[string]spec.Schema) +func getFieldType(field interface{}) (string, error) { - fullTypeName := "" - switch ftype := field.Type.(type) { + switch ftype := field.(type) { case *ast.Ident: - fullTypeName = ftype.Name + 
return ftype.Name, nil + + case *ast.SelectorExpr: + packageName, err := getFieldType(ftype.X) + if err != nil { + return "", err + } + return fmt.Sprintf("%s.%s", packageName, ftype.Sel.Name), nil + case *ast.StarExpr: - if ftypeX, ok := ftype.X.(*ast.Ident); ok { - fullTypeName = ftypeX.Name - } else if ftypeX, ok := ftype.X.(*ast.SelectorExpr); ok { - if packageX, ok := ftypeX.X.(*ast.Ident); ok { - fullTypeName = fmt.Sprintf("%s.%s", packageX.Name, ftypeX.Sel.Name) - } - } else { - Printf("Composite field type of '%T' is unhandle by parser. Skipping", ftype) - return properties, []string{}, nil + fullName, err := getFieldType(ftype.X) + if err != nil { + return "", err } - default: - Printf("Field type of '%T' is unsupported. Skipping", ftype) - return properties, []string{}, nil - } + return fullName, nil - typeName := fullTypeName - if splits := strings.Split(fullTypeName, "."); len(splits) > 1 { - pkgName = splits[0] - typeName = splits[1] } - - typeSpec := parser.TypeDefinitions[pkgName][typeName] - schema, err := parser.parseTypeExpr(pkgName, typeName, typeSpec.Type) - if err != nil { - return properties, []string{}, err - } - schemaType := "unknown" - if len(schema.SchemaProps.Type) > 0 { - schemaType = schema.SchemaProps.Type[0] - } - - switch schemaType { - case "object": - for k, v := range schema.SchemaProps.Properties { - properties[k] = v - } - case "array": - properties[typeName] = schema - default: - Printf("Can't extract properties from a schema of type '%s'", schemaType) - } - - return properties, schema.SchemaProps.Required, nil + return "", fmt.Errorf("unknown field type %#v", field) } func (parser *Parser) parseField(field *ast.Field) (*structField, error) { - prop := getPropertyName(field.Type, parser) - if len(prop.ArrayType) == 0 { - CheckSchemaType(prop.SchemaType) - } else { - CheckSchemaType("array") + prop, err := getPropertyName(field.Type, parser) + if err != nil { + return nil, err } + + if len(prop.ArrayType) == 0 { + if err := CheckSchemaType(prop.SchemaType); err != nil { + return nil, err + } + } else { + if err := CheckSchemaType("array"); err != nil { + return nil, err + } + } + structField := &structField{ name: field.Names[0].Name, schemaType: prop.SchemaType, @@ -953,8 +1104,12 @@ func (parser *Parser) parseField(field *ast.Field) (*structField, error) { } } - CheckSchemaType(newSchemaType) - CheckSchemaType(newArrayType) + if err := CheckSchemaType(newSchemaType); err != nil { + return nil, err + } + if err := CheckSchemaType(newArrayType); err != nil { + return nil, err + } structField.schemaType = newSchemaType structField.arrayType = newArrayType } @@ -1003,11 +1158,19 @@ func (parser *Parser) parseField(field *ast.Field) (*structField, error) { } for _, e := range strings.Split(enumsTag, ",") { - structField.enums = append(structField.enums, defineType(enumType, e)) + value, err := defineType(enumType, e) + if err != nil { + return nil, err + } + structField.enums = append(structField.enums, value) } } if defaultTag := structTag.Get("default"); defaultTag != "" { - structField.defaultValue = defineType(structField.schemaType, defaultTag) + value, err := defineType(structField.schemaType, defaultTag) + if err != nil { + return nil, err + } + structField.defaultValue = value } if IsNumericType(structField.schemaType) || IsNumericType(structField.arrayType) { @@ -1036,6 +1199,9 @@ func (parser *Parser) parseField(field *ast.Field) (*structField, error) { } structField.minLength = minLength } + if readOnly := structTag.Get("readonly"); readOnly != 
"" { + structField.readOnly = readOnly == "true" + } return structField, nil } @@ -1149,11 +1315,45 @@ func (parser *Parser) getAllGoFileInfo(searchDir string) error { return filepath.Walk(searchDir, parser.visit) } +func (parser *Parser) getAllGoFileInfoFromDeps(pkg *depth.Pkg) error { + if pkg.Internal || !pkg.Resolved { // ignored internal and not resolved dependencies + return nil + } + + srcDir := pkg.Raw.Dir + files, err := ioutil.ReadDir(srcDir) // only parsing files in the dir(don't contains sub dir files) + if err != nil { + return err + } + + for _, f := range files { + if f.IsDir() { + continue + } + + path := filepath.Join(srcDir, f.Name()) + if err := parser.parseFile(path); err != nil { + return err + } + } + + for i := 0; i < len(pkg.Deps); i++ { + if err := parser.getAllGoFileInfoFromDeps(&pkg.Deps[i]); err != nil { + return err + } + } + + return nil +} + func (parser *Parser) visit(path string, f os.FileInfo, err error) error { if err := parser.Skip(path, f); err != nil { return err } + return parser.parseFile(path) +} +func (parser *Parser) parseFile(path string) error { if ext := filepath.Ext(path); ext == ".go" { fset := token.NewFileSet() // positions are relative to fset astFile, err := goparser.ParseFile(fset, path, nil, goparser.ParseComments) @@ -1175,6 +1375,11 @@ func (parser *Parser) Skip(path string, f os.FileInfo) error { } } + // issue + if f.IsDir() && f.Name() == "docs" { + return filepath.SkipDir + } + // exclude all hidden folder if f.IsDir() && len(f.Name()) > 1 && f.Name()[0] == '.' { return filepath.SkipDir diff --git a/vendor/github.com/swaggo/swag/property.go b/vendor/github.com/swaggo/swag/property.go index 5d1b38558..5b846d63d 100644 --- a/vendor/github.com/swaggo/swag/property.go +++ b/vendor/github.com/swaggo/swag/property.go @@ -52,6 +52,11 @@ func parseFieldSelectorExpr(astTypeSelectorExpr *ast.SelectorExpr, parser *Parse if pkgName, ok := astTypeSelectorExpr.X.(*ast.Ident); ok { if typeDefinitions, ok := parser.TypeDefinitions[pkgName.Name][astTypeSelectorExpr.Sel.Name]; ok { + if expr, ok := typeDefinitions.Type.(*ast.SelectorExpr); ok { + if primitiveType, err := convertFromSpecificToPrimitive(expr.Sel.Name); err == nil { + return propertyNewFunc(primitiveType, "") + } + } parser.ParseDefinition(pkgName.Name, astTypeSelectorExpr.Sel.Name, typeDefinitions) return propertyNewFunc(astTypeSelectorExpr.Sel.Name, pkgName.Name) } @@ -59,28 +64,26 @@ func parseFieldSelectorExpr(astTypeSelectorExpr *ast.SelectorExpr, parser *Parse return propertyName{SchemaType: actualPrimitiveType, ArrayType: actualPrimitiveType} } } - - Printf("%s is not supported. but it will be set with string temporary. Please report any problems.\n", astTypeSelectorExpr.Sel.Name) return propertyName{SchemaType: "string", ArrayType: "string"} } -// getPropertyName returns the string value for the given field if it exists, otherwise it panics. 
+// getPropertyName returns the string value for the given field if it exists // allowedValues: array, boolean, integer, null, number, object, string -func getPropertyName(expr ast.Expr, parser *Parser) propertyName { +func getPropertyName(expr ast.Expr, parser *Parser) (propertyName, error) { if astTypeSelectorExpr, ok := expr.(*ast.SelectorExpr); ok { - return parseFieldSelectorExpr(astTypeSelectorExpr, parser, newProperty) + return parseFieldSelectorExpr(astTypeSelectorExpr, parser, newProperty), nil } // check if it is a custom type typeName := fmt.Sprintf("%v", expr) if actualPrimitiveType, isCustomType := parser.CustomPrimitiveTypes[typeName]; isCustomType { - return propertyName{SchemaType: actualPrimitiveType, ArrayType: actualPrimitiveType} + return propertyName{SchemaType: actualPrimitiveType, ArrayType: actualPrimitiveType}, nil } if astTypeIdent, ok := expr.(*ast.Ident); ok { name := astTypeIdent.Name schemeType := TransToValidSchemeType(name) - return propertyName{SchemaType: schemeType, ArrayType: schemeType} + return propertyName{SchemaType: schemeType, ArrayType: schemeType}, nil } if ptr, ok := expr.(*ast.StarExpr); ok { @@ -88,23 +91,24 @@ func getPropertyName(expr ast.Expr, parser *Parser) propertyName { } if astTypeArray, ok := expr.(*ast.ArrayType); ok { // if array - return getArrayPropertyName(astTypeArray, parser) + if _, ok := astTypeArray.Elt.(*ast.StructType); ok { + return propertyName{SchemaType: "array", ArrayType: "object"}, nil + } + return getArrayPropertyName(astTypeArray, parser), nil } if _, ok := expr.(*ast.MapType); ok { // if map - //TODO: support map - return propertyName{SchemaType: "object", ArrayType: "object"} + return propertyName{SchemaType: "object", ArrayType: "object"}, nil } if _, ok := expr.(*ast.StructType); ok { // if struct - return propertyName{SchemaType: "object", ArrayType: "object"} + return propertyName{SchemaType: "object", ArrayType: "object"}, nil } if _, ok := expr.(*ast.InterfaceType); ok { // if interface{} - return propertyName{SchemaType: "object", ArrayType: "object"} + return propertyName{SchemaType: "object", ArrayType: "object"}, nil } - - panic("not supported" + fmt.Sprint(expr)) + return propertyName{}, errors.New("not supported" + fmt.Sprint(expr)) } func getArrayPropertyName(astTypeArray *ast.ArrayType, parser *Parser) propertyName { diff --git a/vendor/github.com/swaggo/swag/schema.go b/vendor/github.com/swaggo/swag/schema.go index 6b799aa78..2ebef1678 100644 --- a/vendor/github.com/swaggo/swag/schema.go +++ b/vendor/github.com/swaggo/swag/schema.go @@ -2,11 +2,12 @@ package swag import "fmt" -// CheckSchemaType begins panicking if typeName is not a name of primitive type -func CheckSchemaType(typeName string) { +// CheckSchemaType checks if typeName is not a name of primitive type +func CheckSchemaType(typeName string) error { if !IsPrimitiveType(typeName) { - panic(fmt.Errorf("%s is not basic types", typeName)) + return fmt.Errorf("%s is not basic types", typeName) } + return nil } // IsPrimitiveType determine whether the type name is a primitive type diff --git a/vendor/github.com/swaggo/swag/version.go b/vendor/github.com/swaggo/swag/version.go index 7f24a554b..7e69b3750 100644 --- a/vendor/github.com/swaggo/swag/version.go +++ b/vendor/github.com/swaggo/swag/version.go @@ -1,4 +1,4 @@ package swag // Version of swag -const Version = "v1.5.0" +const Version = "v1.6.3" diff --git a/vendor/github.com/urfave/cli/.gitignore b/vendor/github.com/urfave/cli/.gitignore index faf70c4c2..7a7e2d9ef 100644 --- 
a/vendor/github.com/urfave/cli/.gitignore +++ b/vendor/github.com/urfave/cli/.gitignore @@ -1,2 +1,3 @@ *.coverprofile node_modules/ +vendor \ No newline at end of file diff --git a/vendor/github.com/urfave/cli/.travis.yml b/vendor/github.com/urfave/cli/.travis.yml index cf8d0980d..e500b380d 100644 --- a/vendor/github.com/urfave/cli/.travis.yml +++ b/vendor/github.com/urfave/cli/.travis.yml @@ -1,27 +1,35 @@ language: go sudo: false -dist: trusty -osx_image: xcode8.3 -go: 1.8.x +dist: bionic +osx_image: xcode10 +go: + - 1.11.x + - 1.12.x + - 1.13.x os: -- linux -- osx + - linux + - osx + +env: + GO111MODULE=on + GOPROXY=https://proxy.golang.org cache: directories: - - node_modules + - node_modules before_script: -- go get github.com/urfave/gfmrun/... || true -- go get golang.org/x/tools/cmd/goimports -- if [ ! -f node_modules/.bin/markdown-toc ] ; then - npm install markdown-toc ; - fi + - go get github.com/urfave/gfmrun/cmd/gfmrun + - go get golang.org/x/tools/cmd/goimports + - npm install markdown-toc + - go mod tidy script: -- ./runtests gen -- ./runtests vet -- ./runtests test -- ./runtests gfmrun -- ./runtests toc + - go run build.go vet + - go run build.go test + - go run build.go gfmrun + - go run build.go toc + +after_success: + - bash <(curl -s https://codecov.io/bash) diff --git a/vendor/github.com/urfave/cli/CHANGELOG.md b/vendor/github.com/urfave/cli/CHANGELOG.md index 401eae5a2..7c1a27ec7 100644 --- a/vendor/github.com/urfave/cli/CHANGELOG.md +++ b/vendor/github.com/urfave/cli/CHANGELOG.md @@ -4,7 +4,70 @@ ## [Unreleased] -## 1.20.0 - 2017-08-10 +## [1.22.1] - 2019-09-11 + +### Fixed + +* Hide output of hidden commands on man pages in [urfave/cli/pull/889](https://github.com/urfave/cli/pull/889) via [@crosbymichael](https://github.com/crosbymichael) +* Don't generate fish completion for hidden commands [urfave/cli/pull/891](https://github.com/urfave/891) via [@saschagrunert](https://github.com/saschagrunert) +* Using short flag names for required flags throws an error in [urfave/cli/pull/890](https://github.com/urfave/cli/pull/890) via [@asahasrabuddhe](https://github.com/asahasrabuddhe) + +### Changed + +* Remove flag code generation logic, legacy python test runner in [urfave/cli/pull/883](https://github.com/urfave/cli/pull/883) via [@asahasrabuddhe](https://github.com/asahasrabuddhe) +* Enable Go Modules support, drop support for `Go 1.10` add support for `Go 1.13` in [urfave/cli/pull/885](https://github.com/urfave/cli/pull/885) via [@asahasrabuddhe](https://github.com/asahasrabuddhe) + +## [1.22.0] - 2019-09-07 + +### Fixed + +* Fix Subcommands not falling back to `app.ExitEventHandler` in [urfave/cli/pull/856](https://github.com/urfave/cli/pull/856) via [@FaranIdo](https://github.com/FaranIdo) + +### Changed + +* Clarify that altsrc supports both TOML and JSON in [urfave/cli/pull/774](https://github.com/urfave/cli/pull/774) via [@whereswaldon](https://github.com/whereswaldon) +* Made the exit code example more clear in [urfave/cli/pull/823](https://github.com/urfave/cli/pull/823) via [@xordspar0](https://github.com/xordspar0) +* Removed the use of python for internal flag generation in [urfave/cli/pull/836](https://github.com/urfave/cli/pull/836) via [@asahasrabuddhe](https://github.com/asahasrabuddhe) +* Changed the supported go versions to `1.10`, `1.11`, `1.12` in [urfave/cli/pull/843](https://github.com/urfave/cli/pull/843) via [@lafriks](https://github.com/lafriks) +* Changed the v1 releases section in the readme in 
[urfave/cli/pull/862](https://github.com/urfave/cli/pull/862) via [@russoj88](https://github.com/russoj88) +* Cleaned up go modules in [urfave/cli/pull/874](https://github.com/urfave/cli/pull/874) via [@saschagrunert](https://github.com/saschagrunert) + +### Added + +* Added `UseShortOptionHandling` for combining short flags in [urfave/cli/pull/735](https://github.com/urfave/cli/pull/735) via [@rliebz](https://github.com/rliebz) +* Added support for flags bash completion in [urfave/cli/pull/808](https://github.com/urfave/cli/pull/808) via [@yogeshlonkar](https://github.com/yogeshlonkar) +* Added the `TakesFile` indicator to flag in [urfave/cli/pull/851](https://github.com/urfave/cli/pull/851) via [@saschagrunert](https://github.com/saschagrunert) +* Added fish shell completion support in [urfave/cli/pull/848](https://github.com/urfave/cli/pull/848) via [@saschagrunert](https://github.com/saschagrunert) + +## [1.21.0] - 2019-08-02 + +### Fixed + +* Fix using "slice" flag types with `EnvVar` in [urfave/cli/pull/687](https://github.com/urfave/cli/pull/687) via [@joshuarubin](https://github.com/joshuarubin) +* Fix regression of `SkipFlagParsing` behavior in [urfave/cli/pull/697](https://github.com/urfave/cli/pull/697) via [@jszwedko](https://github.com/jszwedko) +* Fix handling `ShortOptions` and `SkipArgReorder` in [urfave/cli/pull/686](https://github.com/urfave/cli/pull/686) via [@baude](https://github.com/baude) +* Fix args reordering when bool flags are present in [urfave/cli/pull/712](https://github.com/urfave/cli/pull/712) via [@windler](https://github.com/windler) +* Fix parsing of short options in [urfave/cli/pull/758](https://github.com/urfave/cli/pull/758) via [@vrothberg](https://github.com/vrothberg) +* Fix unaligned indents for the command help messages in [urfave/cli/pull/806](https://github.com/urfave/cli/pull/806) via [@mingrammer](https://github.com/mingrammer) + +### Changed + +* Cleaned up help output in [urfave/cli/pull/664](https://github.com/urfave/cli/pull/664) via [@maguro](https://github.com/maguro) +* Remove redundant nil checks in [urfave/cli/pull/773](https://github.com/urfave/cli/pull/773) via [@teresy](https://github.com/teresy) +* Case is now considered when sorting strings in [urfave/cli/pull/676](https://github.com/urfave/cli/pull/676) via [@rliebz](https://github.com/rliebz) + +### Added + +* Added _"required flags"_ support in [urfave/cli/pull/819](https://github.com/urfave/cli/pull/819) via [@lynncyrin](https://github.com/lynncyrin/) +* Backport JSON `InputSource` to v1 in [urfave/cli/pull/598](https://github.com/urfave/cli/pull/598) via [@jszwedko](https://github.com/jszwedko) +* Allow more customization of flag help strings in [urfave/cli/pull/661](https://github.com/urfave/cli/pull/661) via [@rliebz](https://github.com/rliebz) +* Allow custom `ExitError` handler function in [urfave/cli/pull/628](https://github.com/urfave/cli/pull/628) via [@phinnaeus](https://github.com/phinnaeus) +* Allow loading a variable from a file in [urfave/cli/pull/675](https://github.com/urfave/cli/pull/675) via [@jmccann](https://github.com/jmccann) +* Allow combining short bool names in [urfave/cli/pull/684](https://github.com/urfave/cli/pull/684) via [@baude](https://github.com/baude) +* Added test coverage to context in [urfave/cli/pull/788](https://github.com/urfave/cli/pull/788) via [@benzvan](https://github.com/benzvan) +* Added go module support in [urfave/cli/pull/831](https://github.com/urfave/cli/pull/831) via [@saschagrunert](https://github.com/saschagrunert) + +## 
[1.20.0] - 2017-08-10 ### Fixed @@ -407,7 +470,13 @@ signature of `func(*cli.Context) error`, as defined by `cli.ActionFunc`. ### Added - Initial implementation. -[Unreleased]: https://github.com/urfave/cli/compare/v1.18.0...HEAD +[Unreleased]: https://github.com/urfave/cli/compare/v1.22.1...HEAD +[1.22.1]: https://github.com/urfave/cli/compare/v1.22.0...v1.22.1 +[1.22.0]: https://github.com/urfave/cli/compare/v1.21.0...v1.22.0 +[1.21.0]: https://github.com/urfave/cli/compare/v1.20.0...v1.21.0 +[1.20.0]: https://github.com/urfave/cli/compare/v1.19.1...v1.20.0 +[1.19.1]: https://github.com/urfave/cli/compare/v1.19.0...v1.19.1 +[1.19.0]: https://github.com/urfave/cli/compare/v1.18.0...v1.19.0 [1.18.0]: https://github.com/urfave/cli/compare/v1.17.0...v1.18.0 [1.17.0]: https://github.com/urfave/cli/compare/v1.16.0...v1.17.0 [1.16.0]: https://github.com/urfave/cli/compare/v1.15.0...v1.16.0 diff --git a/vendor/github.com/urfave/cli/CODE_OF_CONDUCT.md b/vendor/github.com/urfave/cli/CODE_OF_CONDUCT.md new file mode 100644 index 000000000..41ba294f6 --- /dev/null +++ b/vendor/github.com/urfave/cli/CODE_OF_CONDUCT.md @@ -0,0 +1,74 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of experience, +education, socio-economic status, nationality, personal appearance, race, +religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting Dan Buch at dan@meatballhat.com. All complaints will be +reviewed and investigated and will result in a response that is deemed necessary +and appropriate to the circumstances. The project team is obligated to maintain +confidentiality with regard to the reporter of an incident. Further details of +specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html + +[homepage]: https://www.contributor-covenant.org + diff --git a/vendor/github.com/urfave/cli/CONTRIBUTING.md b/vendor/github.com/urfave/cli/CONTRIBUTING.md new file mode 100644 index 000000000..9a4640a38 --- /dev/null +++ b/vendor/github.com/urfave/cli/CONTRIBUTING.md @@ -0,0 +1,18 @@ +## Contributing + +Use @urfave/cli to ping the maintainers. + +Feel free to put up a pull request to fix a bug or maybe add a feature. We will +give it a code review and make sure that it does not break backwards +compatibility. If collaborators agree that it is in line with +the vision of the project, we will work with you to get the code into +a mergeable state and merge it into the master branch. + +If you have contributed something significant to the project, we will most +likely add you as a collaborator. As a collaborator you are given the ability +to merge others pull requests. It is very important that new code does not +break existing code, so be careful about what code you do choose to merge. + +If you feel like you have contributed to the project but have not yet been added +as a collaborator, we probably forgot to add you :sweat_smile:. Please open an +issue! diff --git a/vendor/github.com/urfave/cli/README.md b/vendor/github.com/urfave/cli/README.md index 2bbbd8ea9..96720b621 100644 --- a/vendor/github.com/urfave/cli/README.md +++ b/vendor/github.com/urfave/cli/README.md @@ -3,15 +3,11 @@ cli [![Build Status](https://travis-ci.org/urfave/cli.svg?branch=master)](https://travis-ci.org/urfave/cli) [![Windows Build Status](https://ci.appveyor.com/api/projects/status/rtgk5xufi932pb2v?svg=true)](https://ci.appveyor.com/project/urfave/cli) + [![GoDoc](https://godoc.org/github.com/urfave/cli?status.svg)](https://godoc.org/github.com/urfave/cli) [![codebeat](https://codebeat.co/badges/0a8f30aa-f975-404b-b878-5fab3ae1cc5f)](https://codebeat.co/projects/github-com-urfave-cli) [![Go Report Card](https://goreportcard.com/badge/urfave/cli)](https://goreportcard.com/report/urfave/cli) -[![top level coverage](https://gocover.io/_badge/github.com/urfave/cli?0 "top level coverage")](http://gocover.io/github.com/urfave/cli) / -[![altsrc coverage](https://gocover.io/_badge/github.com/urfave/cli/altsrc?0 "altsrc coverage")](http://gocover.io/github.com/urfave/cli/altsrc) - -**Notice:** This is the library formerly known as -`github.com/codegangsta/cli` -- Github will automatically redirect requests -to this repository, but we recommend updating your references for clarity. +[![codecov](https://codecov.io/gh/urfave/cli/branch/master/graph/badge.svg)](https://codecov.io/gh/urfave/cli) cli is a simple, fast, and fun package for building command line apps in Go. 
The goal is to enable developers to write fast and distributable command line @@ -23,7 +19,7 @@ applications in an expressive way. - [Installation](#installation) * [Supported platforms](#supported-platforms) * [Using the `v2` branch](#using-the-v2-branch) - * [Pinning to the `v1` releases](#pinning-to-the-v1-releases) + * [Using `v1` releases](#using-v1-releases) - [Getting Started](#getting-started) - [Examples](#examples) * [Arguments](#arguments) @@ -32,10 +28,13 @@ applications in an expressive way. + [Alternate Names](#alternate-names) + [Ordering](#ordering) + [Values from the Environment](#values-from-the-environment) + + [Values from files](#values-from-files) + [Values from alternate input sources (YAML, TOML, and others)](#values-from-alternate-input-sources-yaml-toml-and-others) + + [Precedence](#precedence) * [Subcommands](#subcommands) * [Subcommands categories](#subcommands-categories) * [Exit code](#exit-code) + * [Combining short options](#combining-short-options) * [Bash Completion](#bash-completion) + [Enabling](#enabling) + [Distribution](#distribution) @@ -61,7 +60,7 @@ organized, and expressive! ## Installation -Make sure you have a working Go environment. Go version 1.2+ is supported. [See +Make sure you have a working Go environment. Go version 1.10+ is supported. [See the install instructions for Go](http://golang.org/doc/install.html). To install cli, simply run: @@ -105,25 +104,20 @@ import ( ... ``` -### Pinning to the `v1` releases - -Similarly to the section above describing use of the `v2` branch, if one wants -to avoid any unexpected compatibility pains once `v2` becomes `master`, then -pinning to `v1` is an acceptable option, e.g.: +### Using `v1` releases ``` -$ go get gopkg.in/urfave/cli.v1 +$ go get github.com/urfave/cli ``` -``` go +```go ... import ( - "gopkg.in/urfave/cli.v1" // imports as package "cli" + "github.com/urfave/cli" ) ... ``` -This will pull the latest tagged `v1` release (e.g. `v1.18.1` at the time of writing). ## Getting Started @@ -138,13 +132,17 @@ discovery. So a cli app can be as little as one line of code in `main()`. package main import ( + "log" "os" "github.com/urfave/cli" ) func main() { - cli.NewApp().Run(os.Args) + err := cli.NewApp().Run(os.Args) + if err != nil { + log.Fatal(err) + } } ``` @@ -159,6 +157,7 @@ package main import ( "fmt" + "log" "os" "github.com/urfave/cli" @@ -173,7 +172,10 @@ func main() { return nil } - app.Run(os.Args) + err := app.Run(os.Args) + if err != nil { + log.Fatal(err) + } } ``` @@ -197,6 +199,7 @@ package main import ( "fmt" + "log" "os" "github.com/urfave/cli" @@ -211,7 +214,10 @@ func main() { return nil } - app.Run(os.Args) + err := app.Run(os.Args) + if err != nil { + log.Fatal(err) + } } ``` @@ -260,6 +266,7 @@ package main import ( "fmt" + "log" "os" "github.com/urfave/cli" @@ -273,7 +280,10 @@ func main() { return nil } - app.Run(os.Args) + err := app.Run(os.Args) + if err != nil { + log.Fatal(err) + } } ``` @@ -289,6 +299,7 @@ package main import ( "fmt" + "log" "os" "github.com/urfave/cli" @@ -318,7 +329,10 @@ func main() { return nil } - app.Run(os.Args) + err := app.Run(os.Args) + if err != nil { + log.Fatal(err) + } } ``` @@ -332,6 +346,7 @@ scanned. 
package main import ( + "log" "os" "fmt" @@ -365,7 +380,10 @@ func main() { return nil } - app.Run(os.Args) + err := app.Run(os.Args) + if err != nil { + log.Fatal(err) + } } ``` @@ -386,6 +404,7 @@ For example this: package main import ( + "log" "os" "github.com/urfave/cli" @@ -401,7 +420,10 @@ func main() { }, } - app.Run(os.Args) + err := app.Run(os.Args) + if err != nil { + log.Fatal(err) + } } ``` @@ -427,6 +449,7 @@ list for the `Name`. e.g. package main import ( + "log" "os" "github.com/urfave/cli" @@ -443,7 +466,10 @@ func main() { }, } - app.Run(os.Args) + err := app.Run(os.Args) + if err != nil { + log.Fatal(err) + } } ``` @@ -467,6 +493,7 @@ For example this: package main import ( + "log" "os" "sort" @@ -510,7 +537,10 @@ func main() { sort.Sort(cli.FlagsByName(app.Flags)) sort.Sort(cli.CommandsByName(app.Commands)) - app.Run(os.Args) + err := app.Run(os.Args) + if err != nil { + log.Fatal(err) + } } ``` @@ -533,6 +563,7 @@ You can also have the default value set from the environment via `EnvVar`. e.g. package main import ( + "log" "os" "github.com/urfave/cli" @@ -550,7 +581,10 @@ func main() { }, } - app.Run(os.Args) + err := app.Run(os.Args) + if err != nil { + log.Fatal(err) + } } ``` @@ -565,6 +599,7 @@ environment variable that resolves is used as the default. package main import ( + "log" "os" "github.com/urfave/cli" @@ -582,10 +617,52 @@ func main() { }, } - app.Run(os.Args) + err := app.Run(os.Args) + if err != nil { + log.Fatal(err) + } } ``` +#### Values from files + +You can also have the default value set from file via `FilePath`. e.g. + + +``` go +package main + +import ( + "log" + "os" + + "github.com/urfave/cli" +) + +func main() { + app := cli.NewApp() + + app.Flags = []cli.Flag { + cli.StringFlag{ + Name: "password, p", + Usage: "password for the mysql database", + FilePath: "/etc/mysql/password", + }, + } + + err := app.Run(os.Args) + if err != nil { + log.Fatal(err) + } +} +``` + +Note that default values set from file (e.g. `FilePath`) take precedence over +default values set from the environment (e.g. `EnvVar`). + #### Values from alternate input sources (YAML, TOML, and others) There is a separate package altsrc that adds support for getting flag values @@ -593,6 +670,7 @@ from other file input sources. Currently supported input source formats: * YAML +* JSON * TOML In order to get values for a flag from an alternate input source the following @@ -615,9 +693,9 @@ the yaml input source for any flags that are defined on that command. As a note the "load" flag used would also have to be defined on the command flags in order for this code snipped to work. -Currently only the aboved specified formats are supported but developers can -add support for other input sources by implementing the -altsrc.InputSourceContext for their given sources. +Currently only YAML, JSON, and TOML files are supported but developers can add support +for other input sources by implementing the altsrc.InputSourceContext for their +given sources. Here is a more complete sample of a command using YAML support: @@ -630,6 +708,7 @@ package notmain import ( "fmt" + "log" "os" "github.com/urfave/cli" @@ -652,10 +731,22 @@ func main() { app.Before = altsrc.InitInputSourceWithContext(flags, altsrc.NewYamlSourceFromFlagFunc("load")) app.Flags = flags - app.Run(os.Args) + err := app.Run(os.Args) + if err != nil { + log.Fatal(err) + } } ``` +#### Precedence + +The precedence for flag value sources is as follows (highest to lowest): + +0. Command line flag value from user +0. 
Environment variable (if specified) +0. Configuration file (if specified) +0. Default defined on the flag + ### Subcommands Subcommands can be defined for a more git-like command line app. @@ -669,6 +760,7 @@ package main import ( "fmt" + "log" "os" "github.com/urfave/cli" @@ -721,7 +813,10 @@ func main() { }, } - app.Run(os.Args) + err := app.Run(os.Args) + if err != nil { + log.Fatal(err) + } } ``` @@ -737,6 +832,7 @@ E.g. package main import ( + "log" "os" "github.com/urfave/cli" @@ -751,15 +847,18 @@ func main() { }, { Name: "add", - Category: "template", + Category: "Template actions", }, { Name: "remove", - Category: "template", + Category: "Template actions", }, } - app.Run(os.Args) + err := app.Run(os.Args) + if err != nil { + log.Fatal(err) + } } ``` @@ -767,7 +866,7 @@ Will include: ``` COMMANDS: - noop + noop Template actions: add @@ -780,11 +879,14 @@ Calling `App.Run` will not automatically call `os.Exit`, which means that by default the exit code will "fall through" to being `0`. An explicit exit code may be set by returning a non-nil error that fulfills `cli.ExitCoder`, *or* a `cli.MultiError` that includes an error that fulfills `cli.ExitCoder`, e.g.: - + ``` go package main import ( + "log" "os" "github.com/urfave/cli" @@ -793,22 +895,95 @@ import ( func main() { app := cli.NewApp() app.Flags = []cli.Flag{ - cli.BoolTFlag{ + cli.BoolFlag{ Name: "ginger-crouton", - Usage: "is it in the soup?", + Usage: "Add ginger croutons to the soup", }, } app.Action = func(ctx *cli.Context) error { if !ctx.Bool("ginger-crouton") { - return cli.NewExitError("it is not in the soup", 86) + return cli.NewExitError("Ginger croutons are not in the soup", 86) } return nil } - app.Run(os.Args) + err := app.Run(os.Args) + if err != nil { + log.Fatal(err) + } } ``` +### Combining short options + +Traditional use of options using their shortnames look like this: + +``` +$ cmd -s -o -m "Some message" +``` + +Suppose you want users to be able to combine options with their shortnames. This +can be done using the `UseShortOptionHandling` bool in your app configuration, +or for individual commands by attaching it to the command configuration. For +example: + + +``` go +package main + +import ( + "fmt" + "log" + "os" + + "github.com/urfave/cli" +) + +func main() { + app := cli.NewApp() + app.UseShortOptionHandling = true + app.Commands = []cli.Command{ + { + Name: "short", + Usage: "complete a task on the list", + Flags: []cli.Flag{ + cli.BoolFlag{Name: "serve, s"}, + cli.BoolFlag{Name: "option, o"}, + cli.StringFlag{Name: "message, m"}, + }, + Action: func(c *cli.Context) error { + fmt.Println("serve:", c.Bool("serve")) + fmt.Println("option:", c.Bool("option")) + fmt.Println("message:", c.String("message")) + return nil + }, + }, + } + + err := app.Run(os.Args) + if err != nil { + log.Fatal(err) + } +} +``` + +If your program has any number of bool flags such as `serve` and `option`, and +optionally one non-bool flag `message`, with the short options of `-s`, `-o`, +and `-m` respectively, setting `UseShortOptionHandling` will also support the +following syntax: + +``` +$ cmd -som "Some message" +``` + +If you enable `UseShortOptionHandling`, then you must not use any flags that +have a single leading `-` or this will result in failures. For example, +`-option` can no longer be used. Flags with two leading dashes (such as +`--options`) are still valid. 
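
As noted above, `UseShortOptionHandling` can also be set on an individual command rather than on the whole app (the field is added to `cli.Command` in the `command.go` hunk later in this patch). A minimal sketch of that per-command form, reusing the flags from the example above and only fields that appear elsewhere in this patch, might look like:

``` go
package main

import (
	"fmt"
	"log"
	"os"

	"github.com/urfave/cli"
)

func main() {
	app := cli.NewApp()
	app.Commands = []cli.Command{
		{
			Name:  "short",
			Usage: "complete a task on the list",
			// Enable combined short options for this command only,
			// rather than app-wide via app.UseShortOptionHandling.
			UseShortOptionHandling: true,
			Flags: []cli.Flag{
				cli.BoolFlag{Name: "serve, s"},
				cli.BoolFlag{Name: "option, o"},
				cli.StringFlag{Name: "message, m"},
			},
			Action: func(c *cli.Context) error {
				fmt.Println("serve:", c.Bool("serve"))
				fmt.Println("option:", c.Bool("option"))
				fmt.Println("message:", c.String("message"))
				return nil
			},
		},
	}

	err := app.Run(os.Args)
	if err != nil {
		log.Fatal(err)
	}
}
```

With this, `$ cmd short -som "Some message"` is expected to parse the same way as in the app-wide example, while other commands keep the default flag parsing.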
+ ### Bash Completion You can enable completion commands by setting the `EnableBashCompletion` @@ -825,6 +1000,7 @@ package main import ( "fmt" + "log" "os" "github.com/urfave/cli" @@ -856,7 +1032,10 @@ func main() { }, } - app.Run(os.Args) + err := app.Run(os.Args) + if err != nil { + log.Fatal(err) + } } ``` @@ -896,6 +1075,7 @@ The default bash completion flag (`--generate-bash-completion`) is defined as package main import ( + "log" "os" "github.com/urfave/cli" @@ -914,7 +1094,10 @@ func main() { Name: "wat", }, } - app.Run(os.Args) + err := app.Run(os.Args) + if err != nil { + log.Fatal(err) + } } ``` @@ -940,6 +1123,7 @@ package main import ( "fmt" + "log" "io" "os" @@ -983,7 +1167,10 @@ VERSION: fmt.Println("Ha HA. I pwnd the help!!1") } - cli.NewApp().Run(os.Args) + err := cli.NewApp().Run(os.Args) + if err != nil { + log.Fatal(err) + } } ``` @@ -998,6 +1185,7 @@ setting `cli.HelpFlag`, e.g.: package main import ( + "log" "os" "github.com/urfave/cli" @@ -1010,7 +1198,10 @@ func main() { EnvVar: "SHOW_HALP,HALPPLZ", } - cli.NewApp().Run(os.Args) + err := cli.NewApp().Run(os.Args) + if err != nil { + log.Fatal(err) + } } ``` @@ -1033,6 +1224,7 @@ setting `cli.VersionFlag`, e.g.: package main import ( + "log" "os" "github.com/urfave/cli" @@ -1047,7 +1239,10 @@ func main() { app := cli.NewApp() app.Name = "partay" app.Version = "19.99.0" - app.Run(os.Args) + err := app.Run(os.Args) + if err != nil { + log.Fatal(err) + } } ``` @@ -1062,6 +1257,7 @@ package main import ( "fmt" + "log" "os" "github.com/urfave/cli" @@ -1079,7 +1275,10 @@ func main() { app := cli.NewApp() app.Name = "partay" app.Version = "19.99.0" - app.Run(os.Args) + err := app.Run(os.Args) + if err != nil { + log.Fatal(err) + } } ``` @@ -1236,6 +1435,7 @@ func main() { cli.Uint64Flag{Name: "bigage"}, } app.EnableBashCompletion = true + app.UseShortOptionHandling = true app.HideHelp = false app.HideVersion = false app.BashComplete = func(c *cli.Context) { @@ -1341,7 +1541,7 @@ func main() { ec := cli.NewExitError("ohwell", 86) fmt.Fprintf(c.App.Writer, "%d", ec.ExitCode()) fmt.Printf("made it!\n") - return ec + return nil } if os.Getenv("HEXY") != "" { @@ -1355,7 +1555,9 @@ func main() { "whatever-values": 19.99, } - app.Run(os.Args) + + // ignore error so we don't exit non-zero and break gfmrun README example tests + _ = app.Run(os.Args) } func wopAction(c *cli.Context) error { @@ -1366,16 +1568,4 @@ func wopAction(c *cli.Context) error { ## Contribution Guidelines -Feel free to put up a pull request to fix a bug or maybe add a feature. I will -give it a code review and make sure that it does not break backwards -compatibility. If I or any other collaborators agree that it is in line with -the vision of the project, we will work with you to get the code into -a mergeable state and merge it into the master branch. - -If you have contributed something significant to the project, we will most -likely add you as a collaborator. As a collaborator you are given the ability -to merge others pull requests. It is very important that new code does not -break existing code, so be careful about what code you do choose to merge. - -If you feel like you have contributed to the project but have not yet been -added as a collaborator, we probably forgot to add you, please open an issue. 
+See [./CONTRIBUTING.md](./CONTRIBUTING.md) diff --git a/vendor/github.com/urfave/cli/app.go b/vendor/github.com/urfave/cli/app.go index 51fc45d87..76e869d37 100644 --- a/vendor/github.com/urfave/cli/app.go +++ b/vendor/github.com/urfave/cli/app.go @@ -1,9 +1,9 @@ package cli import ( + "flag" "fmt" "io" - "io/ioutil" "os" "path/filepath" "sort" @@ -11,9 +11,10 @@ import ( ) var ( - changeLogURL = "https://github.com/urfave/cli/blob/master/CHANGELOG.md" - appActionDeprecationURL = fmt.Sprintf("%s#deprecated-cli-app-action-signature", changeLogURL) - runAndExitOnErrorDeprecationURL = fmt.Sprintf("%s#deprecated-cli-app-runandexitonerror", changeLogURL) + changeLogURL = "https://github.com/urfave/cli/blob/master/CHANGELOG.md" + appActionDeprecationURL = fmt.Sprintf("%s#deprecated-cli-app-action-signature", changeLogURL) + // unused variable. commented for now. will remove in future if agreed upon by everyone + //runAndExitOnErrorDeprecationURL = fmt.Sprintf("%s#deprecated-cli-app-runandexitonerror", changeLogURL) contactSysadmin = "This is an error in the application. Please contact the distributor of this application if this is not you." @@ -83,6 +84,9 @@ type App struct { Writer io.Writer // ErrWriter writes error output ErrWriter io.Writer + // Execute this function to handle ExitErrors. If not provided, HandleExitCoder is provided to + // function as a default, so this is optional. + ExitErrHandler ExitErrHandlerFunc // Other custom info Metadata map[string]interface{} // Carries a function which returns app specific info. @@ -91,6 +95,10 @@ type App struct { // cli.go uses text/template to render templates. You can // render custom help text by setting this variable. CustomAppHelpTemplate string + // Boolean to enable short-option handling so user can combine several + // single-character bool arguements into one + // i.e. foobar -o -v -> foobar -ov + UseShortOptionHandling bool didSetup bool } @@ -135,7 +143,7 @@ func (a *App) Setup() { a.Authors = append(a.Authors, Author{Name: a.Author, Email: a.Email}) } - newCmds := []Command{} + var newCmds []Command for _, c := range a.Commands { if c.HelpName == "" { c.HelpName = fmt.Sprintf("%s %s", a.HelpName, c.Name) @@ -170,6 +178,14 @@ func (a *App) Setup() { } } +func (a *App) newFlagSet() (*flag.FlagSet, error) { + return flagSet(a.Name, a.Flags) +} + +func (a *App) useShortOptionHandling() bool { + return a.UseShortOptionHandling +} + // Run is the entry point to the cli app. 
Parses the arguments slice and routes // to the proper flag/args combination func (a *App) Run(arguments []string) (err error) { @@ -183,19 +199,17 @@ func (a *App) Run(arguments []string) (err error) { // always appends the completion flag at the end of the command shellComplete, arguments := checkShellCompleteFlag(a, arguments) - // parse flags - set, err := flagSet(a.Name, a.Flags) + _, err = a.newFlagSet() if err != nil { return err } - set.SetOutput(ioutil.Discard) - err = set.Parse(arguments[1:]) + set, err := parseIter(a, arguments[1:]) nerr := normalizeFlags(a.Flags, set) context := NewContext(a, set, nil) if nerr != nil { - fmt.Fprintln(a.Writer, nerr) - ShowAppHelp(context) + _, _ = fmt.Fprintln(a.Writer, nerr) + _ = ShowAppHelp(context) return nerr } context.shellComplete = shellComplete @@ -207,16 +221,16 @@ func (a *App) Run(arguments []string) (err error) { if err != nil { if a.OnUsageError != nil { err := a.OnUsageError(context, err, false) - HandleExitCoder(err) + a.handleExitCoder(context, err) return err } - fmt.Fprintf(a.Writer, "%s %s\n\n", "Incorrect Usage.", err.Error()) - ShowAppHelp(context) + _, _ = fmt.Fprintf(a.Writer, "%s %s\n\n", "Incorrect Usage.", err.Error()) + _ = ShowAppHelp(context) return err } if !a.HideHelp && checkHelp(context) { - ShowAppHelp(context) + _ = ShowAppHelp(context) return nil } @@ -225,6 +239,12 @@ func (a *App) Run(arguments []string) (err error) { return nil } + cerr := checkRequiredFlags(a.Flags, context) + if cerr != nil { + _ = ShowAppHelp(context) + return cerr + } + if a.After != nil { defer func() { if afterErr := a.After(context); afterErr != nil { @@ -240,8 +260,9 @@ func (a *App) Run(arguments []string) (err error) { if a.Before != nil { beforeErr := a.Before(context) if beforeErr != nil { - ShowAppHelp(context) - HandleExitCoder(beforeErr) + _, _ = fmt.Fprintf(a.Writer, "%v\n\n", beforeErr) + _ = ShowAppHelp(context) + a.handleExitCoder(context, beforeErr) err = beforeErr return err } @@ -263,7 +284,7 @@ func (a *App) Run(arguments []string) (err error) { // Run default Action err = HandleAction(a.Action, context) - HandleExitCoder(err) + a.handleExitCoder(context, err) return err } @@ -274,7 +295,7 @@ func (a *App) Run(arguments []string) (err error) { // code in the cli.ExitCoder func (a *App) RunAndExitOnError() { if err := a.Run(os.Args); err != nil { - fmt.Fprintln(a.errWriter(), err) + _, _ = fmt.Fprintln(a.errWriter(), err) OsExiter(1) } } @@ -301,24 +322,22 @@ func (a *App) RunAsSubcommand(ctx *Context) (err error) { } a.Commands = newCmds - // parse flags - set, err := flagSet(a.Name, a.Flags) + _, err = a.newFlagSet() if err != nil { return err } - set.SetOutput(ioutil.Discard) - err = set.Parse(ctx.Args().Tail()) + set, err := parseIter(a, ctx.Args().Tail()) nerr := normalizeFlags(a.Flags, set) context := NewContext(a, set, ctx) if nerr != nil { - fmt.Fprintln(a.Writer, nerr) - fmt.Fprintln(a.Writer) + _, _ = fmt.Fprintln(a.Writer, nerr) + _, _ = fmt.Fprintln(a.Writer) if len(a.Commands) > 0 { - ShowSubcommandHelp(context) + _ = ShowSubcommandHelp(context) } else { - ShowCommandHelp(ctx, context.Args().First()) + _ = ShowCommandHelp(ctx, context.Args().First()) } return nerr } @@ -330,11 +349,11 @@ func (a *App) RunAsSubcommand(ctx *Context) (err error) { if err != nil { if a.OnUsageError != nil { err = a.OnUsageError(context, err, true) - HandleExitCoder(err) + a.handleExitCoder(context, err) return err } - fmt.Fprintf(a.Writer, "%s %s\n\n", "Incorrect Usage.", err.Error()) - ShowSubcommandHelp(context) + _, _ = 
fmt.Fprintf(a.Writer, "%s %s\n\n", "Incorrect Usage.", err.Error()) + _ = ShowSubcommandHelp(context) return err } @@ -348,11 +367,17 @@ func (a *App) RunAsSubcommand(ctx *Context) (err error) { } } + cerr := checkRequiredFlags(a.Flags, context) + if cerr != nil { + _ = ShowSubcommandHelp(context) + return cerr + } + if a.After != nil { defer func() { afterErr := a.After(context) if afterErr != nil { - HandleExitCoder(err) + a.handleExitCoder(context, err) if err != nil { err = NewMultiError(err, afterErr) } else { @@ -365,7 +390,7 @@ func (a *App) RunAsSubcommand(ctx *Context) (err error) { if a.Before != nil { beforeErr := a.Before(context) if beforeErr != nil { - HandleExitCoder(beforeErr) + a.handleExitCoder(context, beforeErr) err = beforeErr return err } @@ -383,7 +408,7 @@ func (a *App) RunAsSubcommand(ctx *Context) (err error) { // Run default Action err = HandleAction(a.Action, context) - HandleExitCoder(err) + a.handleExitCoder(context, err) return err } @@ -424,7 +449,7 @@ func (a *App) VisibleCategories() []*CommandCategory { // VisibleCommands returns a slice of the Commands with Hidden=false func (a *App) VisibleCommands() []Command { - ret := []Command{} + var ret []Command for _, command := range a.Commands { if !command.Hidden { ret = append(ret, command) @@ -449,7 +474,6 @@ func (a *App) hasFlag(flag Flag) bool { } func (a *App) errWriter() io.Writer { - // When the app ErrWriter is nil use the package level one. if a.ErrWriter == nil { return ErrWriter @@ -464,6 +488,14 @@ func (a *App) appendFlag(flag Flag) { } } +func (a *App) handleExitCoder(context *Context, err error) { + if a.ExitErrHandler != nil { + a.ExitErrHandler(context, err) + } else { + HandleExitCoder(err) + } +} + // Author represents someone who has contributed to a cli project. type Author struct { Name string // The Authors name @@ -484,14 +516,15 @@ func (a Author) String() string { // it's an ActionFunc or a func with the legacy signature for Action, the func // is run! func HandleAction(action interface{}, context *Context) (err error) { - if a, ok := action.(ActionFunc); ok { + switch a := action.(type) { + case ActionFunc: return a(context) - } else if a, ok := action.(func(*Context) error); ok { + case func(*Context) error: return a(context) - } else if a, ok := action.(func(*Context)); ok { // deprecated function signature + case func(*Context): // deprecated function signature a(context) return nil - } else { - return errInvalidActionType } + + return errInvalidActionType } diff --git a/vendor/github.com/urfave/cli/appveyor.yml b/vendor/github.com/urfave/cli/appveyor.yml index 1e1489c36..6d2dcee29 100644 --- a/vendor/github.com/urfave/cli/appveyor.yml +++ b/vendor/github.com/urfave/cli/appveyor.yml @@ -8,19 +8,16 @@ clone_folder: c:\gopath\src\github.com\urfave\cli environment: GOPATH: C:\gopath - GOVERSION: 1.8.x - PYTHON: C:\Python36-x64 - PYTHON_VERSION: 3.6.x - PYTHON_ARCH: 64 + GOVERSION: 1.11.x install: -- set PATH=%GOPATH%\bin;C:\go\bin;%PATH% -- go version -- go env -- go get github.com/urfave/gfmrun/... -- go get -v -t ./... + - set PATH=%GOPATH%\bin;C:\go\bin;%PATH% + - go version + - go env + - go get github.com/urfave/gfmrun/... + - go get -v -t ./... 
build_script: -- python runtests vet -- python runtests test -- python runtests gfmrun + - go run build.go vet + - go run build.go test + - go run build.go gfmrun diff --git a/vendor/github.com/urfave/cli/build.go b/vendor/github.com/urfave/cli/build.go new file mode 100644 index 000000000..a828ae407 --- /dev/null +++ b/vendor/github.com/urfave/cli/build.go @@ -0,0 +1,164 @@ +//+build ignore + +package main + +import ( + "bufio" + "bytes" + "fmt" + "io/ioutil" + "log" + "os" + "os/exec" + "strings" + + "github.com/urfave/cli" +) + +var packages = []string{"cli", "altsrc"} + +func main() { + app := cli.NewApp() + + app.Name = "builder" + app.Usage = "Generates a new urfave/cli build!" + + app.Commands = cli.Commands{ + cli.Command{ + Name: "vet", + Action: VetActionFunc, + }, + cli.Command{ + Name: "test", + Action: TestActionFunc, + }, + cli.Command{ + Name: "gfmrun", + Action: GfmrunActionFunc, + }, + cli.Command{ + Name: "toc", + Action: TocActionFunc, + }, + } + + err := app.Run(os.Args) + if err != nil { + log.Fatal(err) + } +} + +func runCmd(arg string, args ...string) error { + cmd := exec.Command(arg, args...) + + cmd.Stdin = os.Stdin + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + + return cmd.Run() +} + +func VetActionFunc(_ *cli.Context) error { + return runCmd("go", "vet") +} + +func TestActionFunc(c *cli.Context) error { + for _, pkg := range packages { + var packageName string + + if pkg == "cli" { + packageName = "github.com/urfave/cli" + } else { + packageName = fmt.Sprintf("github.com/urfave/cli/%s", pkg) + } + + coverProfile := fmt.Sprintf("--coverprofile=%s.coverprofile", pkg) + + err := runCmd("go", "test", "-v", coverProfile, packageName) + if err != nil { + return err + } + } + + return testCleanup() +} + +func testCleanup() error { + var out bytes.Buffer + + for _, pkg := range packages { + file, err := os.Open(fmt.Sprintf("%s.coverprofile", pkg)) + if err != nil { + return err + } + + b, err := ioutil.ReadAll(file) + if err != nil { + return err + } + + out.Write(b) + err = file.Close() + if err != nil { + return err + } + + err = os.Remove(fmt.Sprintf("%s.coverprofile", pkg)) + if err != nil { + return err + } + } + + outFile, err := os.Create("coverage.txt") + if err != nil { + return err + } + + _, err = out.WriteTo(outFile) + if err != nil { + return err + } + + err = outFile.Close() + if err != nil { + return err + } + + return nil +} + +func GfmrunActionFunc(_ *cli.Context) error { + file, err := os.Open("README.md") + if err != nil { + return err + } + + var counter int + scanner := bufio.NewScanner(file) + for scanner.Scan() { + if strings.Contains(scanner.Text(), "package main") { + counter++ + } + } + + err = scanner.Err() + if err != nil { + return err + } + + return runCmd("gfmrun", "-c", fmt.Sprint(counter), "-s", "README.md") +} + +func TocActionFunc(_ *cli.Context) error { + err := runCmd("node_modules/.bin/markdown-toc", "-i", "README.md") + if err != nil { + return err + } + + err = runCmd("git", "diff", "--exit-code") + if err != nil { + return err + } + + return nil +} diff --git a/vendor/github.com/urfave/cli/category.go b/vendor/github.com/urfave/cli/category.go index 1a6055023..bf3c73c55 100644 --- a/vendor/github.com/urfave/cli/category.go +++ b/vendor/github.com/urfave/cli/category.go @@ -10,7 +10,7 @@ type CommandCategory struct { } func (c CommandCategories) Less(i, j int) bool { - return c[i].Name < c[j].Name + return lexicographicLess(c[i].Name, c[j].Name) } func (c CommandCategories) Len() int { diff --git 
a/vendor/github.com/urfave/cli/cli.go b/vendor/github.com/urfave/cli/cli.go index 90c07eb8e..4bd250839 100644 --- a/vendor/github.com/urfave/cli/cli.go +++ b/vendor/github.com/urfave/cli/cli.go @@ -19,4 +19,4 @@ // } package cli -//go:generate python ./generate-flag-types cli -i flag-types.json -o flag_generated.go +//go:generate go run flag-gen/main.go flag-gen/assets_vfsdata.go diff --git a/vendor/github.com/urfave/cli/command.go b/vendor/github.com/urfave/cli/command.go index 23de2944b..44a90de6b 100644 --- a/vendor/github.com/urfave/cli/command.go +++ b/vendor/github.com/urfave/cli/command.go @@ -1,8 +1,8 @@ package cli import ( + "flag" "fmt" - "io/ioutil" "sort" "strings" ) @@ -55,6 +55,10 @@ type Command struct { HideHelp bool // Boolean to hide this command from help or completion Hidden bool + // Boolean to enable short-option handling so user can combine several + // single-character bool arguments into one + // i.e. foobar -o -v -> foobar -ov + UseShortOptionHandling bool // Full name of command for help, defaults to full command name, including parent commands. HelpName string @@ -73,7 +77,7 @@ func (c CommandsByName) Len() int { } func (c CommandsByName) Less(i, j int) bool { - return c[i].Name < c[j].Name + return lexicographicLess(c[i].Name, c[j].Name) } func (c CommandsByName) Swap(i, j int) { @@ -106,57 +110,11 @@ func (c Command) Run(ctx *Context) (err error) { ) } - set, err := flagSet(c.Name, c.Flags) - if err != nil { - return err - } - set.SetOutput(ioutil.Discard) - - if c.SkipFlagParsing { - err = set.Parse(append([]string{"--"}, ctx.Args().Tail()...)) - } else if !c.SkipArgReorder { - firstFlagIndex := -1 - terminatorIndex := -1 - for index, arg := range ctx.Args() { - if arg == "--" { - terminatorIndex = index - break - } else if arg == "-" { - // Do nothing. A dash alone is not really a flag. - continue - } else if strings.HasPrefix(arg, "-") && firstFlagIndex == -1 { - firstFlagIndex = index - } - } - - if firstFlagIndex > -1 { - args := ctx.Args() - regularArgs := make([]string, len(args[1:firstFlagIndex])) - copy(regularArgs, args[1:firstFlagIndex]) - - var flagArgs []string - if terminatorIndex > -1 { - flagArgs = args[firstFlagIndex:terminatorIndex] - regularArgs = append(regularArgs, args[terminatorIndex:]...) 
- } else { - flagArgs = args[firstFlagIndex:] - } - - err = set.Parse(append(flagArgs, regularArgs...)) - } else { - err = set.Parse(ctx.Args().Tail()) - } - } else { - err = set.Parse(ctx.Args().Tail()) + if ctx.App.UseShortOptionHandling { + c.UseShortOptionHandling = true } - nerr := normalizeFlags(c.Flags, set) - if nerr != nil { - fmt.Fprintln(ctx.App.Writer, nerr) - fmt.Fprintln(ctx.App.Writer) - ShowCommandHelp(ctx, c.Name) - return nerr - } + set, err := c.parseFlags(ctx.Args().Tail()) context := NewContext(ctx.App, set, ctx) context.Command = c @@ -167,12 +125,12 @@ func (c Command) Run(ctx *Context) (err error) { if err != nil { if c.OnUsageError != nil { err := c.OnUsageError(context, err, false) - HandleExitCoder(err) + context.App.handleExitCoder(context, err) return err } - fmt.Fprintln(context.App.Writer, "Incorrect Usage:", err.Error()) - fmt.Fprintln(context.App.Writer) - ShowCommandHelp(context, c.Name) + _, _ = fmt.Fprintln(context.App.Writer, "Incorrect Usage:", err.Error()) + _, _ = fmt.Fprintln(context.App.Writer) + _ = ShowCommandHelp(context, c.Name) return err } @@ -180,11 +138,17 @@ func (c Command) Run(ctx *Context) (err error) { return nil } + cerr := checkRequiredFlags(c.Flags, context) + if cerr != nil { + _ = ShowCommandHelp(context, c.Name) + return cerr + } + if c.After != nil { defer func() { afterErr := c.After(context) if afterErr != nil { - HandleExitCoder(err) + context.App.handleExitCoder(context, err) if err != nil { err = NewMultiError(err, afterErr) } else { @@ -197,8 +161,8 @@ func (c Command) Run(ctx *Context) (err error) { if c.Before != nil { err = c.Before(context) if err != nil { - ShowCommandHelp(context, c.Name) - HandleExitCoder(err) + _ = ShowCommandHelp(context, c.Name) + context.App.handleExitCoder(context, err) return err } } @@ -210,11 +174,76 @@ func (c Command) Run(ctx *Context) (err error) { err = HandleAction(c.Action, context) if err != nil { - HandleExitCoder(err) + context.App.handleExitCoder(context, err) } return err } +func (c *Command) parseFlags(args Args) (*flag.FlagSet, error) { + if c.SkipFlagParsing { + set, err := c.newFlagSet() + if err != nil { + return nil, err + } + + return set, set.Parse(append([]string{"--"}, args...)) + } + + if !c.SkipArgReorder { + args = reorderArgs(args) + } + + set, err := parseIter(c, args) + if err != nil { + return nil, err + } + + err = normalizeFlags(c.Flags, set) + if err != nil { + return nil, err + } + + return set, nil +} + +func (c *Command) newFlagSet() (*flag.FlagSet, error) { + return flagSet(c.Name, c.Flags) +} + +func (c *Command) useShortOptionHandling() bool { + return c.UseShortOptionHandling +} + +// reorderArgs moves all flags before arguments as this is what flag expects +func reorderArgs(args []string) []string { + var nonflags, flags []string + + readFlagValue := false + for i, arg := range args { + if arg == "--" { + nonflags = append(nonflags, args[i:]...) + break + } + + if readFlagValue && !strings.HasPrefix(arg, "-") && !strings.HasPrefix(arg, "--") { + readFlagValue = false + flags = append(flags, arg) + continue + } + readFlagValue = false + + if arg != "-" && strings.HasPrefix(arg, "-") { + flags = append(flags, arg) + + readFlagValue = !strings.Contains(arg, "=") + } else { + nonflags = append(nonflags, arg) + } + } + + return append(flags, nonflags...) +} + // Names returns the names including short names and aliases. 
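The new UseShortOptionHandling field and the parseFlags/reorderArgs helpers above allow several single-character bool flags to be combined into one token (foobar -o -v can be written foobar -ov). A minimal sketch, with the app name, command name and flag names made up for illustration:

package main

import (
	"fmt"
	"log"
	"os"

	"github.com/urfave/cli"
)

func main() {
	app := cli.NewApp()
	app.Name = "demo"
	// Opting in at the app level propagates to each command via
	// Command.Run (see c.UseShortOptionHandling = true above).
	app.UseShortOptionHandling = true

	app.Commands = cli.Commands{
		cli.Command{
			Name: "serve",
			Flags: []cli.Flag{
				cli.BoolFlag{Name: "o"},
				cli.BoolFlag{Name: "v"},
			},
			Action: func(c *cli.Context) error {
				fmt.Println("o =", c.Bool("o"), "v =", c.Bool("v"))
				return nil
			},
		},
	}

	if err := app.Run(os.Args); err != nil {
		log.Fatal(err)
	}
}

With this sketch, running "demo serve -ov" behaves like "demo serve -o -v".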
func (c Command) Names() []string { names := []string{c.Name} @@ -239,6 +268,7 @@ func (c Command) HasName(name string) bool { func (c Command) startApp(ctx *Context) error { app := NewApp() app.Metadata = ctx.App.Metadata + app.ExitErrHandler = ctx.App.ExitErrHandler // set the name and usage app.Name = fmt.Sprintf("%s %s", ctx.App.Name, c.Name) if c.HelpName == "" { @@ -267,6 +297,7 @@ func (c Command) startApp(ctx *Context) error { app.Email = ctx.App.Email app.Writer = ctx.App.Writer app.ErrWriter = ctx.App.ErrWriter + app.UseShortOptionHandling = ctx.App.UseShortOptionHandling app.categories = CommandCategories{} for _, command := range c.Subcommands { diff --git a/vendor/github.com/urfave/cli/context.go b/vendor/github.com/urfave/cli/context.go index db94191e2..ecfc03282 100644 --- a/vendor/github.com/urfave/cli/context.go +++ b/vendor/github.com/urfave/cli/context.go @@ -3,6 +3,8 @@ package cli import ( "errors" "flag" + "fmt" + "os" "reflect" "strings" "syscall" @@ -73,7 +75,7 @@ func (c *Context) IsSet(name string) bool { // change in version 2 to add `IsSet` to the Flag interface to push the // responsibility closer to where the information required to determine // whether a flag is set by non-standard means such as environment - // variables is avaliable. + // variables is available. // // See https://github.com/urfave/cli/issues/294 for additional discussion flags := c.Command.Flags @@ -93,18 +95,26 @@ func (c *Context) IsSet(name string) bool { val = val.Elem() } - envVarValue := val.FieldByName("EnvVar") - if !envVarValue.IsValid() { - return + filePathValue := val.FieldByName("FilePath") + if filePathValue.IsValid() { + eachName(filePathValue.String(), func(filePath string) { + if _, err := os.Stat(filePath); err == nil { + c.setFlags[name] = true + return + } + }) } - eachName(envVarValue.String(), func(envVar string) { - envVar = strings.TrimSpace(envVar) - if _, ok := syscall.Getenv(envVar); ok { - c.setFlags[name] = true - return - } - }) + envVarValue := val.FieldByName("EnvVar") + if envVarValue.IsValid() { + eachName(envVarValue.String(), func(envVar string) { + envVar = strings.TrimSpace(envVar) + if _, ok := syscall.Getenv(envVar); ok { + c.setFlags[name] = true + return + } + }) + } }) } } @@ -129,8 +139,8 @@ func (c *Context) GlobalIsSet(name string) bool { // FlagNames returns a slice of flag names used in this context. func (c *Context) FlagNames() (names []string) { - for _, flag := range c.Command.Flags { - name := strings.Split(flag.GetName(), ",")[0] + for _, f := range c.Command.Flags { + name := strings.Split(f.GetName(), ",")[0] if name == "help" { continue } @@ -141,8 +151,8 @@ func (c *Context) FlagNames() (names []string) { // GlobalFlagNames returns a slice of global flag names used by the app. 
func (c *Context) GlobalFlagNames() (names []string) { - for _, flag := range c.App.Flags { - name := strings.Split(flag.GetName(), ",")[0] + for _, f := range c.App.Flags { + name := strings.Split(f.GetName(), ",")[0] if name == "help" || name == "version" { continue } @@ -240,7 +250,7 @@ func copyFlag(name string, ff *flag.Flag, set *flag.FlagSet) { switch ff.Value.(type) { case *StringSlice: default: - set.Set(name, ff.Value.String()) + _ = set.Set(name, ff.Value.String()) } } @@ -276,3 +286,54 @@ func normalizeFlags(flags []Flag, set *flag.FlagSet) error { } return nil } + +type requiredFlagsErr interface { + error + getMissingFlags() []string +} + +type errRequiredFlags struct { + missingFlags []string +} + +func (e *errRequiredFlags) Error() string { + numberOfMissingFlags := len(e.missingFlags) + if numberOfMissingFlags == 1 { + return fmt.Sprintf("Required flag %q not set", e.missingFlags[0]) + } + joinedMissingFlags := strings.Join(e.missingFlags, ", ") + return fmt.Sprintf("Required flags %q not set", joinedMissingFlags) +} + +func (e *errRequiredFlags) getMissingFlags() []string { + return e.missingFlags +} + +func checkRequiredFlags(flags []Flag, context *Context) requiredFlagsErr { + var missingFlags []string + for _, f := range flags { + if rf, ok := f.(RequiredFlag); ok && rf.IsRequired() { + var flagPresent bool + var flagName string + for _, key := range strings.Split(f.GetName(), ",") { + if len(key) > 1 { + flagName = key + } + + if context.IsSet(strings.TrimSpace(key)) { + flagPresent = true + } + } + + if !flagPresent && flagName != "" { + missingFlags = append(missingFlags, flagName) + } + } + } + + if len(missingFlags) != 0 { + return &errRequiredFlags{missingFlags: missingFlags} + } + + return nil +} diff --git a/vendor/github.com/urfave/cli/docs.go b/vendor/github.com/urfave/cli/docs.go new file mode 100644 index 000000000..5b9456612 --- /dev/null +++ b/vendor/github.com/urfave/cli/docs.go @@ -0,0 +1,148 @@ +package cli + +import ( + "bytes" + "fmt" + "io" + "sort" + "strings" + "text/template" + + "github.com/cpuguy83/go-md2man/v2/md2man" +) + +// ToMarkdown creates a markdown string for the `*App` +// The function errors if either parsing or writing of the string fails. +func (a *App) ToMarkdown() (string, error) { + var w bytes.Buffer + if err := a.writeDocTemplate(&w); err != nil { + return "", err + } + return w.String(), nil +} + +// ToMan creates a man page string for the `*App` +// The function errors if either parsing or writing of the string fails. 
+func (a *App) ToMan() (string, error) { + var w bytes.Buffer + if err := a.writeDocTemplate(&w); err != nil { + return "", err + } + man := md2man.Render(w.Bytes()) + return string(man), nil +} + +type cliTemplate struct { + App *App + Commands []string + GlobalArgs []string + SynopsisArgs []string +} + +func (a *App) writeDocTemplate(w io.Writer) error { + const name = "cli" + t, err := template.New(name).Parse(MarkdownDocTemplate) + if err != nil { + return err + } + return t.ExecuteTemplate(w, name, &cliTemplate{ + App: a, + Commands: prepareCommands(a.Commands, 0), + GlobalArgs: prepareArgsWithValues(a.Flags), + SynopsisArgs: prepareArgsSynopsis(a.Flags), + }) +} + +func prepareCommands(commands []Command, level int) []string { + coms := []string{} + for i := range commands { + command := &commands[i] + if command.Hidden { + continue + } + usage := "" + if command.Usage != "" { + usage = command.Usage + } + + prepared := fmt.Sprintf("%s %s\n\n%s\n", + strings.Repeat("#", level+2), + strings.Join(command.Names(), ", "), + usage, + ) + + flags := prepareArgsWithValues(command.Flags) + if len(flags) > 0 { + prepared += fmt.Sprintf("\n%s", strings.Join(flags, "\n")) + } + + coms = append(coms, prepared) + + // recursevly iterate subcommands + if len(command.Subcommands) > 0 { + coms = append( + coms, + prepareCommands(command.Subcommands, level+1)..., + ) + } + } + + return coms +} + +func prepareArgsWithValues(flags []Flag) []string { + return prepareFlags(flags, ", ", "**", "**", `""`, true) +} + +func prepareArgsSynopsis(flags []Flag) []string { + return prepareFlags(flags, "|", "[", "]", "[value]", false) +} + +func prepareFlags( + flags []Flag, + sep, opener, closer, value string, + addDetails bool, +) []string { + args := []string{} + for _, f := range flags { + flag, ok := f.(DocGenerationFlag) + if !ok { + continue + } + modifiedArg := opener + for _, s := range strings.Split(flag.GetName(), ",") { + trimmed := strings.TrimSpace(s) + if len(modifiedArg) > len(opener) { + modifiedArg += sep + } + if len(trimmed) > 1 { + modifiedArg += fmt.Sprintf("--%s", trimmed) + } else { + modifiedArg += fmt.Sprintf("-%s", trimmed) + } + } + modifiedArg += closer + if flag.TakesValue() { + modifiedArg += fmt.Sprintf("=%s", value) + } + + if addDetails { + modifiedArg += flagDetails(flag) + } + + args = append(args, modifiedArg+"\n") + + } + sort.Strings(args) + return args +} + +// flagDetails returns a string containing the flags metadata +func flagDetails(flag DocGenerationFlag) string { + description := flag.GetUsage() + value := flag.GetValue() + if value != "" { + description += " (default: " + value + ")" + } + return ": " + description +} diff --git a/vendor/github.com/urfave/cli/fish.go b/vendor/github.com/urfave/cli/fish.go new file mode 100644 index 000000000..cf183af61 --- /dev/null +++ b/vendor/github.com/urfave/cli/fish.go @@ -0,0 +1,194 @@ +package cli + +import ( + "bytes" + "fmt" + "io" + "strings" + "text/template" +) + +// ToFishCompletion creates a fish completion string for the `*App` +// The function errors if either parsing or writing of the string fails. 
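docs.go above introduces ToMarkdown and ToMan, which render an App's commands and flags through MarkdownDocTemplate and md2man. A minimal sketch of calling both; the app definition is illustrative, and ToFishCompletion from fish.go just below is used the same way:

package main

import (
	"fmt"
	"log"

	"github.com/urfave/cli"
)

func main() {
	app := cli.NewApp()
	app.Name = "demo"
	app.Usage = "show generated docs"

	// Render the app's commands and flags as Markdown.
	md, err := app.ToMarkdown()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(md)

	// Render the same information as a man page (roff) via md2man.
	man, err := app.ToMan()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(man)
}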
+func (a *App) ToFishCompletion() (string, error) { + var w bytes.Buffer + if err := a.writeFishCompletionTemplate(&w); err != nil { + return "", err + } + return w.String(), nil +} + +type fishCompletionTemplate struct { + App *App + Completions []string + AllCommands []string +} + +func (a *App) writeFishCompletionTemplate(w io.Writer) error { + const name = "cli" + t, err := template.New(name).Parse(FishCompletionTemplate) + if err != nil { + return err + } + allCommands := []string{} + + // Add global flags + completions := a.prepareFishFlags(a.VisibleFlags(), allCommands) + + // Add help flag + if !a.HideHelp { + completions = append( + completions, + a.prepareFishFlags([]Flag{HelpFlag}, allCommands)..., + ) + } + + // Add version flag + if !a.HideVersion { + completions = append( + completions, + a.prepareFishFlags([]Flag{VersionFlag}, allCommands)..., + ) + } + + // Add commands and their flags + completions = append( + completions, + a.prepareFishCommands(a.VisibleCommands(), &allCommands, []string{})..., + ) + + return t.ExecuteTemplate(w, name, &fishCompletionTemplate{ + App: a, + Completions: completions, + AllCommands: allCommands, + }) +} + +func (a *App) prepareFishCommands(commands []Command, allCommands *[]string, previousCommands []string) []string { + completions := []string{} + for i := range commands { + command := &commands[i] + + if command.Hidden { + continue + } + + var completion strings.Builder + completion.WriteString(fmt.Sprintf( + "complete -r -c %s -n '%s' -a '%s'", + a.Name, + a.fishSubcommandHelper(previousCommands), + strings.Join(command.Names(), " "), + )) + + if command.Usage != "" { + completion.WriteString(fmt.Sprintf(" -d '%s'", + escapeSingleQuotes(command.Usage))) + } + + if !command.HideHelp { + completions = append( + completions, + a.prepareFishFlags([]Flag{HelpFlag}, command.Names())..., + ) + } + + *allCommands = append(*allCommands, command.Names()...) 
+ completions = append(completions, completion.String()) + completions = append( + completions, + a.prepareFishFlags(command.Flags, command.Names())..., + ) + + // recursevly iterate subcommands + if len(command.Subcommands) > 0 { + completions = append( + completions, + a.prepareFishCommands( + command.Subcommands, allCommands, command.Names(), + )..., + ) + } + } + + return completions +} + +func (a *App) prepareFishFlags(flags []Flag, previousCommands []string) []string { + completions := []string{} + for _, f := range flags { + flag, ok := f.(DocGenerationFlag) + if !ok { + continue + } + + completion := &strings.Builder{} + completion.WriteString(fmt.Sprintf( + "complete -c %s -n '%s'", + a.Name, + a.fishSubcommandHelper(previousCommands), + )) + + fishAddFileFlag(f, completion) + + for idx, opt := range strings.Split(flag.GetName(), ",") { + if idx == 0 { + completion.WriteString(fmt.Sprintf( + " -l %s", strings.TrimSpace(opt), + )) + } else { + completion.WriteString(fmt.Sprintf( + " -s %s", strings.TrimSpace(opt), + )) + + } + } + + if flag.TakesValue() { + completion.WriteString(" -r") + } + + if flag.GetUsage() != "" { + completion.WriteString(fmt.Sprintf(" -d '%s'", + escapeSingleQuotes(flag.GetUsage()))) + } + + completions = append(completions, completion.String()) + } + + return completions +} + +func fishAddFileFlag(flag Flag, completion *strings.Builder) { + switch f := flag.(type) { + case GenericFlag: + if f.TakesFile { + return + } + case StringFlag: + if f.TakesFile { + return + } + case StringSliceFlag: + if f.TakesFile { + return + } + } + completion.WriteString(" -f") +} + +func (a *App) fishSubcommandHelper(allCommands []string) string { + fishHelper := fmt.Sprintf("__fish_%s_no_subcommand", a.Name) + if len(allCommands) > 0 { + fishHelper = fmt.Sprintf( + "__fish_seen_subcommand_from %s", + strings.Join(allCommands, " "), + ) + } + return fishHelper + +} + +func escapeSingleQuotes(input string) string { + return strings.Replace(input, `'`, `\'`, -1) +} diff --git a/vendor/github.com/urfave/cli/flag-types.json b/vendor/github.com/urfave/cli/flag-types.json deleted file mode 100644 index 122310785..000000000 --- a/vendor/github.com/urfave/cli/flag-types.json +++ /dev/null @@ -1,93 +0,0 @@ -[ - { - "name": "Bool", - "type": "bool", - "value": false, - "context_default": "false", - "parser": "strconv.ParseBool(f.Value.String())" - }, - { - "name": "BoolT", - "type": "bool", - "value": false, - "doctail": " that is true by default", - "context_default": "false", - "parser": "strconv.ParseBool(f.Value.String())" - }, - { - "name": "Duration", - "type": "time.Duration", - "doctail": " (see https://golang.org/pkg/time/#ParseDuration)", - "context_default": "0", - "parser": "time.ParseDuration(f.Value.String())" - }, - { - "name": "Float64", - "type": "float64", - "context_default": "0", - "parser": "strconv.ParseFloat(f.Value.String(), 64)" - }, - { - "name": "Generic", - "type": "Generic", - "dest": false, - "context_default": "nil", - "context_type": "interface{}" - }, - { - "name": "Int64", - "type": "int64", - "context_default": "0", - "parser": "strconv.ParseInt(f.Value.String(), 0, 64)" - }, - { - "name": "Int", - "type": "int", - "context_default": "0", - "parser": "strconv.ParseInt(f.Value.String(), 0, 64)", - "parser_cast": "int(parsed)" - }, - { - "name": "IntSlice", - "type": "*IntSlice", - "dest": false, - "context_default": "nil", - "context_type": "[]int", - "parser": "(f.Value.(*IntSlice)).Value(), error(nil)" - }, - { - "name": "Int64Slice", - "type": 
"*Int64Slice", - "dest": false, - "context_default": "nil", - "context_type": "[]int64", - "parser": "(f.Value.(*Int64Slice)).Value(), error(nil)" - }, - { - "name": "String", - "type": "string", - "context_default": "\"\"", - "parser": "f.Value.String(), error(nil)" - }, - { - "name": "StringSlice", - "type": "*StringSlice", - "dest": false, - "context_default": "nil", - "context_type": "[]string", - "parser": "(f.Value.(*StringSlice)).Value(), error(nil)" - }, - { - "name": "Uint64", - "type": "uint64", - "context_default": "0", - "parser": "strconv.ParseUint(f.Value.String(), 0, 64)" - }, - { - "name": "Uint", - "type": "uint", - "context_default": "0", - "parser": "strconv.ParseUint(f.Value.String(), 0, 64)", - "parser_cast": "uint(parsed)" - } -] diff --git a/vendor/github.com/urfave/cli/flag.go b/vendor/github.com/urfave/cli/flag.go index 877ff3523..3c0527077 100644 --- a/vendor/github.com/urfave/cli/flag.go +++ b/vendor/github.com/urfave/cli/flag.go @@ -3,12 +3,12 @@ package cli import ( "flag" "fmt" + "io/ioutil" "reflect" "runtime" "strconv" "strings" "syscall" - "time" ) const defaultPlaceholder = "value" @@ -37,6 +37,18 @@ var HelpFlag Flag = BoolFlag{ // to display a flag. var FlagStringer FlagStringFunc = stringifyFlag +// FlagNamePrefixer converts a full flag name and its placeholder into the help +// message flag prefix. This is used by the default FlagStringer. +var FlagNamePrefixer FlagNamePrefixFunc = prefixedNames + +// FlagEnvHinter annotates flag help message with the environment variable +// details. This is used by the default FlagStringer. +var FlagEnvHinter FlagEnvHintFunc = withEnvHint + +// FlagFileHinter annotates flag help message with the environment variable +// details. This is used by the default FlagStringer. +var FlagFileHinter FlagFileHintFunc = withFileHint + // FlagsByName is a slice of Flag. type FlagsByName []Flag @@ -45,7 +57,7 @@ func (f FlagsByName) Len() int { } func (f FlagsByName) Less(i, j int) bool { - return f[i].GetName() < f[j].GetName() + return lexicographicLess(f[i].GetName(), f[j].GetName()) } func (f FlagsByName) Swap(i, j int) { @@ -62,6 +74,29 @@ type Flag interface { GetName() string } +// RequiredFlag is an interface that allows us to mark flags as required +// it allows flags required flags to be backwards compatible with the Flag interface +type RequiredFlag interface { + Flag + + IsRequired() bool +} + +// DocGenerationFlag is an interface that allows documentation generation for the flag +type DocGenerationFlag interface { + Flag + + // TakesValue returns true of the flag takes a value, otherwise false + TakesValue() bool + + // GetUsage returns the usage string for the flag + GetUsage() string + + // GetValue returns the flags value as string representation and an empty + // string if the flag takes no value at all. 
+ GetValue() string +} + // errorableFlag is an interface that allows us to return errors during apply // it allows flags defined in this library to return errors in a fashion backwards compatible // TODO remove in v2 and modify the existing Flag interface to return errors @@ -84,6 +119,7 @@ func flagSet(name string, flags []Flag) (*flag.FlagSet, error) { f.Apply(set) } } + set.SetOutput(ioutil.Discard) return set, nil } @@ -95,544 +131,12 @@ func eachName(longName string, fn func(string)) { } } -// Generic is a generic parseable type identified by a specific flag -type Generic interface { - Set(value string) error - String() string -} - -// Apply takes the flagset and calls Set on the generic flag with the value -// provided by the user for parsing by the flag -// Ignores parsing errors -func (f GenericFlag) Apply(set *flag.FlagSet) { - f.ApplyWithError(set) -} - -// ApplyWithError takes the flagset and calls Set on the generic flag with the value -// provided by the user for parsing by the flag -func (f GenericFlag) ApplyWithError(set *flag.FlagSet) error { - val := f.Value - if f.EnvVar != "" { - for _, envVar := range strings.Split(f.EnvVar, ",") { - envVar = strings.TrimSpace(envVar) - if envVal, ok := syscall.Getenv(envVar); ok { - if err := val.Set(envVal); err != nil { - return fmt.Errorf("could not parse %s as value for flag %s: %s", envVal, f.Name, err) - } - break - } - } - } - - eachName(f.Name, func(name string) { - set.Var(f.Value, name, f.Usage) - }) - - return nil -} - -// StringSlice is an opaque type for []string to satisfy flag.Value and flag.Getter -type StringSlice []string - -// Set appends the string value to the list of values -func (f *StringSlice) Set(value string) error { - *f = append(*f, value) - return nil -} - -// String returns a readable representation of this value (for usage defaults) -func (f *StringSlice) String() string { - return fmt.Sprintf("%s", *f) -} - -// Value returns the slice of strings set by this flag -func (f *StringSlice) Value() []string { - return *f -} - -// Get returns the slice of strings set by this flag -func (f *StringSlice) Get() interface{} { - return *f -} - -// Apply populates the flag given the flag set and environment -// Ignores errors -func (f StringSliceFlag) Apply(set *flag.FlagSet) { - f.ApplyWithError(set) -} - -// ApplyWithError populates the flag given the flag set and environment -func (f StringSliceFlag) ApplyWithError(set *flag.FlagSet) error { - if f.EnvVar != "" { - for _, envVar := range strings.Split(f.EnvVar, ",") { - envVar = strings.TrimSpace(envVar) - if envVal, ok := syscall.Getenv(envVar); ok { - newVal := &StringSlice{} - for _, s := range strings.Split(envVal, ",") { - s = strings.TrimSpace(s) - if err := newVal.Set(s); err != nil { - return fmt.Errorf("could not parse %s as string value for flag %s: %s", envVal, f.Name, err) - } - } - f.Value = newVal - break - } - } - } - - eachName(f.Name, func(name string) { - if f.Value == nil { - f.Value = &StringSlice{} - } - set.Var(f.Value, name, f.Usage) - }) - - return nil -} - -// IntSlice is an opaque type for []int to satisfy flag.Value and flag.Getter -type IntSlice []int - -// Set parses the value into an integer and appends it to the list of values -func (f *IntSlice) Set(value string) error { - tmp, err := strconv.Atoi(value) - if err != nil { - return err - } - *f = append(*f, tmp) - return nil -} - -// String returns a readable representation of this value (for usage defaults) -func (f *IntSlice) String() string { - return fmt.Sprintf("%#v", *f) -} - 
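The FlagNamePrefixer, FlagEnvHinter and FlagFileHinter variables introduced above are the hooks the default FlagStringer uses when rendering flag help. A sketch of swapping two of them out, assuming the signatures func(fullName, placeholder string) string and func(envVar, str string) string taken from the default implementations (prefixedNames, withEnvHint); the replacement bodies are illustrative only:

package main

import (
	"fmt"
	"strings"

	"github.com/urfave/cli"
)

func main() {
	// Render env-var hints as "(env: FOO)" instead of the default "[$FOO]".
	cli.FlagEnvHinter = func(envVar, str string) string {
		if envVar == "" {
			return str
		}
		return str + " (env: " + envVar + ")"
	}

	// Prefix every name with "--", even single-character aliases.
	cli.FlagNamePrefixer = func(fullName, placeholder string) string {
		var names []string
		for _, name := range strings.Split(fullName, ",") {
			names = append(names, "--"+strings.TrimSpace(name))
		}
		out := strings.Join(names, ", ")
		if placeholder != "" {
			out += " " + placeholder
		}
		return out
	}

	f := cli.BoolFlag{Name: "force, f", Usage: "overwrite existing files", EnvVar: "DEMO_FORCE"}
	// BoolFlag.String calls FlagStringer, which uses the hooks above.
	fmt.Println(f.String())
}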
-// Value returns the slice of ints set by this flag -func (f *IntSlice) Value() []int { - return *f -} - -// Get returns the slice of ints set by this flag -func (f *IntSlice) Get() interface{} { - return *f -} - -// Apply populates the flag given the flag set and environment -// Ignores errors -func (f IntSliceFlag) Apply(set *flag.FlagSet) { - f.ApplyWithError(set) -} - -// ApplyWithError populates the flag given the flag set and environment -func (f IntSliceFlag) ApplyWithError(set *flag.FlagSet) error { - if f.EnvVar != "" { - for _, envVar := range strings.Split(f.EnvVar, ",") { - envVar = strings.TrimSpace(envVar) - if envVal, ok := syscall.Getenv(envVar); ok { - newVal := &IntSlice{} - for _, s := range strings.Split(envVal, ",") { - s = strings.TrimSpace(s) - if err := newVal.Set(s); err != nil { - return fmt.Errorf("could not parse %s as int slice value for flag %s: %s", envVal, f.Name, err) - } - } - f.Value = newVal - break - } - } - } - - eachName(f.Name, func(name string) { - if f.Value == nil { - f.Value = &IntSlice{} - } - set.Var(f.Value, name, f.Usage) - }) - - return nil -} - -// Int64Slice is an opaque type for []int to satisfy flag.Value and flag.Getter -type Int64Slice []int64 - -// Set parses the value into an integer and appends it to the list of values -func (f *Int64Slice) Set(value string) error { - tmp, err := strconv.ParseInt(value, 10, 64) - if err != nil { - return err - } - *f = append(*f, tmp) - return nil -} - -// String returns a readable representation of this value (for usage defaults) -func (f *Int64Slice) String() string { - return fmt.Sprintf("%#v", *f) -} - -// Value returns the slice of ints set by this flag -func (f *Int64Slice) Value() []int64 { - return *f -} - -// Get returns the slice of ints set by this flag -func (f *Int64Slice) Get() interface{} { - return *f -} - -// Apply populates the flag given the flag set and environment -// Ignores errors -func (f Int64SliceFlag) Apply(set *flag.FlagSet) { - f.ApplyWithError(set) -} - -// ApplyWithError populates the flag given the flag set and environment -func (f Int64SliceFlag) ApplyWithError(set *flag.FlagSet) error { - if f.EnvVar != "" { - for _, envVar := range strings.Split(f.EnvVar, ",") { - envVar = strings.TrimSpace(envVar) - if envVal, ok := syscall.Getenv(envVar); ok { - newVal := &Int64Slice{} - for _, s := range strings.Split(envVal, ",") { - s = strings.TrimSpace(s) - if err := newVal.Set(s); err != nil { - return fmt.Errorf("could not parse %s as int64 slice value for flag %s: %s", envVal, f.Name, err) - } - } - f.Value = newVal - break - } - } - } - - eachName(f.Name, func(name string) { - if f.Value == nil { - f.Value = &Int64Slice{} - } - set.Var(f.Value, name, f.Usage) - }) - return nil -} - -// Apply populates the flag given the flag set and environment -// Ignores errors -func (f BoolFlag) Apply(set *flag.FlagSet) { - f.ApplyWithError(set) -} - -// ApplyWithError populates the flag given the flag set and environment -func (f BoolFlag) ApplyWithError(set *flag.FlagSet) error { - val := false - if f.EnvVar != "" { - for _, envVar := range strings.Split(f.EnvVar, ",") { - envVar = strings.TrimSpace(envVar) - if envVal, ok := syscall.Getenv(envVar); ok { - if envVal == "" { - val = false - break - } - - envValBool, err := strconv.ParseBool(envVal) - if err != nil { - return fmt.Errorf("could not parse %s as bool value for flag %s: %s", envVal, f.Name, err) - } - - val = envValBool - break - } - } - } - - eachName(f.Name, func(name string) { - if f.Destination != nil { - 
set.BoolVar(f.Destination, name, val, f.Usage) - return - } - set.Bool(name, val, f.Usage) - }) - - return nil -} - -// Apply populates the flag given the flag set and environment -// Ignores errors -func (f BoolTFlag) Apply(set *flag.FlagSet) { - f.ApplyWithError(set) -} - -// ApplyWithError populates the flag given the flag set and environment -func (f BoolTFlag) ApplyWithError(set *flag.FlagSet) error { - val := true - if f.EnvVar != "" { - for _, envVar := range strings.Split(f.EnvVar, ",") { - envVar = strings.TrimSpace(envVar) - if envVal, ok := syscall.Getenv(envVar); ok { - if envVal == "" { - val = false - break - } - - envValBool, err := strconv.ParseBool(envVal) - if err != nil { - return fmt.Errorf("could not parse %s as bool value for flag %s: %s", envVal, f.Name, err) - } - - val = envValBool - break - } - } - } - - eachName(f.Name, func(name string) { - if f.Destination != nil { - set.BoolVar(f.Destination, name, val, f.Usage) - return - } - set.Bool(name, val, f.Usage) - }) - - return nil -} - -// Apply populates the flag given the flag set and environment -// Ignores errors -func (f StringFlag) Apply(set *flag.FlagSet) { - f.ApplyWithError(set) -} - -// ApplyWithError populates the flag given the flag set and environment -func (f StringFlag) ApplyWithError(set *flag.FlagSet) error { - if f.EnvVar != "" { - for _, envVar := range strings.Split(f.EnvVar, ",") { - envVar = strings.TrimSpace(envVar) - if envVal, ok := syscall.Getenv(envVar); ok { - f.Value = envVal - break - } - } - } - - eachName(f.Name, func(name string) { - if f.Destination != nil { - set.StringVar(f.Destination, name, f.Value, f.Usage) - return - } - set.String(name, f.Value, f.Usage) - }) - - return nil -} - -// Apply populates the flag given the flag set and environment -// Ignores errors -func (f IntFlag) Apply(set *flag.FlagSet) { - f.ApplyWithError(set) -} - -// ApplyWithError populates the flag given the flag set and environment -func (f IntFlag) ApplyWithError(set *flag.FlagSet) error { - if f.EnvVar != "" { - for _, envVar := range strings.Split(f.EnvVar, ",") { - envVar = strings.TrimSpace(envVar) - if envVal, ok := syscall.Getenv(envVar); ok { - envValInt, err := strconv.ParseInt(envVal, 0, 64) - if err != nil { - return fmt.Errorf("could not parse %s as int value for flag %s: %s", envVal, f.Name, err) - } - f.Value = int(envValInt) - break - } - } - } - - eachName(f.Name, func(name string) { - if f.Destination != nil { - set.IntVar(f.Destination, name, f.Value, f.Usage) - return - } - set.Int(name, f.Value, f.Usage) - }) - - return nil -} - -// Apply populates the flag given the flag set and environment -// Ignores errors -func (f Int64Flag) Apply(set *flag.FlagSet) { - f.ApplyWithError(set) -} - -// ApplyWithError populates the flag given the flag set and environment -func (f Int64Flag) ApplyWithError(set *flag.FlagSet) error { - if f.EnvVar != "" { - for _, envVar := range strings.Split(f.EnvVar, ",") { - envVar = strings.TrimSpace(envVar) - if envVal, ok := syscall.Getenv(envVar); ok { - envValInt, err := strconv.ParseInt(envVal, 0, 64) - if err != nil { - return fmt.Errorf("could not parse %s as int value for flag %s: %s", envVal, f.Name, err) - } - - f.Value = envValInt - break - } - } - } - - eachName(f.Name, func(name string) { - if f.Destination != nil { - set.Int64Var(f.Destination, name, f.Value, f.Usage) - return - } - set.Int64(name, f.Value, f.Usage) - }) - - return nil -} - -// Apply populates the flag given the flag set and environment -// Ignores errors -func (f UintFlag) 
Apply(set *flag.FlagSet) { - f.ApplyWithError(set) -} - -// ApplyWithError populates the flag given the flag set and environment -func (f UintFlag) ApplyWithError(set *flag.FlagSet) error { - if f.EnvVar != "" { - for _, envVar := range strings.Split(f.EnvVar, ",") { - envVar = strings.TrimSpace(envVar) - if envVal, ok := syscall.Getenv(envVar); ok { - envValInt, err := strconv.ParseUint(envVal, 0, 64) - if err != nil { - return fmt.Errorf("could not parse %s as uint value for flag %s: %s", envVal, f.Name, err) - } - - f.Value = uint(envValInt) - break - } - } - } - - eachName(f.Name, func(name string) { - if f.Destination != nil { - set.UintVar(f.Destination, name, f.Value, f.Usage) - return - } - set.Uint(name, f.Value, f.Usage) - }) - - return nil -} - -// Apply populates the flag given the flag set and environment -// Ignores errors -func (f Uint64Flag) Apply(set *flag.FlagSet) { - f.ApplyWithError(set) -} - -// ApplyWithError populates the flag given the flag set and environment -func (f Uint64Flag) ApplyWithError(set *flag.FlagSet) error { - if f.EnvVar != "" { - for _, envVar := range strings.Split(f.EnvVar, ",") { - envVar = strings.TrimSpace(envVar) - if envVal, ok := syscall.Getenv(envVar); ok { - envValInt, err := strconv.ParseUint(envVal, 0, 64) - if err != nil { - return fmt.Errorf("could not parse %s as uint64 value for flag %s: %s", envVal, f.Name, err) - } - - f.Value = uint64(envValInt) - break - } - } - } - - eachName(f.Name, func(name string) { - if f.Destination != nil { - set.Uint64Var(f.Destination, name, f.Value, f.Usage) - return - } - set.Uint64(name, f.Value, f.Usage) - }) - - return nil -} - -// Apply populates the flag given the flag set and environment -// Ignores errors -func (f DurationFlag) Apply(set *flag.FlagSet) { - f.ApplyWithError(set) -} - -// ApplyWithError populates the flag given the flag set and environment -func (f DurationFlag) ApplyWithError(set *flag.FlagSet) error { - if f.EnvVar != "" { - for _, envVar := range strings.Split(f.EnvVar, ",") { - envVar = strings.TrimSpace(envVar) - if envVal, ok := syscall.Getenv(envVar); ok { - envValDuration, err := time.ParseDuration(envVal) - if err != nil { - return fmt.Errorf("could not parse %s as duration for flag %s: %s", envVal, f.Name, err) - } - - f.Value = envValDuration - break - } - } - } - - eachName(f.Name, func(name string) { - if f.Destination != nil { - set.DurationVar(f.Destination, name, f.Value, f.Usage) - return - } - set.Duration(name, f.Value, f.Usage) - }) - - return nil -} - -// Apply populates the flag given the flag set and environment -// Ignores errors -func (f Float64Flag) Apply(set *flag.FlagSet) { - f.ApplyWithError(set) -} - -// ApplyWithError populates the flag given the flag set and environment -func (f Float64Flag) ApplyWithError(set *flag.FlagSet) error { - if f.EnvVar != "" { - for _, envVar := range strings.Split(f.EnvVar, ",") { - envVar = strings.TrimSpace(envVar) - if envVal, ok := syscall.Getenv(envVar); ok { - envValFloat, err := strconv.ParseFloat(envVal, 10) - if err != nil { - return fmt.Errorf("could not parse %s as float64 value for flag %s: %s", envVal, f.Name, err) - } - - f.Value = float64(envValFloat) - break - } - } - } - - eachName(f.Name, func(name string) { - if f.Destination != nil { - set.Float64Var(f.Destination, name, f.Value, f.Usage) - return - } - set.Float64(name, f.Value, f.Usage) - }) - - return nil -} - func visibleFlags(fl []Flag) []Flag { - visible := []Flag{} - for _, flag := range fl { - field := flagValue(flag).FieldByName("Hidden") + 
var visible []Flag + for _, f := range fl { + field := flagValue(f).FieldByName("Hidden") if !field.IsValid() || !field.Bool() { - visible = append(visible, flag) + visible = append(visible, f) } } return visible @@ -692,11 +196,19 @@ func withEnvHint(envVar, str string) string { suffix = "%" sep = "%, %" } - envText = fmt.Sprintf(" [%s%s%s]", prefix, strings.Join(strings.Split(envVar, ","), sep), suffix) + envText = " [" + prefix + strings.Join(strings.Split(envVar, ","), sep) + suffix + "]" } return str + envText } +func withFileHint(filePath, str string) string { + fileText := "" + if filePath != "" { + fileText = fmt.Sprintf(" [%s]", filePath) + } + return str + fileText +} + func flagValue(f Flag) reflect.Value { fv := reflect.ValueOf(f) for fv.Kind() == reflect.Ptr { @@ -710,14 +222,29 @@ func stringifyFlag(f Flag) string { switch f.(type) { case IntSliceFlag: - return withEnvHint(fv.FieldByName("EnvVar").String(), - stringifyIntSliceFlag(f.(IntSliceFlag))) + return FlagFileHinter( + fv.FieldByName("FilePath").String(), + FlagEnvHinter( + fv.FieldByName("EnvVar").String(), + stringifyIntSliceFlag(f.(IntSliceFlag)), + ), + ) case Int64SliceFlag: - return withEnvHint(fv.FieldByName("EnvVar").String(), - stringifyInt64SliceFlag(f.(Int64SliceFlag))) + return FlagFileHinter( + fv.FieldByName("FilePath").String(), + FlagEnvHinter( + fv.FieldByName("EnvVar").String(), + stringifyInt64SliceFlag(f.(Int64SliceFlag)), + ), + ) case StringSliceFlag: - return withEnvHint(fv.FieldByName("EnvVar").String(), - stringifyStringSliceFlag(f.(StringSliceFlag))) + return FlagFileHinter( + fv.FieldByName("FilePath").String(), + FlagEnvHinter( + fv.FieldByName("EnvVar").String(), + stringifyStringSliceFlag(f.(StringSliceFlag)), + ), + ) } placeholder, usage := unquoteUsage(fv.FieldByName("Usage").String()) @@ -742,17 +269,22 @@ func stringifyFlag(f Flag) string { placeholder = defaultPlaceholder } - usageWithDefault := strings.TrimSpace(fmt.Sprintf("%s%s", usage, defaultValueString)) + usageWithDefault := strings.TrimSpace(usage + defaultValueString) - return withEnvHint(fv.FieldByName("EnvVar").String(), - fmt.Sprintf("%s\t%s", prefixedNames(fv.FieldByName("Name").String(), placeholder), usageWithDefault)) + return FlagFileHinter( + fv.FieldByName("FilePath").String(), + FlagEnvHinter( + fv.FieldByName("EnvVar").String(), + FlagNamePrefixer(fv.FieldByName("Name").String(), placeholder)+"\t"+usageWithDefault, + ), + ) } func stringifyIntSliceFlag(f IntSliceFlag) string { - defaultVals := []string{} + var defaultVals []string if f.Value != nil && len(f.Value.Value()) > 0 { for _, i := range f.Value.Value() { - defaultVals = append(defaultVals, fmt.Sprintf("%d", i)) + defaultVals = append(defaultVals, strconv.Itoa(i)) } } @@ -760,10 +292,10 @@ func stringifyIntSliceFlag(f IntSliceFlag) string { } func stringifyInt64SliceFlag(f Int64SliceFlag) string { - defaultVals := []string{} + var defaultVals []string if f.Value != nil && len(f.Value.Value()) > 0 { for _, i := range f.Value.Value() { - defaultVals = append(defaultVals, fmt.Sprintf("%d", i)) + defaultVals = append(defaultVals, strconv.FormatInt(i, 10)) } } @@ -771,11 +303,11 @@ func stringifyInt64SliceFlag(f Int64SliceFlag) string { } func stringifyStringSliceFlag(f StringSliceFlag) string { - defaultVals := []string{} + var defaultVals []string if f.Value != nil && len(f.Value.Value()) > 0 { for _, s := range f.Value.Value() { if len(s) > 0 { - defaultVals = append(defaultVals, fmt.Sprintf("%q", s)) + defaultVals = append(defaultVals, strconv.Quote(s)) } 
} } @@ -794,6 +326,21 @@ func stringifySliceFlag(usage, name string, defaultVals []string) string { defaultVal = fmt.Sprintf(" (default: %s)", strings.Join(defaultVals, ", ")) } - usageWithDefault := strings.TrimSpace(fmt.Sprintf("%s%s", usage, defaultVal)) - return fmt.Sprintf("%s\t%s", prefixedNames(name, placeholder), usageWithDefault) + usageWithDefault := strings.TrimSpace(usage + defaultVal) + return FlagNamePrefixer(name, placeholder) + "\t" + usageWithDefault +} + +func flagFromFileEnv(filePath, envName string) (val string, ok bool) { + for _, envVar := range strings.Split(envName, ",") { + envVar = strings.TrimSpace(envVar) + if envVal, ok := syscall.Getenv(envVar); ok { + return envVal, true + } + } + for _, fileVar := range strings.Split(filePath, ",") { + if data, err := ioutil.ReadFile(fileVar); err == nil { + return string(data), true + } + } + return "", false } diff --git a/vendor/github.com/urfave/cli/flag_bool.go b/vendor/github.com/urfave/cli/flag_bool.go new file mode 100644 index 000000000..2499b0b52 --- /dev/null +++ b/vendor/github.com/urfave/cli/flag_bool.go @@ -0,0 +1,109 @@ +package cli + +import ( + "flag" + "fmt" + "strconv" +) + +// BoolFlag is a flag with type bool +type BoolFlag struct { + Name string + Usage string + EnvVar string + FilePath string + Required bool + Hidden bool + Destination *bool +} + +// String returns a readable representation of this value +// (for usage defaults) +func (f BoolFlag) String() string { + return FlagStringer(f) +} + +// GetName returns the name of the flag +func (f BoolFlag) GetName() string { + return f.Name +} + +// IsRequired returns whether or not the flag is required +func (f BoolFlag) IsRequired() bool { + return f.Required +} + +// TakesValue returns true of the flag takes a value, otherwise false +func (f BoolFlag) TakesValue() bool { + return false +} + +// GetUsage returns the usage string for the flag +func (f BoolFlag) GetUsage() string { + return f.Usage +} + +// GetValue returns the flags value as string representation and an empty +// string if the flag takes no value at all. 
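flag_bool.go above shows the shape shared by the new per-type flag files: each flag type gains Required and FilePath fields, and ApplyWithError resolves defaults through flagFromFileEnv, which checks the comma-separated EnvVar list first and then tries to read FilePath. A minimal sketch using BoolFlag, the only new definition visible at this point; the flag names, environment variable and file path are made up:

package main

import (
	"fmt"
	"log"
	"os"

	"github.com/urfave/cli"
)

func main() {
	app := cli.NewApp()
	app.Name = "demo"
	app.Flags = []cli.Flag{
		// Required: checkRequiredFlags above rejects a run that does not
		// set this flag.
		cli.BoolFlag{
			Name:     "confirm",
			Usage:    "acknowledge that you really want to do this",
			Required: true,
		},
		// EnvVar is consulted first, then FilePath, before the flag's
		// zero value is used (see flagFromFileEnv above).
		cli.BoolFlag{
			Name:     "verbose",
			EnvVar:   "DEMO_VERBOSE",
			FilePath: "/etc/demo/verbose",
		},
	}
	app.Action = func(c *cli.Context) error {
		fmt.Println("verbose =", c.Bool("verbose"))
		return nil
	}

	if err := app.Run(os.Args); err != nil {
		log.Fatal(err)
	}
}

Running the sketch without --confirm prints the help text and returns the Required flag "confirm" not set error produced by errRequiredFlags above.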
+func (f BoolFlag) GetValue() string { + return "" +} + +// Bool looks up the value of a local BoolFlag, returns +// false if not found +func (c *Context) Bool(name string) bool { + return lookupBool(name, c.flagSet) +} + +// GlobalBool looks up the value of a global BoolFlag, returns +// false if not found +func (c *Context) GlobalBool(name string) bool { + if fs := lookupGlobalFlagSet(name, c); fs != nil { + return lookupBool(name, fs) + } + return false +} + +// Apply populates the flag given the flag set and environment +// Ignores errors +func (f BoolFlag) Apply(set *flag.FlagSet) { + _ = f.ApplyWithError(set) +} + +// ApplyWithError populates the flag given the flag set and environment +func (f BoolFlag) ApplyWithError(set *flag.FlagSet) error { + val := false + if envVal, ok := flagFromFileEnv(f.FilePath, f.EnvVar); ok { + if envVal == "" { + val = false + } else { + envValBool, err := strconv.ParseBool(envVal) + if err != nil { + return fmt.Errorf("could not parse %s as bool value for flag %s: %s", envVal, f.Name, err) + } + val = envValBool + } + } + + eachName(f.Name, func(name string) { + if f.Destination != nil { + set.BoolVar(f.Destination, name, val, f.Usage) + return + } + set.Bool(name, val, f.Usage) + }) + + return nil +} + +func lookupBool(name string, set *flag.FlagSet) bool { + f := set.Lookup(name) + if f != nil { + parsed, err := strconv.ParseBool(f.Value.String()) + if err != nil { + return false + } + return parsed + } + return false +} diff --git a/vendor/github.com/urfave/cli/flag_bool_t.go b/vendor/github.com/urfave/cli/flag_bool_t.go new file mode 100644 index 000000000..cd0888fa2 --- /dev/null +++ b/vendor/github.com/urfave/cli/flag_bool_t.go @@ -0,0 +1,110 @@ +package cli + +import ( + "flag" + "fmt" + "strconv" +) + +// BoolTFlag is a flag with type bool that is true by default +type BoolTFlag struct { + Name string + Usage string + EnvVar string + FilePath string + Required bool + Hidden bool + Destination *bool +} + +// String returns a readable representation of this value +// (for usage defaults) +func (f BoolTFlag) String() string { + return FlagStringer(f) +} + +// GetName returns the name of the flag +func (f BoolTFlag) GetName() string { + return f.Name +} + +// IsRequired returns whether or not the flag is required +func (f BoolTFlag) IsRequired() bool { + return f.Required +} + +// TakesValue returns true of the flag takes a value, otherwise false +func (f BoolTFlag) TakesValue() bool { + return false +} + +// GetUsage returns the usage string for the flag +func (f BoolTFlag) GetUsage() string { + return f.Usage +} + +// GetValue returns the flags value as string representation and an empty +// string if the flag takes no value at all. 
+func (f BoolTFlag) GetValue() string { + return "" +} + +// BoolT looks up the value of a local BoolTFlag, returns +// false if not found +func (c *Context) BoolT(name string) bool { + return lookupBoolT(name, c.flagSet) +} + +// GlobalBoolT looks up the value of a global BoolTFlag, returns +// false if not found +func (c *Context) GlobalBoolT(name string) bool { + if fs := lookupGlobalFlagSet(name, c); fs != nil { + return lookupBoolT(name, fs) + } + return false +} + +// Apply populates the flag given the flag set and environment +// Ignores errors +func (f BoolTFlag) Apply(set *flag.FlagSet) { + _ = f.ApplyWithError(set) +} + +// ApplyWithError populates the flag given the flag set and environment +func (f BoolTFlag) ApplyWithError(set *flag.FlagSet) error { + val := true + + if envVal, ok := flagFromFileEnv(f.FilePath, f.EnvVar); ok { + if envVal == "" { + val = false + } else { + envValBool, err := strconv.ParseBool(envVal) + if err != nil { + return fmt.Errorf("could not parse %s as bool value for flag %s: %s", envVal, f.Name, err) + } + val = envValBool + } + } + + eachName(f.Name, func(name string) { + if f.Destination != nil { + set.BoolVar(f.Destination, name, val, f.Usage) + return + } + set.Bool(name, val, f.Usage) + }) + + return nil +} + +func lookupBoolT(name string, set *flag.FlagSet) bool { + f := set.Lookup(name) + if f != nil { + parsed, err := strconv.ParseBool(f.Value.String()) + if err != nil { + return false + } + return parsed + } + return false +} diff --git a/vendor/github.com/urfave/cli/flag_duration.go b/vendor/github.com/urfave/cli/flag_duration.go new file mode 100644 index 000000000..df4ade589 --- /dev/null +++ b/vendor/github.com/urfave/cli/flag_duration.go @@ -0,0 +1,106 @@ +package cli + +import ( + "flag" + "fmt" + "time" +) + +// DurationFlag is a flag with type time.Duration (see https://golang.org/pkg/time/#ParseDuration) +type DurationFlag struct { + Name string + Usage string + EnvVar string + FilePath string + Required bool + Hidden bool + Value time.Duration + Destination *time.Duration +} + +// String returns a readable representation of this value +// (for usage defaults) +func (f DurationFlag) String() string { + return FlagStringer(f) +} + +// GetName returns the name of the flag +func (f DurationFlag) GetName() string { + return f.Name +} + +// IsRequired returns whether or not the flag is required +func (f DurationFlag) IsRequired() bool { + return f.Required +} + +// TakesValue returns true of the flag takes a value, otherwise false +func (f DurationFlag) TakesValue() bool { + return true +} + +// GetUsage returns the usage string for the flag +func (f DurationFlag) GetUsage() string { + return f.Usage +} + +// GetValue returns the flags value as string representation and an empty +// string if the flag takes no value at all. 
+func (f DurationFlag) GetValue() string { + return f.Value.String() +} + +// Duration looks up the value of a local DurationFlag, returns +// 0 if not found +func (c *Context) Duration(name string) time.Duration { + return lookupDuration(name, c.flagSet) +} + +// GlobalDuration looks up the value of a global DurationFlag, returns +// 0 if not found +func (c *Context) GlobalDuration(name string) time.Duration { + if fs := lookupGlobalFlagSet(name, c); fs != nil { + return lookupDuration(name, fs) + } + return 0 +} + +// Apply populates the flag given the flag set and environment +// Ignores errors +func (f DurationFlag) Apply(set *flag.FlagSet) { + _ = f.ApplyWithError(set) +} + +// ApplyWithError populates the flag given the flag set and environment +func (f DurationFlag) ApplyWithError(set *flag.FlagSet) error { + if envVal, ok := flagFromFileEnv(f.FilePath, f.EnvVar); ok { + envValDuration, err := time.ParseDuration(envVal) + if err != nil { + return fmt.Errorf("could not parse %s as duration for flag %s: %s", envVal, f.Name, err) + } + + f.Value = envValDuration + } + + eachName(f.Name, func(name string) { + if f.Destination != nil { + set.DurationVar(f.Destination, name, f.Value, f.Usage) + return + } + set.Duration(name, f.Value, f.Usage) + }) + + return nil +} + +func lookupDuration(name string, set *flag.FlagSet) time.Duration { + f := set.Lookup(name) + if f != nil { + parsed, err := time.ParseDuration(f.Value.String()) + if err != nil { + return 0 + } + return parsed + } + return 0 +} diff --git a/vendor/github.com/urfave/cli/flag_float64.go b/vendor/github.com/urfave/cli/flag_float64.go new file mode 100644 index 000000000..65398d3b5 --- /dev/null +++ b/vendor/github.com/urfave/cli/flag_float64.go @@ -0,0 +1,106 @@ +package cli + +import ( + "flag" + "fmt" + "strconv" +) + +// Float64Flag is a flag with type float64 +type Float64Flag struct { + Name string + Usage string + EnvVar string + FilePath string + Required bool + Hidden bool + Value float64 + Destination *float64 +} + +// String returns a readable representation of this value +// (for usage defaults) +func (f Float64Flag) String() string { + return FlagStringer(f) +} + +// GetName returns the name of the flag +func (f Float64Flag) GetName() string { + return f.Name +} + +// IsRequired returns whether or not the flag is required +func (f Float64Flag) IsRequired() bool { + return f.Required +} + +// TakesValue returns true of the flag takes a value, otherwise false +func (f Float64Flag) TakesValue() bool { + return true +} + +// GetUsage returns the usage string for the flag +func (f Float64Flag) GetUsage() string { + return f.Usage +} + +// GetValue returns the flags value as string representation and an empty +// string if the flag takes no value at all. 
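flag_duration.go and flag_float64.go above follow the same template, with values read back through the typed Context helpers. A short illustrative sketch, with flag names and defaults made up:

package main

import (
	"fmt"
	"log"
	"os"
	"time"

	"github.com/urfave/cli"
)

func main() {
	app := cli.NewApp()
	app.Name = "demo"
	app.Flags = []cli.Flag{
		// Parsed with time.ParseDuration; DEMO_TIMEOUT=90s would override the default.
		cli.DurationFlag{Name: "timeout", Value: 30 * time.Second, EnvVar: "DEMO_TIMEOUT"},
		cli.Float64Flag{Name: "ratio", Value: 0.5},
	}
	app.Action = func(c *cli.Context) error {
		fmt.Println("timeout =", c.Duration("timeout"))
		fmt.Println("ratio =", c.Float64("ratio"))
		return nil
	}

	if err := app.Run(os.Args); err != nil {
		log.Fatal(err)
	}
}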
+func (f Float64Flag) GetValue() string { + return fmt.Sprintf("%f", f.Value) +} + +// Float64 looks up the value of a local Float64Flag, returns +// 0 if not found +func (c *Context) Float64(name string) float64 { + return lookupFloat64(name, c.flagSet) +} + +// GlobalFloat64 looks up the value of a global Float64Flag, returns +// 0 if not found +func (c *Context) GlobalFloat64(name string) float64 { + if fs := lookupGlobalFlagSet(name, c); fs != nil { + return lookupFloat64(name, fs) + } + return 0 +} + +// Apply populates the flag given the flag set and environment +// Ignores errors +func (f Float64Flag) Apply(set *flag.FlagSet) { + _ = f.ApplyWithError(set) +} + +// ApplyWithError populates the flag given the flag set and environment +func (f Float64Flag) ApplyWithError(set *flag.FlagSet) error { + if envVal, ok := flagFromFileEnv(f.FilePath, f.EnvVar); ok { + envValFloat, err := strconv.ParseFloat(envVal, 10) + if err != nil { + return fmt.Errorf("could not parse %s as float64 value for flag %s: %s", envVal, f.Name, err) + } + + f.Value = envValFloat + } + + eachName(f.Name, func(name string) { + if f.Destination != nil { + set.Float64Var(f.Destination, name, f.Value, f.Usage) + return + } + set.Float64(name, f.Value, f.Usage) + }) + + return nil +} + +func lookupFloat64(name string, set *flag.FlagSet) float64 { + f := set.Lookup(name) + if f != nil { + parsed, err := strconv.ParseFloat(f.Value.String(), 64) + if err != nil { + return 0 + } + return parsed + } + return 0 +} diff --git a/vendor/github.com/urfave/cli/flag_generated.go b/vendor/github.com/urfave/cli/flag_generated.go deleted file mode 100644 index 491b61956..000000000 --- a/vendor/github.com/urfave/cli/flag_generated.go +++ /dev/null @@ -1,627 +0,0 @@ -package cli - -import ( - "flag" - "strconv" - "time" -) - -// WARNING: This file is generated! 
- -// BoolFlag is a flag with type bool -type BoolFlag struct { - Name string - Usage string - EnvVar string - Hidden bool - Destination *bool -} - -// String returns a readable representation of this value -// (for usage defaults) -func (f BoolFlag) String() string { - return FlagStringer(f) -} - -// GetName returns the name of the flag -func (f BoolFlag) GetName() string { - return f.Name -} - -// Bool looks up the value of a local BoolFlag, returns -// false if not found -func (c *Context) Bool(name string) bool { - return lookupBool(name, c.flagSet) -} - -// GlobalBool looks up the value of a global BoolFlag, returns -// false if not found -func (c *Context) GlobalBool(name string) bool { - if fs := lookupGlobalFlagSet(name, c); fs != nil { - return lookupBool(name, fs) - } - return false -} - -func lookupBool(name string, set *flag.FlagSet) bool { - f := set.Lookup(name) - if f != nil { - parsed, err := strconv.ParseBool(f.Value.String()) - if err != nil { - return false - } - return parsed - } - return false -} - -// BoolTFlag is a flag with type bool that is true by default -type BoolTFlag struct { - Name string - Usage string - EnvVar string - Hidden bool - Destination *bool -} - -// String returns a readable representation of this value -// (for usage defaults) -func (f BoolTFlag) String() string { - return FlagStringer(f) -} - -// GetName returns the name of the flag -func (f BoolTFlag) GetName() string { - return f.Name -} - -// BoolT looks up the value of a local BoolTFlag, returns -// false if not found -func (c *Context) BoolT(name string) bool { - return lookupBoolT(name, c.flagSet) -} - -// GlobalBoolT looks up the value of a global BoolTFlag, returns -// false if not found -func (c *Context) GlobalBoolT(name string) bool { - if fs := lookupGlobalFlagSet(name, c); fs != nil { - return lookupBoolT(name, fs) - } - return false -} - -func lookupBoolT(name string, set *flag.FlagSet) bool { - f := set.Lookup(name) - if f != nil { - parsed, err := strconv.ParseBool(f.Value.String()) - if err != nil { - return false - } - return parsed - } - return false -} - -// DurationFlag is a flag with type time.Duration (see https://golang.org/pkg/time/#ParseDuration) -type DurationFlag struct { - Name string - Usage string - EnvVar string - Hidden bool - Value time.Duration - Destination *time.Duration -} - -// String returns a readable representation of this value -// (for usage defaults) -func (f DurationFlag) String() string { - return FlagStringer(f) -} - -// GetName returns the name of the flag -func (f DurationFlag) GetName() string { - return f.Name -} - -// Duration looks up the value of a local DurationFlag, returns -// 0 if not found -func (c *Context) Duration(name string) time.Duration { - return lookupDuration(name, c.flagSet) -} - -// GlobalDuration looks up the value of a global DurationFlag, returns -// 0 if not found -func (c *Context) GlobalDuration(name string) time.Duration { - if fs := lookupGlobalFlagSet(name, c); fs != nil { - return lookupDuration(name, fs) - } - return 0 -} - -func lookupDuration(name string, set *flag.FlagSet) time.Duration { - f := set.Lookup(name) - if f != nil { - parsed, err := time.ParseDuration(f.Value.String()) - if err != nil { - return 0 - } - return parsed - } - return 0 -} - -// Float64Flag is a flag with type float64 -type Float64Flag struct { - Name string - Usage string - EnvVar string - Hidden bool - Value float64 - Destination *float64 -} - -// String returns a readable representation of this value -// (for usage defaults) -func (f 
Float64Flag) String() string { - return FlagStringer(f) -} - -// GetName returns the name of the flag -func (f Float64Flag) GetName() string { - return f.Name -} - -// Float64 looks up the value of a local Float64Flag, returns -// 0 if not found -func (c *Context) Float64(name string) float64 { - return lookupFloat64(name, c.flagSet) -} - -// GlobalFloat64 looks up the value of a global Float64Flag, returns -// 0 if not found -func (c *Context) GlobalFloat64(name string) float64 { - if fs := lookupGlobalFlagSet(name, c); fs != nil { - return lookupFloat64(name, fs) - } - return 0 -} - -func lookupFloat64(name string, set *flag.FlagSet) float64 { - f := set.Lookup(name) - if f != nil { - parsed, err := strconv.ParseFloat(f.Value.String(), 64) - if err != nil { - return 0 - } - return parsed - } - return 0 -} - -// GenericFlag is a flag with type Generic -type GenericFlag struct { - Name string - Usage string - EnvVar string - Hidden bool - Value Generic -} - -// String returns a readable representation of this value -// (for usage defaults) -func (f GenericFlag) String() string { - return FlagStringer(f) -} - -// GetName returns the name of the flag -func (f GenericFlag) GetName() string { - return f.Name -} - -// Generic looks up the value of a local GenericFlag, returns -// nil if not found -func (c *Context) Generic(name string) interface{} { - return lookupGeneric(name, c.flagSet) -} - -// GlobalGeneric looks up the value of a global GenericFlag, returns -// nil if not found -func (c *Context) GlobalGeneric(name string) interface{} { - if fs := lookupGlobalFlagSet(name, c); fs != nil { - return lookupGeneric(name, fs) - } - return nil -} - -func lookupGeneric(name string, set *flag.FlagSet) interface{} { - f := set.Lookup(name) - if f != nil { - parsed, err := f.Value, error(nil) - if err != nil { - return nil - } - return parsed - } - return nil -} - -// Int64Flag is a flag with type int64 -type Int64Flag struct { - Name string - Usage string - EnvVar string - Hidden bool - Value int64 - Destination *int64 -} - -// String returns a readable representation of this value -// (for usage defaults) -func (f Int64Flag) String() string { - return FlagStringer(f) -} - -// GetName returns the name of the flag -func (f Int64Flag) GetName() string { - return f.Name -} - -// Int64 looks up the value of a local Int64Flag, returns -// 0 if not found -func (c *Context) Int64(name string) int64 { - return lookupInt64(name, c.flagSet) -} - -// GlobalInt64 looks up the value of a global Int64Flag, returns -// 0 if not found -func (c *Context) GlobalInt64(name string) int64 { - if fs := lookupGlobalFlagSet(name, c); fs != nil { - return lookupInt64(name, fs) - } - return 0 -} - -func lookupInt64(name string, set *flag.FlagSet) int64 { - f := set.Lookup(name) - if f != nil { - parsed, err := strconv.ParseInt(f.Value.String(), 0, 64) - if err != nil { - return 0 - } - return parsed - } - return 0 -} - -// IntFlag is a flag with type int -type IntFlag struct { - Name string - Usage string - EnvVar string - Hidden bool - Value int - Destination *int -} - -// String returns a readable representation of this value -// (for usage defaults) -func (f IntFlag) String() string { - return FlagStringer(f) -} - -// GetName returns the name of the flag -func (f IntFlag) GetName() string { - return f.Name -} - -// Int looks up the value of a local IntFlag, returns -// 0 if not found -func (c *Context) Int(name string) int { - return lookupInt(name, c.flagSet) -} - -// GlobalInt looks up the value of a global IntFlag, 
returns -// 0 if not found -func (c *Context) GlobalInt(name string) int { - if fs := lookupGlobalFlagSet(name, c); fs != nil { - return lookupInt(name, fs) - } - return 0 -} - -func lookupInt(name string, set *flag.FlagSet) int { - f := set.Lookup(name) - if f != nil { - parsed, err := strconv.ParseInt(f.Value.String(), 0, 64) - if err != nil { - return 0 - } - return int(parsed) - } - return 0 -} - -// IntSliceFlag is a flag with type *IntSlice -type IntSliceFlag struct { - Name string - Usage string - EnvVar string - Hidden bool - Value *IntSlice -} - -// String returns a readable representation of this value -// (for usage defaults) -func (f IntSliceFlag) String() string { - return FlagStringer(f) -} - -// GetName returns the name of the flag -func (f IntSliceFlag) GetName() string { - return f.Name -} - -// IntSlice looks up the value of a local IntSliceFlag, returns -// nil if not found -func (c *Context) IntSlice(name string) []int { - return lookupIntSlice(name, c.flagSet) -} - -// GlobalIntSlice looks up the value of a global IntSliceFlag, returns -// nil if not found -func (c *Context) GlobalIntSlice(name string) []int { - if fs := lookupGlobalFlagSet(name, c); fs != nil { - return lookupIntSlice(name, fs) - } - return nil -} - -func lookupIntSlice(name string, set *flag.FlagSet) []int { - f := set.Lookup(name) - if f != nil { - parsed, err := (f.Value.(*IntSlice)).Value(), error(nil) - if err != nil { - return nil - } - return parsed - } - return nil -} - -// Int64SliceFlag is a flag with type *Int64Slice -type Int64SliceFlag struct { - Name string - Usage string - EnvVar string - Hidden bool - Value *Int64Slice -} - -// String returns a readable representation of this value -// (for usage defaults) -func (f Int64SliceFlag) String() string { - return FlagStringer(f) -} - -// GetName returns the name of the flag -func (f Int64SliceFlag) GetName() string { - return f.Name -} - -// Int64Slice looks up the value of a local Int64SliceFlag, returns -// nil if not found -func (c *Context) Int64Slice(name string) []int64 { - return lookupInt64Slice(name, c.flagSet) -} - -// GlobalInt64Slice looks up the value of a global Int64SliceFlag, returns -// nil if not found -func (c *Context) GlobalInt64Slice(name string) []int64 { - if fs := lookupGlobalFlagSet(name, c); fs != nil { - return lookupInt64Slice(name, fs) - } - return nil -} - -func lookupInt64Slice(name string, set *flag.FlagSet) []int64 { - f := set.Lookup(name) - if f != nil { - parsed, err := (f.Value.(*Int64Slice)).Value(), error(nil) - if err != nil { - return nil - } - return parsed - } - return nil -} - -// StringFlag is a flag with type string -type StringFlag struct { - Name string - Usage string - EnvVar string - Hidden bool - Value string - Destination *string -} - -// String returns a readable representation of this value -// (for usage defaults) -func (f StringFlag) String() string { - return FlagStringer(f) -} - -// GetName returns the name of the flag -func (f StringFlag) GetName() string { - return f.Name -} - -// String looks up the value of a local StringFlag, returns -// "" if not found -func (c *Context) String(name string) string { - return lookupString(name, c.flagSet) -} - -// GlobalString looks up the value of a global StringFlag, returns -// "" if not found -func (c *Context) GlobalString(name string) string { - if fs := lookupGlobalFlagSet(name, c); fs != nil { - return lookupString(name, fs) - } - return "" -} - -func lookupString(name string, set *flag.FlagSet) string { - f := set.Lookup(name) - if f != 
nil { - parsed, err := f.Value.String(), error(nil) - if err != nil { - return "" - } - return parsed - } - return "" -} - -// StringSliceFlag is a flag with type *StringSlice -type StringSliceFlag struct { - Name string - Usage string - EnvVar string - Hidden bool - Value *StringSlice -} - -// String returns a readable representation of this value -// (for usage defaults) -func (f StringSliceFlag) String() string { - return FlagStringer(f) -} - -// GetName returns the name of the flag -func (f StringSliceFlag) GetName() string { - return f.Name -} - -// StringSlice looks up the value of a local StringSliceFlag, returns -// nil if not found -func (c *Context) StringSlice(name string) []string { - return lookupStringSlice(name, c.flagSet) -} - -// GlobalStringSlice looks up the value of a global StringSliceFlag, returns -// nil if not found -func (c *Context) GlobalStringSlice(name string) []string { - if fs := lookupGlobalFlagSet(name, c); fs != nil { - return lookupStringSlice(name, fs) - } - return nil -} - -func lookupStringSlice(name string, set *flag.FlagSet) []string { - f := set.Lookup(name) - if f != nil { - parsed, err := (f.Value.(*StringSlice)).Value(), error(nil) - if err != nil { - return nil - } - return parsed - } - return nil -} - -// Uint64Flag is a flag with type uint64 -type Uint64Flag struct { - Name string - Usage string - EnvVar string - Hidden bool - Value uint64 - Destination *uint64 -} - -// String returns a readable representation of this value -// (for usage defaults) -func (f Uint64Flag) String() string { - return FlagStringer(f) -} - -// GetName returns the name of the flag -func (f Uint64Flag) GetName() string { - return f.Name -} - -// Uint64 looks up the value of a local Uint64Flag, returns -// 0 if not found -func (c *Context) Uint64(name string) uint64 { - return lookupUint64(name, c.flagSet) -} - -// GlobalUint64 looks up the value of a global Uint64Flag, returns -// 0 if not found -func (c *Context) GlobalUint64(name string) uint64 { - if fs := lookupGlobalFlagSet(name, c); fs != nil { - return lookupUint64(name, fs) - } - return 0 -} - -func lookupUint64(name string, set *flag.FlagSet) uint64 { - f := set.Lookup(name) - if f != nil { - parsed, err := strconv.ParseUint(f.Value.String(), 0, 64) - if err != nil { - return 0 - } - return parsed - } - return 0 -} - -// UintFlag is a flag with type uint -type UintFlag struct { - Name string - Usage string - EnvVar string - Hidden bool - Value uint - Destination *uint -} - -// String returns a readable representation of this value -// (for usage defaults) -func (f UintFlag) String() string { - return FlagStringer(f) -} - -// GetName returns the name of the flag -func (f UintFlag) GetName() string { - return f.Name -} - -// Uint looks up the value of a local UintFlag, returns -// 0 if not found -func (c *Context) Uint(name string) uint { - return lookupUint(name, c.flagSet) -} - -// GlobalUint looks up the value of a global UintFlag, returns -// 0 if not found -func (c *Context) GlobalUint(name string) uint { - if fs := lookupGlobalFlagSet(name, c); fs != nil { - return lookupUint(name, fs) - } - return 0 -} - -func lookupUint(name string, set *flag.FlagSet) uint { - f := set.Lookup(name) - if f != nil { - parsed, err := strconv.ParseUint(f.Value.String(), 0, 64) - if err != nil { - return 0 - } - return uint(parsed) - } - return 0 -} diff --git a/vendor/github.com/urfave/cli/flag_generic.go b/vendor/github.com/urfave/cli/flag_generic.go new file mode 100644 index 000000000..c43dae7d0 --- /dev/null +++ 
b/vendor/github.com/urfave/cli/flag_generic.go @@ -0,0 +1,110 @@ +package cli + +import ( + "flag" + "fmt" +) + +// Generic is a generic parseable type identified by a specific flag +type Generic interface { + Set(value string) error + String() string +} + +// GenericFlag is a flag with type Generic +type GenericFlag struct { + Name string + Usage string + EnvVar string + FilePath string + Required bool + Hidden bool + TakesFile bool + Value Generic +} + +// String returns a readable representation of this value +// (for usage defaults) +func (f GenericFlag) String() string { + return FlagStringer(f) +} + +// GetName returns the name of the flag +func (f GenericFlag) GetName() string { + return f.Name +} + +// IsRequired returns whether or not the flag is required +func (f GenericFlag) IsRequired() bool { + return f.Required +} + +// TakesValue returns true of the flag takes a value, otherwise false +func (f GenericFlag) TakesValue() bool { + return true +} + +// GetUsage returns the usage string for the flag +func (f GenericFlag) GetUsage() string { + return f.Usage +} + +// GetValue returns the flags value as string representation and an empty +// string if the flag takes no value at all. +func (f GenericFlag) GetValue() string { + if f.Value != nil { + return f.Value.String() + } + return "" +} + +// Apply takes the flagset and calls Set on the generic flag with the value +// provided by the user for parsing by the flag +// Ignores parsing errors +func (f GenericFlag) Apply(set *flag.FlagSet) { + _ = f.ApplyWithError(set) +} + +// ApplyWithError takes the flagset and calls Set on the generic flag with the value +// provided by the user for parsing by the flag +func (f GenericFlag) ApplyWithError(set *flag.FlagSet) error { + val := f.Value + if fileEnvVal, ok := flagFromFileEnv(f.FilePath, f.EnvVar); ok { + if err := val.Set(fileEnvVal); err != nil { + return fmt.Errorf("could not parse %s as value for flag %s: %s", fileEnvVal, f.Name, err) + } + } + + eachName(f.Name, func(name string) { + set.Var(f.Value, name, f.Usage) + }) + + return nil +} + +// Generic looks up the value of a local GenericFlag, returns +// nil if not found +func (c *Context) Generic(name string) interface{} { + return lookupGeneric(name, c.flagSet) +} + +// GlobalGeneric looks up the value of a global GenericFlag, returns +// nil if not found +func (c *Context) GlobalGeneric(name string) interface{} { + if fs := lookupGlobalFlagSet(name, c); fs != nil { + return lookupGeneric(name, fs) + } + return nil +} + +func lookupGeneric(name string, set *flag.FlagSet) interface{} { + f := set.Lookup(name) + if f != nil { + parsed, err := f.Value, error(nil) + if err != nil { + return nil + } + return parsed + } + return nil +} diff --git a/vendor/github.com/urfave/cli/flag_int.go b/vendor/github.com/urfave/cli/flag_int.go new file mode 100644 index 000000000..bae32e281 --- /dev/null +++ b/vendor/github.com/urfave/cli/flag_int.go @@ -0,0 +1,105 @@ +package cli + +import ( + "flag" + "fmt" + "strconv" +) + +// IntFlag is a flag with type int +type IntFlag struct { + Name string + Usage string + EnvVar string + FilePath string + Required bool + Hidden bool + Value int + Destination *int +} + +// String returns a readable representation of this value +// (for usage defaults) +func (f IntFlag) String() string { + return FlagStringer(f) +} + +// GetName returns the name of the flag +func (f IntFlag) GetName() string { + return f.Name +} + +// IsRequired returns whether or not the flag is required +func (f IntFlag) IsRequired() 
bool { + return f.Required +} + +// TakesValue returns true of the flag takes a value, otherwise false +func (f IntFlag) TakesValue() bool { + return true +} + +// GetUsage returns the usage string for the flag +func (f IntFlag) GetUsage() string { + return f.Usage +} + +// GetValue returns the flags value as string representation and an empty +// string if the flag takes no value at all. +func (f IntFlag) GetValue() string { + return fmt.Sprintf("%d", f.Value) +} + +// Apply populates the flag given the flag set and environment +// Ignores errors +func (f IntFlag) Apply(set *flag.FlagSet) { + _ = f.ApplyWithError(set) +} + +// ApplyWithError populates the flag given the flag set and environment +func (f IntFlag) ApplyWithError(set *flag.FlagSet) error { + if envVal, ok := flagFromFileEnv(f.FilePath, f.EnvVar); ok { + envValInt, err := strconv.ParseInt(envVal, 0, 64) + if err != nil { + return fmt.Errorf("could not parse %s as int value for flag %s: %s", envVal, f.Name, err) + } + f.Value = int(envValInt) + } + + eachName(f.Name, func(name string) { + if f.Destination != nil { + set.IntVar(f.Destination, name, f.Value, f.Usage) + return + } + set.Int(name, f.Value, f.Usage) + }) + + return nil +} + +// Int looks up the value of a local IntFlag, returns +// 0 if not found +func (c *Context) Int(name string) int { + return lookupInt(name, c.flagSet) +} + +// GlobalInt looks up the value of a global IntFlag, returns +// 0 if not found +func (c *Context) GlobalInt(name string) int { + if fs := lookupGlobalFlagSet(name, c); fs != nil { + return lookupInt(name, fs) + } + return 0 +} + +func lookupInt(name string, set *flag.FlagSet) int { + f := set.Lookup(name) + if f != nil { + parsed, err := strconv.ParseInt(f.Value.String(), 0, 64) + if err != nil { + return 0 + } + return int(parsed) + } + return 0 +} diff --git a/vendor/github.com/urfave/cli/flag_int64.go b/vendor/github.com/urfave/cli/flag_int64.go new file mode 100644 index 000000000..aaafbe9d6 --- /dev/null +++ b/vendor/github.com/urfave/cli/flag_int64.go @@ -0,0 +1,106 @@ +package cli + +import ( + "flag" + "fmt" + "strconv" +) + +// Int64Flag is a flag with type int64 +type Int64Flag struct { + Name string + Usage string + EnvVar string + FilePath string + Required bool + Hidden bool + Value int64 + Destination *int64 +} + +// String returns a readable representation of this value +// (for usage defaults) +func (f Int64Flag) String() string { + return FlagStringer(f) +} + +// GetName returns the name of the flag +func (f Int64Flag) GetName() string { + return f.Name +} + +// IsRequired returns whether or not the flag is required +func (f Int64Flag) IsRequired() bool { + return f.Required +} + +// TakesValue returns true of the flag takes a value, otherwise false +func (f Int64Flag) TakesValue() bool { + return true +} + +// GetUsage returns the usage string for the flag +func (f Int64Flag) GetUsage() string { + return f.Usage +} + +// GetValue returns the flags value as string representation and an empty +// string if the flag takes no value at all. 
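
The Generic interface introduced in flag_generic.go above only requires Set and String, so an application can plug its own value types into GenericFlag. A minimal sketch of that pattern, assuming the urfave/cli v1 API vendored here; the hostPort type, the flag name and the default address are illustrative only:

package main

import (
	"fmt"
	"log"
	"os"
	"strings"

	"github.com/urfave/cli"
)

// hostPort is a hypothetical value type; it satisfies cli.Generic via Set and String.
type hostPort struct {
	Host, Port string
}

func (h *hostPort) Set(value string) error {
	parts := strings.SplitN(value, ":", 2)
	if len(parts) != 2 {
		return fmt.Errorf("expected host:port, got %q", value)
	}
	h.Host, h.Port = parts[0], parts[1]
	return nil
}

func (h *hostPort) String() string { return h.Host + ":" + h.Port }

func main() {
	addr := &hostPort{Host: "localhost", Port: "8080"}
	app := cli.NewApp()
	app.Name = "demo"
	app.Flags = []cli.Flag{
		cli.GenericFlag{Name: "addr", Value: addr, Usage: "listen address as host:port"},
	}
	app.Action = func(c *cli.Context) error {
		// Apply registers the same pointer with the flag set, so addr reflects the parsed value.
		fmt.Println("listening on", addr.String())
		return nil
	}
	if err := app.Run(os.Args); err != nil {
		log.Fatal(err)
	}
}

Running something like "demo --addr 0.0.0.0:9090" calls Set with the raw string; a malformed value is reported as a parse error because Set returns it.
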
+func (f Int64Flag) GetValue() string { + return fmt.Sprintf("%d", f.Value) +} + +// Apply populates the flag given the flag set and environment +// Ignores errors +func (f Int64Flag) Apply(set *flag.FlagSet) { + _ = f.ApplyWithError(set) +} + +// ApplyWithError populates the flag given the flag set and environment +func (f Int64Flag) ApplyWithError(set *flag.FlagSet) error { + if envVal, ok := flagFromFileEnv(f.FilePath, f.EnvVar); ok { + envValInt, err := strconv.ParseInt(envVal, 0, 64) + if err != nil { + return fmt.Errorf("could not parse %s as int value for flag %s: %s", envVal, f.Name, err) + } + + f.Value = envValInt + } + + eachName(f.Name, func(name string) { + if f.Destination != nil { + set.Int64Var(f.Destination, name, f.Value, f.Usage) + return + } + set.Int64(name, f.Value, f.Usage) + }) + + return nil +} + +// Int64 looks up the value of a local Int64Flag, returns +// 0 if not found +func (c *Context) Int64(name string) int64 { + return lookupInt64(name, c.flagSet) +} + +// GlobalInt64 looks up the value of a global Int64Flag, returns +// 0 if not found +func (c *Context) GlobalInt64(name string) int64 { + if fs := lookupGlobalFlagSet(name, c); fs != nil { + return lookupInt64(name, fs) + } + return 0 +} + +func lookupInt64(name string, set *flag.FlagSet) int64 { + f := set.Lookup(name) + if f != nil { + parsed, err := strconv.ParseInt(f.Value.String(), 0, 64) + if err != nil { + return 0 + } + return parsed + } + return 0 +} diff --git a/vendor/github.com/urfave/cli/flag_int64_slice.go b/vendor/github.com/urfave/cli/flag_int64_slice.go new file mode 100644 index 000000000..ed2e983b6 --- /dev/null +++ b/vendor/github.com/urfave/cli/flag_int64_slice.go @@ -0,0 +1,141 @@ +package cli + +import ( + "flag" + "fmt" + "strconv" + "strings" +) + +// Int64Slice is an opaque type for []int to satisfy flag.Value and flag.Getter +type Int64Slice []int64 + +// Set parses the value into an integer and appends it to the list of values +func (f *Int64Slice) Set(value string) error { + tmp, err := strconv.ParseInt(value, 10, 64) + if err != nil { + return err + } + *f = append(*f, tmp) + return nil +} + +// String returns a readable representation of this value (for usage defaults) +func (f *Int64Slice) String() string { + return fmt.Sprintf("%#v", *f) +} + +// Value returns the slice of ints set by this flag +func (f *Int64Slice) Value() []int64 { + return *f +} + +// Get returns the slice of ints set by this flag +func (f *Int64Slice) Get() interface{} { + return *f +} + +// Int64SliceFlag is a flag with type *Int64Slice +type Int64SliceFlag struct { + Name string + Usage string + EnvVar string + FilePath string + Required bool + Hidden bool + Value *Int64Slice +} + +// String returns a readable representation of this value +// (for usage defaults) +func (f Int64SliceFlag) String() string { + return FlagStringer(f) +} + +// GetName returns the name of the flag +func (f Int64SliceFlag) GetName() string { + return f.Name +} + +// IsRequired returns whether or not the flag is required +func (f Int64SliceFlag) IsRequired() bool { + return f.Required +} + +// TakesValue returns true of the flag takes a value, otherwise false +func (f Int64SliceFlag) TakesValue() bool { + return true +} + +// GetUsage returns the usage string for the flag +func (f Int64SliceFlag) GetUsage() string { + return f.Usage +} + +// GetValue returns the flags value as string representation and an empty +// string if the flag takes no value at all. 
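
Int64Slice (and IntSlice further down) exists so a repeatable numeric flag can satisfy the standard library's flag.Value and flag.Getter interfaces via Set, String and Get. A small sketch of that contract on its own, using only the standard flag package; the flag set name and the sample values are made up:

package main

import (
	"flag"
	"fmt"

	"github.com/urfave/cli"
)

func main() {
	ids := &cli.Int64Slice{}

	// Any *Int64Slice is a valid flag.Value, so it can be registered directly.
	fs := flag.NewFlagSet("demo", flag.ContinueOnError)
	fs.Var(ids, "id", "may be given multiple times")

	if err := fs.Parse([]string{"-id", "10", "-id", "20"}); err != nil {
		fmt.Println("parse error:", err)
		return
	}
	fmt.Println(ids.Value()) // [10 20]
}
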
+func (f Int64SliceFlag) GetValue() string { + if f.Value != nil { + return f.Value.String() + } + return "" +} + +// Apply populates the flag given the flag set and environment +// Ignores errors +func (f Int64SliceFlag) Apply(set *flag.FlagSet) { + _ = f.ApplyWithError(set) +} + +// ApplyWithError populates the flag given the flag set and environment +func (f Int64SliceFlag) ApplyWithError(set *flag.FlagSet) error { + if envVal, ok := flagFromFileEnv(f.FilePath, f.EnvVar); ok { + newVal := &Int64Slice{} + for _, s := range strings.Split(envVal, ",") { + s = strings.TrimSpace(s) + if err := newVal.Set(s); err != nil { + return fmt.Errorf("could not parse %s as int64 slice value for flag %s: %s", envVal, f.Name, err) + } + } + if f.Value == nil { + f.Value = newVal + } else { + *f.Value = *newVal + } + } + + eachName(f.Name, func(name string) { + if f.Value == nil { + f.Value = &Int64Slice{} + } + set.Var(f.Value, name, f.Usage) + }) + return nil +} + +// Int64Slice looks up the value of a local Int64SliceFlag, returns +// nil if not found +func (c *Context) Int64Slice(name string) []int64 { + return lookupInt64Slice(name, c.flagSet) +} + +// GlobalInt64Slice looks up the value of a global Int64SliceFlag, returns +// nil if not found +func (c *Context) GlobalInt64Slice(name string) []int64 { + if fs := lookupGlobalFlagSet(name, c); fs != nil { + return lookupInt64Slice(name, fs) + } + return nil +} + +func lookupInt64Slice(name string, set *flag.FlagSet) []int64 { + f := set.Lookup(name) + if f != nil { + parsed, err := (f.Value.(*Int64Slice)).Value(), error(nil) + if err != nil { + return nil + } + return parsed + } + return nil +} diff --git a/vendor/github.com/urfave/cli/flag_int_slice.go b/vendor/github.com/urfave/cli/flag_int_slice.go new file mode 100644 index 000000000..c38d010fd --- /dev/null +++ b/vendor/github.com/urfave/cli/flag_int_slice.go @@ -0,0 +1,142 @@ +package cli + +import ( + "flag" + "fmt" + "strconv" + "strings" +) + +// IntSlice is an opaque type for []int to satisfy flag.Value and flag.Getter +type IntSlice []int + +// Set parses the value into an integer and appends it to the list of values +func (f *IntSlice) Set(value string) error { + tmp, err := strconv.Atoi(value) + if err != nil { + return err + } + *f = append(*f, tmp) + return nil +} + +// String returns a readable representation of this value (for usage defaults) +func (f *IntSlice) String() string { + return fmt.Sprintf("%#v", *f) +} + +// Value returns the slice of ints set by this flag +func (f *IntSlice) Value() []int { + return *f +} + +// Get returns the slice of ints set by this flag +func (f *IntSlice) Get() interface{} { + return *f +} + +// IntSliceFlag is a flag with type *IntSlice +type IntSliceFlag struct { + Name string + Usage string + EnvVar string + FilePath string + Required bool + Hidden bool + Value *IntSlice +} + +// String returns a readable representation of this value +// (for usage defaults) +func (f IntSliceFlag) String() string { + return FlagStringer(f) +} + +// GetName returns the name of the flag +func (f IntSliceFlag) GetName() string { + return f.Name +} + +// IsRequired returns whether or not the flag is required +func (f IntSliceFlag) IsRequired() bool { + return f.Required +} + +// TakesValue returns true of the flag takes a value, otherwise false +func (f IntSliceFlag) TakesValue() bool { + return true +} + +// GetUsage returns the usage string for the flag +func (f IntSliceFlag) GetUsage() string { + return f.Usage +} + +// GetValue returns the flags value as string 
representation and an empty +// string if the flag takes no value at all. +func (f IntSliceFlag) GetValue() string { + if f.Value != nil { + return f.Value.String() + } + return "" +} + +// Apply populates the flag given the flag set and environment +// Ignores errors +func (f IntSliceFlag) Apply(set *flag.FlagSet) { + _ = f.ApplyWithError(set) +} + +// ApplyWithError populates the flag given the flag set and environment +func (f IntSliceFlag) ApplyWithError(set *flag.FlagSet) error { + if envVal, ok := flagFromFileEnv(f.FilePath, f.EnvVar); ok { + newVal := &IntSlice{} + for _, s := range strings.Split(envVal, ",") { + s = strings.TrimSpace(s) + if err := newVal.Set(s); err != nil { + return fmt.Errorf("could not parse %s as int slice value for flag %s: %s", envVal, f.Name, err) + } + } + if f.Value == nil { + f.Value = newVal + } else { + *f.Value = *newVal + } + } + + eachName(f.Name, func(name string) { + if f.Value == nil { + f.Value = &IntSlice{} + } + set.Var(f.Value, name, f.Usage) + }) + + return nil +} + +// IntSlice looks up the value of a local IntSliceFlag, returns +// nil if not found +func (c *Context) IntSlice(name string) []int { + return lookupIntSlice(name, c.flagSet) +} + +// GlobalIntSlice looks up the value of a global IntSliceFlag, returns +// nil if not found +func (c *Context) GlobalIntSlice(name string) []int { + if fs := lookupGlobalFlagSet(name, c); fs != nil { + return lookupIntSlice(name, fs) + } + return nil +} + +func lookupIntSlice(name string, set *flag.FlagSet) []int { + f := set.Lookup(name) + if f != nil { + parsed, err := (f.Value.(*IntSlice)).Value(), error(nil) + if err != nil { + return nil + } + return parsed + } + return nil +} diff --git a/vendor/github.com/urfave/cli/flag_string.go b/vendor/github.com/urfave/cli/flag_string.go new file mode 100644 index 000000000..9f29da40b --- /dev/null +++ b/vendor/github.com/urfave/cli/flag_string.go @@ -0,0 +1,98 @@ +package cli + +import "flag" + +// StringFlag is a flag with type string +type StringFlag struct { + Name string + Usage string + EnvVar string + FilePath string + Required bool + Hidden bool + TakesFile bool + Value string + Destination *string +} + +// String returns a readable representation of this value +// (for usage defaults) +func (f StringFlag) String() string { + return FlagStringer(f) +} + +// GetName returns the name of the flag +func (f StringFlag) GetName() string { + return f.Name +} + +// IsRequired returns whether or not the flag is required +func (f StringFlag) IsRequired() bool { + return f.Required +} + +// TakesValue returns true of the flag takes a value, otherwise false +func (f StringFlag) TakesValue() bool { + return true +} + +// GetUsage returns the usage string for the flag +func (f StringFlag) GetUsage() string { + return f.Usage +} + +// GetValue returns the flags value as string representation and an empty +// string if the flag takes no value at all. 
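
StringFlag, like the other flag types in this set, now carries FilePath alongside EnvVar, and ApplyWithError resolves the default through flagFromFileEnv, which is defined elsewhere in this package and, as used here, appears to fall back to reading the named file when the environment variable is not set. A hedged sketch of typical wiring; the flag name, environment variable and file path are invented for illustration:

package main

import (
	"fmt"
	"log"
	"os"

	"github.com/urfave/cli"
)

func main() {
	app := cli.NewApp()
	app.Name = "demo"
	app.Flags = []cli.Flag{
		// Illustrative only: the default comes from DEMO_TOKEN or, failing that, from the file.
		cli.StringFlag{
			Name:     "token",
			EnvVar:   "DEMO_TOKEN",
			FilePath: "/etc/demo/token",
			Usage:    "API token, read from the environment or from a file",
		},
	}
	app.Action = func(c *cli.Context) error {
		fmt.Println("token set:", c.String("token") != "")
		return nil
	}
	if err := app.Run(os.Args); err != nil {
		log.Fatal(err)
	}
}
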
+func (f StringFlag) GetValue() string { + return f.Value +} + +// Apply populates the flag given the flag set and environment +// Ignores errors +func (f StringFlag) Apply(set *flag.FlagSet) { + _ = f.ApplyWithError(set) +} + +// ApplyWithError populates the flag given the flag set and environment +func (f StringFlag) ApplyWithError(set *flag.FlagSet) error { + if envVal, ok := flagFromFileEnv(f.FilePath, f.EnvVar); ok { + f.Value = envVal + } + + eachName(f.Name, func(name string) { + if f.Destination != nil { + set.StringVar(f.Destination, name, f.Value, f.Usage) + return + } + set.String(name, f.Value, f.Usage) + }) + + return nil +} + +// String looks up the value of a local StringFlag, returns +// "" if not found +func (c *Context) String(name string) string { + return lookupString(name, c.flagSet) +} + +// GlobalString looks up the value of a global StringFlag, returns +// "" if not found +func (c *Context) GlobalString(name string) string { + if fs := lookupGlobalFlagSet(name, c); fs != nil { + return lookupString(name, fs) + } + return "" +} + +func lookupString(name string, set *flag.FlagSet) string { + f := set.Lookup(name) + if f != nil { + parsed, err := f.Value.String(), error(nil) + if err != nil { + return "" + } + return parsed + } + return "" +} diff --git a/vendor/github.com/urfave/cli/flag_string_slice.go b/vendor/github.com/urfave/cli/flag_string_slice.go new file mode 100644 index 000000000..e865b2ff0 --- /dev/null +++ b/vendor/github.com/urfave/cli/flag_string_slice.go @@ -0,0 +1,138 @@ +package cli + +import ( + "flag" + "fmt" + "strings" +) + +// StringSlice is an opaque type for []string to satisfy flag.Value and flag.Getter +type StringSlice []string + +// Set appends the string value to the list of values +func (f *StringSlice) Set(value string) error { + *f = append(*f, value) + return nil +} + +// String returns a readable representation of this value (for usage defaults) +func (f *StringSlice) String() string { + return fmt.Sprintf("%s", *f) +} + +// Value returns the slice of strings set by this flag +func (f *StringSlice) Value() []string { + return *f +} + +// Get returns the slice of strings set by this flag +func (f *StringSlice) Get() interface{} { + return *f +} + +// StringSliceFlag is a flag with type *StringSlice +type StringSliceFlag struct { + Name string + Usage string + EnvVar string + FilePath string + Required bool + Hidden bool + TakesFile bool + Value *StringSlice +} + +// String returns a readable representation of this value +// (for usage defaults) +func (f StringSliceFlag) String() string { + return FlagStringer(f) +} + +// GetName returns the name of the flag +func (f StringSliceFlag) GetName() string { + return f.Name +} + +// IsRequired returns whether or not the flag is required +func (f StringSliceFlag) IsRequired() bool { + return f.Required +} + +// TakesValue returns true of the flag takes a value, otherwise false +func (f StringSliceFlag) TakesValue() bool { + return true +} + +// GetUsage returns the usage string for the flag +func (f StringSliceFlag) GetUsage() string { + return f.Usage +} + +// GetValue returns the flags value as string representation and an empty +// string if the flag takes no value at all. 
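
StringSliceFlag accepts either repeated occurrences on the command line (each one goes through StringSlice.Set) or a single comma separated environment value, which ApplyWithError splits and trims. A minimal sketch, with a made up flag name and environment variable:

package main

import (
	"fmt"
	"log"
	"os"

	"github.com/urfave/cli"
)

func main() {
	app := cli.NewApp()
	app.Name = "demo"
	app.Flags = []cli.Flag{
		// Hypothetical flag: repeat --tag on the command line, or set DEMO_TAGS=a,b.
		cli.StringSliceFlag{Name: "tag", EnvVar: "DEMO_TAGS", Usage: "may be given multiple times"},
	}
	app.Action = func(c *cli.Context) error {
		fmt.Println("tags:", c.StringSlice("tag"))
		return nil
	}
	if err := app.Run(os.Args); err != nil {
		log.Fatal(err)
	}
}

Both "demo --tag a --tag b" and "DEMO_TAGS=a,b demo" should print [a b].
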
+func (f StringSliceFlag) GetValue() string { + if f.Value != nil { + return f.Value.String() + } + return "" +} + +// Apply populates the flag given the flag set and environment +// Ignores errors +func (f StringSliceFlag) Apply(set *flag.FlagSet) { + _ = f.ApplyWithError(set) +} + +// ApplyWithError populates the flag given the flag set and environment +func (f StringSliceFlag) ApplyWithError(set *flag.FlagSet) error { + if envVal, ok := flagFromFileEnv(f.FilePath, f.EnvVar); ok { + newVal := &StringSlice{} + for _, s := range strings.Split(envVal, ",") { + s = strings.TrimSpace(s) + if err := newVal.Set(s); err != nil { + return fmt.Errorf("could not parse %s as string value for flag %s: %s", envVal, f.Name, err) + } + } + if f.Value == nil { + f.Value = newVal + } else { + *f.Value = *newVal + } + } + + eachName(f.Name, func(name string) { + if f.Value == nil { + f.Value = &StringSlice{} + } + set.Var(f.Value, name, f.Usage) + }) + + return nil +} + +// StringSlice looks up the value of a local StringSliceFlag, returns +// nil if not found +func (c *Context) StringSlice(name string) []string { + return lookupStringSlice(name, c.flagSet) +} + +// GlobalStringSlice looks up the value of a global StringSliceFlag, returns +// nil if not found +func (c *Context) GlobalStringSlice(name string) []string { + if fs := lookupGlobalFlagSet(name, c); fs != nil { + return lookupStringSlice(name, fs) + } + return nil +} + +func lookupStringSlice(name string, set *flag.FlagSet) []string { + f := set.Lookup(name) + if f != nil { + parsed, err := (f.Value.(*StringSlice)).Value(), error(nil) + if err != nil { + return nil + } + return parsed + } + return nil +} diff --git a/vendor/github.com/urfave/cli/flag_uint.go b/vendor/github.com/urfave/cli/flag_uint.go new file mode 100644 index 000000000..d6a04f408 --- /dev/null +++ b/vendor/github.com/urfave/cli/flag_uint.go @@ -0,0 +1,106 @@ +package cli + +import ( + "flag" + "fmt" + "strconv" +) + +// UintFlag is a flag with type uint +type UintFlag struct { + Name string + Usage string + EnvVar string + FilePath string + Required bool + Hidden bool + Value uint + Destination *uint +} + +// String returns a readable representation of this value +// (for usage defaults) +func (f UintFlag) String() string { + return FlagStringer(f) +} + +// GetName returns the name of the flag +func (f UintFlag) GetName() string { + return f.Name +} + +// IsRequired returns whether or not the flag is required +func (f UintFlag) IsRequired() bool { + return f.Required +} + +// TakesValue returns true of the flag takes a value, otherwise false +func (f UintFlag) TakesValue() bool { + return true +} + +// GetUsage returns the usage string for the flag +func (f UintFlag) GetUsage() string { + return f.Usage +} + +// Apply populates the flag given the flag set and environment +// Ignores errors +func (f UintFlag) Apply(set *flag.FlagSet) { + _ = f.ApplyWithError(set) +} + +// ApplyWithError populates the flag given the flag set and environment +func (f UintFlag) ApplyWithError(set *flag.FlagSet) error { + if envVal, ok := flagFromFileEnv(f.FilePath, f.EnvVar); ok { + envValInt, err := strconv.ParseUint(envVal, 0, 64) + if err != nil { + return fmt.Errorf("could not parse %s as uint value for flag %s: %s", envVal, f.Name, err) + } + + f.Value = uint(envValInt) + } + + eachName(f.Name, func(name string) { + if f.Destination != nil { + set.UintVar(f.Destination, name, f.Value, f.Usage) + return + } + set.Uint(name, f.Value, f.Usage) + }) + + return nil +} + +// GetValue returns the 
flags value as string representation and an empty +// string if the flag takes no value at all. +func (f UintFlag) GetValue() string { + return fmt.Sprintf("%d", f.Value) +} + +// Uint looks up the value of a local UintFlag, returns +// 0 if not found +func (c *Context) Uint(name string) uint { + return lookupUint(name, c.flagSet) +} + +// GlobalUint looks up the value of a global UintFlag, returns +// 0 if not found +func (c *Context) GlobalUint(name string) uint { + if fs := lookupGlobalFlagSet(name, c); fs != nil { + return lookupUint(name, fs) + } + return 0 +} + +func lookupUint(name string, set *flag.FlagSet) uint { + f := set.Lookup(name) + if f != nil { + parsed, err := strconv.ParseUint(f.Value.String(), 0, 64) + if err != nil { + return 0 + } + return uint(parsed) + } + return 0 +} diff --git a/vendor/github.com/urfave/cli/flag_uint64.go b/vendor/github.com/urfave/cli/flag_uint64.go new file mode 100644 index 000000000..ea6493a8b --- /dev/null +++ b/vendor/github.com/urfave/cli/flag_uint64.go @@ -0,0 +1,106 @@ +package cli + +import ( + "flag" + "fmt" + "strconv" +) + +// Uint64Flag is a flag with type uint64 +type Uint64Flag struct { + Name string + Usage string + EnvVar string + FilePath string + Required bool + Hidden bool + Value uint64 + Destination *uint64 +} + +// String returns a readable representation of this value +// (for usage defaults) +func (f Uint64Flag) String() string { + return FlagStringer(f) +} + +// GetName returns the name of the flag +func (f Uint64Flag) GetName() string { + return f.Name +} + +// IsRequired returns whether or not the flag is required +func (f Uint64Flag) IsRequired() bool { + return f.Required +} + +// TakesValue returns true of the flag takes a value, otherwise false +func (f Uint64Flag) TakesValue() bool { + return true +} + +// GetUsage returns the usage string for the flag +func (f Uint64Flag) GetUsage() string { + return f.Usage +} + +// GetValue returns the flags value as string representation and an empty +// string if the flag takes no value at all. 
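
Each of these numeric flag types also supports a Destination pointer; ApplyWithError registers it with Uint64Var (or the matching variant), so the parsed value lands in a plain variable as well as being readable from the Context. A brief sketch, with an invented flag name and default:

package main

import (
	"fmt"
	"log"
	"os"

	"github.com/urfave/cli"
)

func main() {
	var limit uint64

	app := cli.NewApp()
	app.Name = "demo"
	app.Flags = []cli.Flag{
		cli.Uint64Flag{Name: "limit", Value: 100, Destination: &limit, Usage: "maximum number of items"},
	}
	app.Action = func(c *cli.Context) error {
		// With Destination set, the parsed value is written straight into limit as well.
		fmt.Println("limit:", limit, "==", c.Uint64("limit"))
		return nil
	}
	if err := app.Run(os.Args); err != nil {
		log.Fatal(err)
	}
}
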
+func (f Uint64Flag) GetValue() string { + return fmt.Sprintf("%d", f.Value) +} + +// Apply populates the flag given the flag set and environment +// Ignores errors +func (f Uint64Flag) Apply(set *flag.FlagSet) { + _ = f.ApplyWithError(set) +} + +// ApplyWithError populates the flag given the flag set and environment +func (f Uint64Flag) ApplyWithError(set *flag.FlagSet) error { + if envVal, ok := flagFromFileEnv(f.FilePath, f.EnvVar); ok { + envValInt, err := strconv.ParseUint(envVal, 0, 64) + if err != nil { + return fmt.Errorf("could not parse %s as uint64 value for flag %s: %s", envVal, f.Name, err) + } + + f.Value = envValInt + } + + eachName(f.Name, func(name string) { + if f.Destination != nil { + set.Uint64Var(f.Destination, name, f.Value, f.Usage) + return + } + set.Uint64(name, f.Value, f.Usage) + }) + + return nil +} + +// Uint64 looks up the value of a local Uint64Flag, returns +// 0 if not found +func (c *Context) Uint64(name string) uint64 { + return lookupUint64(name, c.flagSet) +} + +// GlobalUint64 looks up the value of a global Uint64Flag, returns +// 0 if not found +func (c *Context) GlobalUint64(name string) uint64 { + if fs := lookupGlobalFlagSet(name, c); fs != nil { + return lookupUint64(name, fs) + } + return 0 +} + +func lookupUint64(name string, set *flag.FlagSet) uint64 { + f := set.Lookup(name) + if f != nil { + parsed, err := strconv.ParseUint(f.Value.String(), 0, 64) + if err != nil { + return 0 + } + return parsed + } + return 0 +} diff --git a/vendor/github.com/urfave/cli/funcs.go b/vendor/github.com/urfave/cli/funcs.go index cba5e6cb0..0036b1130 100644 --- a/vendor/github.com/urfave/cli/funcs.go +++ b/vendor/github.com/urfave/cli/funcs.go @@ -23,6 +23,22 @@ type CommandNotFoundFunc func(*Context, string) // is displayed and the execution is interrupted. type OnUsageErrorFunc func(context *Context, err error, isSubcommand bool) error +// ExitErrHandlerFunc is executed if provided in order to handle ExitError values +// returned by Actions and Before/After functions. +type ExitErrHandlerFunc func(context *Context, err error) + // FlagStringFunc is used by the help generation to display a flag, which is // expected to be a single line. type FlagStringFunc func(Flag) string + +// FlagNamePrefixFunc is used by the default FlagStringFunc to create prefix +// text for a flag's full name. +type FlagNamePrefixFunc func(fullName, placeholder string) string + +// FlagEnvHintFunc is used by the default FlagStringFunc to annotate flag help +// with the environment variable details. +type FlagEnvHintFunc func(envVar, str string) string + +// FlagFileHintFunc is used by the default FlagStringFunc to annotate flag help +// with the file path details. +type FlagFileHintFunc func(filePath, str string) string diff --git a/vendor/github.com/urfave/cli/generate-flag-types b/vendor/github.com/urfave/cli/generate-flag-types deleted file mode 100644 index 7147381ce..000000000 --- a/vendor/github.com/urfave/cli/generate-flag-types +++ /dev/null @@ -1,255 +0,0 @@ -#!/usr/bin/env python -""" -The flag types that ship with the cli library have many things in common, and -so we can take advantage of the `go generate` command to create much of the -source code from a list of definitions. These definitions attempt to cover -the parts that vary between flag types, and should evolve as needed. 
- -An example of the minimum definition needed is: - - { - "name": "SomeType", - "type": "sometype", - "context_default": "nil" - } - -In this example, the code generated for the `cli` package will include a type -named `SomeTypeFlag` that is expected to wrap a value of type `sometype`. -Fetching values by name via `*cli.Context` will default to a value of `nil`. - -A more complete, albeit somewhat redundant, example showing all available -definition keys is: - - { - "name": "VeryMuchType", - "type": "*VeryMuchType", - "value": true, - "dest": false, - "doctail": " which really only wraps a []float64, oh well!", - "context_type": "[]float64", - "context_default": "nil", - "parser": "parseVeryMuchType(f.Value.String())", - "parser_cast": "[]float64(parsed)" - } - -The meaning of each field is as follows: - - name (string) - The type "name", which will be suffixed with - `Flag` when generating the type definition - for `cli` and the wrapper type for `altsrc` - type (string) - The type that the generated `Flag` type for `cli` - is expected to "contain" as its `.Value` member - value (bool) - Should the generated `cli` type have a `Value` - member? - dest (bool) - Should the generated `cli` type support a - destination pointer? - doctail (string) - Additional docs for the `cli` flag type comment - context_type (string) - The literal type used in the `*cli.Context` - reader func signature - context_default (string) - The literal value used as the default by the - `*cli.Context` reader funcs when no value is - present - parser (string) - Literal code used to parse the flag `f`, - expected to have a return signature of - (value, error) - parser_cast (string) - Literal code used to cast the `parsed` value - returned from the `parser` code -""" - -from __future__ import print_function, unicode_literals - -import argparse -import json -import os -import subprocess -import sys -import tempfile -import textwrap - - -class _FancyFormatter(argparse.ArgumentDefaultsHelpFormatter, - argparse.RawDescriptionHelpFormatter): - pass - - -def main(sysargs=sys.argv[:]): - parser = argparse.ArgumentParser( - description='Generate flag type code!', - formatter_class=_FancyFormatter) - parser.add_argument( - 'package', - type=str, default='cli', choices=_WRITEFUNCS.keys(), - help='Package for which flag types will be generated' - ) - parser.add_argument( - '-i', '--in-json', - type=argparse.FileType('r'), - default=sys.stdin, - help='Input JSON file which defines each type to be generated' - ) - parser.add_argument( - '-o', '--out-go', - type=argparse.FileType('w'), - default=sys.stdout, - help='Output file/stream to which generated source will be written' - ) - parser.epilog = __doc__ - - args = parser.parse_args(sysargs[1:]) - _generate_flag_types(_WRITEFUNCS[args.package], args.out_go, args.in_json) - return 0 - - -def _generate_flag_types(writefunc, output_go, input_json): - types = json.load(input_json) - - tmp = tempfile.NamedTemporaryFile(suffix='.go', delete=False) - writefunc(tmp, types) - tmp.close() - - new_content = subprocess.check_output( - ['goimports', tmp.name] - ).decode('utf-8') - - print(new_content, file=output_go, end='') - output_go.flush() - os.remove(tmp.name) - - -def _set_typedef_defaults(typedef): - typedef.setdefault('doctail', '') - typedef.setdefault('context_type', typedef['type']) - typedef.setdefault('dest', True) - typedef.setdefault('value', True) - typedef.setdefault('parser', 'f.Value, error(nil)') - typedef.setdefault('parser_cast', 'parsed') - - -def 
_write_cli_flag_types(outfile, types): - _fwrite(outfile, """\ - package cli - - // WARNING: This file is generated! - - """) - - for typedef in types: - _set_typedef_defaults(typedef) - - _fwrite(outfile, """\ - // {name}Flag is a flag with type {type}{doctail} - type {name}Flag struct {{ - Name string - Usage string - EnvVar string - Hidden bool - """.format(**typedef)) - - if typedef['value']: - _fwrite(outfile, """\ - Value {type} - """.format(**typedef)) - - if typedef['dest']: - _fwrite(outfile, """\ - Destination *{type} - """.format(**typedef)) - - _fwrite(outfile, "\n}\n\n") - - _fwrite(outfile, """\ - // String returns a readable representation of this value - // (for usage defaults) - func (f {name}Flag) String() string {{ - return FlagStringer(f) - }} - - // GetName returns the name of the flag - func (f {name}Flag) GetName() string {{ - return f.Name - }} - - // {name} looks up the value of a local {name}Flag, returns - // {context_default} if not found - func (c *Context) {name}(name string) {context_type} {{ - return lookup{name}(name, c.flagSet) - }} - - // Global{name} looks up the value of a global {name}Flag, returns - // {context_default} if not found - func (c *Context) Global{name}(name string) {context_type} {{ - if fs := lookupGlobalFlagSet(name, c); fs != nil {{ - return lookup{name}(name, fs) - }} - return {context_default} - }} - - func lookup{name}(name string, set *flag.FlagSet) {context_type} {{ - f := set.Lookup(name) - if f != nil {{ - parsed, err := {parser} - if err != nil {{ - return {context_default} - }} - return {parser_cast} - }} - return {context_default} - }} - """.format(**typedef)) - - -def _write_altsrc_flag_types(outfile, types): - _fwrite(outfile, """\ - package altsrc - - import ( - "gopkg.in/urfave/cli.v1" - ) - - // WARNING: This file is generated! 
- - """) - - for typedef in types: - _set_typedef_defaults(typedef) - - _fwrite(outfile, """\ - // {name}Flag is the flag type that wraps cli.{name}Flag to allow - // for other values to be specified - type {name}Flag struct {{ - cli.{name}Flag - set *flag.FlagSet - }} - - // New{name}Flag creates a new {name}Flag - func New{name}Flag(fl cli.{name}Flag) *{name}Flag {{ - return &{name}Flag{{{name}Flag: fl, set: nil}} - }} - - // Apply saves the flagSet for later usage calls, then calls the - // wrapped {name}Flag.Apply - func (f *{name}Flag) Apply(set *flag.FlagSet) {{ - f.set = set - f.{name}Flag.Apply(set) - }} - - // ApplyWithError saves the flagSet for later usage calls, then calls the - // wrapped {name}Flag.ApplyWithError - func (f *{name}Flag) ApplyWithError(set *flag.FlagSet) error {{ - f.set = set - return f.{name}Flag.ApplyWithError(set) - }} - """.format(**typedef)) - - -def _fwrite(outfile, text): - print(textwrap.dedent(text), end='', file=outfile) - - -_WRITEFUNCS = { - 'cli': _write_cli_flag_types, - 'altsrc': _write_altsrc_flag_types -} - -if __name__ == '__main__': - sys.exit(main()) diff --git a/vendor/github.com/urfave/cli/go.mod b/vendor/github.com/urfave/cli/go.mod new file mode 100644 index 000000000..7d04d2016 --- /dev/null +++ b/vendor/github.com/urfave/cli/go.mod @@ -0,0 +1,9 @@ +module github.com/urfave/cli + +go 1.11 + +require ( + github.com/BurntSushi/toml v0.3.1 + github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d + gopkg.in/yaml.v2 v2.2.2 +) diff --git a/vendor/github.com/urfave/cli/go.sum b/vendor/github.com/urfave/cli/go.sum new file mode 100644 index 000000000..ef121ff5d --- /dev/null +++ b/vendor/github.com/urfave/cli/go.sum @@ -0,0 +1,14 @@ +github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d h1:U+s90UTSYgptZMwQh2aRr3LuazLJIa+Pg3Kc1ylSYVY= +github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q= +github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo= +github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/vendor/github.com/urfave/cli/help.go b/vendor/github.com/urfave/cli/help.go index 57ec98d58..7a4ef6925 100644 --- a/vendor/github.com/urfave/cli/help.go +++ b/vendor/github.com/urfave/cli/help.go @@ -7,77 +7,9 @@ import ( "strings" "text/tabwriter" "text/template" + "unicode/utf8" ) -// AppHelpTemplate is the text template for the Default help topic. -// cli.go uses text/template to render templates. You can -// render custom help text by setting this variable. 
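
The help templates removed from help.go here reappear below in the new template.go; they stay exported, so the documented way to customize help output is still to assign to these variables before running the app. A small sketch, assuming the exported cli.AppHelpTemplate variable; the extra section text is invented:

package main

import (
	"log"
	"os"

	"github.com/urfave/cli"
)

func main() {
	// Append an extra section to the stock template instead of replacing it wholesale.
	cli.AppHelpTemplate = cli.AppHelpTemplate + `
SUPPORT:
   https://example.com/support
`

	app := cli.NewApp()
	app.Name = "demo"
	app.Usage = "demonstrates a customized help template"

	// With no Action set, running the app falls back to showing help, rendered
	// with the extended template.
	if err := app.Run(os.Args); err != nil {
		log.Fatal(err)
	}
}
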
-var AppHelpTemplate = `NAME: - {{.Name}}{{if .Usage}} - {{.Usage}}{{end}} - -USAGE: - {{if .UsageText}}{{.UsageText}}{{else}}{{.HelpName}} {{if .VisibleFlags}}[global options]{{end}}{{if .Commands}} command [command options]{{end}} {{if .ArgsUsage}}{{.ArgsUsage}}{{else}}[arguments...]{{end}}{{end}}{{if .Version}}{{if not .HideVersion}} - -VERSION: - {{.Version}}{{end}}{{end}}{{if .Description}} - -DESCRIPTION: - {{.Description}}{{end}}{{if len .Authors}} - -AUTHOR{{with $length := len .Authors}}{{if ne 1 $length}}S{{end}}{{end}}: - {{range $index, $author := .Authors}}{{if $index}} - {{end}}{{$author}}{{end}}{{end}}{{if .VisibleCommands}} - -COMMANDS:{{range .VisibleCategories}}{{if .Name}} - {{.Name}}:{{end}}{{range .VisibleCommands}} - {{join .Names ", "}}{{"\t"}}{{.Usage}}{{end}}{{end}}{{end}}{{if .VisibleFlags}} - -GLOBAL OPTIONS: - {{range $index, $option := .VisibleFlags}}{{if $index}} - {{end}}{{$option}}{{end}}{{end}}{{if .Copyright}} - -COPYRIGHT: - {{.Copyright}}{{end}} -` - -// CommandHelpTemplate is the text template for the command help topic. -// cli.go uses text/template to render templates. You can -// render custom help text by setting this variable. -var CommandHelpTemplate = `NAME: - {{.HelpName}} - {{.Usage}} - -USAGE: - {{if .UsageText}}{{.UsageText}}{{else}}{{.HelpName}}{{if .VisibleFlags}} [command options]{{end}} {{if .ArgsUsage}}{{.ArgsUsage}}{{else}}[arguments...]{{end}}{{end}}{{if .Category}} - -CATEGORY: - {{.Category}}{{end}}{{if .Description}} - -DESCRIPTION: - {{.Description}}{{end}}{{if .VisibleFlags}} - -OPTIONS: - {{range .VisibleFlags}}{{.}} - {{end}}{{end}} -` - -// SubcommandHelpTemplate is the text template for the subcommand help topic. -// cli.go uses text/template to render templates. You can -// render custom help text by setting this variable. -var SubcommandHelpTemplate = `NAME: - {{.HelpName}} - {{if .Description}}{{.Description}}{{else}}{{.Usage}}{{end}} - -USAGE: - {{if .UsageText}}{{.UsageText}}{{else}}{{.HelpName}} command{{if .VisibleFlags}} [command options]{{end}} {{if .ArgsUsage}}{{.ArgsUsage}}{{else}}[arguments...]{{end}}{{end}} - -COMMANDS:{{range .VisibleCategories}}{{if .Name}} - {{.Name}}:{{end}}{{range .VisibleCommands}} - {{join .Names ", "}}{{"\t"}}{{.Usage}}{{end}} -{{end}}{{if .VisibleFlags}} -OPTIONS: - {{range .VisibleFlags}}{{.}} - {{end}}{{end}} -` - var helpCommand = Command{ Name: "help", Aliases: []string{"h"}, @@ -89,7 +21,7 @@ var helpCommand = Command{ return ShowCommandHelp(c, args.First()) } - ShowAppHelp(c) + _ = ShowAppHelp(c) return nil }, } @@ -129,7 +61,7 @@ var VersionPrinter = printVersion // ShowAppHelpAndExit - Prints the list of subcommands for the app and exits with exit code. 
func ShowAppHelpAndExit(c *Context, exitCode int) { - ShowAppHelp(c) + _ = ShowAppHelp(c) os.Exit(exitCode) } @@ -153,19 +85,94 @@ func ShowAppHelp(c *Context) (err error) { // DefaultAppComplete prints the list of subcommands as the default app completion method func DefaultAppComplete(c *Context) { - for _, command := range c.App.Commands { + DefaultCompleteWithFlags(nil)(c) +} + +func printCommandSuggestions(commands []Command, writer io.Writer) { + for _, command := range commands { if command.Hidden { continue } - for _, name := range command.Names() { - fmt.Fprintln(c.App.Writer, name) + if os.Getenv("_CLI_ZSH_AUTOCOMPLETE_HACK") == "1" { + for _, name := range command.Names() { + _, _ = fmt.Fprintf(writer, "%s:%s\n", name, command.Usage) + } + } else { + for _, name := range command.Names() { + _, _ = fmt.Fprintf(writer, "%s\n", name) + } + } + } +} + +func cliArgContains(flagName string) bool { + for _, name := range strings.Split(flagName, ",") { + name = strings.TrimSpace(name) + count := utf8.RuneCountInString(name) + if count > 2 { + count = 2 + } + flag := fmt.Sprintf("%s%s", strings.Repeat("-", count), name) + for _, a := range os.Args { + if a == flag { + return true + } + } + } + return false +} + +func printFlagSuggestions(lastArg string, flags []Flag, writer io.Writer) { + cur := strings.TrimPrefix(lastArg, "-") + cur = strings.TrimPrefix(cur, "-") + for _, flag := range flags { + if bflag, ok := flag.(BoolFlag); ok && bflag.Hidden { + continue + } + for _, name := range strings.Split(flag.GetName(), ",") { + name = strings.TrimSpace(name) + // this will get total count utf8 letters in flag name + count := utf8.RuneCountInString(name) + if count > 2 { + count = 2 // resuse this count to generate single - or -- in flag completion + } + // if flag name has more than one utf8 letter and last argument in cli has -- prefix then + // skip flag completion for short flags example -v or -x + if strings.HasPrefix(lastArg, "--") && count == 1 { + continue + } + // match if last argument matches this flag and it is not repeated + if strings.HasPrefix(name, cur) && cur != name && !cliArgContains(flag.GetName()) { + flagCompletion := fmt.Sprintf("%s%s", strings.Repeat("-", count), name) + _, _ = fmt.Fprintln(writer, flagCompletion) + } + } + } +} + +func DefaultCompleteWithFlags(cmd *Command) func(c *Context) { + return func(c *Context) { + if len(os.Args) > 2 { + lastArg := os.Args[len(os.Args)-2] + if strings.HasPrefix(lastArg, "-") { + printFlagSuggestions(lastArg, c.App.Flags, c.App.Writer) + if cmd != nil { + printFlagSuggestions(lastArg, cmd.Flags, c.App.Writer) + } + return + } + } + if cmd != nil { + printCommandSuggestions(cmd.Subcommands, c.App.Writer) + } else { + printCommandSuggestions(c.App.Commands, c.App.Writer) } } } // ShowCommandHelpAndExit - exits with code after showing help func ShowCommandHelpAndExit(c *Context, command string, code int) { - ShowCommandHelp(c, command) + _ = ShowCommandHelp(c, command) os.Exit(code) } @@ -207,7 +214,7 @@ func ShowVersion(c *Context) { } func printVersion(c *Context) { - fmt.Fprintf(c.App.Writer, "%v version %v\n", c.App.Name, c.App.Version) + _, _ = fmt.Fprintf(c.App.Writer, "%v version %v\n", c.App.Name, c.App.Version) } // ShowCompletions prints the lists of commands within a given context @@ -221,19 +228,22 @@ func ShowCompletions(c *Context) { // ShowCommandCompletions prints the custom completions for a given command func ShowCommandCompletions(ctx *Context, command string) { c := ctx.App.Command(command) - if c != nil && 
c.BashComplete != nil { - c.BashComplete(ctx) + if c != nil { + if c.BashComplete != nil { + c.BashComplete(ctx) + } else { + DefaultCompleteWithFlags(c)(ctx) + } } + } func printHelpCustom(out io.Writer, templ string, data interface{}, customFunc map[string]interface{}) { funcMap := template.FuncMap{ "join": strings.Join, } - if customFunc != nil { - for key, value := range customFunc { - funcMap[key] = value - } + for key, value := range customFunc { + funcMap[key] = value } w := tabwriter.NewWriter(out, 1, 8, 2, ' ', 0) @@ -243,11 +253,11 @@ func printHelpCustom(out io.Writer, templ string, data interface{}, customFunc m // If the writer is closed, t.Execute will fail, and there's nothing // we can do to recover. if os.Getenv("CLI_TEMPLATE_ERROR_DEBUG") != "" { - fmt.Fprintf(ErrWriter, "CLI TEMPLATE ERROR: %#v\n", err) + _, _ = fmt.Fprintf(ErrWriter, "CLI TEMPLATE ERROR: %#v\n", err) } return } - w.Flush() + _ = w.Flush() } func printHelp(out io.Writer, templ string, data interface{}) { @@ -280,7 +290,7 @@ func checkHelp(c *Context) bool { func checkCommandHelp(c *Context, name string) bool { if c.Bool("h") || c.Bool("help") { - ShowCommandHelp(c, name) + _ = ShowCommandHelp(c, name) return true } @@ -289,7 +299,7 @@ func checkCommandHelp(c *Context, name string) bool { func checkSubcommandHelp(c *Context) bool { if c.Bool("h") || c.Bool("help") { - ShowSubcommandHelp(c) + _ = ShowSubcommandHelp(c) return true } diff --git a/vendor/github.com/urfave/cli/parse.go b/vendor/github.com/urfave/cli/parse.go new file mode 100644 index 000000000..865accf10 --- /dev/null +++ b/vendor/github.com/urfave/cli/parse.go @@ -0,0 +1,80 @@ +package cli + +import ( + "flag" + "strings" +) + +type iterativeParser interface { + newFlagSet() (*flag.FlagSet, error) + useShortOptionHandling() bool +} + +// To enable short-option handling (e.g., "-it" vs "-i -t") we have to +// iteratively catch parsing errors. This way we achieve LR parsing without +// transforming any arguments. Otherwise, there is no way we can discriminate +// combined short options from common arguments that should be left untouched. +func parseIter(ip iterativeParser, args []string) (*flag.FlagSet, error) { + for { + set, err := ip.newFlagSet() + if err != nil { + return nil, err + } + + err = set.Parse(args) + if !ip.useShortOptionHandling() || err == nil { + return set, err + } + + errStr := err.Error() + trimmed := strings.TrimPrefix(errStr, "flag provided but not defined: ") + if errStr == trimmed { + return nil, err + } + + // regenerate the initial args with the split short opts + newArgs := []string{} + for i, arg := range args { + if arg != trimmed { + newArgs = append(newArgs, arg) + continue + } + + shortOpts := splitShortOptions(set, trimmed) + if len(shortOpts) == 1 { + return nil, err + } + + // add each short option and all remaining arguments + newArgs = append(newArgs, shortOpts...) + newArgs = append(newArgs, args[i+1:]...) 
+ args = newArgs + } + } +} + +func splitShortOptions(set *flag.FlagSet, arg string) []string { + shortFlagsExist := func(s string) bool { + for _, c := range s[1:] { + if f := set.Lookup(string(c)); f == nil { + return false + } + } + return true + } + + if !isSplittable(arg) || !shortFlagsExist(arg) { + return []string{arg} + } + + separated := make([]string, 0, len(arg)-1) + for _, flagChar := range arg[1:] { + separated = append(separated, "-"+string(flagChar)) + } + + return separated +} + +func isSplittable(flagArg string) bool { + return strings.HasPrefix(flagArg, "-") && !strings.HasPrefix(flagArg, "--") && len(flagArg) > 2 +} diff --git a/vendor/github.com/urfave/cli/runtests b/vendor/github.com/urfave/cli/runtests deleted file mode 100644 index ee22bdeed..000000000 --- a/vendor/github.com/urfave/cli/runtests +++ /dev/null @@ -1,122 +0,0 @@ -#!/usr/bin/env python -from __future__ import print_function - -import argparse -import os -import sys -import tempfile - -from subprocess import check_call, check_output - - -PACKAGE_NAME = os.environ.get( - 'CLI_PACKAGE_NAME', 'github.com/urfave/cli' -) - - -def main(sysargs=sys.argv[:]): - targets = { - 'vet': _vet, - 'test': _test, - 'gfmrun': _gfmrun, - 'toc': _toc, - 'gen': _gen, - } - - parser = argparse.ArgumentParser() - parser.add_argument( - 'target', nargs='?', choices=tuple(targets.keys()), default='test' - ) - args = parser.parse_args(sysargs[1:]) - - targets[args.target]() - return 0 - - -def _test(): - if check_output('go version'.split()).split()[2] < 'go1.2': - _run('go test -v .') - return - - coverprofiles = [] - for subpackage in ['', 'altsrc']: - coverprofile = 'cli.coverprofile' - if subpackage != '': - coverprofile = '{}.coverprofile'.format(subpackage) - - coverprofiles.append(coverprofile) - - _run('go test -v'.split() + [ - '-coverprofile={}'.format(coverprofile), - ('{}/{}'.format(PACKAGE_NAME, subpackage)).rstrip('/') - ]) - - combined_name = _combine_coverprofiles(coverprofiles) - _run('go tool cover -func={}'.format(combined_name)) - os.remove(combined_name) - - -def _gfmrun(): - go_version = check_output('go version'.split()).split()[2] - if go_version < 'go1.3': - print('runtests: skip on {}'.format(go_version), file=sys.stderr) - return - _run(['gfmrun', '-c', str(_gfmrun_count()), '-s', 'README.md']) - - -def _vet(): - _run('go vet ./...') - - -def _toc(): - _run('node_modules/.bin/markdown-toc -i README.md') - _run('git diff --exit-code') - - -def _gen(): - go_version = check_output('go version'.split()).split()[2] - if go_version < 'go1.5': - print('runtests: skip on {}'.format(go_version), file=sys.stderr) - return - - _run('go generate ./...') - _run('git diff --exit-code') - - -def _run(command): - if hasattr(command, 'split'): - command = command.split() - print('runtests: {}'.format(' '.join(command)), file=sys.stderr) - check_call(command) - - -def _gfmrun_count(): - with open('README.md') as infile: - lines = infile.read().splitlines() - return len(filter(_is_go_runnable, lines)) - - -def _is_go_runnable(line): - return line.startswith('package main') - - -def _combine_coverprofiles(coverprofiles): - combined = tempfile.NamedTemporaryFile( - suffix='.coverprofile', delete=False - ) - combined.write('mode: set\n') - - for coverprofile in coverprofiles: - with open(coverprofile, 'r') as infile: - for line in infile.readlines(): - if not line.startswith('mode: '): - combined.write(line) - - combined.flush() - name = combined.name - combined.close() - return name - - -if __name__ == '__main__': - 
sys.exit(main()) diff --git a/vendor/github.com/urfave/cli/sort.go b/vendor/github.com/urfave/cli/sort.go new file mode 100644 index 000000000..23d1c2f77 --- /dev/null +++ b/vendor/github.com/urfave/cli/sort.go @@ -0,0 +1,29 @@ +package cli + +import "unicode" + +// lexicographicLess compares strings alphabetically considering case. +func lexicographicLess(i, j string) bool { + iRunes := []rune(i) + jRunes := []rune(j) + + lenShared := len(iRunes) + if lenShared > len(jRunes) { + lenShared = len(jRunes) + } + + for index := 0; index < lenShared; index++ { + ir := iRunes[index] + jr := jRunes[index] + + if lir, ljr := unicode.ToLower(ir), unicode.ToLower(jr); lir != ljr { + return lir < ljr + } + + if ir != jr { + return ir < jr + } + } + + return i < j +} diff --git a/vendor/github.com/urfave/cli/template.go b/vendor/github.com/urfave/cli/template.go new file mode 100644 index 000000000..c631fb97d --- /dev/null +++ b/vendor/github.com/urfave/cli/template.go @@ -0,0 +1,121 @@ +package cli + +// AppHelpTemplate is the text template for the Default help topic. +// cli.go uses text/template to render templates. You can +// render custom help text by setting this variable. +var AppHelpTemplate = `NAME: + {{.Name}}{{if .Usage}} - {{.Usage}}{{end}} + +USAGE: + {{if .UsageText}}{{.UsageText}}{{else}}{{.HelpName}} {{if .VisibleFlags}}[global options]{{end}}{{if .Commands}} command [command options]{{end}} {{if .ArgsUsage}}{{.ArgsUsage}}{{else}}[arguments...]{{end}}{{end}}{{if .Version}}{{if not .HideVersion}} + +VERSION: + {{.Version}}{{end}}{{end}}{{if .Description}} + +DESCRIPTION: + {{.Description}}{{end}}{{if len .Authors}} + +AUTHOR{{with $length := len .Authors}}{{if ne 1 $length}}S{{end}}{{end}}: + {{range $index, $author := .Authors}}{{if $index}} + {{end}}{{$author}}{{end}}{{end}}{{if .VisibleCommands}} + +COMMANDS:{{range .VisibleCategories}}{{if .Name}} + + {{.Name}}:{{range .VisibleCommands}} + {{join .Names ", "}}{{"\t"}}{{.Usage}}{{end}}{{else}}{{range .VisibleCommands}} + {{join .Names ", "}}{{"\t"}}{{.Usage}}{{end}}{{end}}{{end}}{{end}}{{if .VisibleFlags}} + +GLOBAL OPTIONS: + {{range $index, $option := .VisibleFlags}}{{if $index}} + {{end}}{{$option}}{{end}}{{end}}{{if .Copyright}} + +COPYRIGHT: + {{.Copyright}}{{end}} +` + +// CommandHelpTemplate is the text template for the command help topic. +// cli.go uses text/template to render templates. You can +// render custom help text by setting this variable. +var CommandHelpTemplate = `NAME: + {{.HelpName}} - {{.Usage}} + +USAGE: + {{if .UsageText}}{{.UsageText}}{{else}}{{.HelpName}}{{if .VisibleFlags}} [command options]{{end}} {{if .ArgsUsage}}{{.ArgsUsage}}{{else}}[arguments...]{{end}}{{end}}{{if .Category}} + +CATEGORY: + {{.Category}}{{end}}{{if .Description}} + +DESCRIPTION: + {{.Description}}{{end}}{{if .VisibleFlags}} + +OPTIONS: + {{range .VisibleFlags}}{{.}} + {{end}}{{end}} +` + +// SubcommandHelpTemplate is the text template for the subcommand help topic. +// cli.go uses text/template to render templates. You can +// render custom help text by setting this variable. 
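+// Editorial note (illustrative usage): each of these template variables can be
+// overridden before the app runs, e.g.
+//   cli.SubcommandHelpTemplate = cli.SubcommandHelpTemplate + "\nSUPPORT: https://example.com\n"
+// (the URL is a placeholder).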
+var SubcommandHelpTemplate = `NAME: + {{.HelpName}} - {{if .Description}}{{.Description}}{{else}}{{.Usage}}{{end}} + +USAGE: + {{if .UsageText}}{{.UsageText}}{{else}}{{.HelpName}} command{{if .VisibleFlags}} [command options]{{end}} {{if .ArgsUsage}}{{.ArgsUsage}}{{else}}[arguments...]{{end}}{{end}} + +COMMANDS:{{range .VisibleCategories}}{{if .Name}} + + {{.Name}}:{{range .VisibleCommands}} + {{join .Names ", "}}{{"\t"}}{{.Usage}}{{end}}{{else}}{{range .VisibleCommands}} + {{join .Names ", "}}{{"\t"}}{{.Usage}}{{end}}{{end}}{{end}}{{if .VisibleFlags}} + +OPTIONS: + {{range .VisibleFlags}}{{.}} + {{end}}{{end}} +` + +var MarkdownDocTemplate = `% {{ .App.Name }}(8) {{ .App.Description }} + +% {{ .App.Author }} + +# NAME + +{{ .App.Name }}{{ if .App.Usage }} - {{ .App.Usage }}{{ end }} + +# SYNOPSIS + +{{ .App.Name }} +{{ if .SynopsisArgs }} +` + "```" + ` +{{ range $v := .SynopsisArgs }}{{ $v }}{{ end }}` + "```" + ` +{{ end }}{{ if .App.UsageText }} +# DESCRIPTION + +{{ .App.UsageText }} +{{ end }} +**Usage**: + +` + "```" + ` +{{ .App.Name }} [GLOBAL OPTIONS] command [COMMAND OPTIONS] [ARGUMENTS...] +` + "```" + ` +{{ if .GlobalArgs }} +# GLOBAL OPTIONS +{{ range $v := .GlobalArgs }} +{{ $v }}{{ end }} +{{ end }}{{ if .Commands }} +# COMMANDS +{{ range $v := .Commands }} +{{ $v }}{{ end }}{{ end }}` + +var FishCompletionTemplate = `# {{ .App.Name }} fish shell completion + +function __fish_{{ .App.Name }}_no_subcommand --description 'Test if there has been any subcommand yet' + for i in (commandline -opc) + if contains -- $i{{ range $v := .AllCommands }} {{ $v }}{{ end }} + return 1 + end + end + return 0 +end + +{{ range $v := .Completions }}{{ $v }} +{{ end }}` diff --git a/vendor/golang.org/x/crypto/acme/acme.go b/vendor/golang.org/x/crypto/acme/acme.go index fa365b7b6..02fde12db 100644 --- a/vendor/golang.org/x/crypto/acme/acme.go +++ b/vendor/golang.org/x/crypto/acme/acme.go @@ -4,7 +4,10 @@ // Package acme provides an implementation of the // Automatic Certificate Management Environment (ACME) spec. -// See https://tools.ietf.org/html/draft-ietf-acme-acme-02 for details. +// The intial implementation was based on ACME draft-02 and +// is now being extended to comply with RFC 8555. +// See https://tools.ietf.org/html/draft-ietf-acme-acme-02 +// and https://tools.ietf.org/html/rfc8555 for details. // // Most common scenarios will want to use autocert subdirectory instead, // which provides automatic access to certificates from Let's Encrypt @@ -41,7 +44,7 @@ import ( const ( // LetsEncryptURL is the Directory endpoint of Let's Encrypt CA. - LetsEncryptURL = "https://acme-v01.api.letsencrypt.org/directory" + LetsEncryptURL = "https://acme-v02.api.letsencrypt.org/directory" // ALPNProto is the ALPN protocol name used by a CA server when validating // tls-alpn-01 challenges. @@ -57,7 +60,10 @@ var idPeACMEIdentifierV1 = asn1.ObjectIdentifier{1, 3, 6, 1, 5, 5, 7, 1, 30, 1} const ( maxChainLen = 5 // max depth and breadth of a certificate chain - maxCertSize = 1 << 20 // max size of a certificate, in bytes + maxCertSize = 1 << 20 // max size of a certificate, in DER bytes + // Used for decoding certs from application/pem-certificate-chain response, + // the default when in RFC mode. + maxCertChainSize = maxCertSize * maxChainLen // Max number of collected nonces kept in memory. // Expect usual peak of 1 or 2. @@ -116,21 +122,48 @@ type Client struct { // identifiable by the server, in case they are causing issues. 
UserAgent string - dirMu sync.Mutex // guards writes to dir - dir *Directory // cached result of Client's Discover method + cacheMu sync.Mutex + dir *Directory // cached result of Client's Discover method + kid keyID // cached Account.URI obtained from registerRFC or getAccountRFC noncesMu sync.Mutex nonces map[string]struct{} // nonces collected from previous responses } +// accountKID returns a key ID associated with c.Key, the account identity +// provided by the CA during RFC based registration. +// It assumes c.Discover has already been called. +// +// accountKID requires at most one network roundtrip. +// It caches only successful result. +// +// When in pre-RFC mode or when c.getRegRFC responds with an error, accountKID +// returns noKeyID. +func (c *Client) accountKID(ctx context.Context) keyID { + c.cacheMu.Lock() + defer c.cacheMu.Unlock() + if !c.dir.rfcCompliant() { + return noKeyID + } + if c.kid != noKeyID { + return c.kid + } + a, err := c.getRegRFC(ctx) + if err != nil { + return noKeyID + } + c.kid = keyID(a.URI) + return c.kid +} + // Discover performs ACME server discovery using c.DirectoryURL. // // It caches successful result. So, subsequent calls will not result in // a network round-trip. This also means mutating c.DirectoryURL after successful call // of this method will have no effect. func (c *Client) Discover(ctx context.Context) (Directory, error) { - c.dirMu.Lock() - defer c.dirMu.Unlock() + c.cacheMu.Lock() + defer c.cacheMu.Unlock() if c.dir != nil { return *c.dir, nil } @@ -143,27 +176,53 @@ func (c *Client) Discover(ctx context.Context) (Directory, error) { c.addNonce(res.Header) var v struct { - Reg string `json:"new-reg"` - Authz string `json:"new-authz"` - Cert string `json:"new-cert"` - Revoke string `json:"revoke-cert"` - Meta struct { - Terms string `json:"terms-of-service"` - Website string `json:"website"` - CAA []string `json:"caa-identities"` + Reg string `json:"new-reg"` + RegRFC string `json:"newAccount"` + Authz string `json:"new-authz"` + AuthzRFC string `json:"newAuthz"` + OrderRFC string `json:"newOrder"` + Cert string `json:"new-cert"` + Revoke string `json:"revoke-cert"` + RevokeRFC string `json:"revokeCert"` + NonceRFC string `json:"newNonce"` + KeyChangeRFC string `json:"keyChange"` + Meta struct { + Terms string `json:"terms-of-service"` + TermsRFC string `json:"termsOfService"` + WebsiteRFC string `json:"website"` + CAA []string `json:"caa-identities"` + CAARFC []string `json:"caaIdentities"` + ExternalAcctRFC bool `json:"externalAccountRequired"` } } if err := json.NewDecoder(res.Body).Decode(&v); err != nil { return Directory{}, err } + if v.OrderRFC == "" { + // Non-RFC compliant ACME CA. + c.dir = &Directory{ + RegURL: v.Reg, + AuthzURL: v.Authz, + CertURL: v.Cert, + RevokeURL: v.Revoke, + Terms: v.Meta.Terms, + Website: v.Meta.WebsiteRFC, + CAA: v.Meta.CAA, + } + return *c.dir, nil + } + // RFC compliant ACME CA. 
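+	// Editorial note (illustrative): an RFC 8555 directory response uses the camelCase
+	// keys above, e.g. {"newAccount":"...","newNonce":"...","newOrder":"...","revokeCert":"..."},
+	// and a non-empty "newOrder" entry is what selects this RFC branch over the legacy one.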
c.dir = &Directory{ - RegURL: v.Reg, - AuthzURL: v.Authz, - CertURL: v.Cert, - RevokeURL: v.Revoke, - Terms: v.Meta.Terms, - Website: v.Meta.Website, - CAA: v.Meta.CAA, + RegURL: v.RegRFC, + AuthzURL: v.AuthzRFC, + OrderURL: v.OrderRFC, + RevokeURL: v.RevokeRFC, + NonceURL: v.NonceRFC, + KeyChangeURL: v.KeyChangeRFC, + Terms: v.Meta.TermsRFC, + Website: v.Meta.WebsiteRFC, + CAA: v.Meta.CAARFC, + ExternalAccountRequired: v.Meta.ExternalAcctRFC, } return *c.dir, nil } @@ -176,6 +235,9 @@ func (c *Client) directoryURL() string { } // CreateCert requests a new certificate using the Certificate Signing Request csr encoded in DER format. +// It is incompatible with RFC 8555. Callers should use CreateOrderCert when interfacing +// with an RFC-compliant CA. +// // The exp argument indicates the desired certificate validity duration. CA may issue a certificate // with a different duration. // If the bundle argument is true, the returned value will also contain the CA (issuer) certificate chain. @@ -206,7 +268,7 @@ func (c *Client) CreateCert(ctx context.Context, csr []byte, exp time.Duration, req.NotAfter = now.Add(exp).Format(time.RFC3339) } - res, err := c.post(ctx, c.Key, c.dir.CertURL, req, wantStatus(http.StatusCreated)) + res, err := c.post(ctx, nil, c.dir.CertURL, req, wantStatus(http.StatusCreated)) if err != nil { return nil, "", err } @@ -227,12 +289,22 @@ func (c *Client) CreateCert(ctx context.Context, csr []byte, exp time.Duration, // It retries the request until the certificate is successfully retrieved, // context is cancelled by the caller or an error response is received. // -// The returned value will also contain the CA (issuer) certificate if the bundle argument is true. +// If the bundle argument is true, the returned value also contains the CA (issuer) +// certificate chain. // // FetchCert returns an error if the CA's response or chain was unreasonably large. // Callers are encouraged to parse the returned value to ensure the certificate is valid // and has expected features. func (c *Client) FetchCert(ctx context.Context, url string, bundle bool) ([][]byte, error) { + dir, err := c.Discover(ctx) + if err != nil { + return nil, err + } + if dir.rfcCompliant() { + return c.fetchCertRFC(ctx, url, bundle) + } + + // Legacy non-authenticated GET request. res, err := c.get(ctx, url, wantStatus(http.StatusOK)) if err != nil { return nil, err @@ -247,10 +319,15 @@ func (c *Client) FetchCert(ctx context.Context, url string, bundle bool) ([][]by // For instance, the key pair of the certificate may be authorized. // If the key is nil, c.Key is used instead. func (c *Client) RevokeCert(ctx context.Context, key crypto.Signer, cert []byte, reason CRLReasonCode) error { - if _, err := c.Discover(ctx); err != nil { + dir, err := c.Discover(ctx) + if err != nil { return err } + if dir.rfcCompliant() { + return c.revokeCertRFC(ctx, key, cert, reason) + } + // Legacy CA. body := &struct { Resource string `json:"resource"` Cert string `json:"certificate"` @@ -260,10 +337,7 @@ func (c *Client) RevokeCert(ctx context.Context, key crypto.Signer, cert []byte, Cert: base64.RawURLEncoding.EncodeToString(cert), Reason: int(reason), } - if key == nil { - key = c.Key - } - res, err := c.post(ctx, key, c.dir.RevokeURL, body, wantStatus(http.StatusOK)) + res, err := c.post(ctx, key, dir.RevokeURL, body, wantStatus(http.StatusOK)) if err != nil { return err } @@ -275,20 +349,30 @@ func (c *Client) RevokeCert(ctx context.Context, key crypto.Signer, cert []byte, // during account registration. 
See Register method of Client for more details. func AcceptTOS(tosURL string) bool { return true } -// Register creates a new account registration by following the "new-reg" flow. -// It returns the registered account. The account is not modified. +// Register creates a new account with the CA using c.Key. +// It returns the registered account. The account acct is not modified. // // The registration may require the caller to agree to the CA's Terms of Service (TOS). // If so, and the account has not indicated the acceptance of the terms (see Account for details), // Register calls prompt with a TOS URL provided by the CA. Prompt should report // whether the caller agrees to the terms. To always accept the terms, the caller can use AcceptTOS. -func (c *Client) Register(ctx context.Context, a *Account, prompt func(tosURL string) bool) (*Account, error) { - if _, err := c.Discover(ctx); err != nil { +// +// When interfacing with an RFC-compliant CA, non-RFC 8555 fields of acct are ignored +// and prompt is called if Directory's Terms field is non-zero. +// Also see Error's Instance field for when a CA requires already registered accounts to agree +// to an updated Terms of Service. +func (c *Client) Register(ctx context.Context, acct *Account, prompt func(tosURL string) bool) (*Account, error) { + dir, err := c.Discover(ctx) + if err != nil { return nil, err } + if dir.rfcCompliant() { + return c.registerRFC(ctx, acct, prompt) + } - var err error - if a, err = c.doReg(ctx, c.dir.RegURL, "new-reg", a); err != nil { + // Legacy ACME draft registration flow. + a, err := c.doReg(ctx, dir.RegURL, "new-reg", acct) + if err != nil { return nil, err } var accept bool @@ -302,9 +386,20 @@ func (c *Client) Register(ctx context.Context, a *Account, prompt func(tosURL st return a, err } -// GetReg retrieves an existing registration. -// The url argument is an Account URI. +// GetReg retrieves an existing account associated with c.Key. +// +// The url argument is an Account URI used with pre-RFC 8555 CAs. +// It is ignored when interfacing with an RFC-compliant CA. func (c *Client) GetReg(ctx context.Context, url string) (*Account, error) { + dir, err := c.Discover(ctx) + if err != nil { + return nil, err + } + if dir.rfcCompliant() { + return c.getRegRFC(ctx) + } + + // Legacy CA. a, err := c.doReg(ctx, url, "reg", nil) if err != nil { return nil, err @@ -315,9 +410,21 @@ func (c *Client) GetReg(ctx context.Context, url string) (*Account, error) { // UpdateReg updates an existing registration. // It returns an updated account copy. The provided account is not modified. -func (c *Client) UpdateReg(ctx context.Context, a *Account) (*Account, error) { - uri := a.URI - a, err := c.doReg(ctx, uri, "reg", a) +// +// When interfacing with RFC-compliant CAs, a.URI is ignored and the account URL +// associated with c.Key is used instead. +func (c *Client) UpdateReg(ctx context.Context, acct *Account) (*Account, error) { + dir, err := c.Discover(ctx) + if err != nil { + return nil, err + } + if dir.rfcCompliant() { + return c.updateRegRFC(ctx, acct) + } + + // Legacy CA. + uri := acct.URI + a, err := c.doReg(ctx, uri, "reg", acct) if err != nil { return nil, err } @@ -325,13 +432,21 @@ func (c *Client) UpdateReg(ctx context.Context, a *Account) (*Account, error) { return a, nil } -// Authorize performs the initial step in an authorization flow. +// Authorize performs the initial step in the pre-authorization flow, +// as opposed to order-based flow. 
// The caller will then need to choose from and perform a set of returned // challenges using c.Accept in order to successfully complete authorization. // +// Once complete, the caller can use AuthorizeOrder which the CA +// should provision with the already satisfied authorization. +// For pre-RFC CAs, the caller can proceed directly to requesting a certificate +// using CreateCert method. +// // If an authorization has been previously granted, the CA may return -// a valid authorization (Authorization.Status is StatusValid). If so, the caller -// need not fulfill any challenge and can proceed to requesting a certificate. +// a valid authorization which has its Status field set to StatusValid. +// +// More about pre-authorization can be found at +// https://tools.ietf.org/html/rfc8555#section-7.4.1. func (c *Client) Authorize(ctx context.Context, domain string) (*Authorization, error) { return c.authorize(ctx, "dns", domain) } @@ -362,7 +477,7 @@ func (c *Client) authorize(ctx context.Context, typ, val string) (*Authorization Resource: "new-authz", Identifier: authzID{Type: typ, Value: val}, } - res, err := c.post(ctx, c.Key, c.dir.AuthzURL, req, wantStatus(http.StatusCreated)) + res, err := c.post(ctx, nil, c.dir.AuthzURL, req, wantStatus(http.StatusCreated)) if err != nil { return nil, err } @@ -383,7 +498,17 @@ func (c *Client) authorize(ctx context.Context, typ, val string) (*Authorization // If a caller needs to poll an authorization until its status is final, // see the WaitAuthorization method. func (c *Client) GetAuthorization(ctx context.Context, url string) (*Authorization, error) { - res, err := c.get(ctx, url, wantStatus(http.StatusOK, http.StatusAccepted)) + dir, err := c.Discover(ctx) + if err != nil { + return nil, err + } + + var res *http.Response + if dir.rfcCompliant() { + res, err = c.postAsGet(ctx, url, wantStatus(http.StatusOK)) + } else { + res, err = c.get(ctx, url, wantStatus(http.StatusOK, http.StatusAccepted)) + } if err != nil { return nil, err } @@ -400,11 +525,16 @@ func (c *Client) GetAuthorization(ctx context.Context, url string) (*Authorizati // The url argument is an Authorization.URI value. // // If successful, the caller will be required to obtain a new authorization -// using the Authorize method before being able to request a new certificate -// for the domain associated with the authorization. +// using the Authorize or AuthorizeOrder methods before being able to request +// a new certificate for the domain associated with the authorization. // // It does not revoke existing certificates. func (c *Client) RevokeAuthorization(ctx context.Context, url string) error { + // Required for c.accountKID() when in RFC mode. + if _, err := c.Discover(ctx); err != nil { + return err + } + req := struct { Resource string `json:"resource"` Status string `json:"status"` @@ -414,7 +544,7 @@ func (c *Client) RevokeAuthorization(ctx context.Context, url string) error { Status: "deactivated", Delete: true, } - res, err := c.post(ctx, c.Key, url, req, wantStatus(http.StatusOK)) + res, err := c.post(ctx, nil, url, req, wantStatus(http.StatusOK)) if err != nil { return err } @@ -430,8 +560,18 @@ func (c *Client) RevokeAuthorization(ctx context.Context, url string) error { // In all other cases WaitAuthorization returns an error. // If the Status is StatusInvalid, the returned error is of type *AuthorizationError. func (c *Client) WaitAuthorization(ctx context.Context, url string) (*Authorization, error) { + // Required for c.accountKID() when in RFC mode. 
+ dir, err := c.Discover(ctx) + if err != nil { + return nil, err + } + getfn := c.postAsGet + if !dir.rfcCompliant() { + getfn = c.get + } + for { - res, err := c.get(ctx, url, wantStatus(http.StatusOK, http.StatusAccepted)) + res, err := getfn(ctx, url, wantStatus(http.StatusOK, http.StatusAccepted)) if err != nil { return nil, err } @@ -474,10 +614,21 @@ func (c *Client) WaitAuthorization(ctx context.Context, url string) (*Authorizat // // A client typically polls a challenge status using this method. func (c *Client) GetChallenge(ctx context.Context, url string) (*Challenge, error) { - res, err := c.get(ctx, url, wantStatus(http.StatusOK, http.StatusAccepted)) + // Required for c.accountKID() when in RFC mode. + dir, err := c.Discover(ctx) if err != nil { return nil, err } + + getfn := c.postAsGet + if !dir.rfcCompliant() { + getfn = c.get + } + res, err := getfn(ctx, url, wantStatus(http.StatusOK, http.StatusAccepted)) + if err != nil { + return nil, err + } + defer res.Body.Close() v := wireChallenge{URI: url} if err := json.NewDecoder(res.Body).Decode(&v); err != nil { @@ -491,21 +642,29 @@ func (c *Client) GetChallenge(ctx context.Context, url string) (*Challenge, erro // // The server will then perform the validation asynchronously. func (c *Client) Accept(ctx context.Context, chal *Challenge) (*Challenge, error) { - auth, err := keyAuth(c.Key.Public(), chal.Token) + // Required for c.accountKID() when in RFC mode. + dir, err := c.Discover(ctx) if err != nil { return nil, err } - req := struct { - Resource string `json:"resource"` - Type string `json:"type"` - Auth string `json:"keyAuthorization"` - }{ - Resource: "challenge", - Type: chal.Type, - Auth: auth, + var req interface{} = json.RawMessage("{}") // RFC-compliant CA + if !dir.rfcCompliant() { + auth, err := keyAuth(c.Key.Public(), chal.Token) + if err != nil { + return nil, err + } + req = struct { + Resource string `json:"resource"` + Type string `json:"type"` + Auth string `json:"keyAuthorization"` + }{ + Resource: "challenge", + Type: chal.Type, + Auth: auth, + } } - res, err := c.post(ctx, c.Key, chal.URI, req, wantStatus( + res, err := c.post(ctx, nil, chal.URI, req, wantStatus( http.StatusOK, // according to the spec http.StatusAccepted, // Let's Encrypt: see https://goo.gl/WsJ7VT (acme-divergences.md) )) @@ -555,21 +714,8 @@ func (c *Client) HTTP01ChallengePath(token string) string { } // TLSSNI01ChallengeCert creates a certificate for TLS-SNI-01 challenge response. -// Servers can present the certificate to validate the challenge and prove control -// over a domain name. // -// The implementation is incomplete in that the returned value is a single certificate, -// computed only for Z0 of the key authorization. ACME CAs are expected to update -// their implementations to use the newer version, TLS-SNI-02. -// For more details on TLS-SNI-01 see https://tools.ietf.org/html/draft-ietf-acme-acme-01#section-7.3. -// -// The token argument is a Challenge.Token value. -// If a WithKey option is provided, its private part signs the returned cert, -// and the public part is used to specify the signee. -// If no WithKey option is provided, a new ECDSA key is generated using P-256 curve. -// -// The returned certificate is valid for the next 24 hours and must be presented only when -// the server name of the TLS ClientHello matches exactly the returned name value. +// Deprecated: This challenge type is unused in both draft-02 and RFC versions of ACME spec. 
func (c *Client) TLSSNI01ChallengeCert(token string, opt ...CertOption) (cert tls.Certificate, name string, err error) { ka, err := keyAuth(c.Key.Public(), token) if err != nil { @@ -586,17 +732,8 @@ func (c *Client) TLSSNI01ChallengeCert(token string, opt ...CertOption) (cert tl } // TLSSNI02ChallengeCert creates a certificate for TLS-SNI-02 challenge response. -// Servers can present the certificate to validate the challenge and prove control -// over a domain name. For more details on TLS-SNI-02 see -// https://tools.ietf.org/html/draft-ietf-acme-acme-03#section-7.3. // -// The token argument is a Challenge.Token value. -// If a WithKey option is provided, its private part signs the returned cert, -// and the public part is used to specify the signee. -// If no WithKey option is provided, a new ECDSA key is generated using P-256 curve. -// -// The returned certificate is valid for the next 24 hours and must be presented only when -// the server name in the TLS ClientHello matches exactly the returned name value. +// Deprecated: This challenge type is unused in both draft-02 and RFC versions of ACME spec. func (c *Client) TLSSNI02ChallengeCert(token string, opt ...CertOption) (cert tls.Certificate, name string, err error) { b := sha256.Sum256([]byte(token)) h := hex.EncodeToString(b[:]) @@ -663,7 +800,7 @@ func (c *Client) TLSALPN01ChallengeCert(token, domain string, opt ...CertOption) return tlsChallengeCert([]string{domain}, newOpt) } -// doReg sends all types of registration requests. +// doReg sends all types of registration requests the old way (pre-RFC world). // The type of request is identified by typ argument, which is a "resource" // in the ACME spec terms. // @@ -682,7 +819,7 @@ func (c *Client) doReg(ctx context.Context, url string, typ string, acct *Accoun req.Contact = acct.Contact req.Agreement = acct.AgreedTerms } - res, err := c.post(ctx, c.Key, url, req, wantStatus( + res, err := c.post(ctx, nil, url, req, wantStatus( http.StatusOK, // updates and deletes http.StatusCreated, // new account creation http.StatusAccepted, // Let's Encrypt divergent implementation @@ -721,12 +858,16 @@ func (c *Client) doReg(ctx context.Context, url string, typ string, acct *Accoun } // popNonce returns a nonce value previously stored with c.addNonce -// or fetches a fresh one from a URL by issuing a HEAD request. -// It first tries c.directoryURL() and then the provided url if the former fails. +// or fetches a fresh one from c.dir.NonceURL. +// If NonceURL is empty, it first tries c.directoryURL() and, failing that, +// the provided url. func (c *Client) popNonce(ctx context.Context, url string) (string, error) { c.noncesMu.Lock() defer c.noncesMu.Unlock() if len(c.nonces) == 0 { + if c.dir != nil && c.dir.NonceURL != "" { + return c.fetchNonce(ctx, c.dir.NonceURL) + } dirURL := c.directoryURL() v, err := c.fetchNonce(ctx, dirURL) if err != nil && url != dirURL { diff --git a/vendor/golang.org/x/crypto/acme/autocert/autocert.go b/vendor/golang.org/x/crypto/acme/autocert/autocert.go index 70ab355f3..acae1c3bc 100644 --- a/vendor/golang.org/x/crypto/acme/autocert/autocert.go +++ b/vendor/golang.org/x/crypto/acme/autocert/autocert.go @@ -35,6 +35,9 @@ import ( "golang.org/x/net/idna" ) +// DefaultACMEDirectory is the default ACME Directory URL used when the Manager's Client is nil. +const DefaultACMEDirectory = "https://acme-v02.api.letsencrypt.org/directory" + // createCertRetryAfter is how much time to wait before removing a failed state // entry due to an unsuccessful createCert call. 
// This is a variable instead of a const for testing. @@ -88,9 +91,9 @@ func defaultHostPolicy(context.Context, string) error { } // Manager is a stateful certificate manager built on top of acme.Client. -// It obtains and refreshes certificates automatically using "tls-alpn-01", -// "tls-sni-01", "tls-sni-02" and "http-01" challenge types, -// as well as providing them to a TLS server via tls.Config. +// It obtains and refreshes certificates automatically using "tls-alpn-01" +// or "http-01" challenge types, as well as providing them to a TLS server +// via tls.Config. // // You must specify a cache implementation, such as DirCache, // to reuse obtained certificates across program restarts. @@ -135,9 +138,10 @@ type Manager struct { // Client is used to perform low-level operations, such as account registration // and requesting new certificates. // - // If Client is nil, a zero-value acme.Client is used with acme.LetsEncryptURL - // as directory endpoint. If the Client.Key is nil, a new ECDSA P-256 key is - // generated and, if Cache is not nil, stored in cache. + // If Client is nil, a zero-value acme.Client is used with DefaultACMEDirectory + // as the directory endpoint. + // If the Client.Key is nil, a new ECDSA P-256 key is generated and, + // if Cache is not nil, stored in cache. // // Mutating the field after the first call of GetCertificate method will have no effect. Client *acme.Client @@ -174,8 +178,8 @@ type Manager struct { renewalMu sync.Mutex renewal map[certKey]*domainRenewal - // tokensMu guards the rest of the fields: tryHTTP01, certTokens and httpTokens. - tokensMu sync.RWMutex + // challengeMu guards tryHTTP01, certTokens and httpTokens. + challengeMu sync.RWMutex // tryHTTP01 indicates whether the Manager should try "http-01" challenge type // during the authorization flow. tryHTTP01 bool @@ -184,12 +188,11 @@ type Manager struct { // to be provisioned. // The entries are stored for the duration of the authorization flow. httpTokens map[string][]byte - // certTokens contains temporary certificates for tls-sni and tls-alpn challenges - // and is keyed by token domain name, which matches server name of ClientHello. - // Keys always have ".acme.invalid" suffix for tls-sni. Otherwise, they are domain names - // for tls-alpn. + // certTokens contains temporary certificates for tls-alpn-01 challenges + // and is keyed by the domain name which matches the ClientHello server name. // The entries are stored for the duration of the authorization flow. certTokens map[string]*tls.Certificate + // nowFunc, if not nil, returns the current time. This may be set for // testing purposes. nowFunc func() time.Time @@ -226,7 +229,7 @@ func (m *Manager) TLSConfig() *tls.Config { // GetCertificate implements the tls.Config.GetCertificate hook. // It provides a TLS certificate for hello.ServerName host, including answering -// tls-alpn-01 and *.acme.invalid (tls-sni-01 and tls-sni-02) challenges. +// tls-alpn-01 challenges. // All other fields of hello are ignored. // // If m.HostPolicy is non-nil, GetCertificate calls the policy before requesting @@ -235,9 +238,7 @@ func (m *Manager) TLSConfig() *tls.Config { // This does not affect cached certs. See HostPolicy field description for more details. // // If GetCertificate is used directly, instead of via Manager.TLSConfig, package users will -// also have to add acme.ALPNProto to NextProtos for tls-alpn-01, or use HTTPHandler -// for http-01. 
(The tls-sni-* challenges have been deprecated by popular ACME providers -// due to security issues in the ecosystem.) +// also have to add acme.ALPNProto to NextProtos for tls-alpn-01, or use HTTPHandler for http-01. func (m *Manager) GetCertificate(hello *tls.ClientHelloInfo) (*tls.Certificate, error) { if m.Prompt == nil { return nil, errors.New("acme/autocert: Manager.Prompt not set") @@ -269,13 +270,10 @@ func (m *Manager) GetCertificate(hello *tls.ClientHelloInfo) (*tls.Certificate, ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) defer cancel() - // Check whether this is a token cert requested for TLS-SNI or TLS-ALPN challenge. + // Check whether this is a token cert requested for TLS-ALPN challenge. if wantsTokenCert(hello) { - m.tokensMu.RLock() - defer m.tokensMu.RUnlock() - // It's ok to use the same token cert key for both tls-sni and tls-alpn - // because there's always at most 1 token cert per on-going domain authorization. - // See m.verify for details. + m.challengeMu.RLock() + defer m.challengeMu.RUnlock() if cert := m.certTokens[name]; cert != nil { return cert, nil } @@ -318,8 +316,7 @@ func wantsTokenCert(hello *tls.ClientHelloInfo) bool { if len(hello.SupportedProtos) == 1 && hello.SupportedProtos[0] == acme.ALPNProto { return true } - // tls-sni-xx - return strings.HasSuffix(hello.ServerName, ".acme.invalid") + return false } func supportsECDSA(hello *tls.ClientHelloInfo) bool { @@ -384,8 +381,8 @@ func supportsECDSA(hello *tls.ClientHelloInfo) bool { // If HTTPHandler is never called, the Manager will only use the "tls-alpn-01" // challenge for domain verification. func (m *Manager) HTTPHandler(fallback http.Handler) http.Handler { - m.tokensMu.Lock() - defer m.tokensMu.Unlock() + m.challengeMu.Lock() + defer m.challengeMu.Unlock() m.tryHTTP01 = true if fallback == nil { @@ -648,71 +645,64 @@ func (m *Manager) certState(ck certKey) (*certState, error) { // authorizedCert starts the domain ownership verification process and requests a new cert upon success. // The key argument is the certificate private key. func (m *Manager) authorizedCert(ctx context.Context, key crypto.Signer, ck certKey) (der [][]byte, leaf *x509.Certificate, err error) { - client, err := m.acmeClient(ctx) - if err != nil { - return nil, nil, err - } - - if err := m.verify(ctx, client, ck.domain); err != nil { - return nil, nil, err - } csr, err := certRequest(key, ck.domain, m.ExtraExtensions) if err != nil { return nil, nil, err } - der, _, err = client.CreateCert(ctx, csr, 0, true) - if err != nil { - return nil, nil, err - } - leaf, err = validCert(ck, der, key, m.now()) - if err != nil { - return nil, nil, err - } - return der, leaf, nil -} -// revokePendingAuthz revokes all authorizations idenfied by the elements of uri slice. -// It ignores revocation errors. -func (m *Manager) revokePendingAuthz(ctx context.Context, uri []string) { client, err := m.acmeClient(ctx) if err != nil { - return + return nil, nil, err } - for _, u := range uri { - client.RevokeAuthorization(ctx, u) + dir, err := client.Discover(ctx) + if err != nil { + return nil, nil, err } + + var chain [][]byte + switch { + // Pre-RFC legacy CA. + case dir.OrderURL == "": + if err := m.verify(ctx, client, ck.domain); err != nil { + return nil, nil, err + } + der, _, err := client.CreateCert(ctx, csr, 0, true) + if err != nil { + return nil, nil, err + } + chain = der + // RFC 8555 compliant CA. 
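+	// Editorial note (illustrative): in this RFC 8555 branch, verifyRFC returns an order
+	// that is ready to be finalized; CreateOrderCert then posts the CSR to its
+	// FinalizeURL, waits for the order to become valid and downloads the issued chain.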
+ default: + o, err := m.verifyRFC(ctx, client, ck.domain) + if err != nil { + return nil, nil, err + } + der, _, err := client.CreateOrderCert(ctx, o.FinalizeURL, csr, true) + if err != nil { + return nil, nil, err + } + chain = der + } + leaf, err = validCert(ck, chain, key, m.now()) + if err != nil { + return nil, nil, err + } + return chain, leaf, nil } -// verify runs the identifier (domain) authorization flow +// verify runs the identifier (domain) pre-authorization flow for legacy CAs // using each applicable ACME challenge type. func (m *Manager) verify(ctx context.Context, client *acme.Client, domain string) error { - // The list of challenge types we'll try to fulfill - // in this specific order. - challengeTypes := []string{"tls-alpn-01", "tls-sni-02", "tls-sni-01"} - m.tokensMu.RLock() - if m.tryHTTP01 { - challengeTypes = append(challengeTypes, "http-01") - } - m.tokensMu.RUnlock() - - // Keep track of pending authzs and revoke the ones that did not validate. - pendingAuthzs := make(map[string]bool) + // Remove all hanging authorizations to reduce rate limit quotas + // after we're done. + var authzURLs []string defer func() { - var uri []string - for k, pending := range pendingAuthzs { - if pending { - uri = append(uri, k) - } - } - if len(uri) > 0 { - // Use "detached" background context. - // The revocations need not happen in the current verification flow. - go m.revokePendingAuthz(context.Background(), uri) - } + go m.deactivatePendingAuthz(authzURLs) }() // errs accumulates challenge failure errors, printed if all fail errs := make(map[*acme.Challenge]error) + challengeTypes := m.supportedChallengeTypes() var nextTyp int // challengeType index of the next challenge type to try for { // Start domain authorization and get the challenge. @@ -720,6 +710,7 @@ func (m *Manager) verify(ctx context.Context, client *acme.Client, domain string if err != nil { return err } + authzURLs = append(authzURLs, authz.URI) // No point in accepting challenges if the authorization status // is in a final state. switch authz.Status { @@ -729,8 +720,6 @@ func (m *Manager) verify(ctx context.Context, client *acme.Client, domain string return fmt.Errorf("acme/autocert: invalid authorization %q", authz.URI) } - pendingAuthzs[authz.URI] = true - // Pick the next preferred challenge. var chal *acme.Challenge for chal == nil && nextTyp < len(challengeTypes) { @@ -760,11 +749,126 @@ func (m *Manager) verify(ctx context.Context, client *acme.Client, domain string errs[chal] = err continue } - delete(pendingAuthzs, authz.URI) return nil } } +// verifyRFC runs the identifier (domain) order-based authorization flow for RFC compliant CAs +// using each applicable ACME challenge type. +func (m *Manager) verifyRFC(ctx context.Context, client *acme.Client, domain string) (*acme.Order, error) { + // Try each supported challenge type starting with a new order each time. + // The nextTyp index of the next challenge type to try is shared across + // all order authorizations: if we've tried a challenge type once and it didn't work, + // it will most likely not work on another order's authorization either. + challengeTypes := m.supportedChallengeTypes() + nextTyp := 0 // challengeTypes index +AuthorizeOrderLoop: + for { + o, err := client.AuthorizeOrder(ctx, acme.DomainIDs(domain)) + if err != nil { + return nil, err + } + // Remove all hanging authorizations to reduce rate limit quotas + // after we're done. 
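+		// Editorial note: this defer sits inside the retry loop, so one deactivation
+		// goroutine is queued per attempted order and all of them run only when
+		// verifyRFC returns.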
+ defer func() { + go m.deactivatePendingAuthz(o.AuthzURLs) + }() + + // Check if there's actually anything we need to do. + switch o.Status { + case acme.StatusReady: + // Already authorized. + return o, nil + case acme.StatusPending: + // Continue normal Order-based flow. + default: + return nil, fmt.Errorf("acme/autocert: invalid new order status %q; order URL: %q", o.Status, o.URI) + } + + // Satisfy all pending authorizations. + for _, zurl := range o.AuthzURLs { + z, err := client.GetAuthorization(ctx, zurl) + if err != nil { + return nil, err + } + if z.Status != acme.StatusPending { + // We are interested only in pending authorizations. + continue + } + // Pick the next preferred challenge. + var chal *acme.Challenge + for chal == nil && nextTyp < len(challengeTypes) { + chal = pickChallenge(challengeTypes[nextTyp], z.Challenges) + nextTyp++ + } + if chal == nil { + return nil, fmt.Errorf("acme/autocert: unable to satisfy %q for domain %q: no viable challenge type found", z.URI, domain) + } + // Respond to the challenge and wait for validation result. + cleanup, err := m.fulfill(ctx, client, chal, domain) + if err != nil { + continue AuthorizeOrderLoop + } + defer cleanup() + if _, err := client.Accept(ctx, chal); err != nil { + continue AuthorizeOrderLoop + } + if _, err := client.WaitAuthorization(ctx, z.URI); err != nil { + continue AuthorizeOrderLoop + } + } + + // All authorizations are satisfied. + // Wait for the CA to update the order status. + o, err = client.WaitOrder(ctx, o.URI) + if err != nil { + continue AuthorizeOrderLoop + } + return o, nil + } +} + +func pickChallenge(typ string, chal []*acme.Challenge) *acme.Challenge { + for _, c := range chal { + if c.Type == typ { + return c + } + } + return nil +} + +func (m *Manager) supportedChallengeTypes() []string { + m.challengeMu.RLock() + defer m.challengeMu.RUnlock() + typ := []string{"tls-alpn-01"} + if m.tryHTTP01 { + typ = append(typ, "http-01") + } + return typ +} + +// deactivatePendingAuthz relinquishes all authorizations identified by the elements +// of the provided uri slice which are in "pending" state. +// It ignores revocation errors. +// +// deactivatePendingAuthz takes no context argument and instead runs with its own +// "detached" context because deactivations are done in a goroutine separate from +// that of the main issuance or renewal flow. +func (m *Manager) deactivatePendingAuthz(uri []string) { + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) + defer cancel() + client, err := m.acmeClient(ctx) + if err != nil { + return + } + for _, u := range uri { + z, err := client.GetAuthorization(ctx, u) + if err == nil && z.Status == acme.StatusPending { + client.RevokeAuthorization(ctx, u) + } + } +} + // fulfill provisions a response to the challenge chal. // The cleanup is non-nil only if provisioning succeeded. 
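+// Editorial note (illustrative): for "tls-alpn-01" fulfill stores the challenge
+// certificate in m.certTokens keyed by the domain so GetCertificate can serve it,
+// while for "http-01" it stores the key authorization under
+// client.HTTP01ChallengePath(token) so HTTPHandler can answer the validation request;
+// the returned cleanup removes that temporary state again.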
func (m *Manager) fulfill(ctx context.Context, client *acme.Client, chal *acme.Challenge, domain string) (cleanup func(), err error) { @@ -776,20 +880,6 @@ func (m *Manager) fulfill(ctx context.Context, client *acme.Client, chal *acme.C } m.putCertToken(ctx, domain, &cert) return func() { go m.deleteCertToken(domain) }, nil - case "tls-sni-01": - cert, name, err := client.TLSSNI01ChallengeCert(chal.Token) - if err != nil { - return nil, err - } - m.putCertToken(ctx, name, &cert) - return func() { go m.deleteCertToken(name) }, nil - case "tls-sni-02": - cert, name, err := client.TLSSNI02ChallengeCert(chal.Token) - if err != nil { - return nil, err - } - m.putCertToken(ctx, name, &cert) - return func() { go m.deleteCertToken(name) }, nil case "http-01": resp, err := client.HTTP01ChallengeResponse(chal.Token) if err != nil { @@ -802,20 +892,11 @@ func (m *Manager) fulfill(ctx context.Context, client *acme.Client, chal *acme.C return nil, fmt.Errorf("acme/autocert: unknown challenge type %q", chal.Type) } -func pickChallenge(typ string, chal []*acme.Challenge) *acme.Challenge { - for _, c := range chal { - if c.Type == typ { - return c - } - } - return nil -} - // putCertToken stores the token certificate with the specified name // in both m.certTokens map and m.Cache. func (m *Manager) putCertToken(ctx context.Context, name string, cert *tls.Certificate) { - m.tokensMu.Lock() - defer m.tokensMu.Unlock() + m.challengeMu.Lock() + defer m.challengeMu.Unlock() if m.certTokens == nil { m.certTokens = make(map[string]*tls.Certificate) } @@ -826,8 +907,8 @@ func (m *Manager) putCertToken(ctx context.Context, name string, cert *tls.Certi // deleteCertToken removes the token certificate with the specified name // from both m.certTokens map and m.Cache. func (m *Manager) deleteCertToken(name string) { - m.tokensMu.Lock() - defer m.tokensMu.Unlock() + m.challengeMu.Lock() + defer m.challengeMu.Unlock() delete(m.certTokens, name) if m.Cache != nil { ck := certKey{domain: name, isToken: true} @@ -838,8 +919,8 @@ func (m *Manager) deleteCertToken(name string) { // httpToken retrieves an existing http-01 token value from an in-memory map // or the optional cache. func (m *Manager) httpToken(ctx context.Context, tokenPath string) ([]byte, error) { - m.tokensMu.RLock() - defer m.tokensMu.RUnlock() + m.challengeMu.RLock() + defer m.challengeMu.RUnlock() if v, ok := m.httpTokens[tokenPath]; ok { return v, nil } @@ -854,8 +935,8 @@ func (m *Manager) httpToken(ctx context.Context, tokenPath string) ([]byte, erro // // It ignores any error returned from Cache.Put. func (m *Manager) putHTTPToken(ctx context.Context, tokenPath, val string) { - m.tokensMu.Lock() - defer m.tokensMu.Unlock() + m.challengeMu.Lock() + defer m.challengeMu.Unlock() if m.httpTokens == nil { m.httpTokens = make(map[string][]byte) } @@ -871,8 +952,8 @@ func (m *Manager) putHTTPToken(ctx context.Context, tokenPath, val string) { // // If m.Cache is non-nil, it blocks until Cache.Delete returns without a timeout. 
func (m *Manager) deleteHTTPToken(tokenPath string) { - m.tokensMu.Lock() - defer m.tokensMu.Unlock() + m.challengeMu.Lock() + defer m.challengeMu.Unlock() delete(m.httpTokens, tokenPath) if m.Cache != nil { m.Cache.Delete(context.Background(), httpTokenCacheKey(tokenPath)) @@ -971,7 +1052,7 @@ func (m *Manager) acmeClient(ctx context.Context) (*acme.Client, error) { client := m.Client if client == nil { - client = &acme.Client{DirectoryURL: acme.LetsEncryptURL} + client = &acme.Client{DirectoryURL: DefaultACMEDirectory} } if client.Key == nil { var err error @@ -989,14 +1070,23 @@ func (m *Manager) acmeClient(ctx context.Context) (*acme.Client, error) { } a := &acme.Account{Contact: contact} _, err := client.Register(ctx, a, m.Prompt) - if ae, ok := err.(*acme.Error); err == nil || ok && ae.StatusCode == http.StatusConflict { - // conflict indicates the key is already registered + if err == nil || isAccountAlreadyExist(err) { m.client = client err = nil } return m.client, err } +// isAccountAlreadyExist reports whether the err, as returned from acme.Client.Register, +// indicates the account has already been registered. +func isAccountAlreadyExist(err error) bool { + if err == acme.ErrAccountAlreadyExists { + return true + } + ae, ok := err.(*acme.Error) + return ok && ae.StatusCode == http.StatusConflict +} + func (m *Manager) hostPolicy() HostPolicy { if m.HostPolicy != nil { return m.HostPolicy diff --git a/vendor/golang.org/x/crypto/acme/autocert/cache.go b/vendor/golang.org/x/crypto/acme/autocert/cache.go index aa9aa845c..03f63022f 100644 --- a/vendor/golang.org/x/crypto/acme/autocert/cache.go +++ b/vendor/golang.org/x/crypto/acme/autocert/cache.go @@ -77,6 +77,7 @@ func (d DirCache) Put(ctx context.Context, name string, data []byte) error { if tmp, err = d.writeTempFile(name, data); err != nil { return } + defer os.Remove(tmp) select { case <-ctx.Done(): // Don't overwrite the file if the context was canceled. @@ -116,12 +117,17 @@ func (d DirCache) Delete(ctx context.Context, name string) error { } // writeTempFile writes b to a temporary file, closes the file and returns its path. -func (d DirCache) writeTempFile(prefix string, b []byte) (string, error) { +func (d DirCache) writeTempFile(prefix string, b []byte) (name string, reterr error) { // TempFile uses 0600 permissions f, err := ioutil.TempFile(string(d), prefix) if err != nil { return "", err } + defer func() { + if reterr != nil { + os.Remove(f.Name()) + } + }() if _, err := f.Write(b); err != nil { f.Close() return "", err diff --git a/vendor/golang.org/x/crypto/acme/http.go b/vendor/golang.org/x/crypto/acme/http.go index 600d5798b..c51943e71 100644 --- a/vendor/golang.org/x/crypto/acme/http.go +++ b/vendor/golang.org/x/crypto/acme/http.go @@ -155,8 +155,16 @@ func (c *Client) get(ctx context.Context, url string, ok resOkay) (*http.Respons } } +// postAsGet is POST-as-GET, a replacement for GET in RFC8555 +// as described in https://tools.ietf.org/html/rfc8555#section-6.3. +// It makes a POST request in KID form with zero JWS payload. +// See nopayload doc comments in jws.go. +func (c *Client) postAsGet(ctx context.Context, url string, ok resOkay) (*http.Response, error) { + return c.post(ctx, nil, url, noPayload, ok) +} + // post issues a signed POST request in JWS format using the provided key -// to the specified URL. +// to the specified URL. If key is nil, c.Key is used instead. // It returns a non-error value only when ok reports true. 
// // post retries unsuccessful attempts according to c.RetryBackoff @@ -193,14 +201,28 @@ func (c *Client) post(ctx context.Context, key crypto.Signer, url string, body i } // postNoRetry signs the body with the given key and POSTs it to the provided url. -// The body argument must be JSON-serializable. // It is used by c.post to retry unsuccessful attempts. +// The body argument must be JSON-serializable. +// +// If key argument is nil, c.Key is used to sign the request. +// If key argument is nil and c.accountKID returns a non-zero keyID, +// the request is sent in KID form. Otherwise, JWK form is used. +// +// In practice, when interfacing with RFC-compliant CAs most requests are sent in KID form +// and JWK is used only when KID is unavailable: new account endpoint and certificate +// revocation requests authenticated by a cert key. +// See jwsEncodeJSON for other details. func (c *Client) postNoRetry(ctx context.Context, key crypto.Signer, url string, body interface{}) (*http.Response, *http.Request, error) { + kid := noKeyID + if key == nil { + key = c.Key + kid = c.accountKID(ctx) + } nonce, err := c.popNonce(ctx, url) if err != nil { return nil, nil, err } - b, err := jwsEncodeJSON(body, key, nonce) + b, err := jwsEncodeJSON(body, key, kid, nonce, url) if err != nil { return nil, nil, err } diff --git a/vendor/golang.org/x/crypto/acme/jws.go b/vendor/golang.org/x/crypto/acme/jws.go index 1093b5039..cac8b6786 100644 --- a/vendor/golang.org/x/crypto/acme/jws.go +++ b/vendor/golang.org/x/crypto/acme/jws.go @@ -17,25 +17,53 @@ import ( "math/big" ) +// keyID is the account identity provided by a CA during registration. +type keyID string + +// noKeyID indicates that jwsEncodeJSON should compute and use JWK instead of a KID. +// See jwsEncodeJSON for details. +const noKeyID = keyID("") + +// noPayload indicates jwsEncodeJSON will encode zero-length octet string +// in a JWS request. This is called POST-as-GET in RFC 8555 and is used to make +// authenticated GET requests via POSTing with an empty payload. +// See https://tools.ietf.org/html/rfc8555#section-6.3 for more details. +const noPayload = "" + // jwsEncodeJSON signs claimset using provided key and a nonce. -// The result is serialized in JSON format. +// The result is serialized in JSON format containing either kid or jwk +// fields based on the provided keyID value. +// +// If kid is non-empty, its quoted value is inserted in the protected head +// as "kid" field value. Otherwise, JWK is computed using jwkEncode and inserted +// as "jwk" field value. The "jwk" and "kid" fields are mutually exclusive. +// // See https://tools.ietf.org/html/rfc7515#section-7. 
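+// Editorial note (illustrative sketch of the two protected-header shapes, using
+// "ES256" as the algorithm of a P-256 key):
+//   JWK form: {"alg":"ES256","jwk":{...},"nonce":"<nonce>","url":"<request url>"}
+//   KID form: {"alg":"ES256","kid":"<account URL>","nonce":"<nonce>","url":"<request url>"}
+// The header and the (possibly empty) payload are base64url-encoded and signed together.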
-func jwsEncodeJSON(claimset interface{}, key crypto.Signer, nonce string) ([]byte, error) { - jwk, err := jwkEncode(key.Public()) - if err != nil { - return nil, err - } +func jwsEncodeJSON(claimset interface{}, key crypto.Signer, kid keyID, nonce, url string) ([]byte, error) { alg, sha := jwsHasher(key.Public()) if alg == "" || !sha.Available() { return nil, ErrUnsupportedKey } - phead := fmt.Sprintf(`{"alg":%q,"jwk":%s,"nonce":%q}`, alg, jwk, nonce) - phead = base64.RawURLEncoding.EncodeToString([]byte(phead)) - cs, err := json.Marshal(claimset) - if err != nil { - return nil, err + var phead string + switch kid { + case noKeyID: + jwk, err := jwkEncode(key.Public()) + if err != nil { + return nil, err + } + phead = fmt.Sprintf(`{"alg":%q,"jwk":%s,"nonce":%q,"url":%q}`, alg, jwk, nonce, url) + default: + phead = fmt.Sprintf(`{"alg":%q,"kid":%q,"nonce":%q,"url":%q}`, alg, kid, nonce, url) + } + phead = base64.RawURLEncoding.EncodeToString([]byte(phead)) + var payload string + if claimset != noPayload { + cs, err := json.Marshal(claimset) + if err != nil { + return nil, err + } + payload = base64.RawURLEncoding.EncodeToString(cs) } - payload := base64.RawURLEncoding.EncodeToString(cs) hash := sha.New() hash.Write([]byte(phead + "." + payload)) sig, err := jwsSign(key, sha, hash.Sum(nil)) diff --git a/vendor/golang.org/x/crypto/acme/rfc8555.go b/vendor/golang.org/x/crypto/acme/rfc8555.go new file mode 100644 index 000000000..dfb57a66f --- /dev/null +++ b/vendor/golang.org/x/crypto/acme/rfc8555.go @@ -0,0 +1,392 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package acme + +import ( + "context" + "crypto" + "encoding/base64" + "encoding/json" + "encoding/pem" + "errors" + "fmt" + "io" + "io/ioutil" + "net/http" + "time" +) + +// DeactivateReg permanently disables an existing account associated with c.Key. +// A deactivated account can no longer request certificate issuance or access +// resources related to the account, such as orders or authorizations. +// +// It only works with CAs implementing RFC 8555. +func (c *Client) DeactivateReg(ctx context.Context) error { + url := string(c.accountKID(ctx)) + if url == "" { + return ErrNoAccount + } + req := json.RawMessage(`{"status": "deactivated"}`) + res, err := c.post(ctx, nil, url, req, wantStatus(http.StatusOK)) + if err != nil { + return err + } + res.Body.Close() + return nil +} + +// registerRFC is quivalent to c.Register but for CAs implementing RFC 8555. +// It expects c.Discover to have already been called. +// TODO: Implement externalAccountBinding. +func (c *Client) registerRFC(ctx context.Context, acct *Account, prompt func(tosURL string) bool) (*Account, error) { + c.cacheMu.Lock() // guard c.kid access + defer c.cacheMu.Unlock() + + req := struct { + TermsAgreed bool `json:"termsOfServiceAgreed,omitempty"` + Contact []string `json:"contact,omitempty"` + }{ + Contact: acct.Contact, + } + if c.dir.Terms != "" { + req.TermsAgreed = prompt(c.dir.Terms) + } + res, err := c.post(ctx, c.Key, c.dir.RegURL, req, wantStatus( + http.StatusOK, // account with this key already registered + http.StatusCreated, // new account created + )) + if err != nil { + return nil, err + } + + defer res.Body.Close() + a, err := responseAccount(res) + if err != nil { + return nil, err + } + // Cache Account URL even if we return an error to the caller. + // It is by all means a valid and usable "kid" value for future requests. 
+ c.kid = keyID(a.URI) + if res.StatusCode == http.StatusOK { + return nil, ErrAccountAlreadyExists + } + return a, nil +} + +// updateGegRFC is equivalent to c.UpdateReg but for CAs implementing RFC 8555. +// It expects c.Discover to have already been called. +func (c *Client) updateRegRFC(ctx context.Context, a *Account) (*Account, error) { + url := string(c.accountKID(ctx)) + if url == "" { + return nil, ErrNoAccount + } + req := struct { + Contact []string `json:"contact,omitempty"` + }{ + Contact: a.Contact, + } + res, err := c.post(ctx, nil, url, req, wantStatus(http.StatusOK)) + if err != nil { + return nil, err + } + defer res.Body.Close() + return responseAccount(res) +} + +// getGegRFC is equivalent to c.GetReg but for CAs implementing RFC 8555. +// It expects c.Discover to have already been called. +func (c *Client) getRegRFC(ctx context.Context) (*Account, error) { + req := json.RawMessage(`{"onlyReturnExisting": true}`) + res, err := c.post(ctx, c.Key, c.dir.RegURL, req, wantStatus(http.StatusOK)) + if e, ok := err.(*Error); ok && e.ProblemType == "urn:ietf:params:acme:error:accountDoesNotExist" { + return nil, ErrNoAccount + } + if err != nil { + return nil, err + } + + defer res.Body.Close() + return responseAccount(res) +} + +func responseAccount(res *http.Response) (*Account, error) { + var v struct { + Status string + Contact []string + Orders string + } + if err := json.NewDecoder(res.Body).Decode(&v); err != nil { + return nil, fmt.Errorf("acme: invalid account response: %v", err) + } + return &Account{ + URI: res.Header.Get("Location"), + Status: v.Status, + Contact: v.Contact, + OrdersURL: v.Orders, + }, nil +} + +// AuthorizeOrder initiates the order-based application for certificate issuance, +// as opposed to pre-authorization in Authorize. +// It is only supported by CAs implementing RFC 8555. +// +// The caller then needs to fetch each authorization with GetAuthorization, +// identify those with StatusPending status and fulfill a challenge using Accept. +// Once all authorizations are satisfied, the caller will typically want to poll +// order status using WaitOrder until it's in StatusReady state. +// To finalize the order and obtain a certificate, the caller submits a CSR with CreateOrderCert. +func (c *Client) AuthorizeOrder(ctx context.Context, id []AuthzID, opt ...OrderOption) (*Order, error) { + dir, err := c.Discover(ctx) + if err != nil { + return nil, err + } + + req := struct { + Identifiers []wireAuthzID `json:"identifiers"` + NotBefore string `json:"notBefore,omitempty"` + NotAfter string `json:"notAfter,omitempty"` + }{} + for _, v := range id { + req.Identifiers = append(req.Identifiers, wireAuthzID{ + Type: v.Type, + Value: v.Value, + }) + } + for _, o := range opt { + switch o := o.(type) { + case orderNotBeforeOpt: + req.NotBefore = time.Time(o).Format(time.RFC3339) + case orderNotAfterOpt: + req.NotAfter = time.Time(o).Format(time.RFC3339) + default: + // Package's fault if we let this happen. + panic(fmt.Sprintf("unsupported order option type %T", o)) + } + } + + res, err := c.post(ctx, nil, dir.OrderURL, req, wantStatus(http.StatusCreated)) + if err != nil { + return nil, err + } + defer res.Body.Close() + return responseOrder(res) +} + +// GetOrder retrives an order identified by the given URL. +// For orders created with AuthorizeOrder, the url value is Order.URI. +// +// If a caller needs to poll an order until its status is final, +// see the WaitOrder method. 
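+// Editorial note (illustrative order-based issuance sketch, error handling omitted):
+//   o, _ := client.AuthorizeOrder(ctx, acme.DomainIDs("example.org"))
+//   for _, u := range o.AuthzURLs { /* GetAuthorization, Accept a challenge, WaitAuthorization */ }
+//   o, _ = client.WaitOrder(ctx, o.URI)
+//   der, _, _ := client.CreateOrderCert(ctx, o.FinalizeURL, csr, true)
+// "example.org" and csr are placeholders.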
+func (c *Client) GetOrder(ctx context.Context, url string) (*Order, error) { + if _, err := c.Discover(ctx); err != nil { + return nil, err + } + + res, err := c.postAsGet(ctx, url, wantStatus(http.StatusOK)) + if err != nil { + return nil, err + } + defer res.Body.Close() + return responseOrder(res) +} + +// WaitOrder polls an order from the given URL until it is in one of the final states, +// StatusReady, StatusValid or StatusInvalid, the CA responded with a non-retryable error +// or the context is done. +// +// It returns a non-nil Order only if its Status is StatusReady or StatusValid. +// In all other cases WaitOrder returns an error. +// If the Status is StatusInvalid, the returned error is of type *OrderError. +func (c *Client) WaitOrder(ctx context.Context, url string) (*Order, error) { + if _, err := c.Discover(ctx); err != nil { + return nil, err + } + for { + res, err := c.postAsGet(ctx, url, wantStatus(http.StatusOK)) + if err != nil { + return nil, err + } + o, err := responseOrder(res) + res.Body.Close() + switch { + case err != nil: + // Skip and retry. + case o.Status == StatusInvalid: + return nil, &OrderError{OrderURL: o.URI, Status: o.Status} + case o.Status == StatusReady || o.Status == StatusValid: + return o, nil + } + + d := retryAfter(res.Header.Get("Retry-After")) + if d == 0 { + // Default retry-after. + // Same reasoning as in WaitAuthorization. + d = time.Second + } + t := time.NewTimer(d) + select { + case <-ctx.Done(): + t.Stop() + return nil, ctx.Err() + case <-t.C: + // Retry. + } + } +} + +func responseOrder(res *http.Response) (*Order, error) { + var v struct { + Status string + Expires time.Time + Identifiers []wireAuthzID + NotBefore time.Time + NotAfter time.Time + Error *wireError + Authorizations []string + Finalize string + Certificate string + } + if err := json.NewDecoder(res.Body).Decode(&v); err != nil { + return nil, fmt.Errorf("acme: error reading order: %v", err) + } + o := &Order{ + URI: res.Header.Get("Location"), + Status: v.Status, + Expires: v.Expires, + NotBefore: v.NotBefore, + NotAfter: v.NotAfter, + AuthzURLs: v.Authorizations, + FinalizeURL: v.Finalize, + CertURL: v.Certificate, + } + for _, id := range v.Identifiers { + o.Identifiers = append(o.Identifiers, AuthzID{Type: id.Type, Value: id.Value}) + } + if v.Error != nil { + o.Error = v.Error.error(nil /* headers */) + } + return o, nil +} + +// CreateOrderCert submits the CSR (Certificate Signing Request) to a CA at the specified URL. +// The URL is the FinalizeURL field of an Order created with AuthorizeOrder. +// +// If the bundle argument is true, the returned value also contain the CA (issuer) +// certificate chain. Otherwise, only a leaf certificate is returned. +// The returned URL can be used to re-fetch the certificate using FetchCert. +// +// This method is only supported by CAs implementing RFC 8555. See CreateCert for pre-RFC CAs. +// +// CreateOrderCert returns an error if the CA's response is unreasonably large. +// Callers are encouraged to parse the returned value to ensure the certificate is valid and has the expected features. +func (c *Client) CreateOrderCert(ctx context.Context, url string, csr []byte, bundle bool) (der [][]byte, certURL string, err error) { + if _, err := c.Discover(ctx); err != nil { // required by c.accountKID + return nil, "", err + } + + // RFC describes this as "finalize order" request. 
+ req := struct { + CSR string `json:"csr"` + }{ + CSR: base64.RawURLEncoding.EncodeToString(csr), + } + res, err := c.post(ctx, nil, url, req, wantStatus(http.StatusOK)) + if err != nil { + return nil, "", err + } + defer res.Body.Close() + o, err := responseOrder(res) + if err != nil { + return nil, "", err + } + + // Wait for CA to issue the cert if they haven't. + if o.Status != StatusValid { + o, err = c.WaitOrder(ctx, o.URI) + } + if err != nil { + return nil, "", err + } + // The only acceptable status post finalize and WaitOrder is "valid". + if o.Status != StatusValid { + return nil, "", &OrderError{OrderURL: o.URI, Status: o.Status} + } + crt, err := c.fetchCertRFC(ctx, o.CertURL, bundle) + return crt, o.CertURL, err +} + +// fetchCertRFC downloads issued certificate from the given URL. +// It expects the CA to respond with PEM-encoded certificate chain. +// +// The URL argument is the CertURL field of Order. +func (c *Client) fetchCertRFC(ctx context.Context, url string, bundle bool) ([][]byte, error) { + res, err := c.postAsGet(ctx, url, wantStatus(http.StatusOK)) + if err != nil { + return nil, err + } + defer res.Body.Close() + + // Get all the bytes up to a sane maximum. + // Account very roughly for base64 overhead. + const max = maxCertChainSize + maxCertChainSize/33 + b, err := ioutil.ReadAll(io.LimitReader(res.Body, max+1)) + if err != nil { + return nil, fmt.Errorf("acme: fetch cert response stream: %v", err) + } + if len(b) > max { + return nil, errors.New("acme: certificate chain is too big") + } + + // Decode PEM chain. + var chain [][]byte + for { + var p *pem.Block + p, b = pem.Decode(b) + if p == nil { + break + } + if p.Type != "CERTIFICATE" { + return nil, fmt.Errorf("acme: invalid PEM cert type %q", p.Type) + } + + chain = append(chain, p.Bytes) + if !bundle { + return chain, nil + } + if len(chain) > maxChainLen { + return nil, errors.New("acme: certificate chain is too long") + } + } + if len(chain) == 0 { + return nil, errors.New("acme: certificate chain is empty") + } + return chain, nil +} + +// sends a cert revocation request in either JWK form when key is non-nil or KID form otherwise. +func (c *Client) revokeCertRFC(ctx context.Context, key crypto.Signer, cert []byte, reason CRLReasonCode) error { + req := &struct { + Cert string `json:"certificate"` + Reason int `json:"reason"` + }{ + Cert: base64.RawURLEncoding.EncodeToString(cert), + Reason: int(reason), + } + res, err := c.post(ctx, key, c.dir.RevokeURL, req, wantStatus(http.StatusOK)) + if err != nil { + if isAlreadyRevoked(err) { + // Assume it is not an error to revoke an already revoked cert. + return nil + } + return err + } + defer res.Body.Close() + return nil +} + +func isAlreadyRevoked(err error) bool { + e, ok := err.(*Error) + return ok && e.ProblemType == "urn:ietf:params:acme:error:alreadyRevoked" +} diff --git a/vendor/golang.org/x/crypto/acme/types.go b/vendor/golang.org/x/crypto/acme/types.go index 54792c065..9c59097a0 100644 --- a/vendor/golang.org/x/crypto/acme/types.go +++ b/vendor/golang.org/x/crypto/acme/types.go @@ -14,14 +14,18 @@ import ( "time" ) -// ACME server response statuses used to describe Authorization and Challenge states. +// ACME status values of Account, Order, Authorization and Challenge objects. +// See https://tools.ietf.org/html/rfc8555#section-7.1.6 for details. 
const ( - StatusUnknown = "unknown" - StatusPending = "pending" - StatusProcessing = "processing" - StatusValid = "valid" - StatusInvalid = "invalid" - StatusRevoked = "revoked" + StatusDeactivated = "deactivated" + StatusExpired = "expired" + StatusInvalid = "invalid" + StatusPending = "pending" + StatusProcessing = "processing" + StatusReady = "ready" + StatusRevoked = "revoked" + StatusUnknown = "unknown" + StatusValid = "valid" ) // CRLReasonCode identifies the reason for a certificate revocation. @@ -41,8 +45,17 @@ const ( CRLReasonAACompromise CRLReasonCode = 10 ) -// ErrUnsupportedKey is returned when an unsupported key type is encountered. -var ErrUnsupportedKey = errors.New("acme: unknown key type; only RSA and ECDSA are supported") +var ( + // ErrUnsupportedKey is returned when an unsupported key type is encountered. + ErrUnsupportedKey = errors.New("acme: unknown key type; only RSA and ECDSA are supported") + + // ErrAccountAlreadyExists indicates that the Client's key has already been registered + // with the CA. It is returned by Register method. + ErrAccountAlreadyExists = errors.New("acme: account already exists") + + // ErrNoAccount indicates that the Client's key has not been registered with the CA. + ErrNoAccount = errors.New("acme: account does not exist") +) // Error is an ACME error, defined in Problem Details for HTTP APIs doc // http://tools.ietf.org/html/draft-ietf-appsawg-http-problem. @@ -54,6 +67,12 @@ type Error struct { ProblemType string // Detail is a human-readable explanation specific to this occurrence of the problem. Detail string + // Instance indicates a URL that the client should direct a human user to visit + // in order for instructions on how to agree to the updated Terms of Service. + // In such an event CA sets StatusCode to 403, ProblemType to + // "urn:ietf:params:acme:error:userActionRequired" and a Link header with relation + // "terms-of-service" containing the latest TOS URL. + Instance string // Header is the original server error response headers. // It may be nil. Header http.Header @@ -86,6 +105,21 @@ func (a *AuthorizationError) Error() string { return fmt.Sprintf("acme: authorization error for %s: %s", a.Identifier, strings.Join(e, "; ")) } +// OrderError is returned from Client's order related methods. +// It indicates the order is unusable and the clients should start over with +// AuthorizeOrder. +// +// The clients can still fetch the order object from CA using GetOrder +// to inspect its state. +type OrderError struct { + OrderURL string + Status string +} + +func (oe *OrderError) Error() string { + return fmt.Sprintf("acme: order %s status: %s", oe.OrderURL, oe.Status) +} + // RateLimit reports whether err represents a rate limit error and // any Retry-After duration returned by the server. // @@ -108,49 +142,88 @@ func RateLimit(err error) (time.Duration, bool) { } // Account is a user account. It is associated with a private key. +// Non-RFC 8555 fields are empty when interfacing with a compliant CA. type Account struct { // URI is the account unique ID, which is also a URL used to retrieve // account data from the CA. + // When interfacing with RFC 8555-compliant CAs, URI is the "kid" field + // value in JWS signed requests. URI string // Contact is a slice of contact info used during registration. + // See https://tools.ietf.org/html/rfc8555#section-7.3 for supported + // formats. Contact []string + // Status indicates current account status as returned by the CA. 
+ // Possible values are StatusValid, StatusDeactivated, and StatusRevoked. + Status string + + // OrdersURL is a URL from which a list of orders submitted by this account + // can be fetched. + OrdersURL string + // The terms user has agreed to. // A value not matching CurrentTerms indicates that the user hasn't agreed // to the actual Terms of Service of the CA. + // + // It is non-RFC 8555 compliant. Package users can store the ToS they agree to + // during Client's Register call in the prompt callback function. AgreedTerms string // Actual terms of a CA. + // + // It is non-RFC 8555 compliant. Use Directory's Terms field. + // When a CA updates their terms and requires an account agreement, + // a URL at which instructions to do so is available in Error's Instance field. CurrentTerms string // Authz is the authorization URL used to initiate a new authz flow. + // + // It is non-RFC 8555 compliant. Use Directory's AuthzURL or OrderURL. Authz string // Authorizations is a URI from which a list of authorizations // granted to this account can be fetched via a GET request. + // + // It is non-RFC 8555 compliant and is obsoleted by OrdersURL. Authorizations string // Certificates is a URI from which a list of certificates // issued for this account can be fetched via a GET request. + // + // It is non-RFC 8555 compliant and is obsoleted by OrdersURL. Certificates string } // Directory is ACME server discovery data. +// See https://tools.ietf.org/html/rfc8555#section-7.1.1 for more details. type Directory struct { - // RegURL is an account endpoint URL, allowing for creating new - // and modifying existing accounts. + // NonceURL indicates an endpoint where to fetch fresh nonce values from. + NonceURL string + + // RegURL is an account endpoint URL, allowing for creating new accounts. + // Pre-RFC 8555 CAs also allow modifying existing accounts at this URL. RegURL string - // AuthzURL is used to initiate Identifier Authorization flow. + // OrderURL is used to initiate the certificate issuance flow + // as described in RFC 8555. + OrderURL string + + // AuthzURL is used to initiate identifier pre-authorization flow. + // Empty string indicates the flow is unsupported by the CA. AuthzURL string // CertURL is a new certificate issuance endpoint URL. + // It is non-RFC 8555 compliant and is obsoleted by OrderURL. CertURL string // RevokeURL is used to initiate a certificate revocation flow. RevokeURL string + // KeyChangeURL allows to perform account key rollover flow. + KeyChangeURL string + // Term is a URI identifying the current terms of service. Terms string @@ -162,44 +235,126 @@ type Directory struct { // recognises as referring to itself for the purposes of CAA record validation // as defined in RFC6844. CAA []string + + // ExternalAccountRequired indicates that the CA requires for all account-related + // requests to include external account binding information. + ExternalAccountRequired bool } -// Challenge encodes a returned CA challenge. -// Its Error field may be non-nil if the challenge is part of an Authorization -// with StatusInvalid. -type Challenge struct { - // Type is the challenge type, e.g. "http-01", "tls-sni-02", "dns-01". - Type string +// rfcCompliant reports whether the ACME server implements RFC 8555. +// Note that some servers may have incomplete RFC implementation +// even if the returned value is true. +// If rfcCompliant reports false, the server most likely implements draft-02. 
+func (d *Directory) rfcCompliant() bool { + return d.OrderURL != "" +} - // URI is where a challenge response can be posted to. +// Order represents a client's request for a certificate. +// It tracks the request flow progress through to issuance. +type Order struct { + // URI uniquely identifies an order. URI string - // Token is a random value that uniquely identifies the challenge. - Token string - - // Status identifies the status of this challenge. + // Status represents the current status of the order. + // It indicates which action the client should take. + // + // Possible values are StatusPending, StatusReady, StatusProcessing, StatusValid and StatusInvalid. + // Pending means the CA does not believe that the client has fulfilled the requirements. + // Ready indicates that the client has fulfilled all the requirements and can submit a CSR + // to obtain a certificate. This is done with Client's CreateOrderCert. + // Processing means the certificate is being issued. + // Valid indicates the CA has issued the certificate. It can be downloaded + // from the Order's CertURL. This is done with Client's FetchCert. + // Invalid means the certificate will not be issued. Users should consider this order + // abandoned. Status string - // Error indicates the reason for an authorization failure - // when this challenge was used. - // The type of a non-nil value is *Error. - Error error + // Expires is the timestamp after which CA considers this order invalid. + Expires time.Time + + // Identifiers contains all identifier objects which the order pertains to. + Identifiers []AuthzID + + // NotBefore is the requested value of the notBefore field in the certificate. + NotBefore time.Time + + // NotAfter is the requested value of the notAfter field in the certificate. + NotAfter time.Time + + // AuthzURLs represents authorizations to complete before a certificate + // for identifiers specified in the order can be issued. + // It also contains unexpired authorizations that the client has completed + // in the past. + // + // Authorization objects can be fetched using Client's GetAuthorization method. + // + // The required authorizations are dictated by CA policies. + // There may not be a 1:1 relationship between the identifiers and required authorizations. + // Required authorizations can be identified by their StatusPending status. + // + // For orders in the StatusValid or StatusInvalid state these are the authorizations + // which were completed. + AuthzURLs []string + + // FinalizeURL is the endpoint at which a CSR is submitted to obtain a certificate + // once all the authorizations are satisfied. + FinalizeURL string + + // CertURL points to the certificate that has been issued in response to this order. + CertURL string + + // The error that occurred while processing the order as received from a CA, if any. + Error *Error } +// OrderOption allows customizing Client.AuthorizeOrder call. +type OrderOption interface { + privateOrderOpt() +} + +// WithOrderNotBefore sets order's NotBefore field. +func WithOrderNotBefore(t time.Time) OrderOption { + return orderNotBeforeOpt(t) +} + +// WithOrderNotAfter sets order's NotAfter field. +func WithOrderNotAfter(t time.Time) OrderOption { + return orderNotAfterOpt(t) +} + +type orderNotBeforeOpt time.Time + +func (orderNotBeforeOpt) privateOrderOpt() {} + +type orderNotAfterOpt time.Time + +func (orderNotAfterOpt) privateOrderOpt() {} + // Authorization encodes an authorization response. 
type Authorization struct { // URI uniquely identifies a authorization. URI string - // Status identifies the status of an authorization. + // Status is the current status of an authorization. + // Possible values are StatusPending, StatusValid, StatusInvalid, StatusDeactivated, + // StatusExpired and StatusRevoked. Status string // Identifier is what the account is authorized to represent. Identifier AuthzID + // The timestamp after which the CA considers the authorization invalid. + Expires time.Time + + // Wildcard is true for authorizations of a wildcard domain name. + Wildcard bool + // Challenges that the client needs to fulfill in order to prove possession // of the identifier (for pending authorizations). - // For final authorizations, the challenges that were used. + // For valid authorizations, the challenge that was validated. + // For invalid authorizations, the challenge that was attempted and failed. + // + // RFC 8555 compatible CAs require users to fuflfill only one of the challenges. Challenges []*Challenge // A collection of sets of challenges, each of which would be sufficient @@ -207,24 +362,51 @@ type Authorization struct { // Clients must complete a set of challenges that covers at least one set. // Challenges are identified by their indices in the challenges array. // If this field is empty, the client needs to complete all challenges. + // + // This field is unused in RFC 8555. Combinations [][]int } // AuthzID is an identifier that an account is authorized to represent. type AuthzID struct { - Type string // The type of identifier, e.g. "dns". + Type string // The type of identifier, "dns" or "ip". Value string // The identifier itself, e.g. "example.org". } +// DomainIDs creates a slice of AuthzID with "dns" identifier type. +func DomainIDs(names ...string) []AuthzID { + a := make([]AuthzID, len(names)) + for i, v := range names { + a[i] = AuthzID{Type: "dns", Value: v} + } + return a +} + +// IPIDs creates a slice of AuthzID with "ip" identifier type. +// Each element of addr is textual form of an address as defined +// in RFC1123 Section 2.1 for IPv4 and in RFC5952 Section 4 for IPv6. +func IPIDs(addr ...string) []AuthzID { + a := make([]AuthzID, len(addr)) + for i, v := range addr { + a[i] = AuthzID{Type: "ip", Value: v} + } + return a +} + +// wireAuthzID is ACME JSON representation of authorization identifier objects. +type wireAuthzID struct { + Type string `json:"type"` + Value string `json:"value"` +} + // wireAuthz is ACME JSON representation of Authorization objects. type wireAuthz struct { + Identifier wireAuthzID Status string + Expires time.Time + Wildcard bool Challenges []wireChallenge Combinations [][]int - Identifier struct { - Type string - Value string - } } func (z *wireAuthz) authorization(uri string) *Authorization { @@ -232,8 +414,10 @@ func (z *wireAuthz) authorization(uri string) *Authorization { URI: uri, Status: z.Status, Identifier: AuthzID{Type: z.Identifier.Type, Value: z.Identifier.Value}, - Combinations: z.Combinations, // shallow copy + Expires: z.Expires, + Wildcard: z.Wildcard, Challenges: make([]*Challenge, len(z.Challenges)), + Combinations: z.Combinations, // shallow copy } for i, v := range z.Challenges { a.Challenges[i] = v.challenge() @@ -254,22 +438,55 @@ func (z *wireAuthz) error(uri string) *AuthorizationError { return err } +// Challenge encodes a returned CA challenge. +// Its Error field may be non-nil if the challenge is part of an Authorization +// with StatusInvalid. 
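// Illustrative sketch of selecting a single challenge from an RFC 8555
// authorization (not part of the upstream package; c, ctx and authzURL are
// assumed to exist):
//
//	z, err := c.GetAuthorization(ctx, authzURL)
//	if err != nil { /* handle error */ }
//	var chal *Challenge
//	for _, ch := range z.Challenges {
//		if ch.Type == "http-01" {
//			chal = ch
//			break
//		}
//	}
//	if chal != nil {
//		// Provision the HTTP-01 response for chal.Token first, then:
//		_, err = c.Accept(ctx, chal)
//	}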
+type Challenge struct { + // Type is the challenge type, e.g. "http-01", "tls-alpn-01", "dns-01". + Type string + + // URI is where a challenge response can be posted to. + URI string + + // Token is a random value that uniquely identifies the challenge. + Token string + + // Status identifies the status of this challenge. + // In RFC 8555, possible values are StatusPending, StatusProcessing, StatusValid, + // and StatusInvalid. + Status string + + // Validated is the time at which the CA validated this challenge. + // Always zero value in pre-RFC 8555. + Validated time.Time + + // Error indicates the reason for an authorization failure + // when this challenge was used. + // The type of a non-nil value is *Error. + Error error +} + // wireChallenge is ACME JSON challenge representation. type wireChallenge struct { - URI string `json:"uri"` - Type string - Token string - Status string - Error *wireError + URL string `json:"url"` // RFC + URI string `json:"uri"` // pre-RFC + Type string + Token string + Status string + Validated time.Time + Error *wireError } func (c *wireChallenge) challenge() *Challenge { v := &Challenge{ - URI: c.URI, + URI: c.URL, Type: c.Type, Token: c.Token, Status: c.Status, } + if v.URI == "" { + v.URI = c.URI // c.URL was empty; use legacy + } if v.Status == "" { v.Status = StatusPending } @@ -282,9 +499,10 @@ func (c *wireChallenge) challenge() *Challenge { // wireError is a subset of fields of the Problem Details object // as described in https://tools.ietf.org/html/rfc7807#section-3.1. type wireError struct { - Status int - Type string - Detail string + Status int + Type string + Detail string + Instance string } func (e *wireError) error(h http.Header) *Error { @@ -292,6 +510,7 @@ func (e *wireError) error(h http.Header) *Error { StatusCode: e.Status, ProblemType: e.Type, Detail: e.Detail, + Instance: e.Instance, Header: h, } } diff --git a/vendor/golang.org/x/sys/unix/affinity_linux.go b/vendor/golang.org/x/sys/unix/affinity_linux.go index 72afe3338..6e5c81acd 100644 --- a/vendor/golang.org/x/sys/unix/affinity_linux.go +++ b/vendor/golang.org/x/sys/unix/affinity_linux.go @@ -7,6 +7,7 @@ package unix import ( + "math/bits" "unsafe" ) @@ -79,46 +80,7 @@ func (s *CPUSet) IsSet(cpu int) bool { func (s *CPUSet) Count() int { c := 0 for _, b := range s { - c += onesCount64(uint64(b)) + c += bits.OnesCount64(uint64(b)) } return c } - -// onesCount64 is a copy of Go 1.9's math/bits.OnesCount64. -// Once this package can require Go 1.9, we can delete this -// and update the caller to use bits.OnesCount64. -func onesCount64(x uint64) int { - const m0 = 0x5555555555555555 // 01010101 ... - const m1 = 0x3333333333333333 // 00110011 ... - const m2 = 0x0f0f0f0f0f0f0f0f // 00001111 ... - const m3 = 0x00ff00ff00ff00ff // etc. - const m4 = 0x0000ffff0000ffff - - // Implementation: Parallel summing of adjacent bits. - // See "Hacker's Delight", Chap. 5: Counting Bits. - // The following pattern shows the general approach: - // - // x = x>>1&(m0&m) + x&(m0&m) - // x = x>>2&(m1&m) + x&(m1&m) - // x = x>>4&(m2&m) + x&(m2&m) - // x = x>>8&(m3&m) + x&(m3&m) - // x = x>>16&(m4&m) + x&(m4&m) - // x = x>>32&(m5&m) + x&(m5&m) - // return int(x) - // - // Masking (& operations) can be left away when there's no - // danger that a field's sum will carry over into the next - // field: Since the result cannot be > 64, 8 bits is enough - // and we can ignore the masks for the shifts by 8 and up. 
- // Per "Hacker's Delight", the first line can be simplified - // more, but it saves at best one instruction, so we leave - // it alone for clarity. - const m = 1<<64 - 1 - x = x>>1&(m0&m) + x&(m0&m) - x = x>>2&(m1&m) + x&(m1&m) - x = (x>>4 + x) & (m2 & m) - x += x >> 8 - x += x >> 16 - x += x >> 32 - return int(x) & (1<<7 - 1) -} diff --git a/vendor/golang.org/x/sys/unix/dirent.go b/vendor/golang.org/x/sys/unix/dirent.go index 6f3460e69..304016b68 100644 --- a/vendor/golang.org/x/sys/unix/dirent.go +++ b/vendor/golang.org/x/sys/unix/dirent.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build aix darwin dragonfly freebsd linux nacl netbsd openbsd solaris +// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris package unix diff --git a/vendor/golang.org/x/sys/unix/endian_little.go b/vendor/golang.org/x/sys/unix/endian_little.go index 085df2d8d..bcdb5d30e 100644 --- a/vendor/golang.org/x/sys/unix/endian_little.go +++ b/vendor/golang.org/x/sys/unix/endian_little.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // -// +build 386 amd64 amd64p32 arm arm64 ppc64le mipsle mips64le +// +build 386 amd64 amd64p32 arm arm64 ppc64le mipsle mips64le riscv64 package unix diff --git a/vendor/golang.org/x/sys/unix/ioctl.go b/vendor/golang.org/x/sys/unix/ioctl.go index f121a8d64..3559e5dcb 100644 --- a/vendor/golang.org/x/sys/unix/ioctl.go +++ b/vendor/golang.org/x/sys/unix/ioctl.go @@ -6,7 +6,19 @@ package unix -import "runtime" +import ( + "runtime" + "unsafe" +) + +// ioctl itself should not be exposed directly, but additional get/set +// functions for specific types are permissible. + +// IoctlSetInt performs an ioctl operation which sets an integer value +// on fd, using the specified request number. +func IoctlSetInt(fd int, req uint, value int) error { + return ioctl(fd, req, uintptr(value)) +} // IoctlSetWinsize performs an ioctl on fd with a *Winsize argument. // @@ -14,7 +26,7 @@ import "runtime" func IoctlSetWinsize(fd int, req uint, value *Winsize) error { // TODO: if we get the chance, remove the req parameter and // hardcode TIOCSWINSZ. - err := ioctlSetWinsize(fd, req, value) + err := ioctl(fd, req, uintptr(unsafe.Pointer(value))) runtime.KeepAlive(value) return err } @@ -24,7 +36,30 @@ func IoctlSetWinsize(fd int, req uint, value *Winsize) error { // The req value will usually be TCSETA or TIOCSETA. func IoctlSetTermios(fd int, req uint, value *Termios) error { // TODO: if we get the chance, remove the req parameter. - err := ioctlSetTermios(fd, req, value) + err := ioctl(fd, req, uintptr(unsafe.Pointer(value))) runtime.KeepAlive(value) return err } + +// IoctlGetInt performs an ioctl operation which gets an integer value +// from fd, using the specified request number. +// +// A few ioctl requests use the return value as an output parameter; +// for those, IoctlRetInt should be used instead of this function. 
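// Illustrative sketch (not part of the upstream package; assumes fd is an
// open descriptor and that FIONREAD is defined for the target platform):
//
//	pending, err := IoctlGetInt(fd, FIONREAD)
//	if err == nil {
//		// pending holds the number of bytes available to read on fd.
//	}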
+func IoctlGetInt(fd int, req uint) (int, error) { + var value int + err := ioctl(fd, req, uintptr(unsafe.Pointer(&value))) + return value, err +} + +func IoctlGetWinsize(fd int, req uint) (*Winsize, error) { + var value Winsize + err := ioctl(fd, req, uintptr(unsafe.Pointer(&value))) + return &value, err +} + +func IoctlGetTermios(fd int, req uint) (*Termios, error) { + var value Termios + err := ioctl(fd, req, uintptr(unsafe.Pointer(&value))) + return &value, err +} diff --git a/vendor/golang.org/x/sys/unix/mkall.sh b/vendor/golang.org/x/sys/unix/mkall.sh index 5a22eca96..890ec464c 100644 --- a/vendor/golang.org/x/sys/unix/mkall.sh +++ b/vendor/golang.org/x/sys/unix/mkall.sh @@ -212,9 +212,11 @@ esac echo "$mksyscall -tags $GOOS,$GOARCH $syscall_goos $GOOSARCH_in && gofmt -w zsyscall_$GOOSARCH.go && gofmt -w zsyscall_"$GOOSARCH"_gccgo.go && gofmt -w zsyscall_"$GOOSARCH"_gc.go " ; elif [ "$GOOS" == "darwin" ]; then # pre-1.12, direct syscalls - echo "$mksyscall -tags $GOOS,$GOARCH,!go1.12 $syscall_goos $GOOSARCH_in |gofmt >zsyscall_$GOOSARCH.1_11.go"; + echo "$mksyscall -tags $GOOS,$GOARCH,!go1.12 $syscall_goos syscall_darwin_${GOARCH}.1_11.go $GOOSARCH_in |gofmt >zsyscall_$GOOSARCH.1_11.go"; # 1.12 and later, syscalls via libSystem echo "$mksyscall -tags $GOOS,$GOARCH,go1.12 $syscall_goos $GOOSARCH_in |gofmt >zsyscall_$GOOSARCH.go"; + # 1.13 and later, syscalls via libSystem (including syscallPtr) + echo "$mksyscall -tags $GOOS,$GOARCH,go1.13 syscall_darwin.1_13.go |gofmt >zsyscall_$GOOSARCH.1_13.go"; else echo "$mksyscall -tags $GOOS,$GOARCH $syscall_goos $GOOSARCH_in |gofmt >zsyscall_$GOOSARCH.go"; fi diff --git a/vendor/golang.org/x/sys/unix/mkasm_darwin.go b/vendor/golang.org/x/sys/unix/mkasm_darwin.go index 4548b993d..6f7bb6edf 100644 --- a/vendor/golang.org/x/sys/unix/mkasm_darwin.go +++ b/vendor/golang.org/x/sys/unix/mkasm_darwin.go @@ -17,6 +17,34 @@ import ( "strings" ) +func writeASMFile(in string, fileName string, buildTags string) { + trampolines := map[string]bool{} + + var out bytes.Buffer + + fmt.Fprintf(&out, "// go run mkasm_darwin.go %s\n", strings.Join(os.Args[1:], " ")) + fmt.Fprintf(&out, "// Code generated by the command above; DO NOT EDIT.\n") + fmt.Fprintf(&out, "\n") + fmt.Fprintf(&out, "// +build %s\n", buildTags) + fmt.Fprintf(&out, "\n") + fmt.Fprintf(&out, "#include \"textflag.h\"\n") + for _, line := range strings.Split(in, "\n") { + if !strings.HasPrefix(line, "func ") || !strings.HasSuffix(line, "_trampoline()") { + continue + } + fn := line[5 : len(line)-13] + if !trampolines[fn] { + trampolines[fn] = true + fmt.Fprintf(&out, "TEXT ·%s_trampoline(SB),NOSPLIT,$0-0\n", fn) + fmt.Fprintf(&out, "\tJMP\t%s(SB)\n", fn) + } + } + err := ioutil.WriteFile(fileName, out.Bytes(), 0644) + if err != nil { + log.Fatalf("can't write %s: %s", fileName, err) + } +} + func main() { in1, err := ioutil.ReadFile("syscall_darwin.go") if err != nil { @@ -33,29 +61,18 @@ func main() { } in := string(in1) + string(in2) + string(in3) - trampolines := map[string]bool{} + writeASMFile(in, fmt.Sprintf("zsyscall_darwin_%s.s", arch), "go1.12") - var out bytes.Buffer - - fmt.Fprintf(&out, "// go run mkasm_darwin.go %s\n", strings.Join(os.Args[1:], " ")) - fmt.Fprintf(&out, "// Code generated by the command above; DO NOT EDIT.\n") - fmt.Fprintf(&out, "\n") - fmt.Fprintf(&out, "// +build go1.12\n") - fmt.Fprintf(&out, "\n") - fmt.Fprintf(&out, "#include \"textflag.h\"\n") - for _, line := range strings.Split(in, "\n") { - if !strings.HasPrefix(line, "func ") || !strings.HasSuffix(line, 
"_trampoline()") { - continue - } - fn := line[5 : len(line)-13] - if !trampolines[fn] { - trampolines[fn] = true - fmt.Fprintf(&out, "TEXT ·%s_trampoline(SB),NOSPLIT,$0-0\n", fn) - fmt.Fprintf(&out, "\tJMP\t%s(SB)\n", fn) - } - } - err = ioutil.WriteFile(fmt.Sprintf("zsyscall_darwin_%s.s", arch), out.Bytes(), 0644) + in1, err = ioutil.ReadFile("syscall_darwin.1_13.go") if err != nil { - log.Fatalf("can't write zsyscall_darwin_%s.s: %s", arch, err) + log.Fatalf("can't open syscall_darwin.1_13.go: %s", err) } + in2, err = ioutil.ReadFile(fmt.Sprintf("zsyscall_darwin_%s.1_13.go", arch)) + if err != nil { + log.Fatalf("can't open zsyscall_darwin_%s.1_13.go: %s", arch, err) + } + + in = string(in1) + string(in2) + + writeASMFile(in, fmt.Sprintf("zsyscall_darwin_%s.1_13.s", arch), "go1.13") } diff --git a/vendor/golang.org/x/sys/unix/mkerrors.sh b/vendor/golang.org/x/sys/unix/mkerrors.sh index 3d85f2795..67b84828a 100644 --- a/vendor/golang.org/x/sys/unix/mkerrors.sh +++ b/vendor/golang.org/x/sys/unix/mkerrors.sh @@ -60,6 +60,7 @@ includes_Darwin=' #include #include #include +#include #include #include #include @@ -80,6 +81,7 @@ includes_Darwin=' includes_DragonFly=' #include #include +#include #include #include #include @@ -103,6 +105,7 @@ includes_FreeBSD=' #include #include #include +#include #include #include #include @@ -179,52 +182,57 @@ struct ltchars { #include #include #include +#include #include #include #include #include +#include #include +#include #include +#include +#include +#include +#include +#include +#include +#include #include +#include #include #include #include #include #include #include -#include -#include -#include -#include -#include +#include #include #include +#include #include #include #include #include #include #include +#include #include +#include #include #include +#include #include -#include #include #include -#include -#include -#include #include -#include -#include +#include #include -#include +#include +#include +#include #include -#include -#include -#include -#include + #include #include @@ -263,6 +271,11 @@ struct ltchars { #define FS_KEY_DESC_PREFIX "fscrypt:" #define FS_KEY_DESC_PREFIX_SIZE 8 #define FS_MAX_KEY_SIZE 64 + +// The code generator produces -0x1 for (~0), but an unsigned value is necessary +// for the tipc_subscr timeout __u32 field. 
+#undef TIPC_WAIT_FOREVER +#define TIPC_WAIT_FOREVER 0xffffffff ' includes_NetBSD=' @@ -272,6 +285,7 @@ includes_NetBSD=' #include #include #include +#include #include #include #include @@ -298,6 +312,7 @@ includes_OpenBSD=' #include #include #include +#include #include #include #include @@ -334,6 +349,7 @@ includes_OpenBSD=' includes_SunOS=' #include #include +#include #include #include #include @@ -426,6 +442,7 @@ ccflags="$@" $2 == "XCASE" || $2 == "ALTWERASE" || $2 == "NOKERNINFO" || + $2 == "NFDBITS" || $2 ~ /^PAR/ || $2 ~ /^SIG[^_]/ || $2 ~ /^O[CNPFPL][A-Z]+[^_][A-Z]+$/ || @@ -435,6 +452,8 @@ ccflags="$@" $2 ~ /^TC[IO](ON|OFF)$/ || $2 ~ /^IN_/ || $2 ~ /^LOCK_(SH|EX|NB|UN)$/ || + $2 ~ /^LO_(KEY|NAME)_SIZE$/ || + $2 ~ /^LOOP_(CLR|CTL|GET|SET)_/ || $2 ~ /^(AF|SOCK|SO|SOL|IPPROTO|IP|IPV6|ICMP6|TCP|MCAST|EVFILT|NOTE|EV|SHUT|PROT|MAP|MFD|T?PACKET|MSG|SCM|MCL|DT|MADV|PR)_/ || $2 ~ /^TP_STATUS_/ || $2 ~ /^FALLOC_/ || @@ -448,6 +467,7 @@ ccflags="$@" $2 ~ /^SYSCTL_VERS/ || $2 !~ "MNT_BITS" && $2 ~ /^(MS|MNT|UMOUNT)_/ || + $2 ~ /^NS_GET_/ || $2 ~ /^TUN(SET|GET|ATTACH|DETACH)/ || $2 ~ /^(O|F|[ES]?FD|NAME|S|PTRACE|PT)_/ || $2 ~ /^KEXEC_/ || @@ -503,6 +523,7 @@ ccflags="$@" $2 ~ /^XDP_/ || $2 ~ /^(HDIO|WIN|SMART)_/ || $2 ~ /^CRYPTO_/ || + $2 ~ /^TIPC_/ || $2 !~ "WMESGLEN" && $2 ~ /^W[A-Z0-9]+$/ || $2 ~/^PPPIOC/ || diff --git a/vendor/golang.org/x/sys/unix/mksyscall.go b/vendor/golang.org/x/sys/unix/mksyscall.go index e4af9424e..9e540cc89 100644 --- a/vendor/golang.org/x/sys/unix/mksyscall.go +++ b/vendor/golang.org/x/sys/unix/mksyscall.go @@ -121,7 +121,7 @@ func main() { } libc := false - if goos == "darwin" && strings.Contains(buildTags(), ",go1.12") { + if goos == "darwin" && (strings.Contains(buildTags(), ",go1.12") || strings.Contains(buildTags(), ",go1.13")) { libc = true } trampolines := map[string]bool{} @@ -292,11 +292,6 @@ func main() { asm = "syscall_" + strings.ToLower(asm[:1]) + asm[1:] // internal syscall call sysname = strings.TrimPrefix(sysname, "SYS_") // remove SYS_ sysname = strings.ToLower(sysname) // lowercase - if sysname == "getdirentries64" { - // Special case - libSystem name and - // raw syscall name don't match. - sysname = "__getdirentries64" - } libcFn = sysname sysname = "funcPC(libc_" + sysname + "_trampoline)" } diff --git a/vendor/golang.org/x/sys/unix/syscall_aix.go b/vendor/golang.org/x/sys/unix/syscall_aix.go index 1aa065f9c..9ad8a0d4a 100644 --- a/vendor/golang.org/x/sys/unix/syscall_aix.go +++ b/vendor/golang.org/x/sys/unix/syscall_aix.go @@ -350,49 +350,12 @@ func (w WaitStatus) Signal() Signal { func (w WaitStatus) Continued() bool { return w&0x01000000 != 0 } -func (w WaitStatus) CoreDump() bool { return w&0x200 != 0 } +func (w WaitStatus) CoreDump() bool { return w&0x80 == 0x80 } func (w WaitStatus) TrapCause() int { return -1 } //sys ioctl(fd int, req uint, arg uintptr) (err error) -// ioctl itself should not be exposed directly, but additional get/set -// functions for specific types are permissible. - -// IoctlSetInt performs an ioctl operation which sets an integer value -// on fd, using the specified request number. 
-func IoctlSetInt(fd int, req uint, value int) error { - return ioctl(fd, req, uintptr(value)) -} - -func ioctlSetWinsize(fd int, req uint, value *Winsize) error { - return ioctl(fd, req, uintptr(unsafe.Pointer(value))) -} - -func ioctlSetTermios(fd int, req uint, value *Termios) error { - return ioctl(fd, req, uintptr(unsafe.Pointer(value))) -} - -// IoctlGetInt performs an ioctl operation which gets an integer value -// from fd, using the specified request number. -func IoctlGetInt(fd int, req uint) (int, error) { - var value int - err := ioctl(fd, req, uintptr(unsafe.Pointer(&value))) - return value, err -} - -func IoctlGetWinsize(fd int, req uint) (*Winsize, error) { - var value Winsize - err := ioctl(fd, req, uintptr(unsafe.Pointer(&value))) - return &value, err -} - -func IoctlGetTermios(fd int, req uint) (*Termios, error) { - var value Termios - err := ioctl(fd, req, uintptr(unsafe.Pointer(&value))) - return &value, err -} - // fcntl must never be called with cmd=F_DUP2FD because it doesn't work on AIX // There is no way to create a custom fcntl and to keep //sys fcntl easily, // Therefore, the programmer must call dup2 instead of fcntl in this case. diff --git a/vendor/golang.org/x/sys/unix/syscall_aix_ppc.go b/vendor/golang.org/x/sys/unix/syscall_aix_ppc.go index bf05603f1..b3c8e3301 100644 --- a/vendor/golang.org/x/sys/unix/syscall_aix_ppc.go +++ b/vendor/golang.org/x/sys/unix/syscall_aix_ppc.go @@ -29,6 +29,10 @@ func (msghdr *Msghdr) SetControllen(length int) { msghdr.Controllen = uint32(length) } +func (msghdr *Msghdr) SetIovlen(length int) { + msghdr.Iovlen = int32(length) +} + func (cmsg *Cmsghdr) SetLen(length int) { cmsg.Len = uint32(length) } diff --git a/vendor/golang.org/x/sys/unix/syscall_aix_ppc64.go b/vendor/golang.org/x/sys/unix/syscall_aix_ppc64.go index 13d4321f4..9a6e02417 100644 --- a/vendor/golang.org/x/sys/unix/syscall_aix_ppc64.go +++ b/vendor/golang.org/x/sys/unix/syscall_aix_ppc64.go @@ -29,6 +29,10 @@ func (msghdr *Msghdr) SetControllen(length int) { msghdr.Controllen = uint32(length) } +func (msghdr *Msghdr) SetIovlen(length int) { + msghdr.Iovlen = int32(length) +} + func (cmsg *Cmsghdr) SetLen(length int) { cmsg.Len = uint32(length) } diff --git a/vendor/golang.org/x/sys/unix/syscall_bsd.go b/vendor/golang.org/x/sys/unix/syscall_bsd.go index 97a8eef6f..3e6671426 100644 --- a/vendor/golang.org/x/sys/unix/syscall_bsd.go +++ b/vendor/golang.org/x/sys/unix/syscall_bsd.go @@ -413,8 +413,6 @@ func Kevent(kq int, changes, events []Kevent_t, timeout *Timespec) (n int, err e return kevent(kq, change, len(changes), event, len(events), timeout) } -//sys sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) = SYS___SYSCTL - // sysctlmib translates name to mib number and appends any additional args. func sysctlmib(name string, args ...int) ([]_C_int, error) { // Translate name to mib number. diff --git a/vendor/golang.org/x/sys/unix/syscall_darwin.1_12.go b/vendor/golang.org/x/sys/unix/syscall_darwin.1_12.go new file mode 100644 index 000000000..6a15cba61 --- /dev/null +++ b/vendor/golang.org/x/sys/unix/syscall_darwin.1_12.go @@ -0,0 +1,29 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build darwin,go1.12,!go1.13 + +package unix + +import ( + "unsafe" +) + +func Getdirentries(fd int, buf []byte, basep *uintptr) (n int, err error) { + // To implement this using libSystem we'd need syscall_syscallPtr for + // fdopendir. However, syscallPtr was only added in Go 1.13, so we fall + // back to raw syscalls for this func on Go 1.12. + var p unsafe.Pointer + if len(buf) > 0 { + p = unsafe.Pointer(&buf[0]) + } else { + p = unsafe.Pointer(&_zero) + } + r0, _, e1 := Syscall6(SYS_GETDIRENTRIES64, uintptr(fd), uintptr(p), uintptr(len(buf)), uintptr(unsafe.Pointer(basep)), 0, 0) + n = int(r0) + if e1 != 0 { + return n, errnoErr(e1) + } + return n, nil +} diff --git a/vendor/golang.org/x/sys/unix/syscall_darwin.1_13.go b/vendor/golang.org/x/sys/unix/syscall_darwin.1_13.go new file mode 100644 index 000000000..24960c38b --- /dev/null +++ b/vendor/golang.org/x/sys/unix/syscall_darwin.1_13.go @@ -0,0 +1,103 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build darwin,go1.13 + +package unix + +import "unsafe" + +//sys closedir(dir uintptr) (err error) +//sys readdir_r(dir uintptr, entry *Dirent, result **Dirent) (res Errno) + +func fdopendir(fd int) (dir uintptr, err error) { + r0, _, e1 := syscall_syscallPtr(funcPC(libc_fdopendir_trampoline), uintptr(fd), 0, 0) + dir = uintptr(r0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +func libc_fdopendir_trampoline() + +//go:linkname libc_fdopendir libc_fdopendir +//go:cgo_import_dynamic libc_fdopendir fdopendir "/usr/lib/libSystem.B.dylib" + +func Getdirentries(fd int, buf []byte, basep *uintptr) (n int, err error) { + // Simulate Getdirentries using fdopendir/readdir_r/closedir. + const ptrSize = unsafe.Sizeof(uintptr(0)) + + // We store the number of entries to skip in the seek + // offset of fd. See issue #31368. + // It's not the full required semantics, but should handle the case + // of calling Getdirentries or ReadDirent repeatedly. + // It won't handle assigning the results of lseek to *basep, or handle + // the directory being edited underfoot. + skip, err := Seek(fd, 0, 1 /* SEEK_CUR */) + if err != nil { + return 0, err + } + + // We need to duplicate the incoming file descriptor + // because the caller expects to retain control of it, but + // fdopendir expects to take control of its argument. + // Just Dup'ing the file descriptor is not enough, as the + // result shares underlying state. Use Openat to make a really + // new file descriptor referring to the same directory. + fd2, err := Openat(fd, ".", O_RDONLY, 0) + if err != nil { + return 0, err + } + d, err := fdopendir(fd2) + if err != nil { + Close(fd2) + return 0, err + } + defer closedir(d) + + var cnt int64 + for { + var entry Dirent + var entryp *Dirent + e := readdir_r(d, &entry, &entryp) + if e != 0 { + return n, errnoErr(e) + } + if entryp == nil { + break + } + if skip > 0 { + skip-- + cnt++ + continue + } + reclen := int(entry.Reclen) + if reclen > len(buf) { + // Not enough room. Return for now. + // The counter will let us know where we should start up again. + // Note: this strategy for suspending in the middle and + // restarting is O(n^2) in the length of the directory. Oh well. + break + } + // Copy entry into return buffer. 
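	// The next statement builds a slice header (data pointer, length,
	// capacity) that aliases the Dirent value, so copy can move exactly
	// reclen bytes of it into the caller's buffer without an extra
	// allocation.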
+ s := struct { + ptr unsafe.Pointer + siz int + cap int + }{ptr: unsafe.Pointer(&entry), siz: reclen, cap: reclen} + copy(buf, *(*[]byte)(unsafe.Pointer(&s))) + buf = buf[reclen:] + n += reclen + cnt++ + } + // Set the seek offset of the input fd to record + // how many files we've already returned. + _, err = Seek(fd, cnt, 0 /* SEEK_SET */) + if err != nil { + return n, err + } + + return n, nil +} diff --git a/vendor/golang.org/x/sys/unix/syscall_darwin.go b/vendor/golang.org/x/sys/unix/syscall_darwin.go index 216b4ac9e..c5018a385 100644 --- a/vendor/golang.org/x/sys/unix/syscall_darwin.go +++ b/vendor/golang.org/x/sys/unix/syscall_darwin.go @@ -89,7 +89,6 @@ func direntNamlen(buf []byte) (uint64, bool) { return readInt(buf, unsafe.Offsetof(Dirent{}.Namlen), unsafe.Sizeof(Dirent{}.Namlen)) } -//sys ptrace(request int, pid int, addr uintptr, data uintptr) (err error) func PtraceAttach(pid int) (err error) { return ptrace(PT_ATTACH, pid, 0, 0) } func PtraceDetach(pid int) (err error) { return ptrace(PT_DETACH, pid, 0, 0) } @@ -340,43 +339,6 @@ func Kill(pid int, signum syscall.Signal) (err error) { return kill(pid, int(sig //sys ioctl(fd int, req uint, arg uintptr) (err error) -// ioctl itself should not be exposed directly, but additional get/set -// functions for specific types are permissible. - -// IoctlSetInt performs an ioctl operation which sets an integer value -// on fd, using the specified request number. -func IoctlSetInt(fd int, req uint, value int) error { - return ioctl(fd, req, uintptr(value)) -} - -func ioctlSetWinsize(fd int, req uint, value *Winsize) error { - return ioctl(fd, req, uintptr(unsafe.Pointer(value))) -} - -func ioctlSetTermios(fd int, req uint, value *Termios) error { - return ioctl(fd, req, uintptr(unsafe.Pointer(value))) -} - -// IoctlGetInt performs an ioctl operation which gets an integer value -// from fd, using the specified request number. -func IoctlGetInt(fd int, req uint) (int, error) { - var value int - err := ioctl(fd, req, uintptr(unsafe.Pointer(&value))) - return value, err -} - -func IoctlGetWinsize(fd int, req uint) (*Winsize, error) { - var value Winsize - err := ioctl(fd, req, uintptr(unsafe.Pointer(&value))) - return &value, err -} - -func IoctlGetTermios(fd int, req uint) (*Termios, error) { - var value Termios - err := ioctl(fd, req, uintptr(unsafe.Pointer(&value))) - return &value, err -} - func Uname(uname *Utsname) error { mib := []_C_int{CTL_KERN, KERN_OSTYPE} n := unsafe.Sizeof(uname.Sysname) @@ -498,7 +460,7 @@ func Sendfile(outfd int, infd int, offset *int64, count int) (written int, err e //sys Revoke(path string) (err error) //sys Rmdir(path string) (err error) //sys Seek(fd int, offset int64, whence int) (newoffset int64, err error) = SYS_LSEEK -//sys Select(n int, r *FdSet, w *FdSet, e *FdSet, timeout *Timeval) (err error) +//sys Select(nfd int, r *FdSet, w *FdSet, e *FdSet, timeout *Timeval) (n int, err error) //sys Setegid(egid int) (err error) //sysnb Seteuid(euid int) (err error) //sysnb Setgid(gid int) (err error) diff --git a/vendor/golang.org/x/sys/unix/syscall_darwin_386.1_11.go b/vendor/golang.org/x/sys/unix/syscall_darwin_386.1_11.go new file mode 100644 index 000000000..6b223f91a --- /dev/null +++ b/vendor/golang.org/x/sys/unix/syscall_darwin_386.1_11.go @@ -0,0 +1,9 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build darwin,386,!go1.12 + +package unix + +//sys Getdirentries(fd int, buf []byte, basep *uintptr) (n int, err error) = SYS_GETDIRENTRIES64 diff --git a/vendor/golang.org/x/sys/unix/syscall_darwin_386.go b/vendor/golang.org/x/sys/unix/syscall_darwin_386.go index 489726fa9..dd756e708 100644 --- a/vendor/golang.org/x/sys/unix/syscall_darwin_386.go +++ b/vendor/golang.org/x/sys/unix/syscall_darwin_386.go @@ -10,6 +10,9 @@ import ( "syscall" ) +//sys sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) = SYS___SYSCTL +//sys ptrace(request int, pid int, addr uintptr, data uintptr) (err error) + func setTimespec(sec, nsec int64) Timespec { return Timespec{Sec: int32(sec), Nsec: int32(nsec)} } @@ -43,6 +46,10 @@ func (msghdr *Msghdr) SetControllen(length int) { msghdr.Controllen = uint32(length) } +func (msghdr *Msghdr) SetIovlen(length int) { + msghdr.Iovlen = int32(length) +} + func (cmsg *Cmsghdr) SetLen(length int) { cmsg.Len = uint32(length) } @@ -56,7 +63,6 @@ const SYS___SYSCTL = SYS_SYSCTL //sys Fstat(fd int, stat *Stat_t) (err error) = SYS_FSTAT64 //sys Fstatat(fd int, path string, stat *Stat_t, flags int) (err error) = SYS_FSTATAT64 //sys Fstatfs(fd int, stat *Statfs_t) (err error) = SYS_FSTATFS64 -//sys Getdirentries(fd int, buf []byte, basep *uintptr) (n int, err error) = SYS_GETDIRENTRIES64 //sys getfsstat(buf unsafe.Pointer, size uintptr, flags int) (n int, err error) = SYS_GETFSSTAT64 //sys Lstat(path string, stat *Stat_t) (err error) = SYS_LSTAT64 //sys Stat(path string, stat *Stat_t) (err error) = SYS_STAT64 diff --git a/vendor/golang.org/x/sys/unix/syscall_darwin_amd64.1_11.go b/vendor/golang.org/x/sys/unix/syscall_darwin_amd64.1_11.go new file mode 100644 index 000000000..68ebd6fab --- /dev/null +++ b/vendor/golang.org/x/sys/unix/syscall_darwin_amd64.1_11.go @@ -0,0 +1,9 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build darwin,amd64,!go1.12 + +package unix + +//sys Getdirentries(fd int, buf []byte, basep *uintptr) (n int, err error) = SYS_GETDIRENTRIES64 diff --git a/vendor/golang.org/x/sys/unix/syscall_darwin_amd64.go b/vendor/golang.org/x/sys/unix/syscall_darwin_amd64.go index 914b89bde..7f148c428 100644 --- a/vendor/golang.org/x/sys/unix/syscall_darwin_amd64.go +++ b/vendor/golang.org/x/sys/unix/syscall_darwin_amd64.go @@ -10,6 +10,9 @@ import ( "syscall" ) +//sys sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) = SYS___SYSCTL +//sys ptrace(request int, pid int, addr uintptr, data uintptr) (err error) + func setTimespec(sec, nsec int64) Timespec { return Timespec{Sec: sec, Nsec: nsec} } @@ -43,6 +46,10 @@ func (msghdr *Msghdr) SetControllen(length int) { msghdr.Controllen = uint32(length) } +func (msghdr *Msghdr) SetIovlen(length int) { + msghdr.Iovlen = int32(length) +} + func (cmsg *Cmsghdr) SetLen(length int) { cmsg.Len = uint32(length) } @@ -56,7 +63,6 @@ const SYS___SYSCTL = SYS_SYSCTL //sys Fstat(fd int, stat *Stat_t) (err error) = SYS_FSTAT64 //sys Fstatat(fd int, path string, stat *Stat_t, flags int) (err error) = SYS_FSTATAT64 //sys Fstatfs(fd int, stat *Statfs_t) (err error) = SYS_FSTATFS64 -//sys Getdirentries(fd int, buf []byte, basep *uintptr) (n int, err error) = SYS_GETDIRENTRIES64 //sys getfsstat(buf unsafe.Pointer, size uintptr, flags int) (n int, err error) = SYS_GETFSSTAT64 //sys Lstat(path string, stat *Stat_t) (err error) = SYS_LSTAT64 //sys Stat(path string, stat *Stat_t) (err error) = SYS_STAT64 diff --git a/vendor/golang.org/x/sys/unix/syscall_darwin_arm.1_11.go b/vendor/golang.org/x/sys/unix/syscall_darwin_arm.1_11.go new file mode 100644 index 000000000..c81510da2 --- /dev/null +++ b/vendor/golang.org/x/sys/unix/syscall_darwin_arm.1_11.go @@ -0,0 +1,11 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build darwin,386,!go1.12 + +package unix + +func Getdirentries(fd int, buf []byte, basep *uintptr) (n int, err error) { + return 0, ENOSYS +} diff --git a/vendor/golang.org/x/sys/unix/syscall_darwin_arm.go b/vendor/golang.org/x/sys/unix/syscall_darwin_arm.go index 4a284cf50..58be02e71 100644 --- a/vendor/golang.org/x/sys/unix/syscall_darwin_arm.go +++ b/vendor/golang.org/x/sys/unix/syscall_darwin_arm.go @@ -8,6 +8,14 @@ import ( "syscall" ) +func ptrace(request int, pid int, addr uintptr, data uintptr) error { + return ENOTSUP +} + +func sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) error { + return ENOTSUP +} + func setTimespec(sec, nsec int64) Timespec { return Timespec{Sec: int32(sec), Nsec: int32(nsec)} } @@ -41,6 +49,10 @@ func (msghdr *Msghdr) SetControllen(length int) { msghdr.Controllen = uint32(length) } +func (msghdr *Msghdr) SetIovlen(length int) { + msghdr.Iovlen = int32(length) +} + func (cmsg *Cmsghdr) SetLen(length int) { cmsg.Len = uint32(length) } @@ -58,7 +70,3 @@ const SYS___SYSCTL = SYS_SYSCTL //sys Lstat(path string, stat *Stat_t) (err error) //sys Stat(path string, stat *Stat_t) (err error) //sys Statfs(path string, stat *Statfs_t) (err error) - -func Getdirentries(fd int, buf []byte, basep *uintptr) (n int, err error) { - return 0, ENOSYS -} diff --git a/vendor/golang.org/x/sys/unix/syscall_darwin_arm64.1_11.go b/vendor/golang.org/x/sys/unix/syscall_darwin_arm64.1_11.go new file mode 100644 index 000000000..01d450406 --- /dev/null +++ b/vendor/golang.org/x/sys/unix/syscall_darwin_arm64.1_11.go @@ -0,0 +1,11 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build darwin,arm64,!go1.12 + +package unix + +func Getdirentries(fd int, buf []byte, basep *uintptr) (n int, err error) { + return 0, ENOSYS +} diff --git a/vendor/golang.org/x/sys/unix/syscall_darwin_arm64.go b/vendor/golang.org/x/sys/unix/syscall_darwin_arm64.go index 52dcd88f6..1ee931f97 100644 --- a/vendor/golang.org/x/sys/unix/syscall_darwin_arm64.go +++ b/vendor/golang.org/x/sys/unix/syscall_darwin_arm64.go @@ -10,6 +10,14 @@ import ( "syscall" ) +func ptrace(request int, pid int, addr uintptr, data uintptr) error { + return ENOTSUP +} + +func sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) error { + return ENOTSUP +} + func setTimespec(sec, nsec int64) Timespec { return Timespec{Sec: sec, Nsec: nsec} } @@ -43,6 +51,10 @@ func (msghdr *Msghdr) SetControllen(length int) { msghdr.Controllen = uint32(length) } +func (msghdr *Msghdr) SetIovlen(length int) { + msghdr.Iovlen = int32(length) +} + func (cmsg *Cmsghdr) SetLen(length int) { cmsg.Len = uint32(length) } @@ -60,7 +72,3 @@ const SYS___SYSCTL = SYS_SYSCTL //sys Lstat(path string, stat *Stat_t) (err error) //sys Stat(path string, stat *Stat_t) (err error) //sys Statfs(path string, stat *Statfs_t) (err error) - -func Getdirentries(fd int, buf []byte, basep *uintptr) (n int, err error) { - return 0, ENOSYS -} diff --git a/vendor/golang.org/x/sys/unix/syscall_darwin_libSystem.go b/vendor/golang.org/x/sys/unix/syscall_darwin_libSystem.go index 4b4ae460f..f34c86c89 100644 --- a/vendor/golang.org/x/sys/unix/syscall_darwin_libSystem.go +++ b/vendor/golang.org/x/sys/unix/syscall_darwin_libSystem.go @@ -15,6 +15,7 @@ func syscall_syscall6X(fn, a1, a2, a3, a4, a5, a6 uintptr) (r1, r2 uintptr, err func syscall_syscall9(fn, a1, a2, a3, a4, a5, a6, a7, a8, a9 uintptr) (r1, r2 uintptr, 
err Errno) // 32-bit only func syscall_rawSyscall(fn, a1, a2, a3 uintptr) (r1, r2 uintptr, err Errno) func syscall_rawSyscall6(fn, a1, a2, a3, a4, a5, a6 uintptr) (r1, r2 uintptr, err Errno) +func syscall_syscallPtr(fn, a1, a2, a3 uintptr) (r1, r2 uintptr, err Errno) //go:linkname syscall_syscall syscall.syscall //go:linkname syscall_syscall6 syscall.syscall6 @@ -22,6 +23,7 @@ func syscall_rawSyscall6(fn, a1, a2, a3, a4, a5, a6 uintptr) (r1, r2 uintptr, er //go:linkname syscall_syscall9 syscall.syscall9 //go:linkname syscall_rawSyscall syscall.rawSyscall //go:linkname syscall_rawSyscall6 syscall.rawSyscall6 +//go:linkname syscall_syscallPtr syscall.syscallPtr // Find the entry point for f. See comments in runtime/proc.go for the // function of the same name. diff --git a/vendor/golang.org/x/sys/unix/syscall_dragonfly.go b/vendor/golang.org/x/sys/unix/syscall_dragonfly.go index 260a400f9..8c8d50297 100644 --- a/vendor/golang.org/x/sys/unix/syscall_dragonfly.go +++ b/vendor/golang.org/x/sys/unix/syscall_dragonfly.go @@ -14,6 +14,8 @@ package unix import "unsafe" +//sys sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) = SYS___SYSCTL + // SockaddrDatalink implements the Sockaddr interface for AF_LINK type sockets. type SockaddrDatalink struct { Len uint8 @@ -150,43 +152,6 @@ func setattrlistTimes(path string, times []Timespec, flags int) error { //sys ioctl(fd int, req uint, arg uintptr) (err error) -// ioctl itself should not be exposed directly, but additional get/set -// functions for specific types are permissible. - -// IoctlSetInt performs an ioctl operation which sets an integer value -// on fd, using the specified request number. -func IoctlSetInt(fd int, req uint, value int) error { - return ioctl(fd, req, uintptr(value)) -} - -func ioctlSetWinsize(fd int, req uint, value *Winsize) error { - return ioctl(fd, req, uintptr(unsafe.Pointer(value))) -} - -func ioctlSetTermios(fd int, req uint, value *Termios) error { - return ioctl(fd, req, uintptr(unsafe.Pointer(value))) -} - -// IoctlGetInt performs an ioctl operation which gets an integer value -// from fd, using the specified request number. 
-func IoctlGetInt(fd int, req uint) (int, error) { - var value int - err := ioctl(fd, req, uintptr(unsafe.Pointer(&value))) - return value, err -} - -func IoctlGetWinsize(fd int, req uint) (*Winsize, error) { - var value Winsize - err := ioctl(fd, req, uintptr(unsafe.Pointer(&value))) - return &value, err -} - -func IoctlGetTermios(fd int, req uint) (*Termios, error) { - var value Termios - err := ioctl(fd, req, uintptr(unsafe.Pointer(&value))) - return &value, err -} - func sysctlUname(mib []_C_int, old *byte, oldlen *uintptr) error { err := sysctl(mib, old, oldlen, nil, 0) if err != nil { @@ -325,7 +290,7 @@ func Sendfile(outfd int, infd int, offset *int64, count int) (written int, err e //sys Revoke(path string) (err error) //sys Rmdir(path string) (err error) //sys Seek(fd int, offset int64, whence int) (newoffset int64, err error) = SYS_LSEEK -//sys Select(n int, r *FdSet, w *FdSet, e *FdSet, timeout *Timeval) (err error) +//sys Select(nfd int, r *FdSet, w *FdSet, e *FdSet, timeout *Timeval) (n int, err error) //sysnb Setegid(egid int) (err error) //sysnb Seteuid(euid int) (err error) //sysnb Setgid(gid int) (err error) diff --git a/vendor/golang.org/x/sys/unix/syscall_dragonfly_amd64.go b/vendor/golang.org/x/sys/unix/syscall_dragonfly_amd64.go index 9babb31ea..a6b4830ac 100644 --- a/vendor/golang.org/x/sys/unix/syscall_dragonfly_amd64.go +++ b/vendor/golang.org/x/sys/unix/syscall_dragonfly_amd64.go @@ -33,6 +33,10 @@ func (msghdr *Msghdr) SetControllen(length int) { msghdr.Controllen = uint32(length) } +func (msghdr *Msghdr) SetIovlen(length int) { + msghdr.Iovlen = int32(length) +} + func (cmsg *Cmsghdr) SetLen(length int) { cmsg.Len = uint32(length) } diff --git a/vendor/golang.org/x/sys/unix/syscall_freebsd.go b/vendor/golang.org/x/sys/unix/syscall_freebsd.go index 329d240b9..25ac9340c 100644 --- a/vendor/golang.org/x/sys/unix/syscall_freebsd.go +++ b/vendor/golang.org/x/sys/unix/syscall_freebsd.go @@ -36,6 +36,8 @@ var ( // INO64_FIRST from /usr/src/lib/libc/sys/compat-ino64.h const _ino64First = 1200031 +//sys sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) = SYS___SYSCTL + func supportsABI(ver uint32) bool { osreldateOnce.Do(func() { osreldate, _ = SysctlUint32("kern.osreldate") }) return osreldate >= ver @@ -201,43 +203,6 @@ func setattrlistTimes(path string, times []Timespec, flags int) error { //sys ioctl(fd int, req uint, arg uintptr) (err error) -// ioctl itself should not be exposed directly, but additional get/set -// functions for specific types are permissible. - -// IoctlSetInt performs an ioctl operation which sets an integer value -// on fd, using the specified request number. -func IoctlSetInt(fd int, req uint, value int) error { - return ioctl(fd, req, uintptr(value)) -} - -func ioctlSetWinsize(fd int, req uint, value *Winsize) error { - return ioctl(fd, req, uintptr(unsafe.Pointer(value))) -} - -func ioctlSetTermios(fd int, req uint, value *Termios) error { - return ioctl(fd, req, uintptr(unsafe.Pointer(value))) -} - -// IoctlGetInt performs an ioctl operation which gets an integer value -// from fd, using the specified request number. 
diff --git a/vendor/golang.org/x/sys/unix/syscall_dragonfly_amd64.go b/vendor/golang.org/x/sys/unix/syscall_dragonfly_amd64.go
index 9babb31ea..a6b4830ac 100644
--- a/vendor/golang.org/x/sys/unix/syscall_dragonfly_amd64.go
+++ b/vendor/golang.org/x/sys/unix/syscall_dragonfly_amd64.go
@@ -33,6 +33,10 @@ func (msghdr *Msghdr) SetControllen(length int) {
 	msghdr.Controllen = uint32(length)
 }
 
+func (msghdr *Msghdr) SetIovlen(length int) {
+	msghdr.Iovlen = int32(length)
+}
+
 func (cmsg *Cmsghdr) SetLen(length int) {
 	cmsg.Len = uint32(length)
 }
diff --git a/vendor/golang.org/x/sys/unix/syscall_freebsd.go b/vendor/golang.org/x/sys/unix/syscall_freebsd.go
index 329d240b9..25ac9340c 100644
--- a/vendor/golang.org/x/sys/unix/syscall_freebsd.go
+++ b/vendor/golang.org/x/sys/unix/syscall_freebsd.go
@@ -36,6 +36,8 @@ var (
 // INO64_FIRST from /usr/src/lib/libc/sys/compat-ino64.h
 const _ino64First = 1200031
 
+//sys	sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) = SYS___SYSCTL
+
 func supportsABI(ver uint32) bool {
 	osreldateOnce.Do(func() { osreldate, _ = SysctlUint32("kern.osreldate") })
 	return osreldate >= ver
@@ -201,43 +203,6 @@ func setattrlistTimes(path string, times []Timespec, flags int) error {
 
 //sys	ioctl(fd int, req uint, arg uintptr) (err error)
 
-// ioctl itself should not be exposed directly, but additional get/set
-// functions for specific types are permissible.
-
-// IoctlSetInt performs an ioctl operation which sets an integer value
-// on fd, using the specified request number.
-func IoctlSetInt(fd int, req uint, value int) error {
-	return ioctl(fd, req, uintptr(value))
-}
-
-func ioctlSetWinsize(fd int, req uint, value *Winsize) error {
-	return ioctl(fd, req, uintptr(unsafe.Pointer(value)))
-}
-
-func ioctlSetTermios(fd int, req uint, value *Termios) error {
-	return ioctl(fd, req, uintptr(unsafe.Pointer(value)))
-}
-
-// IoctlGetInt performs an ioctl operation which gets an integer value
-// from fd, using the specified request number.
-func IoctlGetInt(fd int, req uint) (int, error) {
-	var value int
-	err := ioctl(fd, req, uintptr(unsafe.Pointer(&value)))
-	return value, err
-}
-
-func IoctlGetWinsize(fd int, req uint) (*Winsize, error) {
-	var value Winsize
-	err := ioctl(fd, req, uintptr(unsafe.Pointer(&value)))
-	return &value, err
-}
-
-func IoctlGetTermios(fd int, req uint) (*Termios, error) {
-	var value Termios
-	err := ioctl(fd, req, uintptr(unsafe.Pointer(&value)))
-	return &value, err
-}
-
 func Uname(uname *Utsname) error {
 	mib := []_C_int{CTL_KERN, KERN_OSTYPE}
 	n := unsafe.Sizeof(uname.Sysname)
@@ -688,7 +653,7 @@ func PtraceSingleStep(pid int) (err error) {
 //sys	Revoke(path string) (err error)
 //sys	Rmdir(path string) (err error)
 //sys	Seek(fd int, offset int64, whence int) (newoffset int64, err error) = SYS_LSEEK
-//sys	Select(n int, r *FdSet, w *FdSet, e *FdSet, timeout *Timeval) (err error)
+//sys	Select(nfd int, r *FdSet, w *FdSet, e *FdSet, timeout *Timeval) (n int, err error)
 //sysnb	Setegid(egid int) (err error)
 //sysnb	Seteuid(euid int) (err error)
 //sysnb	Setgid(gid int) (err error)
diff --git a/vendor/golang.org/x/sys/unix/syscall_freebsd_386.go b/vendor/golang.org/x/sys/unix/syscall_freebsd_386.go
index 21e03958c..dcc56457a 100644
--- a/vendor/golang.org/x/sys/unix/syscall_freebsd_386.go
+++ b/vendor/golang.org/x/sys/unix/syscall_freebsd_386.go
@@ -33,6 +33,10 @@ func (msghdr *Msghdr) SetControllen(length int) {
 	msghdr.Controllen = uint32(length)
 }
 
+func (msghdr *Msghdr) SetIovlen(length int) {
+	msghdr.Iovlen = int32(length)
+}
+
 func (cmsg *Cmsghdr) SetLen(length int) {
 	cmsg.Len = uint32(length)
 }
diff --git a/vendor/golang.org/x/sys/unix/syscall_freebsd_amd64.go b/vendor/golang.org/x/sys/unix/syscall_freebsd_amd64.go
index 9c945a657..321c3bace 100644
--- a/vendor/golang.org/x/sys/unix/syscall_freebsd_amd64.go
+++ b/vendor/golang.org/x/sys/unix/syscall_freebsd_amd64.go
@@ -33,6 +33,10 @@ func (msghdr *Msghdr) SetControllen(length int) {
 	msghdr.Controllen = uint32(length)
 }
 
+func (msghdr *Msghdr) SetIovlen(length int) {
+	msghdr.Iovlen = int32(length)
+}
+
 func (cmsg *Cmsghdr) SetLen(length int) {
 	cmsg.Len = uint32(length)
 }
diff --git a/vendor/golang.org/x/sys/unix/syscall_freebsd_arm.go b/vendor/golang.org/x/sys/unix/syscall_freebsd_arm.go
index 5cd6243f2..697700831 100644
--- a/vendor/golang.org/x/sys/unix/syscall_freebsd_arm.go
+++ b/vendor/golang.org/x/sys/unix/syscall_freebsd_arm.go
@@ -33,6 +33,10 @@ func (msghdr *Msghdr) SetControllen(length int) {
 	msghdr.Controllen = uint32(length)
 }
 
+func (msghdr *Msghdr) SetIovlen(length int) {
+	msghdr.Iovlen = int32(length)
+}
+
 func (cmsg *Cmsghdr) SetLen(length int) {
 	cmsg.Len = uint32(length)
 }
diff --git a/vendor/golang.org/x/sys/unix/syscall_freebsd_arm64.go b/vendor/golang.org/x/sys/unix/syscall_freebsd_arm64.go
index a31805487..dbbbfd603 100644
--- a/vendor/golang.org/x/sys/unix/syscall_freebsd_arm64.go
+++ b/vendor/golang.org/x/sys/unix/syscall_freebsd_arm64.go
@@ -33,6 +33,10 @@ func (msghdr *Msghdr) SetControllen(length int) {
 	msghdr.Controllen = uint32(length)
 }
 
+func (msghdr *Msghdr) SetIovlen(length int) {
+	msghdr.Iovlen = int32(length)
+}
+
 func (cmsg *Cmsghdr) SetLen(length int) {
 	cmsg.Len = uint32(length)
 }
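The same four-line hunk is repeated for every GOOS/GOARCH file in this patch because Msghdr.Iovlen is a different integer type on each platform (int32 here, uint32 or uint64 on the Linux ports further down); SetIovlen lets OS-independent code fill the field without per-platform casts. A hypothetical sketch of that idea — buildMsghdr is not part of the package, only an illustration:

package main

import (
	"fmt"

	"golang.org/x/sys/unix"
)

// buildMsghdr gathers byte slices into an iovec array and fills a Msghdr the
// way Sendmsg/Recvmsg do internally. SetIovlen (added in this patch) hides
// the platform-specific type of the Iovlen field. Inputs are assumed to be
// non-empty slices.
func buildMsghdr(bufs [][]byte) (unix.Msghdr, []unix.Iovec) {
	iov := make([]unix.Iovec, len(bufs))
	for i, b := range bufs {
		iov[i].Base = &b[0]
		iov[i].SetLen(len(b))
	}
	var msg unix.Msghdr
	msg.Iov = &iov[0]
	msg.SetIovlen(len(iov))
	return msg, iov
}

func main() {
	msg, iov := buildMsghdr([][]byte{[]byte("hello, "), []byte("world")})
	fmt.Println(len(iov), msg.Iovlen)
}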
diff --git a/vendor/golang.org/x/sys/unix/syscall_linux.go b/vendor/golang.org/x/sys/unix/syscall_linux.go
index 637b5017b..ebf3195b6 100644
--- a/vendor/golang.org/x/sys/unix/syscall_linux.go
+++ b/vendor/golang.org/x/sys/unix/syscall_linux.go
@@ -71,6 +71,17 @@ func Fchmodat(dirfd int, path string, mode uint32, flags int) (err error) {
 // ioctl itself should not be exposed directly, but additional get/set
 // functions for specific types are permissible.
 
+// IoctlRetInt performs an ioctl operation specified by req on a device
+// associated with opened file descriptor fd, and returns a non-negative
+// integer that is returned by the ioctl syscall.
+func IoctlRetInt(fd int, req uint) (int, error) {
+	ret, _, err := Syscall(SYS_IOCTL, uintptr(fd), uintptr(req), 0)
+	if err != 0 {
+		return 0, err
+	}
+	return int(ret), nil
+}
+
 // IoctlSetPointerInt performs an ioctl operation which sets an
 // integer value on fd, using the specified request number. The ioctl
 // argument is called with a pointer to the integer value, rather than
@@ -80,52 +91,18 @@ func IoctlSetPointerInt(fd int, req uint, value int) error {
 	return ioctl(fd, req, uintptr(unsafe.Pointer(&v)))
 }
 
-// IoctlSetInt performs an ioctl operation which sets an integer value
-// on fd, using the specified request number.
-func IoctlSetInt(fd int, req uint, value int) error {
-	return ioctl(fd, req, uintptr(value))
-}
-
-func ioctlSetWinsize(fd int, req uint, value *Winsize) error {
-	return ioctl(fd, req, uintptr(unsafe.Pointer(value)))
-}
-
-func ioctlSetTermios(fd int, req uint, value *Termios) error {
-	return ioctl(fd, req, uintptr(unsafe.Pointer(value)))
-}
-
 func IoctlSetRTCTime(fd int, value *RTCTime) error {
 	err := ioctl(fd, RTC_SET_TIME, uintptr(unsafe.Pointer(value)))
 	runtime.KeepAlive(value)
 	return err
 }
 
-// IoctlGetInt performs an ioctl operation which gets an integer value
-// from fd, using the specified request number.
-func IoctlGetInt(fd int, req uint) (int, error) {
-	var value int
-	err := ioctl(fd, req, uintptr(unsafe.Pointer(&value)))
-	return value, err
-}
-
 func IoctlGetUint32(fd int, req uint) (uint32, error) {
 	var value uint32
 	err := ioctl(fd, req, uintptr(unsafe.Pointer(&value)))
 	return value, err
 }
 
-func IoctlGetWinsize(fd int, req uint) (*Winsize, error) {
-	var value Winsize
-	err := ioctl(fd, req, uintptr(unsafe.Pointer(&value)))
-	return &value, err
-}
-
-func IoctlGetTermios(fd int, req uint) (*Termios, error) {
-	var value Termios
-	err := ioctl(fd, req, uintptr(unsafe.Pointer(&value)))
-	return &value, err
-}
-
 func IoctlGetRTCTime(fd int) (*RTCTime, error) {
 	var value RTCTime
 	err := ioctl(fd, RTC_RD_TIME, uintptr(unsafe.Pointer(&value)))
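IoctlRetInt covers requests that report their result in the ioctl return value itself, unlike the pointer-based getters kept above (IoctlGetUint32, IoctlGetRTCTime). A hedged sketch of the difference, using the namespace ioctl NS_GET_NSTYPE as an assumed example of a value-returning request:

package main

import (
	"fmt"

	"golang.org/x/sys/unix"
)

func main() {
	// NS_GET_NSTYPE reports the namespace type as the ioctl result, which is
	// exactly the shape of request IoctlRetInt is for; pointer-filling
	// requests keep using the IoctlGet* helpers. Needs Linux >= 4.11.
	fd, err := unix.Open("/proc/self/ns/net", unix.O_RDONLY, 0)
	if err != nil {
		fmt.Println("open:", err)
		return
	}
	defer unix.Close(fd)

	nstype, err := unix.IoctlRetInt(fd, unix.NS_GET_NSTYPE)
	if err != nil {
		fmt.Println("ioctl:", err)
		return
	}
	fmt.Println("namespace type:", nstype, "is CLONE_NEWNET:", nstype == unix.CLONE_NEWNET)
}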
@@ -798,6 +775,70 @@ func (sa *SockaddrPPPoE) sockaddr() (unsafe.Pointer, _Socklen, error) {
 	return unsafe.Pointer(&sa.raw), SizeofSockaddrPPPoX, nil
 }
 
+// SockaddrTIPC implements the Sockaddr interface for AF_TIPC type sockets.
+// For more information on TIPC, see: http://tipc.sourceforge.net/.
+type SockaddrTIPC struct {
+	// Scope is the publication scopes when binding service/service range.
+	// Should be set to TIPC_CLUSTER_SCOPE or TIPC_NODE_SCOPE.
+	Scope int
+
+	// Addr is the type of address used to manipulate a socket. Addr must be
+	// one of:
+	// - *TIPCSocketAddr: "id" variant in the C addr union
+	// - *TIPCServiceRange: "nameseq" variant in the C addr union
+	// - *TIPCServiceName: "name" variant in the C addr union
+	//
+	// If nil, EINVAL will be returned when the structure is used.
+	Addr TIPCAddr
+
+	raw RawSockaddrTIPC
+}
+
+// TIPCAddr is implemented by types that can be used as an address for
+// SockaddrTIPC. It is only implemented by *TIPCSocketAddr, *TIPCServiceRange,
+// and *TIPCServiceName.
+type TIPCAddr interface {
+	tipcAddrtype() uint8
+	tipcAddr() [12]byte
+}
+
+func (sa *TIPCSocketAddr) tipcAddr() [12]byte {
+	var out [12]byte
+	copy(out[:], (*(*[unsafe.Sizeof(TIPCSocketAddr{})]byte)(unsafe.Pointer(sa)))[:])
+	return out
+}
+
+func (sa *TIPCSocketAddr) tipcAddrtype() uint8 { return TIPC_SOCKET_ADDR }
+
+func (sa *TIPCServiceRange) tipcAddr() [12]byte {
+	var out [12]byte
+	copy(out[:], (*(*[unsafe.Sizeof(TIPCServiceRange{})]byte)(unsafe.Pointer(sa)))[:])
+	return out
+}
+
+func (sa *TIPCServiceRange) tipcAddrtype() uint8 { return TIPC_SERVICE_RANGE }
+
+func (sa *TIPCServiceName) tipcAddr() [12]byte {
+	var out [12]byte
+	copy(out[:], (*(*[unsafe.Sizeof(TIPCServiceName{})]byte)(unsafe.Pointer(sa)))[:])
+	return out
+}
+
+func (sa *TIPCServiceName) tipcAddrtype() uint8 { return TIPC_SERVICE_ADDR }
+
+func (sa *SockaddrTIPC) sockaddr() (unsafe.Pointer, _Socklen, error) {
+	if sa.Addr == nil {
+		return nil, 0, EINVAL
+	}
+
+	sa.raw.Family = AF_TIPC
+	sa.raw.Scope = int8(sa.Scope)
+	sa.raw.Addrtype = sa.Addr.tipcAddrtype()
+	sa.raw.Addr = sa.Addr.tipcAddr()
+
+	return unsafe.Pointer(&sa.raw), SizeofSockaddrTIPC, nil
+}
+
 func anyToSockaddr(fd int, rsa *RawSockaddrAny) (Sockaddr, error) {
 	switch rsa.Addr.Family {
 	case AF_NETLINK:
@@ -923,6 +964,27 @@ func anyToSockaddr(fd int, rsa *RawSockaddrAny) (Sockaddr, error) {
 				break
 			}
 		}
+		return sa, nil
+	case AF_TIPC:
+		pp := (*RawSockaddrTIPC)(unsafe.Pointer(rsa))
+
+		sa := &SockaddrTIPC{
+			Scope: int(pp.Scope),
+		}
+
+		// Determine which union variant is present in pp.Addr by checking
+		// pp.Addrtype.
+		switch pp.Addrtype {
+		case TIPC_SERVICE_RANGE:
+			sa.Addr = (*TIPCServiceRange)(unsafe.Pointer(&pp.Addr))
+		case TIPC_SERVICE_ADDR:
+			sa.Addr = (*TIPCServiceName)(unsafe.Pointer(&pp.Addr))
+		case TIPC_SOCKET_ADDR:
+			sa.Addr = (*TIPCSocketAddr)(unsafe.Pointer(&pp.Addr))
+		default:
+			return nil, EINVAL
+		}
+		return sa, nil
 	}
 
 	return nil, EAFNOSUPPORT
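SockaddrTIPC plugs TIPC addressing into the package's generic Sockaddr plumbing (Bind, Connect, Sendto, and the anyToSockaddr conversion above). A hedged sketch of binding a service address — it assumes a kernel with the tipc module available, and the service type/instance values are made up:

package main

import (
	"fmt"

	"golang.org/x/sys/unix"
)

func main() {
	// SOCK_RDM is the usual reliable-datagram TIPC socket type. Without the
	// tipc module loaded, Socket fails with EAFNOSUPPORT.
	fd, err := unix.Socket(unix.AF_TIPC, unix.SOCK_RDM, 0)
	if err != nil {
		fmt.Println("socket:", err)
		return
	}
	defer unix.Close(fd)

	sa := &unix.SockaddrTIPC{
		Scope: unix.TIPC_CLUSTER_SCOPE,
		// TIPCServiceName is the "name" variant of the address union;
		// type 1000, instance 1 are arbitrary example values.
		Addr: &unix.TIPCServiceName{Type: 1000, Instance: 1, Domain: 0},
	}
	if err := unix.Bind(fd, sa); err != nil {
		fmt.Println("bind:", err)
		return
	}
	fmt.Println("bound TIPC service 1000/1")
}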
@@ -1160,6 +1222,34 @@ func KeyctlDHCompute(params *KeyctlDHParams, buffer []byte) (size int, err error
 	return keyctlDH(KEYCTL_DH_COMPUTE, params, buffer)
 }
 
+// KeyctlRestrictKeyring implements the KEYCTL_RESTRICT_KEYRING command. This
+// command limits the set of keys that can be linked to the keyring, regardless
+// of keyring permissions. The command requires the "setattr" permission.
+//
+// When called with an empty keyType the command locks the keyring, preventing
+// any further keys from being linked to the keyring.
+//
+// The "asymmetric" keyType defines restrictions requiring key payloads to be
+// DER encoded X.509 certificates signed by keys in another keyring. Restrictions
+// for "asymmetric" include "builtin_trusted", "builtin_and_secondary_trusted",
+// "key_or_keyring:", and "key_or_keyring::chain".
+//
+// As of Linux 4.12, only the "asymmetric" keyType defines type-specific
+// restrictions.
+//
+// See the full documentation at:
+// http://man7.org/linux/man-pages/man3/keyctl_restrict_keyring.3.html
+// http://man7.org/linux/man-pages/man2/keyctl.2.html
+func KeyctlRestrictKeyring(ringid int, keyType string, restriction string) error {
+	if keyType == "" {
+		return keyctlRestrictKeyring(KEYCTL_RESTRICT_KEYRING, ringid)
+	}
+	return keyctlRestrictKeyringByType(KEYCTL_RESTRICT_KEYRING, ringid, keyType, restriction)
+}
+
+//sys	keyctlRestrictKeyringByType(cmd int, arg2 int, keyType string, restriction string) (err error) = SYS_KEYCTL
+//sys	keyctlRestrictKeyring(cmd int, arg2 int) (err error) = SYS_KEYCTL
+
 func Recvmsg(fd int, p, oob []byte, flags int) (n, oobn int, recvflags int, from Sockaddr, err error) {
 	var msg Msghdr
 	var rsa RawSockaddrAny
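KeyctlRestrictKeyring exposes KEYCTL_RESTRICT_KEYRING as described in the comment above. A hedged sketch of the simplest variant — locking a freshly created session keyring by passing an empty keyType; KeyctlJoinSessionKeyring is used here only to obtain a keyring id to operate on, and the call needs keyctl support plus setattr permission on the keyring:

package main

import (
	"fmt"

	"golang.org/x/sys/unix"
)

func main() {
	// Create (and join) a throwaway session keyring, then lock it: with an
	// empty keyType no further keys can be linked to it. The "asymmetric"
	// keyType would instead restrict payloads to signed X.509 certificates,
	// as the documentation above describes.
	ring, err := unix.KeyctlJoinSessionKeyring("example-restricted-ring")
	if err != nil {
		fmt.Println("join:", err)
		return
	}

	if err := unix.KeyctlRestrictKeyring(ring, "", ""); err != nil {
		fmt.Println("restrict:", err)
		return
	}
	fmt.Println("keyring", ring, "is now locked")
}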
@@ -1403,8 +1493,12 @@ func PtraceSyscall(pid int, signal int) (err error) {
 
 func PtraceSingleStep(pid int) (err error) { return ptrace(PTRACE_SINGLESTEP, pid, 0, 0) }
 
+func PtraceInterrupt(pid int) (err error) { return ptrace(PTRACE_INTERRUPT, pid, 0, 0) }
+
 func PtraceAttach(pid int) (err error) { return ptrace(PTRACE_ATTACH, pid, 0, 0) }
 
+func PtraceSeize(pid int) (err error) { return ptrace(PTRACE_SEIZE, pid, 0, 0) }
+
 func PtraceDetach(pid int) (err error) { return ptrace(PTRACE_DETACH, pid, 0, 0) }
 
 //sys	reboot(magic1 uint, magic2 uint, cmd int, arg string) (err error)
@@ -1761,6 +1855,17 @@ func OpenByHandleAt(mountFD int, handle FileHandle, flags int) (fd int, err erro
 	return openByHandleAt(mountFD, handle.fileHandle, flags)
 }
 
+// Klogset wraps the sys_syslog system call; it sets console_loglevel to
+// the value specified by arg and passes a dummy pointer to bufp.
+func Klogset(typ int, arg int) (err error) {
+	var p unsafe.Pointer
+	_, _, errno := Syscall(SYS_SYSLOG, uintptr(typ), uintptr(p), uintptr(arg))
+	if errno != 0 {
+		return errnoErr(errno)
+	}
+	return nil
+}
+
 /*
  * Unimplemented
 */
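Klogset gives callers a way to change the kernel's console_loglevel through syslog(2), per the comment above. A hedged sketch — SYSLOG_ACTION_CONSOLE_LEVEL is assumed to be the matching constant in this package, and the call only succeeds with CAP_SYSLOG (otherwise it returns EPERM):

package main

import (
	"fmt"

	"golang.org/x/sys/unix"
)

func main() {
	// With SYSLOG_ACTION_CONSOLE_LEVEL, the second argument is the new
	// console_loglevel; 4 keeps warnings and worse on the console.
	if err := unix.Klogset(unix.SYSLOG_ACTION_CONSOLE_LEVEL, 4); err != nil {
		fmt.Println("klogset:", err)
		return
	}
	fmt.Println("console_loglevel set to 4")
}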
diff --git a/vendor/golang.org/x/sys/unix/syscall_linux_386.go b/vendor/golang.org/x/sys/unix/syscall_linux_386.go
index e2f8cf6e5..e7fa665e6 100644
--- a/vendor/golang.org/x/sys/unix/syscall_linux_386.go
+++ b/vendor/golang.org/x/sys/unix/syscall_linux_386.go
@@ -372,6 +372,10 @@ func (msghdr *Msghdr) SetControllen(length int) {
 	msghdr.Controllen = uint32(length)
 }
 
+func (msghdr *Msghdr) SetIovlen(length int) {
+	msghdr.Iovlen = uint32(length)
+}
+
 func (cmsg *Cmsghdr) SetLen(length int) {
 	cmsg.Len = uint32(length)
 }
diff --git a/vendor/golang.org/x/sys/unix/syscall_linux_amd64.go b/vendor/golang.org/x/sys/unix/syscall_linux_amd64.go
index 87a30744d..088ce0f93 100644
--- a/vendor/golang.org/x/sys/unix/syscall_linux_amd64.go
+++ b/vendor/golang.org/x/sys/unix/syscall_linux_amd64.go
@@ -163,6 +163,10 @@ func (msghdr *Msghdr) SetControllen(length int) {
 	msghdr.Controllen = uint64(length)
 }
 
+func (msghdr *Msghdr) SetIovlen(length int) {
+	msghdr.Iovlen = uint64(length)
+}
+
 func (cmsg *Cmsghdr) SetLen(length int) {
 	cmsg.Len = uint64(length)
 }
diff --git a/vendor/golang.org/x/sys/unix/syscall_linux_arm.go b/vendor/golang.org/x/sys/unix/syscall_linux_arm.go
index f62679443..11930fc8f 100644
--- a/vendor/golang.org/x/sys/unix/syscall_linux_arm.go
+++ b/vendor/golang.org/x/sys/unix/syscall_linux_arm.go
@@ -252,6 +252,10 @@ func (msghdr *Msghdr) SetControllen(length int) {
 	msghdr.Controllen = uint32(length)
 }
 
+func (msghdr *Msghdr) SetIovlen(length int) {
+	msghdr.Iovlen = uint32(length)
+}
+
 func (cmsg *Cmsghdr) SetLen(length int) {
 	cmsg.Len = uint32(length)
 }
diff --git a/vendor/golang.org/x/sys/unix/syscall_linux_arm64.go b/vendor/golang.org/x/sys/unix/syscall_linux_arm64.go
index cb20b15d5..251e2d971 100644
--- a/vendor/golang.org/x/sys/unix/syscall_linux_arm64.go
+++ b/vendor/golang.org/x/sys/unix/syscall_linux_arm64.go
@@ -180,6 +180,10 @@ func (msghdr *Msghdr) SetControllen(length int) {
 	msghdr.Controllen = uint64(length)
 }
 
+func (msghdr *Msghdr) SetIovlen(length int) {
+	msghdr.Iovlen = uint64(length)
+}
+
 func (cmsg *Cmsghdr) SetLen(length int) {
 	cmsg.Len = uint64(length)
 }
diff --git a/vendor/golang.org/x/sys/unix/syscall_linux_mips64x.go b/vendor/golang.org/x/sys/unix/syscall_linux_mips64x.go
index b3b21ec1e..7562fe97b 100644
--- a/vendor/golang.org/x/sys/unix/syscall_linux_mips64x.go
+++ b/vendor/golang.org/x/sys/unix/syscall_linux_mips64x.go
@@ -208,6 +208,10 @@ func (msghdr *Msghdr) SetControllen(length int) {
 	msghdr.Controllen = uint64(length)
 }
 
+func (msghdr *Msghdr) SetIovlen(length int) {
+	msghdr.Iovlen = uint64(length)
+}
+
 func (cmsg *Cmsghdr) SetLen(length int) {
 	cmsg.Len = uint64(length)
 }
diff --git a/vendor/golang.org/x/sys/unix/syscall_linux_mipsx.go b/vendor/golang.org/x/sys/unix/syscall_linux_mipsx.go
index 5144d4e13..a939ff8f2 100644
--- a/vendor/golang.org/x/sys/unix/syscall_linux_mipsx.go
+++ b/vendor/golang.org/x/sys/unix/syscall_linux_mipsx.go
@@ -220,6 +220,10 @@ func (msghdr *Msghdr) SetControllen(length int) {
 	msghdr.Controllen = uint32(length)
 }
 
+func (msghdr *Msghdr) SetIovlen(length int) {
+	msghdr.Iovlen = uint32(length)
+}
+
 func (cmsg *Cmsghdr) SetLen(length int) {
 	cmsg.Len = uint32(length)
 }
diff --git a/vendor/golang.org/x/sys/unix/syscall_linux_ppc64x.go b/vendor/golang.org/x/sys/unix/syscall_linux_ppc64x.go
index 0a100b66a..28d6d0f22 100644
--- a/vendor/golang.org/x/sys/unix/syscall_linux_ppc64x.go
+++ b/vendor/golang.org/x/sys/unix/syscall_linux_ppc64x.go
@@ -91,6 +91,10 @@ func (msghdr *Msghdr) SetControllen(length int) {
 	msghdr.Controllen = uint64(length)
 }
 
+func (msghdr *Msghdr) SetIovlen(length int) {
+	msghdr.Iovlen = uint64(length)
+}
+
 func (cmsg *Cmsghdr) SetLen(length int) {
 	cmsg.Len = uint64(length)
 }
diff --git a/vendor/golang.org/x/sys/unix/syscall_linux_riscv64.go b/vendor/golang.org/x/sys/unix/syscall_linux_riscv64.go
index 6230f6405..6798c2625 100644
--- a/vendor/golang.org/x/sys/unix/syscall_linux_riscv64.go
+++ b/vendor/golang.org/x/sys/unix/syscall_linux_riscv64.go
@@ -179,6 +179,10 @@ func (msghdr *Msghdr) SetControllen(length int) {
 	msghdr.Controllen = uint64(length)
 }
 
+func (msghdr *Msghdr) SetIovlen(length int) {
+	msghdr.Iovlen = uint64(length)
+}
+
 func (cmsg *Cmsghdr) SetLen(length int) {
 	cmsg.Len = uint64(length)
 }
diff --git a/vendor/golang.org/x/sys/unix/syscall_linux_s390x.go b/vendor/golang.org/x/sys/unix/syscall_linux_s390x.go
index f81dbdc9c..eb5cb1a71 100644
--- a/vendor/golang.org/x/sys/unix/syscall_linux_s390x.go
+++ b/vendor/golang.org/x/sys/unix/syscall_linux_s390x.go
@@ -120,6 +120,10 @@ func (msghdr *Msghdr) SetControllen(length int) {
 	msghdr.Controllen = uint64(length)
 }
 
+func (msghdr *Msghdr) SetIovlen(length int) {
+	msghdr.Iovlen = uint64(length)
+}
+
 func (cmsg *Cmsghdr) SetLen(length int) {
 	cmsg.Len = uint64(length)
 }
diff --git a/vendor/golang.org/x/sys/unix/syscall_linux_sparc64.go b/vendor/golang.org/x/sys/unix/syscall_linux_sparc64.go
index b69565616..37321c12e 100644
--- a/vendor/golang.org/x/sys/unix/syscall_linux_sparc64.go
+++ b/vendor/golang.org/x/sys/unix/syscall_linux_sparc64.go
@@ -107,6 +107,10 @@ func (msghdr *Msghdr) SetControllen(length int) {
 	msghdr.Controllen = uint64(length)
 }
 
+func (msghdr *Msghdr) SetIovlen(length int) {
+	msghdr.Iovlen = uint64(length)
+}
+
 func (cmsg *Cmsghdr) SetLen(length int) {
 	cmsg.Len = uint64(length)
 }
diff --git a/vendor/golang.org/x/sys/unix/syscall_netbsd.go b/vendor/golang.org/x/sys/unix/syscall_netbsd.go
index 5ef309040..f95463ee2 100644
--- a/vendor/golang.org/x/sys/unix/syscall_netbsd.go
+++ b/vendor/golang.org/x/sys/unix/syscall_netbsd.go
@@ -18,6 +18,8 @@ import (
 	"unsafe"
 )
 
+//sys	sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) = SYS___SYSCTL
+
 // SockaddrDatalink implements the Sockaddr interface for AF_LINK type sockets.
 type SockaddrDatalink struct {
 	Len    uint8
@@ -187,43 +189,6 @@ func setattrlistTimes(path string, times []Timespec, flags int) error {
 
 //sys	ioctl(fd int, req uint, arg uintptr) (err error)
 
-// ioctl itself should not be exposed directly, but additional get/set
-// functions for specific types are permissible.
-
-// IoctlSetInt performs an ioctl operation which sets an integer value
-// on fd, using the specified request number.
-func IoctlSetInt(fd int, req uint, value int) error {
-	return ioctl(fd, req, uintptr(value))
-}
-
-func ioctlSetWinsize(fd int, req uint, value *Winsize) error {
-	return ioctl(fd, req, uintptr(unsafe.Pointer(value)))
-}
-
-func ioctlSetTermios(fd int, req uint, value *Termios) error {
-	return ioctl(fd, req, uintptr(unsafe.Pointer(value)))
-}
-
-// IoctlGetInt performs an ioctl operation which gets an integer value
-// from fd, using the specified request number.
-func IoctlGetInt(fd int, req uint) (int, error) {
-	var value int
-	err := ioctl(fd, req, uintptr(unsafe.Pointer(&value)))
-	return value, err
-}
-
-func IoctlGetWinsize(fd int, req uint) (*Winsize, error) {
-	var value Winsize
-	err := ioctl(fd, req, uintptr(unsafe.Pointer(&value)))
-	return &value, err
-}
-
-func IoctlGetTermios(fd int, req uint) (*Termios, error) {
-	var value Termios
-	err := ioctl(fd, req, uintptr(unsafe.Pointer(&value)))
-	return &value, err
-}
-
 func IoctlGetPtmget(fd int, req uint) (*Ptmget, error) {
 	var value Ptmget
 	err := ioctl(fd, req, uintptr(unsafe.Pointer(&value)))
@@ -365,7 +330,7 @@ func Sendfile(outfd int, infd int, offset *int64, count int) (written int, err e
 //sys	Revoke(path string) (err error)
 //sys	Rmdir(path string) (err error)
 //sys	Seek(fd int, offset int64, whence int) (newoffset int64, err error) = SYS_LSEEK
-//sys	Select(n int, r *FdSet, w *FdSet, e *FdSet, timeout *Timeval) (err error)
+//sys	Select(nfd int, r *FdSet, w *FdSet, e *FdSet, timeout *Timeval) (n int, err error)
 //sysnb	Setegid(egid int) (err error)
 //sysnb	Seteuid(euid int) (err error)
 //sysnb	Setgid(gid int) (err error)
diff --git a/vendor/golang.org/x/sys/unix/syscall_netbsd_386.go b/vendor/golang.org/x/sys/unix/syscall_netbsd_386.go
index 24f74e58c..24da8b524 100644
--- a/vendor/golang.org/x/sys/unix/syscall_netbsd_386.go
+++ b/vendor/golang.org/x/sys/unix/syscall_netbsd_386.go
@@ -28,6 +28,10 @@ func (msghdr *Msghdr) SetControllen(length int) {
 	msghdr.Controllen = uint32(length)
 }
 
+func (msghdr *Msghdr) SetIovlen(length int) {
+	msghdr.Iovlen = int32(length)
+}
+
 func (cmsg *Cmsghdr) SetLen(length int) {
 	cmsg.Len = uint32(length)
 }
diff --git a/vendor/golang.org/x/sys/unix/syscall_netbsd_amd64.go b/vendor/golang.org/x/sys/unix/syscall_netbsd_amd64.go
index 6878bf7ff..25a0ac825 100644
--- a/vendor/golang.org/x/sys/unix/syscall_netbsd_amd64.go
+++ b/vendor/golang.org/x/sys/unix/syscall_netbsd_amd64.go
@@ -28,6 +28,10 @@ func (msghdr *Msghdr) SetControllen(length int) {
 	msghdr.Controllen = uint32(length)
 }
 
+func (msghdr *Msghdr) SetIovlen(length int) {
+	msghdr.Iovlen = int32(length)
+}
+
 func (cmsg *Cmsghdr) SetLen(length int) {
 	cmsg.Len = uint32(length)
 }
diff --git a/vendor/golang.org/x/sys/unix/syscall_netbsd_arm.go b/vendor/golang.org/x/sys/unix/syscall_netbsd_arm.go
index dbbfcf71d..21591ecd4 100644
--- a/vendor/golang.org/x/sys/unix/syscall_netbsd_arm.go
+++ b/vendor/golang.org/x/sys/unix/syscall_netbsd_arm.go
@@ -28,6 +28,10 @@ func (msghdr *Msghdr) SetControllen(length int) {
 	msghdr.Controllen = uint32(length)
 }
 
+func (msghdr *Msghdr) SetIovlen(length int) {
+	msghdr.Iovlen = int32(length)
+}
+
 func (cmsg *Cmsghdr) SetLen(length int) {
 	cmsg.Len = uint32(length)
 }
diff --git a/vendor/golang.org/x/sys