forked from vikunja/vikunja
Compare commits
160 Commits
release/0.
...
master
Author | SHA1 | Date |
---|---|---|
renovate | 87048818ce | |
kolaente | d2d610e0f5 | |
kolaente | ae6144c042 | |
kolaente | d1c65935e6 | |
konrad | 2b5c9ae7a8 | |
kolaente | f67fe2ce25 | |
renovate | 23e84f3fa4 | |
renovate | 70a076c4fe | |
renovate | 58c3b1616f | |
renovate | 24d27a93c8 | |
renovate | 27aa8662c0 | |
renovate | 479f9238ff | |
renovate | 2b84be5167 | |
renovate | 5d45af707b | |
renovate | e9a8d8c157 | |
renovate | 1cc49806e0 | |
kolaente | 316ac0558b | |
freaktechnik | ffce9b51cc | |
renovate | 760278fde6 | |
kolaente | 980fe21050 | |
kolaente | 707709deb1 | |
renovate | 6270bb3e77 | |
renovate | 2c2701c054 | |
kolaente | 2b8c9c698d | |
kolaente | 80367d60d4 | |
renovate | 688ca65edf | |
freaktechnik | dcefc18b98 | |
freaktechnik | 214f2f008e | |
kolaente | 004e432e7c | |
renovate | 47486af06d | |
kolaente | 8b9b1984fc | |
kolaente | 6dd7bcb0fe | |
kolaente | b94950d7c2 | |
konrad | 618353bf95 | |
freaktechnik | 1555081939 | |
renovate | 7fe9e6d3f6 | |
renovate | cbba0695a8 | |
renovate | 921526e086 | |
konrad | 1b21339bf8 | |
renovate | e9d6daa1a3 | |
renovate | 8b001313f4 | |
renovate | e0c8eca669 | |
renovate | c629130b3b | |
kolaente | 9252225d7f | |
kolaente | 38b5c7fb6c | |
kolaente | e26df26f78 | |
renovate | 158e07e581 | |
konrad | 699d3d6060 | |
konrad | d56a611be7 | |
kolaente | bf5d8af3f6 | |
renovate | 0769098357 | |
renovate | 71094d981f | |
renovate | 32d97f1451 | |
renovate | bf9d1c634a | |
renovate | ebd96d7766 | |
renovate | 089d156259 | |
renovate | dd589022e4 | |
renovate | b850f65295 | |
kolaente | b3d09cd2d4 | |
konrad | 64d125afd9 | |
renovate | dd5d64da3e | |
kolaente | 1776eb56fe | |
renovate | f8c135f22e | |
konrad | 0fb2edf051 | |
renovate | a6fdf114d1 | |
renovate | ca1b33d24d | |
renovate | c98b9bbee6 | |
renovate | 19a0a85c73 | |
renovate | 77122b8f1b | |
renovate | 436af467d6 | |
renovate | 04130e4ea3 | |
kolaente | 7e6e44e787 | |
kolaente | 9e7ca8df51 | |
konrad | 6bdddd462a | |
konrad | e5559137dd | |
kolaente | ecf09e17a8 | |
konrad | 14d706c91e | |
kolaente | 5317a89623 | |
kolaente | e9b1786188 | |
kolaente | 313289d28d | |
kolaente | 05e237560d | |
kolaente | e4dd314079 | |
kolaente | 118c7f25b5 | |
kolaente | b64a80da0b | |
kolaente | bb26c9d97c | |
kolaente | edf3854632 | |
kolaente | 19a66450ec | |
kolaente | 3a839dfb86 | |
kolaente | 78b261e440 | |
kolaente | 471d1b0ec5 | |
kolaente | 58dfbe13ed | |
kolaente | 1a4eef1056 | |
kolaente | 8da7db3e26 | |
konrad | d359130bcf | |
renovate | 05099e1784 | |
renovate | 79970ebb4a | |
renovate | 27b4086351 | |
renovate | ae7eafd6ad | |
renovate | 21b5aee054 | |
renovate | 2b34a8d4e6 | |
renovate | b4771c1bce | |
renovate | c83858bf7e | |
renovate | 08b8964b3d | |
kolaente | d88551e99d | |
renovate | ebd71d1f04 | |
renovate | a61ab0c5cf | |
kolaente | fa718e2576 | |
kolaente | c517a87b85 | |
kolaente | 28fd0e91ee | |
renovate | 14c27600d8 | |
kolaente | dedce20780 | |
kolaente | a58b932743 | |
kolaente | 301bebf8d3 | |
kolaente | d192c36c39 | |
kolaente | bdfb804bb2 | |
konrad | 16dbcfda7e | |
renovate | 0169ecc37e | |
kolaente | 4a70c81b33 | |
renovate | 5e84ce639f | |
renovate | 6c45388da9 | |
konrad | bd8c1c3bb7 | |
renovate | 28b8cabea5 | |
renovate | 7c91803056 | |
renovate | b375e1d043 | |
kolaente | d718d247c8 | |
kolaente | 6a82d4e2af | |
kolaente | 11722bf029 | |
konrad | dfb7730b63 | |
renovate | c9117dd037 | |
konrad | e4539ef232 | |
renovate | 0ba6ae7a18 | |
renovate | 25ecc4a510 | |
renovate | 259c2195dc | |
kolaente | 7bdc9dd428 | |
renovate | 5f8872f8cc | |
renovate | 5794ede6f6 | |
kolaente | 41cf73a473 | |
renovate | f8d84139fa | |
kolaente | d3964ff4bd | |
kolaente | 9acba7d3f0 | |
kolaente | 2d567bfe0f | |
renovate | 7207aa60fb | |
renovate | 2b9af951bf | |
renovate | c84efcbbcc | |
kolaente | 092aae3260 | |
renovate | 222582fb0c | |
kolaente | a99367bc5f | |
kolaente | c47d5c7228 | |
kolaente | 3d709e3bb7 | |
renovate | 3a9360a57b | |
renovate | dd3c4cd032 | |
kolaente | 2a4a622518 | |
konrad | 4db06ba9a1 | |
kolaente | 96f366f5e7 | |
kolaente | 78791f31a4 | |
kolaente | ec3fa9300b | |
kolaente | 9fa7e30a0a | |
jtojnar | 158d98c2bd | |
kolaente | ae12871bd7 | |
kolaente | 7141050f8b |
206
.drone1.yml
206
.drone1.yml
|
@ -52,13 +52,26 @@ steps:
|
||||||
commands:
|
commands:
|
||||||
- git fetch --tags
|
- git fetch --tags
|
||||||
|
|
||||||
- name: build
|
# We're statically compiling the magefile to avoid race condition issues caused by multiple pipeline steps
|
||||||
|
# compiling the same magefile at the same time. It's also faster if each step does not need to compile it first.
|
||||||
|
- name: mage
|
||||||
image: vikunja/golang-build:latest
|
image: vikunja/golang-build:latest
|
||||||
pull: true
|
pull: true
|
||||||
environment:
|
environment:
|
||||||
GOPROXY: 'https://goproxy.kolaente.de'
|
GOPROXY: 'https://goproxy.kolaente.de'
|
||||||
commands:
|
commands:
|
||||||
- make build
|
- mage -compile ./mage-static
|
||||||
|
when:
|
||||||
|
event: [ push, tag, pull_request ]
|
||||||
|
|
||||||
|
- name: build
|
||||||
|
image: vikunja/golang-build:latest
|
||||||
|
pull: true
|
||||||
|
environment:
|
||||||
|
GOPROXY: 'https://goproxy.kolaente.de'
|
||||||
|
depends_on: [ mage ]
|
||||||
|
commands:
|
||||||
|
- ./mage-static build:build
|
||||||
when:
|
when:
|
||||||
event: [ push, tag, pull_request ]
|
event: [ push, tag, pull_request ]
|
||||||
|
|
||||||
|
@ -69,17 +82,10 @@ steps:
|
||||||
GOPROXY: 'https://goproxy.kolaente.de'
|
GOPROXY: 'https://goproxy.kolaente.de'
|
||||||
depends_on: [ build ]
|
depends_on: [ build ]
|
||||||
commands:
|
commands:
|
||||||
- make generate
|
- ./mage-static build:generate
|
||||||
- make lint
|
- ./mage-static check:got-swag
|
||||||
- make fmt-check
|
- wget -O - -q https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin v1.31.0
|
||||||
# - make got-swag # Commented out until we figured out how to get this working on drone
|
- ./mage-static check:golangci
|
||||||
- make ineffassign-check
|
|
||||||
- make misspell-check
|
|
||||||
- make goconst-check
|
|
||||||
- make gocyclo-check
|
|
||||||
- make static-check
|
|
||||||
- wget -O - -q https://raw.githubusercontent.com/securego/gosec/master/install.sh | sh -s -- -b $GOPATH/bin v2.2.0 # Need to manually install as it does not support being installed via go modules like the rest.
|
|
||||||
- make gosec-check
|
|
||||||
when:
|
when:
|
||||||
event: [ push, tag, pull_request ]
|
event: [ push, tag, pull_request ]
|
||||||
|
|
||||||
|
@ -152,9 +158,9 @@ steps:
|
||||||
environment:
|
environment:
|
||||||
GOPROXY: 'https://goproxy.kolaente.de'
|
GOPROXY: 'https://goproxy.kolaente.de'
|
||||||
commands:
|
commands:
|
||||||
- make generate
|
- ./mage-static build:generate
|
||||||
- make test
|
- ./mage-static test:unit
|
||||||
depends_on: [ fetch-tags ]
|
depends_on: [ fetch-tags, mage ]
|
||||||
when:
|
when:
|
||||||
event: [ push, tag, pull_request ]
|
event: [ push, tag, pull_request ]
|
||||||
|
|
||||||
|
@ -166,9 +172,9 @@ steps:
|
||||||
VIKUNJA_TESTS_USE_CONFIG: 1
|
VIKUNJA_TESTS_USE_CONFIG: 1
|
||||||
VIKUNJA_DATABASE_TYPE: sqlite
|
VIKUNJA_DATABASE_TYPE: sqlite
|
||||||
commands:
|
commands:
|
||||||
- make generate
|
- ./mage-static build:generate
|
||||||
- make test
|
- ./mage-static test:unit
|
||||||
depends_on: [ fetch-tags ]
|
depends_on: [ fetch-tags, mage ]
|
||||||
when:
|
when:
|
||||||
event: [ push, tag, pull_request ]
|
event: [ push, tag, pull_request ]
|
||||||
|
|
||||||
|
@ -184,9 +190,9 @@ steps:
|
||||||
VIKUNJA_DATABASE_PASSWORD: vikunjatest
|
VIKUNJA_DATABASE_PASSWORD: vikunjatest
|
||||||
VIKUNJA_DATABASE_DATABASE: vikunjatest
|
VIKUNJA_DATABASE_DATABASE: vikunjatest
|
||||||
commands:
|
commands:
|
||||||
- make generate
|
- ./mage-static build:generate
|
||||||
- make test
|
- ./mage-static test:unit
|
||||||
depends_on: [ fetch-tags ]
|
depends_on: [ fetch-tags, mage ]
|
||||||
when:
|
when:
|
||||||
event: [ push, tag, pull_request ]
|
event: [ push, tag, pull_request ]
|
||||||
|
|
||||||
|
@ -203,9 +209,9 @@ steps:
|
||||||
VIKUNJA_DATABASE_DATABASE: vikunjatest
|
VIKUNJA_DATABASE_DATABASE: vikunjatest
|
||||||
VIKUNJA_DATABASE_SSLMODE: disable
|
VIKUNJA_DATABASE_SSLMODE: disable
|
||||||
commands:
|
commands:
|
||||||
- make generate
|
- ./mage-static build:generate
|
||||||
- make test
|
- ./mage-static test:unit
|
||||||
depends_on: [ fetch-tags ]
|
depends_on: [ fetch-tags, mage ]
|
||||||
when:
|
when:
|
||||||
event: [ push, tag, pull_request ]
|
event: [ push, tag, pull_request ]
|
||||||
|
|
||||||
|
@ -215,9 +221,9 @@ steps:
|
||||||
environment:
|
environment:
|
||||||
GOPROXY: 'https://goproxy.kolaente.de'
|
GOPROXY: 'https://goproxy.kolaente.de'
|
||||||
commands:
|
commands:
|
||||||
- make generate
|
- ./mage-static build:generate
|
||||||
- make integration-test
|
- ./mage-static test:integration
|
||||||
depends_on: [ fetch-tags ]
|
depends_on: [ fetch-tags, mage ]
|
||||||
when:
|
when:
|
||||||
event: [ push, tag, pull_request ]
|
event: [ push, tag, pull_request ]
|
||||||
|
|
||||||
|
@ -229,9 +235,9 @@ steps:
|
||||||
VIKUNJA_TESTS_USE_CONFIG: 1
|
VIKUNJA_TESTS_USE_CONFIG: 1
|
||||||
VIKUNJA_DATABASE_TYPE: sqlite
|
VIKUNJA_DATABASE_TYPE: sqlite
|
||||||
commands:
|
commands:
|
||||||
- make generate
|
- ./mage-static build:generate
|
||||||
- make integration-test
|
- ./mage-static test:integration
|
||||||
depends_on: [ fetch-tags ]
|
depends_on: [ fetch-tags, mage ]
|
||||||
when:
|
when:
|
||||||
event: [ push, tag, pull_request ]
|
event: [ push, tag, pull_request ]
|
||||||
|
|
||||||
|
@ -247,9 +253,9 @@ steps:
|
||||||
VIKUNJA_DATABASE_PASSWORD: vikunjatest
|
VIKUNJA_DATABASE_PASSWORD: vikunjatest
|
||||||
VIKUNJA_DATABASE_DATABASE: vikunjatest
|
VIKUNJA_DATABASE_DATABASE: vikunjatest
|
||||||
commands:
|
commands:
|
||||||
- make generate
|
- ./mage-static build:generate
|
||||||
- make integration-test
|
- ./mage-static test:integration
|
||||||
depends_on: [ fetch-tags ]
|
depends_on: [ fetch-tags, mage ]
|
||||||
when:
|
when:
|
||||||
event: [ push, tag, pull_request ]
|
event: [ push, tag, pull_request ]
|
||||||
|
|
||||||
|
@ -266,9 +272,9 @@ steps:
|
||||||
VIKUNJA_DATABASE_DATABASE: vikunjatest
|
VIKUNJA_DATABASE_DATABASE: vikunjatest
|
||||||
VIKUNJA_DATABASE_SSLMODE: disable
|
VIKUNJA_DATABASE_SSLMODE: disable
|
||||||
commands:
|
commands:
|
||||||
- make generate
|
- ./mage-static build:generate
|
||||||
- make integration-test
|
- ./mage-static test:integration
|
||||||
depends_on: [ fetch-tags ]
|
depends_on: [ fetch-tags, mage ]
|
||||||
when:
|
when:
|
||||||
event: [ push, tag, pull_request ]
|
event: [ push, tag, pull_request ]
|
||||||
|
|
||||||
|
@ -299,14 +305,27 @@ steps:
|
||||||
commands:
|
commands:
|
||||||
- git fetch --tags
|
- git fetch --tags
|
||||||
|
|
||||||
|
# We're statically compiling the magefile to avoid race condition issues caused by multiple pipeline steps
|
||||||
|
# compiling the same magefile at the same time. It's also faster if each step does not need to compile it first.
|
||||||
|
- name: mage
|
||||||
|
image: vikunja/golang-build:latest
|
||||||
|
pull: true
|
||||||
|
environment:
|
||||||
|
GOPROXY: 'https://goproxy.kolaente.de'
|
||||||
|
commands:
|
||||||
|
- mage -compile ./mage-static
|
||||||
|
when:
|
||||||
|
event: [ push, tag, pull_request ]
|
||||||
|
|
||||||
- name: before-static-build
|
- name: before-static-build
|
||||||
image: techknowlogick/xgo:latest
|
image: techknowlogick/xgo:latest
|
||||||
pull: true
|
pull: true
|
||||||
commands:
|
commands:
|
||||||
- export PATH=$PATH:$GOPATH/bin
|
- export PATH=$PATH:$GOPATH/bin
|
||||||
- make generate
|
- go install github.com/magefile/mage
|
||||||
- make release-dirs
|
- ./mage-static build:generate
|
||||||
depends_on: [ fetch-tags ]
|
- ./mage-static release:dirs
|
||||||
|
depends_on: [ fetch-tags, mage ]
|
||||||
|
|
||||||
- name: static-build-windows
|
- name: static-build-windows
|
||||||
image: techknowlogick/xgo:latest
|
image: techknowlogick/xgo:latest
|
||||||
|
@ -317,7 +336,8 @@ steps:
|
||||||
GOPATH: /srv/app
|
GOPATH: /srv/app
|
||||||
commands:
|
commands:
|
||||||
- export PATH=$PATH:$GOPATH/bin
|
- export PATH=$PATH:$GOPATH/bin
|
||||||
- make release-windows
|
- go install github.com/magefile/mage
|
||||||
|
- ./mage-static release:windows
|
||||||
depends_on: [ before-static-build ]
|
depends_on: [ before-static-build ]
|
||||||
|
|
||||||
- name: static-build-linux
|
- name: static-build-linux
|
||||||
|
@ -329,7 +349,8 @@ steps:
|
||||||
GOPATH: /srv/app
|
GOPATH: /srv/app
|
||||||
commands:
|
commands:
|
||||||
- export PATH=$PATH:$GOPATH/bin
|
- export PATH=$PATH:$GOPATH/bin
|
||||||
- make release-linux
|
- go install github.com/magefile/mage
|
||||||
|
- ./mage-static release:linux
|
||||||
depends_on: [ before-static-build ]
|
depends_on: [ before-static-build ]
|
||||||
|
|
||||||
- name: static-build-darwin
|
- name: static-build-darwin
|
||||||
|
@ -341,7 +362,8 @@ steps:
|
||||||
GOPATH: /srv/app
|
GOPATH: /srv/app
|
||||||
commands:
|
commands:
|
||||||
- export PATH=$PATH:$GOPATH/bin
|
- export PATH=$PATH:$GOPATH/bin
|
||||||
- make release-darwin
|
- go install github.com/magefile/mage
|
||||||
|
- ./mage-static release:darwin
|
||||||
depends_on: [ before-static-build ]
|
depends_on: [ before-static-build ]
|
||||||
|
|
||||||
- name: after-build-compress
|
- name: after-build-compress
|
||||||
|
@ -352,7 +374,7 @@ steps:
|
||||||
- static-build-linux
|
- static-build-linux
|
||||||
- static-build-darwin
|
- static-build-darwin
|
||||||
commands:
|
commands:
|
||||||
- make release-compress
|
- ./mage-static release:compress
|
||||||
|
|
||||||
- name: after-build-static
|
- name: after-build-static
|
||||||
image: techknowlogick/xgo:latest
|
image: techknowlogick/xgo:latest
|
||||||
|
@ -360,10 +382,11 @@ steps:
|
||||||
depends_on:
|
depends_on:
|
||||||
- after-build-compress
|
- after-build-compress
|
||||||
commands:
|
commands:
|
||||||
- make release-copy
|
- go install github.com/magefile/mage
|
||||||
- make release-check
|
- ./mage-static release:copy
|
||||||
- make release-os-package
|
- ./mage-static release:check
|
||||||
- make release-zip
|
- ./mage-static release:os-package
|
||||||
|
- ./mage-static release:zip
|
||||||
|
|
||||||
- name: sign-release
|
- name: sign-release
|
||||||
image: plugins/gpgsign:1
|
image: plugins/gpgsign:1
|
||||||
|
@ -383,48 +406,96 @@ steps:
|
||||||
image: plugins/s3:1
|
image: plugins/s3:1
|
||||||
pull: true
|
pull: true
|
||||||
settings:
|
settings:
|
||||||
bucket: vikunja
|
bucket: vikunja-releases
|
||||||
access_key:
|
access_key:
|
||||||
from_secret: aws_access_key_id
|
from_secret: aws_access_key_id
|
||||||
secret_key:
|
secret_key:
|
||||||
from_secret: aws_secret_access_key
|
from_secret: aws_secret_access_key
|
||||||
endpoint: https://storage.kolaente.de
|
endpoint: https://s3.fr-par.scw.cloud
|
||||||
|
region: fr-par
|
||||||
path_style: true
|
path_style: true
|
||||||
strip_prefix: dist/zip/
|
strip_prefix: dist/zip/
|
||||||
source: dist/zip/*
|
source: dist/zip/*
|
||||||
target: /api/master/
|
target: /api/master/
|
||||||
trigger:
|
when:
|
||||||
ref:
|
branch:
|
||||||
- refs/heads/master
|
- master
|
||||||
|
event:
|
||||||
|
- push
|
||||||
depends_on: [ sign-release ]
|
depends_on: [ sign-release ]
|
||||||
|
|
||||||
- name: release-version
|
- name: release-version
|
||||||
image: plugins/s3:1
|
image: plugins/s3:1
|
||||||
pull: true
|
pull: true
|
||||||
settings:
|
settings:
|
||||||
bucket: vikunja
|
bucket: vikunja-releases
|
||||||
access_key:
|
access_key:
|
||||||
from_secret: aws_access_key_id
|
from_secret: aws_access_key_id
|
||||||
secret_key:
|
secret_key:
|
||||||
from_secret: aws_secret_access_key
|
from_secret: aws_secret_access_key
|
||||||
endpoint: https://storage.kolaente.de
|
endpoint: https://s3.fr-par.scw.cloud
|
||||||
|
region: fr-par
|
||||||
path_style: true
|
path_style: true
|
||||||
strip_prefix: dist/zip/
|
strip_prefix: dist/zip/
|
||||||
source: dist/zip/*
|
source: dist/zip/*
|
||||||
target: /api/${DRONE_TAG##v}/
|
target: /api/${DRONE_TAG##v}/
|
||||||
trigger:
|
when:
|
||||||
ref:
|
event:
|
||||||
- "refs/tags/**"
|
- tag
|
||||||
depends_on: [ sign-release ]
|
depends_on: [ sign-release ]
|
||||||
|
|
||||||
# Build a debian package and push it to our bucket
|
# Build os packages and push it to our bucket
|
||||||
- name: build-deb
|
- name: build-os-packages
|
||||||
image: kolaente/fpm
|
image: goreleaser/nfpm
|
||||||
pull: true
|
pull: true
|
||||||
commands:
|
commands:
|
||||||
- make build-deb
|
- apk add git go
|
||||||
|
- ./mage-static release:packages
|
||||||
depends_on: [ static-build-linux ]
|
depends_on: [ static-build-linux ]
|
||||||
|
|
||||||
|
# Push the os releases to our pseudo-s3-bucket
|
||||||
|
- name: release-os-latest
|
||||||
|
image: plugins/s3:1
|
||||||
|
pull: true
|
||||||
|
settings:
|
||||||
|
bucket: vikunja-releases
|
||||||
|
access_key:
|
||||||
|
from_secret: aws_access_key_id
|
||||||
|
secret_key:
|
||||||
|
from_secret: aws_secret_access_key
|
||||||
|
endpoint: https://s3.fr-par.scw.cloud
|
||||||
|
region: fr-par
|
||||||
|
path_style: true
|
||||||
|
strip_prefix: dist/os-packages/
|
||||||
|
source: dist/os-packages/*
|
||||||
|
target: /api/master/
|
||||||
|
when:
|
||||||
|
branch:
|
||||||
|
- master
|
||||||
|
event:
|
||||||
|
- push
|
||||||
|
depends_on: [ build-os-packages ]
|
||||||
|
|
||||||
|
- name: release-os-version
|
||||||
|
image: plugins/s3:1
|
||||||
|
pull: true
|
||||||
|
settings:
|
||||||
|
bucket: vikunja-releases
|
||||||
|
access_key:
|
||||||
|
from_secret: aws_access_key_id
|
||||||
|
secret_key:
|
||||||
|
from_secret: aws_secret_access_key
|
||||||
|
endpoint: https://s3.fr-par.scw.cloud
|
||||||
|
region: fr-par
|
||||||
|
path_style: true
|
||||||
|
strip_prefix: dist/os-packages/
|
||||||
|
source: dist/os-packages/*
|
||||||
|
target: /api/${DRONE_TAG##v}/
|
||||||
|
when:
|
||||||
|
event:
|
||||||
|
- tag
|
||||||
|
depends_on: [ build-os-packages ]
|
||||||
|
|
||||||
- name: deb-structure
|
- name: deb-structure
|
||||||
image: kolaente/reprepro
|
image: kolaente/reprepro
|
||||||
pull: true
|
pull: true
|
||||||
|
@ -440,20 +511,21 @@ steps:
|
||||||
- gpg --import ~/frederik.gpg
|
- gpg --import ~/frederik.gpg
|
||||||
- mkdir debian/conf -p
|
- mkdir debian/conf -p
|
||||||
- cp build/reprepro-dist-conf debian/conf/distributions
|
- cp build/reprepro-dist-conf debian/conf/distributions
|
||||||
- make reprepro
|
- ./mage-static release:reprepro
|
||||||
depends_on: [ build-deb ]
|
depends_on: [ build-os-packages ]
|
||||||
|
|
||||||
# Push the releases to our pseudo-s3-bucket
|
# Push the releases to our pseudo-s3-bucket
|
||||||
- name: release-deb
|
- name: release-deb
|
||||||
image: plugins/s3:1
|
image: plugins/s3:1
|
||||||
pull: true
|
pull: true
|
||||||
settings:
|
settings:
|
||||||
bucket: vikunja
|
bucket: vikunja-releases
|
||||||
access_key:
|
access_key:
|
||||||
from_secret: aws_access_key_id
|
from_secret: aws_access_key_id
|
||||||
secret_key:
|
secret_key:
|
||||||
from_secret: aws_secret_access_key
|
from_secret: aws_secret_access_key
|
||||||
endpoint: https://storage.kolaente.de
|
endpoint: https://s3.fr-par.scw.cloud
|
||||||
|
region: fr-par
|
||||||
path_style: true
|
path_style: true
|
||||||
strip_prefix: debian
|
strip_prefix: debian
|
||||||
source: debian/*/*/*/*/*
|
source: debian/*/*/*/*/*
|
||||||
|
@ -626,7 +698,7 @@ depends_on:
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: telegram
|
- name: telegram
|
||||||
image: appleboy/drone-telegram
|
image: appleboy/drone-telegram:1-linux-amd64
|
||||||
settings:
|
settings:
|
||||||
token:
|
token:
|
||||||
from_secret: TELEGRAM_TOKEN
|
from_secret: TELEGRAM_TOKEN
|
||||||
|
|
|
@ -6,6 +6,6 @@
|
||||||
|
|
||||||
* [ ] I added or improved tests
|
* [ ] I added or improved tests
|
||||||
* [ ] I added or improved docs for my feature
|
* [ ] I added or improved docs for my feature
|
||||||
* [ ] Swagger (including `make do-the-swag`)
|
* [ ] Swagger (including `mage do-the-swag`)
|
||||||
* [ ] Error codes
|
* [ ] Error codes
|
||||||
* [ ] New config options
|
* [ ] New config options (including adding them to `config.yml.saml` and running `mage generate-docs`)
|
||||||
|
|
|
@ -0,0 +1 @@
|
||||||
|
custom: https://www.buymeacoffee.com/kolaente
|
|
@ -22,3 +22,4 @@ files/
|
||||||
!pkg/files/
|
!pkg/files/
|
||||||
vikunja-dump*
|
vikunja-dump*
|
||||||
vendor/
|
vendor/
|
||||||
|
os-packages/
|
||||||
|
|
|
@ -0,0 +1,76 @@
|
||||||
|
run:
|
||||||
|
timeout: 5m
|
||||||
|
tests: true
|
||||||
|
|
||||||
|
linters:
|
||||||
|
enable:
|
||||||
|
- megacheck
|
||||||
|
- govet
|
||||||
|
- goconst
|
||||||
|
- gocritic
|
||||||
|
- gocyclo
|
||||||
|
- goerr113
|
||||||
|
- goheader
|
||||||
|
- gofmt
|
||||||
|
- goimports
|
||||||
|
- golint
|
||||||
|
- misspell
|
||||||
|
disable:
|
||||||
|
- scopelint # Obsolete, using exportloopref instead
|
||||||
|
presets:
|
||||||
|
- bugs
|
||||||
|
- unused
|
||||||
|
fast: false
|
||||||
|
|
||||||
|
linter-settings:
|
||||||
|
nestif:
|
||||||
|
min-complexity: 6
|
||||||
|
goheader:
|
||||||
|
template-path: code-hesader-template.txt
|
||||||
|
|
||||||
|
issues:
|
||||||
|
exclude-rules:
|
||||||
|
# Exclude some linters from running on tests files.
|
||||||
|
- path: _test\.go
|
||||||
|
linters:
|
||||||
|
- gocyclo
|
||||||
|
- deadcode
|
||||||
|
- path: pkg/integrations/*
|
||||||
|
linters:
|
||||||
|
- gocyclo
|
||||||
|
- deadcode
|
||||||
|
- varcheck
|
||||||
|
- unparam
|
||||||
|
- bodyclose
|
||||||
|
- path: pkg/integrations/*
|
||||||
|
text: "unlambda"
|
||||||
|
linters:
|
||||||
|
- gocritic
|
||||||
|
- path: pkg/modules/background/unsplash/unsplash\.go
|
||||||
|
linters:
|
||||||
|
- bodyclose
|
||||||
|
- path: pkg/migration/*
|
||||||
|
linters:
|
||||||
|
- exhaustive
|
||||||
|
- goconst
|
||||||
|
- goerr113
|
||||||
|
- path: pkg/models/task_collection_filter\.go
|
||||||
|
linters:
|
||||||
|
- exhaustive
|
||||||
|
- path: pkg/utils/random_string\.go
|
||||||
|
text: "G404:" # We don't care about cryptographically secure randomness when we're using that utility function.
|
||||||
|
linters:
|
||||||
|
- gosec
|
||||||
|
- path: pkg/modules/dump/*
|
||||||
|
linters:
|
||||||
|
- goerr113
|
||||||
|
- path: pkg/
|
||||||
|
text: "err113: do not define dynamic errors, use wrapped static errors instead:"
|
||||||
|
linters:
|
||||||
|
- goerr113
|
||||||
|
- text: "commentFormatting: put a space between `//` and comment text"
|
||||||
|
linters:
|
||||||
|
- gocritic
|
||||||
|
- path: pkg/modules/migration
|
||||||
|
linters:
|
||||||
|
- gocyclo
|
164
CHANGELOG.md
164
CHANGELOG.md
|
@ -7,6 +7,170 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
||||||
|
|
||||||
All releases can be found on https://code.vikunja.io/api/releases.
|
All releases can be found on https://code.vikunja.io/api/releases.
|
||||||
|
|
||||||
|
## [0.15.1] - 2020-10-20
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
* Fix not possible to create tasks if metrics were enabled
|
||||||
|
|
||||||
|
## [0.15.0] - 2020-10-19
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
* Add app support info for DAV (#692)
|
||||||
|
* Add better tests for namespaces
|
||||||
|
* Add caldav enabled/disabled to /info endpoint
|
||||||
|
* Add checks if tasks exist in maps before trying to access them
|
||||||
|
* Add config option to force ssl connections to connect with the mailer
|
||||||
|
* Add dav proxy directions to example proxy configurations
|
||||||
|
* Add docs about using vikunja with utf-8 characters
|
||||||
|
* Add FreeBSD guide to installation docs
|
||||||
|
* Add github sponsor link
|
||||||
|
* Add Golangci Lint (#676)
|
||||||
|
* Add mage command to create a new migration
|
||||||
|
* Add option to configure legal urls
|
||||||
|
* Add rootpath to deb command to not include everything in the deb file
|
||||||
|
* Add toc to docs
|
||||||
|
* Add update route to toggle team member admin status
|
||||||
|
* Add util function to move files
|
||||||
|
* Disable gocyclo for migration modules
|
||||||
|
* Favorite lists (#654)
|
||||||
|
* Favorite tasks (#653)
|
||||||
|
* Generate config docs from sample config (#684)
|
||||||
|
* Kanban bucket limits (#652)
|
||||||
|
* Key-Value Storages (#674)
|
||||||
|
* Manage users via cli (#632)
|
||||||
|
* Mention client_max_body_size in nginx proxy settings
|
||||||
|
* More avatar providers (#622)
|
||||||
|
* Return rights when reading a single item (#626)
|
||||||
|
* Saved filters (#655)
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
* Cleanup references to make
|
||||||
|
* Don't add a subtask to the top level of tasks to not add it twice in the list
|
||||||
|
* Fetch tasks for caldav lists (#641)
|
||||||
|
* Fix building for darwin with mage
|
||||||
|
* Fix creating lists with non ascii characters (#607)
|
||||||
|
* Fix decoding active users from redis
|
||||||
|
* Fix dockerimage build
|
||||||
|
* Fix docs index links
|
||||||
|
* Fix duplicating a list with background
|
||||||
|
* "Fix" gocyclo
|
||||||
|
* Fix loading list background information for uploaded backgrounds
|
||||||
|
* Fix migrating items with large items from todoist
|
||||||
|
* Fix nfpm command in drone
|
||||||
|
* Fix parsing todoist reminder dates
|
||||||
|
* Fix reading passwords on windows
|
||||||
|
* Fix release commands in drone
|
||||||
|
* Fix release trigger
|
||||||
|
* Fix release trigger in drone
|
||||||
|
* Fix token renew for link shares
|
||||||
|
* Fix trigger for pushing release artifacts to drone
|
||||||
|
* Fix updating team admin status
|
||||||
|
* Fix upload avatar not working
|
||||||
|
* Fix users with disabled totp but not enrolled being unable to login
|
||||||
|
* Makefile: make add EXTRA_GOFLAG to GOFLAGS (#605)
|
||||||
|
* Make sure built binary files are executable when compressing with upx
|
||||||
|
* Make sure lists which would have a duplicate identifier can still be duplicated
|
||||||
|
* Make sure the metrics map accesses only happen explicitly
|
||||||
|
* Make sure to copy the permissions as well when moving files
|
||||||
|
* Make sure to only initialize all variables when needed
|
||||||
|
* Make sure to require admin rights when modifying list/namespace users to be consistent with teams
|
||||||
|
* Make sure we have git installed when building os packages
|
||||||
|
* Make sure we have go installed when building os packages (for build step dependencies)
|
||||||
|
* Only check if a bucket limit is exceeded when moving a task between buckets
|
||||||
|
* Only try to download attachments from todoist when there is a url
|
||||||
|
* Pin telegram notification plugin in drone
|
||||||
|
* Regenerate swagger docs
|
||||||
|
* Skip directories when moving build release artefacts in drone
|
||||||
|
* Support absolute iCal timestamps in CalDAV requests (#691)
|
||||||
|
* Work around tasks with attachments not being duplicated
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
* Replace renovate tokens with env
|
||||||
|
* Switch s3 release bucket to scaleway
|
||||||
|
* Switch to mage (#651)
|
||||||
|
* Testing improvements (#666)
|
||||||
|
* Update docs with testmail command + reorder
|
||||||
|
* Update github.com/asaskevich/govalidator commit hash to 29e1ff8 (#639)
|
||||||
|
* Update github.com/asaskevich/govalidator commit hash to 50839af (#637)
|
||||||
|
* Update github.com/asaskevich/govalidator commit hash to 7a23bdc (#657)
|
||||||
|
* Update github.com/asaskevich/govalidator commit hash to df4adff (#552)
|
||||||
|
* Update github.com/c2h5oh/datasize commit hash to 48ed595 (#644)
|
||||||
|
* Update github.com/gordonklaus/ineffassign commit hash to e36bfde (#625)
|
||||||
|
* Update github.com/jgautheron/goconst commit hash to 8f5268c (#658)
|
||||||
|
* Update github.com/shurcooL/vfsgen commit hash to 0d455de (#642)
|
||||||
|
* Update golang.org/x/crypto commit hash to 123391f (#619)
|
||||||
|
* Update golang.org/x/crypto commit hash to 5c72a88 (#640)
|
||||||
|
* Update golang.org/x/crypto commit hash to 7f63de1 (#672)
|
||||||
|
* Update golang.org/x/crypto commit hash to 84dcc77 (#678)
|
||||||
|
* Update golang.org/x/crypto commit hash to 948cd5f (#609)
|
||||||
|
* Update golang.org/x/crypto commit hash to 9e8e0b3 (#685)
|
||||||
|
* Update golang.org/x/crypto commit hash to ab33eee (#608)
|
||||||
|
* Update golang.org/x/crypto commit hash to afb6bcd (#668)
|
||||||
|
* Update golang.org/x/crypto commit hash to c90954c (#671)
|
||||||
|
* Update golang.org/x/crypto commit hash to eb9a90e (#669)
|
||||||
|
* Update golang.org/x/image commit hash to 4578eab (#663)
|
||||||
|
* Update golang.org/x/image commit hash to a67d67e (#664)
|
||||||
|
* Update golang.org/x/image commit hash to e162460 (#665)
|
||||||
|
* Update golang.org/x/image commit hash to e59bae6 (#659)
|
||||||
|
* Update golang.org/x/sync commit hash to 3042136 (#667)
|
||||||
|
* Update golang.org/x/sync commit hash to b3e1573 (#675)
|
||||||
|
* Update module 4d63.com/tz to v1.2.0 (#631)
|
||||||
|
* Update module fzipp/gocyclo to v0.2.0 (#686)
|
||||||
|
* Update module fzipp/gocyclo to v0.3.0 (#687)
|
||||||
|
* Update module getsentry/sentry-go to v0.7.0 (#617)
|
||||||
|
* Update module go-errors/errors to v1.1.1 (#677)
|
||||||
|
* Update module go-testfixtures/testfixtures/v3 to v3.4.0 (#627)
|
||||||
|
* Update module go-testfixtures/testfixtures/v3 to v3.4.1 (#693)
|
||||||
|
* Update module iancoleman/strcase to v0.1.0 (#636)
|
||||||
|
* Update module iancoleman/strcase to v0.1.1 (#645)
|
||||||
|
* Update module iancoleman/strcase to v0.1.2 (#660)
|
||||||
|
* Update module imdario/mergo to v0.3.10 (#615)
|
||||||
|
* Update module imdario/mergo to v0.3.11 (#629)
|
||||||
|
* Update module labstack/echo/v4 to v4.1.17 (#646)
|
||||||
|
* Update module lib/pq to v1.7.1 (#616)
|
||||||
|
* Update module lib/pq to v1.8.0 (#618)
|
||||||
|
* Update module mattn/go-sqlite3 to v1.14.1 (#638)
|
||||||
|
* Update module mattn/go-sqlite3 to v1.14.2 (#647)
|
||||||
|
* Update module mattn/go-sqlite3 to v1.14.3 (#661)
|
||||||
|
* Update module mattn/go-sqlite3 to v1.14.4 (#670)
|
||||||
|
* Update module prometheus/client_golang to v1.8.0 (#681)
|
||||||
|
* Update module spf13/afero to v1.3.2 (#610)
|
||||||
|
* Update module spf13/afero to v1.3.3 (#623)
|
||||||
|
* Update module spf13/afero to v1.3.4 (#628)
|
||||||
|
* Update module spf13/afero to v1.3.5 (#650)
|
||||||
|
* Update module spf13/afero to v1.4.0 (#662)
|
||||||
|
* Update module spf13/afero to v1.4.1 (#673)
|
||||||
|
* Update module spf13/cobra to v1.1.0 (#679)
|
||||||
|
* Update module spf13/cobra to v1.1.1 (#690)
|
||||||
|
* Update module spf13/viper to v1.7.1 (#620)
|
||||||
|
* Update module src.techknowlogick.com/xgo to v1.1.0+1.15.0 (#630)
|
||||||
|
* Update module src.techknowlogick.com/xgo to v1 (#613)
|
||||||
|
* Update module swaggo/swag to v1.6.8 (#680)
|
||||||
|
* Update renovate token
|
||||||
|
* Update src.techknowlogick.com/xgo commit hash to 7c2e3c9 (#611)
|
||||||
|
* Update src.techknowlogick.com/xgo commit hash to 96de19c (#612)
|
||||||
|
* update theme
|
||||||
|
* Update xgo to v1.0.0+1.14.6
|
||||||
|
* Use db sessions for task-related things (#621)
|
||||||
|
* Use nfpm to build deb, rpm and apk packages (#689)
|
||||||
|
|
||||||
|
## [0.14.1] - 2020-07-07
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
* Fix creating lists with non ascii characters (#607)
|
||||||
|
* Fix decoding active users from redis
|
||||||
|
* Fix parsing todoist reminder dates
|
||||||
|
* Make sure the metrics map accesses only happen explicitly
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
* Update docs theme
|
||||||
|
|
||||||
## [0.14.0] - 2020-07-01
|
## [0.14.0] - 2020-07-01
|
||||||
|
|
||||||
### Added
|
### Added
|
||||||
|
|
|
@ -16,7 +16,8 @@ WORKDIR ${GOPATH}/src/code.vikunja.io/api
|
||||||
|
|
||||||
# Checkout version if set
|
# Checkout version if set
|
||||||
RUN if [ -n "${VIKUNJA_VERSION}" ]; then git checkout "${VIKUNJA_VERSION}"; fi \
|
RUN if [ -n "${VIKUNJA_VERSION}" ]; then git checkout "${VIKUNJA_VERSION}"; fi \
|
||||||
&& make clean generate build
|
&& go install github.com/magefile/mage \
|
||||||
|
&& mage build:clean build:build
|
||||||
|
|
||||||
###################
|
###################
|
||||||
# The actual image
|
# The actual image
|
||||||
|
|
247
Makefile
247
Makefile
|
@ -1,247 +0,0 @@
|
||||||
DIST := dist
|
|
||||||
IMPORT := code.vikunja.io/api
|
|
||||||
|
|
||||||
SED_INPLACE := sed -i
|
|
||||||
|
|
||||||
ifeq ($(OS), Windows_NT)
|
|
||||||
EXECUTABLE := vikunja.exe
|
|
||||||
else
|
|
||||||
EXECUTABLE := vikunja
|
|
||||||
UNAME_S := $(shell uname -s)
|
|
||||||
ifeq ($(UNAME_S),Darwin)
|
|
||||||
SED_INPLACE := sed -i ''
|
|
||||||
endif
|
|
||||||
endif
|
|
||||||
|
|
||||||
GOFILES := $(shell find . -name "*.go" -type f ! -path "*/bindata.go")
|
|
||||||
GOFMT ?= gofmt -s
|
|
||||||
|
|
||||||
GOFLAGS := -v
|
|
||||||
EXTRA_GOFLAGS ?=
|
|
||||||
|
|
||||||
LDFLAGS := -X "code.vikunja.io/api/pkg/version.Version=$(shell git describe --tags --always --abbrev=10 | sed 's/-/+/' | sed 's/^v//' | sed 's/-g/-/')" -X "main.Tags=$(TAGS)"
|
|
||||||
|
|
||||||
PACKAGES ?= $(filter-out code.vikunja.io/api/pkg/integrations,$(shell go list))
|
|
||||||
SOURCES ?= $(shell find . -name "*.go" -type f)
|
|
||||||
|
|
||||||
TAGS ?=
|
|
||||||
|
|
||||||
ifeq ($(OS), Windows_NT)
|
|
||||||
EXECUTABLE := vikunja.exe
|
|
||||||
else
|
|
||||||
EXECUTABLE := vikunja
|
|
||||||
endif
|
|
||||||
|
|
||||||
ifneq ($(DRONE_TAG),)
|
|
||||||
VERSION ?= $(subst v,,$(DRONE_TAG))
|
|
||||||
else
|
|
||||||
ifneq ($(DRONE_BRANCH),)
|
|
||||||
VERSION ?= $(subst release/v,,$(DRONE_BRANCH))
|
|
||||||
else
|
|
||||||
VERSION ?= master
|
|
||||||
endif
|
|
||||||
endif
|
|
||||||
|
|
||||||
ifeq ($(DRONE_WORKSPACE),'')
|
|
||||||
BINLOCATION := $(EXECUTABLE)
|
|
||||||
else
|
|
||||||
BINLOCATION := $(DIST)/binaries/$(EXECUTABLE)-$(VERSION)-linux-amd64
|
|
||||||
endif
|
|
||||||
|
|
||||||
ifeq ($(VERSION),master)
|
|
||||||
PKGVERSION := $(shell git describe --tags --always --abbrev=10 | sed 's/-/+/' | sed 's/^v//' | sed 's/-g/-/')
|
|
||||||
else
|
|
||||||
PKGVERSION := $(VERSION)
|
|
||||||
endif
|
|
||||||
|
|
||||||
.PHONY: all
|
|
||||||
all: build
|
|
||||||
|
|
||||||
.PHONY: clean
|
|
||||||
clean:
|
|
||||||
go clean ./...
|
|
||||||
rm -rf $(EXECUTABLE) $(DIST) $(BINDATA)
|
|
||||||
|
|
||||||
.PHONY: test
|
|
||||||
test:
|
|
||||||
# We run everything sequentially and not in parallel to prevent issues with real test databases
|
|
||||||
VIKUNJA_SERVICE_ROOTPATH=$(shell pwd) go test $(GOFLAGS) -p 1 -cover -coverprofile cover.out $(PACKAGES)
|
|
||||||
|
|
||||||
.PHONY: test-coverage
|
|
||||||
test-coverage: test
|
|
||||||
go tool cover -html=cover.out -o cover.html
|
|
||||||
|
|
||||||
.PHONY: integration-test
|
|
||||||
integration-test:
|
|
||||||
# We run everything sequentially and not in parallel to prevent issues with real test databases
|
|
||||||
VIKUNJA_SERVICE_ROOTPATH=$(shell pwd) go test $(GOFLAGS) -p 1 code.vikunja.io/api/pkg/integrations
|
|
||||||
|
|
||||||
.PHONY: lint
|
|
||||||
lint:
|
|
||||||
@hash golint > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
|
|
||||||
go install $(GOFLAGS) golang.org/x/lint/golint; \
|
|
||||||
fi
|
|
||||||
for PKG in $(PACKAGES); do golint -set_exit_status $$PKG || exit 1; done;
|
|
||||||
|
|
||||||
.PHONY: fmt
|
|
||||||
fmt:
|
|
||||||
$(GOFMT) -w $(GOFILES)
|
|
||||||
|
|
||||||
.PHONY: fmt-check
|
|
||||||
fmt-check:
|
|
||||||
# get all go files and run go fmt on them
|
|
||||||
@diff=$$($(GOFMT) -d $(GOFILES)); \
|
|
||||||
if [ -n "$$diff" ]; then \
|
|
||||||
echo "Please run 'make fmt' and commit the result:"; \
|
|
||||||
echo "$${diff}"; \
|
|
||||||
exit 1; \
|
|
||||||
fi;
|
|
||||||
|
|
||||||
.PHONY: build
|
|
||||||
build: generate $(EXECUTABLE)
|
|
||||||
|
|
||||||
.PHONY: generate
|
|
||||||
generate:
|
|
||||||
go generate code.vikunja.io/api/pkg/static
|
|
||||||
|
|
||||||
$(EXECUTABLE): $(SOURCES)
|
|
||||||
go build $(GOFLAGS) $(EXTRA_GOFLAGS) -tags '$(TAGS)' -ldflags '-s -w $(LDFLAGS)' -o $@
|
|
||||||
|
|
||||||
.PHONY: compress-build
|
|
||||||
compress-build:
|
|
||||||
upx -9 $(EXECUTABLE)
|
|
||||||
|
|
||||||
.PHONY: release
|
|
||||||
release: release-dirs release-windows release-linux release-darwin release-copy release-check release-os-package release-zip
|
|
||||||
|
|
||||||
.PHONY: release-dirs
|
|
||||||
release-dirs:
|
|
||||||
mkdir -p $(DIST)/binaries $(DIST)/release $(DIST)/zip
|
|
||||||
|
|
||||||
.PHONY: release-windows
|
|
||||||
release-windows:
|
|
||||||
@hash xgo > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
|
|
||||||
go install $(GOFLAGS) src.techknowlogick.com/xgo; \
|
|
||||||
fi
|
|
||||||
xgo -dest $(DIST)/binaries -tags 'netgo $(TAGS)' -ldflags '-linkmode external -extldflags "-static" $(LDFLAGS)' -targets 'windows/*' -out vikunja-$(VERSION) .
|
|
||||||
ifneq ($(DRONE_WORKSPACE),'')
|
|
||||||
mv /build/* $(DIST)/binaries
|
|
||||||
endif
|
|
||||||
|
|
||||||
.PHONY: release-linux
|
|
||||||
release-linux:
|
|
||||||
@hash xgo > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
|
|
||||||
go install $(GOFLAGS) src.techknowlogick.com/xgo; \
|
|
||||||
fi
|
|
||||||
xgo -dest $(DIST)/binaries -tags 'netgo $(TAGS)' -ldflags '-linkmode external -extldflags "-static" $(LDFLAGS)' -targets 'linux/*' -out vikunja-$(VERSION) .
|
|
||||||
ifneq ($(DRONE_WORKSPACE),'')
|
|
||||||
mv /build/* $(DIST)/binaries
|
|
||||||
endif
|
|
||||||
|
|
||||||
.PHONY: release-darwin
|
|
||||||
release-darwin:
|
|
||||||
@hash xgo > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
|
|
||||||
go install $(GOFLAGS) src.techknowlogick.com/xgo; \
|
|
||||||
fi
|
|
||||||
xgo -dest $(DIST)/binaries -tags 'netgo $(TAGS)' -ldflags '$(LDFLAGS)' -targets 'darwin/*' -out vikunja-$(VERSION) .
|
|
||||||
ifneq ($(DRONE_WORKSPACE),'')
|
|
||||||
mv /build/* $(DIST)/binaries
|
|
||||||
endif
|
|
||||||
|
|
||||||
# Compresses all releases made by make release-* but not mips* releases since upx can't handle these.
|
|
||||||
.PHONY: release-compress
|
|
||||||
release-compress:
|
|
||||||
$(foreach file,$(filter-out $(wildcard $(wildcard $(DIST)/binaries/$(EXECUTABLE)-*mips*)),$(wildcard $(DIST)/binaries/$(EXECUTABLE)-*)), upx -9 $(file);)
|
|
||||||
|
|
||||||
.PHONY: release-copy
|
|
||||||
release-copy:
|
|
||||||
$(foreach file,$(wildcard $(DIST)/binaries/$(EXECUTABLE)-*),cp $(file) $(DIST)/release/$(notdir $(file));)
|
|
||||||
|
|
||||||
.PHONY: release-check
|
|
||||||
release-check:
|
|
||||||
cd $(DIST)/release; $(foreach file,$(wildcard $(DIST)/release/$(EXECUTABLE)-*),sha256sum $(notdir $(file)) > $(notdir $(file)).sha256;)
|
|
||||||
|
|
||||||
.PHONY: release-os-package
|
|
||||||
release-os-package:
|
|
||||||
$(foreach file,$(filter-out %.sha256,$(wildcard $(DIST)/release/$(EXECUTABLE)-*)),mkdir $(file)-full;mv $(file) $(file)-full/; mv $(file).sha256 $(file)-full/; cp config.yml.sample $(file)-full/config.yml; cp LICENSE $(file)-full/; )
|
|
||||||
|
|
||||||
.PHONY: release-zip
|
|
||||||
release-zip:
|
|
||||||
$(foreach file,$(wildcard $(DIST)/release/$(EXECUTABLE)-*),cd $(file); zip -r ../../zip/$(shell basename $(file)).zip *; cd ../../../; )
|
|
||||||
|
|
||||||
# Builds a deb package using fpm from a previously created binary (using make build)
|
|
||||||
.PHONY: build-deb
|
|
||||||
build-deb:
|
|
||||||
fpm -s dir -t deb --url https://vikunja.io -n vikunja -v $(PKGVERSION) --license GPLv3 --directories /opt/vikunja --after-install ./build/after-install.sh --description 'Vikunja is an open-source todo application, written in Go. It lets you create lists,tasks and share them via teams or directly between users.' -m maintainers@vikunja.io ./$(BINLOCATION)=/opt/vikunja/vikunja ./config.yml.sample=/etc/vikunja/config.yml;
|
|
||||||
|
|
||||||
.PHONY: reprepro
|
|
||||||
reprepro:
|
|
||||||
reprepro_expect debian includedeb strech ./$(EXECUTABLE)_$(PKGVERSION)_amd64.deb
|
|
||||||
|
|
||||||
.PHONY: got-swag
|
|
||||||
got-swag: do-the-swag
|
|
||||||
@diff=$$(git diff docs/swagger/swagger.json); \
|
|
||||||
if [ -n "$$diff" ]; then \
|
|
||||||
echo "Please run 'make do-the-swag' and commit the result:"; \
|
|
||||||
echo "$${diff}"; \
|
|
||||||
exit 1; \
|
|
||||||
fi;
|
|
||||||
|
|
||||||
.PHONY: do-the-swag
|
|
||||||
do-the-swag:
|
|
||||||
@hash swag > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
|
|
||||||
go install $(GOFLAGS) github.com/swaggo/swag/cmd/swag; \
|
|
||||||
fi
|
|
||||||
swag init -g pkg/routes/routes.go --parseDependency -o ./pkg/swagger;
|
|
||||||
# Fix the generated swagger file, currently a workaround until swaggo can properly use go mod
|
|
||||||
sed -i '/"definitions": {/a "code.vikunja.io.web.HTTPError": {"type": "object","properties": {"code": {"type": "integer"},"message": {"type": "string"}}},' pkg/swagger/docs.go;
|
|
||||||
sed -i 's/code.vikunja.io\/web.HTTPError/code.vikunja.io.web.HTTPError/g' pkg/swagger/docs.go;
|
|
||||||
sed -i 's/package\ docs/package\ swagger/g' pkg/swagger/docs.go;
|
|
||||||
sed -i 's/` + \\"`\\" + `/` + "`" + `/g' pkg/swagger/docs.go;
|
|
||||||
|
|
||||||
.PHONY: misspell-check
|
|
||||||
misspell-check:
|
|
||||||
@hash misspell > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
|
|
||||||
go install $(GOFLAGS) github.com/client9/misspell/cmd/misspell; \
|
|
||||||
fi
|
|
||||||
for S in $(GOFILES); do misspell -error $$S || exit 1; done;
|
|
||||||
|
|
||||||
.PHONY: ineffassign-check
|
|
||||||
ineffassign-check:
|
|
||||||
@hash ineffassign > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
|
|
||||||
go install $(GOFLAGS) github.com/gordonklaus/ineffassign; \
|
|
||||||
fi
|
|
||||||
for S in $(GOFILES); do ineffassign $$S || exit 1; done;
|
|
||||||
|
|
||||||
.PHONY: gocyclo-check
|
|
||||||
gocyclo-check:
|
|
||||||
@hash gocyclo > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
|
|
||||||
go get -u github.com/fzipp/gocyclo; \
|
|
||||||
go install $(GOFLAGS) github.com/fzipp/gocyclo; \
|
|
||||||
fi
|
|
||||||
for S in $(GOFILES); do gocyclo -over 47 $$S || exit 1; done;
|
|
||||||
|
|
||||||
.PHONY: static-check
|
|
||||||
static-check:
|
|
||||||
@hash staticcheck > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
|
|
||||||
go get -u honnef.co/go/tools; \
|
|
||||||
go install $(GOFLAGS) honnef.co/go/tools/cmd/staticcheck; \
|
|
||||||
fi
|
|
||||||
staticcheck $(PACKAGES);
|
|
||||||
|
|
||||||
.PHONY: gosec-check
|
|
||||||
gosec-check:
|
|
||||||
@hash gosec > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
|
|
||||||
echo "Please manually install gosec by running"; \
|
|
||||||
echo "curl -sfL https://raw.githubusercontent.com/securego/gosec/master/install.sh | bash -s -- -b $GOPATH/bin v2.2.0"; \
|
|
||||||
exit 1; \
|
|
||||||
fi
|
|
||||||
gosec ./...
|
|
||||||
|
|
||||||
.PHONY: goconst-check
|
|
||||||
goconst-check:
|
|
||||||
@hash goconst > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
|
|
||||||
go get -u github.com/jgautheron/goconst/cmd/goconst; \
|
|
||||||
go install $(GOFLAGS) github.com/jgautheron/goconst/cmd/goconst; \
|
|
||||||
fi;
|
|
||||||
for S in $(PACKAGES); do goconst $$S || exit 1; done;
|
|
|
@ -1,8 +1,8 @@
|
||||||
<img src="https://vikunja.io/images/vikunja-logo.svg" alt="" style="display: block;width: 50%;margin: 0 auto;" width="50%"/>
|
<img src="https://vikunja.io/images/vikunja-logo.svg" alt="" style="display: block;width: 50%;margin: 0 auto;" width="50%"/>
|
||||||
|
|
||||||
[![Build Status](https://drone1.kolaente.de/api/badges/vikunja/api/status.svg)](https://drone1.kolaente.de/vikunja/api)
|
[![Build Status](https://drone.kolaente.de/api/badges/vikunja/api/status.svg)](https://drone.kolaente.de/vikunja/api)
|
||||||
[![License: GPL v3](https://img.shields.io/badge/License-GPL%20v3-blue.svg)](LICENSE)
|
[![License: GPL v3](https://img.shields.io/badge/License-GPL%20v3-blue.svg)](LICENSE)
|
||||||
[![Download](https://img.shields.io/badge/download-v0.14.0-brightgreen.svg)](https://dl.vikunja.io)
|
[![Download](https://img.shields.io/badge/download-v0.15.1-brightgreen.svg)](https://dl.vikunja.io)
|
||||||
[![Docker Pulls](https://img.shields.io/docker/pulls/vikunja/api.svg)](https://hub.docker.com/r/vikunja/api/)
|
[![Docker Pulls](https://img.shields.io/docker/pulls/vikunja/api.svg)](https://hub.docker.com/r/vikunja/api/)
|
||||||
[![Swagger Docs](https://img.shields.io/badge/swagger-docs-brightgreen.svg)](https://try.vikunja.io/api/v1/docs)
|
[![Swagger Docs](https://img.shields.io/badge/swagger-docs-brightgreen.svg)](https://try.vikunja.io/api/v1/docs)
|
||||||
[![Go Report Card](https://goreportcard.com/badge/git.kolaente.de/vikunja/api)](https://goreportcard.com/report/git.kolaente.de/vikunja/api)
|
[![Go Report Card](https://goreportcard.com/badge/git.kolaente.de/vikunja/api)](https://goreportcard.com/report/git.kolaente.de/vikunja/api)
|
||||||
|
@ -35,7 +35,7 @@ try it on [try.vikunja.io](https://try.vikunja.io)!
|
||||||
* [Installing](https://vikunja.io/docs/installing/)
|
* [Installing](https://vikunja.io/docs/installing/)
|
||||||
* [Build from source](https://vikunja.io/docs/build-from-sources/)
|
* [Build from source](https://vikunja.io/docs/build-from-sources/)
|
||||||
* [Development setup](https://vikunja.io/docs/development/)
|
* [Development setup](https://vikunja.io/docs/development/)
|
||||||
* [Makefile](https://vikunja.io/docs/makefile/)
|
* [Magefile](https://vikunja.io/docs/mage/)
|
||||||
* [Testing](https://vikunja.io/docs/testing/)
|
* [Testing](https://vikunja.io/docs/testing/)
|
||||||
|
|
||||||
All docs can be found on [the vikunja home page](https://vikunja.io/docs/).
|
All docs can be found on [the vikunja home page](https://vikunja.io/docs/).
|
||||||
|
|
|
@ -1,5 +1,4 @@
|
||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
ln -s /opt/vikunja/vikunja /usr/bin/vikunja
|
|
||||||
|
|
||||||
# Fix the config to contain proper values
|
# Fix the config to contain proper values
|
||||||
NEW_SECRET=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 32 | head -n 1)
|
NEW_SECRET=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 32 | head -n 1)
|
||||||
|
|
|
@ -0,0 +1,15 @@
|
||||||
|
Vikunja is a to-do list application to facilitate your life.
|
||||||
|
Copyright 2018-2020 Vikunja and contributors. All rights reserved.
|
||||||
|
|
||||||
|
This program is free software: you can redistribute it and/or modify
|
||||||
|
it under the terms of the GNU General Public License as published by
|
||||||
|
the Free Software Foundation, either version 3 of the License, or
|
||||||
|
(at your option) any later version.
|
||||||
|
|
||||||
|
This program is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
GNU General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU General Public License
|
||||||
|
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
@ -61,8 +61,10 @@ database:
|
||||||
cache:
|
cache:
|
||||||
# If cache is enabled or not
|
# If cache is enabled or not
|
||||||
enabled: false
|
enabled: false
|
||||||
# Cache type. Possible values are memory or redis, you'll need to enable redis below when using redis
|
# Cache type. Possible values are "keyvalue", "memory" or "redis".
|
||||||
type: memory
|
# When choosing "keyvalue" this setting follows the one configured in the "keyvalue" section.
|
||||||
|
# When choosing "redis" you will need to configure the redis connection separately.
|
||||||
|
type: keyvalue
|
||||||
# When using memory this defines the maximum size an element can take
|
# When using memory this defines the maximum size an element can take
|
||||||
maxelementsize: 1000
|
maxelementsize: 1000
|
||||||
|
|
||||||
|
@ -106,6 +108,8 @@ mailer:
|
||||||
queuelength: 100
|
queuelength: 100
|
||||||
# The timeout in seconds after which the current open connection to the mailserver will be closed.
|
# The timeout in seconds after which the current open connection to the mailserver will be closed.
|
||||||
queuetimeout: 30
|
queuetimeout: 30
|
||||||
|
# By default, vikunja will try to connect with starttls, use this option to force it to use ssl.
|
||||||
|
forcessl: false
|
||||||
|
|
||||||
log:
|
log:
|
||||||
# A folder where all the logfiles should go.
|
# A folder where all the logfiles should go.
|
||||||
|
@ -134,8 +138,10 @@ ratelimit:
|
||||||
period: 60
|
period: 60
|
||||||
# The max number of requests a user is allowed to do in the configured time period
|
# The max number of requests a user is allowed to do in the configured time period
|
||||||
limit: 100
|
limit: 100
|
||||||
# The store where the limit counter for each user is stored. Possible values are "memory" or "redis"
|
# The store where the limit counter for each user is stored.
|
||||||
store: memory
|
# Possible values are "keyvalue", "memory" or "redis".
|
||||||
|
# When choosing "keyvalue" this setting follows the one configured in the "keyvalue" section.
|
||||||
|
store: keyvalue
|
||||||
|
|
||||||
files:
|
files:
|
||||||
# The path where files are stored
|
# The path where files are stored
|
||||||
|
@ -176,10 +182,6 @@ migration:
|
||||||
redirecturl:
|
redirecturl:
|
||||||
|
|
||||||
avatar:
|
avatar:
|
||||||
# Switch between avatar providers. Possible values are gravatar and default.
|
|
||||||
# gravatar will fetch the avatar based on the user email.
|
|
||||||
# default will return a default avatar for every request.
|
|
||||||
provider: gravatar
|
|
||||||
# When using gravatar, this is the duration in seconds until a cached gravatar user avatar expires
|
# When using gravatar, this is the duration in seconds until a cached gravatar user avatar expires
|
||||||
gravatarexpiration: 3600
|
gravatarexpiration: 3600
|
||||||
|
|
||||||
|
@ -201,3 +203,44 @@ backgrounds:
|
||||||
# It will only show in the UI if your application has been approved for Enterprise usage, therefore if
|
# It will only show in the UI if your application has been approved for Enterprise usage, therefore if
|
||||||
# you’re in Demo mode, you can also find the ID in the URL at the end: https://unsplash.com/oauth/applications/:application_id
|
# you’re in Demo mode, you can also find the ID in the URL at the end: https://unsplash.com/oauth/applications/:application_id
|
||||||
applicationid:
|
applicationid:
|
||||||
|
|
||||||
|
# Legal urls
|
||||||
|
# Will be shown in the frontend if configured here
|
||||||
|
legal:
|
||||||
|
imprinturl:
|
||||||
|
privacyurl:
|
||||||
|
|
||||||
|
# Key Value Storage settings
|
||||||
|
# The Key Value Storage is used for different kinds of things like metrics and a few cache systems.
|
||||||
|
keyvalue:
|
||||||
|
# The type of the storage backend. Can be either "memory" or "redis". If "redis" is chosen it needs to be configured separately.
|
||||||
|
type: "memory"
|
||||||
|
|
||||||
|
auth:
|
||||||
|
# Local authentication will let users log in and register (if enabled) through the db.
|
||||||
|
# This is the default auth mechanism and does not require any additional configuration.
|
||||||
|
local:
|
||||||
|
# Enable or disable local authentication
|
||||||
|
enabled: true
|
||||||
|
# OpenID configuration will allow users to authenticate through a third-party OpenID Connect compatible provider.<br/>
|
||||||
|
# The provider needs to support the `openid`, `profile` and `email` scopes.<br/>
|
||||||
|
# **Note:** The frontend expects to be redirected after authentication by the third party
|
||||||
|
# to <frontend-url>/auth/openid/<auth key>. Please make sure to configure the redirect url with your third party
|
||||||
|
# auth service accordingly if you're using the default vikunja frontend.
|
||||||
|
# Take a look at the [default config file](https://kolaente.dev/vikunja/api/src/branch/master/config.yml.sample) for more information about how to configure openid authentication.
|
||||||
|
openid:
|
||||||
|
# Enable or disable OpenID Connect authentication
|
||||||
|
enabled: false
|
||||||
|
# The url to redirect clients to. Defaults to the configured frontend url. If you're using Vikunja with the official
|
||||||
|
# frontend, you don't need to change this value.
|
||||||
|
redirecturl: <frontend url>
|
||||||
|
# A list of enabled providers
|
||||||
|
providers:
|
||||||
|
# The name of the provider as it will appear in the frontend.
|
||||||
|
- name:
|
||||||
|
# The auth url to send users to if they want to authenticate using OpenID Connect.
|
||||||
|
authurl:
|
||||||
|
# The client ID used to authenticate Vikunja at the OpenID Connect provider.
|
||||||
|
clientid:
|
||||||
|
# The client secret used to authenticate Vikunja at the OpenID Connect provider.
|
||||||
|
clientsecret:
|
||||||
|
|
|
@ -20,6 +20,11 @@ params:
|
||||||
plausibleDomain: vikunja.io
|
plausibleDomain: vikunja.io
|
||||||
plausibleURL: https://analytics.kolaente.de
|
plausibleURL: https://analytics.kolaente.de
|
||||||
|
|
||||||
|
markup:
|
||||||
|
goldmark:
|
||||||
|
renderer:
|
||||||
|
unsafe: true
|
||||||
|
|
||||||
menu:
|
menu:
|
||||||
page:
|
page:
|
||||||
- name: Home
|
- name: Home
|
||||||
|
|
|
@ -17,9 +17,9 @@ To learn more about the what, why and how, take a look at [the features page](ht
|
||||||
|
|
||||||
## Start
|
## Start
|
||||||
|
|
||||||
A good starting point if you want to install and host Vikunja on your server are [the install documentation](installing)
|
A good starting point if you want to install and host Vikunja on your server are [the install documentation]({{< ref "./setup/install.md">}})
|
||||||
and [available configuration options](config-options).
|
and [available configuration options]({{< ref "./setup/config.md">}}).
|
||||||
|
|
||||||
## Developing
|
## Developing
|
||||||
|
|
||||||
If you want to start contributing to Vikunja, take a look at [the development docs](development).
|
If you want to start contributing to Vikunja, take a look at [the development docs]({{< ref "./development/development.md">}}).
|
|
@ -16,6 +16,8 @@ Additionally, they can also be run directly by using the `migrate` command.
|
||||||
We use [xormigrate](https://github.com/techknowlogick/xormigrate) to handle migrations,
|
We use [xormigrate](https://github.com/techknowlogick/xormigrate) to handle migrations,
|
||||||
which is based on gormigrate.
|
which is based on gormigrate.
|
||||||
|
|
||||||
|
{{< table_of_contents >}}
|
||||||
|
|
||||||
## Add a new migration
|
## Add a new migration
|
||||||
|
|
||||||
All migrations are stored in `pkg/migrations` and files should have the same name as their id.
|
All migrations are stored in `pkg/migrations` and files should have the same name as their id.
|
||||||
|
|
|
@ -17,7 +17,9 @@ If you don't intend to add new dependencies, go `1.9` and above should be fine.
|
||||||
|
|
||||||
To contribute to Vikunja, fork the project and work on the master branch.
|
To contribute to Vikunja, fork the project and work on the master branch.
|
||||||
|
|
||||||
A lot of developing tasks are automated using a Makefile, so make sure to [take a look at it]({{< ref "make.md">}}).
|
A lot of developing tasks are automated using a Magefile, so make sure to [take a look at it]({{< ref "mage.md">}}).
|
||||||
|
|
||||||
|
{{< table_of_contents >}}
|
||||||
|
|
||||||
## Libraries
|
## Libraries
|
||||||
|
|
||||||
|
@ -50,8 +52,8 @@ git remote add origin git@git.kolaente.de:<USERNAME>/api.git
|
||||||
git fetch --all --prune
|
git fetch --all --prune
|
||||||
{{< /highlight >}}
|
{{< /highlight >}}
|
||||||
|
|
||||||
This should provide a working development environment for Vikunja. Take a look at the Makefile to get an overview about
|
This should provide a working development environment for Vikunja. Take a look at the Magefile to get an overview about
|
||||||
the available tasks. The most common tasks should be `make test` which will start our test environment and `make build`
|
the available tasks. The most common tasks should be `mage test:unit` which will start our test environment and `mage build:build`
|
||||||
which will build a vikunja binary into the working directory. Writing test cases is not mandatory to contribute, but it
|
which will build a vikunja binary into the working directory. Writing test cases is not mandatory to contribute, but it
|
||||||
is highly encouraged and helps developers sleep at night.
|
is highly encouraged and helps developers sleep at night.
|
||||||
|
|
||||||
|
@ -62,4 +64,4 @@ That’s it! You are ready to hack on Vikunja. Test changes, push them to the re
|
||||||
Each Vikunja release contains all static assets directly compiled into the binary.
|
Each Vikunja release contains all static assets directly compiled into the binary.
|
||||||
To prevent this during development, use the `dev` tag when developing.
|
To prevent this during development, use the `dev` tag when developing.
|
||||||
|
|
||||||
See the [make docs](make.md#statically-compile-all-templates-into-the-binary) about how to compile with static assets for a release.
|
See the [mage docs](mage.md#statically-compile-all-templates-into-the-binary) about how to compile with static assets for a release.
|
||||||
|
|
|
@ -0,0 +1,192 @@
|
||||||
|
---
|
||||||
|
date: "2019-02-12:00:00+02:00"
|
||||||
|
title: "Magefile"
|
||||||
|
draft: false
|
||||||
|
type: "doc"
|
||||||
|
menu:
|
||||||
|
sidebar:
|
||||||
|
parent: "development"
|
||||||
|
---
|
||||||
|
|
||||||
|
# Mage
|
||||||
|
|
||||||
|
Vikunja uses [Mage](https://magefile.org/) to script common development tasks and even releasing.
|
||||||
|
Mage is a pure Go solution which allows for greater flexibility and things like better parallelization.
|
||||||
|
|
||||||
|
This document explains what tasks are available and what they do.
|
||||||
|
|
||||||
|
{{< table_of_contents >}}
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
To use mage, you'll need to install the mage cli.
|
||||||
|
To install it, run the following command:
|
||||||
|
|
||||||
|
```
|
||||||
|
go install github.com/magefile/mage
|
||||||
|
```
|
||||||
|
|
||||||
|
## Categories
|
||||||
|
|
||||||
|
There are multiple categories of subcommands in the magefile:
|
||||||
|
|
||||||
|
* `build`: Contains commands to build a single binary
|
||||||
|
* `check`: Contains commands to statically check the source code
|
||||||
|
* `release`: Contains commands to release Vikunja with everything that's required
|
||||||
|
* `test`: Contains commands to run all kinds of tests
|
||||||
|
* `dev`: Contains commands to run development tasks
|
||||||
|
* `misc`: Commands which do not belong in either of the other categories
|
||||||
|
|
||||||
|
## CI
|
||||||
|
|
||||||
|
These tasks are automatically run in our CI every time someone pushes to master or you update a pull request:
|
||||||
|
|
||||||
|
* `mage check:lint`
|
||||||
|
* `mage check:fmt`
|
||||||
|
* `mage check:ineffassign`
|
||||||
|
* `mage check:misspell`
|
||||||
|
* `mage check:goconst`
|
||||||
|
* `mage build:generate`
|
||||||
|
* `mage build:build`
|
||||||
|
|
||||||
|
## Build
|
||||||
|
|
||||||
|
### Build Vikunja
|
||||||
|
|
||||||
|
{{< highlight bash >}}
|
||||||
|
mage build:build
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
Builds a `vikunja`-binary in the root directory of the repo for the platform it is run on.
|
||||||
|
|
||||||
|
### Statically compile all templates into the binary
|
||||||
|
|
||||||
|
{{< highlight bash >}}
|
||||||
|
mage build:generate
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
This generates static code with all templates, meaning no templates need to be referenced at runtime.
|
||||||
|
|
||||||
|
### clean
|
||||||
|
|
||||||
|
{{< highlight bash >}}
|
||||||
|
mage build:clean
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
Cleans all build, executable and bindata files
|
||||||
|
|
||||||
|
## Check
|
||||||
|
|
||||||
|
All check sub-commands exit with a status code of 1 if the check fails.
|
||||||
|
|
||||||
|
Various code-checks are available:
|
||||||
|
|
||||||
|
* `mage check:all`: Runs fmt-check, lint, got-swag, misspell-check, ineffassign-check, gocyclo-check, static-check, gosec-check, goconst-check all in parallel
|
||||||
|
* `mage check:fmt`: Checks if the code is properly formatted with go fmt
|
||||||
|
* `mage check:go-sec`: Checks the source code for potential security issues by scanning the Go AST using the [gosec tool](https://github.com/securego/gosec)
|
||||||
|
* `mage check:goconst`: Checks for repeated strings that could be replaced by a constant using [goconst](https://github.com/jgautheron/goconst/)
|
||||||
|
* `mage check:gocyclo`: Checks for the cyclomatic complexity of the source code using [gocyclo](https://github.com/fzipp/gocyclo)
|
||||||
|
* `mage check:got-swag`: Checks if the swagger docs need to be re-generated from the code annotations
|
||||||
|
* `mage check:ineffassign`: Checks the source code for ineffectual assigns using [ineffassign](https://github.com/gordonklaus/ineffassign)
|
||||||
|
* `mage check:lint`: Runs golint on all packages
|
||||||
|
* `mage check:misspell`: Checks the source code for misspellings
|
||||||
|
* `mage check:static`: Statically analyzes the source code about a range of different problems using [staticcheck](https://staticcheck.io/docs/)
|
||||||
|
|
||||||
|
## Release
|
||||||
|
|
||||||
|
### Build Releases
|
||||||
|
|
||||||
|
{{< highlight bash >}}
|
||||||
|
mage release
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
Builds binaries for all platforms and zips them with a copy of the `templates/` folder.
|
||||||
|
All built zip files are stored into `dist/zips/`. Binaries are stored in `dist/binaries/`,
|
||||||
|
binaries bundled with `templates` are stored in `dist/releases/`.
|
||||||
|
|
||||||
|
All cross-platform binaries built using this series of commands are built with the help of
|
||||||
|
[xgo](https://github.com/techknowlogick/xgo). The mage command will automatically install the
|
||||||
|
binary to be able to use it.
|
||||||
|
|
||||||
|
`mage release:release` is a shortcut to execute `mage release:dirs release:windows release:linux release:darwin release:copy release:check release:os-package release:zip`.
|
||||||
|
|
||||||
|
* `mage release:dirs` creates all directories needed
|
||||||
|
* `mage release:windows`/`release:linux`/`release:darwin` execute xgo to build for their respective platforms
|
||||||
|
* `mage release:copy` bundles binaries with a copy of the `LICENSE` and sample config files to then be zipped
|
||||||
|
* `mage release:check` creates sha256 checksums for each binary which will be included in the zip file
|
||||||
|
* `mage release:os-package` bundles a binary with the `sha256` checksum file, a sample `config.yml` and a copy of the license in a folder for each architecture
|
||||||
|
* `mage release:compress` compresses all build binaries with `upx` to save space
|
||||||
|
* `mage release:zip` packages a zip file for the files created by `release:os-package`
|
||||||
|
|
||||||
|
### Build os packages
|
||||||
|
|
||||||
|
{{< highlight bash >}}
|
||||||
|
mage release:packages
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
Will build `.deb`, `.rpm` and `.apk` packages to `dist/os-packages`.
|
||||||
|
|
||||||
|
### Make a debian repo
|
||||||
|
|
||||||
|
{{< highlight bash >}}
|
||||||
|
mage release:reprepro
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
Takes an already built debian package and creates a debian repo structure around it.
|
||||||
|
|
||||||
|
Used to be run inside a [docker container](https://git.kolaente.de/konrad/reprepro-docker) in the CI process when releasing.
|
||||||
|
|
||||||
|
## Test
|
||||||
|
|
||||||
|
### unit
|
||||||
|
|
||||||
|
{{< highlight bash >}}
|
||||||
|
mage test:unit
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
Runs all tests except integration tests.
|
||||||
|
|
||||||
|
### coverage
|
||||||
|
|
||||||
|
{{< highlight bash >}}
|
||||||
|
mage test:coverage
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
Runs all tests except integration tests and generates a `coverage.html` file to inspect the code coverage.
|
||||||
|
|
||||||
|
### integration
|
||||||
|
|
||||||
|
{{< highlight bash >}}
|
||||||
|
mage test:integration
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
Runs all integration tests.
|
||||||
|
|
||||||
|
## Dev
|
||||||
|
|
||||||
|
### Create a new migration
|
||||||
|
|
||||||
|
{{< highlight bash >}}
|
||||||
|
mage dev:create-migration
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
Creates a new migration with the current date.
|
||||||
|
Will ask for the name of the struct you want to create a migration for.
|
||||||
|
|
||||||
|
## Misc
|
||||||
|
|
||||||
|
### Format the code
|
||||||
|
|
||||||
|
{{< highlight bash >}}
|
||||||
|
mage fmt
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
Formats all source code using `go fmt`.
|
||||||
|
|
||||||
|
### Generate swagger definitions from code comments
|
||||||
|
|
||||||
|
{{< highlight bash >}}
|
||||||
|
mage do-the-swag
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
Generates swagger definitions from the comment annotations in the code.
|
|
@ -1,151 +0,0 @@
|
||||||
---
|
|
||||||
date: "2019-02-12:00:00+02:00"
|
|
||||||
title: "Makefile"
|
|
||||||
draft: false
|
|
||||||
type: "doc"
|
|
||||||
menu:
|
|
||||||
sidebar:
|
|
||||||
parent: "development"
|
|
||||||
---
|
|
||||||
|
|
||||||
# Makefile
|
|
||||||
|
|
||||||
We scripted a lot of tasks used mostly for developing into the makefile. This document explains what
|
|
||||||
tasks are available and what they do.
|
|
||||||
|
|
||||||
## CI
|
|
||||||
|
|
||||||
These tasks are automatically run in our CI every time someone pushes to master or you update a pull request:
|
|
||||||
|
|
||||||
* `make lint`
|
|
||||||
* `make fmt-check`
|
|
||||||
* `make ineffassign-check`
|
|
||||||
* `make misspell-check`
|
|
||||||
* `make goconst-check`
|
|
||||||
* `make generate`
|
|
||||||
* `make build`
|
|
||||||
|
|
||||||
### clean
|
|
||||||
|
|
||||||
{{< highlight bash >}}
|
|
||||||
make clean
|
|
||||||
{{< /highlight >}}
|
|
||||||
|
|
||||||
Clears all builds and binaries.
|
|
||||||
|
|
||||||
### test
|
|
||||||
|
|
||||||
{{< highlight bash >}}
|
|
||||||
make test
|
|
||||||
{{< /highlight >}}
|
|
||||||
|
|
||||||
Runs all tests in Vikunja.
|
|
||||||
|
|
||||||
### Format the code
|
|
||||||
|
|
||||||
{{< highlight bash >}}
|
|
||||||
make fmt
|
|
||||||
{{< /highlight >}}
|
|
||||||
|
|
||||||
Formats all source code using `go fmt`.
|
|
||||||
|
|
||||||
#### Check formatting
|
|
||||||
|
|
||||||
{{< highlight bash >}}
|
|
||||||
make fmt-check
|
|
||||||
{{< /highlight >}}
|
|
||||||
|
|
||||||
Checks if the code needs to be formatted. Fails if it does.
|
|
||||||
|
|
||||||
### Build Vikunja
|
|
||||||
|
|
||||||
{{< highlight bash >}}
|
|
||||||
make build
|
|
||||||
{{< /highlight >}}
|
|
||||||
|
|
||||||
Builds a `vikunja`-binary in the root directory of the repo for the platform it is run on.
|
|
||||||
|
|
||||||
### Statically compile all templates into the binary
|
|
||||||
|
|
||||||
{{< highlight bash >}}
|
|
||||||
make generate
|
|
||||||
{{< /highlight >}}
|
|
||||||
|
|
||||||
This generates static code with all templates, meaning no templates need to be referenced at runtime.
|
|
||||||
|
|
||||||
### Compress the built binary
|
|
||||||
|
|
||||||
{{< highlight bash >}}
|
|
||||||
make compress-build
|
|
||||||
{{< /highlight >}}
|
|
||||||
|
|
||||||
Go binaries are very big.
|
|
||||||
To make the vikunja binary smaller, we can compress it using [upx](https://upx.github.io/).
|
|
||||||
|
|
||||||
### Build Releases
|
|
||||||
|
|
||||||
{{< highlight bash >}}
|
|
||||||
make release
|
|
||||||
{{< /highlight >}}
|
|
||||||
|
|
||||||
Builds binaries for all platforms and zips them with a copy of the `templates/` folder.
|
|
||||||
All built zip files are stored into `dist/zips/`. Binaries are stored in `dist/binaries/`,
|
|
||||||
binaries bundled with `templates` are stored in `dist/releases/`.
|
|
||||||
|
|
||||||
All cross-platform binaries built using this series of commands are built with the help of
|
|
||||||
[xgo](https://github.com/techknowlogick/xgo). The make command will automatically install the
|
|
||||||
binary to be able to use it.
|
|
||||||
|
|
||||||
`make release` is actually just a shortcut to execute `make release-dirs release-windows release-linux release-darwin release-copy release-check release-os-package release-zip`.
|
|
||||||
|
|
||||||
* `release-dirs` creates all directories needed
|
|
||||||
* `release-windows`/`release-linux`/`release-darwin` execute xgo to build for their respective platforms
|
|
||||||
* `release-copy` bundles binaries with a copy of `templates/` to then be zipped
|
|
||||||
* `release-check` creates sha256 checksums for each binary which will be included in the zip file
|
|
||||||
* `release-os-package` bundles a binary with a copy of the `templates/` folder, the `sha256` checksum file, a sample `config.yml` and a copy of the license in a folder for each architecture
|
|
||||||
* `release-compress` compresses all build binaries, see `compress-build`
|
|
||||||
* `release-zip` makes a zip file for the files created by `release-os-package`
|
|
||||||
|
|
||||||
### Build debian packages
|
|
||||||
|
|
||||||
{{< highlight bash >}}
|
|
||||||
make build-deb
|
|
||||||
{{< /highlight >}}
|
|
||||||
|
|
||||||
Will build a `.deb` package into the current folder. You need to have [fpm](https://fpm.readthedocs.io/en/latest/intro.html) installed to be able to do this.
|
|
||||||
|
|
||||||
#### Make a debian repo
|
|
||||||
|
|
||||||
{{< highlight bash >}}
|
|
||||||
make reprepro
|
|
||||||
{{< /highlight >}}
|
|
||||||
|
|
||||||
Takes an already built debian package and creates a debian repo structure around it.
|
|
||||||
|
|
||||||
Used to be run inside a [docker container](https://git.kolaente.de/konrad/reprepro-docker) in the CI process when releasing.
|
|
||||||
|
|
||||||
### Generate swagger definitions from code comments
|
|
||||||
|
|
||||||
{{< highlight bash >}}
|
|
||||||
make do-the-swag
|
|
||||||
{{< /highlight >}}
|
|
||||||
|
|
||||||
Generates swagger definitions from the comments in the code.
|
|
||||||
|
|
||||||
#### Check if swagger generation is needed
|
|
||||||
|
|
||||||
{{< highlight bash >}}
|
|
||||||
make got-swag
|
|
||||||
{{< /highlight >}}
|
|
||||||
|
|
||||||
This command is currently more an experiment, use it with caution.
|
|
||||||
It may bring up wrong results.
|
|
||||||
|
|
||||||
### Code-Checks
|
|
||||||
|
|
||||||
* `misspell-check`: Checks for commonly misspelled words
|
|
||||||
* `ineffassign-check`: Checks for ineffectual assignments in the code using [ineffassign](https://github.com/gordonklaus/ineffassign).
|
|
||||||
* `gocyclo-check`: Calculates cyclomatic complexities of functions using [gocyclo](https://github.com/fzipp/gocyclo).
|
|
||||||
* `static-check`: Analyzes the code for bugs, improvements and more using [staticcheck](https://staticcheck.io/docs/).
|
|
||||||
* `gosec-check`: Inspects source code for security problems by scanning the Go AST using the [gosec tool](https://github.com/securego/gosec).
|
|
||||||
* `goconst-check`: Finds repeated strings that could be replaced by a constant using [goconst](https://github.com/jgautheron/goconst/).
|
|
|
@ -16,12 +16,14 @@ To make this easier, we have put together a few helpers which are documented on
|
||||||
In general, each migrator implements a migrator interface which is then called from a client.
|
In general, each migrator implements a migrator interface which is then called from a client.
|
||||||
The interface makes it possible to use helper methods which handle http and focus only on the implementation of the migrator itself.
|
The interface makes it possible to use helper methods which handle http and focus only on the implementation of the migrator itself.
|
||||||
|
|
||||||
### Structure
|
{{< table_of_contents >}}
|
||||||
|
|
||||||
|
## Structure
|
||||||
|
|
||||||
All migrator implementations live in their own package in `pkg/modules/migration/<name-of-the-service>`.
|
All migrator implementations live in their own package in `pkg/modules/migration/<name-of-the-service>`.
|
||||||
When creating a new migrator, you should place all related code inside that module.
|
When creating a new migrator, you should place all related code inside that module.
|
||||||
|
|
||||||
### Migrator interface
|
## Migrator interface
|
||||||
|
|
||||||
The migrator interface is defined as follows:
|
The migrator interface is defined as follows:
|
||||||
|
|
||||||
|
@ -41,7 +43,7 @@ type Migrator interface {
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
### Defining http routes
|
## Defining http routes
|
||||||
|
|
||||||
Once your migrator implements the migration interface, it becomes possible to use the helper http handlers.
|
Once your migrator implements the migration interface, it becomes possible to use the helper http handlers.
|
||||||
Their usage is very similar to the [general web handler](https://kolaente.dev/vikunja/web#user-content-defining-routes-using-the-standard-web-handler):
|
Their usage is very similar to the [general web handler](https://kolaente.dev/vikunja/web#user-content-defining-routes-using-the-standard-web-handler):
|
||||||
|
@ -63,7 +65,7 @@ if config.MigrationWunderlistEnable.GetBool() {
|
||||||
|
|
||||||
You should also document the routes with [swagger annotations]({{< ref "../practical-instructions/swagger-docs.md" >}}).
|
You should also document the routes with [swagger annotations]({{< ref "../practical-instructions/swagger-docs.md" >}}).
|
||||||
|
|
||||||
### Insertion helper method
|
## Insertion helper method
|
||||||
|
|
||||||
There is a method available in the `migration` package which takes a fully nested Vikunja structure and creates it with all relations.
|
There is a method available in the `migration` package which takes a fully nested Vikunja structure and creates it with all relations.
|
||||||
This means you start by adding a namespace, then add lists inside of that namespace, then tasks in the lists and so on.
|
This means you start by adding a namespace, then add lists inside of that namespace, then tasks in the lists and so on.
|
||||||
|
@ -81,7 +83,7 @@ if err != nil {
|
||||||
err = migration.InsertFromStructure(fullVikunjaHierachie, user)
|
err = migration.InsertFromStructure(fullVikunjaHierachie, user)
|
||||||
```
|
```
|
||||||
|
|
||||||
### Configuration
|
## Configuration
|
||||||
|
|
||||||
You should add at least an option to enable or disable the migration.
|
You should add at least an option to enable or disable the migration.
|
||||||
Chances are, you'll need some more options for things like client ID and secret
|
Chances are, you'll need some more options for things like client ID and secret
|
||||||
|
@ -90,7 +92,7 @@ Chances are, you'll need some more options for things like client ID and secret
|
||||||
The easiest way to implement an on/off switch is to check whether your migration service is enabled or not when
|
The easiest way to implement an on/off switch is to check whether your migration service is enabled or not when
|
||||||
registering the routes, and then simply not registering the routes in the case it is disabled.
|
registering the routes, and then simply not registering the routes in the case it is disabled.
|
||||||
|
|
||||||
#### Making the migrator public in `/info`
|
### Making the migrator public in `/info`
|
||||||
|
|
||||||
You should make your migrator available in the `/info` endpoint so that frontends can display options to enable them or not.
|
You should make your migrator available in the `/info` endpoint so that frontends can display options to enable them or not.
|
||||||
To do this, add an entry to `pkg/routes/api/v1/info.go`.
|
To do this, add an entry to `pkg/routes/api/v1/info.go`.
|
||||||
|
|
|
@ -45,9 +45,11 @@ In general, this api repo has the following structure:
|
||||||
|
|
||||||
This document will explain what these mean and what you can find where.
|
This document will explain what these mean and what you can find where.
|
||||||
|
|
||||||
|
{{< table_of_contents >}}
|
||||||
|
|
||||||
## Root level
|
## Root level
|
||||||
|
|
||||||
The root directory is where [the config file]({{< ref "../setup/config.md">}}), [Makefile]({{< ref "make.md">}}), license, drone config,
|
The root directory is where [the config file]({{< ref "../setup/config.md">}}), [Magefile]({{< ref "mage.md">}}), license, drone config,
|
||||||
application entry point (`main.go`) and so on are located.
|
application entry point (`main.go`) and so on are located.
|
||||||
|
|
||||||
## docker
|
## docker
|
||||||
|
@ -152,11 +154,11 @@ Every handler function which does not use the standard web handler should live h
|
||||||
|
|
||||||
### static
|
### static
|
||||||
|
|
||||||
All static files generated by `make generate` live here.
|
All static files generated by `mage generate` live here.
|
||||||
|
|
||||||
### swagger
|
### swagger
|
||||||
|
|
||||||
This is where the [generated]({{< ref "make.md#generate-swagger-definitions-from-code-comments">}}) [api docs]({{< ref "../usage/api.md">}}) live.
|
This is where the [generated]({{< ref "mage.md#generate-swagger-definitions-from-code-comments">}}) [api docs]({{< ref "../usage/api.md">}}) live.
|
||||||
You usually don't need to touch this package.
|
You usually don't need to touch this package.
|
||||||
|
|
||||||
### user
|
### user
|
||||||
|
@ -175,7 +177,7 @@ See their function definitions for instructions on how to use them.
|
||||||
### version
|
### version
|
||||||
|
|
||||||
The single purpose of this package is to hold the current vikunja version which gets overridden through build flags
|
The single purpose of this package is to hold the current vikunja version which gets overridden through build flags
|
||||||
each time `make release` or `make build` is run.
|
each time `mage release` or `mage build` is run.
|
||||||
It is a separate package to avoid import cycles with other packages.
|
It is a separate package to avoid import cycles with other packages.
|
||||||
|
|
||||||
## REST-Tests
|
## REST-Tests
|
||||||
|
|
|
@ -10,40 +10,42 @@ menu:
|
||||||
|
|
||||||
# Testing
|
# Testing
|
||||||
|
|
||||||
You can run unit tests with [our `Makefile`]({{< ref "make.md">}}) with
|
You can run unit tests with [our `Magefile`]({{< ref "mage.md">}}) with
|
||||||
|
|
||||||
{{< highlight bash >}}
|
{{< highlight bash >}}
|
||||||
make test
|
mage test:unit
|
||||||
{{< /highlight >}}
|
{{< /highlight >}}
|
||||||
|
|
||||||
### Running tests with config
|
{{< table_of_contents >}}
|
||||||
|
|
||||||
|
## Running tests with config
|
||||||
|
|
||||||
You can run tests with all available config variables if you want, enabling you to run tests for a lot of scenarios.
|
You can run tests with all available config variables if you want, enabling you to run tests for a lot of scenarios.
|
||||||
|
|
||||||
To use the normal config set the environment variable `VIKUNJA_TESTS_USE_CONFIG=1`.
|
To use the normal config set the environment variable `VIKUNJA_TESTS_USE_CONFIG=1`.
|
||||||
|
|
||||||
### Show sql queries
|
## Show sql queries
|
||||||
|
|
||||||
When `UNIT_TESTS_VERBOSE=1` is set, all sql queries will be shown when tests are run.
|
When `UNIT_TESTS_VERBOSE=1` is set, all sql queries will be shown when tests are run.
|
||||||
|
|
||||||
### Fixtures
|
## Fixtures
|
||||||
|
|
||||||
All tests are run against a set of db fixtures.
|
All tests are run against a set of db fixtures.
|
||||||
These fixtures are defined in `pkg/models/fixtures` in YAML-Files which represent the database structure.
|
These fixtures are defined in `pkg/models/fixtures` in YAML-Files which represent the database structure.
|
||||||
|
|
||||||
When you add a new test case which requires new database entries to test against, update these files.
|
When you add a new test case which requires new database entries to test against, update these files.
|
||||||
|
|
||||||
# Integration tests
|
## Integration tests
|
||||||
|
|
||||||
All integration tests live in `pkg/integrations`.
|
All integration tests live in `pkg/integrations`.
|
||||||
You can run them by executing `make integration-test`.
|
You can run them by executing `mage test:integration`.
|
||||||
|
|
||||||
The integration tests use the same config and fixtures as the unit tests and therefore have the same options available,
|
The integration tests use the same config and fixtures as the unit tests and therefore have the same options available,
|
||||||
see at the beginning of this document.
|
see at the beginning of this document.
|
||||||
|
|
||||||
To run integration tests, use `make integration-test`.
|
To run integration tests, use `mage test:integration`.
|
||||||
|
|
||||||
# Initializing db fixtures when writing tests
|
## Initializing db fixtures when writing tests
|
||||||
|
|
||||||
All db fixtures for all tests live in the `pkg/db/fixtures/` folder as yaml files.
|
All db fixtures for all tests live in the `pkg/db/fixtures/` folder as yaml files.
|
||||||
Each file has the same name as the table the fixtures are for.
|
Each file has the same name as the table the fixtures are for.
|
||||||
|
|
|
@ -18,7 +18,9 @@ This is used whenever you make a call to the database to get or update data.
|
||||||
|
|
||||||
This xorm instance is set up and initialized every time vikunja is started.
|
This xorm instance is set up and initialized every time vikunja is started.
|
||||||
|
|
||||||
### Adding new database tables
|
{{< table_of_contents >}}
|
||||||
|
|
||||||
|
## Adding new database tables
|
||||||
|
|
||||||
To add a new table to the database, add an instance of your struct to the `tables` variable in the
|
To add a new table to the database, add an instance of your struct to the `tables` variable in the
|
||||||
init function in `pkg/models/models.go`. Xorm will sync them automatically.
|
init function in `pkg/models/models.go`. Xorm will sync them automatically.
|
||||||
|
@ -27,7 +29,7 @@ You also need to add a pointer to the `tablesWithPointer` slice to enable cachin
|
||||||
|
|
||||||
To learn more about how to configure your struct to create "good" tables, refer to [the xorm documentation](http://xorm.io/docs/).
|
To learn more about how to configure your struct to create "good" tables, refer to [the xorm documentation](http://xorm.io/docs/).
|
||||||
|
|
||||||
### Adding data to test fixtures
|
## Adding data to test fixtures
|
||||||
|
|
||||||
Adding data for test fixtures is done via `yaml` files inside of `pkg/models/fixtures`.
|
Adding data for test fixtures is done via `yaml` files inside of `pkg/models/fixtures`.
|
||||||
|
|
||||||
|
|
|
@ -12,6 +12,8 @@ menu:
|
||||||
|
|
||||||
This document explains how to use the mailer to send emails and what to do to create a new kind of email to be sent.
|
This document explains how to use the mailer to send emails and what to do to create a new kind of email to be sent.
|
||||||
|
|
||||||
|
{{< table_of_contents >}}
|
||||||
|
|
||||||
## Sending emails
|
## Sending emails
|
||||||
|
|
||||||
**Note:** You should use mail templates whenever possible (see below).
|
**Note:** You should use mail templates whenever possible (see below).
|
||||||
|
@ -30,7 +32,7 @@ type Opts struct {
|
||||||
}
|
}
|
||||||
{{< /highlight >}}
|
{{< /highlight >}}
|
||||||
|
|
||||||
## Sending emails based on a template
|
### Sending emails based on a template
|
||||||
|
|
||||||
For each mail with a template, there are two email templates: One for plaintext emails, one for html emails.
|
For each mail with a template, there are two email templates: One for plaintext emails, one for html emails.
|
||||||
|
|
||||||
|
@ -41,7 +43,7 @@ To send a mail based on a template, use the function `mail.SendMailWithTemplate(
|
||||||
`to` and `subject` are pretty much self-explanatory, `tpl` is the name of the template, without `.html.tmpl` or `.plain.tmpl`.
|
`to` and `subject` are pretty much self-explanatory, `tpl` is the name of the template, without `.html.tmpl` or `.plain.tmpl`.
|
||||||
`data` is a map you can pass additional data to your template.
|
`data` is a map you can pass additional data to your template.
|
||||||
|
|
||||||
#### Sending a mail with a template
|
### Sending a mail with a template
|
||||||
|
|
||||||
A basic html email template would look like this:
|
A basic html email template would look like this:
|
||||||
|
|
||||||
|
|
|
@ -15,6 +15,8 @@ Metrics work by exposing a `/metrics` endpoint which can then be accessed by pro
|
||||||
To keep the load on the database minimal, metrics are stored and updated in redis.
|
To keep the load on the database minimal, metrics are stored and updated in redis.
|
||||||
The `metrics` package provides several functions to create and update metrics.
|
The `metrics` package provides several functions to create and update metrics.
|
||||||
|
|
||||||
|
{{< table_of_contents >}}
|
||||||
|
|
||||||
## New metrics
|
## New metrics
|
||||||
|
|
||||||
First, define a `const` with the metric key in redis. This is done in `pkg/metrics/metrics.go`.
|
First, define a `const` with the metric key in redis. This is done in `pkg/metrics/metrics.go`.
|
||||||
|
@ -41,6 +43,6 @@ Because metrics are stored in redis, you are responsible to increase or decrease
|
||||||
To do this, use `metrics.UpdateCount(value, key)` where `value` is the amount you want to change it (you can pass
|
To do this, use `metrics.UpdateCount(value, key)` where `value` is the amount you want to change it (you can pass
|
||||||
negative values to decrease it) and `key` is the redis key used to define the metric.
|
negative values to decrease it) and `key` is the redis key used to define the metric.
|
||||||
|
|
||||||
# Using it
|
## Using it
|
||||||
|
|
||||||
A Prometheus config with a Grafana template is available at [our git repo](https://git.kolaente.de/vikunja/monitoring).
|
A Prometheus config with a Grafana template is available at [our git repo](https://git.kolaente.de/vikunja/monitoring).
|
||||||
|
|
|
@ -12,7 +12,7 @@ menu:
|
||||||
|
|
||||||
The api documentation is generated using [swaggo](https://github.com/swaggo/swag) from comments.
|
The api documentation is generated using [swaggo](https://github.com/swaggo/swag) from comments.
|
||||||
|
|
||||||
### Documenting structs
|
## Documenting structs
|
||||||
|
|
||||||
You should always comment every field which will be exposed as a json in the api.
|
You should always comment every field which will be exposed as a json in the api.
|
||||||
These comments will show up in the documentation, it'll make it easier for developers using the api.
|
These comments will show up in the documentation, it'll make it easier for developers using the api.
|
||||||
|
|
|
@ -13,6 +13,8 @@ menu:
|
||||||
Vikunja does not store any data outside of the database.
|
Vikunja does not store any data outside of the database.
|
||||||
So, all you need to backup are the contents of that database and maybe the config file.
|
So, all you need to backup are the contents of that database and maybe the config file.
|
||||||
|
|
||||||
|
{{< table_of_contents >}}
|
||||||
|
|
||||||
## MySQL
|
## MySQL
|
||||||
|
|
||||||
To create a backup from mysql use the `mysqldump` command:
|
To create a backup from mysql use the `mysqldump` command:
|
||||||
|
|
|
@ -14,16 +14,16 @@ Vikunja being a go application, has no other dependencies than go itself.
|
||||||
All libraries are bundled inside the repo in the `vendor/` folder, so all it boils down to are these steps:
|
All libraries are bundled inside the repo in the `vendor/` folder, so all it boils down to are these steps:
|
||||||
|
|
||||||
1. Make sure [Go](https://golang.org/doc/install) is properly installed on your system. You'll need at least Go `1.9`.
|
1. Make sure [Go](https://golang.org/doc/install) is properly installed on your system. You'll need at least Go `1.9`.
|
||||||
2. Make sure [Make](https://www.gnu.org/software/make/) is properly installed on your system.
|
2. Make sure [Mage](https://magefile.org) is properly installed on your system.
|
||||||
3. Clone the repo with `git clone https://code.vikunja.io/api`
|
3. Clone the repo with `git clone https://code.vikunja.io/api`
|
||||||
3. Run `make build` in the source of this repo. This will build a binary in the root of the repo which will be able to run on your system.
|
3. Run `mage build:build` in the source of this repo. This will build a binary in the root of the repo which will be able to run on your system.
|
||||||
|
|
||||||
*Note:* Static resources such as email templates are built into the binary.
|
*Note:* Static resources such as email templates are built into the binary.
|
||||||
For these to work, you may need to run `make generate` before building the vikunja binary.
|
For these to work, you may need to run `mage build:generate` before building the vikunja binary.
|
||||||
When building entirely with `make`, you don't need to do this, `make generate` will be run automatically when running `make build`.
|
When building entirely with `mage`, you don't need to do this, `mage build:generate` will be run automatically when running `mage build:build`.
|
||||||
|
|
||||||
# Build for different architectures
|
# Build for different architectures
|
||||||
|
|
||||||
To build for other platforms and architectures than the one you're currently on, simply run `make release` or `make release-{linux|windows|darwin}`.
|
To build for other platforms and architectures than the one you're currently on, simply run `mage release:release` or `mage release:{linux|windows|darwin}`.
|
||||||
|
|
||||||
More options are available, please refer to the [makefile docs]({{< ref "../development/make.md">}}) for more details.
|
More options are available, please refer to the [magefile docs]({{< ref "../development/mage.md">}}) for more details.
|
|
@ -27,7 +27,14 @@ first:
|
||||||
child: true
|
child: true
|
||||||
{{< /highlight >}}
|
{{< /highlight >}}
|
||||||
|
|
||||||
## Config file locations
|
# Formats
|
||||||
|
|
||||||
|
Vikunja supports using `toml`, `yaml`, `hcl`, `ini`, `json`, envfile, env variables and Java Properties files.
|
||||||
|
We recommend yaml or toml, but you're free to use whatever you want.
|
||||||
|
|
||||||
|
Vikunja provides a default [`config.yml`](https://kolaente.dev/vikunja/api/src/branch/master/config.yml.sample) file which you can use as a starting point.
|
||||||
|
|
||||||
|
# Config file locations
|
||||||
|
|
||||||
Vikunja will search on various places for a config file:
|
Vikunja will search on various places for a config file:
|
||||||
|
|
||||||
|
@ -38,210 +45,545 @@ Vikunja will search on various places for a config file:
|
||||||
|
|
||||||
# Default configuration with explanations
|
# Default configuration with explanations
|
||||||
|
|
||||||
This is the same as the `config.yml.sample` file you'll find in the root of vikunja.
|
The following explains all possible config variables and their defaults.
|
||||||
|
You can find a full example configuration file in [here](https://code.vikunja.io/api/src/branch/master/config.yml.sample).
|
||||||
|
|
||||||
{{< highlight yaml >}}
|
If you don't provide a value in your config file, their default will be used.
|
||||||
service:
|
|
||||||
# This token is used to verify issued JWT tokens.
|
|
||||||
# Default is a random token which will be generated at each startup of vikunja.
|
|
||||||
# (This means all already issued tokens will be invalid once you restart vikunja)
|
|
||||||
JWTSecret: "cei6gaezoosah2bao3ieZohkae5aicah"
|
|
||||||
# The interface on which to run the webserver
|
|
||||||
interface: ":3456"
|
|
||||||
# The URL of the frontend, used to send password reset emails.
|
|
||||||
frontendurl: ""
|
|
||||||
# The base path on the file system where the binary and assets are.
|
|
||||||
# Vikunja will also look in this path for a config file, so you could provide only this variable to point to a folder
|
|
||||||
# with a config file which will then be used.
|
|
||||||
rootpath: <the path of the executable>
|
|
||||||
# The max number of items which can be returned per page
|
|
||||||
maxitemsperpage: 50
|
|
||||||
# If set to true, enables a /metrics endpoint for prometheus to collect metrics about the system
|
|
||||||
# You'll need to use redis for this in order to enable common metrics over multiple nodes
|
|
||||||
enablemetrics: false
|
|
||||||
# Enable the caldav endpoint, see the docs for more details
|
|
||||||
enablecaldav: true
|
|
||||||
# Set the motd message, available from the /info endpoint
|
|
||||||
motd: ""
|
|
||||||
# Enable sharing of lists via a link
|
|
||||||
enablelinksharing: true
|
|
||||||
# Whether to let new users registering themselves or not
|
|
||||||
enableregistration: true
|
|
||||||
# Whether to enable task attachments or not
|
|
||||||
enabletaskattachments: true
|
|
||||||
# The time zone all timestamps are in
|
|
||||||
timezone: GMT
|
|
||||||
# Whether task comments should be enabled or not
|
|
||||||
enabletaskcomments: true
|
|
||||||
# Whether totp is enabled. In most cases you want to leave that enabled.
|
|
||||||
enabletotp: true
|
|
||||||
# If not empty, enables logging of crashes and unhandled errors in sentry.
|
|
||||||
sentrydsn: ''
|
|
||||||
|
|
||||||
database:
|
## Nesting
|
||||||
# Database type to use. Supported types are mysql, postgres and sqlite.
|
|
||||||
type: "sqlite"
|
|
||||||
# Database user which is used to connect to the database.
|
|
||||||
user: "vikunja"
|
|
||||||
# Databse password
|
|
||||||
password: ""
|
|
||||||
# Databse host
|
|
||||||
host: "localhost"
|
|
||||||
# Databse to use
|
|
||||||
database: "vikunja"
|
|
||||||
# When using sqlite, this is the path where to store the data
|
|
||||||
path: "./vikunja.db"
|
|
||||||
# Sets the max open connections to the database. Only used when using mysql and postgres.
|
|
||||||
maxopenconnections: 100
|
|
||||||
# Sets the maximum number of idle connections to the db.
|
|
||||||
maxidleconnections: 50
|
|
||||||
# The maximum lifetime of a single db connection in miliseconds.
|
|
||||||
maxconnectionlifetime: 10000
|
|
||||||
# Secure connection mode. Only used with postgres.
|
|
||||||
# (see https://pkg.go.dev/github.com/lib/pq?tab=doc#hdr-Connection_String_Parameters)
|
|
||||||
sslmode: disable
|
|
||||||
|
|
||||||
cache:
|
Most config variables are nested under some "higher-level" key.
|
||||||
# If cache is enabled or not
|
For example, the `interface` config variable is a child of the `service` key.
|
||||||
enabled: false
|
|
||||||
# Cache type. Possible values are memory or redis, you'll need to enable redis below when using redis
|
|
||||||
type: memory
|
|
||||||
# When using memory this defines the maximum size an element can take
|
|
||||||
maxelementsize: 1000
|
|
||||||
|
|
||||||
redis:
|
The docs below aim to reflect that leveling, but please also have a look at [the default config](https://code.vikunja.io/api/src/branch/master/config.yml.sample) file
|
||||||
# Whether to enable redis or not
|
to better grasp how the nesting looks like.
|
||||||
enabled: false
|
|
||||||
# The host of the redis server including its port.
|
|
||||||
host: 'localhost:6379'
|
|
||||||
# The password used to authenicate against the redis server
|
|
||||||
password: ''
|
|
||||||
# 0 means default database
|
|
||||||
db: 0
|
|
||||||
|
|
||||||
cors:
|
<!-- Generated config will be injected here -->
|
||||||
# Whether to enable or disable cors headers.
|
|
||||||
# Note: If you want to put the frontend and the api on seperate domains or ports, you will need to enable this.
|
|
||||||
# Otherwise the frontend won't be able to make requests to the api through the browser.
|
|
||||||
enable: true
|
|
||||||
# A list of origins which may access the api.
|
|
||||||
origins:
|
|
||||||
- *
|
|
||||||
# How long (in seconds) the results of a preflight request can be cached.
|
|
||||||
maxage: 0
|
|
||||||
|
|
||||||
mailer:
|
---
|
||||||
# Whether to enable the mailer or not. If it is disabled, all users are enabled right away and password reset is not possible.
|
|
||||||
enabled: false
|
|
||||||
# SMTP Host
|
|
||||||
host: ""
|
|
||||||
# SMTP Host port
|
|
||||||
port: 587
|
|
||||||
# SMTP username
|
|
||||||
username: "user"
|
|
||||||
# SMTP password
|
|
||||||
password: ""
|
|
||||||
# Wether to skip verification of the tls certificate on the server
|
|
||||||
skiptlsverify: false
|
|
||||||
# The default from address when sending emails
|
|
||||||
fromemail: "mail@vikunja"
|
|
||||||
# The length of the mail queue.
|
|
||||||
queuelength: 100
|
|
||||||
# The timeout in seconds after which the current open connection to the mailserver will be closed.
|
|
||||||
queuetimeout: 30
|
|
||||||
|
|
||||||
log:
|
## service
|
||||||
# A folder where all the logfiles should go.
|
|
||||||
path: <rootpath>logs
|
|
||||||
# Whether to show any logging at all or none
|
|
||||||
enabled: true
|
|
||||||
# Where the normal log should go. Possible values are stdout, stderr, file or off to disable standard logging.
|
|
||||||
standard: "stdout"
|
|
||||||
# Change the log level. Possible values (case-insensitive) are CRITICAL, ERROR, WARNING, NOTICE, INFO, DEBUG.
|
|
||||||
level: "INFO"
|
|
||||||
# Whether or not to log database queries. Useful for debugging. Possible values are stdout, stderr, file or off to disable database logging.
|
|
||||||
database: "off"
|
|
||||||
# The log level for database log messages. Possible values (case-insensitive) are CRITICAL, ERROR, WARNING, NOTICE, INFO, DEBUG.
|
|
||||||
databaselevel: "WARNING"
|
|
||||||
# Whether to log http requests or not. Possible values are stdout, stderr, file or off to disable http logging.
|
|
||||||
http: "stdout"
|
|
||||||
# Echo has its own logging which usually is unnessecary, which is why it is disabled by default. Possible values are stdout, stderr, file or off to disable standard logging.
|
|
||||||
echo: "off"
|
|
||||||
|
|
||||||
ratelimit:
|
|
||||||
# whether or not to enable the rate limit
|
|
||||||
enabled: false
|
|
||||||
# The kind on which rates are based. Can be either "user" for a rate limit per user or "ip" for an ip-based rate limit.
|
|
||||||
kind: user
|
|
||||||
# The time period in seconds for the limit
|
|
||||||
period: 60
|
|
||||||
# The max number of requests a user is allowed to do in the configured time period
|
|
||||||
limit: 100
|
|
||||||
# The store where the limit counter for each user is stored. Possible values are "memory" or "redis"
|
|
||||||
store: memory
|
|
||||||
|
|
||||||
files:
|
|
||||||
# The path where files are stored
|
|
||||||
basepath: ./files # relative to the binary
|
|
||||||
# The maximum size of a file, as a human-readable string.
|
|
||||||
# Warning: The max size is limited 2^64-1 bytes due to the underlying datatype
|
|
||||||
maxsize: 20MB
|
|
||||||
|
|
||||||
migration:
|
|
||||||
# These are the settings for the wunderlist migrator
|
|
||||||
wunderlist:
|
|
||||||
    # Whether to enable the wunderlist migrator or not
|
|
||||||
enable: false
|
|
||||||
# The client id, required for making requests to the wunderlist api
|
|
||||||
# You need to register your vikunja instance at https://developer.wunderlist.com/apps/new to get this
|
|
||||||
clientid:
|
|
||||||
# The client secret, also required for making requests to the wunderlist api
|
|
||||||
clientsecret:
|
|
||||||
# The url where clients are redirected after they authorized Vikunja to access their wunderlist stuff.
|
|
||||||
# This needs to match the url you entered when registering your Vikunja instance at wunderlist.
|
|
||||||
# This is usually the frontend url where the frontend then makes a request to /migration/wunderlist/migrate
|
|
||||||
# with the code obtained from the wunderlist api.
|
|
||||||
# Note that the vikunja frontend expects this to be /migrate/wunderlist
|
|
||||||
redirecturl:
|
|
||||||
todoist:
|
|
||||||
    # Whether to enable the todoist migrator or not
|
|
||||||
enable: false
|
|
||||||
    # The client id, required for making requests to the todoist api
|
|
||||||
# You need to register your vikunja instance at https://developer.todoist.com/appconsole.html to get this
|
|
||||||
clientid:
|
|
||||||
# The client secret, also required for making requests to the todoist api
|
|
||||||
clientsecret:
|
|
||||||
# The url where clients are redirected after they authorized Vikunja to access their todoist items.
|
|
||||||
# This needs to match the url you entered when registering your Vikunja instance at todoist.
|
|
||||||
# This is usually the frontend url where the frontend then makes a request to /migration/todoist/migrate
|
|
||||||
# with the code obtained from the todoist api.
|
|
||||||
# Note that the vikunja frontend expects this to be /migrate/todoist
|
|
||||||
redirecturl:
|
|
||||||
|
|
||||||
avatar:
|
### JWTSecret
|
||||||
# Switch between avatar providers. Possible values are gravatar and default.
|
|
||||||
# gravatar will fetch the avatar based on the user email.
|
This token is used to verify issued JWT tokens.
|
||||||
# default will return a default avatar for every request.
|
Default is a random token which will be generated at each startup of vikunja.
|
||||||
provider: gravatar
|
(This means all already issued tokens will be invalid once you restart vikunja)
|
||||||
# When using gravatar, this is the duration in seconds until a cached gravatar user avatar expires
|
|
||||||
gravatarexpiration: 3600
|
Default: `<jwt-secret>`
|
||||||
|
|
||||||
|
### interface
|
||||||
|
|
||||||
|
The interface on which to run the webserver
|
||||||
|
|
||||||
|
Default: `:3456`
|
||||||
|
|
||||||
|
### frontendurl
|
||||||
|
|
||||||
|
The URL of the frontend, used to send password reset emails.
|
||||||
|
|
||||||
|
Default: `<empty>`
|
||||||
|
|
||||||
|
### rootpath
|
||||||
|
|
||||||
|
The base path on the file system where the binary and assets are.
|
||||||
|
Vikunja will also look in this path for a config file, so you could provide only this variable to point to a folder
|
||||||
|
with a config file which will then be used.
|
||||||
|
|
||||||
|
Default: `<rootpath>`
|
||||||
|
|
||||||
|
### maxitemsperpage
|
||||||
|
|
||||||
|
The max number of items which can be returned per page
|
||||||
|
|
||||||
|
Default: `50`
|
||||||
|
|
||||||
|
### enablemetrics
|
||||||
|
|
||||||
|
If set to true, enables a /metrics endpoint for prometheus to collect metrics about the system
|
||||||
|
You'll need to use redis for this in order to enable common metrics over multiple nodes
|
||||||
|
|
||||||
|
Default: `false`
|
||||||
|
|
||||||
|
### enablecaldav
|
||||||
|
|
||||||
|
Enable the caldav endpoint, see the docs for more details
|
||||||
|
|
||||||
|
Default: `true`
|
||||||
|
|
||||||
|
### motd
|
||||||
|
|
||||||
|
Set the motd message, available from the /info endpoint
|
||||||
|
|
||||||
|
Default: `<empty>`
|
||||||
|
|
||||||
|
### enablelinksharing
|
||||||
|
|
||||||
|
Enable sharing of lists via a link
|
||||||
|
|
||||||
|
Default: `true`
|
||||||
|
|
||||||
|
### enableregistration
|
||||||
|
|
||||||
|
Whether to let new users register themselves or not
|
||||||
|
|
||||||
|
Default: `true`
|
||||||
|
|
||||||
|
### enabletaskattachments
|
||||||
|
|
||||||
|
Whether to enable task attachments or not
|
||||||
|
|
||||||
|
Default: `true`
|
||||||
|
|
||||||
|
### timezone
|
||||||
|
|
||||||
|
The time zone all timestamps are in
|
||||||
|
|
||||||
|
Default: `GMT`
|
||||||
|
|
||||||
|
### enabletaskcomments
|
||||||
|
|
||||||
|
Whether task comments should be enabled or not
|
||||||
|
|
||||||
|
Default: `true`
|
||||||
|
|
||||||
|
### enabletotp
|
||||||
|
|
||||||
|
Whether totp is enabled. In most cases you want to leave that enabled.
|
||||||
|
|
||||||
|
Default: `true`
|
||||||
|
|
||||||
|
### sentrydsn
|
||||||
|
|
||||||
|
If not empty, enables logging of crashes and unhandled errors in sentry.
|
||||||
|
|
||||||
|
Default: `<empty>`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## database
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### type
|
||||||
|
|
||||||
|
Database type to use. Supported types are mysql, postgres and sqlite.
|
||||||
|
|
||||||
|
Default: `sqlite`
|
||||||
|
|
||||||
|
### user
|
||||||
|
|
||||||
|
Database user which is used to connect to the database.
|
||||||
|
|
||||||
|
Default: `vikunja`
|
||||||
|
|
||||||
|
### password
|
||||||
|
|
||||||
|
Database password
|
||||||
|
|
||||||
|
Default: `<empty>`
|
||||||
|
|
||||||
|
### host
|
||||||
|
|
||||||
|
Database host
|
||||||
|
|
||||||
|
Default: `localhost`
|
||||||
|
|
||||||
|
### database
|
||||||
|
|
||||||
|
Database to use
|
||||||
|
|
||||||
|
Default: `vikunja`
|
||||||
|
|
||||||
|
### path
|
||||||
|
|
||||||
|
When using sqlite, this is the path where to store the data
|
||||||
|
|
||||||
|
Default: `./vikunja.db`
|
||||||
|
|
||||||
|
### maxopenconnections
|
||||||
|
|
||||||
|
Sets the max open connections to the database. Only used when using mysql and postgres.
|
||||||
|
|
||||||
|
Default: `100`
|
||||||
|
|
||||||
|
### maxidleconnections
|
||||||
|
|
||||||
|
Sets the maximum number of idle connections to the db.
|
||||||
|
|
||||||
|
Default: `50`
|
||||||
|
|
||||||
|
### maxconnectionlifetime
|
||||||
|
|
||||||
|
The maximum lifetime of a single db connection in milliseconds.
|
||||||
|
|
||||||
|
Default: `10000`
|
||||||
|
|
||||||
|
### sslmode
|
||||||
|
|
||||||
|
Secure connection mode. Only used with postgres.
|
||||||
|
(see https://pkg.go.dev/github.com/lib/pq?tab=doc#hdr-Connection_String_Parameters)
|
||||||
|
|
||||||
|
Default: `disable`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## cache
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### enabled
|
||||||
|
|
||||||
|
If cache is enabled or not
|
||||||
|
|
||||||
|
Default: `false`
|
||||||
|
|
||||||
|
### type
|
||||||
|
|
||||||
|
Cache type. Possible values are "keyvalue", "memory" or "redis".
|
||||||
|
When choosing "keyvalue" this setting follows the one configured in the "keyvalue" section.
|
||||||
|
When choosing "redis" you will need to configure the redis connection separately.
|
||||||
|
|
||||||
|
Default: `keyvalue`
|
||||||
|
|
||||||
|
### maxelementsize
|
||||||
|
|
||||||
|
When using memory this defines the maximum size an element can take
|
||||||
|
|
||||||
|
Default: `1000`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## redis
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### enabled
|
||||||
|
|
||||||
|
Whether to enable redis or not
|
||||||
|
|
||||||
|
Default: `false`
|
||||||
|
|
||||||
|
### host
|
||||||
|
|
||||||
|
The host of the redis server including its port.
|
||||||
|
|
||||||
|
Default: `localhost:6379`
|
||||||
|
|
||||||
|
### password
|
||||||
|
|
||||||
|
The password used to authenticate against the redis server
|
||||||
|
|
||||||
|
Default: `<empty>`
|
||||||
|
|
||||||
|
### db
|
||||||
|
|
||||||
|
0 means default database
|
||||||
|
|
||||||
|
Default: `0`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## cors
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### enable
|
||||||
|
|
||||||
|
Whether to enable or disable cors headers.
|
||||||
|
Note: If you want to put the frontend and the api on separate domains or ports, you will need to enable this.
|
||||||
|
Otherwise the frontend won't be able to make requests to the api through the browser.
|
||||||
|
|
||||||
|
Default: `true`
|
||||||
|
|
||||||
|
### origins
|
||||||
|
|
||||||
|
A list of origins which may access the api.
|
||||||
|
|
||||||
|
Default: `<empty>`
|
||||||
|
|
||||||
|
### maxage
|
||||||
|
|
||||||
|
How long (in seconds) the results of a preflight request can be cached.
|
||||||
|
|
||||||
|
Default: `0`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## mailer
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### enabled
|
||||||
|
|
||||||
|
Whether to enable the mailer or not. If it is disabled, all users are enabled right away and password reset is not possible.
|
||||||
|
|
||||||
|
Default: `false`
|
||||||
|
|
||||||
|
### host
|
||||||
|
|
||||||
|
SMTP Host
|
||||||
|
|
||||||
|
Default: `<empty>`
|
||||||
|
|
||||||
|
### port
|
||||||
|
|
||||||
|
SMTP Host port
|
||||||
|
|
||||||
|
Default: `587`
|
||||||
|
|
||||||
|
### username
|
||||||
|
|
||||||
|
SMTP username
|
||||||
|
|
||||||
|
Default: `user`
|
||||||
|
|
||||||
|
### password
|
||||||
|
|
||||||
|
SMTP password
|
||||||
|
|
||||||
|
Default: `<empty>`
|
||||||
|
|
||||||
|
### skiptlsverify
|
||||||
|
|
||||||
|
Whether to skip verification of the tls certificate on the server
|
||||||
|
|
||||||
|
Default: `false`
|
||||||
|
|
||||||
|
### fromemail
|
||||||
|
|
||||||
|
The default from address when sending emails
|
||||||
|
|
||||||
|
Default: `mail@vikunja`
|
||||||
|
|
||||||
|
### queuelength
|
||||||
|
|
||||||
|
The length of the mail queue.
|
||||||
|
|
||||||
|
Default: `100`
|
||||||
|
|
||||||
|
### queuetimeout
|
||||||
|
|
||||||
|
The timeout in seconds after which the current open connection to the mailserver will be closed.
|
||||||
|
|
||||||
|
Default: `30`
|
||||||
|
|
||||||
|
### forcessl
|
||||||
|
|
||||||
|
By default, vikunja will try to connect with starttls, use this option to force it to use ssl.
|
||||||
|
|
||||||
|
Default: `false`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## log
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### path
|
||||||
|
|
||||||
|
A folder where all the logfiles should go.
|
||||||
|
|
||||||
|
Default: `<rootpath>logs`
|
||||||
|
|
||||||
|
### enabled
|
||||||
|
|
||||||
|
Whether to show any logging at all or none
|
||||||
|
|
||||||
|
Default: `true`
|
||||||
|
|
||||||
|
### standard
|
||||||
|
|
||||||
|
Where the normal log should go. Possible values are stdout, stderr, file or off to disable standard logging.
|
||||||
|
|
||||||
|
Default: `stdout`
|
||||||
|
|
||||||
|
### level
|
||||||
|
|
||||||
|
Change the log level. Possible values (case-insensitive) are CRITICAL, ERROR, WARNING, NOTICE, INFO, DEBUG.
|
||||||
|
|
||||||
|
Default: `INFO`
|
||||||
|
|
||||||
|
### database
|
||||||
|
|
||||||
|
Whether or not to log database queries. Useful for debugging. Possible values are stdout, stderr, file or off to disable database logging.
|
||||||
|
|
||||||
|
Default: `off`
|
||||||
|
|
||||||
|
### databaselevel
|
||||||
|
|
||||||
|
The log level for database log messages. Possible values (case-insensitive) are CRITICAL, ERROR, WARNING, NOTICE, INFO, DEBUG.
|
||||||
|
|
||||||
|
Default: `WARNING`
|
||||||
|
|
||||||
|
### http
|
||||||
|
|
||||||
|
Whether to log http requests or not. Possible values are stdout, stderr, file or off to disable http logging.
|
||||||
|
|
||||||
|
Default: `stdout`
|
||||||
|
|
||||||
|
### echo
|
||||||
|
|
||||||
|
Echo has its own logging which usually is unnecessary, which is why it is disabled by default. Possible values are stdout, stderr, file or off to disable standard logging.
|
||||||
|
|
||||||
|
Default: `off`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## ratelimit
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### enabled
|
||||||
|
|
||||||
|
whether or not to enable the rate limit
|
||||||
|
|
||||||
|
Default: `false`
|
||||||
|
|
||||||
|
### kind
|
||||||
|
|
||||||
|
The kind on which rates are based. Can be either "user" for a rate limit per user or "ip" for an ip-based rate limit.
|
||||||
|
|
||||||
|
Default: `user`
|
||||||
|
|
||||||
|
### period
|
||||||
|
|
||||||
|
The time period in seconds for the limit
|
||||||
|
|
||||||
|
Default: `60`
|
||||||
|
|
||||||
|
### limit
|
||||||
|
|
||||||
|
The max number of requests a user is allowed to do in the configured time period
|
||||||
|
|
||||||
|
Default: `100`
|
||||||
|
|
||||||
|
### store
|
||||||
|
|
||||||
|
The store where the limit counter for each user is stored.
|
||||||
|
Possible values are "keyvalue", "memory" or "redis".
|
||||||
|
When choosing "keyvalue" this setting follows the one configured in the "keyvalue" section.
|
||||||
|
|
||||||
|
Default: `keyvalue`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## files
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### basepath
|
||||||
|
|
||||||
|
The path where files are stored
|
||||||
|
|
||||||
|
Default: `./files`
|
||||||
|
|
||||||
|
### maxsize
|
||||||
|
|
||||||
|
The maximum size of a file, as a human-readable string.
|
||||||
|
Warning: The max size is limited to 2^64-1 bytes due to the underlying datatype
|
||||||
|
|
||||||
|
Default: `20MB`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## migration
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### wunderlist
|
||||||
|
|
||||||
|
These are the settings for the wunderlist migrator
|
||||||
|
|
||||||
|
Default: `<empty>`
|
||||||
|
|
||||||
|
### todoist
|
||||||
|
|
||||||
|
Default: `<empty>`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## avatar
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### gravatarexpiration
|
||||||
|
|
||||||
|
When using gravatar, this is the duration in seconds until a cached gravatar user avatar expires
|
||||||
|
|
||||||
|
Default: `3600`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## backgrounds
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### enabled
|
||||||
|
|
||||||
|
Whether to enable backgrounds for lists at all.
|
||||||
|
|
||||||
|
Default: `true`
|
||||||
|
|
||||||
|
### providers
|
||||||
|
|
||||||
|
Default: `<empty>`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## legal
|
||||||
|
|
||||||
|
Legal urls
|
||||||
|
Will be shown in the frontend if configured here
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### imprinturl
|
||||||
|
|
||||||
|
Default: `<empty>`
|
||||||
|
|
||||||
|
### privacyurl
|
||||||
|
|
||||||
|
Default: `<empty>`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## keyvalue
|
||||||
|
|
||||||
|
Key Value Storage settings
|
||||||
|
The Key Value Storage is used for different kinds of things like metrics and a few cache systems.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### type
|
||||||
|
|
||||||
|
The type of the storage backend. Can be either "memory" or "redis". If "redis" is chosen it needs to be configured separately.
|
||||||
|
|
||||||
|
Default: `memory`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## auth
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### local
|
||||||
|
|
||||||
|
Local authentication will let users log in and register (if enabled) through the db.
|
||||||
|
This is the default auth mechanism and does not require any additional configuration.
|
||||||
|
|
||||||
|
Default: `<empty>`
|
||||||
|
|
||||||
|
### openid
|
||||||
|
|
||||||
|
OpenID configuration will allow users to authenticate through a third-party OpenID Connect compatible provider.<br/>
|
||||||
|
The provider needs to support the `openid`, `profile` and `email` scopes.<br/>
|
||||||
|
**Note:** The frontend expects to be redirected after authentication by the third party
|
||||||
|
to <frontend-url>/auth/openid/<auth key>. Please make sure to configure the redirect url with your third party
|
||||||
|
auth service accordingly if you're using the default vikunja frontend.
|
||||||
|
Take a look at the [default config file](https://kolaente.dev/vikunja/api/src/branch/master/config.yml.sample) for more information about how to configure openid authentication.
|
||||||
|
|
||||||
|
Default: `<empty>`
|
||||||
|
|
||||||
backgrounds:
|
|
||||||
# Whether to enable backgrounds for lists at all.
|
|
||||||
enabled: true
|
|
||||||
providers:
|
|
||||||
upload:
|
|
||||||
      # Whether to enable uploaded list backgrounds
|
|
||||||
enabled: true
|
|
||||||
unsplash:
|
|
||||||
# Whether to enable setting backgrounds from unsplash as list backgrounds
|
|
||||||
enabled: false
|
|
||||||
# You need to create an application for your installation at https://unsplash.com/oauth/applications/new
|
|
||||||
# and set the access token below.
|
|
||||||
accesstoken:
|
|
||||||
# The unsplash application id is only used for pingback and required as per their api guidelines.
|
|
||||||
# You can find the Application ID in the dashboard for your API application. It should be a numeric ID.
|
|
||||||
# It will only show in the UI if your application has been approved for Enterprise usage, therefore if
|
|
||||||
# you’re in Demo mode, you can also find the ID in the URL at the end: https://unsplash.com/oauth/applications/:application_id
|
|
||||||
applicationid:
|
|
||||||
{{< /highlight >}}
|
|
||||||
|
|
|
@ -17,6 +17,8 @@ We'll use [docker compose](https://docs.docker.com/compose/) to make handling th
|
||||||
|
|
||||||
> If you have any issues setting up vikunja, please don't hesitate to reach out to us via [matrix](https://riot.im/app/#/room/!dCRiCiLaCCFVNlDnYs:matrix.org?via=matrix.org), the [community forum](https://community.vikunja.io/) or even [email](mailto:hello@vikunja.io).
|
> If you have any issues setting up vikunja, please don't hesitate to reach out to us via [matrix](https://riot.im/app/#/room/!dCRiCiLaCCFVNlDnYs:matrix.org?via=matrix.org), the [community forum](https://community.vikunja.io/) or even [email](mailto:hello@vikunja.io).
|
||||||
|
|
||||||
|
{{< table_of_contents >}}
|
||||||
|
|
||||||
## Preparations (optional)
|
## Preparations (optional)
|
||||||
|
|
||||||
Create a directory for the project where all data and the compose file will live in.
|
Create a directory for the project where all data and the compose file will live in.
|
||||||
|
@ -31,6 +33,7 @@ version: '3'
|
||||||
services:
|
services:
|
||||||
db:
|
db:
|
||||||
image: mariadb:10
|
image: mariadb:10
|
||||||
|
command: --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
|
||||||
environment:
|
environment:
|
||||||
MYSQL_ROOT_PASSWORD: supersecret
|
MYSQL_ROOT_PASSWORD: supersecret
|
||||||
MYSQL_DATABASE: vikunja
|
MYSQL_DATABASE: vikunja
|
||||||
|
@ -87,15 +90,22 @@ server {
|
||||||
proxy_pass http://frontend:80;
|
proxy_pass http://frontend:80;
|
||||||
}
|
}
|
||||||
|
|
||||||
location /api/ {
|
location ~* ^/(api|dav|\.well-known)/ {
|
||||||
proxy_pass http://api:3456;
|
proxy_pass http://api:3456;
|
||||||
|
client_max_body_size 20M;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
{{< /highlight >}}
|
{{< /highlight >}}
|
||||||
|
|
||||||
This is a simple proxy configuration which will forward all requests to `/api/` to the api container and everything else to the frontend.
|
This is a simple proxy configuration which will forward all requests to `/api/` to the api container and everything else to the frontend.
|
||||||
|
|
||||||
**Note:** Even if you want to make your installation available under a different port, you don't need to change anything in this configuration.
|
<div class="notification is-info">
|
||||||
|
<b>NOTE:</b> Even if you want to make your installation available under a different port, you don't need to change anything in this configuration.
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="notification is-warning">
|
||||||
|
<b>NOTE:</b> If you change the max upload size in Vikunja's settings, you'll need to also change the <code>client_max_body_size</code> in the nginx proxy config.
|
||||||
|
</div>
|
||||||
|
|
||||||
## Run it
|
## Run it
|
||||||
|
|
||||||
|
|
|
@ -15,7 +15,15 @@ It uses an nginx container or traefik on the host to proxy backend and frontend
|
||||||
|
|
||||||
For all available configuration options, see [configuration]({{< ref "config.md">}}).
|
For all available configuration options, see [configuration]({{< ref "config.md">}}).
|
||||||
|
|
||||||
### Redis
|
<div class="notification is-warning">
|
||||||
|
<b>NOTE:</b> If you intend to run Vikunja with mysql and/or to use non-latin characters
|
||||||
|
<a href="{{< ref "utf-8.md">}}">make sure your db is utf-8 compatible</a>.<br/>
|
||||||
|
All examples on this page already reflect this and do not require additional work.
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{{< table_of_contents >}}
|
||||||
|
|
||||||
|
## Redis
|
||||||
|
|
||||||
To use redis, you'll need to add this to the config examples below:
|
To use redis, you'll need to add this to the config examples below:
|
||||||
|
|
||||||
|
@ -68,7 +76,7 @@ services:
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
labels:
|
labels:
|
||||||
- "traefik.enable=true"
|
- "traefik.enable=true"
|
||||||
- "traefik.http.routers.vikunja-api.rule=Host(`vikunja.example.com`) && PathPrefix(`/api/v1`)"
|
- "traefik.http.routers.vikunja-api.rule=Host(`vikunja.example.com`) && PathPrefix(`/api/v1`, `/dav/`, `/.well-known/`)"
|
||||||
- "traefik.http.routers.vikunja-api.entrypoints=https"
|
- "traefik.http.routers.vikunja-api.entrypoints=https"
|
||||||
- "traefik.http.routers.vikunja-api.tls.certResolver=acme"
|
- "traefik.http.routers.vikunja-api.tls.certResolver=acme"
|
||||||
frontend:
|
frontend:
|
||||||
|
@ -84,6 +92,7 @@ services:
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
db:
|
db:
|
||||||
image: mariadb:10
|
image: mariadb:10
|
||||||
|
command: --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
|
||||||
environment:
|
environment:
|
||||||
MYSQL_ROOT_PASSWORD: supersupersecret
|
MYSQL_ROOT_PASSWORD: supersupersecret
|
||||||
MYSQL_USER: vikunja
|
MYSQL_USER: vikunja
|
||||||
|
@ -126,7 +135,7 @@ services:
|
||||||
labels:
|
labels:
|
||||||
- "traefik.docker.network=web"
|
- "traefik.docker.network=web"
|
||||||
- "traefik.enable=true"
|
- "traefik.enable=true"
|
||||||
- "traefik.frontend.rule=Host:vikunja.example.com;PathPrefix:/api/v1"
|
- "traefik.frontend.rule=Host:vikunja.example.com;PathPrefix:/api/v1,/dav/,/.well-known"
|
||||||
- "traefik.port=3456"
|
- "traefik.port=3456"
|
||||||
- "traefik.protocol=http"
|
- "traefik.protocol=http"
|
||||||
frontend:
|
frontend:
|
||||||
|
@ -143,6 +152,7 @@ services:
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
db:
|
db:
|
||||||
image: mariadb:10
|
image: mariadb:10
|
||||||
|
command: --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
|
||||||
environment:
|
environment:
|
||||||
MYSQL_ROOT_PASSWORD: supersupersecret
|
MYSQL_ROOT_PASSWORD: supersupersecret
|
||||||
MYSQL_USER: vikunja
|
MYSQL_USER: vikunja
|
||||||
|
@ -171,12 +181,17 @@ server {
|
||||||
proxy_pass http://frontend:80;
|
proxy_pass http://frontend:80;
|
||||||
}
|
}
|
||||||
|
|
||||||
location /api/ {
|
location ~* ^/(api|dav|\.well-known)/ {
|
||||||
proxy_pass http://api:3456;
|
proxy_pass http://api:3456;
|
||||||
|
client_max_body_size 20M;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
{{< /highlight >}}
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
<div class="notification is-warning">
|
||||||
|
<b>NOTE:</b> If you change the max upload size in Vikunja's settings, you'll need to also change the <code>client_max_body_size</code> in the nginx proxy config.
|
||||||
|
</div>
|
||||||
|
|
||||||
`docker-compose.yml` config:
|
`docker-compose.yml` config:
|
||||||
|
|
||||||
{{< highlight yaml >}}
|
{{< highlight yaml >}}
|
||||||
|
@ -185,6 +200,7 @@ version: '3'
|
||||||
services:
|
services:
|
||||||
db:
|
db:
|
||||||
image: mariadb:10
|
image: mariadb:10
|
||||||
|
command: --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
|
||||||
environment:
|
environment:
|
||||||
MYSQL_ROOT_PASSWORD: supersecret
|
MYSQL_ROOT_PASSWORD: supersecret
|
||||||
MYSQL_DATABASE: vikunja
|
MYSQL_DATABASE: vikunja
|
||||||
|
@ -226,6 +242,8 @@ You will need the following `Caddyfile` on your host (or elsewhere, but then you
|
||||||
{{< highlight conf >}}
|
{{< highlight conf >}}
|
||||||
vikunja.example.com {
|
vikunja.example.com {
|
||||||
reverse_proxy /api/* api:3456
|
reverse_proxy /api/* api:3456
|
||||||
|
reverse_proxy /.well-known/* api:3456
|
||||||
|
reverse_proxy /dav/* api:3456
|
||||||
reverse_proxy frontend:80
|
reverse_proxy frontend:80
|
||||||
}
|
}
|
||||||
{{< /highlight >}}
|
{{< /highlight >}}
|
||||||
|
@ -238,6 +256,7 @@ version: '3'
|
||||||
services:
|
services:
|
||||||
db:
|
db:
|
||||||
image: mariadb:10
|
image: mariadb:10
|
||||||
|
command: --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
|
||||||
environment:
|
environment:
|
||||||
MYSQL_ROOT_PASSWORD: supersecret
|
MYSQL_ROOT_PASSWORD: supersecret
|
||||||
MYSQL_DATABASE: vikunja
|
MYSQL_DATABASE: vikunja
|
||||||
|
|
|
@ -10,6 +10,13 @@ menu:
|
||||||
|
|
||||||
# Backend
|
# Backend
|
||||||
|
|
||||||
|
<div class="notification is-warning">
|
||||||
|
<b>NOTE:</b> If you intend to run Vikunja with mysql and/or to use non-latin characters
|
||||||
|
<a href="{{< ref "utf-8.md">}}">make sure your db is utf-8 compatible</a>.
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{{< table_of_contents >}}
|
||||||
|
|
||||||
## Install from binary
|
## Install from binary
|
||||||
|
|
||||||
Download a copy of Vikunja from the [download page](https://vikunja.io/en/download/) for your architecture.
|
Download a copy of Vikunja from the [download page](https://vikunja.io/en/download/) for your architecture.
|
||||||
|
@ -148,6 +155,7 @@ services:
|
||||||
- ./files:/app/vikunja/files
|
- ./files:/app/vikunja/files
|
||||||
db:
|
db:
|
||||||
image: mariadb:10
|
image: mariadb:10
|
||||||
|
command: --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
|
||||||
environment:
|
environment:
|
||||||
MYSQL_ROOT_PASSWORD: supersecret
|
MYSQL_ROOT_PASSWORD: supersecret
|
||||||
MYSQL_DATABASE: vikunja
|
MYSQL_DATABASE: vikunja
|
||||||
|
@ -170,6 +178,100 @@ dpkg -i vikunja.deb
|
||||||
This will install the backend to `/opt/vikunja`.
|
This will install the backend to `/opt/vikunja`.
|
||||||
To configure it, use the config file in `/etc/vikunja/config.yml`.
|
To configure it, use the config file in `/etc/vikunja/config.yml`.
|
||||||
|
|
||||||
|
## FreeBSD / FreeNAS
|
||||||
|
|
||||||
|
Unfortunately, we currently can't provide pre-built binaries for FreeBSD.
|
||||||
|
As a workaround, it is possible to compile vikunja for FreeBSD directly on a FreeBSD machine, a guide is available below:
|
||||||
|
|
||||||
|
*Thanks to HungrySkeleton who originally created this guide [in the forum](https://community.vikunja.io/t/freebsd-support/69/11).*
|
||||||
|
|
||||||
|
### Jail Setup
|
||||||
|
|
||||||
|
1. Create jail named ```vikunja```
|
||||||
|
2. Set jail properties to 'auto start'
|
||||||
|
3. Mount storage (```/mnt``` to ```jailData/vikunja```)
|
||||||
|
4. Start jail & SSH into it
|
||||||
|
|
||||||
|
### Installing packages
|
||||||
|
|
||||||
|
{{< highlight bash >}}
|
||||||
|
pkg update && pkg upgrade -y
|
||||||
|
pkg install nano git go gmake
|
||||||
|
go install github.com/magefile/mage
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
### Clone vikunja repo
|
||||||
|
|
||||||
|
{{< highlight bash >}}
|
||||||
|
mkdir /mnt/GO/code.vikunja.io
|
||||||
|
cd /mnt/GO/code.vikunja.io
|
||||||
|
git clone https://code.vikunja.io/api
|
||||||
|
cd /mnt/GO/code.vikunja.io/api
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
### Compile binaries
|
||||||
|
|
||||||
|
{{< highlight bash >}}
|
||||||
|
go install
|
||||||
|
mage build
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
### Create folder to install backend server into
|
||||||
|
|
||||||
|
{{< highlight bash >}}
|
||||||
|
mkdir /mnt/backend
|
||||||
|
cp /mnt/GO/code.vikunja.io/api/vikunja /mnt/backend/vikunja
|
||||||
|
cd /mnt/backend
|
||||||
|
chmod +x /mnt/backend/vikunja
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
### Set vikunja to boot on startup
|
||||||
|
|
||||||
|
{{< highlight bash >}}
|
||||||
|
nano /etc/rc.d/vikunja
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
Then paste into the file:
|
||||||
|
|
||||||
|
{{< highlight bash >}}
|
||||||
|
#!/bin/sh
|
||||||
|
|
||||||
|
. /etc/rc.subr
|
||||||
|
|
||||||
|
name=vikunja
|
||||||
|
rcvar=vikunja_enable
|
||||||
|
|
||||||
|
command="/mnt/backend/${name}"
|
||||||
|
|
||||||
|
load_rc_config $name
|
||||||
|
run_rc_command "$1"
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
Save and exit. Then execute:
|
||||||
|
|
||||||
|
{{< highlight bash >}}
|
||||||
|
chmod +x /etc/rc.d/vikunja
|
||||||
|
nano /etc/rc.conf
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
Then add line to bottom of file:
|
||||||
|
|
||||||
|
{{< highlight bash >}}
|
||||||
|
vikunja_enable="YES"
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
Test vikunja now works with
|
||||||
|
|
||||||
|
{{< highlight bash >}}
|
||||||
|
service vikunja start
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
The API is now available through IP:
|
||||||
|
|
||||||
|
```
|
||||||
|
192.168.1.XXX:3456
|
||||||
|
```
|
||||||
|
|
||||||
## Configuration
|
## Configuration
|
||||||
|
|
||||||
See [available configuration options]({{< ref "config.md">}}).
|
See [available configuration options]({{< ref "config.md">}}).
|
||||||
|
|
|
@ -17,6 +17,8 @@ Unzip them and store them somewhere your server can access them.
|
||||||
|
|
||||||
You also need to configure a rewrite condition to internally redirect all requests to `index.html` which handles all urls.
|
You also need to configure a rewrite condition to internally redirect all requests to `index.html` which handles all urls.
|
||||||
|
|
||||||
|
{{< table_of_contents >}}
|
||||||
|
|
||||||
## API URL configuration
|
## API URL configuration
|
||||||
|
|
||||||
By default, the frontend assumes it can reach the api at `/api/v1` relative to the frontend url.
|
By default, the frontend assumes it can reach the api at `/api/v1` relative to the frontend url.
|
||||||
|
|
|
@ -30,6 +30,7 @@ This document provides an overview and instructions for the different methods.
|
||||||
* [Docker]({{< ref "install-backend.md#docker">}})
|
* [Docker]({{< ref "install-backend.md#docker">}})
|
||||||
* [Debian packages]({{< ref "install-backend.md#debian-packages">}})
|
* [Debian packages]({{< ref "install-backend.md#debian-packages">}})
|
||||||
* [Configuration]({{< ref "config.md">}})
|
* [Configuration]({{< ref "config.md">}})
|
||||||
|
* [UTF-8 Settings]({{< ref "utf-8.md">}})
|
||||||
* [Frontend]({{< ref "install-frontend.md">}})
|
* [Frontend]({{< ref "install-frontend.md">}})
|
||||||
* [Docker]({{< ref "install-frontend.md#docker">}})
|
* [Docker]({{< ref "install-frontend.md#docker">}})
|
||||||
* [NGINX]({{< ref "install-frontend.md#nginx">}})
|
* [NGINX]({{< ref "install-frontend.md#nginx">}})
|
||||||
|
|
|
@ -13,6 +13,8 @@ menu:
|
||||||
These examples assume you have an instance of the backend running on your server listening on port `3456`.
|
These examples assume you have an instance of the backend running on your server listening on port `3456`.
|
||||||
If you've changed this setting, you need to update the server configurations accordingly.
|
If you've changed this setting, you need to update the server configurations accordingly.
|
||||||
|
|
||||||
|
{{< table_of_contents >}}
|
||||||
|
|
||||||
## NGINX
|
## NGINX
|
||||||
|
|
||||||
Below are two example configurations which you can put in your `nginx.conf`:
|
Below are two example configurations which you can put in your `nginx.conf`:
|
||||||
|
@ -43,12 +45,17 @@ server {
|
||||||
index index.html index.htm;
|
index index.html index.htm;
|
||||||
}
|
}
|
||||||
|
|
||||||
location /api/ {
|
location ~* ^/(api|dav|\.well-known)/ {
|
||||||
proxy_pass http://localhost:3456;
|
proxy_pass http://localhost:3456;
|
||||||
|
client_max_body_size 20M;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
{{< /highlight >}}
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
<div class="notification is-warning">
|
||||||
|
<b>NOTE:</b> If you change the max upload size in Vikunja's settings, you'll need to also change the <code>client_max_body_size</code> in the nginx proxy config.
|
||||||
|
</div>
|
||||||
|
|
||||||
### without gzip
|
### without gzip
|
||||||
|
|
||||||
{{< highlight conf >}}
|
{{< highlight conf >}}
|
||||||
|
@ -62,12 +69,17 @@ server {
|
||||||
index index.html index.htm;
|
index index.html index.htm;
|
||||||
}
|
}
|
||||||
|
|
||||||
location /api/ {
|
location ~* ^/(api|dav|\.well-known)/ {
|
||||||
proxy_pass http://localhost:3456;
|
proxy_pass http://localhost:3456;
|
||||||
|
client_max_body_size 20M;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
{{< /highlight >}}
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
<div class="notification is-warning">
|
||||||
|
<b>NOTE:</b> If you change the max upload size in Vikunja's settings, you'll need to also change the <code>client_max_body_size</code> in the nginx proxy config.
|
||||||
|
</div>
|
||||||
|
|
||||||
## Apache
|
## Apache
|
||||||
|
|
||||||
Put the following config in `cat /etc/apache2/sites-available/vikunja.conf`:
|
Put the following config in `cat /etc/apache2/sites-available/vikunja.conf`:
|
||||||
|
@ -82,14 +94,18 @@ Put the following config in `cat /etc/apache2/sites-available/vikunja.conf`:
|
||||||
</Proxy>
|
</Proxy>
|
||||||
ProxyPass /api http://localhost:3456/api
|
ProxyPass /api http://localhost:3456/api
|
||||||
ProxyPassReverse /api http://localhost:3456/api
|
ProxyPassReverse /api http://localhost:3456/api
|
||||||
|
ProxyPass /dav http://localhost:3456/dav
|
||||||
|
ProxyPassReverse /dav http://localhost:3456/dav
|
||||||
|
ProxyPass /.well-known http://localhost:3456/.well-known
|
||||||
|
ProxyPassReverse /.well-known http://localhost:3456/.well-known
|
||||||
|
|
||||||
DocumentRoot /var/www/html
|
DocumentRoot /var/www/html
|
||||||
RewriteEngine On
|
RewriteEngine On
|
||||||
RewriteRule ^\/?(config\.json|favicon\.ico|css|fonts|images|img|js|api) - [L]
|
RewriteRule ^\/?(config\.json|favicon\.ico|css|fonts|images|img|js|api|dav|\.well-known) - [L]
|
||||||
RewriteRule ^(.*)$ /index.html [QSA,L]
|
RewriteRule ^(.*)$ /index.html [QSA,L]
|
||||||
</VirtualHost>
|
</VirtualHost>
|
||||||
{{< /highlight >}}
|
{{< /highlight >}}
|
||||||
|
|
||||||
**Note:** The apache modules `proxy` and `proxy_http` must be enabled for this.
|
**Note:** The apache modules `proxy`, `proxy_http` and `rewrite` must be enabled for this.
|
||||||
|
|
||||||
For more details see the [frontend apache configuration]({{< ref "install-frontend.md#apache">}}).
|
For more details see the [frontend apache configuration]({{< ref "install-frontend.md#apache">}}).
|
|
@ -0,0 +1,108 @@
|
||||||
|
---
|
||||||
|
date: "2020-07-06:00:00+02:00"
|
||||||
|
title: "UTF-8 Settings"
|
||||||
|
draft: false
|
||||||
|
type: "doc"
|
||||||
|
menu:
|
||||||
|
sidebar:
|
||||||
|
parent: "setup"
|
||||||
|
---
|
||||||
|
|
||||||
|
# UTF-8 Settings
|
||||||
|
|
||||||
|
Vikunja itself is always fully capable of handling utf-8 characters.
|
||||||
|
However, your database might be not.
|
||||||
|
Vikunja itself will work just fine until you want to use non-latin characters in your tasks/lists/etc.
|
||||||
|
|
||||||
|
On this page, you will find information about how to fully ensure non-latin characters like aüäß or emojis work
|
||||||
|
with your installation.
|
||||||
|
|
||||||
|
{{< table_of_contents >}}
|
||||||
|
|
||||||
|
## Postgresql & SQLite
|
||||||
|
|
||||||
|
Postgresql and SQLite should handle utf-8 just fine - If you discover any issues nonetheless, please
|
||||||
|
[drop us a message](https://vikunja.io/contact/).
|
||||||
|
|
||||||
|
## MySQL
|
||||||
|
|
||||||
|
MySQL is not able to handle utf-8 by default.
|
||||||
|
To fix this, follow the steps below.
|
||||||
|
|
||||||
|
To find out if your db supports utf-8, run the following in a shell or similar, assuming the database
|
||||||
|
you're using for vikunja is called `vikunja`:
|
||||||
|
|
||||||
|
{{< highlight sql >}}
|
||||||
|
SELECT default_character_set_name FROM information_schema.SCHEMATA WHERE schema_name = 'vikunja';
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
This will get you a result like the following:
|
||||||
|
|
||||||
|
```
|
||||||
|
+----------------------------+
|
||||||
|
| default_character_set_name |
|
||||||
|
+----------------------------+
|
||||||
|
| latin1 |
|
||||||
|
+----------------------------+
|
||||||
|
1 row in set (0.001 sec)
|
||||||
|
```
|
||||||
|
|
||||||
|
The charset `latin1` means the db is encoded in the `latin1` encoding which does not support utf-8 characters.
|
||||||
|
|
||||||
|
(The following guide is based on [this thread from stackoverflow](https://dba.stackexchange.com/a/104866))
|
||||||
|
|
||||||
|
### 0. Backup your database
|
||||||
|
|
||||||
|
Before attempting any conversion, please [back up your database]({{< ref "backups.md">}}).
|
||||||
|
|
||||||
|
### 1. Create a pre-conversion script
|
||||||
|
|
||||||
|
Copy the following sql statements in a file called `preAlterTables.sql` and replace all occurences of `vikunja` with
|
||||||
|
the name of your database:
|
||||||
|
|
||||||
|
{{< highlight sql >}}
|
||||||
|
use information_schema;
|
||||||
|
SELECT concat("ALTER DATABASE `",table_schema,"` CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci;") as _sql
|
||||||
|
FROM `TABLES` where table_schema like 'vikunja' and TABLE_TYPE='BASE TABLE' group by table_schema;
|
||||||
|
SELECT concat("ALTER TABLE `",table_schema,"`.`",table_name,"` CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;") as _sql
|
||||||
|
FROM `TABLES` where table_schema like 'vikunja' and TABLE_TYPE='BASE TABLE' group by table_schema, table_name;
|
||||||
|
SELECT concat("ALTER TABLE `",table_schema,"`.`",table_name, "` CHANGE `",column_name,"` `",column_name,"` ",data_type,"(",character_maximum_length,") CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci",IF(is_nullable="YES"," NULL"," NOT NULL"),";") as _sql
|
||||||
|
FROM `COLUMNS` where table_schema like 'vikunja' and data_type in ('varchar','char');
|
||||||
|
SELECT concat("ALTER TABLE `",table_schema,"`.`",table_name, "` CHANGE `",column_name,"` `",column_name,"` ",data_type," CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci",IF(is_nullable="YES"," NULL"," NOT NULL"),";") as _sql
|
||||||
|
FROM `COLUMNS` where table_schema like 'vikunja' and data_type in ('text','tinytext','mediumtext','longtext');
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
### 2. Run the pre-conversion script
|
||||||
|
|
||||||
|
Running this will create the actual migration script for your particular database structure and save it in a file called `alterTables.sql`:
|
||||||
|
|
||||||
|
{{< highlight bash >}}
|
||||||
|
mysql -uroot < preAlterTables.sql | egrep '^ALTER' > alterTables.sql
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
### 3. Convert the database
|
||||||
|
|
||||||
|
At this point converting is just a matter of executing the previously generated sql script:
|
||||||
|
|
||||||
|
{{< highlight bash >}}
|
||||||
|
mysql -uroot < alterTables.sql
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
### 4. Verify it was successfully converted
|
||||||
|
|
||||||
|
If everything worked as intended, your db collation should now look like this:
|
||||||
|
|
||||||
|
{{< highlight sql >}}
|
||||||
|
SELECT default_character_set_name FROM information_schema.SCHEMATA WHERE schema_name = 'vikunja';
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
Should get you:
|
||||||
|
|
||||||
|
```
|
||||||
|
+----------------------------+
|
||||||
|
| default_character_set_name |
|
||||||
|
+----------------------------+
|
||||||
|
| utf8mb4 |
|
||||||
|
+----------------------------+
|
||||||
|
1 row in set (0.001 sec)
|
||||||
|
```
|
|
@ -16,6 +16,8 @@ menu:
|
||||||
|
|
||||||
Vikunja supports managing tasks via the [caldav VTODO](https://tools.ietf.org/html/rfc5545#section-3.6.2) extension.
|
Vikunja supports managing tasks via the [caldav VTODO](https://tools.ietf.org/html/rfc5545#section-3.6.2) extension.
|
||||||
|
|
||||||
|
{{< table_of_contents >}}
|
||||||
|
|
||||||
## URLs
|
## URLs
|
||||||
|
|
||||||
All urls are located under the `/dav` subspace.
|
All urls are located under the `/dav` subspace.
|
||||||
|
@ -64,13 +66,15 @@ Vikunja **currently does not** support these properties:
|
||||||
|
|
||||||
## Tested Clients
|
## Tested Clients
|
||||||
|
|
||||||
#### Working
|
### Working
|
||||||
|
|
||||||
* [Evolution](https://wiki.gnome.org/Apps/Evolution/)
|
* [Evolution](https://wiki.gnome.org/Apps/Evolution/)
|
||||||
|
* [OpenTasks](https://opentasks.app/) + [DAVx⁵](https://www.davx5.com/)
|
||||||
|
|
||||||
#### Not working
|
### Not working
|
||||||
|
|
||||||
* [Tasks (Android)](https://tasks.org/)
|
* [Tasks (Android)](https://tasks.org/)
|
||||||
|
* [Thunderbird (68)](https://www.thunderbird.net/)
|
||||||
|
|
||||||
## Dev logs
|
## Dev logs
|
||||||
|
|
||||||
|
|
|
@ -13,8 +13,12 @@ menu:
|
||||||
You can interact with Vikunja using its `cli` interface.
|
You can interact with Vikunja using its `cli` interface.
|
||||||
The following commands are available:
|
The following commands are available:
|
||||||
|
|
||||||
|
* [dump](#dump)
|
||||||
* [help](#help)
|
* [help](#help)
|
||||||
* [migrate](#migrate)
|
* [migrate](#migrate)
|
||||||
|
* [restore](#restore)
|
||||||
|
* [testmail](#testmail)
|
||||||
|
* [user](#user)
|
||||||
* [version](#version)
|
* [version](#version)
|
||||||
* [web](#web)
|
* [web](#web)
|
||||||
|
|
||||||
|
@ -22,6 +26,16 @@ If you don't specify a command, the [`web`](#web) command will be executed.
|
||||||
|
|
||||||
All commands use the same standard [config file]({{< ref "../setup/config.md">}}).
|
All commands use the same standard [config file]({{< ref "../setup/config.md">}}).
|
||||||
|
|
||||||
|
### `dump`
|
||||||
|
|
||||||
|
Creates a zip file with all vikunja-related files.
|
||||||
|
This includes config, version, all files and the full database.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
{{< highlight bash >}}
|
||||||
|
$ vikunja dump
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
### `help`
|
### `help`
|
||||||
|
|
||||||
Shows more detailed help about any command.
|
Shows more detailed help about any command.
|
||||||
|
@ -63,6 +77,91 @@ $ vikunja migrate rollback [flags]
|
||||||
Flags:
|
Flags:
|
||||||
* `-n`, `--name` string: The id of the migration you want to roll back until.
|
* `-n`, `--name` string: The id of the migration you want to roll back until.
|
||||||
|
|
||||||
|
### `restore`
|
||||||
|
|
||||||
|
Restores a previously created dump from a zip file, see `dump`.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
{{< highlight bash >}}
|
||||||
|
$ vikunja restore <path to dump zip file>
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
### `testmail`
|
||||||
|
|
||||||
|
Sends a test mail using the configured smtp connection.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
{{< highlight bash >}}
|
||||||
|
$ vikunja testmail <email to send the test mail to>
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
### `user`
|
||||||
|
|
||||||
|
Bundles a few commands to manage users.
|
||||||
|
|
||||||
|
#### `user change-status`
|
||||||
|
|
||||||
|
Enable or disable a user. Will toggle the current status if no flag (`--enable` or `--disable`) is provided.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
{{< highlight bash >}}
|
||||||
|
$ vikunja user change-status <user id> <flags>
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
Flags:
|
||||||
|
* `-d`, `--disable`: Disable the user.
|
||||||
|
* `-e`, `--enable`: Enable the user.
|
||||||
|
|
||||||
|
#### `user create`
|
||||||
|
|
||||||
|
Create a new user.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
{{< highlight bash >}}
|
||||||
|
$ vikunja user create <flags>
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
Flags:
|
||||||
|
* `-a`, `--avatar-provider`: The avatar provider of the new user. Optional.
|
||||||
|
* `-e`, `--email`: The email address of the new user.
|
||||||
|
* `-p`, `--password`: The password of the new user. You will be asked to enter it if not provided through the flag.
|
||||||
|
* `-u`, `--username`: The username of the new user.
|
||||||
|
|
||||||
|
#### `user list`
|
||||||
|
|
||||||
|
Shows a list of all users.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
{{< highlight bash >}}
|
||||||
|
$ vikunja user list
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
#### `user reset-password`
|
||||||
|
|
||||||
|
Reset a users password, either through mailing them a reset link or directly.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
{{< highlight bash >}}
|
||||||
|
$ vikunja user reset-password <flags>
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
Flags:
|
||||||
|
* `-d`, `--direct`: If provided, reset the password directly instead of sending the user a reset mail.
|
||||||
|
* `-p`, `--password`: The new password of the user. Only used in combination with --direct. You will be asked to enter it if not provided through the flag.
|
||||||
|
|
||||||
|
#### `user update`
|
||||||
|
|
||||||
|
Update an existing user.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
{{< highlight bash >}}
|
||||||
|
$ vikunja user update <user id>
|
||||||
|
{{< /highlight >}}
|
||||||
|
|
||||||
|
Flags:
|
||||||
|
* `-a`, `--avatar-provider`: The new avatar provider of the new user.
|
||||||
|
* `-e`, `--email`: The new email address of the user.
|
||||||
|
* `-u`, `--username`: The new username of the user.
|
||||||
|
|
||||||
### `version`
|
### `version`
|
||||||
|
|
||||||
|
@ -82,22 +181,3 @@ Usage:
|
||||||
{{< highlight bash >}}
|
{{< highlight bash >}}
|
||||||
$ vikunja web
|
$ vikunja web
|
||||||
{{< /highlight >}}
|
{{< /highlight >}}
|
||||||
|
|
||||||
### `dump`
|
|
||||||
|
|
||||||
Creates a zip file with all vikunja-related files.
|
|
||||||
This includes config, version, all files and the full database.
|
|
||||||
|
|
||||||
Usage:
|
|
||||||
{{< highlight bash >}}
|
|
||||||
$ vikunja dump
|
|
||||||
{{< /highlight >}}
|
|
||||||
|
|
||||||
### `restore`
|
|
||||||
|
|
||||||
Restores a previously created dump from a zip file, see `dump`.
|
|
||||||
|
|
||||||
Usage:
|
|
||||||
{{< highlight bash >}}
|
|
||||||
$ vikunja restore <path to dump zip file>
|
|
||||||
{{< /highlight >}}
|
|
||||||
|
|
|
@ -12,13 +12,15 @@ menu:
|
||||||
|
|
||||||
This document describes the different errors Vikunja can return.
|
This document describes the different errors Vikunja can return.
|
||||||
|
|
||||||
### Generic
|
{{< table_of_contents >}}
|
||||||
|
|
||||||
|
## Generic
|
||||||
|
|
||||||
| ErrorCode | HTTP Status Code | Description |
|
| ErrorCode | HTTP Status Code | Description |
|
||||||
|-----------|------------------|-------------|
|
|-----------|------------------|-------------|
|
||||||
| 0001 | 403 | Generic forbidden error. |
|
| 0001 | 403 | Generic forbidden error. |
|
||||||
|
|
||||||
### User
|
## User
|
||||||
|
|
||||||
| ErrorCode | HTTP Status Code | Description |
|
| ErrorCode | HTTP Status Code | Description |
|
||||||
|-----------|------------------|-------------|
|
|-----------|------------------|-------------|
|
||||||
|
@ -37,15 +39,16 @@ This document describes the different errors Vikunja can return.
|
||||||
| 1015 | 412 | Totp is already enabled for this user. |
|
| 1015 | 412 | Totp is already enabled for this user. |
|
||||||
| 1016 | 412 | Totp is not enabled for this user. |
|
| 1016 | 412 | Totp is not enabled for this user. |
|
||||||
| 1017 | 412 | The provided Totp passcode is invalid. |
|
| 1017 | 412 | The provided Totp passcode is invalid. |
|
||||||
|
| 1018 | 412 | The provided user avatar provider type setting is invalid. |
|
||||||
|
|
||||||
### Validation
|
## Validation
|
||||||
|
|
||||||
| ErrorCode | HTTP Status Code | Description |
|
| ErrorCode | HTTP Status Code | Description |
|
||||||
|-----------|------------------|-------------|
|
|-----------|------------------|-------------|
|
||||||
| 2001 | 400 | ID cannot be empty or 0. |
|
| 2001 | 400 | ID cannot be empty or 0. |
|
||||||
| 2002 | 400 | Some of the request data was invalid. The response contains an aditional array with all invalid fields. |
|
| 2002 | 400 | Some of the request data was invalid. The response contains an aditional array with all invalid fields. |
|
||||||
|
|
||||||
### List
|
## List
|
||||||
|
|
||||||
| ErrorCode | HTTP Status Code | Description |
|
| ErrorCode | HTTP Status Code | Description |
|
||||||
|-----------|------------------|-------------|
|
|-----------|------------------|-------------|
|
||||||
|
@ -56,7 +59,7 @@ This document describes the different errors Vikunja can return.
|
||||||
| 3007 | 400 | A list with this identifier already exists. |
|
| 3007 | 400 | A list with this identifier already exists. |
|
||||||
| 3008 | 412 | The list is archived and can therefore only be accessed read only. This is also true for all tasks associated with this list. |
|
| 3008 | 412 | The list is archived and can therefore only be accessed read only. This is also true for all tasks associated with this list. |
|
||||||
|
|
||||||
### Task
|
## Task
|
||||||
|
|
||||||
| ErrorCode | HTTP Status Code | Description |
|
| ErrorCode | HTTP Status Code | Description |
|
||||||
|-----------|------------------|-------------|
|
|-----------|------------------|-------------|
|
||||||
|
@ -80,7 +83,7 @@ This document describes the different errors Vikunja can return.
|
||||||
| 4018 | 403 | Invalid task filter concatinator. |
|
| 4018 | 403 | Invalid task filter concatinator. |
|
||||||
| 4019 | 403 | Invalid task filter value. |
|
| 4019 | 403 | Invalid task filter value. |
|
||||||
|
|
||||||
### Namespace
|
## Namespace
|
||||||
|
|
||||||
| ErrorCode | HTTP Status Code | Description |
|
| ErrorCode | HTTP Status Code | Description |
|
||||||
|-----------|------------------|-------------|
|
|-----------|------------------|-------------|
|
||||||
|
@ -92,7 +95,7 @@ This document describes the different errors Vikunja can return.
|
||||||
| 5011 | 409 | This user has already access to that namespace. |
|
| 5011 | 409 | This user has already access to that namespace. |
|
||||||
| 5012 | 412 | The namespace is archived and can therefore only be accessed read only. |
|
| 5012 | 412 | The namespace is archived and can therefore only be accessed read only. |
|
||||||
|
|
||||||
### Team
|
## Team
|
||||||
|
|
||||||
| ErrorCode | HTTP Status Code | Description |
|
| ErrorCode | HTTP Status Code | Description |
|
||||||
|-----------|------------------|-------------|
|
|-----------|------------------|-------------|
|
||||||
|
@ -103,14 +106,14 @@ This document describes the different errors Vikunja can return.
|
||||||
| 6006 | 400 | Cannot delete the last team member. |
|
| 6006 | 400 | Cannot delete the last team member. |
|
||||||
| 6007 | 403 | The team does not have access to the list to perform that action. |
|
| 6007 | 403 | The team does not have access to the list to perform that action. |
|
||||||
|
|
||||||
### User List Access
|
## User List Access
|
||||||
|
|
||||||
| ErrorCode | HTTP Status Code | Description |
|
| ErrorCode | HTTP Status Code | Description |
|
||||||
|-----------|------------------|-------------|
|
|-----------|------------------|-------------|
|
||||||
| 7002 | 409 | The user already has access to that list. |
|
| 7002 | 409 | The user already has access to that list. |
|
||||||
| 7003 | 403 | The user does not have access to that list. |
|
| 7003 | 403 | The user does not have access to that list. |
|
||||||
|
|
||||||
### Label
|
## Label
|
||||||
|
|
||||||
| ErrorCode | HTTP Status Code | Description |
|
| ErrorCode | HTTP Status Code | Description |
|
||||||
|-----------|------------------|-------------|
|
|-----------|------------------|-------------|
|
||||||
|
@ -118,16 +121,24 @@ This document describes the different errors Vikunja can return.
|
||||||
| 8002 | 404 | The label does not exist. |
|
| 8002 | 404 | The label does not exist. |
|
||||||
| 8003 | 403 | The user does not have access to this label. |
|
| 8003 | 403 | The user does not have access to this label. |
|
||||||
|
|
||||||
### Right
|
## Right
|
||||||
|
|
||||||
| ErrorCode | HTTP Status Code | Description |
|
| ErrorCode | HTTP Status Code | Description |
|
||||||
|-----------|------------------|-------------|
|
|-----------|------------------|-------------|
|
||||||
| 9001 | 403 | The right is invalid. |
|
| 9001 | 403 | The right is invalid. |
|
||||||
|
|
||||||
### Kanban
|
## Kanban
|
||||||
|
|
||||||
| ErrorCode | HTTP Status Code | Description |
|
| ErrorCode | HTTP Status Code | Description |
|
||||||
|-----------|------------------|-------------|
|
|-----------|------------------|-------------|
|
||||||
| 10001 | 404 | The bucket does not exist. |
|
| 10001 | 404 | The bucket does not exist. |
|
||||||
| 10002 | 400 | The bucket does not belong to that list. |
|
| 10002 | 400 | The bucket does not belong to that list. |
|
||||||
| 10003 | 412 | You cannot remove the last bucket on a list. |
|
| 10003 | 412 | You cannot remove the last bucket on a list. |
|
||||||
|
| 10004 | 412 | You cannot add the task to this bucket as it already exceeded the limit of tasks it can hold. |
|
||||||
|
|
||||||
|
## Saved Filters
|
||||||
|
|
||||||
|
| ErrorCode | HTTP Status Code | Description |
|
||||||
|
|-----------|------------------|-------------|
|
||||||
|
| 11001 | 404 | The saved filter does not exist. |
|
||||||
|
| 11002 | 412 | Saved filters are not available for link shares. |
|
||||||
|
|
|
@ -23,7 +23,7 @@ The following values are possible:
|
||||||
| 1 | Read and write. Namespaces or lists shared with this right can be read and written to by the team or user. |
|
| 1 | Read and write. Namespaces or lists shared with this right can be read and written to by the team or user. |
|
||||||
| 2 | Admin. Can do anything like read and write, but can additionally manage sharing options. |
|
| 2 | Admin. Can do anything like read and write, but can additionally manage sharing options. |
|
||||||
|
|
||||||
### Team admins
|
## Team admins
|
||||||
|
|
||||||
When adding or querying a team, every member has an additional boolean value stating if it is admin or not.
|
When adding or querying a team, every member has an additional boolean value stating if it is admin or not.
|
||||||
A team admin can also add and remove team members and also change whether a user in the team is admin or not.
|
A team admin can also add and remove team members and also change whether a user in the team is admin or not.
|
|
@ -1 +1 @@
|
||||||
Subproject commit f50566db25df9fa03243ba06d17511e050d4be95
|
Subproject commit 958219fc84db455ed58d7a4380bbffc8d04fd5cf
|
85
go.mod
85
go.mod
|
@ -17,67 +17,80 @@
|
||||||
module code.vikunja.io/api
|
module code.vikunja.io/api
|
||||||
|
|
||||||
require (
|
require (
|
||||||
4d63.com/tz v1.1.0
|
4d63.com/tz v1.2.0
|
||||||
code.vikunja.io/web v0.0.0-20200618164749-a5f3d450d39a
|
code.vikunja.io/web v0.0.0-20200809154828-8767618f181f
|
||||||
|
dmitri.shuralyov.com/go/generated v0.0.0-20170818220700-b1254a446363 // indirect
|
||||||
gitea.com/xorm/xorm-redis-cache v0.2.0
|
gitea.com/xorm/xorm-redis-cache v0.2.0
|
||||||
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751
|
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751
|
||||||
github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a
|
github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef
|
||||||
github.com/beevik/etree v1.1.0 // indirect
|
github.com/beevik/etree v1.1.0 // indirect
|
||||||
github.com/c2h5oh/datasize v0.0.0-20200112174442-28bbd4740fee
|
github.com/c2h5oh/datasize v0.0.0-20200825124411-48ed595a09d2
|
||||||
github.com/client9/misspell v0.3.4
|
github.com/client9/misspell v0.3.4
|
||||||
|
github.com/coreos/go-oidc v2.2.1+incompatible
|
||||||
github.com/cweill/gotests v1.5.3
|
github.com/cweill/gotests v1.5.3
|
||||||
github.com/d4l3k/messagediff v1.2.1 // indirect
|
github.com/d4l3k/messagediff v1.2.1 // indirect
|
||||||
github.com/dgrijalva/jwt-go v3.2.0+incompatible
|
github.com/dgrijalva/jwt-go v3.2.0+incompatible
|
||||||
github.com/fsnotify/fsnotify v1.4.9 // indirect
|
github.com/disintegration/imaging v1.6.2
|
||||||
github.com/fzipp/gocyclo v0.0.0-20150627053110-6acd4345c835
|
github.com/dustinkirkland/golang-petname v0.0.0-20191129215211-8e5a1ed0cff0
|
||||||
github.com/getsentry/sentry-go v0.6.1
|
github.com/fzipp/gocyclo v0.3.1
|
||||||
github.com/go-openapi/jsonreference v0.19.3 // indirect
|
github.com/gabriel-vasile/mimetype v1.1.2
|
||||||
github.com/go-openapi/spec v0.19.4 // indirect
|
github.com/getsentry/sentry-go v0.8.0
|
||||||
|
github.com/go-errors/errors v1.1.1
|
||||||
|
github.com/go-openapi/swag v0.19.9 // indirect
|
||||||
github.com/go-redis/redis/v7 v7.4.0
|
github.com/go-redis/redis/v7 v7.4.0
|
||||||
github.com/go-sql-driver/mysql v1.5.0
|
github.com/go-sql-driver/mysql v1.5.0
|
||||||
github.com/go-testfixtures/testfixtures/v3 v3.3.0
|
github.com/go-testfixtures/testfixtures/v3 v3.4.1
|
||||||
github.com/gordonklaus/ineffassign v0.0.0-20200309095847-7953dde2c7bf
|
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0
|
||||||
github.com/iancoleman/strcase v0.0.0-20191112232945-16388991a334
|
github.com/google/go-cmp v0.5.2 // indirect
|
||||||
github.com/imdario/mergo v0.3.9
|
github.com/gordonklaus/ineffassign v0.0.0-20201107091007-3b93a8888063
|
||||||
github.com/jgautheron/goconst v0.0.0-20200227150835-cda7ea3bf591
|
github.com/iancoleman/strcase v0.1.2
|
||||||
|
github.com/imdario/mergo v0.3.11
|
||||||
|
github.com/jgautheron/goconst v0.0.0-20201117150253-ccae5bf973f3
|
||||||
github.com/kr/text v0.2.0 // indirect
|
github.com/kr/text v0.2.0 // indirect
|
||||||
github.com/labstack/echo/v4 v4.1.16
|
github.com/labstack/echo/v4 v4.1.17
|
||||||
github.com/labstack/gommon v0.3.0
|
github.com/labstack/gommon v0.3.0
|
||||||
github.com/laurent22/ical-go v0.1.1-0.20181107184520-7e5d6ade8eef
|
github.com/laurent22/ical-go v0.1.1-0.20181107184520-7e5d6ade8eef
|
||||||
github.com/lib/pq v1.7.0
|
github.com/lib/pq v1.8.0
|
||||||
github.com/mailru/easyjson v0.7.0 // indirect
|
github.com/magefile/mage v1.10.0
|
||||||
github.com/mattn/go-sqlite3 v1.14.0
|
github.com/mailru/easyjson v0.7.6 // indirect
|
||||||
|
github.com/mattn/go-sqlite3 v1.14.5
|
||||||
|
github.com/mitchellh/mapstructure v1.3.2 // indirect
|
||||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect
|
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect
|
||||||
github.com/olekukonko/tablewriter v0.0.4
|
github.com/olekukonko/tablewriter v0.0.4
|
||||||
github.com/onsi/ginkgo v1.12.0 // indirect
|
github.com/onsi/ginkgo v1.13.0 // indirect
|
||||||
github.com/onsi/gomega v1.9.0 // indirect
|
|
||||||
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7
|
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7
|
||||||
github.com/pelletier/go-toml v1.4.0 // indirect
|
github.com/pelletier/go-toml v1.8.0 // indirect
|
||||||
github.com/pquerna/otp v1.2.0
|
github.com/pquerna/otp v1.3.0
|
||||||
github.com/prometheus/client_golang v1.7.1
|
github.com/pquerna/cachecontrol v0.0.0-20200921180117-858c6e7e6b7e // indirect
|
||||||
|
github.com/prometheus/client_golang v1.8.0
|
||||||
github.com/samedi/caldav-go v3.0.0+incompatible
|
github.com/samedi/caldav-go v3.0.0+incompatible
|
||||||
github.com/shurcooL/httpfs v0.0.0-20190707220628-8d4bc4ba7749
|
github.com/shurcooL/httpfs v0.0.0-20190707220628-8d4bc4ba7749
|
||||||
github.com/shurcooL/vfsgen v0.0.0-20200627165143-92b8a710ab6c
|
github.com/shurcooL/vfsgen v0.0.0-20200824052919-0d455de96546
|
||||||
github.com/spf13/afero v1.3.1
|
github.com/spf13/afero v1.4.1
|
||||||
github.com/spf13/cobra v1.0.0
|
github.com/spf13/cast v1.3.1 // indirect
|
||||||
|
github.com/spf13/cobra v1.1.1
|
||||||
github.com/spf13/jwalterweatherman v1.1.0 // indirect
|
github.com/spf13/jwalterweatherman v1.1.0 // indirect
|
||||||
github.com/spf13/viper v1.7.0
|
github.com/spf13/viper v1.7.1
|
||||||
github.com/stretchr/testify v1.6.1
|
github.com/stretchr/testify v1.6.1
|
||||||
github.com/swaggo/swag v1.6.7
|
github.com/swaggo/swag v1.6.9
|
||||||
github.com/ulule/limiter/v3 v3.5.0
|
github.com/ulule/limiter/v3 v3.5.0
|
||||||
github.com/urfave/cli v1.22.2 // indirect
|
golang.org/x/crypto v0.0.0-20201112155050-0c6587e931a9
|
||||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9
|
golang.org/x/image v0.0.0-20200927104501-e162460cd6b5
|
||||||
golang.org/x/lint v0.0.0-20200302205851-738671d3881b
|
golang.org/x/lint v0.0.0-20200302205851-738671d3881b
|
||||||
golang.org/x/net v0.0.0-20200602114024-627f9648deb9 // indirect
|
golang.org/x/net v0.0.0-20201016165138-7b1cca2348c0 // indirect
|
||||||
golang.org/x/text v0.3.3 // indirect
|
golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58
|
||||||
golang.org/x/tools v0.0.0-20200410194907-79a7a3126eef // indirect
|
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9
|
||||||
|
golang.org/x/tools v0.0.0-20201017001424-6003fad69a88 // indirect
|
||||||
gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect
|
gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect
|
||||||
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect
|
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect
|
||||||
gopkg.in/d4l3k/messagediff.v1 v1.2.1
|
gopkg.in/d4l3k/messagediff.v1 v1.2.1
|
||||||
gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df
|
gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df
|
||||||
honnef.co/go/tools v0.0.1-2020.1.4
|
gopkg.in/ini.v1 v1.57.0 // indirect
|
||||||
src.techknowlogick.com/xgo v0.0.0-20200602060627-a09175ea9056
|
gopkg.in/square/go-jose.v2 v2.5.1 // indirect
|
||||||
src.techknowlogick.com/xormigrate v1.3.0
|
gopkg.in/yaml.v3 v3.0.0-20200605160147-a5ece683394c
|
||||||
|
honnef.co/go/tools v0.0.1-2020.1.5
|
||||||
|
src.techknowlogick.com/xgo v1.1.1-0.20200811225412-bff6512e7c9c
|
||||||
|
src.techknowlogick.com/xormigrate v1.4.0
|
||||||
xorm.io/builder v0.3.7
|
xorm.io/builder v0.3.7
|
||||||
xorm.io/core v0.7.3
|
xorm.io/core v0.7.3
|
||||||
xorm.io/xorm v1.0.2
|
xorm.io/xorm v1.0.2
|
||||||
|
|
|
@ -0,0 +1,925 @@
|
||||||
|
// Vikunja is a to-do list application to facilitate your life.
|
||||||
|
// Copyright 2018-2020 Vikunja and contributors. All rights reserved.
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU General Public License as published by
|
||||||
|
// the Free Software Foundation, either version 3 of the License, or
|
||||||
|
// (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
// +build mage
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"bytes"
|
||||||
|
"context"
|
||||||
|
"crypto/sha256"
|
||||||
|
"fmt"
|
||||||
|
"github.com/magefile/mage/mg"
|
||||||
|
"golang.org/x/sync/errgroup"
|
||||||
|
"gopkg.in/yaml.v3"
|
||||||
|
"io"
|
||||||
|
"io/ioutil"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"path/filepath"
|
||||||
|
"runtime"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
	// PACKAGE is the go module path of this project.
	PACKAGE = `code.vikunja.io/api`
	// DIST is the directory all release artifacts are placed in.
	DIST = `dist`
)
|
||||||
|
|
||||||
|
var (
	// Goflags are passed to every go build/test invocation.
	Goflags = []string{
		"-v",
	}
	// Executable is the name of the produced binary (".exe" is appended on windows).
	Executable = "vikunja"
	// Ldflags holds the linker flags baking version and build tags into the binary.
	Ldflags = ""
	// Tags holds the build tags, taken from the TAGS environment variable.
	Tags = ""
	// VersionNumber is the version as reported by git describe.
	VersionNumber = "dev"
	Version       = "master" // This holds the built version, master by default, when building from a tag or release branch, their name
	// BinLocation is the full path to the binary used when building os packages.
	BinLocation = ""
	// PkgVersion is the version used for os packages.
	PkgVersion = "master"
	// ApiPackages lists all packages of this module except the integration tests.
	ApiPackages = []string{}
	// RootPath is the project root (the working directory at startup).
	RootPath = ""
	// GoFiles holds the absolute paths of all non-generated go source files.
	GoFiles = []string{}

	// Aliases are mage aliases of targets
	Aliases = map[string]interface{}{
		"build":                Build.Build,
		"do-the-swag":          DoTheSwag,
		"check:got-swag":       Check.GotSwag,
		"release:os-package":   Release.OsPackage,
		"dev:create-migration": Dev.CreateMigration,
		"generate-docs":        GenerateDocs,
	}
)
|
||||||
|
|
||||||
|
func setVersion() {
|
||||||
|
versionCmd := exec.Command("git", "describe", "--tags", "--always", "--abbrev=10")
|
||||||
|
version, err := versionCmd.Output()
|
||||||
|
if err != nil {
|
||||||
|
fmt.Printf("Error getting version: %s\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
VersionNumber = strings.Trim(string(version), "\n")
|
||||||
|
VersionNumber = strings.Replace(VersionNumber, "-", "+", 1)
|
||||||
|
VersionNumber = strings.Replace(VersionNumber, "-g", "-", 1)
|
||||||
|
|
||||||
|
if os.Getenv("DRONE_TAG") != "" {
|
||||||
|
Version = os.Getenv("DRONE_TAG")
|
||||||
|
} else if os.Getenv("DRONE_BRANCH") != "" {
|
||||||
|
Version = strings.Replace(os.Getenv("DRONE_BRANCH"), "release/v", "", 1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func setBinLocation() {
|
||||||
|
if os.Getenv("DRONE_WORKSPACE") != "" {
|
||||||
|
BinLocation = DIST + `/binaries/` + Executable + `-` + Version + `-linux-amd64`
|
||||||
|
} else {
|
||||||
|
BinLocation = Executable
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func setPkgVersion() {
|
||||||
|
if Version == "master" {
|
||||||
|
PkgVersion = VersionNumber
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func setExecutable() {
|
||||||
|
if runtime.GOOS == "windows" {
|
||||||
|
Executable += ".exe"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func setApiPackages() {
|
||||||
|
cmd := exec.Command("go", "list", "all")
|
||||||
|
pkgs, err := cmd.Output()
|
||||||
|
if err != nil {
|
||||||
|
fmt.Printf("Error getting packages: %s\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
for _, p := range strings.Split(string(pkgs), "\n") {
|
||||||
|
if strings.Contains(p, "code.vikunja.io/api") && !strings.Contains(p, "code.vikunja.io/api/pkg/integrations") {
|
||||||
|
ApiPackages = append(ApiPackages, p)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func setRootPath() {
|
||||||
|
pwd, err := os.Getwd()
|
||||||
|
if err != nil {
|
||||||
|
fmt.Printf("Error getting pwd: %s\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
if err := os.Setenv("VIKUNJA_SERVICE_ROOTPATH", pwd); err != nil {
|
||||||
|
fmt.Printf("Error setting root path: %s\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
RootPath = pwd
|
||||||
|
}
|
||||||
|
|
||||||
|
func setGoFiles() {
|
||||||
|
// GOFILES := $(shell find . -name "*.go" -type f ! -path "*/bindata.go")
|
||||||
|
cmd := exec.Command("find", ".", "-name", "*.go", "-type", "f", "!", "-path", "*/bindata.go")
|
||||||
|
files, err := cmd.Output()
|
||||||
|
if err != nil {
|
||||||
|
fmt.Printf("Error getting go files: %s\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
for _, f := range strings.Split(string(files), "\n") {
|
||||||
|
if strings.HasSuffix(f, ".go") {
|
||||||
|
GoFiles = append(GoFiles, RootPath+strings.TrimLeft(f, "."))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Some variables can always get initialized, so we do just that.
|
||||||
|
func init() {
	// These need no external tools (unlike initVars, which shells out to git
	// and go), so they are safe to run on every mage invocation.
	setExecutable()
	setRootPath()
}
|
||||||
|
|
||||||
|
// Some variables have external dependencies (like git) which may not always be available.
|
||||||
|
func initVars() {
	// Build tags come from the environment so CI can toggle features.
	Tags = os.Getenv("TAGS")
	// Order matters: setBinLocation and setPkgVersion read what setVersion computed.
	setVersion()
	setBinLocation()
	setPkgVersion()
	setApiPackages()
	setGoFiles()
	// Bake the version and the build tags into the binary via the linker.
	Ldflags = `-X "` + PACKAGE + `/pkg/version.Version=` + VersionNumber + `" -X "main.Tags=` + Tags + `"`
}
|
||||||
|
|
||||||
|
func runAndStreamOutput(cmd string, args ...string) {
|
||||||
|
c := exec.Command(cmd, args...)
|
||||||
|
|
||||||
|
c.Env = os.Environ()
|
||||||
|
c.Dir = RootPath
|
||||||
|
|
||||||
|
fmt.Printf("%s\n\n", c.String())
|
||||||
|
|
||||||
|
stdout, _ := c.StdoutPipe()
|
||||||
|
errbuf := bytes.Buffer{}
|
||||||
|
c.Stderr = &errbuf
|
||||||
|
c.Start()
|
||||||
|
|
||||||
|
reader := bufio.NewReader(stdout)
|
||||||
|
line, err := reader.ReadString('\n')
|
||||||
|
for err == nil {
|
||||||
|
fmt.Print(line)
|
||||||
|
line, err = reader.ReadString('\n')
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := c.Wait(); err != nil {
|
||||||
|
fmt.Printf(errbuf.String())
|
||||||
|
fmt.Printf("Error: %s\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Will check if the tool exists and if not install it from the provided import path
|
||||||
|
// If any errors occur, it will exit with a status code of 1.
|
||||||
|
func checkAndInstallGoTool(tool, importPath string) {
|
||||||
|
if err := exec.Command(tool).Run(); err != nil && strings.Contains(err.Error(), "executable file not found") {
|
||||||
|
fmt.Printf("%s not installed, installing %s...\n", tool, importPath)
|
||||||
|
if err := exec.Command("go", "install", Goflags[0], importPath).Run(); err != nil {
|
||||||
|
fmt.Printf("Error installing %s\n", tool)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
fmt.Println("Installed.")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculates a hash of a file
|
||||||
|
func calculateSha256FileHash(path string) (hash string, err error) {
|
||||||
|
f, err := os.Open(path)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
defer f.Close()
|
||||||
|
|
||||||
|
h := sha256.New()
|
||||||
|
if _, err := io.Copy(h, f); err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
return fmt.Sprintf("%x", h.Sum(nil)), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Copy the src file to dst. Any existing file will be overwritten and will not
|
||||||
|
// copy file attributes.
|
||||||
|
func copyFile(src, dst string) error {
|
||||||
|
in, err := os.Open(src)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer in.Close()
|
||||||
|
|
||||||
|
out, err := os.Create(dst)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer out.Close()
|
||||||
|
|
||||||
|
_, err = io.Copy(out, in)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
si, err := os.Stat(src)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := os.Chmod(dst, si.Mode()); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return out.Close()
|
||||||
|
}
|
||||||
|
|
||||||
|
// os.Rename has issues with moving files between docker volumes.
|
||||||
|
// Because of this limitaion, it fails in drone.
|
||||||
|
// Source: https://gist.github.com/var23rav/23ae5d0d4d830aff886c3c970b8f6c6b
|
||||||
|
func moveFile(src, dst string) error {
|
||||||
|
inputFile, err := os.Open(src)
|
||||||
|
defer inputFile.Close()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("couldn't open source file: %s", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
outputFile, err := os.Create(dst)
|
||||||
|
defer outputFile.Close()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("couldn't open dest file: %s", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = io.Copy(outputFile, inputFile)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("writing to output file failed: %s", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Make sure to copy copy the permissions of the original file as well
|
||||||
|
si, err := os.Stat(src)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := os.Chmod(dst, si.Mode()); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// The copy was successful, so now delete the original file
|
||||||
|
err = os.Remove(src)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed removing original file: %s", err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Formats the code using go fmt
|
||||||
|
func Fmt() {
|
||||||
|
mg.Deps(initVars)
|
||||||
|
args := append([]string{"-s", "-w"}, GoFiles...)
|
||||||
|
runAndStreamOutput("gofmt", args...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generates the swagger docs from the code annotations
|
||||||
|
func DoTheSwag() {
	mg.Deps(initVars)
	// swag is installed on demand; generated output lands in pkg/swagger.
	checkAndInstallGoTool("swag", "github.com/swaggo/swag/cmd/swag")
	runAndStreamOutput("swag", "init", "-g", "./pkg/routes/routes.go", "--parseDependency", "-d", RootPath, "-o", RootPath+"/pkg/swagger")
}
|
||||||
|
|
||||||
|
type Test mg.Namespace
|
||||||
|
|
||||||
|
// Runs all tests except integration tests
|
||||||
|
func (Test) Unit() {
|
||||||
|
mg.Deps(initVars)
|
||||||
|
// We run everything sequentially and not in parallel to prevent issues with real test databases
|
||||||
|
args := append([]string{"test", Goflags[0], "-p", "1"}, ApiPackages...)
|
||||||
|
runAndStreamOutput("go", args...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Runs the tests and builds the coverage html file from coverage output
|
||||||
|
func (Test) Coverage() {
	mg.Deps(initVars)
	// Unit must run first - it produces the cover.out profile rendered below.
	mg.Deps(Test.Unit)
	runAndStreamOutput("go", "tool", "cover", "-html=cover.out", "-o", "cover.html")
}
|
||||||
|
|
||||||
|
// Runs the integration tests
|
||||||
|
func (Test) Integration() {
	mg.Deps(initVars)
	// We run everything sequentially and not in parallel to prevent issues with real test databases
	runAndStreamOutput("go", "test", Goflags[0], "-p", "1", PACKAGE+"/pkg/integrations")
}
|
||||||
|
|
||||||
|
type Check mg.Namespace
|
||||||
|
|
||||||
|
// Checks if the swagger docs need to be re-generated from the code annotations
|
||||||
|
func (Check) GotSwag() {
|
||||||
|
mg.Deps(initVars)
|
||||||
|
// The check is pretty cheaply done: We take the hash of the swagger.json file, generate the docs,
|
||||||
|
// hash the file again and compare the two hashes to see if anything changed. If that's the case,
|
||||||
|
// regenerating the docs is necessary.
|
||||||
|
// swag is not capable of just outputting the generated docs to stdout, therefore we need to do it this way.
|
||||||
|
// Another drawback of this is obviously it will only work once - we're not resetting the newly generated
|
||||||
|
// docs after the check. This behaviour is good enough for ci though.
|
||||||
|
oldHash, err := calculateSha256FileHash(RootPath + "/pkg/swagger/swagger.json")
|
||||||
|
if err != nil {
|
||||||
|
fmt.Printf("Error getting old hash of the swagger docs: %s", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
DoTheSwag()
|
||||||
|
|
||||||
|
newHash, err := calculateSha256FileHash(RootPath + "/pkg/swagger/swagger.json")
|
||||||
|
if err != nil {
|
||||||
|
fmt.Printf("Error getting new hash of the swagger docs: %s", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
if oldHash != newHash {
|
||||||
|
fmt.Println("Swagger docs are not up to date.")
|
||||||
|
fmt.Println("Please run 'mage do-the-swag' and commit the result.")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func checkGolangCiLintInstalled() {
|
||||||
|
mg.Deps(initVars)
|
||||||
|
if err := exec.Command("golangci-lint").Run(); err != nil && strings.Contains(err.Error(), "executable file not found") {
|
||||||
|
fmt.Println("Please manually install golangci-lint by running")
|
||||||
|
fmt.Println("curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin v1.31.0")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Golangci runs the golangci-lint suite over the whole project.
func (Check) Golangci() {
	checkGolangCiLintInstalled()
	runAndStreamOutput("golangci-lint", "run")
}
|
||||||
|
|
||||||
|
// GolangciFix runs golangci-lint and applies every automatic fix it offers.
func (Check) GolangciFix() {
	checkGolangCiLintInstalled()
	runAndStreamOutput("golangci-lint", "run", "--fix")
}
|
||||||
|
|
||||||
|
// Runs fmt-check, lint, got-swag, misspell-check, ineffasign-check, gocyclo-check, static-check, gosec-check, goconst-check all in parallel
|
||||||
|
func (Check) All() {
	mg.Deps(initVars)
	// NOTE(review): the doc comment above lists more individual checks than
	// are wired up here; presumably golangci-lint subsumes them - confirm and
	// update the comment.
	mg.Deps(
		Check.Golangci,
		Check.GotSwag,
	)
}
|
||||||
|
|
||||||
|
type Build mg.Namespace
|
||||||
|
|
||||||
|
// Cleans all build, executable and bindata files
|
||||||
|
func (Build) Clean() error {
|
||||||
|
mg.Deps(initVars)
|
||||||
|
if err := exec.Command("go", "clean", "./...").Run(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := os.Remove(Executable); err != nil && !os.IsNotExist(err) {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := os.RemoveAll(DIST); err != nil && !os.IsNotExist(err) {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := os.RemoveAll(BinLocation); err != nil && !os.IsNotExist(err) {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generates static content into the final binary
|
||||||
|
func (Build) Generate() {
	mg.Deps(initVars)
	// Runs the go:generate directive of pkg/static, which embeds the static
	// content into a go file (presumably via vfsgen, see go.mod - confirm).
	runAndStreamOutput("go", "generate", PACKAGE+"/pkg/static")
}
|
||||||
|
|
||||||
|
// Builds a vikunja binary, ready to run
|
||||||
|
func (Build) Build() {
	mg.Deps(initVars)
	mg.Deps(Build.Generate)
	// -s -w strips symbol table and DWARF info on top of the version ldflags.
	runAndStreamOutput("go", "build", Goflags[0], "-tags", Tags, "-ldflags", "-s -w "+Ldflags, "-o", Executable)
}
|
||||||
|
|
||||||
|
type Release mg.Namespace
|
||||||
|
|
||||||
|
// Runs all steps in the right order to create release packages for various platforms
|
||||||
|
func (Release) Release(ctx context.Context) error {
|
||||||
|
mg.Deps(initVars)
|
||||||
|
mg.Deps(Build.Generate, Release.Dirs)
|
||||||
|
mg.Deps(Release.Windows, Release.Linux, Release.Darwin)
|
||||||
|
|
||||||
|
// Run compiling in parallel to speed it up
|
||||||
|
errs, _ := errgroup.WithContext(ctx)
|
||||||
|
errs.Go((Release{}).Windows)
|
||||||
|
errs.Go((Release{}).Linux)
|
||||||
|
errs.Go((Release{}).Darwin)
|
||||||
|
if err := errs.Wait(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := (Release{}).Compress(ctx); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := (Release{}).Copy(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := (Release{}).Check(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := (Release{}).OsPackage(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := (Release{}).Zip(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Creates all directories needed to release vikunja
|
||||||
|
func (Release) Dirs() error {
|
||||||
|
for _, d := range []string{"binaries", "release", "zip"} {
|
||||||
|
if err := os.MkdirAll(RootPath+"/"+DIST+"/"+d, 0755); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func runXgo(targets string) error {
|
||||||
|
mg.Deps(initVars)
|
||||||
|
checkAndInstallGoTool("xgo", "src.techknowlogick.com/xgo")
|
||||||
|
|
||||||
|
extraLdflags := `-linkmode external -extldflags "-static" `
|
||||||
|
|
||||||
|
// See https://github.com/techknowlogick/xgo/issues/79
|
||||||
|
if strings.HasPrefix(targets, "darwin") {
|
||||||
|
extraLdflags = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
runAndStreamOutput("xgo",
|
||||||
|
"-dest", RootPath+"/"+DIST+"/binaries",
|
||||||
|
"-tags", "netgo "+Tags,
|
||||||
|
"-ldflags", extraLdflags+Ldflags,
|
||||||
|
"-targets", targets,
|
||||||
|
"-out", Executable+"-"+Version,
|
||||||
|
RootPath)
|
||||||
|
if os.Getenv("DRONE_WORKSPACE") != "" {
|
||||||
|
return filepath.Walk("/build/", func(path string, info os.FileInfo, err error) error {
|
||||||
|
// Skip directories
|
||||||
|
if info.IsDir() {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return moveFile(path, RootPath+"/"+DIST+"/binaries/"+info.Name())
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Builds binaries for windows
|
||||||
|
func (Release) Windows() error {
	// "windows/*" lets xgo expand to every architecture it supports.
	return runXgo("windows/*")
}
|
||||||
|
|
||||||
|
// Builds binaries for linux
|
||||||
|
func (Release) Linux() error {
	// "linux/*" lets xgo expand to every architecture it supports.
	return runXgo("linux/*")
}
|
||||||
|
|
||||||
|
// Builds binaries for darwin
|
||||||
|
func (Release) Darwin() error {
	// "darwin/*" lets xgo expand to every architecture it supports.
	return runXgo("darwin/*")
}
|
||||||
|
|
||||||
|
// Compresses the built binaries in dist/binaries/ to reduce their filesize
|
||||||
|
func (Release) Compress(ctx context.Context) error {
|
||||||
|
// $(foreach file,$(filter-out $(wildcard $(wildcard $(DIST)/binaries/$(EXECUTABLE)-*mips*)),$(wildcard $(DIST)/binaries/$(EXECUTABLE)-*)), upx -9 $(file);)
|
||||||
|
|
||||||
|
errs, _ := errgroup.WithContext(ctx)
|
||||||
|
|
||||||
|
filepath.Walk(RootPath+"/"+DIST+"/binaries/", func(path string, info os.FileInfo, err error) error {
|
||||||
|
// Only executable files
|
||||||
|
if !strings.Contains(info.Name(), Executable) {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
// No mips or s390x for you today
|
||||||
|
if strings.Contains(info.Name(), "mips") || strings.Contains(info.Name(), "s390x") {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Runs compressing in parallel since upx is single-threaded
|
||||||
|
errs.Go(func() error {
|
||||||
|
runAndStreamOutput("chmod", "+x", path) // Make sure all binaries are executable. Sometimes the CI does weired things and they're not.
|
||||||
|
runAndStreamOutput("upx", "-9", path)
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
|
||||||
|
return errs.Wait()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Copies all built binaries to dist/release/ in preparation for creating the os packages
|
||||||
|
func (Release) Copy() error {
|
||||||
|
return filepath.Walk(RootPath+"/"+DIST+"/binaries/", func(path string, info os.FileInfo, err error) error {
|
||||||
|
// Only executable files
|
||||||
|
if !strings.Contains(info.Name(), Executable) {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return copyFile(path, RootPath+"/"+DIST+"/release/"+info.Name())
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Creates sha256 checksum files for each binary in dist/release/
|
||||||
|
func (Release) Check() error {
|
||||||
|
p := RootPath + "/" + DIST + "/release/"
|
||||||
|
return filepath.Walk(p, func(path string, info os.FileInfo, err error) error {
|
||||||
|
if info.IsDir() {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
f, err := os.Create(p + info.Name() + ".sha256")
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
hash, err := calculateSha256FileHash(path)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = f.WriteString(hash + " " + info.Name())
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return f.Close()
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Creates a folder for each
|
||||||
|
func (Release) OsPackage() error {
|
||||||
|
p := RootPath + "/" + DIST + "/release/"
|
||||||
|
|
||||||
|
// We first put all files in a map to then iterate over it since the walk function would otherwise also iterate
|
||||||
|
// over the newly created files, creating some kind of endless loop.
|
||||||
|
bins := make(map[string]os.FileInfo)
|
||||||
|
if err := filepath.Walk(p, func(path string, info os.FileInfo, err error) error {
|
||||||
|
if strings.Contains(info.Name(), ".sha256") || info.IsDir() {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
bins[path] = info
|
||||||
|
return nil
|
||||||
|
}); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
for path, info := range bins {
|
||||||
|
folder := p + info.Name() + "-full/"
|
||||||
|
if err := os.Mkdir(folder, 0755); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := moveFile(p+info.Name()+".sha256", folder+info.Name()+".sha256"); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := moveFile(path, folder+info.Name()); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := copyFile(RootPath+"/config.yml.sample", folder+"config.yml.sample"); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := copyFile(RootPath+"/LICENSE", folder+"LICENSE"); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Creates a zip file from all os-package folders in dist/release
|
||||||
|
func (Release) Zip() error {
|
||||||
|
p := RootPath + "/" + DIST + "/release/"
|
||||||
|
if err := filepath.Walk(p, func(path string, info os.FileInfo, err error) error {
|
||||||
|
if !info.IsDir() || info.Name() == "release" {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("Zipping %s...\n", info.Name())
|
||||||
|
|
||||||
|
c := exec.Command("zip", "-r", RootPath+"/"+DIST+"/zip/"+info.Name(), ".", "-i", "*")
|
||||||
|
c.Dir = path
|
||||||
|
out, err := c.Output()
|
||||||
|
fmt.Print(string(out))
|
||||||
|
return err
|
||||||
|
}); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Creates a debian repo structure
|
||||||
|
func (Release) Reprepro() {
	mg.Deps(setVersion, setBinLocation)
	// NOTE(review): "strech" looks like a typo for the debian codename
	// "stretch" - but it must match the distribution name in the reprepro
	// config, so confirm before changing it.
	// The "v0" -> "0" replacement strips the leading v so the deb file name
	// matches what nfpm produced from VersionNumber.
	runAndStreamOutput("reprepro_expect", "debian", "includedeb", "strech", RootPath+"/"+DIST+"/os-packages/"+Executable+"_"+strings.ReplaceAll(VersionNumber, "v0", "0")+"_amd64.deb")
}
|
||||||
|
|
||||||
|
// Creates deb, rpm and apk packages
|
||||||
|
func (Release) Packages() error {
|
||||||
|
mg.Deps(initVars)
|
||||||
|
var err error
|
||||||
|
binpath := "nfpm"
|
||||||
|
err = exec.Command(binpath).Run()
|
||||||
|
if err != nil && strings.Contains(err.Error(), "executable file not found") {
|
||||||
|
binpath = "/nfpm"
|
||||||
|
err = exec.Command(binpath).Run()
|
||||||
|
}
|
||||||
|
if err != nil && strings.Contains(err.Error(), "executable file not found") {
|
||||||
|
fmt.Println("Please manually install nfpm by running")
|
||||||
|
fmt.Println("curl -sfL https://install.goreleaser.com/github.com/goreleaser/nfpm.sh | sh -s -- -b $(go env GOPATH)/bin")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Because nfpm does not support templating, we replace the values in the config file and restore it after running
|
||||||
|
nfpmConfigPath := RootPath + "/nfpm.yaml"
|
||||||
|
nfpmconfig, err := ioutil.ReadFile(nfpmConfigPath)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
fixedConfig := strings.ReplaceAll(string(nfpmconfig), "<version>", VersionNumber)
|
||||||
|
fixedConfig = strings.ReplaceAll(fixedConfig, "<binlocation>", BinLocation)
|
||||||
|
if err := ioutil.WriteFile(nfpmConfigPath, []byte(fixedConfig), 0); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
releasePath := RootPath + "/" + DIST + "/os-packages/"
|
||||||
|
if err := os.MkdirAll(releasePath, 0755); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
runAndStreamOutput(binpath, "pkg", "--packager", "deb", "--target", releasePath)
|
||||||
|
runAndStreamOutput(binpath, "pkg", "--packager", "rpm", "--target", releasePath)
|
||||||
|
runAndStreamOutput(binpath, "pkg", "--packager", "apk", "--target", releasePath)
|
||||||
|
|
||||||
|
return ioutil.WriteFile(nfpmConfigPath, nfpmconfig, 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
type Dev mg.Namespace
|
||||||
|
|
||||||
|
// Creates a new bare db migration skeleton in pkg/migration with the current date
|
||||||
|
func (Dev) CreateMigration() error {
|
||||||
|
|
||||||
|
reader := bufio.NewReader(os.Stdin)
|
||||||
|
fmt.Print("Enter the name of the struct: ")
|
||||||
|
str, _ := reader.ReadString('\n')
|
||||||
|
str = strings.Trim(str, "\n")
|
||||||
|
|
||||||
|
date := time.Now().Format("20060102150405")
|
||||||
|
|
||||||
|
migration := `// Vikunja is a to-do list application to facilitate your life.
|
||||||
|
// Copyright 2018-2020 Vikunja and contributors. All rights reserved.
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU General Public License as published by
|
||||||
|
// the Free Software Foundation, either version 3 of the License, or
|
||||||
|
// (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
package migration
|
||||||
|
|
||||||
|
import (
|
||||||
|
"src.techknowlogick.com/xormigrate"
|
||||||
|
"xorm.io/xorm"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ` + str + date + ` struct {
|
||||||
|
}
|
||||||
|
|
||||||
|
func (` + str + date + `) TableName() string {
|
||||||
|
return "` + str + `"
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
migrations = append(migrations, &xormigrate.Migration{
|
||||||
|
ID: "` + date + `",
|
||||||
|
Description: "",
|
||||||
|
Migrate: func(tx *xorm.Engine) error {
|
||||||
|
return tx.Sync2(` + str + date + `{})
|
||||||
|
},
|
||||||
|
Rollback: func(tx *xorm.Engine) error {
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
`
|
||||||
|
f, err := os.Create(RootPath + "/pkg/migration/" + date + ".go")
|
||||||
|
defer f.Close()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = f.WriteString(migration)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
type configOption struct {
|
||||||
|
key string
|
||||||
|
description string
|
||||||
|
defaultValue string
|
||||||
|
|
||||||
|
children []*configOption
|
||||||
|
}
|
||||||
|
|
||||||
|
func parseYamlConfigNode(node *yaml.Node) (config *configOption) {
|
||||||
|
config = &configOption{
|
||||||
|
key: node.Value,
|
||||||
|
description: strings.ReplaceAll(node.HeadComment, "# ", ""),
|
||||||
|
}
|
||||||
|
|
||||||
|
valMap := make(map[string]*configOption)
|
||||||
|
|
||||||
|
var lastOption *configOption
|
||||||
|
|
||||||
|
for i, n2 := range node.Content {
|
||||||
|
coo := &configOption{
|
||||||
|
key: n2.Value,
|
||||||
|
description: strings.ReplaceAll(n2.HeadComment, "# ", ""),
|
||||||
|
}
|
||||||
|
|
||||||
|
// If there's a key in valMap for the current key we should use that to append etc
|
||||||
|
// Else we just create a new configobject
|
||||||
|
co, exists := valMap[n2.Value]
|
||||||
|
if exists {
|
||||||
|
co.description = coo.description
|
||||||
|
} else {
|
||||||
|
valMap[n2.Value] = coo
|
||||||
|
config.children = append(config.children, coo)
|
||||||
|
}
|
||||||
|
|
||||||
|
// fmt.Println(i, coo.key, coo.description, n2.Value)
|
||||||
|
|
||||||
|
if i%2 == 0 {
|
||||||
|
lastOption = coo
|
||||||
|
continue
|
||||||
|
} else {
|
||||||
|
lastOption.defaultValue = n2.Value
|
||||||
|
}
|
||||||
|
|
||||||
|
if i-1 >= 0 && i-1 <= len(node.Content) && node.Content[i-1].Value != "" {
|
||||||
|
coo.defaultValue = n2.Value
|
||||||
|
coo.key = node.Content[i-1].Value
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(n2.Content) > 0 {
|
||||||
|
for _, n := range n2.Content {
|
||||||
|
coo.children = append(coo.children, parseYamlConfigNode(n))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return config
|
||||||
|
}
|
||||||
|
|
||||||
|
func printConfig(config []*configOption, level int) (rendered string) {
|
||||||
|
|
||||||
|
// Keep track of what we already printed to prevent printing things twice
|
||||||
|
printed := make(map[string]bool)
|
||||||
|
|
||||||
|
for _, option := range config {
|
||||||
|
|
||||||
|
if option.key != "" {
|
||||||
|
|
||||||
|
// Filter out all config objects where the default value == key
|
||||||
|
// Yaml is weired: It gives you a slice with an entry each for the key and their value.
|
||||||
|
if printed[option.key] {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if level == 0 {
|
||||||
|
rendered += "---\n\n"
|
||||||
|
}
|
||||||
|
|
||||||
|
rendered += "#"
|
||||||
|
for i := 0; i <= level; i++ {
|
||||||
|
rendered += "#"
|
||||||
|
}
|
||||||
|
rendered += " " + option.key + "\n\n"
|
||||||
|
|
||||||
|
if option.description != "" {
|
||||||
|
rendered += option.description + "\n\n"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Top level config values never have a default value
|
||||||
|
if level > 0 {
|
||||||
|
rendered += "Default: `" + option.defaultValue
|
||||||
|
if option.defaultValue == "" {
|
||||||
|
rendered += "<empty>"
|
||||||
|
}
|
||||||
|
rendered += "`\n"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
printed[option.key] = true
|
||||||
|
rendered += "\n" + printConfig(option.children, level+1)
|
||||||
|
}
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const (
|
||||||
|
configDocPath = `docs/content/doc/setup/config.md`
|
||||||
|
configInjectComment = `<!-- Generated config will be injected here -->`
|
||||||
|
)
|
||||||
|
|
||||||
|
// Generates the error docs from a commented config.yml.sample file in the repo root.
|
||||||
|
func GenerateDocs() error {
|
||||||
|
|
||||||
|
config, err := ioutil.ReadFile("config.yml.sample")
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
var d yaml.Node
|
||||||
|
err = yaml.Unmarshal(config, &d)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
conf := []*configOption{}
|
||||||
|
|
||||||
|
for _, node := range d.Content {
|
||||||
|
for _, n := range node.Content {
|
||||||
|
co := parseYamlConfigNode(n)
|
||||||
|
conf = append(conf, co)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
renderedConfig := printConfig(conf, 0)
|
||||||
|
|
||||||
|
// Rebuild the config
|
||||||
|
file, err := os.OpenFile(configDocPath, os.O_RDWR, 0)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer file.Close()
|
||||||
|
|
||||||
|
// We read the config doc up until the marker, then stop and append our generated config
|
||||||
|
fullConfig := ""
|
||||||
|
|
||||||
|
scanner := bufio.NewScanner(file)
|
||||||
|
for scanner.Scan() {
|
||||||
|
t := scanner.Text()
|
||||||
|
fullConfig += t + "\n"
|
||||||
|
|
||||||
|
if t == configInjectComment {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := scanner.Err(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
fullConfig += "\n" + renderedConfig
|
||||||
|
|
||||||
|
// We write the full file to prevent old content leftovers at the end
|
||||||
|
// I know, there are probably better ways to do this.
|
||||||
|
if err := ioutil.WriteFile(configDocPath, []byte(fullConfig), 0); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
|
@ -0,0 +1,18 @@
|
||||||
|
name: "vikunja"
|
||||||
|
arch: "amd64"
|
||||||
|
platform: "linux"
|
||||||
|
version: "<version>"
|
||||||
|
description: "Vikunja is an open-source todo application, written in Go. It lets you create lists,tasks and share them via teams or directly between users."
|
||||||
|
maintainer: "Vikunja Maintainers <maintainers@vikunja.io>"
|
||||||
|
homepage: "https://vikunja.io"
|
||||||
|
section: "default"
|
||||||
|
priority: "extra"
|
||||||
|
license: "GPLv3"
|
||||||
|
files:
|
||||||
|
<binlocation>: /opt/vikunja/vikunja
|
||||||
|
config_files:
|
||||||
|
./config.yml.sample: /etc/vikunja/config.yml
|
||||||
|
symlinks:
|
||||||
|
/opt/vikunja/vikunja: /usr/local/bin/vikunja
|
||||||
|
scripts:
|
||||||
|
postinstall: ./build/after-install.sh
|
|
@ -17,16 +17,18 @@
|
||||||
package caldav
|
package caldav
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"code.vikunja.io/api/pkg/config"
|
|
||||||
"code.vikunja.io/api/pkg/user"
|
|
||||||
"code.vikunja.io/api/pkg/utils"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"regexp"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"code.vikunja.io/api/pkg/config"
|
||||||
|
"code.vikunja.io/api/pkg/user"
|
||||||
|
"code.vikunja.io/api/pkg/utils"
|
||||||
)
|
)
|
||||||
|
|
||||||
// DateFormat ist the caldav date format
|
// DateFormat is the caldav date format
|
||||||
const DateFormat = `20060102T150405`
|
const DateFormat = `20060102T150405`
|
||||||
|
|
||||||
// Event holds a single caldav event
|
// Event holds a single caldav event
|
||||||
|
@ -91,11 +93,17 @@ PRODID:-//` + config.ProdID + `//EN`
|
||||||
e.UID = makeCalDavTimeFromTimeStamp(e.Timestamp) + utils.Sha256(e.Summary)
|
e.UID = makeCalDavTimeFromTimeStamp(e.Timestamp) + utils.Sha256(e.Summary)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
formattedDescription := ""
|
||||||
|
if e.Description != "" {
|
||||||
|
re := regexp.MustCompile(`\r?\n`)
|
||||||
|
formattedDescription = re.ReplaceAllString(e.Description, "\\n")
|
||||||
|
}
|
||||||
|
|
||||||
caldavevents += `
|
caldavevents += `
|
||||||
BEGIN:VEVENT
|
BEGIN:VEVENT
|
||||||
UID:` + e.UID + `
|
UID:` + e.UID + `
|
||||||
SUMMARY:` + e.Summary + `
|
SUMMARY:` + e.Summary + `
|
||||||
DESCRIPTION:` + e.Description + `
|
DESCRIPTION:` + formattedDescription + `
|
||||||
DTSTAMP:` + makeCalDavTimeFromTimeStamp(e.Timestamp) + `
|
DTSTAMP:` + makeCalDavTimeFromTimeStamp(e.Timestamp) + `
|
||||||
DTSTART:` + makeCalDavTimeFromTimeStamp(e.Start) + `
|
DTSTART:` + makeCalDavTimeFromTimeStamp(e.Start) + `
|
||||||
DTEND:` + makeCalDavTimeFromTimeStamp(e.End)
|
DTEND:` + makeCalDavTimeFromTimeStamp(e.End)
|
||||||
|
@ -151,12 +159,15 @@ DTSTART: ` + makeCalDavTimeFromTimeStamp(t.Start)
|
||||||
DTEND: ` + makeCalDavTimeFromTimeStamp(t.End)
|
DTEND: ` + makeCalDavTimeFromTimeStamp(t.End)
|
||||||
}
|
}
|
||||||
if t.Description != "" {
|
if t.Description != "" {
|
||||||
|
re := regexp.MustCompile(`\r?\n`)
|
||||||
|
formattedDescription := re.ReplaceAllString(t.Description, "\\n")
|
||||||
caldavtodos += `
|
caldavtodos += `
|
||||||
DESCRIPTION:` + t.Description
|
DESCRIPTION:` + formattedDescription
|
||||||
}
|
}
|
||||||
if t.Completed.Unix() > 0 {
|
if t.Completed.Unix() > 0 {
|
||||||
caldavtodos += `
|
caldavtodos += `
|
||||||
COMPLETED: ` + makeCalDavTimeFromTimeStamp(t.Completed)
|
COMPLETED:` + makeCalDavTimeFromTimeStamp(t.Completed) + `
|
||||||
|
STATUS:COMPLETED`
|
||||||
}
|
}
|
||||||
if t.Organizer != nil {
|
if t.Organizer != nil {
|
||||||
caldavtodos += `
|
caldavtodos += `
|
||||||
|
|
|
@ -17,10 +17,11 @@
|
||||||
package caldav
|
package caldav
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"code.vikunja.io/api/pkg/config"
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"code.vikunja.io/api/pkg/config"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestParseEvents(t *testing.T) {
|
func TestParseEvents(t *testing.T) {
|
||||||
|
@ -238,6 +239,41 @@ DTSTAMP:20181202T050024
|
||||||
DTSTART:20181202T050024
|
DTSTART:20181202T050024
|
||||||
DTEND:20181202T050320
|
DTEND:20181202T050320
|
||||||
END:VEVENT
|
END:VEVENT
|
||||||
|
END:VCALENDAR`,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Test caldavparsing with multiline description",
|
||||||
|
args: args{
|
||||||
|
config: &Config{
|
||||||
|
Name: "test",
|
||||||
|
ProdID: "RandomProdID which is not random",
|
||||||
|
},
|
||||||
|
events: []*Event{
|
||||||
|
{
|
||||||
|
Summary: "Event #1",
|
||||||
|
Description: `Lorem Ipsum
|
||||||
|
Dolor sit amet`,
|
||||||
|
UID: "randommduid",
|
||||||
|
Timestamp: time.Unix(1543626724, 0).In(config.GetTimeZone()),
|
||||||
|
Start: time.Unix(1543626724, 0).In(config.GetTimeZone()),
|
||||||
|
End: time.Unix(1543627824, 0).In(config.GetTimeZone()),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
wantCaldavevents: `BEGIN:VCALENDAR
|
||||||
|
VERSION:2.0
|
||||||
|
METHOD:PUBLISH
|
||||||
|
X-PUBLISHED-TTL:PT4H
|
||||||
|
X-WR-CALNAME:test
|
||||||
|
PRODID:-//RandomProdID which is not random//EN
|
||||||
|
BEGIN:VEVENT
|
||||||
|
UID:randommduid
|
||||||
|
SUMMARY:Event #1
|
||||||
|
DESCRIPTION:Lorem Ipsum\nDolor sit amet
|
||||||
|
DTSTAMP:20181201T011204
|
||||||
|
DTSTART:20181201T011204
|
||||||
|
DTEND:20181201T013024
|
||||||
|
END:VEVENT
|
||||||
END:VCALENDAR`,
|
END:VCALENDAR`,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
@ -248,3 +284,88 @@ END:VCALENDAR`,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestParseTodos(t *testing.T) {
|
||||||
|
type args struct {
|
||||||
|
config *Config
|
||||||
|
todos []*Todo
|
||||||
|
}
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
args args
|
||||||
|
wantCaldavtasks string
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "Test caldavparsing with multiline description",
|
||||||
|
args: args{
|
||||||
|
config: &Config{
|
||||||
|
Name: "test",
|
||||||
|
ProdID: "RandomProdID which is not random",
|
||||||
|
},
|
||||||
|
todos: []*Todo{
|
||||||
|
{
|
||||||
|
Summary: "Todo #1",
|
||||||
|
Description: `Lorem Ipsum
|
||||||
|
Dolor sit amet`,
|
||||||
|
UID: "randommduid",
|
||||||
|
Timestamp: time.Unix(1543626724, 0).In(config.GetTimeZone()),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
wantCaldavtasks: `BEGIN:VCALENDAR
|
||||||
|
VERSION:2.0
|
||||||
|
METHOD:PUBLISH
|
||||||
|
X-PUBLISHED-TTL:PT4H
|
||||||
|
X-WR-CALNAME:test
|
||||||
|
PRODID:-//RandomProdID which is not random//EN
|
||||||
|
BEGIN:VTODO
|
||||||
|
UID:randommduid
|
||||||
|
DTSTAMP:20181201T011204
|
||||||
|
SUMMARY:Todo #1
|
||||||
|
DESCRIPTION:Lorem Ipsum\nDolor sit amet
|
||||||
|
LAST-MODIFIED:00010101T000000
|
||||||
|
END:VTODO
|
||||||
|
END:VCALENDAR`,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Test caldavparsing with completed task",
|
||||||
|
args: args{
|
||||||
|
config: &Config{
|
||||||
|
Name: "test",
|
||||||
|
ProdID: "RandomProdID which is not random",
|
||||||
|
},
|
||||||
|
todos: []*Todo{
|
||||||
|
{
|
||||||
|
Summary: "Todo #1",
|
||||||
|
Description: "Lorem Ipsum",
|
||||||
|
UID: "randommduid",
|
||||||
|
Timestamp: time.Unix(1543626724, 0).In(config.GetTimeZone()),
|
||||||
|
Completed: time.Unix(1543627824, 0).In(config.GetTimeZone()),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
wantCaldavtasks: `BEGIN:VCALENDAR
|
||||||
|
VERSION:2.0
|
||||||
|
METHOD:PUBLISH
|
||||||
|
X-PUBLISHED-TTL:PT4H
|
||||||
|
X-WR-CALNAME:test
|
||||||
|
PRODID:-//RandomProdID which is not random//EN
|
||||||
|
BEGIN:VTODO
|
||||||
|
UID:randommduid
|
||||||
|
DTSTAMP:20181201T011204
|
||||||
|
SUMMARY:Todo #1
|
||||||
|
DESCRIPTION:Lorem Ipsum
|
||||||
|
COMPLETED:20181201T013024
|
||||||
|
STATUS:COMPLETED
|
||||||
|
LAST-MODIFIED:00010101T000000
|
||||||
|
END:VTODO
|
||||||
|
END:VCALENDAR`,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
gotCaldavtasks := ParseTodos(tt.args.config, tt.args.todos)
|
||||||
|
assert.Equal(t, gotCaldavtasks, tt.wantCaldavtasks)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
@ -18,8 +18,9 @@ package cmd
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"github.com/spf13/cobra"
|
|
||||||
"os"
|
"os"
|
||||||
|
|
||||||
|
"github.com/spf13/cobra"
|
||||||
)
|
)
|
||||||
|
|
||||||
var rootCmd = &cobra.Command{
|
var rootCmd = &cobra.Command{
|
||||||
|
|
|
@ -17,11 +17,12 @@
|
||||||
package cmd
|
package cmd
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
"code.vikunja.io/api/pkg/initialize"
|
"code.vikunja.io/api/pkg/initialize"
|
||||||
"code.vikunja.io/api/pkg/log"
|
"code.vikunja.io/api/pkg/log"
|
||||||
"code.vikunja.io/api/pkg/modules/dump"
|
"code.vikunja.io/api/pkg/modules/dump"
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
"time"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
|
|
|
@ -0,0 +1,255 @@
|
||||||
|
// Copyright 2020 Vikunja and contriubtors. All rights reserved.
|
||||||
|
//
|
||||||
|
// This file is part of Vikunja.
|
||||||
|
//
|
||||||
|
// Vikunja is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU General Public License as published by
|
||||||
|
// the Free Software Foundation, either version 3 of the License, or
|
||||||
|
// (at your option) any later version.
|
||||||
|
//
|
||||||
|
// Vikunja is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU General Public License
|
||||||
|
// along with Vikunja. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"code.vikunja.io/api/pkg/initialize"
|
||||||
|
"code.vikunja.io/api/pkg/log"
|
||||||
|
"code.vikunja.io/api/pkg/models"
|
||||||
|
"code.vikunja.io/api/pkg/user"
|
||||||
|
"github.com/olekukonko/tablewriter"
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
"golang.org/x/crypto/ssh/terminal"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
userFlagUsername string
|
||||||
|
userFlagEmail string
|
||||||
|
userFlagPassword string
|
||||||
|
userFlagAvatar = "default"
|
||||||
|
userFlagResetPasswordDirectly bool
|
||||||
|
userFlagEnableUser bool
|
||||||
|
userFlagDisableUser bool
|
||||||
|
)
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
// User create flags
|
||||||
|
userCreateCmd.Flags().StringVarP(&userFlagUsername, "username", "u", "", "The username of the new user.")
|
||||||
|
_ = userCreateCmd.MarkFlagRequired("username")
|
||||||
|
userCreateCmd.Flags().StringVarP(&userFlagEmail, "email", "e", "", "The email address of the new user.")
|
||||||
|
_ = userCreateCmd.MarkFlagRequired("email")
|
||||||
|
userCreateCmd.Flags().StringVarP(&userFlagPassword, "password", "p", "", "The password of the new user. You will be asked to enter it if not provided through the flag.")
|
||||||
|
userCreateCmd.Flags().StringVarP(&userFlagAvatar, "avatar-provider", "a", "", "The avatar provider of the new user. Optional.")
|
||||||
|
|
||||||
|
// User update flags
|
||||||
|
userUpdateCmd.Flags().StringVarP(&userFlagUsername, "username", "u", "", "The new username of the user.")
|
||||||
|
userUpdateCmd.Flags().StringVarP(&userFlagEmail, "email", "e", "", "The new email address of the user.")
|
||||||
|
userUpdateCmd.Flags().StringVarP(&userFlagAvatar, "avatar-provider", "a", "", "The new avatar provider of the new user.")
|
||||||
|
|
||||||
|
// Reset PW flags
|
||||||
|
userResetPasswordCmd.Flags().BoolVarP(&userFlagResetPasswordDirectly, "direct", "d", false, "If provided, reset the password directly instead of sending the user a reset mail.")
|
||||||
|
userResetPasswordCmd.Flags().StringVarP(&userFlagPassword, "password", "p", "", "The new password of the user. Only used in combination with --direct. You will be asked to enter it if not provided through the flag.")
|
||||||
|
|
||||||
|
// Change status flags
|
||||||
|
userChangeEnabledCmd.Flags().BoolVarP(&userFlagDisableUser, "disable", "d", false, "Disable the user.")
|
||||||
|
userChangeEnabledCmd.Flags().BoolVarP(&userFlagEnableUser, "enable", "e", false, "Enable the user.")
|
||||||
|
|
||||||
|
userCmd.AddCommand(userListCmd, userCreateCmd, userUpdateCmd, userResetPasswordCmd, userChangeEnabledCmd)
|
||||||
|
rootCmd.AddCommand(userCmd)
|
||||||
|
}
|
||||||
|
|
||||||
|
func getPasswordFromFlagOrInput() (pw string) {
|
||||||
|
pw = userFlagPassword
|
||||||
|
if userFlagPassword == "" {
|
||||||
|
fmt.Print("Enter Password: ")
|
||||||
|
bytePW, err := terminal.ReadPassword(int(os.Stdin.Fd()))
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Error reading password: %s", err)
|
||||||
|
}
|
||||||
|
fmt.Printf("\nConfirm Password: ")
|
||||||
|
byteConfirmPW, err := terminal.ReadPassword(int(os.Stdin.Fd()))
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Error reading password: %s", err)
|
||||||
|
}
|
||||||
|
if string(bytePW) != string(byteConfirmPW) {
|
||||||
|
log.Critical("Passwords don't match!")
|
||||||
|
}
|
||||||
|
fmt.Printf("\n")
|
||||||
|
pw = strings.TrimSpace(string(bytePW))
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
func getUserFromArg(arg string) *user.User {
|
||||||
|
id, err := strconv.ParseInt(arg, 10, 64)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Invalid user id: %s", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
u, err := user.GetUserByID(id)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Could not get user: %s", err)
|
||||||
|
}
|
||||||
|
return u
|
||||||
|
}
|
||||||
|
|
||||||
|
var userCmd = &cobra.Command{
|
||||||
|
Use: "user",
|
||||||
|
Short: "Manage users locally through the cli.",
|
||||||
|
}
|
||||||
|
|
||||||
|
var userListCmd = &cobra.Command{
|
||||||
|
Use: "list",
|
||||||
|
Short: "Shows a list of all users.",
|
||||||
|
PreRun: func(cmd *cobra.Command, args []string) {
|
||||||
|
initialize.FullInit()
|
||||||
|
},
|
||||||
|
Run: func(cmd *cobra.Command, args []string) {
|
||||||
|
users, err := user.ListUsers("")
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Error getting users: %s", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
table := tablewriter.NewWriter(os.Stdout)
|
||||||
|
table.SetHeader([]string{
|
||||||
|
"ID",
|
||||||
|
"Username",
|
||||||
|
"Email",
|
||||||
|
"Active",
|
||||||
|
"Created",
|
||||||
|
"Updated",
|
||||||
|
})
|
||||||
|
|
||||||
|
for _, u := range users {
|
||||||
|
table.Append([]string{
|
||||||
|
strconv.FormatInt(u.ID, 10),
|
||||||
|
u.Username,
|
||||||
|
u.Email,
|
||||||
|
strconv.FormatBool(u.IsActive),
|
||||||
|
u.Created.Format(time.RFC3339),
|
||||||
|
u.Updated.Format(time.RFC3339),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
table.Render()
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
var userCreateCmd = &cobra.Command{
|
||||||
|
Use: "create",
|
||||||
|
Short: "Create a new user.",
|
||||||
|
PreRun: func(cmd *cobra.Command, args []string) {
|
||||||
|
initialize.FullInit()
|
||||||
|
},
|
||||||
|
Run: func(cmd *cobra.Command, args []string) {
|
||||||
|
u := &user.User{
|
||||||
|
Username: userFlagUsername,
|
||||||
|
Email: userFlagEmail,
|
||||||
|
Password: getPasswordFromFlagOrInput(),
|
||||||
|
}
|
||||||
|
newUser, err := user.CreateUser(u)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Error creating new user: %s", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
err = models.CreateNewNamespaceForUser(newUser)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Error creating new namespace for user: %s", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("\nUser was created successfully.\n")
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
var userUpdateCmd = &cobra.Command{
|
||||||
|
Use: "update [user id]",
|
||||||
|
Short: "Update an existing user.",
|
||||||
|
Args: cobra.ExactArgs(1),
|
||||||
|
PreRun: func(cmd *cobra.Command, args []string) {
|
||||||
|
initialize.FullInit()
|
||||||
|
},
|
||||||
|
Run: func(cmd *cobra.Command, args []string) {
|
||||||
|
u := getUserFromArg(args[0])
|
||||||
|
|
||||||
|
if userFlagUsername != "" {
|
||||||
|
u.Username = userFlagUsername
|
||||||
|
}
|
||||||
|
if userFlagEmail != "" {
|
||||||
|
u.Email = userFlagEmail
|
||||||
|
}
|
||||||
|
if userFlagAvatar != "default" {
|
||||||
|
u.AvatarProvider = userFlagAvatar
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err := user.UpdateUser(u)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Error updating the user: %s", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Println("User updated successfully.")
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
var userResetPasswordCmd = &cobra.Command{
|
||||||
|
Use: "reset-password [user id]",
|
||||||
|
Short: "Reset a users password, either through mailing them a reset link or directly.",
|
||||||
|
PreRun: func(cmd *cobra.Command, args []string) {
|
||||||
|
initialize.FullInit()
|
||||||
|
},
|
||||||
|
Args: cobra.ExactArgs(1),
|
||||||
|
Run: func(cmd *cobra.Command, args []string) {
|
||||||
|
u := getUserFromArg(args[0])
|
||||||
|
|
||||||
|
// By default we reset as usual, only with specific flag directly.
|
||||||
|
if userFlagResetPasswordDirectly {
|
||||||
|
err := user.UpdateUserPassword(u, getPasswordFromFlagOrInput())
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Could not update user password: %s", err)
|
||||||
|
}
|
||||||
|
fmt.Println("Password updated successfully.")
|
||||||
|
} else {
|
||||||
|
err := user.RequestUserPasswordResetToken(u)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Could not send password reset email: %s", err)
|
||||||
|
}
|
||||||
|
fmt.Println("Password reset email sent successfully.")
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
var userChangeEnabledCmd = &cobra.Command{
|
||||||
|
Use: "change-status [user id]",
|
||||||
|
Short: "Enable or disable a user. Will toggle the current status if no flag (--enable or --disable) is provided.",
|
||||||
|
PreRun: func(cmd *cobra.Command, args []string) {
|
||||||
|
initialize.FullInit()
|
||||||
|
},
|
||||||
|
Args: cobra.ExactArgs(1),
|
||||||
|
Run: func(cmd *cobra.Command, args []string) {
|
||||||
|
u := getUserFromArg(args[0])
|
||||||
|
|
||||||
|
if userFlagEnableUser {
|
||||||
|
u.IsActive = true
|
||||||
|
} else if userFlagDisableUser {
|
||||||
|
u.IsActive = false
|
||||||
|
} else {
|
||||||
|
u.IsActive = !u.IsActive
|
||||||
|
}
|
||||||
|
_, err := user.UpdateUser(u)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("Could not enable the user")
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("User status successfully changed, user is now active: %t.\n", u.IsActive)
|
||||||
|
},
|
||||||
|
}
|
|
@ -17,10 +17,11 @@
|
||||||
package cmd
|
package cmd
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"code.vikunja.io/api/pkg/version"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"github.com/spf13/cobra"
|
|
||||||
"runtime"
|
"runtime"
|
||||||
|
|
||||||
|
"code.vikunja.io/api/pkg/version"
|
||||||
|
"github.com/spf13/cobra"
|
||||||
)
|
)
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
|
|
|
@ -17,17 +17,18 @@
|
||||||
package cmd
|
package cmd
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"context"
|
||||||
|
"os"
|
||||||
|
"os/signal"
|
||||||
|
"time"
|
||||||
|
|
||||||
"code.vikunja.io/api/pkg/config"
|
"code.vikunja.io/api/pkg/config"
|
||||||
"code.vikunja.io/api/pkg/initialize"
|
"code.vikunja.io/api/pkg/initialize"
|
||||||
"code.vikunja.io/api/pkg/log"
|
"code.vikunja.io/api/pkg/log"
|
||||||
"code.vikunja.io/api/pkg/routes"
|
"code.vikunja.io/api/pkg/routes"
|
||||||
"code.vikunja.io/api/pkg/swagger"
|
"code.vikunja.io/api/pkg/swagger"
|
||||||
"code.vikunja.io/api/pkg/version"
|
"code.vikunja.io/api/pkg/version"
|
||||||
"context"
|
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
"os"
|
|
||||||
"os/signal"
|
|
||||||
"time"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
|
|
|
@ -52,6 +52,14 @@ const (
|
||||||
ServiceEnableTotp Key = `service.enabletotp`
|
ServiceEnableTotp Key = `service.enabletotp`
|
||||||
ServiceSentryDsn Key = `service.sentrydsn`
|
ServiceSentryDsn Key = `service.sentrydsn`
|
||||||
|
|
||||||
|
AuthLocalEnabled Key = `auth.local.enabled`
|
||||||
|
AuthOpenIDEnabled Key = `auth.openid.enabled`
|
||||||
|
AuthOpenIDRedirectURL Key = `auth.openid.redirecturl`
|
||||||
|
AuthOpenIDProviders Key = `auth.openid.providers`
|
||||||
|
|
||||||
|
LegalImprintURL Key = `legal.imprinturl`
|
||||||
|
LegalPrivacyURL Key = `legal.privacyurl`
|
||||||
|
|
||||||
DatabaseType Key = `database.type`
|
DatabaseType Key = `database.type`
|
||||||
DatabaseHost Key = `database.host`
|
DatabaseHost Key = `database.host`
|
||||||
DatabaseUser Key = `database.user`
|
DatabaseUser Key = `database.user`
|
||||||
|
@ -76,6 +84,7 @@ const (
|
||||||
MailerFromEmail Key = `mailer.fromemail`
|
MailerFromEmail Key = `mailer.fromemail`
|
||||||
MailerQueuelength Key = `mailer.queuelength`
|
MailerQueuelength Key = `mailer.queuelength`
|
||||||
MailerQueueTimeout Key = `mailer.queuetimeout`
|
MailerQueueTimeout Key = `mailer.queuetimeout`
|
||||||
|
MailerForceSSL Key = `mailer.forcessl`
|
||||||
|
|
||||||
RedisEnabled Key = `redis.enabled`
|
RedisEnabled Key = `redis.enabled`
|
||||||
RedisHost Key = `redis.host`
|
RedisHost Key = `redis.host`
|
||||||
|
@ -113,7 +122,6 @@ const (
|
||||||
CorsOrigins Key = `cors.origins`
|
CorsOrigins Key = `cors.origins`
|
||||||
CorsMaxAge Key = `cors.maxage`
|
CorsMaxAge Key = `cors.maxage`
|
||||||
|
|
||||||
AvatarProvider Key = `avatar.provider`
|
|
||||||
AvatarGravaterExpiration Key = `avatar.gravatarexpiration`
|
AvatarGravaterExpiration Key = `avatar.gravatarexpiration`
|
||||||
|
|
||||||
BackgroundsEnabled Key = `backgrounds.enabled`
|
BackgroundsEnabled Key = `backgrounds.enabled`
|
||||||
|
@ -121,6 +129,8 @@ const (
|
||||||
BackgroundsUnsplashEnabled Key = `backgrounds.providers.unsplash.enabled`
|
BackgroundsUnsplashEnabled Key = `backgrounds.providers.unsplash.enabled`
|
||||||
BackgroundsUnsplashAccessToken Key = `backgrounds.providers.unsplash.accesstoken`
|
BackgroundsUnsplashAccessToken Key = `backgrounds.providers.unsplash.accesstoken`
|
||||||
BackgroundsUnsplashApplicationID Key = `backgrounds.providers.unsplash.applicationid`
|
BackgroundsUnsplashApplicationID Key = `backgrounds.providers.unsplash.applicationid`
|
||||||
|
|
||||||
|
KeyvalueType Key = `keyvalue.type`
|
||||||
)
|
)
|
||||||
|
|
||||||
// GetString returns a string config value
|
// GetString returns a string config value
|
||||||
|
@ -153,6 +163,11 @@ func (k Key) GetStringSlice() []string {
|
||||||
return viper.GetStringSlice(string(k))
|
return viper.GetStringSlice(string(k))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Get returns the raw value from a config option
|
||||||
|
func (k Key) Get() interface{} {
|
||||||
|
return viper.Get(string(k))
|
||||||
|
}
|
||||||
|
|
||||||
var timezone *time.Location
|
var timezone *time.Location
|
||||||
|
|
||||||
// GetTimeZone returns the time zone configured for vikunja
|
// GetTimeZone returns the time zone configured for vikunja
|
||||||
|
@ -211,6 +226,10 @@ func InitDefaultConfig() {
|
||||||
ServiceEnableTaskComments.setDefault(true)
|
ServiceEnableTaskComments.setDefault(true)
|
||||||
ServiceEnableTotp.setDefault(true)
|
ServiceEnableTotp.setDefault(true)
|
||||||
|
|
||||||
|
// Auth
|
||||||
|
AuthLocalEnabled.setDefault(true)
|
||||||
|
AuthOpenIDEnabled.setDefault(false)
|
||||||
|
|
||||||
// Database
|
// Database
|
||||||
DatabaseType.setDefault("sqlite")
|
DatabaseType.setDefault("sqlite")
|
||||||
DatabaseHost.setDefault("localhost")
|
DatabaseHost.setDefault("localhost")
|
||||||
|
@ -237,6 +256,7 @@ func InitDefaultConfig() {
|
||||||
MailerFromEmail.setDefault("mail@vikunja")
|
MailerFromEmail.setDefault("mail@vikunja")
|
||||||
MailerQueuelength.setDefault(100)
|
MailerQueuelength.setDefault(100)
|
||||||
MailerQueueTimeout.setDefault(30)
|
MailerQueueTimeout.setDefault(30)
|
||||||
|
MailerForceSSL.setDefault(false)
|
||||||
// Redis
|
// Redis
|
||||||
RedisEnabled.setDefault(false)
|
RedisEnabled.setDefault(false)
|
||||||
RedisHost.setDefault("localhost:6379")
|
RedisHost.setDefault("localhost:6379")
|
||||||
|
@ -268,12 +288,13 @@ func InitDefaultConfig() {
|
||||||
MigrationWunderlistEnable.setDefault(false)
|
MigrationWunderlistEnable.setDefault(false)
|
||||||
MigrationTodoistEnable.setDefault(false)
|
MigrationTodoistEnable.setDefault(false)
|
||||||
// Avatar
|
// Avatar
|
||||||
AvatarProvider.setDefault("gravatar")
|
|
||||||
AvatarGravaterExpiration.setDefault(3600)
|
AvatarGravaterExpiration.setDefault(3600)
|
||||||
// List Backgrounds
|
// List Backgrounds
|
||||||
BackgroundsEnabled.setDefault(true)
|
BackgroundsEnabled.setDefault(true)
|
||||||
BackgroundsUploadEnabled.setDefault(true)
|
BackgroundsUploadEnabled.setDefault(true)
|
||||||
BackgroundsUnsplashEnabled.setDefault(false)
|
BackgroundsUnsplashEnabled.setDefault(false)
|
||||||
|
// Key Value
|
||||||
|
KeyvalueType.setDefault("memory")
|
||||||
}
|
}
|
||||||
|
|
||||||
// InitConfig initializes the config, sets defaults etc.
|
// InitConfig initializes the config, sets defaults etc.
|
||||||
|
@ -307,6 +328,18 @@ func InitConfig() {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if CacheType.GetString() == "keyvalue" {
|
||||||
|
CacheType.Set(KeyvalueType.GetString())
|
||||||
|
}
|
||||||
|
|
||||||
|
if RateLimitStore.GetString() == "keyvalue" {
|
||||||
|
RateLimitStore.Set(KeyvalueType.GetString())
|
||||||
|
}
|
||||||
|
|
||||||
|
if AuthOpenIDRedirectURL.GetString() == "" {
|
||||||
|
AuthOpenIDRedirectURL.Set(ServiceFrontendurl.GetString() + "auth/openid/")
|
||||||
|
}
|
||||||
|
|
||||||
log.Printf("Using config file: %s", viper.ConfigFileUsed())
|
log.Printf("Using config file: %s", viper.ConfigFileUsed())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
18
pkg/db/db.go
18
pkg/db/db.go
|
@ -17,16 +17,17 @@
|
||||||
package db
|
package db
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"code.vikunja.io/api/pkg/config"
|
|
||||||
"code.vikunja.io/api/pkg/log"
|
|
||||||
"encoding/gob"
|
"encoding/gob"
|
||||||
"fmt"
|
"fmt"
|
||||||
xrc "gitea.com/xorm/xorm-redis-cache"
|
|
||||||
"net/url"
|
"net/url"
|
||||||
"os"
|
"os"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"code.vikunja.io/api/pkg/config"
|
||||||
|
"code.vikunja.io/api/pkg/log"
|
||||||
|
xrc "gitea.com/xorm/xorm-redis-cache"
|
||||||
"xorm.io/core"
|
"xorm.io/core"
|
||||||
"xorm.io/xorm"
|
"xorm.io/xorm"
|
||||||
"xorm.io/xorm/caches"
|
"xorm.io/xorm/caches"
|
||||||
|
@ -52,23 +53,24 @@ func CreateDBEngine() (engine *xorm.Engine, err error) {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Use Mysql if set
|
// Use Mysql if set
|
||||||
if config.DatabaseType.GetString() == "mysql" {
|
switch config.DatabaseType.GetString() {
|
||||||
|
case "mysql":
|
||||||
engine, err = initMysqlEngine()
|
engine, err = initMysqlEngine()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
} else if config.DatabaseType.GetString() == "postgres" {
|
case "postgres":
|
||||||
engine, err = initPostgresEngine()
|
engine, err = initPostgresEngine()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
} else if config.DatabaseType.GetString() == "sqlite" {
|
case "sqlite":
|
||||||
// Otherwise use sqlite
|
// Otherwise use sqlite
|
||||||
engine, err = initSqliteEngine()
|
engine, err = initSqliteEngine()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
} else {
|
default:
|
||||||
log.Fatalf("Unknown database type %s", config.DatabaseType.GetString())
|
log.Fatalf("Unknown database type %s", config.DatabaseType.GetString())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -90,7 +92,7 @@ func CreateDBEngine() (engine *xorm.Engine, err error) {
|
||||||
cacher := caches.NewLRUCacher(caches.NewMemoryStore(), config.CacheMaxElementSize.GetInt())
|
cacher := caches.NewLRUCacher(caches.NewMemoryStore(), config.CacheMaxElementSize.GetInt())
|
||||||
engine.SetDefaultCacher(cacher)
|
engine.SetDefaultCacher(cacher)
|
||||||
case "redis":
|
case "redis":
|
||||||
cacher := xrc.NewRedisCacher(config.RedisEnabled.GetString(), config.RedisPassword.GetString(), xrc.DEFAULT_EXPIRATION, engine.Logger())
|
cacher := xrc.NewRedisCacher(config.RedisHost.GetString(), config.RedisPassword.GetString(), xrc.DEFAULT_EXPIRATION, engine.Logger())
|
||||||
engine.SetDefaultCacher(cacher)
|
engine.SetDefaultCacher(cacher)
|
||||||
default:
|
default:
|
||||||
log.Info("Did not find a valid cache type. Caching disabled. Please refer to the docs for poosible cache types.")
|
log.Info("Did not find a valid cache type. Caching disabled. Please refer to the docs for poosible cache types.")
|
||||||
|
|
|
@ -2,12 +2,14 @@
|
||||||
title: testbucket1
|
title: testbucket1
|
||||||
list_id: 1
|
list_id: 1
|
||||||
created_by_id: 1
|
created_by_id: 1
|
||||||
|
limit: 9999999 # This bucket has a limit we will never exceed in the tests to make sure the logic allows for buckets with limits
|
||||||
created: 2020-04-18 21:13:52
|
created: 2020-04-18 21:13:52
|
||||||
updated: 2020-04-18 21:13:52
|
updated: 2020-04-18 21:13:52
|
||||||
- id: 2
|
- id: 2
|
||||||
title: testbucket2
|
title: testbucket2
|
||||||
list_id: 1
|
list_id: 1
|
||||||
created_by_id: 1
|
created_by_id: 1
|
||||||
|
limit: 3
|
||||||
created: 2020-04-18 21:13:52
|
created: 2020-04-18 21:13:52
|
||||||
updated: 2020-04-18 21:13:52
|
updated: 2020-04-18 21:13:52
|
||||||
- id: 3
|
- id: 3
|
||||||
|
|
|
@ -199,3 +199,13 @@
|
||||||
is_archived: 1
|
is_archived: 1
|
||||||
updated: 2018-12-02 15:13:12
|
updated: 2018-12-02 15:13:12
|
||||||
created: 2018-12-01 15:13:12
|
created: 2018-12-01 15:13:12
|
||||||
|
-
|
||||||
|
id: 23
|
||||||
|
title: Test23
|
||||||
|
description: Lorem Ipsum
|
||||||
|
identifier: test23
|
||||||
|
owner_id: 12
|
||||||
|
namespace_id: 17
|
||||||
|
is_favorite: true
|
||||||
|
updated: 2018-12-02 15:13:12
|
||||||
|
created: 2018-12-01 15:13:12
|
||||||
|
|
|
@ -82,3 +82,9 @@
|
||||||
is_archived: 1
|
is_archived: 1
|
||||||
updated: 2018-12-02 15:13:12
|
updated: 2018-12-02 15:13:12
|
||||||
created: 2018-12-01 15:13:12
|
created: 2018-12-01 15:13:12
|
||||||
|
- id: 17
|
||||||
|
title: testnamespace17
|
||||||
|
description: Lorem Ipsum
|
||||||
|
owner_id: 12
|
||||||
|
updated: 2018-12-02 15:13:12
|
||||||
|
created: 2018-12-01 15:13:12
|
||||||
|
|
|
@ -0,0 +1,6 @@
|
||||||
|
- id: 1
|
||||||
|
filters: '{"sort_by":null,"order_by":null,"filter_by":["start_date","end_date","due_date"],"filter_value":["2018-12-11T03:46:40+00:00","2018-12-13T11:20:01+00:00","2018-11-29T14:00:00+00:00"],"filter_comparator":["greater","less","greater"],"filter_concat":"","filter_include_nulls":false}'
|
||||||
|
title: testfilter1
|
||||||
|
owner_id: 1
|
||||||
|
updated: 2020-09-08 15:13:12
|
||||||
|
created: 2020-09-08 14:13:12
|
|
@ -8,6 +8,7 @@
|
||||||
created: 2018-12-01 01:12:04
|
created: 2018-12-01 01:12:04
|
||||||
updated: 2018-12-01 01:12:04
|
updated: 2018-12-01 01:12:04
|
||||||
bucket_id: 1
|
bucket_id: 1
|
||||||
|
is_favorite: true
|
||||||
- id: 2
|
- id: 2
|
||||||
title: 'task #2 done'
|
title: 'task #2 done'
|
||||||
done: true
|
done: true
|
||||||
|
@ -140,6 +141,7 @@
|
||||||
list_id: 6
|
list_id: 6
|
||||||
index: 1
|
index: 1
|
||||||
bucket_id: 6
|
bucket_id: 6
|
||||||
|
is_favorite: true
|
||||||
created: 2018-12-01 01:12:04
|
created: 2018-12-01 01:12:04
|
||||||
updated: 2018-12-01 01:12:04
|
updated: 2018-12-01 01:12:04
|
||||||
- id: 16
|
- id: 16
|
||||||
|
@ -315,6 +317,7 @@
|
||||||
list_id: 20
|
list_id: 20
|
||||||
index: 20
|
index: 20
|
||||||
bucket_id: 5
|
bucket_id: 5
|
||||||
|
is_favorite: true
|
||||||
created: 2018-12-01 01:12:04
|
created: 2018-12-01 01:12:04
|
||||||
updated: 2018-12-01 01:12:04
|
updated: 2018-12-01 01:12:04
|
||||||
- id: 35
|
- id: 35
|
||||||
|
|
|
@ -4,6 +4,7 @@
|
||||||
password: '$2a$14$dcadBoMBL9jQoOcZK8Fju.cy0Ptx2oZECkKLnaa8ekRoTFe1w7To.' # 1234
|
password: '$2a$14$dcadBoMBL9jQoOcZK8Fju.cy0Ptx2oZECkKLnaa8ekRoTFe1w7To.' # 1234
|
||||||
email: 'user1@example.com'
|
email: 'user1@example.com'
|
||||||
is_active: true
|
is_active: true
|
||||||
|
issuer: local
|
||||||
updated: 2018-12-02 15:13:12
|
updated: 2018-12-02 15:13:12
|
||||||
created: 2018-12-01 15:13:12
|
created: 2018-12-01 15:13:12
|
||||||
-
|
-
|
||||||
|
@ -11,6 +12,7 @@
|
||||||
username: 'user2'
|
username: 'user2'
|
||||||
password: '$2a$14$dcadBoMBL9jQoOcZK8Fju.cy0Ptx2oZECkKLnaa8ekRoTFe1w7To.' # 1234
|
password: '$2a$14$dcadBoMBL9jQoOcZK8Fju.cy0Ptx2oZECkKLnaa8ekRoTFe1w7To.' # 1234
|
||||||
email: 'user2@example.com'
|
email: 'user2@example.com'
|
||||||
|
issuer: local
|
||||||
updated: 2018-12-02 15:13:12
|
updated: 2018-12-02 15:13:12
|
||||||
created: 2018-12-01 15:13:12
|
created: 2018-12-01 15:13:12
|
||||||
-
|
-
|
||||||
|
@ -19,6 +21,7 @@
|
||||||
password: '$2a$14$dcadBoMBL9jQoOcZK8Fju.cy0Ptx2oZECkKLnaa8ekRoTFe1w7To.' # 1234
|
password: '$2a$14$dcadBoMBL9jQoOcZK8Fju.cy0Ptx2oZECkKLnaa8ekRoTFe1w7To.' # 1234
|
||||||
email: 'user3@example.com'
|
email: 'user3@example.com'
|
||||||
password_reset_token: passwordresettesttoken
|
password_reset_token: passwordresettesttoken
|
||||||
|
issuer: local
|
||||||
updated: 2018-12-02 15:13:12
|
updated: 2018-12-02 15:13:12
|
||||||
created: 2018-12-01 15:13:12
|
created: 2018-12-01 15:13:12
|
||||||
-
|
-
|
||||||
|
@ -27,6 +30,7 @@
|
||||||
password: '$2a$14$dcadBoMBL9jQoOcZK8Fju.cy0Ptx2oZECkKLnaa8ekRoTFe1w7To.' # 1234
|
password: '$2a$14$dcadBoMBL9jQoOcZK8Fju.cy0Ptx2oZECkKLnaa8ekRoTFe1w7To.' # 1234
|
||||||
email: 'user4@example.com'
|
email: 'user4@example.com'
|
||||||
email_confirm_token: tiepiQueed8ahc7zeeFe1eveiy4Ein8osooxegiephauph2Ael
|
email_confirm_token: tiepiQueed8ahc7zeeFe1eveiy4Ein8osooxegiephauph2Ael
|
||||||
|
issuer: local
|
||||||
updated: 2018-12-02 15:13:12
|
updated: 2018-12-02 15:13:12
|
||||||
created: 2018-12-01 15:13:12
|
created: 2018-12-01 15:13:12
|
||||||
-
|
-
|
||||||
|
@ -36,6 +40,7 @@
|
||||||
email: 'user5@example.com'
|
email: 'user5@example.com'
|
||||||
email_confirm_token: tiepiQueed8ahc7zeeFe1eveiy4Ein8osooxegiephauph2Ael
|
email_confirm_token: tiepiQueed8ahc7zeeFe1eveiy4Ein8osooxegiephauph2Ael
|
||||||
is_active: false
|
is_active: false
|
||||||
|
issuer: local
|
||||||
updated: 2018-12-02 15:13:12
|
updated: 2018-12-02 15:13:12
|
||||||
created: 2018-12-01 15:13:12
|
created: 2018-12-01 15:13:12
|
||||||
# This use is used to create a whole bunch of lists which are then shared directly with a user
|
# This use is used to create a whole bunch of lists which are then shared directly with a user
|
||||||
|
@ -44,6 +49,7 @@
|
||||||
password: '$2a$14$dcadBoMBL9jQoOcZK8Fju.cy0Ptx2oZECkKLnaa8ekRoTFe1w7To.' # 1234
|
password: '$2a$14$dcadBoMBL9jQoOcZK8Fju.cy0Ptx2oZECkKLnaa8ekRoTFe1w7To.' # 1234
|
||||||
email: 'user6@example.com'
|
email: 'user6@example.com'
|
||||||
is_active: true
|
is_active: true
|
||||||
|
issuer: local
|
||||||
updated: 2018-12-02 15:13:12
|
updated: 2018-12-02 15:13:12
|
||||||
created: 2018-12-01 15:13:12
|
created: 2018-12-01 15:13:12
|
||||||
- id: 7
|
- id: 7
|
||||||
|
@ -51,6 +57,7 @@
|
||||||
password: '$2a$14$dcadBoMBL9jQoOcZK8Fju.cy0Ptx2oZECkKLnaa8ekRoTFe1w7To.' # 1234
|
password: '$2a$14$dcadBoMBL9jQoOcZK8Fju.cy0Ptx2oZECkKLnaa8ekRoTFe1w7To.' # 1234
|
||||||
email: 'user7@example.com'
|
email: 'user7@example.com'
|
||||||
is_active: true
|
is_active: true
|
||||||
|
issuer: local
|
||||||
updated: 2018-12-02 15:13:12
|
updated: 2018-12-02 15:13:12
|
||||||
created: 2018-12-01 15:13:12
|
created: 2018-12-01 15:13:12
|
||||||
- id: 8
|
- id: 8
|
||||||
|
@ -58,6 +65,7 @@
|
||||||
password: '$2a$14$dcadBoMBL9jQoOcZK8Fju.cy0Ptx2oZECkKLnaa8ekRoTFe1w7To.' # 1234
|
password: '$2a$14$dcadBoMBL9jQoOcZK8Fju.cy0Ptx2oZECkKLnaa8ekRoTFe1w7To.' # 1234
|
||||||
email: 'user8@example.com'
|
email: 'user8@example.com'
|
||||||
is_active: true
|
is_active: true
|
||||||
|
issuer: local
|
||||||
updated: 2018-12-02 15:13:12
|
updated: 2018-12-02 15:13:12
|
||||||
created: 2018-12-01 15:13:12
|
created: 2018-12-01 15:13:12
|
||||||
- id: 9
|
- id: 9
|
||||||
|
@ -65,6 +73,7 @@
|
||||||
password: '$2a$14$dcadBoMBL9jQoOcZK8Fju.cy0Ptx2oZECkKLnaa8ekRoTFe1w7To.' # 1234
|
password: '$2a$14$dcadBoMBL9jQoOcZK8Fju.cy0Ptx2oZECkKLnaa8ekRoTFe1w7To.' # 1234
|
||||||
email: 'user9@example.com'
|
email: 'user9@example.com'
|
||||||
is_active: true
|
is_active: true
|
||||||
|
issuer: local
|
||||||
updated: 2018-12-02 15:13:12
|
updated: 2018-12-02 15:13:12
|
||||||
created: 2018-12-01 15:13:12
|
created: 2018-12-01 15:13:12
|
||||||
- id: 10
|
- id: 10
|
||||||
|
@ -72,6 +81,7 @@
|
||||||
password: '$2a$14$dcadBoMBL9jQoOcZK8Fju.cy0Ptx2oZECkKLnaa8ekRoTFe1w7To.' # 1234
|
password: '$2a$14$dcadBoMBL9jQoOcZK8Fju.cy0Ptx2oZECkKLnaa8ekRoTFe1w7To.' # 1234
|
||||||
email: 'user10@example.com'
|
email: 'user10@example.com'
|
||||||
is_active: true
|
is_active: true
|
||||||
|
issuer: local
|
||||||
updated: 2018-12-02 15:13:12
|
updated: 2018-12-02 15:13:12
|
||||||
created: 2018-12-01 15:13:12
|
created: 2018-12-01 15:13:12
|
||||||
- id: 11
|
- id: 11
|
||||||
|
@ -79,6 +89,7 @@
|
||||||
password: '$2a$14$dcadBoMBL9jQoOcZK8Fju.cy0Ptx2oZECkKLnaa8ekRoTFe1w7To.' # 1234
|
password: '$2a$14$dcadBoMBL9jQoOcZK8Fju.cy0Ptx2oZECkKLnaa8ekRoTFe1w7To.' # 1234
|
||||||
email: 'user11@example.com'
|
email: 'user11@example.com'
|
||||||
is_active: true
|
is_active: true
|
||||||
|
issuer: local
|
||||||
updated: 2018-12-02 15:13:12
|
updated: 2018-12-02 15:13:12
|
||||||
created: 2018-12-01 15:13:12
|
created: 2018-12-01 15:13:12
|
||||||
- id: 12
|
- id: 12
|
||||||
|
@ -86,6 +97,7 @@
|
||||||
password: '$2a$14$dcadBoMBL9jQoOcZK8Fju.cy0Ptx2oZECkKLnaa8ekRoTFe1w7To.' # 1234
|
password: '$2a$14$dcadBoMBL9jQoOcZK8Fju.cy0Ptx2oZECkKLnaa8ekRoTFe1w7To.' # 1234
|
||||||
email: 'user12@example.com'
|
email: 'user12@example.com'
|
||||||
is_active: true
|
is_active: true
|
||||||
|
issuer: local
|
||||||
updated: 2018-12-02 15:13:12
|
updated: 2018-12-02 15:13:12
|
||||||
created: 2018-12-01 15:13:12
|
created: 2018-12-01 15:13:12
|
||||||
- id: 13
|
- id: 13
|
||||||
|
@ -93,5 +105,15 @@
|
||||||
password: '$2a$14$dcadBoMBL9jQoOcZK8Fju.cy0Ptx2oZECkKLnaa8ekRoTFe1w7To.' # 1234
|
password: '$2a$14$dcadBoMBL9jQoOcZK8Fju.cy0Ptx2oZECkKLnaa8ekRoTFe1w7To.' # 1234
|
||||||
email: 'user14@example.com'
|
email: 'user14@example.com'
|
||||||
is_active: true
|
is_active: true
|
||||||
|
issuer: local
|
||||||
|
updated: 2018-12-02 15:13:12
|
||||||
|
created: 2018-12-01 15:13:12
|
||||||
|
- id: 14
|
||||||
|
username: 'user14'
|
||||||
|
password: '$2a$14$dcadBoMBL9jQoOcZK8Fju.cy0Ptx2oZECkKLnaa8ekRoTFe1w7To.' # 1234
|
||||||
|
email: 'user15@some.service.com'
|
||||||
|
is_active: true
|
||||||
|
issuer: 'https://some.service.com'
|
||||||
|
subject: '12345'
|
||||||
updated: 2018-12-02 15:13:12
|
updated: 2018-12-02 15:13:12
|
||||||
created: 2018-12-01 15:13:12
|
created: 2018-12-01 15:13:12
|
||||||
|
|
|
@ -18,9 +18,13 @@
|
||||||
package db
|
package db
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"testing"
|
||||||
|
|
||||||
"code.vikunja.io/api/pkg/config"
|
"code.vikunja.io/api/pkg/config"
|
||||||
"code.vikunja.io/api/pkg/log"
|
"code.vikunja.io/api/pkg/log"
|
||||||
"os"
|
"github.com/stretchr/testify/assert"
|
||||||
"xorm.io/core"
|
"xorm.io/core"
|
||||||
"xorm.io/xorm"
|
"xorm.io/xorm"
|
||||||
)
|
)
|
||||||
|
@ -69,3 +73,32 @@ func InitTestFixtures(tablenames ...string) (err error) {
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// AssertExists checks and asserts the existence of certain entries in the db
|
||||||
|
func AssertExists(t *testing.T, table string, values map[string]interface{}, custom bool) {
|
||||||
|
var exists bool
|
||||||
|
var err error
|
||||||
|
v := make(map[string]interface{})
|
||||||
|
// Postgres sometimes needs to build raw sql. Because it won't always need to do this and this isn't fun, it's a flag.
|
||||||
|
if custom {
|
||||||
|
//#nosec
|
||||||
|
sql := "SELECT * FROM " + table + " WHERE "
|
||||||
|
for col, val := range values {
|
||||||
|
sql += col + "=" + fmt.Sprintf("%v", val) + " AND "
|
||||||
|
}
|
||||||
|
sql = sql[:len(sql)-5]
|
||||||
|
exists, err = x.SQL(sql).Get(&v)
|
||||||
|
} else {
|
||||||
|
exists, err = x.Table(table).Where(values).Get(&v)
|
||||||
|
}
|
||||||
|
assert.NoError(t, err, fmt.Sprintf("Failed to assert entries exist in db, error was: %s", err))
|
||||||
|
assert.True(t, exists, fmt.Sprintf("Entries %v do not exist in table %s", values, table))
|
||||||
|
}
|
||||||
|
|
||||||
|
// AssertMissing checks and asserts the nonexiste nce of certain entries in the db
|
||||||
|
func AssertMissing(t *testing.T, table string, values map[string]interface{}) {
|
||||||
|
v := make(map[string]interface{})
|
||||||
|
exists, err := x.Table(table).Where(values).Exist(&v)
|
||||||
|
assert.NoError(t, err, fmt.Sprintf("Failed to assert entries don't exist in db, error was: %s", err))
|
||||||
|
assert.False(t, exists, fmt.Sprintf("Entries %v exist in table %s", values, table))
|
||||||
|
}
|
||||||
|
|
|
@ -18,12 +18,13 @@
|
||||||
package db
|
package db
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"code.vikunja.io/api/pkg/config"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"github.com/go-testfixtures/testfixtures/v3"
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"code.vikunja.io/api/pkg/config"
|
||||||
|
"github.com/go-testfixtures/testfixtures/v3"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
"xorm.io/xorm/schemas"
|
"xorm.io/xorm/schemas"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
|
@ -29,7 +29,7 @@ func (err ErrFileDoesNotExist) Error() string {
|
||||||
return fmt.Sprintf("file %d does not exist", err.FileID)
|
return fmt.Sprintf("file %d does not exist", err.FileID)
|
||||||
}
|
}
|
||||||
|
|
||||||
//IsErrFileDoesNotExist checks if an error is ErrFileDoesNotExist
|
// IsErrFileDoesNotExist checks if an error is ErrFileDoesNotExist
|
||||||
func IsErrFileDoesNotExist(err error) bool {
|
func IsErrFileDoesNotExist(err error) bool {
|
||||||
_, ok := err.(ErrFileDoesNotExist)
|
_, ok := err.(ErrFileDoesNotExist)
|
||||||
return ok
|
return ok
|
||||||
|
@ -45,7 +45,7 @@ func (err ErrFileIsTooLarge) Error() string {
|
||||||
return fmt.Sprintf("file is too large [Size: %d]", err.Size)
|
return fmt.Sprintf("file is too large [Size: %d]", err.Size)
|
||||||
}
|
}
|
||||||
|
|
||||||
//IsErrFileIsTooLarge checks if an error is ErrFileIsTooLarge
|
// IsErrFileIsTooLarge checks if an error is ErrFileIsTooLarge
|
||||||
func IsErrFileIsTooLarge(err error) bool {
|
func IsErrFileIsTooLarge(err error) bool {
|
||||||
_, ok := err.(ErrFileIsTooLarge)
|
_, ok := err.(ErrFileIsTooLarge)
|
||||||
return ok
|
return ok
|
||||||
|
@ -62,7 +62,7 @@ func (err ErrFileIsNotUnsplashFile) Error() string {
|
||||||
return fmt.Sprintf("file was not downloaded from unsplash [FileID: %d]", err.FileID)
|
return fmt.Sprintf("file was not downloaded from unsplash [FileID: %d]", err.FileID)
|
||||||
}
|
}
|
||||||
|
|
||||||
//IsErrFileIsNotUnsplashFile checks if an error is ErrFileIsNotUnsplashFile
|
// IsErrFileIsNotUnsplashFile checks if an error is ErrFileIsNotUnsplashFile
|
||||||
func IsErrFileIsNotUnsplashFile(err error) bool {
|
func IsErrFileIsNotUnsplashFile(err error) bool {
|
||||||
_, ok := err.(ErrFileIsNotUnsplashFile)
|
_, ok := err.(ErrFileIsNotUnsplashFile)
|
||||||
return ok
|
return ok
|
||||||
|
|
|
@ -17,13 +17,14 @@
|
||||||
package files
|
package files
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"os"
|
||||||
|
"testing"
|
||||||
|
|
||||||
"code.vikunja.io/api/pkg/config"
|
"code.vikunja.io/api/pkg/config"
|
||||||
"code.vikunja.io/api/pkg/db"
|
"code.vikunja.io/api/pkg/db"
|
||||||
"code.vikunja.io/api/pkg/log"
|
"code.vikunja.io/api/pkg/log"
|
||||||
"github.com/spf13/afero"
|
"github.com/spf13/afero"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"os"
|
|
||||||
"testing"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// This file handles storing and retrieving a file for different backends
|
// This file handles storing and retrieving a file for different backends
|
||||||
|
@ -49,7 +50,7 @@ func initFixtures(t *testing.T) {
|
||||||
InitTestFileFixtures(t)
|
InitTestFileFixtures(t)
|
||||||
}
|
}
|
||||||
|
|
||||||
//InitTestFileFixtures initializes file fixtures
|
// InitTestFileFixtures initializes file fixtures
|
||||||
func InitTestFileFixtures(t *testing.T) {
|
func InitTestFileFixtures(t *testing.T) {
|
||||||
// Init fixture files
|
// Init fixture files
|
||||||
filename := config.FilesBasePath.GetString() + "/1"
|
filename := config.FilesBasePath.GetString() + "/1"
|
||||||
|
|
|
@ -17,13 +17,14 @@
|
||||||
package files
|
package files
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"io"
|
||||||
|
"strconv"
|
||||||
|
"time"
|
||||||
|
|
||||||
"code.vikunja.io/api/pkg/config"
|
"code.vikunja.io/api/pkg/config"
|
||||||
"code.vikunja.io/web"
|
"code.vikunja.io/web"
|
||||||
"github.com/c2h5oh/datasize"
|
"github.com/c2h5oh/datasize"
|
||||||
"github.com/spf13/afero"
|
"github.com/spf13/afero"
|
||||||
"io"
|
|
||||||
"strconv"
|
|
||||||
"time"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// File holds all information about a file
|
// File holds all information about a file
|
||||||
|
@ -67,7 +68,12 @@ func (f *File) LoadFileMetaByID() (err error) {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create creates a new file from an FileHeader
|
// Create creates a new file from an FileHeader
|
||||||
func Create(f io.ReadCloser, realname string, realsize uint64, a web.Auth) (file *File, err error) {
|
func Create(f io.Reader, realname string, realsize uint64, a web.Auth) (file *File, err error) {
|
||||||
|
return CreateWithMime(f, realname, realsize, a, "")
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateWithMime creates a new file from an FileHeader and sets its mime type
|
||||||
|
func CreateWithMime(f io.Reader, realname string, realsize uint64, a web.Auth, mime string) (file *File, err error) {
|
||||||
|
|
||||||
// Get and parse the configured file size
|
// Get and parse the configured file size
|
||||||
var maxSize datasize.ByteSize
|
var maxSize datasize.ByteSize
|
||||||
|
@ -84,6 +90,7 @@ func Create(f io.ReadCloser, realname string, realsize uint64, a web.Auth) (file
|
||||||
Name: realname,
|
Name: realname,
|
||||||
Size: realsize,
|
Size: realsize,
|
||||||
CreatedByID: a.GetID(),
|
CreatedByID: a.GetID(),
|
||||||
|
Mime: mime,
|
||||||
}
|
}
|
||||||
|
|
||||||
_, err = x.Insert(file)
|
_, err = x.Insert(file)
|
||||||
|
@ -111,6 +118,6 @@ func (f *File) Delete() (err error) {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Save saves a file to storage
|
// Save saves a file to storage
|
||||||
func (f *File) Save(fcontent io.ReadCloser) error {
|
func (f *File) Save(fcontent io.Reader) error {
|
||||||
return afs.WriteReader(f.getFileName(), fcontent)
|
return afs.WriteReader(f.getFileName(), fcontent)
|
||||||
}
|
}
|
||||||
|
|
|
@ -18,10 +18,11 @@
|
||||||
package files
|
package files
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
"io"
|
"io"
|
||||||
"os"
|
"os"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
)
|
)
|
||||||
|
|
||||||
type testfile struct {
|
type testfile struct {
|
||||||
|
|
|
@ -23,6 +23,7 @@ import (
|
||||||
"code.vikunja.io/api/pkg/mail"
|
"code.vikunja.io/api/pkg/mail"
|
||||||
"code.vikunja.io/api/pkg/migration"
|
"code.vikunja.io/api/pkg/migration"
|
||||||
"code.vikunja.io/api/pkg/models"
|
"code.vikunja.io/api/pkg/models"
|
||||||
|
"code.vikunja.io/api/pkg/modules/keyvalue"
|
||||||
migrator "code.vikunja.io/api/pkg/modules/migration"
|
migrator "code.vikunja.io/api/pkg/modules/migration"
|
||||||
"code.vikunja.io/api/pkg/red"
|
"code.vikunja.io/api/pkg/red"
|
||||||
"code.vikunja.io/api/pkg/user"
|
"code.vikunja.io/api/pkg/user"
|
||||||
|
@ -36,6 +37,9 @@ func LightInit() {
|
||||||
// Init redis
|
// Init redis
|
||||||
red.InitRedis()
|
red.InitRedis()
|
||||||
|
|
||||||
|
// Init keyvalue store
|
||||||
|
keyvalue.InitStorage()
|
||||||
|
|
||||||
// Set logger
|
// Set logger
|
||||||
log.InitLogger()
|
log.InitLogger()
|
||||||
}
|
}
|
||||||
|
|
|
@ -18,11 +18,12 @@
|
||||||
package integrations
|
package integrations
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"net/url"
|
||||||
|
"testing"
|
||||||
|
|
||||||
"code.vikunja.io/api/pkg/models"
|
"code.vikunja.io/api/pkg/models"
|
||||||
"code.vikunja.io/web/handler"
|
"code.vikunja.io/web/handler"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"net/url"
|
|
||||||
"testing"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// This tests the following behaviour:
|
// This tests the following behaviour:
|
||||||
|
|
|
@ -17,24 +17,25 @@
|
||||||
package integrations
|
package integrations
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"code.vikunja.io/api/pkg/config"
|
|
||||||
"code.vikunja.io/api/pkg/db"
|
|
||||||
"code.vikunja.io/api/pkg/files"
|
|
||||||
"code.vikunja.io/api/pkg/models"
|
|
||||||
"code.vikunja.io/api/pkg/routes"
|
|
||||||
v1 "code.vikunja.io/api/pkg/routes/api/v1"
|
|
||||||
"code.vikunja.io/api/pkg/user"
|
|
||||||
"code.vikunja.io/web"
|
|
||||||
"code.vikunja.io/web/handler"
|
|
||||||
"github.com/dgrijalva/jwt-go"
|
|
||||||
"github.com/labstack/echo/v4"
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
"net/http"
|
"net/http"
|
||||||
"net/http/httptest"
|
"net/http/httptest"
|
||||||
"net/url"
|
"net/url"
|
||||||
"os"
|
"os"
|
||||||
"strings"
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"code.vikunja.io/api/pkg/config"
|
||||||
|
"code.vikunja.io/api/pkg/db"
|
||||||
|
"code.vikunja.io/api/pkg/files"
|
||||||
|
"code.vikunja.io/api/pkg/models"
|
||||||
|
"code.vikunja.io/api/pkg/modules/auth"
|
||||||
|
"code.vikunja.io/api/pkg/routes"
|
||||||
|
"code.vikunja.io/api/pkg/user"
|
||||||
|
"code.vikunja.io/web"
|
||||||
|
"code.vikunja.io/web/handler"
|
||||||
|
"github.com/dgrijalva/jwt-go"
|
||||||
|
"github.com/labstack/echo/v4"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
)
|
)
|
||||||
|
|
||||||
// These are the test users, the same way they are in the test database
|
// These are the test users, the same way they are in the test database
|
||||||
|
@ -118,7 +119,7 @@ func newTestRequest(t *testing.T, method string, handler func(ctx echo.Context)
|
||||||
|
|
||||||
func addUserTokenToContext(t *testing.T, user *user.User, c echo.Context) {
|
func addUserTokenToContext(t *testing.T, user *user.User, c echo.Context) {
|
||||||
// Get the token as a string
|
// Get the token as a string
|
||||||
token, err := v1.NewUserJWTAuthtoken(user)
|
token, err := auth.NewUserJWTAuthtoken(user)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
// We send the string token through the parsing function to get a valid jwt.Token
|
// We send the string token through the parsing function to get a valid jwt.Token
|
||||||
tken, err := jwt.Parse(token, func(t *jwt.Token) (interface{}, error) {
|
tken, err := jwt.Parse(token, func(t *jwt.Token) (interface{}, error) {
|
||||||
|
@ -130,7 +131,7 @@ func addUserTokenToContext(t *testing.T, user *user.User, c echo.Context) {
|
||||||
|
|
||||||
func addLinkShareTokenToContext(t *testing.T, share *models.LinkSharing, c echo.Context) {
|
func addLinkShareTokenToContext(t *testing.T, share *models.LinkSharing, c echo.Context) {
|
||||||
// Get the token as a string
|
// Get the token as a string
|
||||||
token, err := v1.NewLinkShareJWTAuthtoken(share)
|
token, err := auth.NewLinkShareJWTAuthtoken(share)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
// We send the string token through the parsing function to get a valid jwt.Token
|
// We send the string token through the parsing function to get a valid jwt.Token
|
||||||
tken, err := jwt.Parse(token, func(t *jwt.Token) (interface{}, error) {
|
tken, err := jwt.Parse(token, func(t *jwt.Token) (interface{}, error) {
|
||||||
|
|
|
@ -17,11 +17,12 @@
|
||||||
package integrations
|
package integrations
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
"code.vikunja.io/api/pkg/models"
|
"code.vikunja.io/api/pkg/models"
|
||||||
"code.vikunja.io/web/handler"
|
"code.vikunja.io/web/handler"
|
||||||
"github.com/labstack/echo/v4"
|
"github.com/labstack/echo/v4"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"testing"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestBucket(t *testing.T) {
|
func TestBucket(t *testing.T) {
|
||||||
|
|
|
@ -17,12 +17,13 @@
|
||||||
package integrations
|
package integrations
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"net/url"
|
||||||
|
"testing"
|
||||||
|
|
||||||
"code.vikunja.io/api/pkg/models"
|
"code.vikunja.io/api/pkg/models"
|
||||||
"code.vikunja.io/web/handler"
|
"code.vikunja.io/web/handler"
|
||||||
"github.com/labstack/echo/v4"
|
"github.com/labstack/echo/v4"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"net/url"
|
|
||||||
"testing"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestLinkSharing(t *testing.T) {
|
func TestLinkSharing(t *testing.T) {
|
||||||
|
|
|
@ -17,12 +17,13 @@
|
||||||
package integrations
|
package integrations
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"net/url"
|
||||||
|
"testing"
|
||||||
|
|
||||||
"code.vikunja.io/api/pkg/models"
|
"code.vikunja.io/api/pkg/models"
|
||||||
"code.vikunja.io/web/handler"
|
"code.vikunja.io/web/handler"
|
||||||
"github.com/labstack/echo/v4"
|
"github.com/labstack/echo/v4"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"net/url"
|
|
||||||
"testing"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestList(t *testing.T) {
|
func TestList(t *testing.T) {
|
||||||
|
@ -72,9 +73,10 @@ func TestList(t *testing.T) {
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `"title":"Test1"`)
|
assert.Contains(t, rec.Body.String(), `"title":"Test1"`)
|
||||||
assert.NotContains(t, rec.Body.String(), `"title":"Test2"`)
|
assert.NotContains(t, rec.Body.String(), `"title":"Test2"`)
|
||||||
assert.Contains(t, rec.Body.String(), `"owner":{"id":1,"username":"user1",`)
|
assert.Contains(t, rec.Body.String(), `"owner":{"id":1,"name":"","username":"user1",`)
|
||||||
assert.NotContains(t, rec.Body.String(), `"owner":{"id":2,"username":"user2",`)
|
assert.NotContains(t, rec.Body.String(), `"owner":{"id":2,"name":"","username":"user2",`)
|
||||||
assert.NotContains(t, rec.Body.String(), `"tasks":`)
|
assert.NotContains(t, rec.Body.String(), `"tasks":`)
|
||||||
|
assert.Equal(t, "2", rec.Result().Header.Get("x-max-right")) // User 1 is owner so they should have admin rights.
|
||||||
})
|
})
|
||||||
t.Run("Nonexisting", func(t *testing.T) {
|
t.Run("Nonexisting", func(t *testing.T) {
|
||||||
_, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "9999"})
|
_, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "9999"})
|
||||||
|
@ -84,72 +86,85 @@ func TestList(t *testing.T) {
|
||||||
t.Run("Rights check", func(t *testing.T) {
|
t.Run("Rights check", func(t *testing.T) {
|
||||||
t.Run("Forbidden", func(t *testing.T) {
|
t.Run("Forbidden", func(t *testing.T) {
|
||||||
// Owned by user13
|
// Owned by user13
|
||||||
_, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "20"})
|
rec, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "20"})
|
||||||
assert.Error(t, err)
|
assert.Error(t, err)
|
||||||
assert.Contains(t, err.(*echo.HTTPError).Message, `You don't have the right to see this`)
|
assert.Contains(t, err.(*echo.HTTPError).Message, `You don't have the right to see this`)
|
||||||
|
assert.Empty(t, rec.Result().Header.Get("x-max-rights"))
|
||||||
})
|
})
|
||||||
t.Run("Shared Via Team readonly", func(t *testing.T) {
|
t.Run("Shared Via Team readonly", func(t *testing.T) {
|
||||||
rec, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "6"})
|
rec, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "6"})
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `"title":"Test6"`)
|
assert.Contains(t, rec.Body.String(), `"title":"Test6"`)
|
||||||
|
assert.Equal(t, "0", rec.Result().Header.Get("x-max-right"))
|
||||||
})
|
})
|
||||||
t.Run("Shared Via Team write", func(t *testing.T) {
|
t.Run("Shared Via Team write", func(t *testing.T) {
|
||||||
rec, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "7"})
|
rec, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "7"})
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `"title":"Test7"`)
|
assert.Contains(t, rec.Body.String(), `"title":"Test7"`)
|
||||||
|
assert.Equal(t, "1", rec.Result().Header.Get("x-max-right"))
|
||||||
})
|
})
|
||||||
t.Run("Shared Via Team admin", func(t *testing.T) {
|
t.Run("Shared Via Team admin", func(t *testing.T) {
|
||||||
rec, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "8"})
|
rec, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "8"})
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `"title":"Test8"`)
|
assert.Contains(t, rec.Body.String(), `"title":"Test8"`)
|
||||||
|
assert.Equal(t, "2", rec.Result().Header.Get("x-max-right"))
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("Shared Via User readonly", func(t *testing.T) {
|
t.Run("Shared Via User readonly", func(t *testing.T) {
|
||||||
rec, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "9"})
|
rec, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "9"})
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `"title":"Test9"`)
|
assert.Contains(t, rec.Body.String(), `"title":"Test9"`)
|
||||||
|
assert.Equal(t, "0", rec.Result().Header.Get("x-max-right"))
|
||||||
})
|
})
|
||||||
t.Run("Shared Via User write", func(t *testing.T) {
|
t.Run("Shared Via User write", func(t *testing.T) {
|
||||||
rec, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "10"})
|
rec, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "10"})
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `"title":"Test10"`)
|
assert.Contains(t, rec.Body.String(), `"title":"Test10"`)
|
||||||
|
assert.Equal(t, "1", rec.Result().Header.Get("x-max-right"))
|
||||||
})
|
})
|
||||||
t.Run("Shared Via User admin", func(t *testing.T) {
|
t.Run("Shared Via User admin", func(t *testing.T) {
|
||||||
rec, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "11"})
|
rec, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "11"})
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `"title":"Test11"`)
|
assert.Contains(t, rec.Body.String(), `"title":"Test11"`)
|
||||||
|
assert.Equal(t, "2", rec.Result().Header.Get("x-max-right"))
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("Shared Via NamespaceTeam readonly", func(t *testing.T) {
|
t.Run("Shared Via NamespaceTeam readonly", func(t *testing.T) {
|
||||||
rec, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "12"})
|
rec, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "12"})
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `"title":"Test12"`)
|
assert.Contains(t, rec.Body.String(), `"title":"Test12"`)
|
||||||
|
assert.Equal(t, "0", rec.Result().Header.Get("x-max-right"))
|
||||||
})
|
})
|
||||||
t.Run("Shared Via NamespaceTeam write", func(t *testing.T) {
|
t.Run("Shared Via NamespaceTeam write", func(t *testing.T) {
|
||||||
rec, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "13"})
|
rec, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "13"})
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `"title":"Test13"`)
|
assert.Contains(t, rec.Body.String(), `"title":"Test13"`)
|
||||||
|
assert.Equal(t, "1", rec.Result().Header.Get("x-max-right"))
|
||||||
})
|
})
|
||||||
t.Run("Shared Via NamespaceTeam admin", func(t *testing.T) {
|
t.Run("Shared Via NamespaceTeam admin", func(t *testing.T) {
|
||||||
rec, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "14"})
|
rec, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "14"})
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `"title":"Test14"`)
|
assert.Contains(t, rec.Body.String(), `"title":"Test14"`)
|
||||||
|
assert.Equal(t, "2", rec.Result().Header.Get("x-max-right"))
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("Shared Via NamespaceUser readonly", func(t *testing.T) {
|
t.Run("Shared Via NamespaceUser readonly", func(t *testing.T) {
|
||||||
rec, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "15"})
|
rec, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "15"})
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `"title":"Test15"`)
|
assert.Contains(t, rec.Body.String(), `"title":"Test15"`)
|
||||||
|
assert.Equal(t, "0", rec.Result().Header.Get("x-max-right"))
|
||||||
})
|
})
|
||||||
t.Run("Shared Via NamespaceUser write", func(t *testing.T) {
|
t.Run("Shared Via NamespaceUser write", func(t *testing.T) {
|
||||||
rec, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "16"})
|
rec, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "16"})
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `"title":"Test16"`)
|
assert.Contains(t, rec.Body.String(), `"title":"Test16"`)
|
||||||
|
assert.Equal(t, "1", rec.Result().Header.Get("x-max-right"))
|
||||||
})
|
})
|
||||||
t.Run("Shared Via NamespaceUser admin", func(t *testing.T) {
|
t.Run("Shared Via NamespaceUser admin", func(t *testing.T) {
|
||||||
rec, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "17"})
|
rec, err := testHandler.testReadOneWithUser(nil, map[string]string{"list": "17"})
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `"title":"Test17"`)
|
assert.Contains(t, rec.Body.String(), `"title":"Test17"`)
|
||||||
|
assert.Equal(t, "2", rec.Result().Header.Get("x-max-right"))
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
|
@ -17,11 +17,12 @@
|
||||||
package integrations
|
package integrations
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"net/http"
|
||||||
|
"testing"
|
||||||
|
|
||||||
apiv1 "code.vikunja.io/api/pkg/routes/api/v1"
|
apiv1 "code.vikunja.io/api/pkg/routes/api/v1"
|
||||||
"code.vikunja.io/api/pkg/user"
|
"code.vikunja.io/api/pkg/user"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"net/http"
|
|
||||||
"testing"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestLogin(t *testing.T) {
|
func TestLogin(t *testing.T) {
|
||||||
|
|
|
@ -17,11 +17,12 @@
|
||||||
package integrations
|
package integrations
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"net/http"
|
||||||
|
"testing"
|
||||||
|
|
||||||
apiv1 "code.vikunja.io/api/pkg/routes/api/v1"
|
apiv1 "code.vikunja.io/api/pkg/routes/api/v1"
|
||||||
"code.vikunja.io/api/pkg/user"
|
"code.vikunja.io/api/pkg/user"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"net/http"
|
|
||||||
"testing"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestRegister(t *testing.T) {
|
func TestRegister(t *testing.T) {
|
||||||
|
|
|
@ -18,11 +18,12 @@
|
||||||
package integrations
|
package integrations
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"net/url"
|
||||||
|
"testing"
|
||||||
|
|
||||||
"code.vikunja.io/api/pkg/models"
|
"code.vikunja.io/api/pkg/models"
|
||||||
"code.vikunja.io/web/handler"
|
"code.vikunja.io/web/handler"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"net/url"
|
|
||||||
"testing"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestTaskCollection(t *testing.T) {
|
func TestTaskCollection(t *testing.T) {
|
||||||
|
@ -113,49 +114,49 @@ func TestTaskCollection(t *testing.T) {
|
||||||
t.Run("by priority", func(t *testing.T) {
|
t.Run("by priority", func(t *testing.T) {
|
||||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"priority"}}, urlParams)
|
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"priority"}}, urlParams)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `{"id":33,"title":"task #33 with percent done","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0.5,"identifier":"test1-17","index":17,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":1,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":4,"title":"task #4 low prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":1,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-4","index":4,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":3,"title":"task #3 high prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":100,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-3","index":3,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
assert.Contains(t, rec.Body.String(), `{"id":33,"title":"task #33 with percent done","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0.5,"identifier":"test1-17","index":17,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":1,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":4,"title":"task #4 low prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":1,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-4","index":4,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":3,"title":"task #3 high 
prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":100,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-3","index":3,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
||||||
})
|
})
|
||||||
t.Run("by priority desc", func(t *testing.T) {
|
t.Run("by priority desc", func(t *testing.T) {
|
||||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"priority"}, "order_by": []string{"desc"}}, urlParams)
|
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"priority"}, "order_by": []string{"desc"}}, urlParams)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `[{"id":3,"title":"task #3 high prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":100,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-3","index":3,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":4,"title":"task #4 low prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":1`)
|
assert.Contains(t, rec.Body.String(), `[{"id":3,"title":"task #3 high prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":100,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-3","index":3,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":4,"title":"task #4 low prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":1`)
|
||||||
})
|
})
|
||||||
t.Run("by priority asc", func(t *testing.T) {
|
t.Run("by priority asc", func(t *testing.T) {
|
||||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"priority"}, "order_by": []string{"asc"}}, urlParams)
|
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"priority"}, "order_by": []string{"asc"}}, urlParams)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `{"id":33,"title":"task #33 with percent done","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0.5,"identifier":"test1-17","index":17,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":1,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":4,"title":"task #4 low prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":1,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-4","index":4,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":3,"title":"task #3 high prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":100,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-3","index":3,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
assert.Contains(t, rec.Body.String(), `{"id":33,"title":"task #33 with percent done","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0.5,"identifier":"test1-17","index":17,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":1,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":4,"title":"task #4 low prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":1,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-4","index":4,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":3,"title":"task #3 high 
prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":100,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-3","index":3,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
||||||
})
|
})
|
||||||
// should equal duedate asc
|
// should equal duedate asc
|
||||||
t.Run("by due_date", func(t *testing.T) {
|
t.Run("by due_date", func(t *testing.T) {
|
||||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"due_date"}}, urlParams)
|
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"due_date"}}, urlParams)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `{"id":6,"title":"task #6 lower due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-11-30T22:25:24Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-6","index":6,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":3,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":5,"title":"task #5 higher due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-12-01T03:58:44Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-5","index":5,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
assert.Contains(t, rec.Body.String(), `{"id":6,"title":"task #6 lower due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-11-30T22:25:24Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-6","index":6,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":3,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":5,"title":"task #5 higher due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-12-01T03:58:44Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-5","index":5,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
||||||
})
|
})
|
||||||
t.Run("by duedate desc", func(t *testing.T) {
|
t.Run("by duedate desc", func(t *testing.T) {
|
||||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"due_date"}, "order_by": []string{"desc"}}, urlParams)
|
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"due_date"}, "order_by": []string{"desc"}}, urlParams)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `[{"id":5,"title":"task #5 higher due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-12-01T03:58:44Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-5","index":5,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":6,"title":"task #6 lower due date`)
|
assert.Contains(t, rec.Body.String(), `[{"id":5,"title":"task #5 higher due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-12-01T03:58:44Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-5","index":5,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":6,"title":"task #6 lower due date`)
|
||||||
})
|
})
|
||||||
// Due date without unix suffix
|
// Due date without unix suffix
|
||||||
t.Run("by duedate asc without suffix", func(t *testing.T) {
|
t.Run("by duedate asc without suffix", func(t *testing.T) {
|
||||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"due_date"}, "order_by": []string{"asc"}}, urlParams)
|
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"due_date"}, "order_by": []string{"asc"}}, urlParams)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `{"id":6,"title":"task #6 lower due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-11-30T22:25:24Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-6","index":6,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":3,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":5,"title":"task #5 higher due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-12-01T03:58:44Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-5","index":5,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
assert.Contains(t, rec.Body.String(), `{"id":6,"title":"task #6 lower due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-11-30T22:25:24Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-6","index":6,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":3,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":5,"title":"task #5 higher due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-12-01T03:58:44Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-5","index":5,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
||||||
})
|
})
|
||||||
t.Run("by due_date without suffix", func(t *testing.T) {
|
t.Run("by due_date without suffix", func(t *testing.T) {
|
||||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"due_date"}}, urlParams)
|
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"due_date"}}, urlParams)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `{"id":6,"title":"task #6 lower due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-11-30T22:25:24Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-6","index":6,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":3,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":5,"title":"task #5 higher due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-12-01T03:58:44Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-5","index":5,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
assert.Contains(t, rec.Body.String(), `{"id":6,"title":"task #6 lower due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-11-30T22:25:24Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-6","index":6,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":3,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":5,"title":"task #5 higher due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-12-01T03:58:44Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-5","index":5,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
||||||
})
|
})
|
||||||
t.Run("by duedate desc without suffix", func(t *testing.T) {
|
t.Run("by duedate desc without suffix", func(t *testing.T) {
|
||||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"due_date"}, "order_by": []string{"desc"}}, urlParams)
|
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"due_date"}, "order_by": []string{"desc"}}, urlParams)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `[{"id":5,"title":"task #5 higher due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-12-01T03:58:44Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-5","index":5,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":6,"title":"task #6 lower due date`)
|
assert.Contains(t, rec.Body.String(), `[{"id":5,"title":"task #5 higher due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-12-01T03:58:44Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-5","index":5,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":6,"title":"task #6 lower due date`)
|
||||||
})
|
})
|
||||||
t.Run("by duedate asc", func(t *testing.T) {
|
t.Run("by duedate asc", func(t *testing.T) {
|
||||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"due_date"}, "order_by": []string{"asc"}}, urlParams)
|
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"due_date"}, "order_by": []string{"asc"}}, urlParams)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `{"id":6,"title":"task #6 lower due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-11-30T22:25:24Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-6","index":6,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":3,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":5,"title":"task #5 higher due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-12-01T03:58:44Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-5","index":5,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
assert.Contains(t, rec.Body.String(), `{"id":6,"title":"task #6 lower due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-11-30T22:25:24Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-6","index":6,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":3,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":5,"title":"task #5 higher due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-12-01T03:58:44Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-5","index":5,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
||||||
})
|
})
|
||||||
t.Run("invalid sort parameter", func(t *testing.T) {
|
t.Run("invalid sort parameter", func(t *testing.T) {
|
||||||
_, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"loremipsum"}}, urlParams)
|
_, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"loremipsum"}}, urlParams)
|
||||||
|
@ -171,10 +172,10 @@ func TestTaskCollection(t *testing.T) {
|
||||||
// Invalid parameter should not sort at all
|
// Invalid parameter should not sort at all
|
||||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort": []string{"loremipsum"}}, urlParams)
|
rec, err := testHandler.testReadAllWithUser(url.Values{"sort": []string{"loremipsum"}}, urlParams)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.NotContains(t, rec.Body.String(), `[{"id":3,"title":"task #3 high prio","description":"","done":false,"due_date":0,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":100,"start_date":0,"end_date":0,"assignees":null,"labels":null,"hex_color":"","created":1543626724,"updated":1543626724,"created_by":{"id":0,"username":"","email":"","created":0,"updated":0}},{"id":4,"title":"task #4 low prio","description":"","done":false,"due_date":0,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":1`)
|
assert.NotContains(t, rec.Body.String(), `[{"id":3,"title":"task #3 high prio","description":"","done":false,"due_date":0,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":100,"start_date":0,"end_date":0,"assignees":null,"labels":null,"hex_color":"","created":1543626724,"updated":1543626724,"created_by":{"id":0,"name":"","username":"","email":"","created":0,"updated":0}},{"id":4,"title":"task #4 low prio","description":"","done":false,"due_date":0,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":1`)
|
||||||
assert.NotContains(t, rec.Body.String(), `{"id":4,"title":"task #4 low prio","description":"","done":false,"due_date":0,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":1,"start_date":0,"end_date":0,"assignees":null,"labels":null,"hex_color":"","created":1543626724,"updated":1543626724,"created_by":{"id":0,"username":"","email":"","created":0,"updated":0}},{"id":3,"title":"task #3 high prio","description":"","done":false,"due_date":0,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":100,"start_date":0,"end_date":0,"assignees":null,"labels":null,"created":1543626724,"updated":1543626724,"created_by":{"id":0,"username":"","email":"","created":0,"updated":0}}]`)
|
assert.NotContains(t, rec.Body.String(), `{"id":4,"title":"task #4 low prio","description":"","done":false,"due_date":0,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":1,"start_date":0,"end_date":0,"assignees":null,"labels":null,"hex_color":"","created":1543626724,"updated":1543626724,"created_by":{"id":0,"name":"","username":"","email":"","created":0,"updated":0}},{"id":3,"title":"task #3 high prio","description":"","done":false,"due_date":0,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":100,"start_date":0,"end_date":0,"assignees":null,"labels":null,"created":1543626724,"updated":1543626724,"created_by":{"id":0,"name":"","username":"","email":"","created":0,"updated":0}}]`)
|
||||||
assert.NotContains(t, rec.Body.String(), `[{"id":5,"title":"task #5 higher due date","description":"","done":false,"due_date":1543636724,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":0,"end_date":0,"assignees":null,"labels":null,"hex_color":"","created":1543626724,"updated":1543626724,"created_by":{"id":0,"username":"","email":"","created":0,"updated":0}},{"id":6,"title":"task #6 lower due date"`)
|
assert.NotContains(t, rec.Body.String(), `[{"id":5,"title":"task #5 higher due date","description":"","done":false,"due_date":1543636724,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":0,"end_date":0,"assignees":null,"labels":null,"hex_color":"","created":1543626724,"updated":1543626724,"created_by":{"id":0,"name":"","username":"","email":"","created":0,"updated":0}},{"id":6,"title":"task #6 lower due date"`)
|
||||||
assert.NotContains(t, rec.Body.String(), `{"id":6,"title":"task #6 lower due date","description":"","done":false,"due_date":1543616724,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":0,"end_date":0,"assignees":null,"labels":null,"hex_color":"","created":1543626724,"updated":1543626724,"created_by":{"id":0,"username":"","email":"","created":0,"updated":0}},{"id":5,"title":"task #5 higher due date","description":"","done":false,"due_date":1543636724,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":0,"end_date":0,"assignees":null,"labels":null,"created":1543626724,"updated":1543626724,"created_by":{"id":0,"username":"","email":"","created":0,"updated":0}}]`)
|
assert.NotContains(t, rec.Body.String(), `{"id":6,"title":"task #6 lower due date","description":"","done":false,"due_date":1543616724,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":0,"end_date":0,"assignees":null,"labels":null,"hex_color":"","created":1543626724,"updated":1543626724,"created_by":{"id":0,"name":"","username":"","email":"","created":0,"updated":0}},{"id":5,"title":"task #5 higher due date","description":"","done":false,"due_date":1543636724,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":0,"end_date":0,"assignees":null,"labels":null,"created":1543626724,"updated":1543626724,"created_by":{"id":0,"name":"","username":"","email":"","created":0,"updated":0}}]`)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
t.Run("Filter", func(t *testing.T) {
|
t.Run("Filter", func(t *testing.T) {
|
||||||
|
@ -258,6 +259,29 @@ func TestTaskCollection(t *testing.T) {
|
||||||
assertHandlerErrorCode(t, err, models.ErrCodeInvalidTaskFilterValue)
|
assertHandlerErrorCode(t, err, models.ErrCodeInvalidTaskFilterValue)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
t.Run("saved filter", func(t *testing.T) {
|
||||||
|
t.Run("date range", func(t *testing.T) {
|
||||||
|
rec, err := testHandler.testReadAllWithUser(
|
||||||
|
nil,
|
||||||
|
map[string]string{"list": "-2"}, // Actually a saved filter - contains the same filter arguments as the start and end date filter from above
|
||||||
|
)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.NotContains(t, rec.Body.String(), `task #1`)
|
||||||
|
assert.NotContains(t, rec.Body.String(), `task #2`)
|
||||||
|
assert.NotContains(t, rec.Body.String(), `task #3`)
|
||||||
|
assert.NotContains(t, rec.Body.String(), `task #4`)
|
||||||
|
assert.Contains(t, rec.Body.String(), `task #5`)
|
||||||
|
assert.Contains(t, rec.Body.String(), `task #6`)
|
||||||
|
assert.Contains(t, rec.Body.String(), `task #7`)
|
||||||
|
assert.Contains(t, rec.Body.String(), `task #8`)
|
||||||
|
assert.Contains(t, rec.Body.String(), `task #9`)
|
||||||
|
assert.NotContains(t, rec.Body.String(), `task #10`)
|
||||||
|
assert.NotContains(t, rec.Body.String(), `task #11`)
|
||||||
|
assert.NotContains(t, rec.Body.String(), `task #12`)
|
||||||
|
assert.NotContains(t, rec.Body.String(), `task #13`)
|
||||||
|
assert.NotContains(t, rec.Body.String(), `task #14`)
|
||||||
|
})
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("ReadAll for all tasks", func(t *testing.T) {
|
t.Run("ReadAll for all tasks", func(t *testing.T) {
|
||||||
|
@ -318,42 +342,42 @@ func TestTaskCollection(t *testing.T) {
|
||||||
t.Run("by priority", func(t *testing.T) {
|
t.Run("by priority", func(t *testing.T) {
|
||||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"priority"}}, nil)
|
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"priority"}}, nil)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `{"id":33,"title":"task #33 with percent done","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0.5,"identifier":"test1-17","index":17,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":1,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":4,"title":"task #4 low prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":1,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-4","index":4,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":3,"title":"task #3 high prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":100,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-3","index":3,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
assert.Contains(t, rec.Body.String(), `{"id":33,"title":"task #33 with percent done","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0.5,"identifier":"test1-17","index":17,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":1,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":4,"title":"task #4 low prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":1,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-4","index":4,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":3,"title":"task #3 high 
prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":100,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-3","index":3,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
||||||
})
|
})
|
||||||
t.Run("by priority desc", func(t *testing.T) {
|
t.Run("by priority desc", func(t *testing.T) {
|
||||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"priority"}, "order_by": []string{"desc"}}, nil)
|
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"priority"}, "order_by": []string{"desc"}}, nil)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `[{"id":3,"title":"task #3 high prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":100,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-3","index":3,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":4,"title":"task #4 low prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":1`)
|
assert.Contains(t, rec.Body.String(), `[{"id":3,"title":"task #3 high prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":100,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-3","index":3,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":4,"title":"task #4 low prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":1`)
|
||||||
})
|
})
|
||||||
t.Run("by priority asc", func(t *testing.T) {
|
t.Run("by priority asc", func(t *testing.T) {
|
||||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"priority"}, "order_by": []string{"asc"}}, nil)
|
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"priority"}, "order_by": []string{"asc"}}, nil)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `{"id":33,"title":"task #33 with percent done","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0.5,"identifier":"test1-17","index":17,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":1,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":4,"title":"task #4 low prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":1,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-4","index":4,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":3,"title":"task #3 high prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":100,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-3","index":3,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
assert.Contains(t, rec.Body.String(), `{"id":33,"title":"task #33 with percent done","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0.5,"identifier":"test1-17","index":17,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":1,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":4,"title":"task #4 low prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":1,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-4","index":4,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":3,"title":"task #3 high 
prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":100,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-3","index":3,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
||||||
})
|
})
|
||||||
// should equal duedate asc
|
// should equal duedate asc
|
||||||
t.Run("by due_date", func(t *testing.T) {
|
t.Run("by due_date", func(t *testing.T) {
|
||||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"due_date"}}, nil)
|
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"due_date"}}, nil)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `{"id":6,"title":"task #6 lower due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-11-30T22:25:24Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-6","index":6,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":3,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":5,"title":"task #5 higher due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-12-01T03:58:44Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-5","index":5,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
assert.Contains(t, rec.Body.String(), `{"id":6,"title":"task #6 lower due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-11-30T22:25:24Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-6","index":6,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":3,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":5,"title":"task #5 higher due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-12-01T03:58:44Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-5","index":5,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
||||||
})
|
})
|
||||||
t.Run("by duedate desc", func(t *testing.T) {
|
t.Run("by duedate desc", func(t *testing.T) {
|
||||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"due_date"}, "order_by": []string{"desc"}}, nil)
|
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"due_date"}, "order_by": []string{"desc"}}, nil)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `[{"id":5,"title":"task #5 higher due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-12-01T03:58:44Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-5","index":5,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":6,"title":"task #6 lower due date`)
|
assert.Contains(t, rec.Body.String(), `[{"id":5,"title":"task #5 higher due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-12-01T03:58:44Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-5","index":5,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":6,"title":"task #6 lower due date`)
|
||||||
})
|
})
|
||||||
t.Run("by duedate asc", func(t *testing.T) {
|
t.Run("by duedate asc", func(t *testing.T) {
|
||||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"due_date"}, "order_by": []string{"asc"}}, nil)
|
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"due_date"}, "order_by": []string{"asc"}}, nil)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `{"id":6,"title":"task #6 lower due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-11-30T22:25:24Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-6","index":6,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":3,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":5,"title":"task #5 higher due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-12-01T03:58:44Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-5","index":5,"related_tasks":{},"attachments":null,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
assert.Contains(t, rec.Body.String(), `{"id":6,"title":"task #6 lower due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-11-30T22:25:24Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-6","index":6,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":3,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":5,"title":"task #5 higher due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-12-01T03:58:44Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-5","index":5,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
||||||
})
|
})
|
||||||
t.Run("invalid parameter", func(t *testing.T) {
|
t.Run("invalid parameter", func(t *testing.T) {
|
||||||
// Invalid parameter should not sort at all
|
// Invalid parameter should not sort at all
|
||||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort": []string{"loremipsum"}}, nil)
|
rec, err := testHandler.testReadAllWithUser(url.Values{"sort": []string{"loremipsum"}}, nil)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.NotContains(t, rec.Body.String(), `[{"id":3,"title":"task #3 high prio","description":"","done":false,"due_date":0,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":100,"start_date":0,"end_date":0,"assignees":null,"labels":null,"hex_color":"","created":1543626724,"updated":1543626724,"created_by":{"id":0,"username":"","email":"","created":0,"updated":0}},{"id":4,"title":"task #4 low prio","description":"","done":false,"due_date":0,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":1`)
|
assert.NotContains(t, rec.Body.String(), `[{"id":3,"title":"task #3 high prio","description":"","done":false,"due_date":0,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":100,"start_date":0,"end_date":0,"assignees":null,"labels":null,"hex_color":"","created":1543626724,"updated":1543626724,"created_by":{"id":0,"name":"","username":"","email":"","created":0,"updated":0}},{"id":4,"title":"task #4 low prio","description":"","done":false,"due_date":0,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":1`)
|
||||||
assert.NotContains(t, rec.Body.String(), `{"id":4,"title":"task #4 low prio","description":"","done":false,"due_date":0,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":1,"start_date":0,"end_date":0,"assignees":null,"labels":null,"hex_color":"","created":1543626724,"updated":1543626724,"created_by":{"id":0,"username":"","email":"","created":0,"updated":0}},{"id":3,"title":"task #3 high prio","description":"","done":false,"due_date":0,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":100,"start_date":0,"end_date":0,"assignees":null,"labels":null,"created":1543626724,"updated":1543626724,"created_by":{"id":0,"username":"","email":"","created":0,"updated":0}}]`)
|
assert.NotContains(t, rec.Body.String(), `{"id":4,"title":"task #4 low prio","description":"","done":false,"due_date":0,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":1,"start_date":0,"end_date":0,"assignees":null,"labels":null,"hex_color":"","created":1543626724,"updated":1543626724,"created_by":{"id":0,"name":"","username":"","email":"","created":0,"updated":0}},{"id":3,"title":"task #3 high prio","description":"","done":false,"due_date":0,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":100,"start_date":0,"end_date":0,"assignees":null,"labels":null,"created":1543626724,"updated":1543626724,"created_by":{"id":0,"name":"","username":"","email":"","created":0,"updated":0}}]`)
|
||||||
assert.NotContains(t, rec.Body.String(), `[{"id":5,"title":"task #5 higher due date","description":"","done":false,"due_date":1543636724,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":0,"end_date":0,"assignees":null,"labels":null,"hex_color":"","created":1543626724,"updated":1543626724,"created_by":{"id":0,"username":"","email":"","created":0,"updated":0}},{"id":6,"title":"task #6 lower due date"`)
|
assert.NotContains(t, rec.Body.String(), `[{"id":5,"title":"task #5 higher due date","description":"","done":false,"due_date":1543636724,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":0,"end_date":0,"assignees":null,"labels":null,"hex_color":"","created":1543626724,"updated":1543626724,"created_by":{"id":0,"name":"","username":"","email":"","created":0,"updated":0}},{"id":6,"title":"task #6 lower due date"`)
|
||||||
assert.NotContains(t, rec.Body.String(), `{"id":6,"title":"task #6 lower due date","description":"","done":false,"due_date":1543616724,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":0,"end_date":0,"assignees":null,"labels":null,"hex_color":"","created":1543626724,"updated":1543626724,"created_by":{"id":0,"username":"","email":"","created":0,"updated":0}},{"id":5,"title":"task #5 higher due date","description":"","done":false,"due_date":1543636724,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":0,"end_date":0,"assignees":null,"labels":null,"created":1543626724,"updated":1543626724,"created_by":{"id":0,"username":"","email":"","created":0,"updated":0}}]`)
|
assert.NotContains(t, rec.Body.String(), `{"id":6,"title":"task #6 lower due date","description":"","done":false,"due_date":1543616724,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":0,"end_date":0,"assignees":null,"labels":null,"hex_color":"","created":1543626724,"updated":1543626724,"created_by":{"id":0,"name":"","username":"","email":"","created":0,"updated":0}},{"id":5,"title":"task #5 higher due date","description":"","done":false,"due_date":1543636724,"reminder_dates":null,"repeat_after":0,"repeat_from_current_date":false,"priority":0,"start_date":0,"end_date":0,"assignees":null,"labels":null,"created":1543626724,"updated":1543626724,"created_by":{"id":0,"name":"","username":"","email":"","created":0,"updated":0}}]`)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
t.Run("Filter", func(t *testing.T) {
|
t.Run("Filter", func(t *testing.T) {
|
||||||
|
|
|
@ -17,11 +17,12 @@
|
||||||
package integrations
|
package integrations
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
"code.vikunja.io/api/pkg/models"
|
"code.vikunja.io/api/pkg/models"
|
||||||
"code.vikunja.io/web/handler"
|
"code.vikunja.io/web/handler"
|
||||||
"github.com/labstack/echo/v4"
|
"github.com/labstack/echo/v4"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"testing"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestTaskComments(t *testing.T) {
|
func TestTaskComments(t *testing.T) {
|
||||||
|
|
|
@ -17,11 +17,12 @@
|
||||||
package integrations
|
package integrations
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
"code.vikunja.io/api/pkg/models"
|
"code.vikunja.io/api/pkg/models"
|
||||||
"code.vikunja.io/web/handler"
|
"code.vikunja.io/web/handler"
|
||||||
"github.com/labstack/echo/v4"
|
"github.com/labstack/echo/v4"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"testing"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestTask(t *testing.T) {
|
func TestTask(t *testing.T) {
|
||||||
|
@ -289,9 +290,9 @@ func TestTask(t *testing.T) {
|
||||||
})
|
})
|
||||||
t.Run("Bucket", func(t *testing.T) {
|
t.Run("Bucket", func(t *testing.T) {
|
||||||
t.Run("Normal", func(t *testing.T) {
|
t.Run("Normal", func(t *testing.T) {
|
||||||
rec, err := testHandler.testUpdateWithUser(nil, map[string]string{"listtask": "1"}, `{"bucket_id":2}`)
|
rec, err := testHandler.testUpdateWithUser(nil, map[string]string{"listtask": "1"}, `{"bucket_id":3}`)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `"bucket_id":2`)
|
assert.Contains(t, rec.Body.String(), `"bucket_id":3`)
|
||||||
assert.NotContains(t, rec.Body.String(), `"bucket_id":1`)
|
assert.NotContains(t, rec.Body.String(), `"bucket_id":1`)
|
||||||
})
|
})
|
||||||
t.Run("Different List", func(t *testing.T) {
|
t.Run("Different List", func(t *testing.T) {
|
||||||
|
@ -472,9 +473,9 @@ func TestTask(t *testing.T) {
|
||||||
})
|
})
|
||||||
t.Run("Bucket", func(t *testing.T) {
|
t.Run("Bucket", func(t *testing.T) {
|
||||||
t.Run("Normal", func(t *testing.T) {
|
t.Run("Normal", func(t *testing.T) {
|
||||||
rec, err := testHandler.testCreateWithUser(nil, map[string]string{"list": "1"}, `{"title":"Lorem Ipsum","bucket_id":2}`)
|
rec, err := testHandler.testCreateWithUser(nil, map[string]string{"list": "1"}, `{"title":"Lorem Ipsum","bucket_id":3}`)
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
assert.Contains(t, rec.Body.String(), `"bucket_id":2`)
|
assert.Contains(t, rec.Body.String(), `"bucket_id":3`)
|
||||||
assert.NotContains(t, rec.Body.String(), `"bucket_id":1`)
|
assert.NotContains(t, rec.Body.String(), `"bucket_id":1`)
|
||||||
})
|
})
|
||||||
t.Run("Different List", func(t *testing.T) {
|
t.Run("Different List", func(t *testing.T) {
|
||||||
|
|
|
@ -17,10 +17,11 @@
|
||||||
package integrations
|
package integrations
|
||||||
|
|
||||||
import (
|
import (
|
||||||
apiv1 "code.vikunja.io/api/pkg/routes/api/v1"
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
"net/http"
|
"net/http"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
apiv1 "code.vikunja.io/api/pkg/routes/api/v1"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestCheckToken(t *testing.T) {
|
func TestCheckToken(t *testing.T) {
|
||||||
|
|
|
@ -17,11 +17,12 @@
|
||||||
package integrations
|
package integrations
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"net/http"
|
||||||
|
"testing"
|
||||||
|
|
||||||
apiv1 "code.vikunja.io/api/pkg/routes/api/v1"
|
apiv1 "code.vikunja.io/api/pkg/routes/api/v1"
|
||||||
"code.vikunja.io/api/pkg/user"
|
"code.vikunja.io/api/pkg/user"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"net/http"
|
|
||||||
"testing"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestUserChangePassword(t *testing.T) {
|
func TestUserChangePassword(t *testing.T) {
|
||||||
|
|
|
@ -17,12 +17,13 @@
|
||||||
package integrations
|
package integrations
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"net/http"
|
||||||
|
"testing"
|
||||||
|
|
||||||
apiv1 "code.vikunja.io/api/pkg/routes/api/v1"
|
apiv1 "code.vikunja.io/api/pkg/routes/api/v1"
|
||||||
"code.vikunja.io/api/pkg/user"
|
"code.vikunja.io/api/pkg/user"
|
||||||
"github.com/labstack/echo/v4"
|
"github.com/labstack/echo/v4"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"net/http"
|
|
||||||
"testing"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestUserConfirmEmail(t *testing.T) {
|
func TestUserConfirmEmail(t *testing.T) {
|
||||||
|
|
|
@ -17,10 +17,11 @@
|
||||||
package integrations
|
package integrations
|
||||||
|
|
||||||
import (
|
import (
|
||||||
apiv1 "code.vikunja.io/api/pkg/routes/api/v1"
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
"net/http"
|
"net/http"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
apiv1 "code.vikunja.io/api/pkg/routes/api/v1"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestUserList(t *testing.T) {
|
func TestUserList(t *testing.T) {
|
||||||
|
|
|
@ -17,12 +17,13 @@
|
||||||
package integrations
|
package integrations
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"net/http"
|
||||||
|
"testing"
|
||||||
|
|
||||||
apiv1 "code.vikunja.io/api/pkg/routes/api/v1"
|
apiv1 "code.vikunja.io/api/pkg/routes/api/v1"
|
||||||
"code.vikunja.io/api/pkg/user"
|
"code.vikunja.io/api/pkg/user"
|
||||||
"github.com/labstack/echo/v4"
|
"github.com/labstack/echo/v4"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"net/http"
|
|
||||||
"testing"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestUserRequestResetPasswordToken(t *testing.T) {
|
func TestUserRequestResetPasswordToken(t *testing.T) {
|
||||||
|
|
|
@ -17,12 +17,13 @@
|
||||||
package integrations
|
package integrations
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"net/http"
|
||||||
|
"testing"
|
||||||
|
|
||||||
apiv1 "code.vikunja.io/api/pkg/routes/api/v1"
|
apiv1 "code.vikunja.io/api/pkg/routes/api/v1"
|
||||||
"code.vikunja.io/api/pkg/user"
|
"code.vikunja.io/api/pkg/user"
|
||||||
"github.com/labstack/echo/v4"
|
"github.com/labstack/echo/v4"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"net/http"
|
|
||||||
"testing"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestUserPasswordReset(t *testing.T) {
|
func TestUserPasswordReset(t *testing.T) {
|
||||||
|
|
|
@ -17,10 +17,11 @@
|
||||||
package integrations
|
package integrations
|
||||||
|
|
||||||
import (
|
import (
|
||||||
apiv1 "code.vikunja.io/api/pkg/routes/api/v1"
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
"net/http"
|
"net/http"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
apiv1 "code.vikunja.io/api/pkg/routes/api/v1"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestUserShow(t *testing.T) {
|
func TestUserShow(t *testing.T) {
|
||||||
|
|
|
@ -17,13 +17,14 @@
|
||||||
package log
|
package log
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"code.vikunja.io/api/pkg/config"
|
|
||||||
"github.com/op/go-logging"
|
|
||||||
"github.com/spf13/viper"
|
|
||||||
"io"
|
"io"
|
||||||
"os"
|
"os"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"code.vikunja.io/api/pkg/config"
|
||||||
|
"github.com/op/go-logging"
|
||||||
|
"github.com/spf13/viper"
|
||||||
)
|
)
|
||||||
|
|
||||||
// ErrFmt holds the format for all the console logging
|
// ErrFmt holds the format for all the console logging
|
||||||
|
@ -105,7 +106,6 @@ func GetLogger() *logging.Logger {
|
||||||
return logInstance
|
return logInstance
|
||||||
}
|
}
|
||||||
|
|
||||||
/////
|
|
||||||
// The following functions are to be used as an "eye-candy", so one can just write log.Error() instead of log.Log.Error()
|
// The following functions are to be used as an "eye-candy", so one can just write log.Error() instead of log.Log.Error()
|
||||||
|
|
||||||
// Debug is for debug messages
|
// Debug is for debug messages
|
||||||
|
|
|
@ -17,10 +17,11 @@
|
||||||
package log
|
package log
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"code.vikunja.io/api/pkg/config"
|
|
||||||
"github.com/op/go-logging"
|
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"code.vikunja.io/api/pkg/config"
|
||||||
|
"github.com/op/go-logging"
|
||||||
"xorm.io/xorm/log"
|
"xorm.io/xorm/log"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
|
@ -17,11 +17,12 @@
|
||||||
package mail
|
package mail
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"crypto/tls"
|
||||||
|
"time"
|
||||||
|
|
||||||
"code.vikunja.io/api/pkg/config"
|
"code.vikunja.io/api/pkg/config"
|
||||||
"code.vikunja.io/api/pkg/log"
|
"code.vikunja.io/api/pkg/log"
|
||||||
"crypto/tls"
|
|
||||||
"gopkg.in/gomail.v2"
|
"gopkg.in/gomail.v2"
|
||||||
"time"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// Queue is the mail queue
|
// Queue is the mail queue
|
||||||
|
@ -34,6 +35,7 @@ func getDialer() *gomail.Dialer {
|
||||||
InsecureSkipVerify: config.MailerSkipTLSVerify.GetBool(),
|
InsecureSkipVerify: config.MailerSkipTLSVerify.GetBool(),
|
||||||
ServerName: config.MailerHost.GetString(),
|
ServerName: config.MailerHost.GetString(),
|
||||||
}
|
}
|
||||||
|
d.SSL = config.MailerForceSSL.GetBool()
|
||||||
return d
|
return d
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -18,13 +18,14 @@ package mail
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
|
"html/template"
|
||||||
|
|
||||||
"code.vikunja.io/api/pkg/config"
|
"code.vikunja.io/api/pkg/config"
|
||||||
"code.vikunja.io/api/pkg/log"
|
"code.vikunja.io/api/pkg/log"
|
||||||
"code.vikunja.io/api/pkg/static"
|
"code.vikunja.io/api/pkg/static"
|
||||||
"code.vikunja.io/api/pkg/utils"
|
"code.vikunja.io/api/pkg/utils"
|
||||||
"github.com/shurcooL/httpfs/html/vfstemplate"
|
"github.com/shurcooL/httpfs/html/vfstemplate"
|
||||||
"gopkg.in/gomail.v2"
|
"gopkg.in/gomail.v2"
|
||||||
"html/template"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// Opts holds infos for a mail
|
// Opts holds infos for a mail
|
||||||
|
|
|
@ -17,13 +17,14 @@
|
||||||
package metrics
|
package metrics
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"sync"
|
||||||
|
"time"
|
||||||
|
|
||||||
"code.vikunja.io/api/pkg/log"
|
"code.vikunja.io/api/pkg/log"
|
||||||
|
"code.vikunja.io/api/pkg/modules/keyvalue"
|
||||||
"code.vikunja.io/web"
|
"code.vikunja.io/web"
|
||||||
"encoding/gob"
|
|
||||||
"github.com/prometheus/client_golang/prometheus"
|
"github.com/prometheus/client_golang/prometheus"
|
||||||
"github.com/prometheus/client_golang/prometheus/promauto"
|
"github.com/prometheus/client_golang/prometheus/promauto"
|
||||||
"time"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// SecondsUntilInactive defines the seconds until a user is considered inactive
|
// SecondsUntilInactive defines the seconds until a user is considered inactive
|
||||||
|
@ -38,24 +39,35 @@ type ActiveUser struct {
|
||||||
LastSeen time.Time
|
LastSeen time.Time
|
||||||
}
|
}
|
||||||
|
|
||||||
// ActiveUsersMap is the type used to save active users
|
type activeUsersMap map[int64]*ActiveUser
|
||||||
type ActiveUsersMap map[int64]*ActiveUser
|
|
||||||
|
// ActiveUsers is the type used to save active users
|
||||||
|
type ActiveUsers struct {
|
||||||
|
users activeUsersMap
|
||||||
|
mutex *sync.Mutex
|
||||||
|
}
|
||||||
|
|
||||||
// activeUsers holds a map with all active users
|
// activeUsers holds a map with all active users
|
||||||
var activeUsers ActiveUsersMap
|
var activeUsers *ActiveUsers
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
activeUsers = make(ActiveUsersMap)
|
activeUsers = &ActiveUsers{
|
||||||
|
users: make(map[int64]*ActiveUser),
|
||||||
|
mutex: &sync.Mutex{},
|
||||||
|
}
|
||||||
|
|
||||||
promauto.NewGaugeFunc(prometheus.GaugeOpts{
|
promauto.NewGaugeFunc(prometheus.GaugeOpts{
|
||||||
Name: "vikunja_active_users",
|
Name: "vikunja_active_users",
|
||||||
Help: "The currently active users on this node",
|
Help: "The currently active users on this node",
|
||||||
}, func() float64 {
|
}, func() float64 {
|
||||||
|
|
||||||
allActiveUsers, err := GetActiveUsers()
|
allActiveUsers, err := getActiveUsers()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error(err.Error())
|
log.Error(err.Error())
|
||||||
}
|
}
|
||||||
|
if allActiveUsers == nil {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
activeUsersCount := 0
|
activeUsersCount := 0
|
||||||
for _, u := range allActiveUsers {
|
for _, u := range allActiveUsers {
|
||||||
if time.Since(u.LastSeen) < SecondsUntilInactive*time.Second {
|
if time.Since(u.LastSeen) < SecondsUntilInactive*time.Second {
|
||||||
|
@ -68,43 +80,30 @@ func init() {
|
||||||
|
|
||||||
// SetUserActive sets a user as active and pushes it to redis
|
// SetUserActive sets a user as active and pushes it to redis
|
||||||
func SetUserActive(a web.Auth) (err error) {
|
func SetUserActive(a web.Auth) (err error) {
|
||||||
activeUsers[a.GetID()] = &ActiveUser{
|
activeUsers.mutex.Lock()
|
||||||
|
activeUsers.users[a.GetID()] = &ActiveUser{
|
||||||
UserID: a.GetID(),
|
UserID: a.GetID(),
|
||||||
LastSeen: time.Now(),
|
LastSeen: time.Now(),
|
||||||
}
|
}
|
||||||
|
activeUsers.mutex.Unlock()
|
||||||
return PushActiveUsers()
|
return PushActiveUsers()
|
||||||
}
|
}
|
||||||
|
|
||||||
// GetActiveUsers returns the active users from redis
|
// getActiveUsers returns the active users from redis
|
||||||
func GetActiveUsers() (users ActiveUsersMap, err error) {
|
func getActiveUsers() (users activeUsersMap, err error) {
|
||||||
|
u, err := keyvalue.Get(ActiveUsersKey)
|
||||||
activeUsersR, err := r.Get(ActiveUsersKey).Bytes()
|
|
||||||
if err != nil {
|
|
||||||
if err.Error() == "redis: nil" {
|
|
||||||
return users, nil
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
var b bytes.Buffer
|
|
||||||
_, err = b.Write(activeUsersR)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
d := gob.NewDecoder(&b)
|
|
||||||
if err := d.Decode(&users); err != nil {
|
users = u.(activeUsersMap)
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// PushActiveUsers pushed the content of the activeUsers map to redis
|
// PushActiveUsers pushed the content of the activeUsers map to redis
|
||||||
func PushActiveUsers() (err error) {
|
func PushActiveUsers() (err error) {
|
||||||
var b bytes.Buffer
|
activeUsers.mutex.Lock()
|
||||||
e := gob.NewEncoder(&b)
|
defer activeUsers.mutex.Unlock()
|
||||||
if err := e.Encode(activeUsers); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
return r.Set(ActiveUsersKey, b.Bytes(), 0).Err()
|
return keyvalue.Put(ActiveUsersKey, activeUsers.users)
|
||||||
}
|
}
|
||||||
|
|
|
@ -19,14 +19,12 @@ package metrics
|
||||||
import (
|
import (
|
||||||
"code.vikunja.io/api/pkg/config"
|
"code.vikunja.io/api/pkg/config"
|
||||||
"code.vikunja.io/api/pkg/log"
|
"code.vikunja.io/api/pkg/log"
|
||||||
"code.vikunja.io/api/pkg/red"
|
"code.vikunja.io/api/pkg/modules/keyvalue"
|
||||||
"github.com/go-redis/redis/v7"
|
e "code.vikunja.io/api/pkg/modules/keyvalue/error"
|
||||||
"github.com/prometheus/client_golang/prometheus"
|
"github.com/prometheus/client_golang/prometheus"
|
||||||
"github.com/prometheus/client_golang/prometheus/promauto"
|
"github.com/prometheus/client_golang/prometheus/promauto"
|
||||||
)
|
)
|
||||||
|
|
||||||
var r *redis.Client
|
|
||||||
|
|
||||||
const (
|
const (
|
||||||
// ListCountKey is the name of the key in which we save the list count
|
// ListCountKey is the name of the key in which we save the list count
|
||||||
ListCountKey = `listcount`
|
ListCountKey = `listcount`
|
||||||
|
@ -46,8 +44,6 @@ const (
|
||||||
|
|
||||||
// InitMetrics Initializes the metrics
|
// InitMetrics Initializes the metrics
|
||||||
func InitMetrics() {
|
func InitMetrics() {
|
||||||
r = red.GetRedis()
|
|
||||||
|
|
||||||
// init active users, sometimes we'll have garbage from previous runs in redis instead
|
// init active users, sometimes we'll have garbage from previous runs in redis instead
|
||||||
if err := PushActiveUsers(); err != nil {
|
if err := PushActiveUsers(); err != nil {
|
||||||
log.Fatalf("Could not set initial count for active users, error was %s", err)
|
log.Fatalf("Could not set initial count for active users, error was %s", err)
|
||||||
|
@ -101,18 +97,21 @@ func InitMetrics() {
|
||||||
|
|
||||||
// GetCount returns the current count from redis
|
// GetCount returns the current count from redis
|
||||||
func GetCount(key string) (count int64, err error) {
|
func GetCount(key string) (count int64, err error) {
|
||||||
count, err = r.Get(key).Int64()
|
cnt, err := keyvalue.Get(key)
|
||||||
if err != nil && err.Error() != "redis: nil" {
|
if err != nil {
|
||||||
return
|
if e.IsErrValueNotFoundForKey(err) {
|
||||||
|
return 0, nil
|
||||||
|
}
|
||||||
|
return 0, err
|
||||||
}
|
}
|
||||||
err = nil
|
count = cnt.(int64)
|
||||||
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// SetCount sets the list count to a given value
|
// SetCount sets the list count to a given value
|
||||||
func SetCount(count int64, key string) error {
|
func SetCount(count int64, key string) error {
|
||||||
return r.Set(key, count, 0).Err()
|
return keyvalue.Put(key, count)
|
||||||
}
|
}
|
||||||
|
|
||||||
// UpdateCount updates a count with a given amount
|
// UpdateCount updates a count with a given amount
|
||||||
|
@ -121,13 +120,13 @@ func UpdateCount(update int64, key string) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
if update > 0 {
|
if update > 0 {
|
||||||
err := r.IncrBy(key, update).Err()
|
err := keyvalue.IncrBy(key, update)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error(err.Error())
|
log.Error(err.Error())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if update < 0 {
|
if update < 0 {
|
||||||
err := r.DecrBy(key, update).Err()
|
err := keyvalue.DecrBy(key, update)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error(err.Error())
|
log.Error(err.Error())
|
||||||
}
|
}
|
||||||
|
|
|
@ -18,6 +18,7 @@ package migration
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"math"
|
"math"
|
||||||
|
|
||||||
"src.techknowlogick.com/xormigrate"
|
"src.techknowlogick.com/xormigrate"
|
||||||
"xorm.io/xorm"
|
"xorm.io/xorm"
|
||||||
)
|
)
|
||||||
|
|
|
@ -17,8 +17,9 @@
|
||||||
package migration
|
package migration
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"src.techknowlogick.com/xormigrate"
|
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
|
"src.techknowlogick.com/xormigrate"
|
||||||
"xorm.io/xorm"
|
"xorm.io/xorm"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -55,7 +56,7 @@ func init() {
|
||||||
// The general idea here is to take the title and slice it into pieces, until we found a unique piece.
|
// The general idea here is to take the title and slice it into pieces, until we found a unique piece.
|
||||||
|
|
||||||
var exists = true
|
var exists = true
|
||||||
titleSlug := strings.Replace(strings.ToUpper(l.Title), " ", "", -1)
|
titleSlug := []rune(strings.ReplaceAll(strings.ToUpper(l.Title), " ", ""))
|
||||||
|
|
||||||
// We can save at most 10 characters in the db, so we need to ensure it has at most 10 characters
|
// We can save at most 10 characters in the db, so we need to ensure it has at most 10 characters
|
||||||
if len(titleSlug) > 10 {
|
if len(titleSlug) > 10 {
|
||||||
|
@ -72,7 +73,7 @@ func init() {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Take a random part of the title slug, starting at the beginning
|
// Take a random part of the title slug, starting at the beginning
|
||||||
l.Identifier = titleSlug[i:]
|
l.Identifier = string(titleSlug[i:])
|
||||||
exists, err = sess.
|
exists, err = sess.
|
||||||
Where("identifier = ?", l.Identifier).
|
Where("identifier = ?", l.Identifier).
|
||||||
And("id != ?", l.ID).
|
And("id != ?", l.ID).
|
||||||
|
|
|
@ -18,8 +18,9 @@ package migration
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"src.techknowlogick.com/xormigrate"
|
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
|
"src.techknowlogick.com/xormigrate"
|
||||||
"xorm.io/xorm"
|
"xorm.io/xorm"
|
||||||
"xorm.io/xorm/schemas"
|
"xorm.io/xorm/schemas"
|
||||||
)
|
)
|
||||||
|
@ -654,7 +655,7 @@ create unique index UQE_users_namespace_id
|
||||||
// The statement is probably useless anyway since its only purpose is to clean up old tables
|
// The statement is probably useless anyway since its only purpose is to clean up old tables
|
||||||
// which may be leftovers from a previously failed migration. However, since the whole thing
|
// which may be leftovers from a previously failed migration. However, since the whole thing
|
||||||
// is wrapped in sessions, this is extremely unlikely to happen anyway.
|
// is wrapped in sessions, this is extremely unlikely to happen anyway.
|
||||||
//"ALTER TABLE " + table + " DROP COLUMN IF EXISTS " + colTmp + ";",
|
// "ALTER TABLE " + table + " DROP COLUMN IF EXISTS " + colTmp + ";",
|
||||||
"ALTER TABLE " + table + " ADD COLUMN " + colTmp + " DATETIME NULL;",
|
"ALTER TABLE " + table + " ADD COLUMN " + colTmp + " DATETIME NULL;",
|
||||||
// #nosec
|
// #nosec
|
||||||
"UPDATE " + table + " SET " + colTmp + " = IF(" + colOld + " = 0, NULL, FROM_UNIXTIME(" + colOld + "));",
|
"UPDATE " + table + " SET " + colTmp + " = IF(" + colOld + " = 0, NULL, FROM_UNIXTIME(" + colOld + "));",
|
||||||
|
|
|
@ -0,0 +1,50 @@
|
||||||
|
// Vikunja is a to-do list application to facilitate your life.
|
||||||
|
// Copyright 2018-2020 Vikunja and contributors. All rights reserved.
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU General Public License as published by
|
||||||
|
// the Free Software Foundation, either version 3 of the License, or
|
||||||
|
// (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
package migration
|
||||||
|
|
||||||
|
import (
|
||||||
|
"src.techknowlogick.com/xormigrate"
|
||||||
|
"xorm.io/xorm"
|
||||||
|
)
|
||||||
|
|
||||||
|
type user20200801183357 struct {
|
||||||
|
AvatarProvider string `xorm:"varchar(255) null" json:"-"`
|
||||||
|
AvatarFileID int64 `xorn:"null" json:"-"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s user20200801183357) TableName() string {
|
||||||
|
return "users"
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
migrations = append(migrations, &xormigrate.Migration{
|
||||||
|
ID: "20200801183357",
|
||||||
|
Description: "Add avatar provider setting to user",
|
||||||
|
Migrate: func(tx *xorm.Engine) error {
|
||||||
|
err := tx.Sync2(user20200801183357{})
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = tx.Cols("avatar_provider").Update(&user20200801183357{AvatarProvider: "initials"})
|
||||||
|
return err
|
||||||
|
},
|
||||||
|
Rollback: func(tx *xorm.Engine) error {
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
|
@ -0,0 +1,43 @@
|
||||||
|
// Vikunja is a to-do list application to facilitate your life.
|
||||||
|
// Copyright 2018-2020 Vikunja and contributors. All rights reserved.
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU General Public License as published by
|
||||||
|
// the Free Software Foundation, either version 3 of the License, or
|
||||||
|
// (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
package migration
|
||||||
|
|
||||||
|
import (
|
||||||
|
"src.techknowlogick.com/xormigrate"
|
||||||
|
"xorm.io/xorm"
|
||||||
|
)
|
||||||
|
|
||||||
|
type buckets20200904101559 struct {
|
||||||
|
Limit int64 `xorm:"default 0" json:"limit"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (buckets20200904101559) TableName() string {
|
||||||
|
return "buckets"
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
migrations = append(migrations, &xormigrate.Migration{
|
||||||
|
ID: "20200904101559",
|
||||||
|
Description: "Add limit field to kanban",
|
||||||
|
Migrate: func(tx *xorm.Engine) error {
|
||||||
|
return tx.Sync2(buckets20200904101559{})
|
||||||
|
},
|
||||||
|
Rollback: func(tx *xorm.Engine) error {
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
|
@ -0,0 +1,43 @@
|
||||||
|
// Vikunja is a to-do list application to facilitate your life.
|
||||||
|
// Copyright 2018-2020 Vikunja and contributors. All rights reserved.
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU General Public License as published by
|
||||||
|
// the Free Software Foundation, either version 3 of the License, or
|
||||||
|
// (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
package migration
|
||||||
|
|
||||||
|
import (
|
||||||
|
"src.techknowlogick.com/xormigrate"
|
||||||
|
"xorm.io/xorm"
|
||||||
|
)
|
||||||
|
|
||||||
|
type tasks20200905151040 struct {
|
||||||
|
IsFavorite bool `xorm:"default false" json:"is_favorite"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (tasks20200905151040) TableName() string {
|
||||||
|
return "tasks"
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
migrations = append(migrations, &xormigrate.Migration{
|
||||||
|
ID: "20200905151040",
|
||||||
|
Description: "Add favorite field to tasks",
|
||||||
|
Migrate: func(tx *xorm.Engine) error {
|
||||||
|
return tx.Sync2(tasks20200905151040{})
|
||||||
|
},
|
||||||
|
Rollback: func(tx *xorm.Engine) error {
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
|
@ -0,0 +1,43 @@
|
||||||
|
// Vikunja is a to-do list application to facilitate your life.
|
||||||
|
// Copyright 2018-2020 Vikunja and contributors. All rights reserved.
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU General Public License as published by
|
||||||
|
// the Free Software Foundation, either version 3 of the License, or
|
||||||
|
// (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
package migration
|
||||||
|
|
||||||
|
import (
|
||||||
|
"src.techknowlogick.com/xormigrate"
|
||||||
|
"xorm.io/xorm"
|
||||||
|
)
|
||||||
|
|
||||||
|
type list20200905232458 struct {
|
||||||
|
IsFavorite bool `xorm:"default false" json:"is_favorite"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (list20200905232458) TableName() string {
|
||||||
|
return "list"
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
migrations = append(migrations, &xormigrate.Migration{
|
||||||
|
ID: "20200905232458",
|
||||||
|
Description: "Add is_favorite field to lists",
|
||||||
|
Migrate: func(tx *xorm.Engine) error {
|
||||||
|
return tx.Sync2(list20200905232458{})
|
||||||
|
},
|
||||||
|
Rollback: func(tx *xorm.Engine) error {
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
|
@ -0,0 +1,52 @@
|
||||||
|
// Vikunja is a to-do list application to facilitate your life.
|
||||||
|
// Copyright 2018-2020 Vikunja and contributors. All rights reserved.
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU General Public License as published by
|
||||||
|
// the Free Software Foundation, either version 3 of the License, or
|
||||||
|
// (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
package migration
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"code.vikunja.io/api/pkg/models"
|
||||||
|
"src.techknowlogick.com/xormigrate"
|
||||||
|
"xorm.io/xorm"
|
||||||
|
)
|
||||||
|
|
||||||
|
type savedFilters20200906184746 struct {
|
||||||
|
ID int64 `xorm:"autoincr not null unique pk" json:"id"`
|
||||||
|
Filters *models.TaskCollection `xorm:"JSON not null" json:"filters"`
|
||||||
|
Title string `xorm:"varchar(250) not null" json:"title" valid:"required,runelength(1|250)" minLength:"1" maxLength:"250"`
|
||||||
|
Description string `xorm:"longtext null" json:"description"`
|
||||||
|
OwnerID int64 `xorm:"int(11) not null INDEX" json:"-"`
|
||||||
|
Created time.Time `xorm:"created not null" json:"created"`
|
||||||
|
Updated time.Time `xorm:"updated not null" json:"updated"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (savedFilters20200906184746) TableName() string {
|
||||||
|
return "saved_filters"
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
migrations = append(migrations, &xormigrate.Migration{
|
||||||
|
ID: "20200906184746",
|
||||||
|
Description: "Add the saved filters column",
|
||||||
|
Migrate: func(tx *xorm.Engine) error {
|
||||||
|
return tx.Sync2(savedFilters20200906184746{})
|
||||||
|
},
|
||||||
|
Rollback: func(tx *xorm.Engine) error {
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
|
@ -0,0 +1,50 @@
|
||||||
|
// Vikunja is a to-do list application to facilitate your life.
|
||||||
|
// Copyright 2018-2020 Vikunja and contributors. All rights reserved.
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU General Public License as published by
|
||||||
|
// the Free Software Foundation, either version 3 of the License, or
|
||||||
|
// (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
package migration
|
||||||
|
|
||||||
|
import (
|
||||||
|
"src.techknowlogick.com/xormigrate"
|
||||||
|
"xorm.io/xorm"
|
||||||
|
)
|
||||||
|
|
||||||
|
type user20201025195822 struct {
|
||||||
|
Issuer string `xorm:"text null" json:"-"`
|
||||||
|
Subject string `xorm:"text null" json:"-"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (user20201025195822) TableName() string {
|
||||||
|
return "users"
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
migrations = append(migrations, &xormigrate.Migration{
|
||||||
|
ID: "20201025195822",
|
||||||
|
Description: "",
|
||||||
|
Migrate: func(tx *xorm.Engine) error {
|
||||||
|
err := tx.Sync2(user20201025195822{})
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = tx.Cols("issuer").Update(&user20201025195822{Issuer: "local"})
|
||||||
|
return err
|
||||||
|
},
|
||||||
|
Rollback: func(tx *xorm.Engine) error {
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
|
@ -0,0 +1,43 @@
|
||||||
|
// Vikunja is a to-do list application to facilitate your life.
|
||||||
|
// Copyright 2018-2020 Vikunja and contributors. All rights reserved.
|
||||||
|
//
|
||||||
|
// This program is free software: you can redistribute it and/or modify
|
||||||
|
// it under the terms of the GNU General Public License as published by
|
||||||
|
// the Free Software Foundation, either version 3 of the License, or
|
||||||
|
// (at your option) any later version.
|
||||||
|
//
|
||||||
|
// This program is distributed in the hope that it will be useful,
|
||||||
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
// GNU General Public License for more details.
|
||||||
|
//
|
||||||
|
// You should have received a copy of the GNU General Public License
|
||||||
|
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
package migration
|
||||||
|
|
||||||
|
import (
|
||||||
|
"src.techknowlogick.com/xormigrate"
|
||||||
|
"xorm.io/xorm"
|
||||||
|
)
|
||||||
|
|
||||||
|
type user20201121181647 struct {
|
||||||
|
Name string `xorm:"text null" json:"name"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (user20201121181647) TableName() string {
|
||||||
|
return "users"
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
migrations = append(migrations, &xormigrate.Migration{
|
||||||
|
ID: "20201121181647",
|
||||||
|
Description: "Add a name field to user",
|
||||||
|
Migrate: func(tx *xorm.Engine) error {
|
||||||
|
return tx.Sync2(user20201121181647{})
|
||||||
|
},
|
||||||
|
Rollback: func(tx *xorm.Engine) error {
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
|
@ -17,6 +17,9 @@
|
||||||
package migration
|
package migration
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"os"
|
||||||
|
"sort"
|
||||||
|
|
||||||
"code.vikunja.io/api/pkg/config"
|
"code.vikunja.io/api/pkg/config"
|
||||||
"code.vikunja.io/api/pkg/db"
|
"code.vikunja.io/api/pkg/db"
|
||||||
"code.vikunja.io/api/pkg/files"
|
"code.vikunja.io/api/pkg/files"
|
||||||
|
@ -25,8 +28,6 @@ import (
|
||||||
"code.vikunja.io/api/pkg/modules/migration"
|
"code.vikunja.io/api/pkg/modules/migration"
|
||||||
"code.vikunja.io/api/pkg/user"
|
"code.vikunja.io/api/pkg/user"
|
||||||
"github.com/olekukonko/tablewriter"
|
"github.com/olekukonko/tablewriter"
|
||||||
"os"
|
|
||||||
"sort"
|
|
||||||
"src.techknowlogick.com/xormigrate"
|
"src.techknowlogick.com/xormigrate"
|
||||||
"xorm.io/xorm"
|
"xorm.io/xorm"
|
||||||
)
|
)
|
||||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue