forked from vikunja/vikunja

Compare commits: main...renovate/a (1 commit)

Commit b693983789
@@ -138,7 +138,7 @@ steps:
    GOPROXY: 'https://goproxy.kolaente.de'
  depends_on: [ build ]
  commands:
    - wget -O - -q https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin v1.45.2
    - wget -O - -q https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin v1.31.0
    - ./mage-static check:all
  when:
    event: [ push, tag, pull_request ]
@@ -621,7 +621,7 @@ steps:
    - tar -xzf vikunja-theme.tar.gz

- name: build
  image: klakegg/hugo:0.93.3
  image: monachus/hugo:v0.75.1
  pull: true
  commands:
    - cd docs
@@ -874,6 +874,6 @@ steps:
  - failure
---
kind: signature
hmac: 1c4c211e66e4b6eddd2a1c1bad31e5c960d4f67d6033f4d5c4de7896dfae6c30
hmac: 4b36351eaff966fb0e28775059a8a4e091eca7a3ef4c52df2a050fd6c59d409e

...
@@ -13,11 +13,10 @@ linters:
    - goheader
    - gofmt
    - goimports
    - revive
    - golint
    - misspell
  disable:
    - scopelint # Obsolete, using exportloopref instead
    - durationcheck
  presets:
    - bugs
    - unused
@@ -36,7 +35,6 @@ issues:
      linters:
        - gocyclo
        - deadcode
        - errorlint
    - path: pkg/integrations/*
      linters:
        - gocyclo
@@ -82,9 +80,3 @@ issues:
    - text: "Missed string"
      linters:
        - goheader
    - path: pkg/.*/error.go
      linters:
        - errorlint
    - path: pkg/models/favorites\.go
      linters:
        - nilerr
5 .vscode/settings.json vendored
@@ -1,5 +0,0 @@
{
    "go.testEnvVars": {
        "VIKUNJA_SERVICE_ROOTPATH": "${workspaceRoot}"
    }
}
@@ -23,7 +23,7 @@ RUN if [ -n "${VIKUNJA_VERSION}" ]; then git checkout "${VIKUNJA_VERSION}"; fi \
# The actual image
# Note: I wanted to use the scratch image here, but unfortunatly the go-sqlite bindings require cgo and
# because of this, the container would not start when I compiled the image without cgo.
FROM alpine:3.12
FROM alpine:3.15
LABEL maintainer="maintainers@vikunja.io"

WORKDIR /app/vikunja/
@@ -6,10 +6,6 @@ service:
  # The duration of the issed JWT tokens in seconds.
  # The default is 259200 seconds (3 Days).
  jwtttl: 259200
  # The duration of the "remember me" time in seconds. When the login request is made with
  # the long param set, the token returned will be valid for this period.
  # The default is 2592000 seconds (30 Days).
  jwtttllong: 2592000
  # The interface on which to run the webserver
  interface: ":3456"
  # Path to Unix socket. If set, it will be created and used instead of tcp
@@ -22,8 +18,6 @@ service:
  # Vikunja will also look in this path for a config file, so you could provide only this variable to point to a folder
  # with a config file which will then be used.
  rootpath: <rootpath>
  # Path on the file system to serve static files from. Set to the path of the frontend files to host frontend alongside the api.
  staticpath: ""
  # The max number of items which can be returned per page
  maxitemsperpage: 50
  # Enable the caldav endpoint, see the docs for more details
@@ -62,11 +56,11 @@ database:
  type: "sqlite"
  # Database user which is used to connect to the database.
  user: "vikunja"
  # Database password
  # Databse password
  password: ""
  # Database host
  # Databse host
  host: "localhost"
  # Database to use
  # Databse to use
  database: "vikunja"
  # When using sqlite, this is the path where to store the data
  path: "./vikunja.db"
@@ -79,12 +73,6 @@ database:
  # Secure connection mode. Only used with postgres.
  # (see https://pkg.go.dev/github.com/lib/pq?tab=doc#hdr-Connection_String_Parameters)
  sslmode: disable
  # The path to the client cert. Only used with postgres.
  sslcert: ""
  # The path to the client key. Only used with postgres.
  sslkey: ""
  # The path to the ca cert. Only used with postgres.
  sslrootcert: ""
  # Enable SSL/TLS for mysql connections. Options: false, true, skip-verify, preferred
  tls: false
@@ -2,7 +2,7 @@ baseurl: https://vikunja.io/docs/
title: Vikunja
theme: vikunja
enableRobotsTXT: true
canonifyURLs: false
canonifyURLs: true

pygmentsUseClasses: true
@@ -26,7 +26,7 @@ If you plan to do a bigger change, it is better to open an issue for discussion

The code for the api is located at [code.vikunja.io/api](https://code.vikunja.io/api).

We use go modules to manage third-party libraries for Vikunja, so you'll need at least go `1.17` to use these.
We use go modules to manage third-party libraries for Vikunja, so you'll need at least go `1.11` to use these.

A lot of developing tasks are automated using a Magefile, so make sure to [take a look at it]({{< ref "mage.md">}}).
@@ -11,9 +11,9 @@ menu:
# Mage

Vikunja uses [Mage](https://magefile.org/) to script common development tasks and even releasing.
Mage is a pure go solution which allows for greater flexibility and things like better parallelization.
Mage is a pure go solution which allows for greater flexibility and things like better paralelization.

This document explains what tasks are available and what they do.
This document explains what taks are available and what they do.

{{< table_of_contents >}}
@@ -10,31 +10,32 @@ menu:

# Testing

{{< table_of_contents >}}

## API Tests

The following parts are about the kinds of tests in the API package and how to run them.

### Prerequesites

To run any kind of test, you need to specify Vikunja's [root path](https://vikunja.io/docs/config-options/#rootpath).
This is required to make sure all test fixtures are correctly loaded.

The easies way to do that is to set the environment variable `VIKUNJA_SERVICE_ROOTPATH` to the path where you cloned the working directory.

### Unit tests

To run unit tests with [mage]({{< ref "mage.md">}}), execute
You can run unit tests with [mage]({{< ref "mage.md">}}) with

{{< highlight bash >}}
mage test:unit
{{< /highlight >}}

In Vikunja, everything that is not an integration test counts as unit test - even if it accesses the db.
This definition is a bit blurry, but we haven't found a better one yet.
{{< table_of_contents >}}

### Integration tests
## Running tests with config

You can run tests with all available config variables if you want, enabeling you to run tests for a lot of scenarios.

To use the normal config set the enviroment variable `VIKUNJA_TESTS_USE_CONFIG=1`.

## Show sql queries

When `UNIT_TESTS_VERBOSE=1` is set, all sql queries will be shown when tests are run.

## Fixtures

All tests are run against a set of db fixtures.
These fixtures are defined in `pkg/models/fixtures` in YAML-Files which represent the database structure.

When you add a new test case which requires new database entries to test against, update these files.

## Integration tests

All integration tests live in `pkg/integrations`.
You can run them by executing `mage test:integration`.
@@ -44,25 +45,7 @@ see at the beginning of this document.

To run integration tests, use `mage test:integration`.

### Running tests with config

You can run tests with all available config variables if you want, enabeling you to run tests for a lot of scenarios.
We use this in CI to run all tests with different databases.

To use the normal config set the enviroment variable `VIKUNJA_TESTS_USE_CONFIG=1`.

### Showing sql queries

When the environment variable `UNIT_TESTS_VERBOSE=1` is set, all sql queries will be shown during the test run.

### Fixtures

All tests are run against a set of db fixtures.
These fixtures are defined in `pkg/models/fixtures` in YAML-Files which represent the database structure.

When you add a new test case which requires new database entries to test against, update these files.

#### Initializing db fixtures when writing tests
## Initializing db fixtures when writing tests

All db fixtures for all tests live in the `pkg/db/fixtures/` folder as yaml files.
Each file has the same name as the table the fixtures are for.
@@ -71,39 +54,19 @@ You should put new fixtures in this folder.
When initializing db fixtures, you are responsible for defining which tables your package needs in your test init function.
Usually, this is done as follows (this code snippet is taken from the `user` package):

{{< highlight go >}}
```go
err = db.InitTestFixtures("users")
if err != nil {
	log.Fatal(err)
}
{{< /highlight >}}
```

In your actual tests, you then load the fixtures into the in-memory db like so:

{{< highlight go >}}
```go
db.LoadAndAssertFixtures(t)
{{< /highlight >}}
```

This will load all fixtures you defined in your test init method.
You should always use this method to load fixtures, the only exception is when your package tests require extra test
fixtures other than db fixtures (like files).

## Frontend tests

The frontend has end to end tests with Cypress that use a Vikunja instance and drive a browser against it.
Check out the docs [in the frontend repo](https://kolaente.dev/vikunja/frontend/src/branch/main/cypress/README.md) about how they work and how to get them running.

### Unit Tests

To run the frontend unit tests, run

{{< highlight bash >}}
yarn test:unit
{{< /highlight >}}

The frontend also has a watcher available that re-runs all unit tests every time you change something.
To use it, simply run

{{< highlight bash >}}
yarn test:unit-watch
{{< /highlight >}}
@@ -10,37 +10,20 @@ menu:

# Build Vikunja from source

To completely build Vikunja from source, you need to build the api and frontend.
Vikunja being a go application, has no other dependencies than go itself.
All libraries are bundeled inside the repo in the `vendor/` folder, so all it boils down to are these steps:

{{< table_of_contents >}}

## API

The Vikunja API has no other dependencies than go itself.
That means compiling it boils down to these steps:

1. Make sure [Go](https://golang.org/doc/install) is properly installed on your system. You'll need at least Go `1.17`.
2. Make sure [Mage](https://magefile.org) is properly installed on your system.
3. Clone the repo with `git clone https://code.vikunja.io/api` and switch into the directory.
1. Make sure [Go](https://golang.org/doc/install) is properly installed on your system. You'll need at least Go `1.9`.
2. Make sure [Mage](https://magefile) is properly installed on your system.
3. Clone the repo with `git clone https://code.vikunja.io/api`
3. Run `mage build:build` in the source of this repo. This will build a binary in the root of the repo which will be able to run on your system.

*Note:* Static ressources such as email templates are built into the binary.
For these to work, you may need to run `mage build:generate` before building the vikunja binary.
When builing entirely with `mage`, you dont need to do this, `mage build:generate` will be run automatically when running `mage build:build`.

### Build for different architectures
# Build for different architectures

To build for other platforms and architectures than the one you're currently on, simply run `mage release:release` or `mage release:{linux|windows|darwin}`.

More options are available, please refer to the [magefile docs]({{< ref "../development/mage.md">}}) for more details.

## Frontend

The code for the frontend is located at [code.vikunja.io/frontend](https://code.vikunja.io/frontend).

You need to have yarn v1 and nodejs in version 16 installed.

1. Make sure [yarn v1](https://yarnpkg.com/getting-started/install) is properly installed on your system.
3. Clone the repo with `git clone https://code.vikunja.io/frontend` and switch into the directory.
3. Install all dependencies with `yarn install`
4. Build the frontend with `yarn build`. This will result in a js bundle in the `dist/` folder which you can deploy.
More options are available, please refer to the [magefile docs]({{< ref "../development/mage.md">}}) for more details.
@@ -91,19 +91,6 @@ Full path: `service.jwtttl`
Environment path: `VIKUNJA_SERVICE_JWTTTL`


### jwtttllong

The duration of the "remember me" time in seconds. When the login request is made with
the long param set, the token returned will be valid for this period.
The default is 2592000 seconds (30 Days).

Default: `2592000`

Full path: `service.jwtttllong`

Environment path: `VIKUNJA_SERVICE_JWTTTLLONG`


### interface

The interface on which to run the webserver
@@ -161,17 +148,6 @@ Full path: `service.rootpath`
Environment path: `VIKUNJA_SERVICE_ROOTPATH`


### staticpath

Path on the file system to serve static files from. Set to the path of the frontend files to host frontend alongside the api.

Default: `<empty>`

Full path: `service.staticpath`

Environment path: `VIKUNJA_SERVICE_STATICPATH`


### maxitemsperpage

The max number of items which can be returned per page
@@ -351,7 +327,7 @@ Environment path: `VIKUNJA_DATABASE_USER`

### password

Database password
Databse password

Default: `<empty>`

@@ -362,7 +338,7 @@ Environment path: `VIKUNJA_DATABASE_PASSWORD`

### host

Database host
Databse host

Default: `localhost`

@@ -373,7 +349,7 @@ Environment path: `VIKUNJA_DATABASE_HOST`

### database

Database to use
Databse to use

Default: `vikunja`

@@ -438,39 +414,6 @@ Full path: `database.sslmode`
Environment path: `VIKUNJA_DATABASE_SSLMODE`


### sslcert

The path to the client cert. Only used with postgres.

Default: `<empty>`

Full path: `database.sslcert`

Environment path: `VIKUNJA_DATABASE_SSLCERT`


### sslkey

The path to the client key. Only used with postgres.

Default: `<empty>`

Full path: `database.sslkey`

Environment path: `VIKUNJA_DATABASE_SSLKEY`


### sslrootcert

The path to the ca cert. Only used with postgres.

Default: `<empty>`

Full path: `database.sslrootcert`

Environment path: `VIKUNJA_DATABASE_SSLROOTCERT`


### tls

Enable SSL/TLS for mysql connections. Options: false, true, skip-verify, preferred
@@ -370,81 +370,3 @@ services:
    volumes:
      - ./Caddyfile:/etc/caddy/Caddyfile:ro
{{< /highlight >}}

## Setup on a Synology NAS

There is a proxy preinstalled in DSM, so if you want to access vikunja from outside,
you can prepare 2 proxy rules:

* a redirection rule for vikunja's api (see example screenshot using port 3456)
* a similar redirection rule for vikunja's frontend (using port 4321)



You should also add 2 empty folders for mariadb and vikunja inside Synology's
docker main folders:

* Docker
* vikunja
* mariadb

Synology has it's own GUI for managing Docker containers... But it's easier via docker compose.

To do that, you can

* either activate SSH and paste the adapted compose file in a terminal (using Putty or similar)
* without activating SSH as a "custom script" (go to Control Panel / Task Scheduler / Create / Scheduled Task / User-defined script)
* without activating SSH, by using Portainer (you have to install first, check out [this tutorial](https://www.portainer.io/blog/how-to-install-portainer-on-a-synology-nas) for exmple):
  1. Go to **Dashboard / Stacks** click the button **"Add Stack"**
  2. Give it the name Vikunja and paste the adapted docker compose file
  3. Deploy the Stack with the "Delpoy Stack" button:



The docker-compose file we're going to use is very similar to the [example without any proxy](#example-without-any-proxy) above:

{{< highlight yaml >}}
version: '3'

services:
  db:
    image: mariadb:10
    command: --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
    environment:
      MYSQL_ROOT_PASSWORD: supersecret
      MYSQL_USER: vikunja
      MYSQL_PASSWORD: secret
      MYSQL_DATABASE: vikunja
    volumes:
      - ./db:/var/lib/mysql
    restart: unless-stopped
  api:
    image: vikunja/api
    environment:
      VIKUNJA_DATABASE_HOST: db
      VIKUNJA_DATABASE_PASSWORD: secret
      VIKUNJA_DATABASE_TYPE: mysql
      VIKUNJA_DATABASE_USER: vikunja
      VIKUNJA_DATABASE_DATABASE: vikunja
    ports:
      - 3456:3456
    volumes:
      - ./files:/app/vikunja/files
    depends_on:
      - db
    restart: unless-stopped
  frontend:
    image: vikunja/frontend
    ports:
      - 4321:80
    environment:
      VIKUNJA_API_URL: http://vikunja-api-domain.tld/api/v1
    restart: unless-stopped
{{< /highlight >}}

You may want to change the volumes to match the rest of your setup.

Once deployed, you might want to change the [`PUID` and `GUID` settings]({{< ref "install-backend.md">}}#setting-user-and-group-id-of-the-user-running-vikunja) or [set the time zone]({{< ref "config.md">}}#timezone).

After registering all your users, you might also want to [disable the user registration]({{<ref "config.md">}}#enableregistration).
@@ -52,7 +52,7 @@ ln -s /opt/vikunja/vikunja /usr/bin/vikunja

### Systemd service

Save the following service file to `/etc/systemd/system/vikunja.service` and adapt it to your needs:
Take the following `service` file and adapt it to your needs:

{{< highlight service >}}
[Unit]
@@ -83,6 +83,8 @@ WantedBy=multi-user.target

If you've installed Vikunja to a directory other than `/opt/vikunja`, you need to adapt `WorkingDirectory` accordingly.

Save the file to `/etc/systemd/system/vikunja.service`

After you made all nessecary modifications, it's time to start the service:

{{< highlight bash >}}
@@ -80,22 +80,6 @@ server {
<b>NOTE:</b> If you change the max upload size in Vikunja's settings, you'll need to also change the <code>client_max_body_size</code> in the nginx proxy config.
</div>

## NGINX Proxy Manager (NPM)

1. Create a standard Proxy Host for the Vikunja Frontend within NPM and point it to the URL you plan to use. The next several steps will enable the Proxy Host to successfully navigate to the API (on port 3456).
2. Verify that the page will pull up in your browser. (Do not bother trying to log in. It won't work. Trust me.)
3. Now, we'll work with the NPM container, so you need to identify the container name for your NPM installation. e.g. NGINX-PM
4. From the command line, enter `sudo docker exec -it [NGINX-PM container name] /bin/bash` and navigate to the proxy hosts folder where the `.conf` files are stashed. Probably `/data/nginx/proxy_host`. (This folder is a persistent folder created in the NPM container and mounted by NPM.)
5. Locate the `.conf` file where the server_name inside the file matches your Vikunja Proxy Host. Once found, add the following code, unchanged, just above the existing location block in that file. (They are listed by number, not name.)
```
location ~* ^/(api|dav|\.well-known)/ {
    proxy_pass http://api:3456;
    client_max_body_size 20M;
}
```
6. After saving the edited file, return to NPM's UI browser window and refresh the page to verify your Proxy Host for Vikunja is still online.
7. Now, switch over to your Vikunja browswer window and hit refresh. If you configured your URL correctly in original Vikunja container, you should be all set and the browser will correctly show Vikunja. If not, you'll need to adjust the address in the top of the login subscreen to match your proxy address.

## Apache

Put the following config in `cat /etc/apache2/sites-available/vikunja.conf`:
@@ -124,4 +108,4 @@ Put the following config in `cat /etc/apache2/sites-available/vikunja.conf`:

**Note:** The apache modules `proxy`, `proxy_http` and `rewrite` must be enabled for this.

For more details see the [frontend apache configuration]({{< ref "install-frontend.md#apache">}}).
For more details see the [frontend apache configuration]({{< ref "install-frontend.md#apache">}}).
@@ -18,8 +18,4 @@ server {
    location /docs/contact {
        return 301 $scheme://vikunja.io/en/contact;
    }

    location /docs/docs {
        return 301 $scheme://vikunja.io/docs;
    }
}
BIN docs/static/synology-proxy-1.png vendored (binary file not shown; before: 121 KiB)
BIN docs/static/synology-proxy-2.png vendored (binary file not shown; before: 502 KiB)
47 go.mod
@@ -22,56 +22,53 @@ require (
	github.com/ThreeDotsLabs/watermill v1.1.1
	github.com/adlio/trello v1.9.0
	github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef
	github.com/bbrks/go-blurhash v1.1.1
	github.com/beevik/etree v1.1.0 // indirect
	github.com/c2h5oh/datasize v0.0.0-20200825124411-48ed595a09d2
	github.com/coreos/go-oidc/v3 v3.2.0
	github.com/coreos/go-oidc/v3 v3.1.0
	github.com/cweill/gotests v1.6.0
	github.com/d4l3k/messagediff v1.2.1
	github.com/disintegration/imaging v1.6.2
	github.com/dustinkirkland/golang-petname v0.0.0-20191129215211-8e5a1ed0cff0
	github.com/gabriel-vasile/mimetype v1.4.0
	github.com/getsentry/sentry-go v0.13.0
	github.com/getsentry/sentry-go v0.11.0
	github.com/go-errors/errors v1.1.1 // indirect
	github.com/go-redis/redis/v8 v8.11.5
	github.com/go-redis/redis/v8 v8.11.4
	github.com/go-sql-driver/mysql v1.6.0
	github.com/go-testfixtures/testfixtures/v3 v3.6.1
	github.com/golang-jwt/jwt/v4 v4.4.1
	github.com/golang-jwt/jwt/v4 v4.1.0
	github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0
	github.com/golang/snappy v0.0.4 // indirect
	github.com/iancoleman/strcase v0.2.0
	github.com/imdario/mergo v0.3.13
	github.com/labstack/echo/v4 v4.7.2
	github.com/imdario/mergo v0.3.12
	github.com/labstack/echo/v4 v4.6.1
	github.com/labstack/gommon v0.3.1
	github.com/laurent22/ical-go v0.1.1-0.20181107184520-7e5d6ade8eef
	github.com/lib/pq v1.10.6
	github.com/magefile/mage v1.13.0
	github.com/mattn/go-sqlite3 v1.14.13
	github.com/lib/pq v1.10.4
	github.com/magefile/mage v1.11.0
	github.com/mattn/go-sqlite3 v1.14.9
	github.com/olekukonko/tablewriter v0.0.5
	github.com/op/go-logging v0.0.0-20160315200505-970db520ece7
	github.com/pquerna/otp v1.3.0
	github.com/prometheus/client_golang v1.12.2
	github.com/prometheus/client_golang v1.11.0
	github.com/robfig/cron/v3 v3.0.1
	github.com/samedi/caldav-go v3.0.0+incompatible
	github.com/spf13/afero v1.8.2
	github.com/spf13/cobra v1.4.0
	github.com/spf13/viper v1.11.0
	github.com/stretchr/testify v1.7.2
	github.com/swaggo/swag v1.8.2
	github.com/tkuchiki/go-timezone v0.2.2
	github.com/ulule/limiter/v3 v3.10.0
	github.com/vectordotdev/go-datemath v0.1.1-0.20211214182920-0a4ac8742b93
	github.com/yuin/goldmark v1.4.12
	golang.org/x/crypto v0.0.0-20220411220226-7b82a4e95df4
	golang.org/x/image v0.0.0-20220302094943-723b81ca9867
	golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5
	github.com/spf13/afero v1.6.0
	github.com/spf13/cobra v1.2.1
	github.com/spf13/viper v1.9.0
	github.com/stretchr/testify v1.7.0
	github.com/swaggo/swag v1.7.4
	github.com/ulule/limiter/v3 v3.9.0
	github.com/yuin/goldmark v1.4.4
	golang.org/x/crypto v0.0.0-20211117183948-ae814b36b871
	golang.org/x/image v0.0.0-20211028202545-6944b10bf410
	golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8
	golang.org/x/sync v0.0.0-20210220032951-036812b2e83c
	golang.org/x/sys v0.0.0-20220412211240-33da011f77ad
	golang.org/x/sys v0.0.0-20211117180635-dee7805ff2e1
	golang.org/x/term v0.0.0-20210927222741-03fcf44c2211
	gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect
	gopkg.in/d4l3k/messagediff.v1 v1.2.1
	gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df
	gopkg.in/yaml.v3 v3.0.1
	gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b
	src.techknowlogick.com/xgo v1.4.1-0.20210311222705-d25c33fcd864
	src.techknowlogick.com/xormigrate v1.4.0
	xorm.io/builder v0.3.9
@ -14,7 +14,6 @@
|
||||
// You should have received a copy of the GNU Affero General Public Licensee
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
//go:build mage
|
||||
// +build mage
|
||||
|
||||
package main
|
||||
@ -350,7 +349,7 @@ func (Test) Unit() {
|
||||
mg.Deps(initVars)
|
||||
setApiPackages()
|
||||
// We run everything sequentially and not in parallel to prevent issues with real test databases
|
||||
args := append([]string{"test", Goflags[0], "-p", "1", "-coverprofile", "cover.out", "-timeout", "20m"}, ApiPackages...)
|
||||
args := append([]string{"test", Goflags[0], "-p", "1", "-timeout", "20m"}, ApiPackages...)
|
||||
runAndStreamOutput("go", args...)
|
||||
}
|
||||
|
||||
|
@ -24,8 +24,6 @@ import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/asaskevich/govalidator"
|
||||
|
||||
"code.vikunja.io/api/pkg/db"
|
||||
"code.vikunja.io/api/pkg/initialize"
|
||||
"code.vikunja.io/api/pkg/log"
|
||||
@ -177,11 +175,6 @@ var userCreateCmd = &cobra.Command{
|
||||
Email: userFlagEmail,
|
||||
Password: getPasswordFromFlagOrInput(),
|
||||
}
|
||||
|
||||
if !govalidator.IsEmail(userFlagEmail) {
|
||||
log.Fatalf("Provided email is invalid.")
|
||||
}
|
||||
|
||||
newUser, err := user.CreateUser(s, u)
|
||||
if err != nil {
|
||||
_ = s.Rollback()
|
||||
|
@ -21,10 +21,8 @@ import (
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
"time"
|
||||
_ "time/tzdata" // Imports time zone data instead of relying on the os
|
||||
@ -40,16 +38,14 @@ const (
|
||||
// #nosec
|
||||
ServiceJWTSecret Key = `service.JWTSecret`
|
||||
ServiceJWTTTL Key = `service.jwtttl`
|
||||
ServiceJWTTTLLong Key = `service.jwtttllong`
|
||||
ServiceInterface Key = `service.interface`
|
||||
ServiceUnixSocket Key = `service.unixsocket`
|
||||
ServiceUnixSocketMode Key = `service.unixsocketmode`
|
||||
ServiceFrontendurl Key = `service.frontendurl`
|
||||
ServiceEnableCaldav Key = `service.enablecaldav`
|
||||
ServiceRootpath Key = `service.rootpath`
|
||||
ServiceStaticpath Key = `service.staticpath`
|
||||
ServiceMaxItemsPerPage Key = `service.maxitemsperpage`
|
||||
// Deprecated: Use metrics.enabled
|
||||
// Deprecated. Use metrics.enabled
|
||||
ServiceEnableMetrics Key = `service.enablemetrics`
|
||||
ServiceMotd Key = `service.motd`
|
||||
ServiceEnableLinkSharing Key = `service.enablelinksharing`
|
||||
@ -81,9 +77,6 @@ const (
|
||||
DatabaseMaxIdleConnections Key = `database.maxidleconnections`
|
||||
DatabaseMaxConnectionLifetime Key = `database.maxconnectionlifetime`
|
||||
DatabaseSslMode Key = `database.sslmode`
|
||||
DatabaseSslCert Key = `database.sslcert`
|
||||
DatabaseSslKey Key = `database.sslkey`
|
||||
DatabaseSslRootCert Key = `database.sslrootcert`
|
||||
DatabaseTLS Key = `database.tls`
|
||||
|
||||
CacheEnabled Key = `cache.enabled`
|
||||
@ -223,39 +216,6 @@ func (k Key) setDefault(i interface{}) {
|
||||
viper.SetDefault(string(k), i)
|
||||
}
|
||||
|
||||
// Tries different methods to figure out the binary folder.
|
||||
// Copied and adopted from https://github.com/speedata/publisher/commit/3b668668d57edef04ea854d5bbd58f83eb1b799f
|
||||
func getBinaryDirLocation() string {
|
||||
// First, check if the standard library gives us the path. This will work 99% of the time.
|
||||
ex, err := os.Executable()
|
||||
if err == nil {
|
||||
return filepath.Dir(ex)
|
||||
}
|
||||
|
||||
// Then check if the binary was run with a full path and use that if that's the case.
|
||||
if strings.Contains(os.Args[0], "/") {
|
||||
binDir, err := filepath.Abs(filepath.Dir(os.Args[0]))
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
return binDir
|
||||
}
|
||||
|
||||
exeSuffix := ""
|
||||
if runtime.GOOS == "windows" {
|
||||
exeSuffix = ".exe"
|
||||
}
|
||||
|
||||
// All else failing, search for a vikunja binary in the current $PATH.
|
||||
// This can give wrong results.
|
||||
exeLocation, err := exec.LookPath("vikunja" + exeSuffix)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
return filepath.Dir(exeLocation)
|
||||
}
|
||||
|
||||
// InitDefaultConfig sets default config values
|
||||
// This is an extra function so we can call it when initializing tests without initializing the full config
|
||||
func InitDefaultConfig() {
|
||||
@ -267,15 +227,18 @@ func InitDefaultConfig() {
|
||||
|
||||
// Service
|
||||
ServiceJWTSecret.setDefault(random)
|
||||
ServiceJWTTTL.setDefault(259200) // 72 hours
|
||||
ServiceJWTTTLLong.setDefault(2592000) // 30 days
|
||||
ServiceJWTTTL.setDefault(259200)
|
||||
ServiceInterface.setDefault(":3456")
|
||||
ServiceUnixSocket.setDefault("")
|
||||
ServiceFrontendurl.setDefault("")
|
||||
ServiceEnableCaldav.setDefault(true)
|
||||
|
||||
ServiceRootpath.setDefault(getBinaryDirLocation())
|
||||
ServiceStaticpath.setDefault("")
|
||||
ex, err := os.Executable()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
exPath := filepath.Dir(ex)
|
||||
ServiceRootpath.setDefault(exPath)
|
||||
ServiceMaxItemsPerPage.setDefault(50)
|
||||
ServiceEnableMetrics.setDefault(false)
|
||||
ServiceMotd.setDefault("")
|
||||
@ -303,9 +266,6 @@ func InitDefaultConfig() {
|
||||
DatabaseMaxIdleConnections.setDefault(50)
|
||||
DatabaseMaxConnectionLifetime.setDefault(10000)
|
||||
DatabaseSslMode.setDefault("disable")
|
||||
DatabaseSslCert.setDefault("")
|
||||
DatabaseSslKey.setDefault("")
|
||||
DatabaseSslRootCert.setDefault("")
|
||||
DatabaseTLS.setDefault("false")
|
||||
|
||||
// Cacher
|
||||
@ -393,17 +353,11 @@ func InitConfig() {
|
||||
|
||||
viper.AddConfigPath(".")
|
||||
viper.SetConfigName("config")
|
||||
|
||||
err = viper.ReadInConfig()
|
||||
if viper.ConfigFileUsed() != "" {
|
||||
log.Printf("Using config file: %s", viper.ConfigFileUsed())
|
||||
|
||||
if err != nil {
|
||||
log.Println(err.Error())
|
||||
log.Println("Using default config.")
|
||||
}
|
||||
} else {
|
||||
log.Println("No config file found, using default or config from environment variables.")
|
||||
if err != nil {
|
||||
log.Println(err.Error())
|
||||
log.Println("Using default config.")
|
||||
return
|
||||
}
|
||||
|
||||
if CacheType.GetString() == "keyvalue" {
|
||||
@ -438,6 +392,8 @@ func InitConfig() {
|
||||
log.Println("WARNING: service.enablemetrics is deprecated and will be removed in a future release. Please use metrics.enable.")
|
||||
MetricsEnabled.Set(true)
|
||||
}
|
||||
|
||||
log.Printf("Using config file: %s", viper.ConfigFileUsed())
|
||||
}
|
||||
|
||||
func random(length int) (string, error) {
|
||||
|
@ -150,16 +150,13 @@ func parsePostgreSQLHostPort(info string) (string, string) {
|
||||
|
||||
func initPostgresEngine() (engine *xorm.Engine, err error) {
|
||||
host, port := parsePostgreSQLHostPort(config.DatabaseHost.GetString())
|
||||
connStr := fmt.Sprintf("host=%s port=%s user=%s password=%s dbname=%s sslmode=%s sslcert=%s sslkey=%s sslrootcert=%s",
|
||||
connStr := fmt.Sprintf("host=%s port=%s user=%s password=%s dbname=%s sslmode=%s",
|
||||
host,
|
||||
port,
|
||||
url.PathEscape(config.DatabaseUser.GetString()),
|
||||
url.PathEscape(config.DatabasePassword.GetString()),
|
||||
config.DatabaseDatabase.GetString(),
|
||||
config.DatabaseSslMode.GetString(),
|
||||
config.DatabaseSslCert.GetString(),
|
||||
config.DatabaseSslKey.GetString(),
|
||||
config.DatabaseSslRootCert.GetString(),
|
||||
)
|
||||
|
||||
engine, err = xorm.NewEngine("postgres", connStr)
|
||||
@ -189,7 +186,7 @@ func initSqliteEngine() (engine *xorm.Engine, err error) {
|
||||
}
|
||||
file, err := os.OpenFile(path, os.O_RDWR|os.O_CREATE, 0)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("could not open database file [uid=%d, gid=%d]: %w", os.Getuid(), os.Getgid(), err)
|
||||
return nil, fmt.Errorf("could not open database file [uid=%d, gid=%d]: %s", os.Getuid(), os.Getgid(), err)
|
||||
}
|
||||
_ = file.Close() // We directly close the file because we only want to check if it is writable. It will be reopened lazily later by xorm.
|
||||
|
||||
|
@ -17,7 +17,6 @@
|
||||
package files
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"io"
|
||||
"os"
|
||||
"strconv"
|
||||
@ -137,10 +136,9 @@ func (f *File) Delete() (err error) {
|
||||
|
||||
err = afs.Remove(f.getFileName())
|
||||
if err != nil {
|
||||
var perr *os.PathError
|
||||
if errors.As(err, &perr) {
|
||||
if e, is := err.(*os.PathError); is {
|
||||
// Don't fail when removing the file failed
|
||||
log.Errorf("Error deleting file %d: %w", err)
|
||||
log.Errorf("Error deleting file %d: %s", e.Error())
|
||||
return s.Commit()
|
||||
}
|
||||
|
||||
|
@ -78,15 +78,15 @@ func FullInit() {
|
||||
|
||||
LightInit()
|
||||
|
||||
// Initialize the files handler
|
||||
files.InitFileHandler()
|
||||
|
||||
// Run the migrations
|
||||
migration.Migrate(nil)
|
||||
|
||||
// Set Engine
|
||||
InitEngines()
|
||||
|
||||
// Initialize the files handler
|
||||
files.InitFileHandler()
|
||||
|
||||
// Start the mail daemon
|
||||
mail.StartMailDaemon()
|
||||
|
||||
|
@ -17,7 +17,6 @@
|
||||
package integrations
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"net/url"
|
||||
@ -120,7 +119,7 @@ func newTestRequest(t *testing.T, method string, handler func(ctx echo.Context)
|
||||
|
||||
func addUserTokenToContext(t *testing.T, user *user.User, c echo.Context) {
|
||||
// Get the token as a string
|
||||
token, err := auth.NewUserJWTAuthtoken(user, false)
|
||||
token, err := auth.NewUserJWTAuthtoken(user)
|
||||
assert.NoError(t, err)
|
||||
// We send the string token through the parsing function to get a valid jwt.Token
|
||||
tken, err := jwt.Parse(token, func(t *jwt.Token) (interface{}, error) {
|
||||
@ -175,8 +174,8 @@ func assertHandlerErrorCode(t *testing.T, err error, expectedErrorCode int) {
|
||||
t.Error("Error is nil")
|
||||
t.FailNow()
|
||||
}
|
||||
var httperr *echo.HTTPError
|
||||
if !errors.As(err, &httperr) {
|
||||
httperr, ok := err.(*echo.HTTPError)
|
||||
if !ok {
|
||||
t.Error("Error is not *echo.HTTPError")
|
||||
t.FailNow()
|
||||
}
|
||||
|
@ -113,7 +113,7 @@ func TestTaskCollection(t *testing.T) {
|
||||
t.Run("by priority", func(t *testing.T) {
|
||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"priority"}}, urlParams)
|
||||
assert.NoError(t, err)
|
||||
assert.Contains(t, rec.Body.String(), `{"id":33,"title":"task #33 with percent done","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0.5,"identifier":"test1-17","index":17,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":1,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
||||
assert.Contains(t, rec.Body.String(), `{"id":33,"title":"task #33 with percent done","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0.5,"identifier":"test1-17","index":17,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":1,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":4,"title":"task #4 low prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":1,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-4","index":4,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":3,"title":"task #3 high prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":100,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-3","index":3,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
||||
})
|
||||
t.Run("by priority desc", func(t *testing.T) {
|
||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"priority"}, "order_by": []string{"desc"}}, urlParams)
|
||||
@ -123,13 +123,13 @@ func TestTaskCollection(t *testing.T) {
|
||||
t.Run("by priority asc", func(t *testing.T) {
|
||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"priority"}, "order_by": []string{"asc"}}, urlParams)
|
||||
assert.NoError(t, err)
|
||||
assert.Contains(t, rec.Body.String(), `{"id":33,"title":"task #33 with percent done","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0.5,"identifier":"test1-17","index":17,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":1,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
||||
assert.Contains(t, rec.Body.String(), `{"id":33,"title":"task #33 with percent done","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0.5,"identifier":"test1-17","index":17,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":1,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":4,"title":"task #4 low prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":1,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-4","index":4,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":3,"title":"task #3 high prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":100,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-3","index":3,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
||||
})
|
||||
// should equal duedate asc
|
||||
t.Run("by due_date", func(t *testing.T) {
|
||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"due_date"}}, urlParams)
|
||||
assert.NoError(t, err)
|
||||
assert.Contains(t, rec.Body.String(), `[{"id":6,"title":"task #6 lower due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-11-30T22:25:24Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-6","index":6,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":3,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}`)
|
||||
assert.Contains(t, rec.Body.String(), `{"id":6,"title":"task #6 lower due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-11-30T22:25:24Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-6","index":6,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":3,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":5,"title":"task #5 higher due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-12-01T03:58:44Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-5","index":5,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
||||
})
|
||||
t.Run("by duedate desc", func(t *testing.T) {
|
||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"due_date"}, "order_by": []string{"desc"}}, urlParams)
|
||||
@ -140,12 +140,12 @@ func TestTaskCollection(t *testing.T) {
|
||||
t.Run("by duedate asc without suffix", func(t *testing.T) {
|
||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"due_date"}, "order_by": []string{"asc"}}, urlParams)
|
||||
assert.NoError(t, err)
|
||||
assert.Contains(t, rec.Body.String(), `[{"id":6,"title":"task #6 lower due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-11-30T22:25:24Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-6","index":6,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":3,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}`)
|
||||
assert.Contains(t, rec.Body.String(), `{"id":6,"title":"task #6 lower due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-11-30T22:25:24Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-6","index":6,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":3,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":5,"title":"task #5 higher due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-12-01T03:58:44Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-5","index":5,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
||||
})
|
||||
t.Run("by due_date without suffix", func(t *testing.T) {
|
||||
t.Run("by due_date without suffix", func(t *testing.T) {
|
||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"due_date"}}, urlParams)
|
||||
assert.NoError(t, err)
|
||||
assert.Contains(t, rec.Body.String(), `[{"id":6,"title":"task #6 lower due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-11-30T22:25:24Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-6","index":6,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":3,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}`)
|
||||
assert.Contains(t, rec.Body.String(), `{"id":6,"title":"task #6 lower due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-11-30T22:25:24Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-6","index":6,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":3,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":5,"title":"task #5 higher due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-12-01T03:58:44Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-5","index":5,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
||||
})
|
||||
t.Run("by duedate desc without suffix", func(t *testing.T) {
|
||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"due_date"}, "order_by": []string{"desc"}}, urlParams)
|
||||
@ -155,7 +155,7 @@ func TestTaskCollection(t *testing.T) {
|
||||
t.Run("by duedate asc", func(t *testing.T) {
|
||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"due_date"}, "order_by": []string{"asc"}}, urlParams)
|
||||
assert.NoError(t, err)
|
||||
assert.Contains(t, rec.Body.String(), `[{"id":6,"title":"task #6 lower due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-11-30T22:25:24Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-6","index":6,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":3,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}`)
|
||||
assert.Contains(t, rec.Body.String(), `{"id":6,"title":"task #6 lower due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-11-30T22:25:24Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-6","index":6,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":3,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":5,"title":"task #5 higher due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-12-01T03:58:44Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-5","index":5,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
||||
})
|
||||
t.Run("invalid sort parameter", func(t *testing.T) {
|
||||
_, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"loremipsum"}}, urlParams)
|
||||
@ -244,37 +244,12 @@ func TestTaskCollection(t *testing.T) {
|
||||
// the current date.
|
||||
assert.Equal(t, "[]\n", rec.Body.String())
|
||||
})
|
||||
t.Run("unix timestamps", func(t *testing.T) {
|
||||
rec, err := testHandler.testReadAllWithUser(
|
||||
url.Values{
|
||||
"filter_by": []string{"start_date", "end_date", "due_date"},
|
||||
"filter_value": []string{"1544500000", "1513164001", "1543500000"},
|
||||
"filter_comparator": []string{"greater", "less", "greater"},
|
||||
},
|
||||
urlParams,
|
||||
)
|
||||
assert.NoError(t, err)
|
||||
assert.NotContains(t, rec.Body.String(), `task #1`)
|
||||
assert.NotContains(t, rec.Body.String(), `task #2`)
|
||||
assert.NotContains(t, rec.Body.String(), `task #3`)
|
||||
assert.NotContains(t, rec.Body.String(), `task #4`)
|
||||
assert.Contains(t, rec.Body.String(), `task #5`)
|
||||
assert.Contains(t, rec.Body.String(), `task #6`)
|
||||
assert.Contains(t, rec.Body.String(), `task #7`)
|
||||
assert.NotContains(t, rec.Body.String(), `task #8`)
|
||||
assert.Contains(t, rec.Body.String(), `task #9`)
|
||||
assert.NotContains(t, rec.Body.String(), `task #10`)
|
||||
assert.NotContains(t, rec.Body.String(), `task #11`)
|
||||
assert.NotContains(t, rec.Body.String(), `task #12`)
|
||||
assert.NotContains(t, rec.Body.String(), `task #13`)
|
||||
assert.NotContains(t, rec.Body.String(), `task #14`)
|
||||
})
|
||||
})
|
||||
t.Run("invalid date", func(t *testing.T) {
|
||||
_, err := testHandler.testReadAllWithUser(
|
||||
url.Values{
|
||||
"filter_by": []string{"due_date"},
|
||||
"filter_value": []string{"invalid"},
|
||||
"filter_value": []string{"1540000000"},
|
||||
"filter_comparator": []string{"greater"},
|
||||
},
|
||||
nil,
|
||||
@ -366,7 +341,7 @@ func TestTaskCollection(t *testing.T) {
|
||||
t.Run("by priority", func(t *testing.T) {
|
||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"priority"}}, nil)
|
||||
assert.NoError(t, err)
|
||||
assert.Contains(t, rec.Body.String(), `{"id":33,"title":"task #33 with percent done","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0.5,"identifier":"test1-17","index":17,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":1,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
||||
assert.Contains(t, rec.Body.String(), `{"id":33,"title":"task #33 with percent done","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0.5,"identifier":"test1-17","index":17,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":1,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":4,"title":"task #4 low prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":1,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-4","index":4,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":3,"title":"task #3 high prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":100,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-3","index":3,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
||||
})
|
||||
t.Run("by priority desc", func(t *testing.T) {
|
||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"priority"}, "order_by": []string{"desc"}}, nil)
|
||||
@ -376,13 +351,13 @@ func TestTaskCollection(t *testing.T) {
|
||||
t.Run("by priority asc", func(t *testing.T) {
|
||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"priority"}, "order_by": []string{"asc"}}, nil)
|
||||
assert.NoError(t, err)
|
||||
assert.Contains(t, rec.Body.String(), `{"id":33,"title":"task #33 with percent done","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0.5,"identifier":"test1-17","index":17,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":1,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
||||
assert.Contains(t, rec.Body.String(), `{"id":33,"title":"task #33 with percent done","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0.5,"identifier":"test1-17","index":17,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":1,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":4,"title":"task #4 low prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":1,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-4","index":4,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":3,"title":"task #3 high prio","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"0001-01-01T00:00:00Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":100,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-3","index":3,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
||||
})
|
||||
// should equal duedate asc
|
||||
t.Run("by due_date", func(t *testing.T) {
|
||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"due_date"}}, nil)
|
||||
assert.NoError(t, err)
|
||||
assert.Contains(t, rec.Body.String(), `[{"id":6,"title":"task #6 lower due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-11-30T22:25:24Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-6","index":6,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":3,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":5,"title":"task #5 higher due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-12-01T03:58:44Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-5","index":5,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}`)
|
||||
assert.Contains(t, rec.Body.String(), `{"id":6,"title":"task #6 lower due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-11-30T22:25:24Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-6","index":6,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":3,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":5,"title":"task #5 higher due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-12-01T03:58:44Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-5","index":5,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
||||
})
|
||||
t.Run("by duedate desc", func(t *testing.T) {
|
||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"due_date"}, "order_by": []string{"desc"}}, nil)
|
||||
@ -392,7 +367,7 @@ func TestTaskCollection(t *testing.T) {
|
||||
t.Run("by duedate asc", func(t *testing.T) {
|
||||
rec, err := testHandler.testReadAllWithUser(url.Values{"sort_by": []string{"due_date"}, "order_by": []string{"asc"}}, nil)
|
||||
assert.NoError(t, err)
|
||||
assert.Contains(t, rec.Body.String(), `[{"id":6,"title":"task #6 lower due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-11-30T22:25:24Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-6","index":6,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":3,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":5,"title":"task #5 higher due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-12-01T03:58:44Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-5","index":5,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}`)
|
||||
assert.Contains(t, rec.Body.String(), `{"id":6,"title":"task #6 lower due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-11-30T22:25:24Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-6","index":6,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":3,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}},{"id":5,"title":"task #5 higher due date","description":"","done":false,"done_at":"0001-01-01T00:00:00Z","due_date":"2018-12-01T03:58:44Z","reminder_dates":null,"list_id":1,"repeat_after":0,"repeat_mode":0,"priority":0,"start_date":"0001-01-01T00:00:00Z","end_date":"0001-01-01T00:00:00Z","assignees":null,"labels":null,"hex_color":"","percent_done":0,"identifier":"test1-5","index":5,"related_tasks":{},"attachments":null,"is_favorite":false,"created":"2018-12-01T01:12:04Z","updated":"2018-12-01T01:12:04Z","bucket_id":2,"position":0,"kanban_position":0,"created_by":{"id":1,"name":"","username":"user1","created":"2018-12-01T15:13:12Z","updated":"2018-12-02T15:13:12Z"}}]`)
|
||||
})
|
||||
t.Run("invalid parameter", func(t *testing.T) {
|
||||
// Invalid parameter should not sort at all
|
||||
@ -476,7 +451,7 @@ func TestTaskCollection(t *testing.T) {
|
||||
_, err := testHandler.testReadAllWithUser(
|
||||
url.Values{
|
||||
"filter_by": []string{"due_date"},
|
||||
"filter_value": []string{"invalid"},
|
||||
"filter_value": []string{"1540000000"},
|
||||
"filter_comparator": []string{"greater"},
|
||||
},
|
||||
nil,
|
||||
|
@ -63,26 +63,23 @@ func InitLogger() {
}
}

// The backend is the part which actually handles logging the log entries somewhere.
cf := config.LogStandard.GetString()
var backend logging.Backend
backend = &NoopBackend{}
if cf != "off" && cf != "false" {
// We define our two backends
if config.LogStandard.GetString() != "off" {
stdWriter := GetLogWriter("standard")

level, err := logging.LogLevel(strings.ToUpper(config.LogLevel.GetString()))
if err != nil {
Fatalf("Error setting database log level: %s", err.Error())
}

logBackend := logging.NewLogBackend(stdWriter, "", 0)
backend = logging.NewBackendFormatter(logBackend, logging.MustStringFormatter(Fmt+"\n"))
backend := logging.NewBackendFormatter(logBackend, logging.MustStringFormatter(Fmt+"\n"))

backendLeveled := logging.AddModuleLevel(backend)
backendLeveled.SetLevel(level, logModule)

logInstance.SetBackend(backendLeveled)
}

level, err := logging.LogLevel(strings.ToUpper(config.LogLevel.GetString()))
if err != nil {
Fatalf("Error setting database log level: %s", err.Error())
}

backendLeveled := logging.AddModuleLevel(backend)
backendLeveled.SetLevel(level, logModule)

logInstance.SetBackend(backendLeveled)
}

// GetLogWriter returns the writer to where the normal log goes, depending on the config
@ -1,28 +0,0 @@
// Vikunja is a to-do list application to facilitate your life.
// Copyright 2018-2021 Vikunja and contributors. All rights reserved.
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public Licensee as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public Licensee for more details.
//
// You should have received a copy of the GNU Affero General Public Licensee
// along with this program. If not, see <https://www.gnu.org/licenses/>.

package log

import (
"github.com/op/go-logging"
)

// NoopBackend doesn't log anything. Used in cases where we want to disable logging completely.
type NoopBackend struct{}

func (n *NoopBackend) Log(level logging.Level, i int, record *logging.Record) error {
return nil
}
@ -45,13 +45,8 @@ func NewWatermillLogger() *WatermillLogger {
logger: logging.MustGetLogger(watermillLogModule),
}

cf := config.LogEvents.GetString()
var backend logging.Backend
backend = &NoopBackend{}
if cf != "off" && cf != "false" {
logBackend := logging.NewLogBackend(GetLogWriter("events"), "", 0)
backend = logging.NewBackendFormatter(logBackend, logging.MustStringFormatter(watermillFmt+"\n"))
}
logBackend := logging.NewLogBackend(GetLogWriter("events"), "", 0)
backend := logging.NewBackendFormatter(logBackend, logging.MustStringFormatter(watermillFmt+"\n"))

backendLeveled := logging.AddModuleLevel(backend)
backendLeveled.SetLevel(level, watermillLogModule)
@ -683,17 +683,17 @@ create unique index UQE_users_namespace_id

sess := tx.NewSession()
if err := sess.Begin(); err != nil {
return fmt.Errorf("unable to open session: %w", err)
return fmt.Errorf("unable to open session: %s", err)
}
for _, s := range sql {
_, err := sess.Exec(s)
if err != nil {
_ = sess.Rollback()
return fmt.Errorf("error executing update data for table %s, column %s: %w", table, column, err)
return fmt.Errorf("error executing update data for table %s, column %s: %s", table, column, err)
}
}
if err := sess.Commit(); err != nil {
return fmt.Errorf("error committing data change: %w", err)
return fmt.Errorf("error committing data change: %s", err)
}
return nil
}
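The hunk above toggles between %w and %s in these fmt.Errorf calls. A minimal standalone sketch (not part of this diff, plain standard library only) of the practical difference: %w keeps the wrapped error in the chain so callers can still match it with errors.Is or errors.As, while %s only copies its message.

package main

import (
	"database/sql"
	"errors"
	"fmt"
)

func main() {
	base := sql.ErrNoRows

	wrapped := fmt.Errorf("error committing data change: %w", base)
	flattened := fmt.Errorf("error committing data change: %s", base)

	// %w keeps the original error reachable for callers.
	fmt.Println(errors.Is(wrapped, sql.ErrNoRows)) // true
	// %s flattens it to text; the original error can no longer be detected.
	fmt.Println(errors.Is(flattened, sql.ErrNoRows)) // false
}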
@ -1,43 +0,0 @@
|
||||
// Vikunja is a to-do list application to facilitate your life.
|
||||
// Copyright 2018-2021 Vikunja and contributors. All rights reserved.
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public Licensee as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public Licensee for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public Licensee
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
package migration
|
||||
|
||||
import (
|
||||
"src.techknowlogick.com/xormigrate"
|
||||
"xorm.io/xorm"
|
||||
)
|
||||
|
||||
type user20211212151642 struct {
|
||||
Language string `xorm:"varchar(50) null" json:"-"`
|
||||
}
|
||||
|
||||
func (user20211212151642) TableName() string {
|
||||
return "users"
|
||||
}
|
||||
|
||||
func init() {
|
||||
migrations = append(migrations, &xormigrate.Migration{
|
||||
ID: "20211212151642",
|
||||
Description: "Add user language field",
|
||||
Migrate: func(tx *xorm.Engine) error {
|
||||
return tx.Sync2(user20211212151642{})
|
||||
},
|
||||
Rollback: func(tx *xorm.Engine) error {
|
||||
return nil
|
||||
},
|
||||
})
|
||||
}
|
@ -1,95 +0,0 @@
|
||||
// Vikunja is a to-do list application to facilitate your life.
|
||||
// Copyright 2018-2021 Vikunja and contributors. All rights reserved.
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public Licensee as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public Licensee for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public Licensee
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
package migration
|
||||
|
||||
import (
|
||||
"image"
|
||||
|
||||
"code.vikunja.io/api/pkg/files"
|
||||
"code.vikunja.io/api/pkg/log"
|
||||
"github.com/bbrks/go-blurhash"
|
||||
"golang.org/x/image/draw"
|
||||
"src.techknowlogick.com/xormigrate"
|
||||
"xorm.io/xorm"
|
||||
)
|
||||
|
||||
type lists20211212210054 struct {
|
||||
ID int64 `xorm:"bigint autoincr not null unique pk" json:"id" param:"list"`
|
||||
BackgroundFileID int64 `xorm:"null" json:"-"`
|
||||
BackgroundBlurHash string `xorm:"varchar(50) null" json:"background_blur_hash"`
|
||||
}
|
||||
|
||||
func (lists20211212210054) TableName() string {
|
||||
return "lists"
|
||||
}
|
||||
|
||||
func init() {
|
||||
migrations = append(migrations, &xormigrate.Migration{
|
||||
ID: "20211212210054",
|
||||
Description: "Add blurHash to list backgrounds.",
|
||||
Migrate: func(tx *xorm.Engine) error {
|
||||
err := tx.Sync2(lists20211212210054{})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
lists := []*lists20211212210054{}
|
||||
err = tx.Where("background_file_id is not null AND background_file_id != ?", 0).Find(&lists)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
log.Infof("Creating BlurHash for %d list backgrounds, this might take a while...", len(lists))
|
||||
|
||||
for _, l := range lists {
|
||||
bgFile := &files.File{
|
||||
ID: l.BackgroundFileID,
|
||||
}
|
||||
if err := bgFile.LoadFileByID(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
src, _, err := image.Decode(bgFile.File)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
dst := image.NewRGBA(image.Rect(0, 0, 32, 32))
|
||||
draw.NearestNeighbor.Scale(dst, dst.Rect, src, src.Bounds(), draw.Over, nil)
|
||||
|
||||
hash, err := blurhash.Encode(4, 3, dst)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
l.BackgroundBlurHash = hash
|
||||
_, err = tx.Where("id = ?", l.ID).
|
||||
Cols("background_blur_hash").
|
||||
Update(l)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
log.Debugf("Created BlurHash for list %d", l.ID)
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
Rollback: func(tx *xorm.Engine) error {
|
||||
return nil
|
||||
},
|
||||
})
|
||||
}
|
@ -1,50 +0,0 @@
|
||||
// Vikunja is a to-do list application to facilitate your life.
|
||||
// Copyright 2018-2021 Vikunja and contributors. All rights reserved.
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public Licensee as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public Licensee for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public Licensee
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
package migration
|
||||
|
||||
import (
|
||||
"code.vikunja.io/api/pkg/config"
|
||||
"src.techknowlogick.com/xormigrate"
|
||||
"xorm.io/xorm"
|
||||
)
|
||||
|
||||
type users20220112211537 struct {
|
||||
Timezone string `xorm:"varchar(255) null" json:"-"`
|
||||
}
|
||||
|
||||
func (users20220112211537) TableName() string {
|
||||
return "users"
|
||||
}
|
||||
|
||||
func init() {
|
||||
migrations = append(migrations, &xormigrate.Migration{
|
||||
ID: "20220112211537",
|
||||
Description: "Add time zone setting for users",
|
||||
Migrate: func(tx *xorm.Engine) error {
|
||||
err := tx.Sync2(users20220112211537{})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = tx.Update(&users20220112211537{Timezone: config.GetTimeZone().String()})
|
||||
return err
|
||||
},
|
||||
Rollback: func(tx *xorm.Engine) error {
|
||||
return nil
|
||||
},
|
||||
})
|
||||
}
|
@ -1,43 +0,0 @@
|
||||
// Vikunja is a to-do list application to facilitate your life.
|
||||
// Copyright 2018-2021 Vikunja and contributors. All rights reserved.
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public Licensee as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public Licensee for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public Licensee
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
package migration
|
||||
|
||||
import (
|
||||
"src.techknowlogick.com/xormigrate"
|
||||
"xorm.io/xorm"
|
||||
)
|
||||
|
||||
type users20220611191202 struct {
|
||||
HomeListID int64 `xorm:"bigint null index" json:"-"`
|
||||
}
|
||||
|
||||
func (users20220611191202) TableName() string {
|
||||
return "users"
|
||||
}
|
||||
|
||||
func init() {
|
||||
migrations = append(migrations, &xormigrate.Migration{
|
||||
ID: "20220611191202",
|
||||
Description: "Add home list for new tasks setting to users",
|
||||
Migrate: func(tx *xorm.Engine) error {
|
||||
return tx.Sync2(users20220611191202{})
|
||||
},
|
||||
Rollback: func(tx *xorm.Engine) error {
|
||||
return nil
|
||||
},
|
||||
})
|
||||
}
|
@ -49,7 +49,7 @@ func ExportUserData(s *xorm.Session, u *user.User) (err error) {
|
||||
// Open zip
|
||||
dumpFile, err := os.Create(tmpFilename)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error opening dump file: %w", err)
|
||||
return fmt.Errorf("error opening dump file: %s", err)
|
||||
}
|
||||
defer dumpFile.Close()
|
||||
|
||||
|
@ -59,8 +59,6 @@ type List struct {
|
||||
BackgroundFileID int64 `xorm:"null" json:"-"`
|
||||
// Holds extra information about the background set since some background providers require attribution or similar. If not null, the background can be accessed at /lists/{listID}/background
|
||||
BackgroundInformation interface{} `xorm:"-" json:"background_information"`
|
||||
// Contains a very small version of the list background to use as a blurry preview until the actual background is loaded. Check out https://blurha.sh/ to learn how it works.
|
||||
BackgroundBlurHash string `xorm:"varchar(50) null" json:"background_blur_hash"`
|
||||
|
||||
// True if a list is a favorite. Favorite lists show up in a separate namespace. This value depends on the user making the call to the api.
|
||||
IsFavorite bool `xorm:"-" json:"is_favorite"`
|
||||
@ -632,7 +630,6 @@ func UpdateList(s *xorm.Session, list *List, auth web.Auth, updateListBackground
|
||||
"is_archived",
|
||||
"identifier",
|
||||
"hex_color",
|
||||
"namespace_id",
|
||||
"position",
|
||||
}
|
||||
if list.Description != "" {
|
||||
@ -640,7 +637,7 @@ func UpdateList(s *xorm.Session, list *List, auth web.Auth, updateListBackground
|
||||
}
|
||||
|
||||
if updateListBackground {
|
||||
colsToUpdate = append(colsToUpdate, "background_file_id", "background_blur_hash")
|
||||
colsToUpdate = append(colsToUpdate, "background_file_id")
|
||||
}
|
||||
|
||||
wasFavorite, err := isFavorite(s, list.ID, auth, FavoriteKindList)
|
||||
@ -801,15 +798,14 @@ func (l *List) Delete(s *xorm.Session, a web.Auth) (err error) {
|
||||
}
|
||||
|
||||
// SetListBackground sets a background file as list background in the db
|
||||
func SetListBackground(s *xorm.Session, listID int64, background *files.File, blurHash string) (err error) {
|
||||
func SetListBackground(s *xorm.Session, listID int64, background *files.File) (err error) {
|
||||
l := &List{
|
||||
ID: listID,
|
||||
BackgroundFileID: background.ID,
|
||||
BackgroundBlurHash: blurHash,
|
||||
ID: listID,
|
||||
BackgroundFileID: background.ID,
|
||||
}
|
||||
_, err = s.
|
||||
Where("id = ?", l.ID).
|
||||
Cols("background_file_id", "background_blur_hash").
|
||||
Cols("background_file_id").
|
||||
Update(l)
|
||||
return
|
||||
}
|
||||
|
@ -144,7 +144,7 @@ func (ld *ListDuplicate) Create(s *xorm.Session, doer web.Auth) (err error) {
|
||||
}
|
||||
}
|
||||
|
||||
if err := SetListBackground(s, ld.List.ID, file, ld.List.BackgroundBlurHash); err != nil {
|
||||
if err := SetListBackground(s, ld.List.ID, file); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@ -216,7 +216,7 @@ func duplicateTasks(s *xorm.Session, doer web.Auth, ld *ListDuplicate, bucketMap
|
||||
// It is used to map old task items to new ones.
|
||||
taskMap := make(map[int64]int64)
|
||||
// Create + update all tasks (includes reminders)
|
||||
oldTaskIDs := make([]int64, 0, len(tasks))
|
||||
oldTaskIDs := make([]int64, len(tasks))
|
||||
for _, t := range tasks {
|
||||
oldID := t.ID
|
||||
t.ID = 0
|
||||
|
@ -116,25 +116,6 @@ func (l *List) CanUpdate(s *xorm.Session, a web.Auth) (canUpdate bool, err error
|
||||
return false, nil
|
||||
}
|
||||
|
||||
// Get the list
|
||||
ol, err := GetListSimpleByID(s, l.ID)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
// Check if we're moving the list into a different namespace.
|
||||
// If that is the case, we need to verify permissions to do so.
|
||||
if l.NamespaceID != 0 && l.NamespaceID != ol.NamespaceID {
|
||||
newNamespace := &Namespace{ID: l.NamespaceID}
|
||||
can, err := newNamespace.CanWrite(s, a)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
if !can {
|
||||
return false, ErrGenericForbidden{}
|
||||
}
|
||||
}
|
||||
|
||||
fid := getSavedFilterIDFromListID(l.ID)
|
||||
if fid > 0 {
|
||||
sf, err := getSavedFilterSimpleByID(s, fid)
|
||||
|
@ -163,65 +163,6 @@ func TestList_CreateOrUpdate(t *testing.T) {
|
||||
assert.True(t, IsErrListIdentifierIsNotUnique(err))
|
||||
_ = s.Close()
|
||||
})
|
||||
t.Run("change namespace", func(t *testing.T) {
|
||||
t.Run("own", func(t *testing.T) {
|
||||
usr := &user.User{
|
||||
ID: 6,
|
||||
Username: "user6",
|
||||
Email: "user6@example.com",
|
||||
}
|
||||
|
||||
db.LoadAndAssertFixtures(t)
|
||||
s := db.NewSession()
|
||||
list := List{
|
||||
ID: 6,
|
||||
Title: "Test6",
|
||||
Description: "Lorem Ipsum",
|
||||
NamespaceID: 7, // from 6
|
||||
}
|
||||
can, err := list.CanUpdate(s, usr)
|
||||
assert.NoError(t, err)
|
||||
assert.True(t, can)
|
||||
err = list.Update(s, usr)
|
||||
assert.NoError(t, err)
|
||||
err = s.Commit()
|
||||
assert.NoError(t, err)
|
||||
db.AssertExists(t, "lists", map[string]interface{}{
|
||||
"id": list.ID,
|
||||
"title": list.Title,
|
||||
"description": list.Description,
|
||||
"namespace_id": list.NamespaceID,
|
||||
}, false)
|
||||
})
|
||||
// FIXME: The check for whether the namespace is archived is missing in namespace.CanWrite
|
||||
// t.Run("archived own", func(t *testing.T) {
|
||||
// db.LoadAndAssertFixtures(t)
|
||||
// s := db.NewSession()
|
||||
// list := List{
|
||||
// ID: 1,
|
||||
// Title: "Test1",
|
||||
// Description: "Lorem Ipsum",
|
||||
// NamespaceID: 16, // from 1
|
||||
// }
|
||||
// can, err := list.CanUpdate(s, usr)
|
||||
// assert.NoError(t, err)
|
||||
// assert.False(t, can) // namespace is archived and thus not writeable
|
||||
// _ = s.Close()
|
||||
// })
|
||||
t.Run("others", func(t *testing.T) {
|
||||
db.LoadAndAssertFixtures(t)
|
||||
s := db.NewSession()
|
||||
list := List{
|
||||
ID: 1,
|
||||
Title: "Test1",
|
||||
Description: "Lorem Ipsum",
|
||||
NamespaceID: 2, // from 1
|
||||
}
|
||||
can, _ := list.CanUpdate(s, usr)
|
||||
assert.False(t, can) // namespace is not writeable by us
|
||||
_ = s.Close()
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -603,17 +603,20 @@ func (n *Namespace) ReadAll(s *xorm.Session, a web.Auth, search string, page int
|
||||
// @Failure 500 {object} models.Message "Internal error"
|
||||
// @Router /namespaces [put]
|
||||
func (n *Namespace) Create(s *xorm.Session, a web.Auth) (err error) {
|
||||
// Check if we have at least a title
|
||||
// Check if we have at least a name
|
||||
if n.Title == "" {
|
||||
return ErrNamespaceNameCannotBeEmpty{NamespaceID: 0, UserID: a.GetID()}
|
||||
}
|
||||
n.ID = 0 // This would otherwise prevent the creation of new lists after one was created
|
||||
|
||||
// Check if the User exists
|
||||
n.Owner, err = user.GetUserByID(s, a.GetID())
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
n.OwnerID = n.Owner.ID
|
||||
|
||||
// Insert
|
||||
if _, err = s.Insert(n); err != nil {
|
||||
return err
|
||||
}
|
||||
|
@ -131,7 +131,7 @@ func getTaskFilterOptsFromCollection(tf *TaskCollection) (opts *taskOptions, err
// @Param sort_by query string false "The sorting parameter. You can pass this multiple times to get the tasks ordered by multiple different parameters, along with `order_by`. Possible values to sort by are `id`, `title`, `description`, `done`, `done_at`, `due_date`, `created_by_id`, `list_id`, `repeat_after`, `priority`, `start_date`, `end_date`, `hex_color`, `percent_done`, `uid`, `created`, `updated`. Default is `id`."
// @Param order_by query string false "The ordering parameter. Possible values to order by are `asc` or `desc`. Default is `asc`."
// @Param filter_by query string false "The name of the field to filter by. Allowed values are all task properties. Task properties which are their own object require passing in the id of that entity. Accepts an array for multiple filters which will be chained together; all supplied filters must match."
// @Param filter_value query string false "The value to filter for. You can use [grafana](https://grafana.com/docs/grafana/latest/dashboards/time-range-controls)- or [elasticsearch](https://www.elastic.co/guide/en/elasticsearch/reference/7.3/common-options.html#date-math)-style relative dates for all date fields like `due_date`, `start_date`, `end_date`, etc."
// @Param filter_value query string false "The value to filter for."
// @Param filter_comparator query string false "The comparator to use for a filter. Available values are `equals`, `greater`, `greater_equals`, `less`, `less_equals`, `like` and `in`. `in` expects comma-separated values in `filter_value`. Defaults to `equals`"
// @Param filter_concat query string false "The concatenator to use for filters. Available values are `and` or `or`. Defaults to `or`."
// @Param filter_include_nulls query string false "If set to true the result will include filtered fields whose value is set to `null`. Available values are `true` or `false`. Defaults to `false`."
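The @Param comments above document the task filter query parameters. As an illustrative sketch only (list ID, path and filter values are assumed, not taken from this diff), a request for undone tasks due within the next week could be assembled with url.Values, the same way the integration tests earlier in this diff do:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	params := url.Values{
		"sort_by":           []string{"due_date"},
		"order_by":          []string{"asc"},
		"filter_by":         []string{"due_date", "done"},
		"filter_value":      []string{"now+7d", "false"}, // "now+7d" uses the date-math style mentioned in the filter_value doc
		"filter_comparator": []string{"less", "equals"},
		"filter_concat":     []string{"and"}, // all filters have to match
	}

	// Path and list ID are illustrative.
	fmt.Println("/api/v1/lists/1/tasks?" + params.Encode())
}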
@ -25,7 +25,6 @@ import (

"code.vikunja.io/api/pkg/config"
"github.com/iancoleman/strcase"
"github.com/vectordotdev/go-datemath"
"xorm.io/xorm/schemas"
)

@ -160,14 +159,8 @@ func getValueForField(field reflect.StructField, rawValue string) (value interfa
value, err = strconv.ParseBool(rawValue)
case reflect.Struct:
if field.Type == schemas.TimeType {
var t datemath.Expression
t, err = datemath.Parse(rawValue)
if err == nil {
value = t.Time(datemath.WithLocation(config.GetTimeZone()))
} else {
value, err = time.Parse(time.RFC3339, rawValue)
value = value.(time.Time).In(config.GetTimeZone())
}
value, err = time.Parse(time.RFC3339, rawValue)
value = value.(time.Time).In(config.GetTimeZone())
}
case reflect.Slice:
// If this is a slice of pointers we're dealing with some property which is a relation
@ -1046,9 +1046,6 @@ func TestTaskCollection_ReadAll(t *testing.T) {
|
||||
a: &user.User{ID: 1},
|
||||
},
|
||||
want: []*Task{
|
||||
// The only tasks with a position set
|
||||
task1,
|
||||
task2,
|
||||
// the other ones don't have a position set
|
||||
task3,
|
||||
task4,
|
||||
@ -1079,51 +1076,9 @@ func TestTaskCollection_ReadAll(t *testing.T) {
|
||||
task31,
|
||||
task32,
|
||||
task33,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "order by due date",
|
||||
fields: fields{
|
||||
SortBy: []string{"due_date", "id"},
|
||||
OrderBy: []string{"asc", "desc"},
|
||||
},
|
||||
args: args{
|
||||
a: &user.User{ID: 1},
|
||||
},
|
||||
want: []*Task{
|
||||
// The only tasks with a due date
|
||||
task6,
|
||||
task5,
|
||||
// The other ones don't have a due date
|
||||
task33,
|
||||
task32,
|
||||
task31,
|
||||
task30,
|
||||
task29,
|
||||
task28,
|
||||
task27,
|
||||
task26,
|
||||
task25,
|
||||
task24,
|
||||
task23,
|
||||
task22,
|
||||
task21,
|
||||
task20,
|
||||
task19,
|
||||
task18,
|
||||
task17,
|
||||
task16,
|
||||
task15,
|
||||
task12,
|
||||
task11,
|
||||
task10,
|
||||
task9,
|
||||
task8,
|
||||
task7,
|
||||
task4,
|
||||
task3,
|
||||
task2,
|
||||
// The only tasks with a position set
|
||||
task1,
|
||||
task2,
|
||||
},
|
||||
},
|
||||
{
|
||||
|
@ -61,11 +61,11 @@ func getTaskUsersForTasks(s *xorm.Session, taskIDs []int64, cond builder.Cond) (
|
||||
// Get all creators of tasks
|
||||
creators := make(map[int64]*user.User, len(taskIDs))
|
||||
err = s.
|
||||
Select("users.id, users.username, users.email, users.name, users.timezone").
|
||||
Select("users.id, users.username, users.email, users.name").
|
||||
Join("LEFT", "tasks", "tasks.created_by_id = users.id").
|
||||
In("tasks.id", taskIDs).
|
||||
Where(cond).
|
||||
GroupBy("tasks.id, users.id, users.username, users.email, users.name, users.timezone").
|
||||
GroupBy("tasks.id, users.id, users.username, users.email, users.name").
|
||||
Find(&creators)
|
||||
if err != nil {
|
||||
return
|
||||
@ -77,14 +77,14 @@ func getTaskUsersForTasks(s *xorm.Session, taskIDs []int64, cond builder.Cond) (
|
||||
return
|
||||
}
|
||||
|
||||
for _, task := range taskMap {
|
||||
u, exists := creators[task.CreatedByID]
|
||||
for _, taskID := range taskIDs {
|
||||
u, exists := creators[taskMap[taskID].CreatedByID]
|
||||
if !exists {
|
||||
continue
|
||||
}
|
||||
|
||||
taskUsers = append(taskUsers, &taskUser{
|
||||
Task: taskMap[task.ID],
|
||||
Task: taskMap[taskID],
|
||||
User: u,
|
||||
})
|
||||
}
|
||||
@ -110,9 +110,8 @@ func getTaskUsersForTasks(s *xorm.Session, taskIDs []int64, cond builder.Cond) (
|
||||
return
|
||||
}
|
||||
|
||||
func getTasksWithRemindersDueAndTheirUsers(s *xorm.Session, now time.Time) (reminderNotifications []*ReminderDueNotification, err error) {
|
||||
func getTasksWithRemindersInTheNextMinute(s *xorm.Session, now time.Time) (taskIDs []int64, err error) {
|
||||
now = utils.GetTimeWithoutNanoSeconds(now)
|
||||
reminderNotifications = []*ReminderDueNotification{}
|
||||
|
||||
nextMinute := now.Add(1 * time.Minute)
|
||||
|
||||
@ -121,8 +120,7 @@ func getTasksWithRemindersDueAndTheirUsers(s *xorm.Session, now time.Time) (remi
|
||||
reminders := []*TaskReminder{}
|
||||
err = s.
|
||||
Join("INNER", "tasks", "tasks.id = task_reminders.task_id").
|
||||
// All reminders from -12h to +14h to include all time zones
|
||||
Where("reminder >= ? and reminder < ?", now.Add(time.Hour*-12).Format(dbTimeFormat), nextMinute.Add(time.Hour*14).Format(dbTimeFormat)).
|
||||
Where("reminder >= ? and reminder < ?", now.Format(dbTimeFormat), nextMinute.Format(dbTimeFormat)).
|
||||
And("tasks.done = false").
|
||||
Find(&reminders)
|
||||
if err != nil {
|
||||
@ -135,56 +133,11 @@ func getTasksWithRemindersDueAndTheirUsers(s *xorm.Session, now time.Time) (remi
|
||||
return
|
||||
}
|
||||
|
||||
var taskIDs []int64
|
||||
// We're sending a reminder to everyone who is assigned to the task or has created it.
|
||||
for _, r := range reminders {
|
||||
taskIDs = append(taskIDs, r.TaskID)
|
||||
}
|
||||
|
||||
if len(taskIDs) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
usersWithReminders, err := getTaskUsersForTasks(s, taskIDs, builder.Eq{"users.email_reminders_enabled": true})
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
usersPerTask := make(map[int64][]*taskUser, len(usersWithReminders))
|
||||
for _, ur := range usersWithReminders {
|
||||
usersPerTask[ur.Task.ID] = append(usersPerTask[ur.Task.ID], ur)
|
||||
}
|
||||
|
||||
// Time zone cache per time zone string to avoid parsing the same time zone over and over again
|
||||
tzs := make(map[string]*time.Location)
|
||||
// Figure out which reminders are actually due in the time zone of the users
|
||||
for _, r := range reminders {
|
||||
|
||||
for _, u := range usersPerTask[r.TaskID] {
|
||||
|
||||
if u.User.Timezone == "" {
|
||||
u.User.Timezone = config.GetTimeZone().String()
|
||||
}
|
||||
|
||||
// I think this will break once there are more reminders than what we can handle in one minute
|
||||
tz, exists := tzs[u.User.Timezone]
|
||||
if !exists {
|
||||
tz, err = time.LoadLocation(u.User.Timezone)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
tzs[u.User.Timezone] = tz
|
||||
}
|
||||
|
||||
actualReminder := r.Reminder.In(tz)
|
||||
if (actualReminder.After(now) && actualReminder.Before(now.Add(time.Minute))) || actualReminder.Equal(now) {
|
||||
reminderNotifications = append(reminderNotifications, &ReminderDueNotification{
|
||||
User: u.User,
|
||||
Task: u.Task,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
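The reminder code above first collects candidate reminders in a wide window (-12h to +14h around the current minute) and then decides per user, using a cached time.Location, whether a reminder is actually due. A simplified sketch of that per-user check, with made-up helper names instead of the real Vikunja structs:

package main

import (
	"fmt"
	"time"
)

// isDueFor converts the stored reminder into the user's location and checks
// whether it falls within the next minute, mirroring the loop in the hunk above.
func isDueFor(reminder, now time.Time, userTZ string, tzCache map[string]*time.Location) (bool, error) {
	loc, ok := tzCache[userTZ]
	if !ok {
		var err error
		loc, err = time.LoadLocation(userTZ)
		if err != nil {
			return false, err
		}
		tzCache[userTZ] = loc // cache parsed zones instead of loading them per reminder
	}

	actual := reminder.In(loc)
	return actual.Equal(now) || (actual.After(now) && actual.Before(now.Add(time.Minute))), nil
}

func main() {
	cache := map[string]*time.Location{}
	now := time.Date(2018, 12, 1, 1, 13, 0, 0, time.UTC)
	due, err := isDueFor(now.Add(30*time.Second), now, "Europe/Berlin", cache)
	fmt.Println(due, err) // true <nil>
}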
@ -209,26 +162,37 @@ func RegisterReminderCron() {
|
||||
defer s.Close()
|
||||
|
||||
now := time.Now()
|
||||
reminders, err := getTasksWithRemindersDueAndTheirUsers(s, now)
|
||||
taskIDs, err := getTasksWithRemindersInTheNextMinute(s, now)
|
||||
if err != nil {
|
||||
log.Errorf("[Task Reminder Cron] Could not get tasks with reminders in the next minute: %s", err)
|
||||
return
|
||||
}
|
||||
|
||||
if len(reminders) == 0 {
|
||||
if len(taskIDs) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
log.Debugf("[Task Reminder Cron] Sending %d reminders", len(reminders))
|
||||
users, err := getTaskUsersForTasks(s, taskIDs, builder.Eq{"users.email_reminders_enabled": true})
|
||||
if err != nil {
|
||||
log.Errorf("[Task Reminder Cron] Could not get task users to send them reminders: %s", err)
|
||||
return
|
||||
}
|
||||
|
||||
for _, n := range reminders {
|
||||
err = notifications.Notify(n.User, n)
|
||||
log.Debugf("[Task Reminder Cron] Sending reminders to %d users", len(users))
|
||||
|
||||
for _, u := range users {
|
||||
n := &ReminderDueNotification{
|
||||
User: u.User,
|
||||
Task: u.Task,
|
||||
}
|
||||
|
||||
err = notifications.Notify(u.User, n)
|
||||
if err != nil {
|
||||
log.Errorf("[Task Reminder Cron] Could not notify user %d: %s", n.User.ID, err)
|
||||
log.Errorf("[Task Reminder Cron] Could not notify user %d: %s", u.User.ID, err)
|
||||
return
|
||||
}
|
||||
|
||||
log.Debugf("[Task Reminder Cron] Sent reminder email for task %d to user %d", n.Task.ID, n.User.ID)
|
||||
log.Debugf("[Task Reminder Cron] Sent reminder email for task %d to user %d", u.Task.ID, u.User.ID)
|
||||
}
|
||||
})
|
||||
if err != nil {
|
||||
|
@ -32,10 +32,10 @@ func TestReminderGetTasksInTheNextMinute(t *testing.T) {
|
||||
|
||||
now, err := time.Parse(time.RFC3339Nano, "2018-12-01T01:13:00Z")
|
||||
assert.NoError(t, err)
|
||||
notifications, err := getTasksWithRemindersDueAndTheirUsers(s, now)
|
||||
taskIDs, err := getTasksWithRemindersInTheNextMinute(s, now)
|
||||
assert.NoError(t, err)
|
||||
assert.Len(t, notifications, 1)
|
||||
assert.Equal(t, int64(27), notifications[0].Task.ID)
|
||||
assert.Len(t, taskIDs, 1)
|
||||
assert.Equal(t, int64(27), taskIDs[0])
|
||||
})
|
||||
t.Run("Found No Tasks", func(t *testing.T) {
|
||||
db.LoadAndAssertFixtures(t)
|
||||
@ -44,7 +44,7 @@ func TestReminderGetTasksInTheNextMinute(t *testing.T) {
|
||||
|
||||
now, err := time.Parse(time.RFC3339Nano, "2018-12-02T01:13:00Z")
|
||||
assert.NoError(t, err)
|
||||
taskIDs, err := getTasksWithRemindersDueAndTheirUsers(s, now)
|
||||
taskIDs, err := getTasksWithRemindersInTheNextMinute(s, now)
|
||||
assert.NoError(t, err)
|
||||
assert.Len(t, taskIDs, 0)
|
||||
})
|
||||
|
@ -296,20 +296,17 @@ func getRawTasksForLists(s *xorm.Session, lists []*List, a web.Auth, opts *taskO
if err := param.validate(); err != nil {
return nil, 0, 0, err
}

// Mysql sorts columns with null values before ones without null value.
// Because it does not have support for NULLS FIRST or NULLS LAST we work around this by
// first sorting for null (or not null) values and then the order we actually want to.
if db.Type() == schemas.MYSQL {
orderby += param.sortBy + " IS NULL, "
}

orderby += param.sortBy + " " + param.orderBy.String()

// Postgres and sqlite allow us to control how columns with null values are sorted.
// Postgres sorts by default entries with null values after ones with values.
// To make that consistent with the sort order we have and other dbms, we're adding a separate clause here.
if db.Type() == schemas.POSTGRES || db.Type() == schemas.SQLITE {
orderby += " NULLS LAST"
if db.Type() == schemas.POSTGRES {
if param.orderBy == orderAscending {
orderby += " NULLS FIRST"
}
if param.orderBy == orderDescending {
orderby += " NULLS LAST"
}
}

if (i + 1) < len(opts.sortby) {
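The comments above explain the per-database handling of NULL values in ORDER BY: MySQL has no NULLS FIRST/LAST, so an extra "IS NULL" sort key is prepended, while Postgres and sqlite get an explicit clause. A hedged sketch of the assembled clause, following the unconditional "NULLS LAST" branch of the hunk above (column name and dialect strings are stand-ins for the schemas constants):

package main

import "fmt"

// buildOrderBy sketches the clause assembly for a single sort parameter.
func buildOrderBy(dialect, column, direction string) string {
	orderby := ""
	if dialect == "mysql" {
		// MySQL sorts NULLs first, so sort on "column IS NULL" before the real order.
		orderby += column + " IS NULL, "
	}
	orderby += column + " " + direction
	if dialect == "postgres" || dialect == "sqlite" {
		orderby += " NULLS LAST"
	}
	return orderby
}

func main() {
	fmt.Println(buildOrderBy("mysql", "due_date", "asc"))    // due_date IS NULL, due_date asc
	fmt.Println(buildOrderBy("postgres", "due_date", "asc")) // due_date asc NULLS LAST
	fmt.Println(buildOrderBy("sqlite", "due_date", "asc"))   // due_date asc NULLS LAST
}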
@ -405,7 +402,7 @@ func getRawTasksForLists(s *xorm.Session, lists []*List, a web.Auth, opts *taskO
|
||||
return nil, 0, 0, err
|
||||
}
|
||||
|
||||
userListIDs := make([]int64, 0, len(userLists))
|
||||
userListIDs := make([]int64, len(userLists))
|
||||
for _, l := range userLists {
|
||||
userListIDs = append(userListIDs, l.ID)
|
||||
}
|
||||
|
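The one-line change above (make([]int64, 0, len(userLists)) versus make([]int64, len(userLists))) touches a common Go pitfall: allocating a slice with a length and then appending keeps the zero values at the front. A standalone sketch of the difference:

package main

import "fmt"

func main() {
	ids := []int64{4, 8, 15}

	// Length 3 plus three appends: the zero values stay in front.
	withLen := make([]int64, len(ids))
	for _, id := range ids {
		withLen = append(withLen, id)
	}
	fmt.Println(withLen) // [0 0 0 4 8 15]

	// Length 0 with capacity 3: only the appended IDs end up in the slice.
	withCap := make([]int64, 0, len(ids))
	for _, id := range ids {
		withCap = append(withCap, id)
	}
	fmt.Println(withCap) // [4 8 15]
}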
@ -59,7 +59,7 @@ func deleteUsers() {
|
||||
now := time.Now()
|
||||
|
||||
for _, u := range users {
|
||||
if !u.DeletionScheduledAt.Before(now) {
|
||||
if u.DeletionScheduledAt.Before(now) {
|
||||
log.Debugf("User %d is not yet scheduled for deletion. Scheduled at %s, now is %s", u.ID, u.DeletionScheduledAt, now)
|
||||
continue
|
||||
}
|
||||
@ -87,18 +87,17 @@ func deleteUsers() {
|
||||
}
|
||||
}
|
||||
|
||||
func getNamespacesToDelete(s *xorm.Session, u *user.User) (namespacesToDelete []*Namespace, err error) {
|
||||
namespacesToDelete = []*Namespace{}
|
||||
// DeleteUser completely removes a user and all their associated lists, namespaces and tasks.
|
||||
// This action is irrevocable.
|
||||
// Public to allow deletion from the CLI.
|
||||
func DeleteUser(s *xorm.Session, u *user.User) (err error) {
|
||||
namespacesToDelete := []*Namespace{}
|
||||
// Get all namespaces and lists this u has access to
|
||||
nm := &Namespace{IsArchived: true}
|
||||
res, _, _, err := nm.ReadAll(s, u, "", 1, -1)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
|
||||
if res == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
namespaces := res.([]*NamespaceWithLists)
|
||||
for _, n := range namespaces {
|
||||
if n.ID < 0 {
|
||||
@ -107,14 +106,14 @@ func getNamespacesToDelete(s *xorm.Session, u *user.User) (namespacesToDelete []
|
||||
|
||||
hadUsers, err := ensureNamespaceAdminUser(s, &n.Namespace)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
if hadUsers {
|
||||
continue
|
||||
}
|
||||
hadTeams, err := ensureNamespaceAdminTeam(s, &n.Namespace)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
if hadTeams {
|
||||
continue
|
||||
@ -123,21 +122,13 @@ func getNamespacesToDelete(s *xorm.Session, u *user.User) (namespacesToDelete []
|
||||
namespacesToDelete = append(namespacesToDelete, &n.Namespace)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func getListsToDelete(s *xorm.Session, u *user.User) (listsToDelete []*List, err error) {
|
||||
listsToDelete = []*List{}
|
||||
// Get all lists to delete
|
||||
listsToDelete := []*List{}
|
||||
lm := &List{IsArchived: true}
|
||||
res, _, _, err := lm.ReadAll(s, u, "", 0, -1)
|
||||
res, _, _, err = lm.ReadAll(s, u, "", 0, -1)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
|
||||
if res == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
lists := res.([]*List)
|
||||
for _, l := range lists {
|
||||
if l.ID < 0 {
|
||||
@ -146,16 +137,15 @@ func getListsToDelete(s *xorm.Session, u *user.User) (listsToDelete []*List, err
|
||||
|
||||
hadUsers, err := ensureListAdminUser(s, l)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
if hadUsers {
|
||||
continue
|
||||
}
|
||||
hadTeams, err := ensureListAdminTeam(s, l)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
|
||||
if hadTeams {
|
||||
continue
|
||||
}
|
||||
@ -163,23 +153,6 @@ func getListsToDelete(s *xorm.Session, u *user.User) (listsToDelete []*List, err
|
||||
listsToDelete = append(listsToDelete, l)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// DeleteUser completely removes a user and all their associated lists, namespaces and tasks.
|
||||
// This action is irrevocable.
|
||||
// Public to allow deletion from the CLI.
|
||||
func DeleteUser(s *xorm.Session, u *user.User) (err error) {
|
||||
namespacesToDelete, err := getNamespacesToDelete(s, u)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
listsToDelete, err := getListsToDelete(s, u)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Delete everything not shared with anybody else
|
||||
for _, n := range namespacesToDelete {
|
||||
err = deleteNamespace(s, n, u, false)
|
||||
@ -195,7 +168,7 @@ func DeleteUser(s *xorm.Session, u *user.User) (err error) {
|
||||
}
|
||||
}
|
||||
|
||||
_, err = s.Where("id = ?", u.ID).Delete(&user.User{})
|
||||
_, err = s.Where("id = ?", u.ID).Delete(u)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
@ -27,35 +27,21 @@ import (
|
||||
)
|
||||
|
||||
func TestDeleteUser(t *testing.T) {
|
||||
t.Run("normal", func(t *testing.T) {
|
||||
db.LoadAndAssertFixtures(t)
|
||||
s := db.NewSession()
|
||||
defer s.Close()
|
||||
notifications.Fake()
|
||||
db.LoadAndAssertFixtures(t)
|
||||
s := db.NewSession()
|
||||
defer s.Close()
|
||||
notifications.Fake()
|
||||
|
||||
u := &user.User{ID: 6}
|
||||
err := DeleteUser(s, u)
|
||||
u := &user.User{ID: 6}
|
||||
err := DeleteUser(s, u)
|
||||
|
||||
assert.NoError(t, err)
|
||||
db.AssertMissing(t, "users", map[string]interface{}{"id": u.ID})
|
||||
db.AssertMissing(t, "lists", map[string]interface{}{"id": 24}) // only user6 had access to this list
|
||||
db.AssertExists(t, "lists", map[string]interface{}{"id": 6}, false)
|
||||
db.AssertExists(t, "lists", map[string]interface{}{"id": 7}, false)
|
||||
db.AssertExists(t, "lists", map[string]interface{}{"id": 8}, false)
|
||||
db.AssertExists(t, "lists", map[string]interface{}{"id": 9}, false)
|
||||
db.AssertExists(t, "lists", map[string]interface{}{"id": 10}, false)
|
||||
db.AssertExists(t, "lists", map[string]interface{}{"id": 11}, false)
|
||||
})
|
||||
t.Run("user with no namespaces", func(t *testing.T) {
|
||||
db.LoadAndAssertFixtures(t)
|
||||
s := db.NewSession()
|
||||
defer s.Close()
|
||||
notifications.Fake()
|
||||
|
||||
u := &user.User{ID: 4}
|
||||
err := DeleteUser(s, u)
|
||||
|
||||
assert.NoError(t, err)
|
||||
// No assertions for deleted lists and namespaces since that user doesn't have any
|
||||
})
|
||||
assert.NoError(t, err)
|
||||
db.AssertMissing(t, "users", map[string]interface{}{"id": u.ID})
|
||||
db.AssertMissing(t, "lists", map[string]interface{}{"id": 24}) // only user6 had access to this list
|
||||
db.AssertExists(t, "lists", map[string]interface{}{"id": 6}, false)
|
||||
db.AssertExists(t, "lists", map[string]interface{}{"id": 7}, false)
|
||||
db.AssertExists(t, "lists", map[string]interface{}{"id": 8}, false)
|
||||
db.AssertExists(t, "lists", map[string]interface{}{"id": 9}, false)
|
||||
db.AssertExists(t, "lists", map[string]interface{}{"id": 10}, false)
|
||||
db.AssertExists(t, "lists", map[string]interface{}{"id": 11}, false)
|
||||
}
|
||||
|
@ -91,7 +91,7 @@ func ListUsersFromList(s *xorm.Session, l *List, search string) (users []*user.U
|
||||
uidmap[u.TeamNamespaceUserID] = true
|
||||
}
|
||||
|
||||
uids := make([]int64, 0, len(uidmap))
|
||||
uids := make([]int64, len(uidmap))
|
||||
for id := range uidmap {
|
||||
uids = append(uids, id)
|
||||
}
|
||||
|
@ -42,8 +42,8 @@ type Token struct {
|
||||
}
|
||||
|
||||
// NewUserAuthTokenResponse creates a new user auth token response from a user object.
|
||||
func NewUserAuthTokenResponse(u *user.User, c echo.Context, long bool) error {
|
||||
t, err := NewUserJWTAuthtoken(u, long)
|
||||
func NewUserAuthTokenResponse(u *user.User, c echo.Context) error {
|
||||
t, err := NewUserJWTAuthtoken(u)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@ -52,13 +52,10 @@ func NewUserAuthTokenResponse(u *user.User, c echo.Context, long bool) error {
|
||||
}
|
||||
|
||||
// NewUserJWTAuthtoken generates and signs a new JWT token for a user. This is a global function to be able to call it from integration tests.
|
||||
func NewUserJWTAuthtoken(u *user.User, long bool) (token string, err error) {
|
||||
func NewUserJWTAuthtoken(u *user.User) (token string, err error) {
|
||||
t := jwt.New(jwt.SigningMethodHS256)
|
||||
|
||||
var ttl = time.Duration(config.ServiceJWTTTL.GetInt64())
|
||||
if long {
|
||||
ttl = time.Duration(config.ServiceJWTTTLLong.GetInt64())
|
||||
}
|
||||
var exp = time.Now().Add(time.Second * ttl).Unix()
|
||||
|
||||
// Set claims
|
||||
@ -71,7 +68,6 @@ func NewUserJWTAuthtoken(u *user.User, long bool) (token string, err error) {
|
||||
claims["name"] = u.Name
|
||||
claims["emailRemindersEnabled"] = u.EmailRemindersEnabled
|
||||
claims["isLocalUser"] = u.Issuer == user.IssuerLocal
|
||||
claims["long"] = long
|
||||
|
||||
// Generate encoded token and send it as response.
|
||||
return t.SignedString([]byte(config.ServiceJWTSecret.GetString()))
|
||||
|
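The hunk above issues an HS256-signed JWT whose claims carry the user profile and whose expiry comes from the configured TTL. For the other direction, a hedged sketch of verifying such a token; the import path and exact API are assumptions (a jwt package compatible with the jwt.New/SigningMethodHS256 calls above), not something this diff shows:

package main

import (
	"fmt"

	"github.com/golang-jwt/jwt" // assumed import path for the "jwt" identifier used above
)

func parseUserToken(tokenString, secret string) (jwt.MapClaims, error) {
	token, err := jwt.Parse(tokenString, func(t *jwt.Token) (interface{}, error) {
		// Only accept HMAC-signed tokens, matching SigningMethodHS256 above.
		if _, ok := t.Method.(*jwt.SigningMethodHMAC); !ok {
			return nil, fmt.Errorf("unexpected signing method: %v", t.Header["alg"])
		}
		return []byte(secret), nil
	})
	if err != nil {
		return nil, err
	}

	claims, ok := token.Claims.(jwt.MapClaims)
	if !ok || !token.Valid {
		return nil, fmt.Errorf("invalid token")
	}
	return claims, nil // e.g. claims["username"], claims["exp"]
}

func main() {
	claims, err := parseUserToken("<token from the login response>", "<the configured JWT secret>")
	fmt.Println(claims, err)
}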
@ -19,7 +19,6 @@ package openid
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"math/rand"
|
||||
"net/http"
|
||||
"time"
|
||||
@ -105,13 +104,12 @@ func HandleCallback(c echo.Context) error {
|
||||
// Parse the access & ID token
|
||||
oauth2Token, err := provider.Oauth2Config.Exchange(context.Background(), cb.Code)
|
||||
if err != nil {
|
||||
var rerr *oauth2.RetrieveError
|
||||
if errors.As(err, &rerr) {
|
||||
if rerr, is := err.(*oauth2.RetrieveError); is {
|
||||
log.Error(err)
|
||||
|
||||
details := make(map[string]interface{})
|
||||
if err := json.Unmarshal(rerr.Body, &details); err != nil {
|
||||
log.Errorf("Error unmarshalling token for provider %s: %v", provider.Name, err)
|
||||
log.Errorf("Error unmarshaling token for provider %s: %v", provider.Name, err)
|
||||
return handler.HandleHTTPError(err, c)
|
||||
}
|
||||
|
||||
@ -200,7 +198,7 @@ func HandleCallback(c echo.Context) error {
|
||||
}
|
||||
|
||||
// Create token
|
||||
return auth.NewUserAuthTokenResponse(u, c, false)
|
||||
return auth.NewUserAuthTokenResponse(u, c)
|
||||
}
|
||||
|
||||
func getOrCreateUser(s *xorm.Session, cl *claims, issuer, subject string) (u *user.User, err error) {
|
||||
@ -218,7 +216,6 @@ func getOrCreateUser(s *xorm.Session, cl *claims, issuer, subject string) (u *us
|
||||
uu := &user.User{
|
||||
Username: cl.PreferredUsername,
|
||||
Email: cl.Email,
|
||||
Name: cl.Name,
|
||||
Status: user.StatusActive,
|
||||
Issuer: issuer,
|
||||
Subject: subject,
|
||||
|
@ -1,122 +0,0 @@
|
||||
// Vikunja is a to-do list application to facilitate your life.
|
||||
// Copyright 2018-2021 Vikunja and contributors. All rights reserved.
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public Licensee as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public Licensee for more details.
//
// You should have received a copy of the GNU Affero General Public Licensee
// along with this program.  If not, see <https://www.gnu.org/licenses/>.

package marble

import (
    "math"
    "strconv"

    "code.vikunja.io/api/pkg/user"
)

// Provider generates a random avatar based on https://github.com/boringdesigners/boring-avatars
type Provider struct {
}

const avatarSize = 80

var colors = []string{
    "#A3A948",
    "#EDB92E",
    "#F85931",
    "#CE1836",
    "#009989",
}

type props struct {
    Color      string
    TranslateX int
    TranslateY int
    Rotate     int
    Scale      float64
}

func getUnit(number int, rang, index int) int {
    value := number % rang

    digit := math.Floor(math.Mod(float64(number)/math.Pow(10, float64(index)), 10))

    if index > 0 && (math.Mod(digit, 2) == 0) {
        return -value
    }

    return value
}

func getPropsForUser(u *user.User) []*props {
    ps := []*props{}
    for i := 0; i < 3; i++ {
        f := float64(getUnit(int(u.ID)*(i+1), avatarSize/10, 0))
        ps = append(ps, &props{
            Color:      colors[(int(u.ID)+i)%(len(colors)-1)],
            TranslateX: getUnit(int(u.ID)*(i+1), avatarSize/10, 1),
            TranslateY: getUnit(int(u.ID)*(i+1), avatarSize/10, 2),
            Scale:      1.2 + f/10,
            Rotate:     getUnit(int(u.ID)*(i+1), 360, 1),
        })
    }

    return ps
}

func (p *Provider) GetAvatar(u *user.User, size int64) (avatar []byte, mimeType string, err error) {

    s := strconv.FormatInt(size, 10)
    avatarSizeStr := strconv.Itoa(avatarSize)
    avatarSizeHalf := strconv.Itoa(avatarSize / 2)

    ps := getPropsForUser(u)

    return []byte(`<svg
viewBox="0 0 ` + avatarSizeStr + ` ` + avatarSizeStr + `"
fill="none"
xmlns="http://www.w3.org/2000/svg"
width="` + s + `"
height="` + s + `"
>
<mask id="mask__marble" maskUnits="userSpaceOnUse" x="0" y="0" width="` + avatarSizeStr + `" height="` + avatarSizeStr + `">
<rect width="` + avatarSizeStr + `" height="` + avatarSizeStr + `" rx="` + strconv.Itoa(avatarSize*2) + `" fill="white" />
</mask>
<g mask="url(#mask__marble)">
<rect width="` + avatarSizeStr + `" height="` + avatarSizeStr + `" rx="2" fill="` + ps[0].Color + `" />
<path
filter="url(#prefix__filter0_f)"
d="M32.414 59.35L50.376 70.5H72.5v-71H33.728L26.5 13.381l19.057 27.08L32.414 59.35z"
fill="` + ps[1].Color + `"
transform="translate(` + strconv.Itoa(ps[1].TranslateX) + ` ` + strconv.Itoa(ps[1].TranslateY) + `) rotate(` + strconv.Itoa(ps[1].Rotate) + ` ` + avatarSizeHalf + ` ` + avatarSizeHalf + `) scale(` + strconv.FormatFloat(ps[2].Scale, 'f', 2, 64) + `)"
/>
<path
filter="url(#prefix__filter0_f)"
style="mix-blend-mode: overlay;"
d="M22.216 24L0 46.75l14.108 38.129L78 86l-3.081-59.276-22.378 4.005 12.972 20.186-23.35 27.395L22.215 24z"
fill="` + ps[2].Color + `"
transform="translate(` + strconv.Itoa(ps[2].TranslateX) + ` ` + strconv.Itoa(ps[2].TranslateY) + `) rotate(` + strconv.Itoa(ps[2].Rotate) + ` ` + avatarSizeHalf + ` ` + avatarSizeHalf + `) scale(` + strconv.FormatFloat(ps[2].Scale, 'f', 2, 64) + `)"
/>
</g>
<defs>
<filter
id="prefix__filter0_f"
filterUnits="userSpaceOnUse"
colorInterpolationFilters="sRGB"
>
<feFlood flood-opacity="0" result="BackgroundImageFix" />
<feBlend in="SourceGraphic" in2="BackgroundImageFix" result="shape" />
<feGaussianBlur stdDeviation="7" result="effect1_foregroundBlur" />
</filter>
</defs>
</svg>`), "image/svg+xml", nil
}

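// Illustrative sketch, not part of this diff: how the marble provider derives
// stable pseudo-random values from a user ID. getUnit below mirrors the helper
// above; the user ID 42 and the ranges are example inputs only.
package main

import (
    "fmt"
    "math"
)

func getUnit(number, rang, index int) int {
    value := number % rang
    digit := math.Floor(math.Mod(float64(number)/math.Pow(10, float64(index)), 10))
    if index > 0 && (math.Mod(digit, 2) == 0) {
        return -value
    }
    return value
}

func main() {
    userID := 42
    for i := 0; i < 3; i++ {
        // The same ID always produces the same offsets and rotation,
        // so a user's avatar is reproducible without storing anything.
        fmt.Println(getUnit(userID*(i+1), 8, 1), getUnit(userID*(i+1), 360, 1))
    }
}
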
@ -24,10 +24,9 @@ import (

// Image represents an image which can be used as a list background
type Image struct {
    ID       string `json:"id"`
    URL      string `json:"url"`
    Thumb    string `json:"thumb,omitempty"`
    BlurHash string `json:"blur_hash"`
    ID    string `json:"id"`
    URL   string `json:"url"`
    Thumb string `json:"thumb,omitempty"`
    // This can be used to supply extra information from an image provider to clients
    Info interface{} `json:"info,omitempty"`
}

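// Illustrative sketch, not part of this diff: what the trimmed-down Image
// struct above serializes to for API clients. The field values here are
// invented examples.
package main

import (
    "encoding/json"
    "fmt"
)

type Image struct {
    ID    string `json:"id"`
    URL   string `json:"url"`
    Thumb string `json:"thumb,omitempty"`
    // This can be used to supply extra information from an image provider to clients
    Info interface{} `json:"info,omitempty"`
}

func main() {
    img := Image{
        ID:   "42",
        URL:  "https://example.com/background.jpg",
        Info: map[string]string{"author": "someone"},
    }
    out, _ := json.MarshalIndent(img, "", "  ")
    fmt.Println(string(out)) // "thumb" is omitted because it is empty
}
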
@ -17,31 +17,24 @@
package handler

import (
    "image"
    _ "image/gif"  // To make sure the decoder used for generating blurHashes recognizes gifs
    _ "image/jpeg" // To make sure the decoder used for generating blurHashes recognizes jpgs
    _ "image/png"  // To make sure the decoder used for generating blurHashes recognizes pngs
    "io"
    "net/http"
    "strconv"
    "strings"

    "code.vikunja.io/api/pkg/db"
    "xorm.io/xorm"

    "code.vikunja.io/api/pkg/files"
    "code.vikunja.io/api/pkg/log"
    "code.vikunja.io/api/pkg/models"
    auth2 "code.vikunja.io/api/pkg/modules/auth"
    "code.vikunja.io/api/pkg/modules/background"
    "code.vikunja.io/api/pkg/modules/background/unsplash"
    "code.vikunja.io/api/pkg/modules/background/upload"
    "code.vikunja.io/web"
    "code.vikunja.io/web/handler"

    "github.com/bbrks/go-blurhash"
    "github.com/gabriel-vasile/mimetype"
    "github.com/labstack/echo/v4"
    "golang.org/x/image/draw"
    "xorm.io/xorm"
)

// BackgroundProvider represents a thing which holds a background provider
@ -141,18 +134,6 @@ func (bp *BackgroundProvider) SetBackground(c echo.Context) error {
    return c.JSON(http.StatusOK, list)
}

func CreateBlurHash(srcf io.Reader) (hash string, err error) {
    src, _, err := image.Decode(srcf)
    if err != nil {
        return "", err
    }

    dst := image.NewRGBA(image.Rect(0, 0, 32, 32))
    draw.NearestNeighbor.Scale(dst, dst.Rect, src, src.Bounds(), draw.Over, nil)

    return blurhash.Encode(4, 3, dst)
}

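// Illustrative sketch, not part of this diff: the client-side counterpart of
// CreateBlurHash above — decoding a stored hash back into a small placeholder
// image. Assumes the github.com/bbrks/go-blurhash package already imported in
// this file; the hash string is just an example value.
package main

import (
    "image/png"
    "os"

    "github.com/bbrks/go-blurhash"
)

func main() {
    hash := "LEHV6nWB2yk8pyo0adR*.7kCMdnj" // example hash

    // 32x32 matches the size CreateBlurHash scales images down to; a punch
    // of 1 keeps the default contrast.
    img, err := blurhash.Decode(hash, 32, 32, 1)
    if err != nil {
        panic(err)
    }

    out, err := os.Create("preview.png")
    if err != nil {
        panic(err)
    }
    defer out.Close()

    if err := png.Encode(out, img); err != nil {
        panic(err)
    }
}
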
// UploadBackground uploads a background and passes the id of the uploaded file as an Image to the Set function of the BackgroundProvider.
func (bp *BackgroundProvider) UploadBackground(c echo.Context) error {
    s := db.NewSession()
@ -164,21 +145,23 @@ func (bp *BackgroundProvider) UploadBackground(c echo.Context) error {
        return handler.HandleHTTPError(err, c)
    }

    p := bp.Provider()

    // Get + upload the image
    file, err := c.FormFile("background")
    if err != nil {
        _ = s.Rollback()
        return err
    }
    srcf, err := file.Open()
    src, err := file.Open()
    if err != nil {
        _ = s.Rollback()
        return err
    }
    defer srcf.Close()
    defer src.Close()

    // Validate we're dealing with an image
    mime, err := mimetype.DetectReader(srcf)
    mime, err := mimetype.DetectReader(src)
    if err != nil {
        _ = s.Rollback()
        return handler.HandleHTTPError(err, c)
@ -187,8 +170,10 @@ func (bp *BackgroundProvider) UploadBackground(c echo.Context) error {
        _ = s.Rollback()
        return c.JSON(http.StatusBadRequest, models.Message{Message: "Uploaded file is no image."})
    }
    _, _ = src.Seek(0, io.SeekStart)

    err = SaveBackgroundFile(s, auth, list, srcf, file.Filename, uint64(file.Size))
    // Save the file
    f, err := files.CreateWithMime(src, file.Filename, uint64(file.Size), auth, mime.String())
    if err != nil {
        _ = s.Rollback()
        if files.IsErrFileIsTooLarge(err) {
@ -198,6 +183,14 @@ func (bp *BackgroundProvider) UploadBackground(c echo.Context) error {
        return handler.HandleHTTPError(err, c)
    }

    image := &background.Image{ID: strconv.FormatInt(f.ID, 10)}

    err = p.Set(s, image, list, auth)
    if err != nil {
        _ = s.Rollback()
        return handler.HandleHTTPError(err, c)
    }

    if err := s.Commit(); err != nil {
        _ = s.Rollback()
        return handler.HandleHTTPError(err, c)
@ -206,27 +199,6 @@ func (bp *BackgroundProvider) UploadBackground(c echo.Context) error {
    return c.JSON(http.StatusOK, list)
}

func SaveBackgroundFile(s *xorm.Session, auth web.Auth, list *models.List, srcf io.ReadSeeker, filename string, filesize uint64) (err error) {
    _, _ = srcf.Seek(0, io.SeekStart)
    f, err := files.Create(srcf, filename, filesize, auth)
    if err != nil {
        return err
    }

    // Generate a blurHash
    _, _ = srcf.Seek(0, io.SeekStart)
    list.BackgroundBlurHash, err = CreateBlurHash(srcf)
    if err != nil {
        return err
    }

    // Save it
    p := upload.Provider{}
    img := &background.Image{ID: strconv.FormatInt(f.ID, 10)}
    err = p.Set(s, img, list, auth)
    return err
}

func checkListBackgroundRights(s *xorm.Session, c echo.Context) (list *models.List, auth web.Auth, err error) {
    auth, err = auth2.GetAuthFromClaims(c)
    if err != nil {
@ -328,7 +300,6 @@ func RemoveListBackground(c echo.Context) error {

    list.BackgroundFileID = 0
    list.BackgroundInformation = nil
    list.BackgroundBlurHash = ""
    err = models.UpdateList(s, list, auth, true)
    if err != nil {
        return err

@ -61,7 +61,6 @@ type Photo struct {
    Height      int    `json:"height"`
    Color       string `json:"color"`
    Description string `json:"description"`
    BlurHash    string `json:"blur_hash"`
    User        struct {
        Username string `json:"username"`
        Name     string `json:"name"`
@ -179,9 +178,8 @@ func (p *Provider) Search(s *xorm.Session, search string, page int64) (result []
    result = []*background.Image{}
    for _, p := range collectionResult {
        result = append(result, &background.Image{
            ID:       p.ID,
            URL:      getImageID(p.Urls.Raw),
            BlurHash: p.BlurHash,
            ID:  p.ID,
            URL: getImageID(p.Urls.Raw),
            Info: &models.UnsplashPhoto{
                UnsplashID: p.ID,
                Author:     p.User.Username,
@ -215,9 +213,8 @@ func (p *Provider) Search(s *xorm.Session, search string, page int64) (result []
    result = []*background.Image{}
    for _, p := range searchResult.Results {
        result = append(result, &background.Image{
            ID:       p.ID,
            URL:      getImageID(p.Urls.Raw),
            BlurHash: p.BlurHash,
            ID:  p.ID,
            URL: getImageID(p.Urls.Raw),
            Info: &models.UnsplashPhoto{
                UnsplashID: p.ID,
                Author:     p.User.Username,
@ -318,7 +315,7 @@ func (p *Provider) Set(s *xorm.Session, image *background.Image, list *models.Li
    list.BackgroundInformation = unsplashPhoto

    // Set it as the list background
    return models.SetListBackground(s, list.ID, file, photo.BlurHash)
    return models.SetListBackground(s, list.ID, file)
}

// Pingback pings the unsplash api if an unsplash photo has been accessed.

@ -52,7 +52,7 @@ func (p *Provider) Search(s *xorm.Session, search string, page int64) (result []
// @Failure 404 {object} models.Message "The list does not exist."
// @Failure 500 {object} models.Message "Internal error"
// @Router /lists/{id}/backgrounds/upload [put]
func (p *Provider) Set(s *xorm.Session, img *background.Image, list *models.List, auth web.Auth) (err error) {
func (p *Provider) Set(s *xorm.Session, image *background.Image, list *models.List, auth web.Auth) (err error) {
    // Remove the old background if one exists
    if list.BackgroundFileID != 0 {
        file := files.File{ID: list.BackgroundFileID}
@ -62,12 +62,12 @@ func (p *Provider) Set(s *xorm.Session, img *background.Image, list *models.List
    }

    file := &files.File{}
    file.ID, err = strconv.ParseInt(img.ID, 10, 64)
    file.ID, err = strconv.ParseInt(image.ID, 10, 64)
    if err != nil {
        return
    }

    list.BackgroundInformation = &models.ListBackgroundType{Type: models.ListBackgroundUpload}

    return models.SetListBackground(s, list.ID, file, list.BackgroundBlurHash)
    return models.SetListBackground(s, list.ID, file)
}

@ -21,7 +21,6 @@ import (
    "fmt"
    "io"
    "os"
    "strings"

    "code.vikunja.io/api/pkg/db"
    "code.vikunja.io/api/pkg/files"
@ -35,7 +34,7 @@ import (
func Dump(filename string) error {
    dumpFile, err := os.Create(filename)
    if err != nil {
        return fmt.Errorf("error opening dump file: %w", err)
        return fmt.Errorf("error opening dump file: %s", err)
    }
    defer dumpFile.Close()

@ -44,36 +43,17 @@ func Dump(filename string) error {

    // Config
    log.Info("Start dumping config file...")
    if viper.ConfigFileUsed() != "" {
        err = writeFileToZip(viper.ConfigFileUsed(), dumpWriter)
        if err != nil {
            return fmt.Errorf("error saving config file: %w", err)
        }
    } else {
        log.Warning("No config file found, not including one in the dump. This usually happens when environment variables are used for configuration.")
    err = writeFileToZip(viper.ConfigFileUsed(), dumpWriter)
    if err != nil {
        return fmt.Errorf("error saving config file: %s", err)
    }
    log.Info("Dumped config file")

    env := os.Environ()
    dotEnv := ""
    for _, e := range env {
        if strings.Contains(e, "VIKUNJA_") {
            dotEnv += e + "\n"
        }
    }
    if dotEnv != "" {
        err = utils.WriteBytesToZip(".env", []byte(dotEnv), dumpWriter)
        if err != nil {
            return fmt.Errorf("error saving env file: %w", err)
        }
        log.Info("Dumped .env file")
    }

    // Version
    log.Info("Start dumping version file...")
    err = utils.WriteBytesToZip("VERSION", []byte(version.Version), dumpWriter)
    if err != nil {
        return fmt.Errorf("error saving version: %w", err)
        return fmt.Errorf("error saving version: %s", err)
    }
    log.Info("Dumped version")

@ -81,12 +61,12 @@ func Dump(filename string) error {
    log.Info("Start dumping database...")
    data, err := db.Dump()
    if err != nil {
        return fmt.Errorf("error saving database data: %w", err)
        return fmt.Errorf("error saving database data: %s", err)
    }
    for t, d := range data {
        err = utils.WriteBytesToZip("database/"+t+".json", d, dumpWriter)
        if err != nil {
            return fmt.Errorf("error writing database table %s: %w", t, err)
            return fmt.Errorf("error writing database table %s: %s", t, err)
        }
    }
    log.Info("Dumped database")
@ -95,7 +75,7 @@ func Dump(filename string) error {
    log.Info("Start dumping files...")
    allFiles, err := files.Dump()
    if err != nil {
        return fmt.Errorf("error saving file: %w", err)
        return fmt.Errorf("error saving file: %s", err)
    }

    err = utils.WriteFilesToZip(allFiles, dumpWriter)

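// Illustrative sketch, not part of this diff: the archive/zip pattern that a
// helper like utils.WriteBytesToZip presumably wraps — create one entry per
// name, write the bytes, close the writer once at the end. The entry names
// and contents below are invented examples.
package main

import (
    "archive/zip"
    "os"
)

func main() {
    out, err := os.Create("vikunja-dump-example.zip")
    if err != nil {
        panic(err)
    }
    defer out.Close()

    zw := zip.NewWriter(out)
    defer zw.Close()

    entries := map[string][]byte{
        "VERSION": []byte("v0.0.0-example"),
        ".env":    []byte("VIKUNJA_SERVICE_INTERFACE=:3456\n"),
    }

    for name, data := range entries {
        w, err := zw.Create(name)
        if err != nil {
            panic(err)
        }
        if _, err := w.Write(data); err != nil {
            panic(err)
        }
    }
}
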
@ -44,7 +44,7 @@ func Restore(filename string) error {
|
||||
|
||||
r, err := zip.OpenReader(filename)
|
||||
if err != nil {
|
||||
return fmt.Errorf("could not open zip file: %w", err)
|
||||
return fmt.Errorf("could not open zip file: %s", err)
|
||||
}
|
||||
|
||||
log.Warning("Restoring a dump will wipe your current installation!")
|
||||
@ -52,7 +52,7 @@ func Restore(filename string) error {
|
||||
cr := bufio.NewReader(os.Stdin)
|
||||
text, err := cr.ReadString('\n')
|
||||
if err != nil {
|
||||
return fmt.Errorf("could not read confirmation message: %w", err)
|
||||
return fmt.Errorf("could not read confirmation message: %s", err)
|
||||
}
|
||||
if text != "Yes, I understand\n" {
|
||||
return fmt.Errorf("invalid confirmation message")
|
||||
@ -60,7 +60,6 @@ func Restore(filename string) error {
|
||||
|
||||
// Find the configFile, database and files files
|
||||
var configFile *zip.File
|
||||
var dotEnvFile *zip.File
|
||||
dbfiles := make(map[string]*zip.File)
|
||||
filesFiles := make(map[string]*zip.File)
|
||||
for _, file := range r.File {
|
||||
@ -73,21 +72,44 @@ func Restore(filename string) error {
|
||||
dbfiles[fname[:len(fname)-5]] = file
|
||||
continue
|
||||
}
|
||||
if file.Name == ".env" {
|
||||
dotEnvFile = file
|
||||
continue
|
||||
}
|
||||
if strings.HasPrefix(file.Name, "files/") {
|
||||
filesFiles[strings.ReplaceAll(file.Name, "files/", "")] = file
|
||||
}
|
||||
}
|
||||
if configFile == nil {
|
||||
return fmt.Errorf("dump does not contain a config file")
|
||||
}
|
||||
|
||||
///////
|
||||
// Restore the config file
|
||||
err = restoreConfig(configFile, dotEnvFile)
|
||||
if configFile.UncompressedSize64 > maxConfigSize {
|
||||
return fmt.Errorf("config file too large, is %d, max size is %d", configFile.UncompressedSize64, maxConfigSize)
|
||||
}
|
||||
|
||||
outFile, err := os.OpenFile(configFile.Name, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, configFile.Mode())
|
||||
if err != nil {
|
||||
return fmt.Errorf("could not open config file for writing: %s", err)
|
||||
}
|
||||
|
||||
cfgr, err := configFile.Open()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// #nosec - We eliminated the potential decompression bomb by erroring out above if the file is larger than a threshold.
|
||||
_, err = io.Copy(outFile, cfgr)
|
||||
if err != nil {
|
||||
return fmt.Errorf("could not create config file: %s", err)
|
||||
}
|
||||
|
||||
_ = cfgr.Close()
|
||||
_ = outFile.Close()
|
||||
|
||||
log.Infof("The config file has been restored to '%s'.", configFile.Name)
|
||||
log.Infof("You can now make changes to it, hit enter when you're done.")
|
||||
if _, err := bufio.NewReader(os.Stdin).ReadString('\n'); err != nil {
|
||||
return fmt.Errorf("could not read from stdin: %s", err)
|
||||
}
|
||||
log.Info("Restoring...")
|
||||
|
||||
// Init the configFile again since the restored configuration is most likely different from the one before
|
||||
@ -99,7 +121,7 @@ func Restore(filename string) error {
|
||||
// Restore the db
|
||||
// Start by wiping everything
|
||||
if err := db.WipeEverything(); err != nil {
|
||||
return fmt.Errorf("could not wipe database: %w", err)
|
||||
return fmt.Errorf("could not wipe database: %s", err)
|
||||
}
|
||||
log.Info("Wiped database.")
|
||||
|
||||
@ -108,18 +130,18 @@ func Restore(filename string) error {
|
||||
migrations := dbfiles["migration"]
|
||||
rc, err := migrations.Open()
|
||||
if err != nil {
|
||||
return fmt.Errorf("could not open migrations: %w", err)
|
||||
return fmt.Errorf("could not open migrations: %s", err)
|
||||
}
|
||||
defer rc.Close()
|
||||
|
||||
var buf bytes.Buffer
|
||||
if _, err := buf.ReadFrom(rc); err != nil {
|
||||
return fmt.Errorf("could not read migrations: %w", err)
|
||||
return fmt.Errorf("could not read migrations: %s", err)
|
||||
}
|
||||
|
||||
ms := []*xormigrate.Migration{}
|
||||
if err := json.Unmarshal(buf.Bytes(), &ms); err != nil {
|
||||
return fmt.Errorf("could not read migrations: %w", err)
|
||||
return fmt.Errorf("could not read migrations: %s", err)
|
||||
}
|
||||
sort.Slice(ms, func(i, j int) bool {
|
||||
return ms[i].ID > ms[j].ID
|
||||
@ -127,17 +149,17 @@ func Restore(filename string) error {
|
||||
|
||||
lastMigration := ms[len(ms)-1]
|
||||
if err := migration.MigrateTo(lastMigration.ID, nil); err != nil {
|
||||
return fmt.Errorf("could not create db structure: %w", err)
|
||||
return fmt.Errorf("could not create db structure: %s", err)
|
||||
}
|
||||
|
||||
// Restore all db data
|
||||
for table, d := range dbfiles {
|
||||
content, err := unmarshalFileToJSON(d)
|
||||
if err != nil {
|
||||
return fmt.Errorf("could not read table %s: %w", table, err)
|
||||
return fmt.Errorf("could not read table %s: %s", table, err)
|
||||
}
|
||||
if err := db.Restore(table, content); err != nil {
|
||||
return fmt.Errorf("could not restore table data for table %s: %w", table, err)
|
||||
return fmt.Errorf("could not restore table data for table %s: %s", table, err)
|
||||
}
|
||||
log.Infof("Restored table %s", table)
|
||||
}
|
||||
@ -151,18 +173,18 @@ func Restore(filename string) error {
|
||||
for i, file := range filesFiles {
|
||||
id, err := strconv.ParseInt(i, 10, 64)
|
||||
if err != nil {
|
||||
return fmt.Errorf("could not parse file id %s: %w", i, err)
|
||||
return fmt.Errorf("could not parse file id %s: %s", i, err)
|
||||
}
|
||||
|
||||
f := &files.File{ID: id}
|
||||
|
||||
fc, err := file.Open()
|
||||
if err != nil {
|
||||
return fmt.Errorf("could not open file %s: %w", i, err)
|
||||
return fmt.Errorf("could not open file %s: %s", i, err)
|
||||
}
|
||||
|
||||
if err := f.Save(fc); err != nil {
|
||||
return fmt.Errorf("could not save file: %w", err)
|
||||
return fmt.Errorf("could not save file: %s", err)
|
||||
}
|
||||
|
||||
_ = fc.Close()
|
||||
@ -196,62 +218,3 @@ func unmarshalFileToJSON(file *zip.File) (contents []map[string]interface{}, err
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func restoreConfig(configFile, dotEnvFile *zip.File) error {
|
||||
if configFile != nil {
|
||||
if configFile.UncompressedSize64 > maxConfigSize {
|
||||
return fmt.Errorf("config file too large, is %d, max size is %d", configFile.UncompressedSize64, maxConfigSize)
|
||||
}
|
||||
|
||||
outFile, err := os.OpenFile(configFile.Name, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, configFile.Mode())
|
||||
if err != nil {
|
||||
return fmt.Errorf("could not open config file for writing: %w", err)
|
||||
}
|
||||
|
||||
cfgr, err := configFile.Open()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// #nosec - We eliminated the potential decompression bomb by erroring out above if the file is larger than a threshold.
|
||||
_, err = io.Copy(outFile, cfgr)
|
||||
if err != nil {
|
||||
return fmt.Errorf("could not create config file: %w", err)
|
||||
}
|
||||
|
||||
_ = cfgr.Close()
|
||||
_ = outFile.Close()
|
||||
|
||||
log.Infof("The config file has been restored to '%s'.", configFile.Name)
|
||||
log.Infof("You can now make changes to it, hit enter when you're done.")
|
||||
if _, err := bufio.NewReader(os.Stdin).ReadString('\n'); err != nil {
|
||||
return fmt.Errorf("could not read from stdin: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
log.Warning("No config file found, not restoring one.")
|
||||
log.Warning("You'll likely have had Vikunja configured through environment variables.")
|
||||
|
||||
if dotEnvFile != nil {
|
||||
dotenv, err := dotEnvFile.Open()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
buf := bytes.Buffer{}
|
||||
_, err = buf.ReadFrom(dotenv)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
log.Warningf("Please make sure the following settings are properly configured in your instance:\n%s", buf.String())
|
||||
log.Warning("Make sure your current config matches the following env variables, confirm by pressing enter when done.")
|
||||
log.Warning("If your config does not match, you'll have to make the changes and restart the restoring process afterwards.")
|
||||
if _, err := bufio.NewReader(os.Stdin).ReadString('\n'); err != nil {
|
||||
return fmt.Errorf("could not read from stdin: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
@ -20,11 +20,10 @@ import (
|
||||
"bytes"
|
||||
"io/ioutil"
|
||||
|
||||
"code.vikunja.io/api/pkg/modules/background/handler"
|
||||
|
||||
"xorm.io/xorm"
|
||||
|
||||
"code.vikunja.io/api/pkg/db"
|
||||
"code.vikunja.io/api/pkg/files"
|
||||
"code.vikunja.io/api/pkg/log"
|
||||
"code.vikunja.io/api/pkg/models"
|
||||
"code.vikunja.io/api/pkg/user"
|
||||
@ -52,29 +51,13 @@ func insertFromStructure(s *xorm.Session, str []*models.NamespaceWithListsAndTas
|
||||
|
||||
labels := make(map[string]*models.Label)
|
||||
|
||||
archivedLists := []int64{}
|
||||
archivedNamespaces := []int64{}
|
||||
|
||||
// Create all namespaces
|
||||
for _, n := range str {
|
||||
n.ID = 0
|
||||
|
||||
// Saving the archived status to archive the namespace again after creating it
|
||||
var wasArchived bool
|
||||
if n.IsArchived {
|
||||
n.IsArchived = false
|
||||
wasArchived = true
|
||||
}
|
||||
|
||||
err = n.Create(s, user)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
if wasArchived {
|
||||
archivedNamespaces = append(archivedNamespaces, n.ID)
|
||||
}
|
||||
|
||||
log.Debugf("[creating structure] Created namespace %d", n.ID)
|
||||
log.Debugf("[creating structure] Creating %d lists", len(n.Lists))
|
||||
|
||||
@ -87,39 +70,29 @@ func insertFromStructure(s *xorm.Session, str []*models.NamespaceWithListsAndTas
|
||||
originalBackgroundInformation := l.BackgroundInformation
|
||||
needsDefaultBucket := false
|
||||
|
||||
// Saving the archived status to archive the list again after creating it
|
||||
var wasArchived bool
|
||||
if l.IsArchived {
|
||||
wasArchived = true
|
||||
l.IsArchived = false
|
||||
}
|
||||
|
||||
l.NamespaceID = n.ID
|
||||
l.ID = 0
|
||||
err = l.Create(s, user)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
if wasArchived {
|
||||
archivedLists = append(archivedLists, l.ID)
|
||||
}
|
||||
|
||||
log.Debugf("[creating structure] Created list %d", l.ID)
|
||||
|
||||
bf, is := originalBackgroundInformation.(*bytes.Buffer)
|
||||
backgroundFile, is := originalBackgroundInformation.(*bytes.Buffer)
|
||||
if is {
|
||||
|
||||
backgroundFile := bytes.NewReader(bf.Bytes())
|
||||
|
||||
log.Debugf("[creating structure] Creating a background file for list %d", l.ID)
|
||||
|
||||
err = handler.SaveBackgroundFile(s, user, &l.List, backgroundFile, "", uint64(backgroundFile.Len()))
|
||||
file, err := files.Create(backgroundFile, "", uint64(backgroundFile.Len()), user)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
log.Debugf("[creating structure] Created a background file for list %d", l.ID)
|
||||
err = models.SetListBackground(s, l.ID, file)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
log.Debugf("[creating structure] Created a background file as new file %d for list %d", file.ID, l.ID)
|
||||
}
|
||||
|
||||
// Create all buckets
|
||||
@ -243,7 +216,7 @@ func insertFromStructure(s *xorm.Session, str []*models.NamespaceWithListsAndTas
|
||||
TaskID: t.ID,
|
||||
}
|
||||
err = lt.Create(s, user)
|
||||
if err != nil && !models.IsErrLabelIsAlreadyOnTask(err) {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
log.Debugf("[creating structure] Associated task %d with label %d", t.ID, lb.ID)
|
||||
@ -278,26 +251,6 @@ func insertFromStructure(s *xorm.Session, str []*models.NamespaceWithListsAndTas
|
||||
}
|
||||
}
|
||||
|
||||
if len(archivedLists) > 0 {
|
||||
_, err = s.
|
||||
Cols("is_archived").
|
||||
In("id", archivedLists).
|
||||
Update(&models.List{IsArchived: true})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if len(archivedNamespaces) > 0 {
|
||||
_, err = s.
|
||||
Cols("is_archived").
|
||||
In("id", archivedNamespaces).
|
||||
Update(&models.Namespace{IsArchived: true})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
log.Debugf("[creating structure] Done inserting new task structure")
|
||||
|
||||
return nil
|
||||
|
@ -23,7 +23,6 @@ import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"code.vikunja.io/api/pkg/config"
|
||||
@ -34,7 +33,6 @@ import (
|
||||
)
|
||||
|
||||
const apiScopes = `tasks.read tasks.read.shared`
|
||||
const apiPrefix = `https://graph.microsoft.com/v1.0/me/todo/`
|
||||
|
||||
type Migration struct {
|
||||
Code string `json:"code"`
|
||||
@ -94,7 +92,6 @@ type recurrence struct {
|
||||
|
||||
type tasksResponse struct {
|
||||
OdataContext string `json:"@odata.context"`
|
||||
Nextlink string `json:"@odata.nextLink"`
|
||||
Value []*task `json:"value"`
|
||||
}
|
||||
|
||||
@ -181,7 +178,7 @@ func getMicrosoftGraphAuthToken(code string) (accessToken string, err error) {
|
||||
}
|
||||
|
||||
func makeAuthenticatedGetRequest(token, urlPart string, v interface{}) error {
|
||||
req, err := http.NewRequestWithContext(context.Background(), http.MethodGet, apiPrefix+urlPart, nil)
|
||||
req, err := http.NewRequestWithContext(context.Background(), http.MethodGet, "https://graph.microsoft.com/v1.0/me/todo/"+urlPart, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@ -227,30 +224,17 @@ func getMicrosoftTodoData(token string) (microsoftTodoData []*list, err error) {
|
||||
log.Debugf("[Microsoft Todo Migration] Got %d lists", len(lists.Value))
|
||||
|
||||
for _, list := range lists.Value {
|
||||
link := "lists/" + list.ID + "/tasks"
|
||||
list.Tasks = []*task{}
|
||||
|
||||
// Microsoft's Graph API has pagination, so we're going through all pages to get all tasks
|
||||
for {
|
||||
tr := &tasksResponse{}
|
||||
|
||||
err = makeAuthenticatedGetRequest(token, link, tr)
|
||||
if err != nil {
|
||||
log.Errorf("[Microsoft Todo Migration] Could not get tasks for list %s: %s", list.ID, err)
|
||||
return
|
||||
}
|
||||
|
||||
log.Debugf("[Microsoft Todo Migration] Got %d tasks for list %s", len(tr.Value), list.ID)
|
||||
|
||||
list.Tasks = append(list.Tasks, tr.Value...)
|
||||
|
||||
if tr.Nextlink == "" {
|
||||
break
|
||||
}
|
||||
|
||||
link = strings.ReplaceAll(tr.Nextlink, apiPrefix, "")
|
||||
tasksResponse := &tasksResponse{}
|
||||
err = makeAuthenticatedGetRequest(token, "lists/"+list.ID+"/tasks", tasksResponse)
|
||||
if err != nil {
|
||||
log.Errorf("[Microsoft Todo Migration] Could not get tasks for list %s: %s", list.ID, err)
|
||||
return
|
||||
}
|
||||
|
||||
log.Debugf("[Microsoft Todo Migration] Got %d tasks for list %s", len(tasksResponse.Value), list.ID)
|
||||
|
||||
list.Tasks = tasksResponse.Value
|
||||
|
||||
microsoftTodoData = append(microsoftTodoData, list)
|
||||
}
|
||||
|
||||
|
@ -241,15 +241,6 @@ func (m *Migration) AuthURL() string {
|
||||
}
|
||||
|
||||
func parseDate(dateString string) (date time.Time, err error) {
|
||||
if len(dateString) == 10 {
|
||||
// We're probably dealing with a date in the form of 2021-11-23 without a time
|
||||
date, err = time.Parse("2006-01-02", dateString)
|
||||
if err == nil {
|
||||
// round the day to eod
|
||||
return date.Add(time.Hour*23 + time.Minute*59), nil
|
||||
}
|
||||
}
|
||||
|
||||
date, err = time.Parse("2006-01-02T15:04:05Z", dateString)
|
||||
if err != nil {
|
||||
date, err = time.Parse("2006-01-02T15:04:05", dateString)
|
||||
|
@ -39,7 +39,7 @@ func TestConvertTodoistToVikunja(t *testing.T) {
|
||||
time3, err := time.Parse(time.RFC3339Nano, "2014-10-21T08:25:05Z")
|
||||
assert.NoError(t, err)
|
||||
time3 = time3.In(config.GetTimeZone())
|
||||
dueTime, err := time.Parse(time.RFC3339Nano, "2020-05-31T23:59:00Z")
|
||||
dueTime, err := time.Parse(time.RFC3339Nano, "2020-05-31T00:00:00Z")
|
||||
assert.NoError(t, err)
|
||||
dueTime = dueTime.In(config.GetTimeZone())
|
||||
dueTimeWithTime, err := time.Parse(time.RFC3339Nano, "2021-01-31T19:00:00Z")
|
||||
@ -401,7 +401,7 @@ func TestConvertTodoistToVikunja(t *testing.T) {
|
||||
Done: false,
|
||||
Created: time1,
|
||||
Reminders: []time.Time{
|
||||
time.Date(2020, time.June, 15, 23, 59, 0, 0, time.UTC).In(config.GetTimeZone()),
|
||||
time.Date(2020, time.June, 15, 0, 0, 0, 0, time.UTC).In(config.GetTimeZone()),
|
||||
time.Date(2020, time.June, 16, 7, 0, 0, 0, time.UTC).In(config.GetTimeZone()),
|
||||
},
|
||||
},
|
||||
|
@ -64,7 +64,7 @@ func (v *FileMigrator) Name() string {
|
||||
func (v *FileMigrator) Migrate(user *user.User, file io.ReaderAt, size int64) error {
|
||||
r, err := zip.NewReader(file, size)
|
||||
if err != nil {
|
||||
return fmt.Errorf("could not open import file: %w", err)
|
||||
return fmt.Errorf("could not open import file: %s", err)
|
||||
}
|
||||
|
||||
log.Debugf(logPrefix+"Importing a zip file containing %d files", len(r.File))
|
||||
@ -77,7 +77,7 @@ func (v *FileMigrator) Migrate(user *user.User, file io.ReaderAt, size int64) er
|
||||
fname := strings.ReplaceAll(f.Name, "files/", "")
|
||||
id, err := strconv.ParseInt(fname, 10, 64)
|
||||
if err != nil {
|
||||
return fmt.Errorf("could not convert file id: %w", err)
|
||||
return fmt.Errorf("could not convert file id: %s", err)
|
||||
}
|
||||
storedFiles[id] = f
|
||||
log.Debugf(logPrefix + "Found a blob file")
|
||||
@ -104,18 +104,18 @@ func (v *FileMigrator) Migrate(user *user.User, file io.ReaderAt, size int64) er
|
||||
// Import the bulk of Vikunja data
|
||||
df, err := dataFile.Open()
|
||||
if err != nil {
|
||||
return fmt.Errorf("could not open data file: %w", err)
|
||||
return fmt.Errorf("could not open data file: %s", err)
|
||||
}
|
||||
defer df.Close()
|
||||
|
||||
var bufData bytes.Buffer
|
||||
if _, err := bufData.ReadFrom(df); err != nil {
|
||||
return fmt.Errorf("could not read data file: %w", err)
|
||||
return fmt.Errorf("could not read data file: %s", err)
|
||||
}
|
||||
|
||||
namespaces := []*models.NamespaceWithListsAndTasks{}
|
||||
if err := json.Unmarshal(bufData.Bytes(), &namespaces); err != nil {
|
||||
return fmt.Errorf("could not read data: %w", err)
|
||||
return fmt.Errorf("could not read data: %s", err)
|
||||
}
|
||||
|
||||
for _, n := range namespaces {
|
||||
@ -123,11 +123,11 @@ func (v *FileMigrator) Migrate(user *user.User, file io.ReaderAt, size int64) er
|
||||
if b, exists := storedFiles[l.BackgroundFileID]; exists {
|
||||
bf, err := b.Open()
|
||||
if err != nil {
|
||||
return fmt.Errorf("could not open list background file %d for reading: %w", l.BackgroundFileID, err)
|
||||
return fmt.Errorf("could not open list background file %d for reading: %s", l.BackgroundFileID, err)
|
||||
}
|
||||
var buf bytes.Buffer
|
||||
if _, err := buf.ReadFrom(bf); err != nil {
|
||||
return fmt.Errorf("could not read list background file %d: %w", l.BackgroundFileID, err)
|
||||
return fmt.Errorf("could not read list background file %d: %s", l.BackgroundFileID, err)
|
||||
}
|
||||
|
||||
l.BackgroundInformation = &buf
|
||||
@ -143,11 +143,11 @@ func (v *FileMigrator) Migrate(user *user.User, file io.ReaderAt, size int64) er
|
||||
for _, attachment := range t.Attachments {
|
||||
af, err := storedFiles[attachment.File.ID].Open()
|
||||
if err != nil {
|
||||
return fmt.Errorf("could not open attachment %d for reading: %w", attachment.ID, err)
|
||||
return fmt.Errorf("could not open attachment %d for reading: %s", attachment.ID, err)
|
||||
}
|
||||
var buf bytes.Buffer
|
||||
if _, err := buf.ReadFrom(af); err != nil {
|
||||
return fmt.Errorf("could not read attachment %d: %w", attachment.ID, err)
|
||||
return fmt.Errorf("could not read attachment %d: %s", attachment.ID, err)
|
||||
}
|
||||
|
||||
attachment.ID = 0
|
||||
@ -160,7 +160,7 @@ func (v *FileMigrator) Migrate(user *user.User, file io.ReaderAt, size int64) er
|
||||
|
||||
err = migration.InsertFromStructure(namespaces, user)
|
||||
if err != nil {
|
||||
return fmt.Errorf("could not insert data: %w", err)
|
||||
return fmt.Errorf("could not insert data: %s", err)
|
||||
}
|
||||
|
||||
if filterFile == nil {
|
||||
@ -172,18 +172,18 @@ func (v *FileMigrator) Migrate(user *user.User, file io.ReaderAt, size int64) er
|
||||
// Import filters
|
||||
ff, err := filterFile.Open()
|
||||
if err != nil {
|
||||
return fmt.Errorf("could not open filters file: %w", err)
|
||||
return fmt.Errorf("could not open filters file: %s", err)
|
||||
}
|
||||
defer ff.Close()
|
||||
|
||||
var bufFilter bytes.Buffer
|
||||
if _, err := bufFilter.ReadFrom(ff); err != nil {
|
||||
return fmt.Errorf("could not read filters file: %w", err)
|
||||
return fmt.Errorf("could not read filters file: %s", err)
|
||||
}
|
||||
|
||||
filters := []*models.SavedFilter{}
|
||||
if err := json.Unmarshal(bufFilter.Bytes(), &filters); err != nil {
|
||||
return fmt.Errorf("could not read filter data: %w", err)
|
||||
return fmt.Errorf("could not read filter data: %s", err)
|
||||
}
|
||||
|
||||
log.Debugf(logPrefix+"Importing %d saved filters", len(filters))
|
||||
|
@ -25,7 +25,6 @@ import (
|
||||
"code.vikunja.io/api/pkg/modules/avatar/empty"
|
||||
"code.vikunja.io/api/pkg/modules/avatar/gravatar"
|
||||
"code.vikunja.io/api/pkg/modules/avatar/initials"
|
||||
"code.vikunja.io/api/pkg/modules/avatar/marble"
|
||||
"code.vikunja.io/api/pkg/modules/avatar/upload"
|
||||
"code.vikunja.io/api/pkg/user"
|
||||
"code.vikunja.io/web/handler"
|
||||
@ -78,8 +77,6 @@ func GetAvatar(c echo.Context) error {
|
||||
avatarProvider = &initials.Provider{}
|
||||
case "upload":
|
||||
avatarProvider = &upload.Provider{}
|
||||
case "marble":
|
||||
avatarProvider = &marble.Provider{}
|
||||
default:
|
||||
avatarProvider = &empty.Provider{}
|
||||
}
|
||||
|
@ -102,7 +102,7 @@ func Login(c echo.Context) error {
|
||||
}
|
||||
|
||||
// Create token
|
||||
return auth.NewUserAuthTokenResponse(user, c, u.LongToken)
|
||||
return auth.NewUserAuthTokenResponse(user, c)
|
||||
}
|
||||
|
||||
// RenewToken gives a new token to every user with a valid token
|
||||
@ -156,12 +156,6 @@ func RenewToken(c echo.Context) (err error) {
|
||||
return handler.HandleHTTPError(err, c)
|
||||
}
|
||||
|
||||
var long bool
|
||||
lng, has := claims["long"]
|
||||
if has {
|
||||
long = lng.(bool)
|
||||
}
|
||||
|
||||
// Create token
|
||||
return auth.NewUserAuthTokenResponse(user, c, long)
|
||||
return auth.NewUserAuthTokenResponse(user, c)
|
||||
}
|
||||
|
@ -1,112 +0,0 @@
|
||||
// Vikunja is a to-do list application to facilitate your life.
|
||||
// Copyright 2018-2021 Vikunja and contributors. All rights reserved.
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public Licensee as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public Licensee for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public Licensee
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
package v1
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"strconv"
|
||||
|
||||
"code.vikunja.io/api/pkg/models"
|
||||
|
||||
"code.vikunja.io/api/pkg/user"
|
||||
"code.vikunja.io/web/handler"
|
||||
"github.com/labstack/echo/v4"
|
||||
)
|
||||
|
||||
// GenerateCaldavToken is the handler to create a caldav token
|
||||
// @Summary Generate a caldav token
|
||||
// @Description Generates a caldav token which can be used for the caldav api. It is not possible to see the token again after it was generated.
|
||||
// @tags user
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Security JWTKeyAuth
|
||||
// @Success 200 {object} user.Token
|
||||
// @Failure 400 {object} web.HTTPError "Something's invalid."
|
||||
// @Failure 404 {object} web.HTTPError "User does not exist."
|
||||
// @Failure 500 {object} models.Message "Internal server error."
|
||||
// @Router /user/settings/token/caldav [put]
|
||||
func GenerateCaldavToken(c echo.Context) (err error) {
|
||||
|
||||
u, err := user.GetCurrentUser(c)
|
||||
if err != nil {
|
||||
return handler.HandleHTTPError(err, c)
|
||||
}
|
||||
|
||||
token, err := user.GenerateNewCaldavToken(u)
|
||||
if err != nil {
|
||||
return handler.HandleHTTPError(err, c)
|
||||
}
|
||||
|
||||
return c.JSON(http.StatusCreated, token)
|
||||
}
|
||||
|
||||
// GetCaldavTokens is the handler to return a list of all caldav tokens for the current user
|
||||
// @Summary Returns the caldav tokens for the current user
|
||||
// @Description Return the IDs and created dates of all caldav tokens for the current user.
|
||||
// @tags user
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Security JWTKeyAuth
|
||||
// @Success 200 {array} user.Token
|
||||
// @Failure 400 {object} web.HTTPError "Something's invalid."
|
||||
// @Failure 404 {object} web.HTTPError "User does not exist."
|
||||
// @Failure 500 {object} models.Message "Internal server error."
|
||||
// @Router /user/settings/token/caldav [get]
|
||||
func GetCaldavTokens(c echo.Context) error {
|
||||
u, err := user.GetCurrentUser(c)
|
||||
if err != nil {
|
||||
return handler.HandleHTTPError(err, c)
|
||||
}
|
||||
|
||||
tokens, err := user.GetCaldavTokens(u)
|
||||
if err != nil {
|
||||
return handler.HandleHTTPError(err, c)
|
||||
}
|
||||
|
||||
return c.JSON(http.StatusCreated, tokens)
|
||||
}
|
||||
|
||||
// DeleteCaldavToken is the handler to delete a caldv token
|
||||
// @Summary Delete a caldav token by id
|
||||
// @tags user
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Security JWTKeyAuth
|
||||
// @Param id path int true "Token ID"
|
||||
// @Success 200 {object} models.Message
|
||||
// @Failure 400 {object} web.HTTPError "Something's invalid."
|
||||
// @Failure 404 {object} web.HTTPError "User does not exist."
|
||||
// @Failure 500 {object} models.Message "Internal server error."
|
||||
// @Router /user/settings/token/caldav/{id} [get]
|
||||
func DeleteCaldavToken(c echo.Context) error {
|
||||
u, err := user.GetCurrentUser(c)
|
||||
if err != nil {
|
||||
return handler.HandleHTTPError(err, c)
|
||||
}
|
||||
|
||||
id, err := strconv.ParseInt(c.Param("id"), 10, 64)
|
||||
if err != nil {
|
||||
return handler.HandleHTTPError(err, c)
|
||||
}
|
||||
|
||||
err = user.DeleteCaldavTokenByID(u, id)
|
||||
if err != nil {
|
||||
return handler.HandleHTTPError(err, c)
|
||||
}
|
||||
|
||||
return c.JSON(http.StatusOK, &models.Message{Message: "The token was deleted successfully."})
|
||||
}
|
@ -19,18 +19,17 @@ package v1
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/labstack/echo/v4"
|
||||
"github.com/tkuchiki/go-timezone"
|
||||
|
||||
"code.vikunja.io/api/pkg/db"
|
||||
|
||||
"code.vikunja.io/api/pkg/models"
|
||||
user2 "code.vikunja.io/api/pkg/user"
|
||||
"code.vikunja.io/web/handler"
|
||||
"github.com/labstack/echo/v4"
|
||||
)
|
||||
|
||||
// UserAvatarProvider holds the user avatar provider type
|
||||
type UserAvatarProvider struct {
|
||||
// The avatar provider. Valid types are `gravatar` (uses the user email), `upload`, `initials`, `marble` (generates a random avatar for each user), `default`.
|
||||
// The avatar provider. Valid types are `gravatar` (uses the user email), `upload`, `initials`, `default`.
|
||||
AvatarProvider string `json:"avatar_provider"`
|
||||
}
|
||||
|
||||
@ -49,13 +48,8 @@ type UserSettings struct {
|
||||
// If a task is created without a specified list this value should be used. Applies
|
||||
// to tasks made directly in API and from clients.
|
||||
DefaultListID int64 `json:"default_list_id"`
|
||||
HomeListID int64 `json:"home_list_id"`
|
||||
// The day when the week starts for this user. 0 = sunday, 1 = monday, etc.
|
||||
WeekStart int `json:"week_start"`
|
||||
// The user's language
|
||||
Language string `json:"language"`
|
||||
// The user's time zone. Used to send task reminders in the time zone of the user.
|
||||
Timezone string `json:"timezone"`
|
||||
}
|
||||
|
||||
// GetUserAvatarProvider returns the currently set user avatar
|
||||
@ -182,10 +176,7 @@ func UpdateGeneralUserSettings(c echo.Context) error {
|
||||
user.DiscoverableByName = us.DiscoverableByName
|
||||
user.OverdueTasksRemindersEnabled = us.OverdueTasksRemindersEnabled
|
||||
user.DefaultListID = us.DefaultListID
|
||||
user.HomeListID = us.HomeListID
|
||||
user.WeekStart = us.WeekStart
|
||||
user.Language = us.Language
|
||||
user.Timezone = us.Timezone
|
||||
|
||||
_, err = user2.UpdateUser(s, user)
|
||||
if err != nil {
|
||||
@ -200,31 +191,3 @@ func UpdateGeneralUserSettings(c echo.Context) error {
|
||||
|
||||
return c.JSON(http.StatusOK, &models.Message{Message: "The settings were updated successfully."})
|
||||
}
|
||||
|
||||
// GetAvailableTimezones
|
||||
// @Summary Get all available time zones on this vikunja instance
|
||||
// @Description Because available time zones depend on the system Vikunja is running on, this endpoint returns a list of all valid time zones this particular Vikunja instance can handle. The list of time zones is not sorted, you should sort it on the client.
|
||||
// @tags user
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Security JWTKeyAuth
|
||||
// @Success 200 {array} string "All available time zones."
|
||||
// @Failure 500 {object} models.Message "Internal server error."
|
||||
// @Router /user/timezones [get]
|
||||
func GetAvailableTimezones(c echo.Context) error {
|
||||
|
||||
allTimezones := timezone.New().Timezones()
|
||||
timezoneMap := make(map[string]bool) // to filter all duplicates
|
||||
for _, s := range allTimezones {
|
||||
for _, t := range s {
|
||||
timezoneMap[t] = true
|
||||
}
|
||||
}
|
||||
|
||||
ts := []string{}
|
||||
for s := range timezoneMap {
|
||||
ts = append(ts, s)
|
||||
}
|
||||
|
||||
return c.JSON(http.StatusOK, ts)
|
||||
}
|
||||
|
@ -72,10 +72,7 @@ func UserShow(c echo.Context) error {
|
||||
DiscoverableByEmail: u.DiscoverableByEmail,
|
||||
OverdueTasksRemindersEnabled: u.OverdueTasksRemindersEnabled,
|
||||
DefaultListID: u.DefaultListID,
|
||||
HomeListID: u.HomeListID,
|
||||
WeekStart: u.WeekStart,
|
||||
Language: u.Language,
|
||||
Timezone: u.Timezone,
|
||||
},
|
||||
DeletionScheduledAt: u.DeletionScheduledAt,
|
||||
IsLocalUser: u.Issuer == user.IssuerLocal,
|
||||
|
@ -18,7 +18,6 @@ package v1
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"image/jpeg"
|
||||
"net/http"
|
||||
@ -92,8 +91,7 @@ func UserTOTPEnable(c echo.Context) error {
|
||||
}
|
||||
if err := c.Bind(passcode); err != nil {
|
||||
log.Debugf("Invalid model error. Internal error was: %s", err.Error())
|
||||
var he *echo.HTTPError
|
||||
if errors.As(err, &he) {
|
||||
if he, is := err.(*echo.HTTPError); is {
|
||||
return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid model provided. Error was: %s", he.Message))
|
||||
}
|
||||
return echo.NewHTTPError(http.StatusBadRequest, "Invalid model provided.")
|
||||
@ -133,8 +131,7 @@ func UserTOTPDisable(c echo.Context) error {
|
||||
login := &user.Login{}
|
||||
if err := c.Bind(login); err != nil {
|
||||
log.Debugf("Invalid model error. Internal error was: %s", err.Error())
|
||||
var he *echo.HTTPError
|
||||
if errors.As(err, &he) {
|
||||
if he, is := err.(*echo.HTTPError); is {
|
||||
return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid model provided. Error was: %s", he.Message))
|
||||
}
|
||||
return echo.NewHTTPError(http.StatusBadRequest, "Invalid model provided.")
|
||||
|
@ -17,7 +17,6 @@
|
||||
package v1
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
|
||||
@ -48,8 +47,7 @@ func UpdateUserEmail(c echo.Context) (err error) {
|
||||
var emailUpdate = &user.EmailUpdate{}
|
||||
if err := c.Bind(emailUpdate); err != nil {
|
||||
log.Debugf("Invalid model error. Internal error was: %s", err.Error())
|
||||
var he *echo.HTTPError
|
||||
if errors.As(err, &he) {
|
||||
if he, is := err.(*echo.HTTPError); is {
|
||||
return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid model provided. Error was: %s", he.Message))
|
||||
}
|
||||
return echo.NewHTTPError(http.StatusBadRequest, "Invalid model provided.")
|
||||
|
@ -1,70 +0,0 @@
|
||||
// Vikunja is a to-do list application to facilitate your life.
|
||||
// Copyright 2018-2021 Vikunja and contributors. All rights reserved.
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU Affero General Public Licensee as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU Affero General Public Licensee for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU Affero General Public Licensee
|
||||
// along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
package caldav
|
||||
|
||||
import (
|
||||
"errors"
|
||||
|
||||
"code.vikunja.io/api/pkg/db"
|
||||
"code.vikunja.io/api/pkg/log"
|
||||
"code.vikunja.io/api/pkg/user"
|
||||
|
||||
"github.com/labstack/echo/v4"
|
||||
"golang.org/x/crypto/bcrypt"
|
||||
)
|
||||
|
||||
func BasicAuth(username, password string, c echo.Context) (bool, error) {
|
||||
creds := &user.Login{
|
||||
Username: username,
|
||||
Password: password,
|
||||
}
|
||||
s := db.NewSession()
|
||||
defer s.Close()
|
||||
u, err := user.CheckUserCredentials(s, creds)
|
||||
if err != nil && !user.IsErrWrongUsernameOrPassword(err) {
|
||||
log.Errorf("Error during basic auth for caldav: %v", err)
|
||||
return false, nil
|
||||
}
|
||||
|
||||
if err == nil {
|
||||
c.Set("userBasicAuth", u)
|
||||
return true, nil
|
||||
}
|
||||
|
||||
tokens, err := user.GetCaldavTokens(u)
|
||||
if err != nil {
|
||||
log.Errorf("Error while getting tokens for caldav auth: %v", err)
|
||||
return false, nil
|
||||
}
|
||||
|
||||
// Looping over all tokens until we find one that matches
|
||||
for _, token := range tokens {
|
||||
err = bcrypt.CompareHashAndPassword([]byte(token.Token), []byte(password))
|
||||
if err != nil {
|
||||
if errors.Is(err, bcrypt.ErrMismatchedHashAndPassword) {
|
||||
continue
|
||||
}
|
||||
log.Errorf("Error while verifying tokens for caldav auth: %v", err)
|
||||
return false, nil
|
||||
}
|
||||
|
||||
c.Set("userBasicAuth", u)
|
||||
return true, nil
|
||||
}
|
||||
|
||||
return false, nil
|
||||
}
|
@ -75,6 +75,7 @@ import (
|
||||
apiv1 "code.vikunja.io/api/pkg/routes/api/v1"
|
||||
"code.vikunja.io/api/pkg/routes/caldav"
|
||||
_ "code.vikunja.io/api/pkg/swagger" // To generate swagger docs
|
||||
"code.vikunja.io/api/pkg/user"
|
||||
"code.vikunja.io/api/pkg/version"
|
||||
"code.vikunja.io/web"
|
||||
"code.vikunja.io/web/handler"
|
||||
@ -155,8 +156,7 @@ func NewEcho() *echo.Echo {
|
||||
|
||||
e.HTTPErrorHandler = func(err error, c echo.Context) {
|
||||
// Only capture errors not already handled by echo
|
||||
var herr *echo.HTTPError
|
||||
if errors.As(err, &herr) {
|
||||
if _, ok := err.(*echo.HTTPError); !ok {
|
||||
hub := sentryecho.GetHubFromContext(c)
|
||||
if hub != nil {
|
||||
hub.WithScope(func(scope *sentry.Scope) {
|
||||
@ -193,7 +193,7 @@ func RegisterRoutes(e *echo.Echo) {
|
||||
if config.ServiceEnableCaldav.GetBool() {
|
||||
// Caldav routes
|
||||
wkg := e.Group("/.well-known")
|
||||
wkg.Use(middleware.BasicAuth(caldav.BasicAuth))
|
||||
wkg.Use(middleware.BasicAuth(caldavBasicAuth))
|
||||
wkg.Any("/caldav", caldav.PrincipalHandler)
|
||||
wkg.Any("/caldav/", caldav.PrincipalHandler)
|
||||
c := e.Group("/dav")
|
||||
@ -203,11 +203,6 @@ func RegisterRoutes(e *echo.Echo) {
|
||||
// healthcheck
|
||||
e.GET("/health", HealthcheckHandler)
|
||||
|
||||
// static files
|
||||
if static := config.ServiceStaticpath.GetString(); static != "" {
|
||||
e.Use(middleware.Static(static))
|
||||
}
|
||||
|
||||
// CORS_SHIT
|
||||
if config.CorsEnable.GetBool() {
|
||||
e.Use(middleware.CORSWithConfig(middleware.CORSConfig{
|
||||
@ -326,10 +321,6 @@ func registerAPIRoutes(a *echo.Group) {
|
||||
u.POST("/settings/general", apiv1.UpdateGeneralUserSettings)
|
||||
u.POST("/export/request", apiv1.RequestUserDataExport)
|
||||
u.POST("/export/download", apiv1.DownloadUserDataExport)
|
||||
u.GET("/timezones", apiv1.GetAvailableTimezones)
|
||||
u.PUT("/settings/token/caldav", apiv1.GenerateCaldavToken)
|
||||
u.GET("/settings/token/caldav", apiv1.GetCaldavTokens)
|
||||
u.DELETE("/settings/token/caldav/:id", apiv1.DeleteCaldavToken)
|
||||
|
||||
if config.ServiceEnableTotp.GetBool() {
|
||||
u.GET("/settings/totp", apiv1.UserTOTP)
|
||||
@ -670,7 +661,7 @@ func registerMigrations(m *echo.Group) {
|
||||
func registerCalDavRoutes(c *echo.Group) {
|
||||
|
||||
// Basic auth middleware
|
||||
c.Use(middleware.BasicAuth(caldav.BasicAuth))
|
||||
c.Use(middleware.BasicAuth(caldavBasicAuth))
|
||||
|
||||
// THIS is the entry point for caldav clients, otherwise lists will show up double
|
||||
c.Any("", caldav.EntryHandler)
|
||||
@ -682,3 +673,26 @@ func registerCalDavRoutes(c *echo.Group) {
|
||||
c.Any("/lists/:list/", caldav.ListHandler)
|
||||
c.Any("/lists/:list/:task", caldav.TaskHandler) // Mostly used for editing
|
||||
}
|
||||
|
||||
func caldavBasicAuth(username, password string, c echo.Context) (bool, error) {
|
||||
creds := &user.Login{
|
||||
Username: username,
|
||||
Password: password,
|
||||
}
|
||||
s := db.NewSession()
|
||||
defer s.Close()
|
||||
u, err := user.CheckUserCredentials(s, creds)
|
||||
if err != nil {
|
||||
_ = s.Rollback()
|
||||
log.Errorf("Error during basic auth for caldav: %v", err)
|
||||
return false, nil
|
||||
}
|
||||
|
||||
if err := s.Commit(); err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
// Save the user in echo context for later use
|
||||
c.Set("userBasicAuth", u)
|
||||
return true, nil
|
||||
}
|
||||
|
@ -1,10 +1,17 @@
|
||||
// Package swagger GENERATED BY SWAG; DO NOT EDIT
|
||||
// Package swagger GENERATED BY THE COMMAND ABOVE; DO NOT EDIT
|
||||
// This file was generated by swaggo/swag
|
||||
package swagger
|
||||
|
||||
import "github.com/swaggo/swag"
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"strings"
|
||||
"text/template"
|
||||
|
||||
const docTemplate = `{
|
||||
"github.com/swaggo/swag"
|
||||
)
|
||||
|
||||
var doc = `{
|
||||
"schemes": {{ marshal .Schemes }},
|
||||
"swagger": "2.0",
|
||||
"info": {
|
||||
@ -1988,7 +1995,7 @@ const docTemplate = `{
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "The value to filter for. You can use [grafana](https://grafana.com/docs/grafana/latest/dashboards/time-range-controls)- or [elasticsearch](https://www.elastic.co/guide/en/elasticsearch/reference/7.3/common-options.html#date-math)-style relative dates for all date fields like ` + "`" + `due_date` + "`" + `, ` + "`" + `start_date` + "`" + `, ` + "`" + `end_date` + "`" + `, etc.",
|
||||
"description": "The value to filter for.",
|
||||
"name": "filter_value",
|
||||
"in": "query"
|
||||
},
|
||||
@ -7019,153 +7026,6 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"/user/settings/token/caldav": {
|
||||
"get": {
|
||||
"security": [
|
||||
{
|
||||
"JWTKeyAuth": []
|
||||
}
|
||||
],
|
||||
"description": "Return the IDs and created dates of all caldav tokens for the current user.",
|
||||
"consumes": [
|
||||
"application/json"
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"user"
|
||||
],
|
||||
"summary": "Returns the caldav tokens for the current user",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/user.Token"
|
||||
}
|
||||
}
|
||||
},
|
||||
"400": {
|
||||
"description": "Something's invalid.",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/web.HTTPError"
|
||||
}
|
||||
},
|
||||
"404": {
|
||||
"description": "User does not exist.",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/web.HTTPError"
|
||||
}
|
||||
},
|
||||
"500": {
|
||||
"description": "Internal server error.",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/models.Message"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"put": {
|
||||
"security": [
|
||||
{
|
||||
"JWTKeyAuth": []
|
||||
}
|
||||
],
|
||||
"description": "Generates a caldav token which can be used for the caldav api. It is not possible to see the token again after it was generated.",
|
||||
"consumes": [
|
||||
"application/json"
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"user"
|
||||
],
|
||||
"summary": "Generate a caldav token",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/user.Token"
|
||||
}
|
||||
},
|
||||
"400": {
|
||||
"description": "Something's invalid.",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/web.HTTPError"
|
||||
}
|
||||
},
|
||||
"404": {
|
||||
"description": "User does not exist.",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/web.HTTPError"
|
||||
}
|
||||
},
|
||||
"500": {
|
||||
"description": "Internal server error.",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/models.Message"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/user/settings/token/caldav/{id}": {
|
||||
"get": {
|
||||
"security": [
|
||||
{
|
||||
"JWTKeyAuth": []
|
||||
}
|
||||
],
|
||||
"consumes": [
|
||||
"application/json"
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"user"
|
||||
],
|
||||
"summary": "Delete a caldav token by id",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "integer",
|
||||
"description": "Token ID",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/models.Message"
|
||||
}
|
||||
},
|
||||
"400": {
|
||||
"description": "Something's invalid.",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/web.HTTPError"
|
||||
}
|
||||
},
|
||||
"404": {
|
||||
"description": "User does not exist.",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/web.HTTPError"
|
||||
}
|
||||
},
|
||||
"500": {
|
||||
"description": "Internal server error.",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/models.Message"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/user/settings/totp": {
|
||||
"get": {
|
||||
"security": [
|
||||
@ -7400,43 +7260,6 @@ const docTemplate = `{
}
}
},
"/user/timezones": {
"get": {
"security": [
{
"JWTKeyAuth": []
}
],
"description": "Because available time zones depend on the system Vikunja is running on, this endpoint returns a list of all valid time zones this particular Vikunja instance can handle. The list of time zones is not sorted, you should sort it on the client.",
"consumes": [
"application/json"
],
"produces": [
"application/json"
],
"tags": [
"user"
],
"summary": "Get all available time zones on this vikunja instance",
"responses": {
"200": {
"description": "All available time zones.",
"schema": {
"type": "array",
"items": {
"type": "string"
}
}
},
"500": {
"description": "Internal server error.",
"schema": {
"$ref": "#/definitions/models.Message"
}
}
}
}
},
"/user/token": {
"post": {
"description": "Returns a new valid jwt user token with an extended length.",
@ -7577,9 +7400,6 @@ const docTemplate = `{
"background.Image": {
"type": "object",
"properties": {
"blur_hash": {
"type": "string"
},
"id": {
"type": "string"
},
@ -8044,10 +7864,6 @@ const docTemplate = `{
"models.List": {
"type": "object",
"properties": {
"background_blur_hash": {
"description": "Contains a very small version of the list background to use as a blurry preview until the actual background is loaded. Check out https://blurha.sh/ to learn how it works.",
"type": "string"
},
"background_information": {
"description": "Holds extra information about the background set since some background providers require attribution or similar. If not null, the background can be accessed at /lists/{listID}/background"
},
@ -8829,9 +8645,7 @@ const docTemplate = `{
"minLength": 1
},
"right": {
"description": "The right this team has. 0 = Read only, 1 = Read \u0026 Write, 2 = Admin. See the docs for more details.",
"type": "integer",
"default": 0
"type": "integer"
},
"updated": {
"description": "A timestamp when this relation was last updated. You cannot change this value.",
@ -8862,9 +8676,7 @@ const docTemplate = `{
"type": "string"
},
"right": {
"description": "The right this user has. 0 = Read only, 1 = Read \u0026 Write, 2 = Admin. See the docs for more details.",
"type": "integer",
"default": 0
"type": "integer"
},
"updated": {
"description": "A timestamp when this task was last updated. You cannot change this value.",
@ -8998,10 +8810,6 @@ const docTemplate = `{
"user.Login": {
"type": "object",
"properties": {
"long_token": {
"description": "If true, the token returned will be valid a lot longer than default. Useful for \"remember me\" style logins.",
"type": "boolean"
},
"password": {
"description": "The password for the user.",
"type": "string"
@ -9062,20 +8870,6 @@ const docTemplate = `{
}
}
},
"user.Token": {
"type": "object",
"properties": {
"created": {
"type": "string"
},
"id": {
"type": "integer"
},
"token": {
"type": "string"
}
}
},
"user.User": {
"type": "object",
"properties": {
@ -9121,7 +8915,7 @@ const docTemplate = `{
"type": "object",
"properties": {
"avatar_provider": {
"description": "The avatar provider. Valid types are ` + "`" + `gravatar` + "`" + ` (uses the user email), ` + "`" + `upload` + "`" + `, ` + "`" + `initials` + "`" + `, ` + "`" + `marble` + "`" + ` (generates a random avatar for each user), ` + "`" + `default` + "`" + `.",
"description": "The avatar provider. Valid types are ` + "`" + `gravatar` + "`" + ` (uses the user email), ` + "`" + `upload` + "`" + `, ` + "`" + `initials` + "`" + `, ` + "`" + `default` + "`" + `.",
"type": "string"
}
}
@ -9160,10 +8954,6 @@ const docTemplate = `{
"description": "If a task is created without a specified list this value should be used. Applies\nto tasks made directly in API and from clients.",
"type": "integer"
},
"home_list_id": {
"description": "Used by frontend. The User List that the Users wants to be displayed inside the Home View",
"type": "integer"
},
"discoverable_by_email": {
"description": "If true, the user can be found when searching for their exact email.",
"type": "boolean"
@ -9176,10 +8966,6 @@ const docTemplate = `{
"description": "If enabled, sends email reminders of tasks to the user.",
"type": "boolean"
},
"language": {
"description": "The user's language",
"type": "string"
},
"name": {
"description": "The new name of the current user.",
"type": "string"
@ -9188,10 +8974,6 @@ const docTemplate = `{
"description": "If enabled, the user will get an email for their overdue tasks each morning.",
"type": "boolean"
},
"timezone": {
"description": "The user's time zone. Used to send task reminders in the time zone of the user.",
"type": "string"
},
"week_start": {
"description": "The day when the week starts for this user. 0 = sunday, 1 = monday, etc.",
"type": "integer"
@ -9337,18 +9119,56 @@ const docTemplate = `{
}
}`

type swaggerInfo struct {
Version string
Host string
BasePath string
Schemes []string
Title string
Description string
}

// SwaggerInfo holds exported Swagger Info so clients can modify it
var SwaggerInfo = &swag.Spec{
Version: "",
Host: "",
BasePath: "/api/v1",
Schemes: []string{},
Title: "Vikunja API",
Description: "# Pagination\nEvery endpoint capable of pagination will return two headers:\n* `x-pagination-total-pages`: The total number of available pages for this request\n* `x-pagination-result-count`: The number of items returned for this request.\n# Rights\nAll endpoints which return a single item (list, task, namespace, etc.) - no array - will also return a `x-max-right` header with the max right the user has on this item as an int where `0` is `Read Only`, `1` is `Read & Write` and `2` is `Admin`.\nThis can be used to show or hide ui elements based on the rights the user has.\n# Authorization\n**JWT-Auth:** Main authorization method, used for most of the requests. Needs `Authorization: Bearer <jwt-token>`-header to authenticate successfully.\n\n**BasicAuth:** Only used when requesting tasks via caldav.\n<!-- ReDoc-Inject: <security-definitions> -->",
InfoInstanceName: "swagger",
SwaggerTemplate: docTemplate,
var SwaggerInfo = swaggerInfo{
Version: "",
Host: "",
BasePath: "/api/v1",
Schemes: []string{},
Title: "Vikunja API",
Description: "# Pagination\nEvery endpoint capable of pagination will return two headers:\n* `x-pagination-total-pages`: The total number of available pages for this request\n* `x-pagination-result-count`: The number of items returned for this request.\n# Rights\nAll endpoints which return a single item (list, task, namespace, etc.) - no array - will also return a `x-max-right` header with the max right the user has on this item as an int where `0` is `Read Only`, `1` is `Read & Write` and `2` is `Admin`.\nThis can be used to show or hide ui elements based on the rights the user has.\n# Authorization\n**JWT-Auth:** Main authorization method, used for most of the requests. Needs `Authorization: Bearer <jwt-token>`-header to authenticate successfully.\n\n**BasicAuth:** Only used when requesting tasks via caldav.\n<!-- ReDoc-Inject: <security-definitions> -->",
}

type s struct{}

func (s *s) ReadDoc() string {
sInfo := SwaggerInfo
sInfo.Description = strings.Replace(sInfo.Description, "\n", "\\n", -1)

t, err := template.New("swagger_info").Funcs(template.FuncMap{
"marshal": func(v interface{}) string {
a, _ := json.Marshal(v)
return string(a)
},
"escape": func(v interface{}) string {
// escape tabs
str := strings.Replace(v.(string), "\t", "\\t", -1)
// replace " with \", and if that results in \\", replace that with \\\"
str = strings.Replace(str, "\"", "\\\"", -1)
return strings.Replace(str, "\\\\\"", "\\\\\\\"", -1)
},
}).Parse(doc)
if err != nil {
return doc
}

var tpl bytes.Buffer
if err := t.Execute(&tpl, sInfo); err != nil {
return doc
}

return tpl.String()
}

func init() {
swag.Register(SwaggerInfo.InstanceName(), SwaggerInfo)
swag.Register(swag.Name, &s{})
}

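For context, a minimal self-contained sketch of the template-based ReadDoc path shown in the hunk above (the older swaggo registration style). The doc constant here is a toy stand-in for the generated Swagger template, not Vikunja's real one; the marshal and escape helpers mirror the ones in the diff.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"strings"
	"text/template"
)

// doc stands in for the generated Swagger template; {{escape .Description}}
// and {{marshal .Schemes}} mirror the placeholders swaggo emits.
const doc = `{"basePath":"{{.BasePath}}","schemes":{{marshal .Schemes}},"info":{"description":"{{escape .Description}}","title":"{{.Title}}"}}`

type swaggerInfo struct {
	BasePath    string
	Schemes     []string
	Title       string
	Description string
}

func main() {
	sInfo := swaggerInfo{
		BasePath:    "/api/v1",
		Schemes:     []string{},
		Title:       "Vikunja API",
		Description: "Line one\nLine \"two\"",
	}
	sInfo.Description = strings.Replace(sInfo.Description, "\n", "\\n", -1)

	t, err := template.New("swagger_info").Funcs(template.FuncMap{
		"marshal": func(v interface{}) string {
			a, _ := json.Marshal(v)
			return string(a)
		},
		"escape": func(v interface{}) string {
			str := strings.Replace(v.(string), "\t", "\\t", -1)
			str = strings.Replace(str, "\"", "\\\"", -1)
			return strings.Replace(str, "\\\\\"", "\\\\\\\"", -1)
		},
	}).Parse(doc)
	if err != nil {
		panic(err)
	}

	var tpl bytes.Buffer
	if err := t.Execute(&tpl, sInfo); err != nil {
		panic(err)
	}
	fmt.Println(tpl.String()) // rendered JSON with the description escaped
}

Running it prints the rendered JSON, which is what the registered ReadDoc implementation ultimately serves.
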
@ -1979,7 +1979,7 @@
},
{
"type": "string",
"description": "The value to filter for. You can use [grafana](https://grafana.com/docs/grafana/latest/dashboards/time-range-controls)- or [elasticsearch](https://www.elastic.co/guide/en/elasticsearch/reference/7.3/common-options.html#date-math)-style relative dates for all date fields like `due_date`, `start_date`, `end_date`, etc.",
"description": "The value to filter for.",
"name": "filter_value",
"in": "query"
},
@ -7010,153 +7010,6 @@
}
}
},
"/user/settings/token/caldav": {
"get": {
"security": [
{
"JWTKeyAuth": []
}
],
"description": "Return the IDs and created dates of all caldav tokens for the current user.",
"consumes": [
"application/json"
],
"produces": [
"application/json"
],
"tags": [
"user"
],
"summary": "Returns the caldav tokens for the current user",
"responses": {
"200": {
"description": "OK",
"schema": {
"type": "array",
"items": {
"$ref": "#/definitions/user.Token"
}
}
},
"400": {
"description": "Something's invalid.",
"schema": {
"$ref": "#/definitions/web.HTTPError"
}
},
"404": {
"description": "User does not exist.",
"schema": {
"$ref": "#/definitions/web.HTTPError"
}
},
"500": {
"description": "Internal server error.",
"schema": {
"$ref": "#/definitions/models.Message"
}
}
}
},
"put": {
"security": [
{
"JWTKeyAuth": []
}
],
"description": "Generates a caldav token which can be used for the caldav api. It is not possible to see the token again after it was generated.",
"consumes": [
"application/json"
],
"produces": [
"application/json"
],
"tags": [
"user"
],
"summary": "Generate a caldav token",
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/user.Token"
}
},
"400": {
"description": "Something's invalid.",
"schema": {
"$ref": "#/definitions/web.HTTPError"
}
},
"404": {
"description": "User does not exist.",
"schema": {
"$ref": "#/definitions/web.HTTPError"
}
},
"500": {
"description": "Internal server error.",
"schema": {
"$ref": "#/definitions/models.Message"
}
}
}
}
},
"/user/settings/token/caldav/{id}": {
"get": {
"security": [
{
"JWTKeyAuth": []
}
],
"consumes": [
"application/json"
],
"produces": [
"application/json"
],
"tags": [
"user"
],
"summary": "Delete a caldav token by id",
"parameters": [
{
"type": "integer",
"description": "Token ID",
"name": "id",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/models.Message"
}
},
"400": {
"description": "Something's invalid.",
"schema": {
"$ref": "#/definitions/web.HTTPError"
}
},
"404": {
"description": "User does not exist.",
"schema": {
"$ref": "#/definitions/web.HTTPError"
}
},
"500": {
"description": "Internal server error.",
"schema": {
"$ref": "#/definitions/models.Message"
}
}
}
}
},
"/user/settings/totp": {
"get": {
"security": [
@ -7391,43 +7244,6 @@
}
}
},
"/user/timezones": {
"get": {
"security": [
{
"JWTKeyAuth": []
}
],
"description": "Because available time zones depend on the system Vikunja is running on, this endpoint returns a list of all valid time zones this particular Vikunja instance can handle. The list of time zones is not sorted, you should sort it on the client.",
"consumes": [
"application/json"
],
"produces": [
"application/json"
],
"tags": [
"user"
],
"summary": "Get all available time zones on this vikunja instance",
"responses": {
"200": {
"description": "All available time zones.",
"schema": {
"type": "array",
"items": {
"type": "string"
}
}
},
"500": {
"description": "Internal server error.",
"schema": {
"$ref": "#/definitions/models.Message"
}
}
}
}
},
"/user/token": {
"post": {
"description": "Returns a new valid jwt user token with an extended length.",
@ -7568,9 +7384,6 @@
"background.Image": {
"type": "object",
"properties": {
"blur_hash": {
"type": "string"
},
"id": {
"type": "string"
},
@ -8035,10 +7848,6 @@
"models.List": {
"type": "object",
"properties": {
"background_blur_hash": {
"description": "Contains a very small version of the list background to use as a blurry preview until the actual background is loaded. Check out https://blurha.sh/ to learn how it works.",
"type": "string"
},
"background_information": {
"description": "Holds extra information about the background set since some background providers require attribution or similar. If not null, the background can be accessed at /lists/{listID}/background"
},
@ -8820,9 +8629,7 @@
"minLength": 1
},
"right": {
"description": "The right this team has. 0 = Read only, 1 = Read \u0026 Write, 2 = Admin. See the docs for more details.",
"type": "integer",
"default": 0
"type": "integer"
},
"updated": {
"description": "A timestamp when this relation was last updated. You cannot change this value.",
@ -8853,9 +8660,7 @@
"type": "string"
},
"right": {
"description": "The right this user has. 0 = Read only, 1 = Read \u0026 Write, 2 = Admin. See the docs for more details.",
"type": "integer",
"default": 0
"type": "integer"
},
"updated": {
"description": "A timestamp when this task was last updated. You cannot change this value.",
@ -8989,10 +8794,6 @@
"user.Login": {
"type": "object",
"properties": {
"long_token": {
"description": "If true, the token returned will be valid a lot longer than default. Useful for \"remember me\" style logins.",
"type": "boolean"
},
"password": {
"description": "The password for the user.",
"type": "string"
@ -9053,20 +8854,6 @@
}
}
},
"user.Token": {
"type": "object",
"properties": {
"created": {
"type": "string"
},
"id": {
"type": "integer"
},
"token": {
"type": "string"
}
}
},
"user.User": {
"type": "object",
"properties": {
@ -9112,7 +8899,7 @@
"type": "object",
"properties": {
"avatar_provider": {
"description": "The avatar provider. Valid types are `gravatar` (uses the user email), `upload`, `initials`, `marble` (generates a random avatar for each user), `default`.",
"description": "The avatar provider. Valid types are `gravatar` (uses the user email), `upload`, `initials`, `default`.",
"type": "string"
}
}
@ -9151,10 +8938,6 @@
"description": "If a task is created without a specified list this value should be used. Applies\nto tasks made directly in API and from clients.",
"type": "integer"
},
"home_list_id": {
"description": "Used by frontend. The User List that the Users wants to be displayed inside the Home View",
"type": "integer"
},
"discoverable_by_email": {
"description": "If true, the user can be found when searching for their exact email.",
"type": "boolean"
@ -9167,10 +8950,6 @@
"description": "If enabled, sends email reminders of tasks to the user.",
"type": "boolean"
},
"language": {
"description": "The user's language",
"type": "string"
},
"name": {
"description": "The new name of the current user.",
"type": "string"
@ -9179,10 +8958,6 @@
"description": "If enabled, the user will get an email for their overdue tasks each morning.",
"type": "boolean"
},
"timezone": {
"description": "The user's time zone. Used to send task reminders in the time zone of the user.",
"type": "string"
},
"week_start": {
"description": "The day when the week starts for this user. 0 = sunday, 1 = monday, etc.",
"type": "integer"
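As a rough illustration of the caldav token endpoints described in this spec, a hedged client sketch in Go: the base URL and JWT come from environment variables and are placeholders, and the Token struct simply mirrors the user.Token schema above.

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"os"
)

// Token mirrors the user.Token schema from the spec: the clear-text token is
// only present in the response to the request that generated it.
type Token struct {
	ID      int64  `json:"id"`
	Created string `json:"created"`
	Token   string `json:"token"`
}

func main() {
	base := os.Getenv("VIKUNJA_URL") // e.g. https://vikunja.example.com (placeholder)
	jwt := os.Getenv("VIKUNJA_JWT")  // a JWT obtained from the login endpoint (placeholder)

	// PUT /user/settings/token/caldav generates a new caldav token.
	req, err := http.NewRequest(http.MethodPut, base+"/api/v1/user/settings/token/caldav", nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Bearer "+jwt)

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var t Token
	if err := json.NewDecoder(resp.Body).Decode(&t); err != nil {
		panic(err)
	}
	fmt.Printf("caldav token #%d created %s: %s\n", t.ID, t.Created, t.Token)
}
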
@ -7,8 +7,6 @@ definitions:
type: object
background.Image:
properties:
blur_hash:
type: string
id:
type: string
info:
@ -391,11 +389,6 @@ definitions:
type: object
models.List:
properties:
background_blur_hash:
description: Contains a very small version of the list background to use as
a blurry preview until the actual background is loaded. Check out https://blurha.sh/
to learn how it works.
type: string
background_information:
description: Holds extra information about the background set since some background
providers require attribution or similar. If not null, the background can
@ -1042,9 +1035,6 @@ definitions:
minLength: 1
type: string
right:
default: 0
description: The right this team has. 0 = Read only, 1 = Read & Write, 2 =
Admin. See the docs for more details.
type: integer
updated:
description: A timestamp when this relation was last updated. You cannot change
@ -1070,9 +1060,6 @@ definitions:
description: The full name of the user.
type: string
right:
default: 0
description: The right this user has. 0 = Read only, 1 = Read & Write, 2 =
Admin. See the docs for more details.
type: integer
updated:
description: A timestamp when this task was last updated. You cannot change
@ -1170,10 +1157,6 @@ definitions:
type: object
user.Login:
properties:
long_token:
description: If true, the token returned will be valid a lot longer than default.
Useful for "remember me" style logins.
type: boolean
password:
description: The password for the user.
type: string
@ -1216,15 +1199,6 @@ definitions:
passcode:
type: string
type: object
user.Token:
properties:
created:
type: string
id:
type: integer
token:
type: string
type: object
user.User:
properties:
created:
@ -1261,8 +1235,7 @@ definitions:
properties:
avatar_provider:
description: The avatar provider. Valid types are `gravatar` (uses the user
email), `upload`, `initials`, `marble` (generates a random avatar for each
user), `default`.
email), `upload`, `initials`, `default`.
type: string
type: object
v1.UserDeletionRequestConfirm:
@ -1285,10 +1258,6 @@ definitions:
v1.UserSettings:
properties:
default_list_id:
description: |-
Used by frontend. The User List that the Users wants to be displayed inside the Home View
type: integer
home_list_id:
description: |-
If a task is created without a specified list this value should be used. Applies
to tasks made directly in API and from clients.
@ -1304,9 +1273,6 @@ definitions:
email_reminders_enabled:
description: If enabled, sends email reminders of tasks to the user.
type: boolean
language:
description: The user's language
type: string
name:
description: The new name of the current user.
type: string
@ -1314,10 +1280,6 @@ definitions:
description: If enabled, the user will get an email for their overdue tasks
each morning.
type: boolean
timezone:
description: The user's time zone. Used to send task reminders in the time
zone of the user.
type: string
week_start:
description: The day when the week starts for this user. 0 = sunday, 1 = monday,
etc.
@ -2924,10 +2886,7 @@ paths:
in: query
name: filter_by
type: string
- description: The value to filter for. You can use [grafana](https://grafana.com/docs/grafana/latest/dashboards/time-range-controls)-
or [elasticsearch](https://www.elastic.co/guide/en/elasticsearch/reference/7.3/common-options.html#date-math)-style
relative dates for all date fields like `due_date`, `start_date`, `end_date`,
etc.
- description: The value to filter for.
in: query
name: filter_value
type: string
@ -6098,101 +6057,6 @@ paths:
summary: Change general user settings of the current user.
tags:
- user
/user/settings/token/caldav:
get:
consumes:
- application/json
description: Return the IDs and created dates of all caldav tokens for the current
user.
produces:
- application/json
responses:
"200":
description: OK
schema:
items:
$ref: '#/definitions/user.Token'
type: array
"400":
description: Something's invalid.
schema:
$ref: '#/definitions/web.HTTPError'
"404":
description: User does not exist.
schema:
$ref: '#/definitions/web.HTTPError'
"500":
description: Internal server error.
schema:
$ref: '#/definitions/models.Message'
security:
- JWTKeyAuth: []
summary: Returns the caldav tokens for the current user
tags:
- user
put:
consumes:
- application/json
description: Generates a caldav token which can be used for the caldav api.
It is not possible to see the token again after it was generated.
produces:
- application/json
responses:
"200":
description: OK
schema:
$ref: '#/definitions/user.Token'
"400":
description: Something's invalid.
schema:
$ref: '#/definitions/web.HTTPError'
"404":
description: User does not exist.
schema:
$ref: '#/definitions/web.HTTPError'
"500":
description: Internal server error.
schema:
$ref: '#/definitions/models.Message'
security:
- JWTKeyAuth: []
summary: Generate a caldav token
tags:
- user
/user/settings/token/caldav/{id}:
get:
consumes:
- application/json
parameters:
- description: Token ID
in: path
name: id
required: true
type: integer
produces:
- application/json
responses:
"200":
description: OK
schema:
$ref: '#/definitions/models.Message'
"400":
description: Something's invalid.
schema:
$ref: '#/definitions/web.HTTPError'
"404":
description: User does not exist.
schema:
$ref: '#/definitions/web.HTTPError'
"500":
description: Internal server error.
schema:
$ref: '#/definitions/models.Message'
security:
- JWTKeyAuth: []
summary: Delete a caldav token by id
tags:
- user
/user/settings/totp:
get:
consumes:
@ -6344,32 +6208,6 @@ paths:
summary: Totp QR Code
tags:
- user
/user/timezones:
get:
consumes:
- application/json
description: Because available time zones depend on the system Vikunja is running
on, this endpoint returns a list of all valid time zones this particular Vikunja
instance can handle. The list of time zones is not sorted, you should sort
it on the client.
produces:
- application/json
responses:
"200":
description: All available time zones.
schema:
items:
type: string
type: array
"500":
description: Internal server error.
schema:
$ref: '#/definitions/models.Message'
security:
- JWTKeyAuth: []
summary: Get all available time zones on this vikunja instance
tags:
- user
/user/token:
post:
consumes:

@ -1,40 +0,0 @@
// Vikunja is a to-do list application to facilitate your life.
// Copyright 2018-2021 Vikunja and contributors. All rights reserved.
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public Licensee as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public Licensee for more details.
//
// You should have received a copy of the GNU Affero General Public Licensee
// along with this program. If not, see <https://www.gnu.org/licenses/>.

package user

import "code.vikunja.io/api/pkg/db"

func GenerateNewCaldavToken(u *User) (token *Token, err error) {
	s := db.NewSession()
	defer s.Close()

	return generateHashedToken(s, u, TokenCaldavAuth)
}

func GetCaldavTokens(u *User) (tokens []*Token, err error) {
	s := db.NewSession()
	defer s.Close()

	return getTokensForKind(s, u, TokenCaldavAuth)
}

func DeleteCaldavTokenByID(u *User, id int64) error {
	s := db.NewSession()
	defer s.Close()

	return removeTokenByID(s, u, TokenCaldavAuth, id)
}
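A small sketch of why the clear-text caldav token from the file above can only be shown once, assuming HashPassword is bcrypt-based as its name suggests (only the hash would be stored in user_tokens):

package main

import (
	"fmt"

	"golang.org/x/crypto/bcrypt"
)

// generate mimics generateHashedToken: the random clear-text value is handed
// to the caller once, while only its hash is persisted.
func generate(clear string) (stored string, err error) {
	h, err := bcrypt.GenerateFromPassword([]byte(clear), bcrypt.DefaultCost)
	return string(h), err
}

func main() {
	clear := "random-64-char-token" // stands in for utils.MakeRandomString(tokenSize)
	stored, err := generate(clear)
	if err != nil {
		panic(err)
	}
	// Later caldav requests can be checked against the stored hash, but the
	// clear text itself cannot be recovered from it.
	fmt.Println(bcrypt.CompareHashAndPassword([]byte(stored), []byte(clear)) == nil) // true
}
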
@ -58,10 +58,10 @@ func notifyUsersScheduledForDeletion() {

		var number = 2
		if user.DeletionLastReminderSent.IsZero() {
			number = 3
			number = 1
		}
		if user.DeletionScheduledAt.Sub(user.DeletionLastReminderSent) < time.Hour*24 {
			number = 1
			number = 3
		}

		log.Debugf("Notifying user %d of the deletion of their account...", user.ID)
@ -87,7 +87,7 @@ func notifyUsersScheduledForDeletion() {

// RequestDeletion creates a user deletion confirm token and sends a notification to the user
func RequestDeletion(s *xorm.Session, user *User) (err error) {
	token, err := generateToken(s, user, TokenAccountDeletion)
	token, err := generateNewToken(s, user, TokenAccountDeletion)
	if err != nil {
		return err
	}
@ -34,19 +34,17 @@ const (
	TokenPasswordReset
	TokenEmailConfirm
	TokenAccountDeletion
	TokenCaldavAuth

	tokenSize = 64
)

// Token is a token a user can use to do things like verify their email or resetting their password
type Token struct {
	ID             int64     `xorm:"bigint autoincr not null unique pk" json:"id"`
	UserID         int64     `xorm:"not null" json:"-"`
	Token          string    `xorm:"varchar(450) not null index" json:"-"`
	ClearTextToken string    `xorm:"-" json:"token"`
	Kind           TokenKind `xorm:"not null" json:"-"`
	Created        time.Time `xorm:"created not null" json:"created"`
	ID      int64     `xorm:"bigint autoincr not null unique pk"`
	UserID  int64     `xorm:"not null"`
	Token   string    `xorm:"varchar(450) not null index"`
	Kind    TokenKind `xorm:"not null"`
	Created time.Time `xorm:"created not null"`
}

// TableName returns the real table name for user tokens
@ -54,28 +52,12 @@ func (t *Token) TableName() string {
	return "user_tokens"
}

func genToken(u *User, kind TokenKind) *Token {
	return &Token{
func generateNewToken(s *xorm.Session, u *User, kind TokenKind) (token *Token, err error) {
	token = &Token{
		UserID: u.ID,
		Kind:   kind,
		Token:  utils.MakeRandomString(tokenSize),
	}
}

func generateToken(s *xorm.Session, u *User, kind TokenKind) (token *Token, err error) {
	token = genToken(u, kind)

	_, err = s.Insert(token)
	return
}

func generateHashedToken(s *xorm.Session, u *User, kind TokenKind) (token *Token, err error) {
	token = genToken(u, kind)
	token.ClearTextToken = token.Token
	token.Token, err = HashPassword(token.ClearTextToken)
	if err != nil {
		return nil, err
	}

	_, err = s.Insert(token)
	return
@ -92,26 +74,12 @@ func getToken(s *xorm.Session, token string, kind TokenKind) (t *Token, err erro
	return
}

func getTokensForKind(s *xorm.Session, u *User, kind TokenKind) (tokens []*Token, err error) {
	tokens = []*Token{}

	err = s.Where("kind = ? AND user_id = ?", kind, u.ID).
		Find(&tokens)
	return
}

func removeTokens(s *xorm.Session, u *User, kind TokenKind) (err error) {
	_, err = s.Where("user_id = ? AND kind = ?", u.ID, kind).
		Delete(&Token{})
	return
}

func removeTokenByID(s *xorm.Session, u *User, kind TokenKind, id int64) (err error) {
	_, err = s.Where("id = ? AND user_id = ? AND kind = ?", id, u.ID, kind).
		Delete(&Token{})
	return
}

// RegisterTokenCleanupCron registers a cron function to clean up all password reset tokens older than 24 hours
func RegisterTokenCleanupCron() {
	const logPrefix = "[User Token Cleanup Cron] "
@ -63,7 +63,7 @@ func UpdateEmail(s *xorm.Session, update *EmailUpdate) (err error) {
	}

	update.User.Status = StatusEmailConfirmationRequired
	token, err := generateToken(s, update.User, TokenEmailConfirm)
	token, err := generateNewToken(s, update.User, TokenEmailConfirm)
	if err != nil {
		return
	}
@ -44,8 +44,6 @@ type Login struct {
	Password string `json:"password"`
	// The totp passcode of a user. Only needs to be provided when enabled.
	TOTPPasscode string `json:"totp_passcode"`
	// If true, the token returned will be valid a lot longer than default. Useful for "remember me" style logins.
	LongToken bool `json:"long_token"`
}

type Status int
@ -90,15 +88,12 @@ type User struct {
	Issuer  string `xorm:"text null" json:"-"`
	Subject string `xorm:"text null" json:"-"`

	EmailRemindersEnabled        bool   `xorm:"bool default true" json:"-"`
	DiscoverableByName           bool   `xorm:"bool default false index" json:"-"`
	DiscoverableByEmail          bool   `xorm:"bool default false index" json:"-"`
	OverdueTasksRemindersEnabled bool   `xorm:"bool default true index" json:"-"`
	DefaultListID                int64  `xorm:"bigint null index" json:"-"`
	HomeListID                   int64  `xorm:"bigint null index" json:"-"`
	WeekStart                    int    `xorm:"null" json:"-"`
	Language                     string `xorm:"varchar(50) null" json:"-"`
	Timezone                     string `xorm:"varchar(255) null" json:"-"`
	EmailRemindersEnabled        bool  `xorm:"bool default true" json:"-"`
	DiscoverableByName           bool  `xorm:"bool default false index" json:"-"`
	DiscoverableByEmail          bool  `xorm:"bool default false index" json:"-"`
	OverdueTasksRemindersEnabled bool  `xorm:"bool default true index" json:"-"`
	DefaultListID                int64 `xorm:"bigint null index" json:"-"`
	WeekStart                    int   `xorm:"null" json:"-"`

	DeletionScheduledAt      time.Time `xorm:"datetime null" json:"-"`
	DeletionLastReminderSent time.Time `xorm:"datetime null" json:"-"`
@ -325,7 +320,7 @@ func CheckUserCredentials(s *xorm.Session, u *Login) (*User, error) {
		if IsErrWrongUsernameOrPassword(err) {
			handleFailedPassword(user)
		}
		return user, err
		return nil, err
	}

	return user, nil
@ -460,22 +455,11 @@ func UpdateUser(s *xorm.Session, user *User) (updatedUser *User, err error) {
		if user.AvatarProvider != "default" &&
			user.AvatarProvider != "gravatar" &&
			user.AvatarProvider != "initials" &&
			user.AvatarProvider != "upload" &&
			user.AvatarProvider != "marble" {
			user.AvatarProvider != "upload" {
			return updatedUser, &ErrInvalidAvatarProvider{AvatarProvider: user.AvatarProvider}
		}
	}

	// Check if we have a valid time zone
	if user.Timezone == "" {
		user.Timezone = config.GetTimeZone().String()
	}

	_, err = time.LoadLocation(user.Timezone)
	if err != nil {
		return
	}

	// Update it
	_, err = s.
		ID(user.ID).
@ -491,10 +475,7 @@ func UpdateUser(s *xorm.Session, user *User) (updatedUser *User, err error) {
			"discoverable_by_email",
			"overdue_tasks_reminders_enabled",
			"default_list_id",
			"home_list_id",
			"week_start",
			"language",
			"timezone",
		).
		Update(user)
	if err != nil {
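Purely as an illustration of the avatar-provider check in the UpdateUser hunk above (not the project's actual code), the chained comparisons could also be expressed as a set lookup; marble is the provider the older branch drops:

package main

import "fmt"

// validAvatarProviders mirrors the checks in UpdateUser; the main branch
// additionally accepts "marble".
var validAvatarProviders = map[string]bool{
	"default":  true,
	"gravatar": true,
	"initials": true,
	"upload":   true,
	"marble":   true,
}

func main() {
	for _, p := range []string{"marble", "ldap"} {
		fmt.Printf("%q valid: %v\n", p, validAvatarProviders[p])
	}
}
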
@ -81,7 +81,7 @@ func CreateUser(s *xorm.Session, user *User) (newUser *User, err error) {
	}

	user.Status = StatusEmailConfirmationRequired
	token, err := generateToken(s, user, TokenEmailConfirm)
	token, err := generateNewToken(s, user, TokenEmailConfirm)
	if err != nil {
		return nil, err
	}

@ -112,7 +112,7 @@ func RequestUserPasswordResetTokenByEmail(s *xorm.Session, tr *PasswordTokenRequ

// RequestUserPasswordResetToken sends a user a password reset email.
func RequestUserPasswordResetToken(s *xorm.Session, user *User) (err error) {
	token, err := generateToken(s, user, TokenPasswordReset)
	token, err := generateNewToken(s, user, TokenPasswordReset)
	if err != nil {
		return
	}

@ -14,7 +14,6 @@
// You should have received a copy of the GNU Affero General Public Licensee
// along with this program. If not, see <https://www.gnu.org/licenses/>.

//go:build !windows
// +build !windows

package utils
@ -54,7 +54,7 @@ func WriteFilesToZip(files map[int64]io.ReadCloser, wr *zip.Writer) (err error)
		}
		_, err = io.Copy(w, file)
		if err != nil {
			return fmt.Errorf("error writing file %d: %w", fid, err)
			return fmt.Errorf("error writing file %d: %s", fid, err)
		}
		_ = file.Close()
	}
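The %w vs %s change in this hunk is about error wrapping; a tiny sketch showing that only the %w form keeps the underlying error visible to errors.Is:

package main

import (
	"errors"
	"fmt"
	"io"
)

func main() {
	wrapped := fmt.Errorf("error writing file %d: %w", 42, io.ErrClosedPipe)
	formatted := fmt.Errorf("error writing file %d: %s", 42, io.ErrClosedPipe)

	// Only the %w version keeps the original error in the chain.
	fmt.Println(errors.Is(wrapped, io.ErrClosedPipe))   // true
	fmt.Println(errors.Is(formatted, io.ErrClosedPipe)) // false
}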