Compare commits
108 Commits
e9f2e3a5a0
...
evan/api-m
| Author | SHA1 | Date | |
|---|---|---|---|
| 75c872264f | |||
| 0930054847 | |||
| aa812c6917 | |||
| 8ec3349b7c | |||
| decc3f0195 | |||
| b13f9b362c | |||
| 6c2c4f6b8b | |||
| d38392ac9a | |||
| 63ad73755d | |||
| 784e53c557 | |||
| 9ed63b2695 | |||
| 27e651c4f5 | |||
| 7e96e41ba4 | |||
| ee1d62858b | |||
| 4d133994ab | |||
| ba919bbde4 | |||
| 197a1577c2 | |||
| fd9afe86b0 | |||
| 93707ff513 | |||
| 75e0228fe0 | |||
| b1b8eb297e | |||
| 7c47f2d2eb | |||
| c46dcb440d | |||
| 5cb17bace7 | |||
| ecf77fd105 | |||
| e289d1a29b | |||
| 3e9a193d08 | |||
| 4306d86080 | |||
| d40f8fc375 | |||
| c84bc2522e | |||
| 0704b5d650 | |||
| 4c1789fc16 | |||
| 082f7e926c | |||
| 6031cf06d4 | |||
| 8fd2aeb6a2 | |||
| bc076a4f44 | |||
| f9f23f2d3f | |||
| 3cff965393 | |||
| 7937890acd | |||
| 938dd69e5e | |||
| 7c92c346fa | |||
| 456b6e457c | |||
| d304421798 | |||
| 0fe52bc541 | |||
| 49f3d53170 | |||
| 57f81e5dd7 | |||
| 162adfbe16 | |||
| e2cfdb3a0c | |||
| acf4119d9a | |||
| f6dd8cee50 | |||
| a981d98ba5 | |||
| a193f97d29 | |||
| 841b29c425 | |||
| 3d61d0f5ef | |||
| 5e388730a5 | |||
| 0a1dfeab65 | |||
| d4c8e4d2da | |||
| bbd3a00102 | |||
| 3a633235ea | |||
| 9809a09d2e | |||
| f37bff365f | |||
| 77527bfb05 | |||
| 8de6fed5df | |||
| f9277d3b32 | |||
| db9629a618 | |||
| 546600db93 | |||
| 7c6acad689 | |||
| 5482899075 | |||
| 5a64ff7029 | |||
| a7ecb1a6f8 | |||
| 2d206826d6 | |||
| f1414e3e4e | |||
| 8e81acd381 | |||
| 6c6a6dd329 | |||
| c4602c8c3b | |||
| fe81b57a34 | |||
| a69b7452ce | |||
| 75ed394f8d | |||
| 803c187a00 | |||
| da1baeb4cd | |||
| 5865fe3c13 | |||
| 4a5464853b | |||
| 622dcd5702 | |||
| a86e2520ef | |||
| b1cfd16627 | |||
| 015ca30ac5 | |||
| 9792a6ff19 | |||
| 8c4c1022c3 | |||
| fd8b6bcdc1 | |||
| 0bbd5986cb | |||
| 45cef2f4af | |||
| e33a64db96 | |||
| 35ca021649 | |||
| 760b9ca0a0 | |||
| c9edcd8f5a | |||
| 2d63a7d109 | |||
| 9bd6bf7727 | |||
| f0a2d2cf69 | |||
| a65750ae21 | |||
| 14b930781e | |||
| 8a8f12c07a | |||
| c5b181dda4 | |||
| d3d89b36f6 | |||
| a69f20d5a9 | |||
| c66a6c8499 | |||
| 3057b86002 | |||
| 2c240f2f5c | |||
| 39fd7ab1f1 |
14
.djlintrc
Normal file
14
.djlintrc
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
{
|
||||||
|
"profile": "golang",
|
||||||
|
"indent": 2,
|
||||||
|
"close_void_tags": true,
|
||||||
|
"format_attribute_template_tags": true,
|
||||||
|
"format_js": true,
|
||||||
|
"js": {
|
||||||
|
"indent_size": 2
|
||||||
|
},
|
||||||
|
"format_css": true,
|
||||||
|
"css": {
|
||||||
|
"indent_size": 2
|
||||||
|
}
|
||||||
|
}
|
||||||
29
.drone.yml
29
.drone.yml
@@ -1,33 +1,34 @@
|
|||||||
kind: pipeline
|
kind: pipeline
|
||||||
type: kubernetes
|
type: docker
|
||||||
name: default
|
name: default
|
||||||
|
|
||||||
|
trigger:
|
||||||
|
branch:
|
||||||
|
- master
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
# Unit Tests
|
# Unit Tests
|
||||||
- name: unit test
|
- name: tests
|
||||||
image: golang
|
image: golang
|
||||||
commands:
|
commands:
|
||||||
- make tests_unit
|
- make tests
|
||||||
|
|
||||||
# Integration Tests (Every Month)
|
# Fetch tags
|
||||||
- name: integration test
|
- name: fetch tags
|
||||||
image: golang
|
image: alpine/git
|
||||||
commands:
|
commands:
|
||||||
- make tests_integration
|
- git fetch --tags
|
||||||
when:
|
|
||||||
event:
|
|
||||||
- cron
|
|
||||||
cron:
|
|
||||||
- integration-test
|
|
||||||
|
|
||||||
# Publish Dev Docker Image
|
# Publish docker image
|
||||||
- name: publish_docker
|
- name: publish docker
|
||||||
image: plugins/docker
|
image: plugins/docker
|
||||||
settings:
|
settings:
|
||||||
repo: gitea.va.reichard.io/evan/antholume
|
repo: gitea.va.reichard.io/evan/antholume
|
||||||
registry: gitea.va.reichard.io
|
registry: gitea.va.reichard.io
|
||||||
tags:
|
tags:
|
||||||
- dev
|
- dev
|
||||||
|
custom_dns:
|
||||||
|
- 8.8.8.8
|
||||||
username:
|
username:
|
||||||
from_secret: docker_username
|
from_secret: docker_username
|
||||||
password:
|
password:
|
||||||
|
|||||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -3,3 +3,5 @@ TODO.md
|
|||||||
data/
|
data/
|
||||||
build/
|
build/
|
||||||
.direnv/
|
.direnv/
|
||||||
|
cover.html
|
||||||
|
node_modules
|
||||||
|
|||||||
3
.prettierrc
Normal file
3
.prettierrc
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
{
|
||||||
|
"plugins": ["prettier-plugin-go-template"]
|
||||||
|
}
|
||||||
75
AGENTS.md
Normal file
75
AGENTS.md
Normal file
@@ -0,0 +1,75 @@
|
|||||||
|
# AnthoLume Agent Guide
|
||||||
|
|
||||||
|
## 1) Working Style
|
||||||
|
|
||||||
|
- Keep changes targeted.
|
||||||
|
- Do not refactor broadly unless the task requires it.
|
||||||
|
- Validate only what is relevant to the change when practical.
|
||||||
|
- If a fix will require substantial refactoring or wide-reaching changes, stop and ask first.
|
||||||
|
|
||||||
|
## 2) Hard Rules
|
||||||
|
|
||||||
|
- Never edit generated files directly.
|
||||||
|
- Never write ad-hoc SQL.
|
||||||
|
- For Go error wrapping, use `fmt.Errorf("message: %w", err)`.
|
||||||
|
- Do not use `github.com/pkg/errors`.
|
||||||
|
|
||||||
|
## 3) Generated Code
|
||||||
|
|
||||||
|
### OpenAPI
|
||||||
|
Edit:
|
||||||
|
- `api/v1/openapi.yaml`
|
||||||
|
|
||||||
|
Regenerate:
|
||||||
|
- `go generate ./api/v1/generate.go`
|
||||||
|
- `cd frontend && bun run generate:api`
|
||||||
|
|
||||||
|
Notes:
|
||||||
|
- If you add response headers in `api/v1/openapi.yaml` (for example `Set-Cookie`), `oapi-codegen` will generate typed response header structs in `api/v1/api.gen.go`; update the handler response values to populate those headers explicitly.
|
||||||
|
|
||||||
|
Examples of generated files:
|
||||||
|
- `api/v1/api.gen.go`
|
||||||
|
- `frontend/src/generated/**/*.ts`
|
||||||
|
|
||||||
|
### SQLC
|
||||||
|
Edit:
|
||||||
|
- `database/query.sql`
|
||||||
|
|
||||||
|
Regenerate:
|
||||||
|
- `sqlc generate`
|
||||||
|
|
||||||
|
## 4) Backend / Assets
|
||||||
|
|
||||||
|
### Common commands
|
||||||
|
- Dev server: `make dev`
|
||||||
|
- Direct dev run: `CONFIG_PATH=./data DATA_PATH=./data REGISTRATION_ENABLED=true go run main.go serve`
|
||||||
|
- Tests: `make tests`
|
||||||
|
- Tailwind asset build: `make build_tailwind`
|
||||||
|
|
||||||
|
### Notes
|
||||||
|
- The Go server embeds `templates/*` and `assets/*`.
|
||||||
|
- Root Tailwind output is built to `assets/style.css`.
|
||||||
|
- Be mindful of whether a change affects the embedded server-rendered app, the React frontend, or both.
|
||||||
|
- SQLite timestamps are stored as RFC3339 strings (usually with a trailing `Z`); prefer `parseTime` / `parseTimePtr` instead of ad-hoc `time.Parse` layouts.
|
||||||
|
|
||||||
|
## 5) Frontend
|
||||||
|
|
||||||
|
For frontend-specific implementation notes and commands, also read:
|
||||||
|
- `frontend/AGENTS.md`
|
||||||
|
|
||||||
|
## 6) Regeneration Summary
|
||||||
|
|
||||||
|
- Go API: `go generate ./api/v1/generate.go`
|
||||||
|
- Frontend API client: `cd frontend && bun run generate:api`
|
||||||
|
- SQLC: `sqlc generate`
|
||||||
|
|
||||||
|
## 7) Updating This File
|
||||||
|
|
||||||
|
After completing a task, update this `AGENTS.md` if you learned something general that would help future agents.
|
||||||
|
|
||||||
|
Rules for updates:
|
||||||
|
- Add only repository-wide guidance.
|
||||||
|
- Do not add one-off task history.
|
||||||
|
- Keep updates short, concrete, and organized.
|
||||||
|
- Place new guidance in the most relevant section.
|
||||||
|
- If the new information would help future agents avoid repeated mistakes, add it proactively.
|
||||||
21
Dockerfile
21
Dockerfile
@@ -1,23 +1,26 @@
|
|||||||
# Certificate Store
|
# Certificates & Timezones
|
||||||
FROM alpine AS certs
|
FROM alpine AS alpine
|
||||||
RUN apk update && apk add ca-certificates
|
RUN apk update && apk add --no-cache ca-certificates tzdata
|
||||||
|
|
||||||
# Build Image
|
# Build Image
|
||||||
FROM golang:1.20 AS build
|
FROM golang:1.24 AS build
|
||||||
|
|
||||||
|
# Create Package Directory
|
||||||
|
RUN mkdir -p /opt/antholume
|
||||||
|
|
||||||
# Copy Source
|
# Copy Source
|
||||||
WORKDIR /src
|
WORKDIR /src
|
||||||
COPY . .
|
COPY . .
|
||||||
|
|
||||||
# Create Package Directory
|
|
||||||
RUN mkdir -p /opt/antholume
|
|
||||||
|
|
||||||
# Compile
|
# Compile
|
||||||
RUN go build -o /opt/antholume/server
|
RUN go build \
|
||||||
|
-ldflags "-X reichard.io/antholume/config.version=`git describe --tags`" \
|
||||||
|
-o /opt/antholume/server
|
||||||
|
|
||||||
# Create Image
|
# Create Image
|
||||||
FROM busybox:1.36
|
FROM busybox:1.36
|
||||||
COPY --from=certs /etc/ssl/certs /etc/ssl/certs
|
COPY --from=alpine /etc/ssl/certs /etc/ssl/certs
|
||||||
|
COPY --from=alpine /usr/share/zoneinfo /usr/share/zoneinfo
|
||||||
COPY --from=build /opt/antholume /opt/antholume
|
COPY --from=build /opt/antholume /opt/antholume
|
||||||
WORKDIR /opt/antholume
|
WORKDIR /opt/antholume
|
||||||
EXPOSE 8585
|
EXPOSE 8585
|
||||||
|
|||||||
@@ -1,9 +1,9 @@
|
|||||||
# Certificate Store
|
# Certificates & Timezones
|
||||||
FROM alpine AS certs
|
FROM alpine AS alpine
|
||||||
RUN apk update && apk add ca-certificates
|
RUN apk update && apk add --no-cache ca-certificates tzdata
|
||||||
|
|
||||||
# Build Image
|
# Build Image
|
||||||
FROM --platform=$BUILDPLATFORM golang:1.20 AS build
|
FROM --platform=$BUILDPLATFORM golang:1.21 AS build
|
||||||
|
|
||||||
# Create Package Directory
|
# Create Package Directory
|
||||||
WORKDIR /src
|
WORKDIR /src
|
||||||
@@ -15,11 +15,14 @@ ARG TARGETARCH
|
|||||||
RUN --mount=target=. \
|
RUN --mount=target=. \
|
||||||
--mount=type=cache,target=/root/.cache/go-build \
|
--mount=type=cache,target=/root/.cache/go-build \
|
||||||
--mount=type=cache,target=/go/pkg \
|
--mount=type=cache,target=/go/pkg \
|
||||||
GOOS=$TARGETOS GOARCH=$TARGETARCH go build -o /opt/antholume/server
|
GOOS=$TARGETOS GOARCH=$TARGETARCH go build \
|
||||||
|
-ldflags "-X reichard.io/antholume/config.version=`git describe --tags`" \
|
||||||
|
-o /opt/antholume/server
|
||||||
|
|
||||||
# Create Image
|
# Create Image
|
||||||
FROM busybox:1.36
|
FROM busybox:1.36
|
||||||
COPY --from=certs /etc/ssl/certs /etc/ssl/certs
|
COPY --from=alpine /etc/ssl/certs /etc/ssl/certs
|
||||||
|
COPY --from=alpine /usr/share/zoneinfo /usr/share/zoneinfo
|
||||||
COPY --from=build /opt/antholume /opt/antholume
|
COPY --from=build /opt/antholume /opt/antholume
|
||||||
WORKDIR /opt/antholume
|
WORKDIR /opt/antholume
|
||||||
EXPOSE 8585
|
EXPOSE 8585
|
||||||
|
|||||||
29
Makefile
29
Makefile
@@ -3,10 +3,10 @@ build_local: build_tailwind
|
|||||||
rm -r ./build || true
|
rm -r ./build || true
|
||||||
mkdir -p ./build
|
mkdir -p ./build
|
||||||
|
|
||||||
env GOOS=linux GOARCH=amd64 go build -o ./build/server_linux_amd64
|
env GOOS=linux GOARCH=amd64 go build -ldflags "-X reichard.io/antholume/config.version=`git describe --tags`" -o ./build/server_linux_amd64
|
||||||
env GOOS=linux GOARCH=arm64 go build -o ./build/server_linux_arm64
|
env GOOS=linux GOARCH=arm64 go build -ldflags "-X reichard.io/antholume/config.version=`git describe --tags`" -o ./build/server_linux_arm64
|
||||||
env GOOS=darwin GOARCH=arm64 go build -o ./build/server_darwin_arm64
|
env GOOS=darwin GOARCH=arm64 go build -ldflags "-X reichard.io/antholume/config.version=`git describe --tags`" -o ./build/server_darwin_arm64
|
||||||
env GOOS=darwin GOARCH=amd64 go build -o ./build/server_darwin_amd64
|
env GOOS=darwin GOARCH=amd64 go build -ldflags "-X reichard.io/antholume/config.version=`git describe --tags`" -o ./build/server_darwin_amd64
|
||||||
|
|
||||||
docker_build_local: build_tailwind
|
docker_build_local: build_tailwind
|
||||||
docker build -t antholume:latest .
|
docker build -t antholume:latest .
|
||||||
@@ -27,13 +27,22 @@ docker_build_release_latest: build_tailwind
|
|||||||
--push .
|
--push .
|
||||||
|
|
||||||
build_tailwind:
|
build_tailwind:
|
||||||
tailwind build -o ./assets/style.css --minify
|
tailwindcss build -o ./assets/style.css --minify
|
||||||
|
|
||||||
|
dev: build_tailwind
|
||||||
|
GIN_MODE=release \
|
||||||
|
CONFIG_PATH=./data \
|
||||||
|
DATA_PATH=./data \
|
||||||
|
SEARCH_ENABLED=true \
|
||||||
|
REGISTRATION_ENABLED=true \
|
||||||
|
COOKIE_SECURE=false \
|
||||||
|
COOKIE_AUTH_KEY=1234 \
|
||||||
|
LOG_LEVEL=debug go run main.go serve
|
||||||
|
|
||||||
clean:
|
clean:
|
||||||
rm -rf ./build
|
rm -rf ./build
|
||||||
|
|
||||||
tests_integration:
|
tests:
|
||||||
go test -v -tags=integration -coverpkg=./... ./metadata
|
SET_TEST=set_val go test -coverpkg=./... ./... -coverprofile=./cover.out
|
||||||
|
go tool cover -html=./cover.out -o ./cover.html
|
||||||
tests_unit:
|
rm ./cover.out
|
||||||
SET_TEST=set_val go test -v -coverpkg=./... ./...
|
|
||||||
|
|||||||
16
README.md
16
README.md
@@ -64,6 +64,8 @@ The OPDS API endpoint is located at: `http(s)://<SERVER>/api/opds`
|
|||||||
|
|
||||||
### Quick Start
|
### Quick Start
|
||||||
|
|
||||||
|
**NOTE**: If you're accessing your instance over HTTP (not HTTPS), you must set `COOKIE_SECURE=false`, otherwise you will not be able to login.
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Make Data Directory
|
# Make Data Directory
|
||||||
mkdir -p antholume_data
|
mkdir -p antholume_data
|
||||||
@@ -71,6 +73,7 @@ mkdir -p antholume_data
|
|||||||
# Run Server
|
# Run Server
|
||||||
docker run \
|
docker run \
|
||||||
-p 8585:8585 \
|
-p 8585:8585 \
|
||||||
|
-e COOKIE_SECURE=false \
|
||||||
-e REGISTRATION_ENABLED=true \
|
-e REGISTRATION_ENABLED=true \
|
||||||
-v ./antholume_data:/config \
|
-v ./antholume_data:/config \
|
||||||
-v ./antholume_data:/data \
|
-v ./antholume_data:/data \
|
||||||
@@ -82,7 +85,7 @@ The service is now accessible at: `http://localhost:8585`. I recommend registeri
|
|||||||
### Configuration
|
### Configuration
|
||||||
|
|
||||||
| Environment Variable | Default Value | Description |
|
| Environment Variable | Default Value | Description |
|
||||||
| -------------------- | ------------- | ------------------------------------------------------------------- |
|
| -------------------- | ------------- | -------------------------------------------------------------------------- |
|
||||||
| DATABASE_TYPE | SQLite | Currently only "SQLite" is supported |
|
| DATABASE_TYPE | SQLite | Currently only "SQLite" is supported |
|
||||||
| DATABASE_NAME | antholume | The database name, or in SQLite's case, the filename |
|
| DATABASE_NAME | antholume | The database name, or in SQLite's case, the filename |
|
||||||
| CONFIG_PATH | /config | Directory where to store SQLite's DB |
|
| CONFIG_PATH | /config | Directory where to store SQLite's DB |
|
||||||
@@ -90,7 +93,8 @@ The service is now accessible at: `http://localhost:8585`. I recommend registeri
|
|||||||
| LISTEN_PORT | 8585 | Port the server listens at |
|
| LISTEN_PORT | 8585 | Port the server listens at |
|
||||||
| LOG_LEVEL | info | Set server log level |
|
| LOG_LEVEL | info | Set server log level |
|
||||||
| REGISTRATION_ENABLED | false | Whether to allow registration (applies to both WebApp & KOSync API) |
|
| REGISTRATION_ENABLED | false | Whether to allow registration (applies to both WebApp & KOSync API) |
|
||||||
| COOKIE_SESSION_KEY | <EMPTY> | Optional secret cookie session key (auto generated if not provided) |
|
| COOKIE_AUTH_KEY | <EMPTY> | Optional secret cookie authentication key (auto generated if not provided) |
|
||||||
|
| COOKIE_ENC_KEY | <EMPTY> | Optional secret cookie encryption key (16 or 32 bytes) |
|
||||||
| COOKIE_SECURE | true | Set Cookie `Secure` attribute (i.e. only works over HTTPS) |
|
| COOKIE_SECURE | true | Set Cookie `Secure` attribute (i.e. only works over HTTPS) |
|
||||||
| COOKIE_HTTP_ONLY | true | Set Cookie `HttpOnly` attribute (i.e. inacessible via JavaScript) |
|
| COOKIE_HTTP_ONLY | true | Set Cookie `HttpOnly` attribute (i.e. inacessible via JavaScript) |
|
||||||
|
|
||||||
@@ -114,13 +118,19 @@ See documentation in the `client` subfolder: [SyncNinja](https://gitea.va.reicha
|
|||||||
|
|
||||||
## Development
|
## Development
|
||||||
|
|
||||||
SQLC Generation (v1.21.0):
|
SQLC Generation (v1.26.0):
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
go install github.com/sqlc-dev/sqlc/cmd/sqlc@latest
|
go install github.com/sqlc-dev/sqlc/cmd/sqlc@latest
|
||||||
~/go/bin/sqlc generate
|
~/go/bin/sqlc generate
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Goose Migrations:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
go install github.com/pressly/goose/v3/cmd/goose@latest
|
||||||
|
```
|
||||||
|
|
||||||
Run Development:
|
Run Development:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
|
|||||||
415
api/api.go
415
api/api.go
@@ -1,14 +1,15 @@
|
|||||||
package api
|
package api
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"crypto/rand"
|
"context"
|
||||||
"embed"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"html/template"
|
"html/template"
|
||||||
"io/fs"
|
"io/fs"
|
||||||
"net/http"
|
"net/http"
|
||||||
|
"path"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"strings"
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
"github.com/gin-contrib/multitemplate"
|
"github.com/gin-contrib/multitemplate"
|
||||||
"github.com/gin-contrib/sessions"
|
"github.com/gin-contrib/sessions"
|
||||||
@@ -16,213 +17,357 @@ import (
|
|||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
"github.com/microcosm-cc/bluemonday"
|
"github.com/microcosm-cc/bluemonday"
|
||||||
log "github.com/sirupsen/logrus"
|
log "github.com/sirupsen/logrus"
|
||||||
"reichard.io/bbank/config"
|
"reichard.io/antholume/config"
|
||||||
"reichard.io/bbank/database"
|
"reichard.io/antholume/database"
|
||||||
|
"reichard.io/antholume/utils"
|
||||||
)
|
)
|
||||||
|
|
||||||
type API struct {
|
type API struct {
|
||||||
Router *gin.Engine
|
db *database.DBManager
|
||||||
Config *config.Config
|
cfg *config.Config
|
||||||
DB *database.DBManager
|
assets fs.FS
|
||||||
HTMLPolicy *bluemonday.Policy
|
httpServer *http.Server
|
||||||
Assets *embed.FS
|
templates map[string]*template.Template
|
||||||
|
userAuthCache map[string]string
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewApi(db *database.DBManager, c *config.Config, assets embed.FS) *API {
|
var htmlPolicy = bluemonday.StrictPolicy()
|
||||||
|
|
||||||
|
func NewApi(db *database.DBManager, c *config.Config, assets fs.FS) *API {
|
||||||
api := &API{
|
api := &API{
|
||||||
HTMLPolicy: bluemonday.StrictPolicy(),
|
db: db,
|
||||||
Router: gin.Default(),
|
cfg: c,
|
||||||
Config: c,
|
assets: assets,
|
||||||
DB: db,
|
templates: make(map[string]*template.Template),
|
||||||
Assets: &assets,
|
userAuthCache: make(map[string]string),
|
||||||
}
|
}
|
||||||
|
|
||||||
// Assets & Web App Templates
|
// Create router
|
||||||
assetsDir, _ := fs.Sub(assets, "assets")
|
router := gin.New()
|
||||||
api.Router.StaticFS("/assets", http.FS(assetsDir))
|
|
||||||
|
|
||||||
// Generate Secure Token
|
// Add server
|
||||||
|
api.httpServer = &http.Server{
|
||||||
|
Handler: router,
|
||||||
|
Addr: (":" + c.ListenPort),
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add global logging middleware
|
||||||
|
router.Use(loggingMiddleware)
|
||||||
|
|
||||||
|
// Add global template loader middleware (develop)
|
||||||
|
if c.Version == "develop" {
|
||||||
|
log.Info("utilizing debug template loader")
|
||||||
|
router.Use(api.templateMiddleware(router))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Assets & web app templates
|
||||||
|
assetsDir, _ := fs.Sub(assets, "assets")
|
||||||
|
router.StaticFS("/assets", http.FS(assetsDir))
|
||||||
|
|
||||||
|
// Generate auth token
|
||||||
var newToken []byte
|
var newToken []byte
|
||||||
var err error
|
var err error
|
||||||
|
if c.CookieAuthKey != "" {
|
||||||
if c.CookieSessionKey != "" {
|
log.Info("utilizing environment cookie auth key")
|
||||||
log.Info("[NewApi] Utilizing Environment Cookie Session Key")
|
newToken = []byte(c.CookieAuthKey)
|
||||||
newToken = []byte(c.CookieSessionKey)
|
|
||||||
} else {
|
} else {
|
||||||
log.Info("[NewApi] Generating Cookie Session Key")
|
log.Info("generating cookie auth key")
|
||||||
newToken, err = generateToken(64)
|
newToken, err = utils.GenerateToken(64)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic("Unable to generate secure token")
|
log.Panic("unable to generate cookie auth key")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Configure Cookie Session Store
|
// Set enc token
|
||||||
store := cookie.NewStore(newToken)
|
store := cookie.NewStore(newToken)
|
||||||
|
if c.CookieEncKey != "" {
|
||||||
|
if len(c.CookieEncKey) == 16 || len(c.CookieEncKey) == 32 {
|
||||||
|
log.Info("utilizing environment cookie encryption key")
|
||||||
|
store = cookie.NewStore(newToken, []byte(c.CookieEncKey))
|
||||||
|
} else {
|
||||||
|
log.Panic("invalid cookie encryption key (must be 16 or 32 bytes)")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Configure cookie session store
|
||||||
store.Options(sessions.Options{
|
store.Options(sessions.Options{
|
||||||
MaxAge: 60 * 60 * 24 * 7,
|
MaxAge: 60 * 60 * 24 * 7,
|
||||||
Secure: c.CookieSecure,
|
Secure: c.CookieSecure,
|
||||||
HttpOnly: c.CookieHTTPOnly,
|
HttpOnly: c.CookieHTTPOnly,
|
||||||
SameSite: http.SameSiteStrictMode,
|
SameSite: http.SameSiteStrictMode,
|
||||||
})
|
})
|
||||||
api.Router.Use(sessions.Sessions("token", store))
|
router.Use(sessions.Sessions("token", store))
|
||||||
|
|
||||||
// Register Web App Route
|
// Register web app route
|
||||||
api.registerWebAppRoutes()
|
api.registerWebAppRoutes(router)
|
||||||
|
|
||||||
// Register API Routes
|
// Register API routes
|
||||||
apiGroup := api.Router.Group("/api")
|
apiGroup := router.Group("/api")
|
||||||
api.registerKOAPIRoutes(apiGroup)
|
api.registerKOAPIRoutes(apiGroup)
|
||||||
api.registerOPDSRoutes(apiGroup)
|
api.registerOPDSRoutes(apiGroup)
|
||||||
|
|
||||||
return api
|
return api
|
||||||
}
|
}
|
||||||
|
|
||||||
func (api *API) registerWebAppRoutes() {
|
func (api *API) Start() error {
|
||||||
// Generate Templates
|
return api.httpServer.ListenAndServe()
|
||||||
api.Router.HTMLRender = *api.generateTemplates()
|
|
||||||
|
|
||||||
// Static Assets (Required @ Root)
|
|
||||||
api.Router.GET("/manifest.json", api.webManifest)
|
|
||||||
api.Router.GET("/favicon.ico", api.faviconIcon)
|
|
||||||
api.Router.GET("/sw.js", api.serviceWorker)
|
|
||||||
|
|
||||||
// Local / Offline Static Pages (No Template, No Auth)
|
|
||||||
api.Router.GET("/local", api.localDocuments)
|
|
||||||
|
|
||||||
// Reader (Reader Page, Document Progress, Devices)
|
|
||||||
api.Router.GET("/reader", api.documentReader)
|
|
||||||
api.Router.GET("/reader/devices", api.authWebAppMiddleware, api.getDevices)
|
|
||||||
api.Router.GET("/reader/progress/:document", api.authWebAppMiddleware, api.getDocumentProgress)
|
|
||||||
|
|
||||||
// Web App
|
|
||||||
api.Router.GET("/", api.authWebAppMiddleware, api.createAppResourcesRoute("home"))
|
|
||||||
api.Router.GET("/activity", api.authWebAppMiddleware, api.createAppResourcesRoute("activity"))
|
|
||||||
api.Router.GET("/documents", api.authWebAppMiddleware, api.createAppResourcesRoute("documents"))
|
|
||||||
api.Router.GET("/documents/:document", api.authWebAppMiddleware, api.createAppResourcesRoute("document"))
|
|
||||||
api.Router.GET("/documents/:document/cover", api.authWebAppMiddleware, api.getDocumentCover)
|
|
||||||
api.Router.GET("/documents/:document/file", api.authWebAppMiddleware, api.downloadDocument)
|
|
||||||
api.Router.GET("/login", api.createAppResourcesRoute("login"))
|
|
||||||
api.Router.GET("/logout", api.authWebAppMiddleware, api.authLogout)
|
|
||||||
api.Router.GET("/register", api.createAppResourcesRoute("login", gin.H{"Register": true}))
|
|
||||||
api.Router.GET("/settings", api.authWebAppMiddleware, api.createAppResourcesRoute("settings"))
|
|
||||||
api.Router.POST("/login", api.authFormLogin)
|
|
||||||
api.Router.POST("/register", api.authFormRegister)
|
|
||||||
|
|
||||||
// Demo Mode Enabled Configuration
|
|
||||||
if api.Config.DemoMode {
|
|
||||||
api.Router.POST("/documents", api.authWebAppMiddleware, api.demoModeAppError)
|
|
||||||
api.Router.POST("/documents/:document/delete", api.authWebAppMiddleware, api.demoModeAppError)
|
|
||||||
api.Router.POST("/documents/:document/edit", api.authWebAppMiddleware, api.demoModeAppError)
|
|
||||||
api.Router.POST("/documents/:document/identify", api.authWebAppMiddleware, api.demoModeAppError)
|
|
||||||
api.Router.POST("/settings", api.authWebAppMiddleware, api.demoModeAppError)
|
|
||||||
} else {
|
|
||||||
api.Router.POST("/documents", api.authWebAppMiddleware, api.uploadNewDocument)
|
|
||||||
api.Router.POST("/documents/:document/delete", api.authWebAppMiddleware, api.deleteDocument)
|
|
||||||
api.Router.POST("/documents/:document/edit", api.authWebAppMiddleware, api.editDocument)
|
|
||||||
api.Router.POST("/documents/:document/identify", api.authWebAppMiddleware, api.identifyDocument)
|
|
||||||
api.Router.POST("/settings", api.authWebAppMiddleware, api.editSettings)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Search Enabled Configuration
|
// Handler returns the underlying http.Handler for the Gin router
|
||||||
if api.Config.SearchEnabled {
|
func (api *API) Handler() http.Handler {
|
||||||
api.Router.GET("/search", api.authWebAppMiddleware, api.createAppResourcesRoute("search"))
|
return api.httpServer.Handler
|
||||||
api.Router.POST("/search", api.authWebAppMiddleware, api.saveNewDocument)
|
}
|
||||||
|
|
||||||
|
func (api *API) Stop() error {
|
||||||
|
// Stop server
|
||||||
|
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||||
|
defer cancel()
|
||||||
|
err := api.httpServer.Shutdown(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Close DB
|
||||||
|
return api.db.DB.Close()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) registerWebAppRoutes(router *gin.Engine) {
|
||||||
|
// Generate templates
|
||||||
|
router.HTMLRender = *api.generateTemplates()
|
||||||
|
|
||||||
|
// Static assets (required @ root)
|
||||||
|
router.GET("/manifest.json", api.appWebManifest)
|
||||||
|
router.GET("/favicon.ico", api.appFaviconIcon)
|
||||||
|
router.GET("/sw.js", api.appServiceWorker)
|
||||||
|
|
||||||
|
// Local / offline static pages (no template, no auth)
|
||||||
|
router.GET("/local", api.appLocalDocuments)
|
||||||
|
|
||||||
|
// Reader (reader page, document progress, devices)
|
||||||
|
router.GET("/reader", api.appDocumentReader)
|
||||||
|
router.GET("/reader/devices", api.authWebAppMiddleware, api.appGetDevices)
|
||||||
|
router.GET("/reader/progress/:document", api.authWebAppMiddleware, api.appGetDocumentProgress)
|
||||||
|
|
||||||
|
// Web app
|
||||||
|
router.GET("/", api.authWebAppMiddleware, api.appGetHome)
|
||||||
|
router.GET("/activity", api.authWebAppMiddleware, api.appGetActivity)
|
||||||
|
router.GET("/progress", api.authWebAppMiddleware, api.appGetProgress)
|
||||||
|
router.GET("/documents", api.authWebAppMiddleware, api.appGetDocuments)
|
||||||
|
router.GET("/documents/:document", api.authWebAppMiddleware, api.appGetDocument)
|
||||||
|
router.GET("/documents/:document/cover", api.authWebAppMiddleware, api.createGetCoverHandler(appErrorPage))
|
||||||
|
router.GET("/documents/:document/file", api.authWebAppMiddleware, api.createDownloadDocumentHandler(appErrorPage))
|
||||||
|
router.GET("/login", api.appGetLogin)
|
||||||
|
router.GET("/logout", api.authWebAppMiddleware, api.appAuthLogout)
|
||||||
|
router.GET("/register", api.appGetRegister)
|
||||||
|
router.GET("/settings", api.authWebAppMiddleware, api.appGetSettings)
|
||||||
|
router.GET("/admin/logs", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appGetAdminLogs)
|
||||||
|
router.GET("/admin/import", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appGetAdminImport)
|
||||||
|
router.POST("/admin/import", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appPerformAdminImport)
|
||||||
|
router.GET("/admin/users", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appGetAdminUsers)
|
||||||
|
router.POST("/admin/users", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appUpdateAdminUsers)
|
||||||
|
router.GET("/admin", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appGetAdmin)
|
||||||
|
router.POST("/admin", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appPerformAdminAction)
|
||||||
|
router.POST("/login", api.appAuthLogin)
|
||||||
|
router.POST("/register", api.appAuthRegister)
|
||||||
|
|
||||||
|
// Demo mode enabled configuration
|
||||||
|
if api.cfg.DemoMode {
|
||||||
|
router.POST("/documents", api.authWebAppMiddleware, api.appDemoModeError)
|
||||||
|
router.POST("/documents/:document/delete", api.authWebAppMiddleware, api.appDemoModeError)
|
||||||
|
router.POST("/documents/:document/edit", api.authWebAppMiddleware, api.appDemoModeError)
|
||||||
|
router.POST("/documents/:document/identify", api.authWebAppMiddleware, api.appDemoModeError)
|
||||||
|
router.POST("/settings", api.authWebAppMiddleware, api.appDemoModeError)
|
||||||
|
} else {
|
||||||
|
router.POST("/documents", api.authWebAppMiddleware, api.appUploadNewDocument)
|
||||||
|
router.POST("/documents/:document/delete", api.authWebAppMiddleware, api.appDeleteDocument)
|
||||||
|
router.POST("/documents/:document/edit", api.authWebAppMiddleware, api.appEditDocument)
|
||||||
|
router.POST("/documents/:document/identify", api.authWebAppMiddleware, api.appIdentifyDocument)
|
||||||
|
router.POST("/settings", api.authWebAppMiddleware, api.appEditSettings)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Search enabled configuration
|
||||||
|
if api.cfg.SearchEnabled {
|
||||||
|
router.GET("/search", api.authWebAppMiddleware, api.appGetSearch)
|
||||||
|
router.POST("/search", api.authWebAppMiddleware, api.appSaveNewDocument)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (api *API) registerKOAPIRoutes(apiGroup *gin.RouterGroup) {
|
func (api *API) registerKOAPIRoutes(apiGroup *gin.RouterGroup) {
|
||||||
koGroup := apiGroup.Group("/ko")
|
koGroup := apiGroup.Group("/ko")
|
||||||
|
|
||||||
// KO Sync Routes (WebApp Uses - Progress & Activity)
|
// KO sync routes (webapp uses - progress & activity)
|
||||||
koGroup.GET("/documents/:document/file", api.authKOMiddleware, api.downloadDocument)
|
koGroup.GET("/documents/:document/file", api.authKOMiddleware, api.createDownloadDocumentHandler(apiErrorPage))
|
||||||
koGroup.GET("/syncs/progress/:document", api.authKOMiddleware, api.getProgress)
|
koGroup.GET("/syncs/progress/:document", api.authKOMiddleware, api.koGetProgress)
|
||||||
koGroup.GET("/users/auth", api.authKOMiddleware, api.authorizeUser)
|
koGroup.GET("/users/auth", api.authKOMiddleware, api.koAuthorizeUser)
|
||||||
koGroup.POST("/activity", api.authKOMiddleware, api.addActivities)
|
koGroup.POST("/activity", api.authKOMiddleware, api.koAddActivities)
|
||||||
koGroup.POST("/syncs/activity", api.authKOMiddleware, api.checkActivitySync)
|
koGroup.POST("/syncs/activity", api.authKOMiddleware, api.koCheckActivitySync)
|
||||||
koGroup.POST("/users/create", api.createUser)
|
koGroup.POST("/users/create", api.koAuthRegister)
|
||||||
koGroup.PUT("/syncs/progress", api.authKOMiddleware, api.setProgress)
|
koGroup.PUT("/syncs/progress", api.authKOMiddleware, api.koSetProgress)
|
||||||
|
|
||||||
// Demo Mode Enabled Configuration
|
// Demo mode enabled configuration
|
||||||
if api.Config.DemoMode {
|
if api.cfg.DemoMode {
|
||||||
koGroup.POST("/documents", api.authKOMiddleware, api.demoModeJSONError)
|
koGroup.POST("/documents", api.authKOMiddleware, api.koDemoModeJSONError)
|
||||||
koGroup.POST("/syncs/documents", api.authKOMiddleware, api.demoModeJSONError)
|
koGroup.POST("/syncs/documents", api.authKOMiddleware, api.koDemoModeJSONError)
|
||||||
koGroup.PUT("/documents/:document/file", api.authKOMiddleware, api.demoModeJSONError)
|
koGroup.PUT("/documents/:document/file", api.authKOMiddleware, api.koDemoModeJSONError)
|
||||||
} else {
|
} else {
|
||||||
koGroup.POST("/documents", api.authKOMiddleware, api.addDocuments)
|
koGroup.POST("/documents", api.authKOMiddleware, api.koAddDocuments)
|
||||||
koGroup.POST("/syncs/documents", api.authKOMiddleware, api.checkDocumentsSync)
|
koGroup.POST("/syncs/documents", api.authKOMiddleware, api.koCheckDocumentsSync)
|
||||||
koGroup.PUT("/documents/:document/file", api.authKOMiddleware, api.uploadExistingDocument)
|
koGroup.PUT("/documents/:document/file", api.authKOMiddleware, api.koUploadExistingDocument)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (api *API) registerOPDSRoutes(apiGroup *gin.RouterGroup) {
|
func (api *API) registerOPDSRoutes(apiGroup *gin.RouterGroup) {
|
||||||
opdsGroup := apiGroup.Group("/opds")
|
opdsGroup := apiGroup.Group("/opds")
|
||||||
|
|
||||||
// OPDS Routes
|
// OPDS routes
|
||||||
opdsGroup.GET("", api.authOPDSMiddleware, api.opdsEntry)
|
opdsGroup.GET("", api.authOPDSMiddleware, api.opdsEntry)
|
||||||
opdsGroup.GET("/", api.authOPDSMiddleware, api.opdsEntry)
|
opdsGroup.GET("/", api.authOPDSMiddleware, api.opdsEntry)
|
||||||
opdsGroup.GET("/search.xml", api.authOPDSMiddleware, api.opdsSearchDescription)
|
opdsGroup.GET("/search.xml", api.authOPDSMiddleware, api.opdsSearchDescription)
|
||||||
opdsGroup.GET("/documents", api.authOPDSMiddleware, api.opdsDocuments)
|
opdsGroup.GET("/documents", api.authOPDSMiddleware, api.opdsDocuments)
|
||||||
opdsGroup.GET("/documents/:document/cover", api.authOPDSMiddleware, api.getDocumentCover)
|
opdsGroup.GET("/documents/:document/cover", api.authOPDSMiddleware, api.createGetCoverHandler(apiErrorPage))
|
||||||
opdsGroup.GET("/documents/:document/file", api.authOPDSMiddleware, api.downloadDocument)
|
opdsGroup.GET("/documents/:document/file", api.authOPDSMiddleware, api.createDownloadDocumentHandler(apiErrorPage))
|
||||||
}
|
}
|
||||||
|
|
||||||
func (api *API) generateTemplates() *multitemplate.Renderer {
|
func (api *API) generateTemplates() *multitemplate.Renderer {
|
||||||
// Define Templates & Helper Functions
|
// Define templates & helper functions
|
||||||
render := multitemplate.NewRenderer()
|
render := multitemplate.NewRenderer()
|
||||||
|
templates := make(map[string]*template.Template)
|
||||||
helperFuncs := template.FuncMap{
|
helperFuncs := template.FuncMap{
|
||||||
"GetSVGGraphData": getSVGGraphData,
|
|
||||||
"GetUTCOffsets": getUTCOffsets,
|
|
||||||
"NiceSeconds": niceSeconds,
|
|
||||||
"dict": dict,
|
"dict": dict,
|
||||||
|
"slice": slice,
|
||||||
|
"fields": fields,
|
||||||
|
"getSVGGraphData": getSVGGraphData,
|
||||||
|
"getTimeZones": getTimeZones,
|
||||||
|
"hasPrefix": strings.HasPrefix,
|
||||||
|
"niceNumbers": niceNumbers,
|
||||||
|
"niceSeconds": niceSeconds,
|
||||||
}
|
}
|
||||||
|
|
||||||
// Load Base
|
// Load Base
|
||||||
b, _ := api.Assets.ReadFile("templates/base.html")
|
b, err := fs.ReadFile(api.assets, "templates/base.tmpl")
|
||||||
baseTemplate := template.Must(template.New("base").Funcs(helperFuncs).Parse(string(b)))
|
if err != nil {
|
||||||
|
log.Errorf("error reading base template: %v", err)
|
||||||
|
return &render
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse Base
|
||||||
|
baseTemplate, err := template.New("base").Funcs(helperFuncs).Parse(string(b))
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf("error parsing base template: %v", err)
|
||||||
|
return &render
|
||||||
|
}
|
||||||
|
|
||||||
// Load SVGs
|
// Load SVGs
|
||||||
svgs, _ := api.Assets.ReadDir("templates/svgs")
|
err = api.loadTemplates("svg", baseTemplate, templates, false)
|
||||||
for _, item := range svgs {
|
if err != nil {
|
||||||
basename := item.Name()
|
log.Errorf("error loading svg templates: %v", err)
|
||||||
path := fmt.Sprintf("templates/svgs/%s", basename)
|
return &render
|
||||||
name := strings.TrimSuffix(basename, filepath.Ext(basename))
|
|
||||||
|
|
||||||
b, _ := api.Assets.ReadFile(path)
|
|
||||||
baseTemplate = template.Must(baseTemplate.New("svg/" + name).Parse(string(b)))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Load Components
|
// Load Components
|
||||||
components, _ := api.Assets.ReadDir("templates/components")
|
err = api.loadTemplates("component", baseTemplate, templates, false)
|
||||||
for _, item := range components {
|
if err != nil {
|
||||||
basename := item.Name()
|
log.Errorf("error loading component templates: %v", err)
|
||||||
path := fmt.Sprintf("templates/components/%s", basename)
|
return &render
|
||||||
name := strings.TrimSuffix(basename, filepath.Ext(basename))
|
|
||||||
|
|
||||||
b, _ := api.Assets.ReadFile(path)
|
|
||||||
baseTemplate = template.Must(baseTemplate.New("component/" + name).Parse(string(b)))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Load Pages
|
// Load Pages
|
||||||
pages, _ := api.Assets.ReadDir("templates/pages")
|
err = api.loadTemplates("page", baseTemplate, templates, true)
|
||||||
for _, item := range pages {
|
if err != nil {
|
||||||
basename := item.Name()
|
log.Errorf("error loading page templates: %v", err)
|
||||||
path := fmt.Sprintf("templates/pages/%s", basename)
|
return &render
|
||||||
name := strings.TrimSuffix(basename, filepath.Ext(basename))
|
}
|
||||||
|
|
||||||
// Clone Base Template
|
// Populate Renderer
|
||||||
b, _ := api.Assets.ReadFile(path)
|
api.templates = templates
|
||||||
pageTemplate, _ := template.Must(baseTemplate.Clone()).New("page/" + name).Parse(string(b))
|
for templateName, templateValue := range templates {
|
||||||
render.Add("page/"+name, pageTemplate)
|
render.Add(templateName, templateValue)
|
||||||
}
|
}
|
||||||
|
|
||||||
return &render
|
return &render
|
||||||
}
|
}
|
||||||
|
|
||||||
func generateToken(n int) ([]byte, error) {
|
func (api *API) loadTemplates(
|
||||||
b := make([]byte, n)
|
basePath string,
|
||||||
_, err := rand.Read(b)
|
baseTemplate *template.Template,
|
||||||
|
allTemplates map[string]*template.Template,
|
||||||
|
cloneBase bool,
|
||||||
|
) error {
|
||||||
|
// Load Templates (Pluralize)
|
||||||
|
templateDirectory := fmt.Sprintf("templates/%ss", basePath)
|
||||||
|
allFiles, err := fs.ReadDir(api.assets, templateDirectory)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return fmt.Errorf("unable to read template dir %s: %w", templateDirectory, err)
|
||||||
}
|
}
|
||||||
return b, nil
|
|
||||||
|
// Generate Templates
|
||||||
|
for _, item := range allFiles {
|
||||||
|
templateFile := item.Name()
|
||||||
|
templatePath := path.Join(templateDirectory, templateFile)
|
||||||
|
templateName := fmt.Sprintf("%s/%s", basePath, strings.TrimSuffix(templateFile, filepath.Ext(templateFile)))
|
||||||
|
|
||||||
|
// Read Template
|
||||||
|
b, err := fs.ReadFile(api.assets, templatePath)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("unable to read template %s: %w", templateName, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clone? (Pages - Don't Stomp)
|
||||||
|
if cloneBase {
|
||||||
|
baseTemplate = template.Must(baseTemplate.Clone())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse Template
|
||||||
|
baseTemplate, err = baseTemplate.New(templateName).Parse(string(b))
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("unable to parse template %s: %w", templateName, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
allTemplates[templateName] = baseTemplate
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) templateMiddleware(router *gin.Engine) gin.HandlerFunc {
|
||||||
|
return func(c *gin.Context) {
|
||||||
|
router.HTMLRender = *api.generateTemplates()
|
||||||
|
c.Next()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func loggingMiddleware(c *gin.Context) {
|
||||||
|
// Start timer
|
||||||
|
startTime := time.Now()
|
||||||
|
|
||||||
|
// Process request
|
||||||
|
c.Next()
|
||||||
|
|
||||||
|
// End timer
|
||||||
|
endTime := time.Now()
|
||||||
|
latency := endTime.Sub(startTime).Round(time.Microsecond)
|
||||||
|
|
||||||
|
// Log data
|
||||||
|
logData := log.Fields{
|
||||||
|
"type": "access",
|
||||||
|
"ip": c.ClientIP(),
|
||||||
|
"latency": latency.String(),
|
||||||
|
"status": c.Writer.Status(),
|
||||||
|
"method": c.Request.Method,
|
||||||
|
"path": c.Request.URL.Path,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get username
|
||||||
|
var auth authData
|
||||||
|
if data, _ := c.Get("Authorization"); data != nil {
|
||||||
|
auth = data.(authData)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Log user
|
||||||
|
if auth.UserName != "" {
|
||||||
|
logData["user"] = auth.UserName
|
||||||
|
}
|
||||||
|
|
||||||
|
// Log result
|
||||||
|
log.WithFields(logData).Info(fmt.Sprintf("%s %s", c.Request.Method, c.Request.URL.Path))
|
||||||
}
|
}
|
||||||
|
|||||||
949
api/app-admin-routes.go
Normal file
949
api/app-admin-routes.go
Normal file
@@ -0,0 +1,949 @@
|
|||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"archive/zip"
|
||||||
|
"bufio"
|
||||||
|
"context"
|
||||||
|
"crypto/md5"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"io/fs"
|
||||||
|
"mime/multipart"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"slices"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
argon2 "github.com/alexedwards/argon2id"
|
||||||
|
"github.com/gabriel-vasile/mimetype"
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/itchyny/gojq"
|
||||||
|
log "github.com/sirupsen/logrus"
|
||||||
|
"reichard.io/antholume/database"
|
||||||
|
"reichard.io/antholume/metadata"
|
||||||
|
"reichard.io/antholume/utils"
|
||||||
|
)
|
||||||
|
|
||||||
|
type adminAction string
|
||||||
|
|
||||||
|
const (
|
||||||
|
adminBackup adminAction = "BACKUP"
|
||||||
|
adminRestore adminAction = "RESTORE"
|
||||||
|
adminMetadataMatch adminAction = "METADATA_MATCH"
|
||||||
|
adminCacheTables adminAction = "CACHE_TABLES"
|
||||||
|
)
|
||||||
|
|
||||||
|
type requestAdminAction struct {
|
||||||
|
Action adminAction `form:"action"`
|
||||||
|
|
||||||
|
// Backup Action
|
||||||
|
BackupTypes []backupType `form:"backup_types"`
|
||||||
|
|
||||||
|
// Restore Action
|
||||||
|
RestoreFile *multipart.FileHeader `form:"restore_file"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type importType string
|
||||||
|
|
||||||
|
const (
|
||||||
|
importDirect importType = "DIRECT"
|
||||||
|
importCopy importType = "COPY"
|
||||||
|
)
|
||||||
|
|
||||||
|
type requestAdminImport struct {
|
||||||
|
Directory string `form:"directory"`
|
||||||
|
Select string `form:"select"`
|
||||||
|
Type importType `form:"type"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type operationType string
|
||||||
|
|
||||||
|
const (
|
||||||
|
opUpdate operationType = "UPDATE"
|
||||||
|
opCreate operationType = "CREATE"
|
||||||
|
opDelete operationType = "DELETE"
|
||||||
|
)
|
||||||
|
|
||||||
|
type requestAdminUpdateUser struct {
|
||||||
|
User string `form:"user"`
|
||||||
|
Password *string `form:"password"`
|
||||||
|
IsAdmin *bool `form:"is_admin"`
|
||||||
|
Operation operationType `form:"operation"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type requestAdminLogs struct {
|
||||||
|
Filter string `form:"filter"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type importStatus string
|
||||||
|
|
||||||
|
const (
|
||||||
|
importFailed importStatus = "FAILED"
|
||||||
|
importSuccess importStatus = "SUCCESS"
|
||||||
|
importExists importStatus = "EXISTS"
|
||||||
|
)
|
||||||
|
|
||||||
|
type importResult struct {
|
||||||
|
ID string
|
||||||
|
Name string
|
||||||
|
Path string
|
||||||
|
Status importStatus
|
||||||
|
Error error
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) appPerformAdminAction(c *gin.Context) {
|
||||||
|
templateVars, _ := api.getBaseTemplateVars("admin", c)
|
||||||
|
|
||||||
|
var rAdminAction requestAdminAction
|
||||||
|
if err := c.ShouldBind(&rAdminAction); err != nil {
|
||||||
|
log.Error("Invalid Form Bind: ", err)
|
||||||
|
appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
switch rAdminAction.Action {
|
||||||
|
case adminMetadataMatch:
|
||||||
|
// TODO
|
||||||
|
// 1. Documents xref most recent metadata table?
|
||||||
|
// 2. Select all / deselect?
|
||||||
|
case adminCacheTables:
|
||||||
|
go func() {
|
||||||
|
err := api.db.CacheTempTables(c)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Unable to cache temp tables: ", err)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
case adminRestore:
|
||||||
|
api.processRestoreFile(rAdminAction, c)
|
||||||
|
return
|
||||||
|
case adminBackup:
|
||||||
|
// Vacuum
|
||||||
|
_, err := api.db.DB.ExecContext(c, "VACUUM;")
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Unable to vacuum DB: ", err)
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, "Unable to vacuum database")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set Headers
|
||||||
|
c.Header("Content-type", "application/octet-stream")
|
||||||
|
c.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"AnthoLumeBackup_%s.zip\"", time.Now().Format("20060102150405")))
|
||||||
|
|
||||||
|
// Stream Backup ZIP Archive
|
||||||
|
c.Stream(func(w io.Writer) bool {
|
||||||
|
var directories []string
|
||||||
|
for _, item := range rAdminAction.BackupTypes {
|
||||||
|
if item == backupCovers {
|
||||||
|
directories = append(directories, "covers")
|
||||||
|
} else if item == backupDocuments {
|
||||||
|
directories = append(directories, "documents")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
err := api.createBackup(c, w, directories)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Backup Error: ", err)
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
})
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.HTML(http.StatusOK, "page/admin", templateVars)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) appGetAdmin(c *gin.Context) {
|
||||||
|
templateVars, _ := api.getBaseTemplateVars("admin", c)
|
||||||
|
c.HTML(http.StatusOK, "page/admin", templateVars)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) appGetAdminLogs(c *gin.Context) {
|
||||||
|
templateVars, _ := api.getBaseTemplateVars("admin-logs", c)
|
||||||
|
|
||||||
|
var rAdminLogs requestAdminLogs
|
||||||
|
if err := c.ShouldBindQuery(&rAdminLogs); err != nil {
|
||||||
|
log.Error("Invalid URI Bind")
|
||||||
|
appErrorPage(c, http.StatusNotFound, "Invalid URI parameters")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
rAdminLogs.Filter = strings.TrimSpace(rAdminLogs.Filter)
|
||||||
|
|
||||||
|
var jqFilter *gojq.Code
|
||||||
|
var basicFilter string
|
||||||
|
if strings.HasPrefix(rAdminLogs.Filter, "\"") && strings.HasSuffix(rAdminLogs.Filter, "\"") {
|
||||||
|
basicFilter = rAdminLogs.Filter[1 : len(rAdminLogs.Filter)-1]
|
||||||
|
} else if rAdminLogs.Filter != "" {
|
||||||
|
parsed, err := gojq.Parse(rAdminLogs.Filter)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Unable to parse JQ filter")
|
||||||
|
appErrorPage(c, http.StatusNotFound, "Unable to parse JQ filter")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
jqFilter, err = gojq.Compile(parsed)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Unable to compile JQ filter")
|
||||||
|
appErrorPage(c, http.StatusNotFound, "Unable to compile JQ filter")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Open Log File
|
||||||
|
logPath := filepath.Join(api.cfg.ConfigPath, "logs/antholume.log")
|
||||||
|
logFile, err := os.Open(logPath)
|
||||||
|
if err != nil {
|
||||||
|
appErrorPage(c, http.StatusBadRequest, "Missing AnthoLume log file")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
defer logFile.Close()
|
||||||
|
|
||||||
|
// Log Lines
|
||||||
|
var logLines []string
|
||||||
|
scanner := bufio.NewScanner(logFile)
|
||||||
|
for scanner.Scan() {
|
||||||
|
rawLog := scanner.Text()
|
||||||
|
|
||||||
|
// Attempt JSON Pretty
|
||||||
|
var jsonMap map[string]any
|
||||||
|
err := json.Unmarshal([]byte(rawLog), &jsonMap)
|
||||||
|
if err != nil {
|
||||||
|
logLines = append(logLines, scanner.Text())
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse JSON
|
||||||
|
rawData, err := json.MarshalIndent(jsonMap, "", " ")
|
||||||
|
if err != nil {
|
||||||
|
logLines = append(logLines, scanner.Text())
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Basic Filter
|
||||||
|
if basicFilter != "" && strings.Contains(string(rawData), basicFilter) {
|
||||||
|
logLines = append(logLines, string(rawData))
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// No JQ Filter
|
||||||
|
if jqFilter == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Error or nil
|
||||||
|
result, _ := jqFilter.Run(jsonMap).Next()
|
||||||
|
if _, ok := result.(error); ok {
|
||||||
|
logLines = append(logLines, string(rawData))
|
||||||
|
continue
|
||||||
|
} else if result == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Attempt filtered json
|
||||||
|
filteredData, err := json.MarshalIndent(result, "", " ")
|
||||||
|
if err == nil {
|
||||||
|
rawData = filteredData
|
||||||
|
}
|
||||||
|
|
||||||
|
logLines = append(logLines, string(rawData))
|
||||||
|
}
|
||||||
|
|
||||||
|
templateVars["Data"] = logLines
|
||||||
|
templateVars["Filter"] = rAdminLogs.Filter
|
||||||
|
|
||||||
|
c.HTML(http.StatusOK, "page/admin-logs", templateVars)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) appGetAdminUsers(c *gin.Context) {
|
||||||
|
templateVars, _ := api.getBaseTemplateVars("admin-users", c)
|
||||||
|
|
||||||
|
users, err := api.db.Queries.GetUsers(c)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("GetUsers DB Error: ", err)
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("GetUsers DB Error: %v", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
templateVars["Data"] = users
|
||||||
|
|
||||||
|
c.HTML(http.StatusOK, "page/admin-users", templateVars)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) appUpdateAdminUsers(c *gin.Context) {
|
||||||
|
templateVars, _ := api.getBaseTemplateVars("admin-users", c)
|
||||||
|
|
||||||
|
var rUpdate requestAdminUpdateUser
|
||||||
|
if err := c.ShouldBind(&rUpdate); err != nil {
|
||||||
|
log.Error("Invalid URI Bind")
|
||||||
|
appErrorPage(c, http.StatusNotFound, "Invalid user parameters")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure Username
|
||||||
|
if rUpdate.User == "" {
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, "User cannot be empty")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var err error
|
||||||
|
switch rUpdate.Operation {
|
||||||
|
case opCreate:
|
||||||
|
err = api.createUser(c, rUpdate.User, rUpdate.Password, rUpdate.IsAdmin)
|
||||||
|
case opUpdate:
|
||||||
|
err = api.updateUser(c, rUpdate.User, rUpdate.Password, rUpdate.IsAdmin)
|
||||||
|
case opDelete:
|
||||||
|
err = api.deleteUser(c, rUpdate.User)
|
||||||
|
default:
|
||||||
|
appErrorPage(c, http.StatusNotFound, "Unknown user operation")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("Unable to create or update user: %v", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
users, err := api.db.Queries.GetUsers(c)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("GetUsers DB Error: ", err)
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("GetUsers DB Error: %v", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
templateVars["Data"] = users
|
||||||
|
|
||||||
|
c.HTML(http.StatusOK, "page/admin-users", templateVars)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) appGetAdminImport(c *gin.Context) {
|
||||||
|
templateVars, _ := api.getBaseTemplateVars("admin-import", c)
|
||||||
|
|
||||||
|
var rImportFolder requestAdminImport
|
||||||
|
if err := c.ShouldBindQuery(&rImportFolder); err != nil {
|
||||||
|
log.Error("Invalid URI Bind")
|
||||||
|
appErrorPage(c, http.StatusNotFound, "Invalid directory")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if rImportFolder.Select != "" {
|
||||||
|
templateVars["SelectedDirectory"] = rImportFolder.Select
|
||||||
|
c.HTML(http.StatusOK, "page/admin-import", templateVars)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default Path
|
||||||
|
if rImportFolder.Directory == "" {
|
||||||
|
dPath, err := filepath.Abs(api.cfg.DataPath)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Absolute filepath error: ", rImportFolder.Directory)
|
||||||
|
appErrorPage(c, http.StatusNotFound, "Unable to get data directory absolute path")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
rImportFolder.Directory = dPath
|
||||||
|
}
|
||||||
|
|
||||||
|
entries, err := os.ReadDir(rImportFolder.Directory)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Invalid directory: ", rImportFolder.Directory)
|
||||||
|
appErrorPage(c, http.StatusNotFound, "Invalid directory")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
allDirectories := []string{}
|
||||||
|
for _, e := range entries {
|
||||||
|
if !e.IsDir() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
allDirectories = append(allDirectories, e.Name())
|
||||||
|
}
|
||||||
|
|
||||||
|
templateVars["CurrentPath"] = filepath.Clean(rImportFolder.Directory)
|
||||||
|
templateVars["Data"] = allDirectories
|
||||||
|
|
||||||
|
c.HTML(http.StatusOK, "page/admin-import", templateVars)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) appPerformAdminImport(c *gin.Context) {
|
||||||
|
templateVars, _ := api.getBaseTemplateVars("admin-import", c)
|
||||||
|
|
||||||
|
var rAdminImport requestAdminImport
|
||||||
|
if err := c.ShouldBind(&rAdminImport); err != nil {
|
||||||
|
log.Error("Invalid URI Bind")
|
||||||
|
appErrorPage(c, http.StatusNotFound, "Invalid directory")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get import directory
|
||||||
|
importDirectory := filepath.Clean(rAdminImport.Directory)
|
||||||
|
|
||||||
|
// Get data directory
|
||||||
|
absoluteDataPath, _ := filepath.Abs(filepath.Join(api.cfg.DataPath, "documents"))
|
||||||
|
|
||||||
|
// Validate different path
|
||||||
|
if absoluteDataPath == importDirectory {
|
||||||
|
appErrorPage(c, http.StatusBadRequest, "Directory is the same as data path")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Do Transaction
|
||||||
|
tx, err := api.db.DB.Begin()
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Transaction Begin DB Error:", err)
|
||||||
|
apiErrorPage(c, http.StatusBadRequest, "Unknown error")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Defer & Start Transaction
|
||||||
|
defer func() {
|
||||||
|
if err := tx.Rollback(); err != nil {
|
||||||
|
log.Error("DB Rollback Error:", err)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
qtx := api.db.Queries.WithTx(tx)
|
||||||
|
|
||||||
|
// Track imports
|
||||||
|
importResults := make([]importResult, 0)
|
||||||
|
|
||||||
|
// Walk Directory & Import
|
||||||
|
err = filepath.WalkDir(importDirectory, func(importPath string, f fs.DirEntry, err error) error {
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if f.IsDir() {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get relative path
|
||||||
|
basePath := importDirectory
|
||||||
|
relFilePath, err := filepath.Rel(importDirectory, importPath)
|
||||||
|
if err != nil {
|
||||||
|
log.Warnf("path error: %v", err)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Track imports
|
||||||
|
iResult := importResult{
|
||||||
|
Path: relFilePath,
|
||||||
|
Status: importFailed,
|
||||||
|
}
|
||||||
|
defer func() {
|
||||||
|
importResults = append(importResults, iResult)
|
||||||
|
}()
|
||||||
|
|
||||||
|
// Get metadata
|
||||||
|
fileMeta, err := metadata.GetMetadata(importPath)
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf("metadata error: %v", err)
|
||||||
|
iResult.Error = err
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
iResult.ID = *fileMeta.PartialMD5
|
||||||
|
iResult.Name = fmt.Sprintf("%s - %s", *fileMeta.Author, *fileMeta.Title)
|
||||||
|
|
||||||
|
// Check already exists
|
||||||
|
_, err = qtx.GetDocument(c, *fileMeta.PartialMD5)
|
||||||
|
if err == nil {
|
||||||
|
log.Warnf("document already exists: %s", *fileMeta.PartialMD5)
|
||||||
|
iResult.Status = importExists
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Import Copy
|
||||||
|
if rAdminImport.Type == importCopy {
|
||||||
|
// Derive & Sanitize File Name
|
||||||
|
relFilePath = deriveBaseFileName(fileMeta)
|
||||||
|
safePath := filepath.Join(api.cfg.DataPath, "documents", relFilePath)
|
||||||
|
|
||||||
|
// Open Source File
|
||||||
|
srcFile, err := os.Open(importPath)
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf("unable to open current file: %v", err)
|
||||||
|
iResult.Error = err
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
defer srcFile.Close()
|
||||||
|
|
||||||
|
// Open Destination File
|
||||||
|
destFile, err := os.Create(safePath)
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf("unable to open destination file: %v", err)
|
||||||
|
iResult.Error = err
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
defer destFile.Close()
|
||||||
|
|
||||||
|
// Copy File
|
||||||
|
if _, err = io.Copy(destFile, srcFile); err != nil {
|
||||||
|
log.Errorf("unable to save file: %v", err)
|
||||||
|
iResult.Error = err
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update Base & Path
|
||||||
|
basePath = filepath.Join(api.cfg.DataPath, "documents")
|
||||||
|
iResult.Path = relFilePath
|
||||||
|
}
|
||||||
|
|
||||||
|
// Upsert document
|
||||||
|
if _, err = qtx.UpsertDocument(c, database.UpsertDocumentParams{
|
||||||
|
ID: *fileMeta.PartialMD5,
|
||||||
|
Title: fileMeta.Title,
|
||||||
|
Author: fileMeta.Author,
|
||||||
|
Description: fileMeta.Description,
|
||||||
|
Md5: fileMeta.MD5,
|
||||||
|
Words: fileMeta.WordCount,
|
||||||
|
Filepath: &relFilePath,
|
||||||
|
Basepath: &basePath,
|
||||||
|
}); err != nil {
|
||||||
|
log.Errorf("UpsertDocument DB Error: %v", err)
|
||||||
|
iResult.Error = err
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
iResult.Status = importSuccess
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("Import Failed: %v", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Commit transaction
|
||||||
|
if err := tx.Commit(); err != nil {
|
||||||
|
log.Error("Transaction Commit DB Error: ", err)
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("Import DB Error: %v", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sort import results
|
||||||
|
sort.Slice(importResults, func(i int, j int) bool {
|
||||||
|
return importStatusPriority(importResults[i].Status) <
|
||||||
|
importStatusPriority(importResults[j].Status)
|
||||||
|
})
|
||||||
|
|
||||||
|
templateVars["Data"] = importResults
|
||||||
|
c.HTML(http.StatusOK, "page/admin-import-results", templateVars)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) processRestoreFile(rAdminAction requestAdminAction, c *gin.Context) {
|
||||||
|
// Validate Type & Derive Extension on MIME
|
||||||
|
uploadedFile, err := rAdminAction.RestoreFile.Open()
|
||||||
|
if err != nil {
|
||||||
|
log.Error("File Error: ", err)
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, "Unable to open file")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
fileMime, err := mimetype.DetectReader(uploadedFile)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("MIME Error")
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, "Unable to detect filetype")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
fileExtension := fileMime.Extension()
|
||||||
|
|
||||||
|
// Validate Extension
|
||||||
|
if !slices.Contains([]string{".zip"}, fileExtension) {
|
||||||
|
log.Error("Invalid FileType: ", fileExtension)
|
||||||
|
appErrorPage(c, http.StatusBadRequest, "Invalid filetype")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create Temp File
|
||||||
|
tempFile, err := os.CreateTemp("", "restore")
|
||||||
|
if err != nil {
|
||||||
|
log.Warn("Temp File Create Error: ", err)
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, "Unable to create temp file")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
defer os.Remove(tempFile.Name())
|
||||||
|
defer tempFile.Close()
|
||||||
|
|
||||||
|
// Save Temp
|
||||||
|
err = c.SaveUploadedFile(rAdminAction.RestoreFile, tempFile.Name())
|
||||||
|
if err != nil {
|
||||||
|
log.Error("File Error: ", err)
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, "Unable to save file")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// ZIP Info
|
||||||
|
fileInfo, err := tempFile.Stat()
|
||||||
|
if err != nil {
|
||||||
|
log.Error("File Error: ", err)
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, "Unable to read file")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create ZIP Reader
|
||||||
|
zipReader, err := zip.NewReader(tempFile, fileInfo.Size())
|
||||||
|
if err != nil {
|
||||||
|
log.Error("ZIP Error: ", err)
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, "Unable to read zip")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate ZIP Contents
|
||||||
|
hasDBFile := false
|
||||||
|
hasUnknownFile := false
|
||||||
|
for _, file := range zipReader.File {
|
||||||
|
fileName := strings.TrimPrefix(file.Name, "/")
|
||||||
|
if fileName == "antholume.db" {
|
||||||
|
hasDBFile = true
|
||||||
|
break
|
||||||
|
} else if !strings.HasPrefix(fileName, "covers/") && !strings.HasPrefix(fileName, "documents/") {
|
||||||
|
hasUnknownFile = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Invalid ZIP
|
||||||
|
if !hasDBFile {
|
||||||
|
log.Error("Invalid ZIP File - Missing DB")
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, "Invalid Restore ZIP - Missing DB")
|
||||||
|
return
|
||||||
|
} else if hasUnknownFile {
|
||||||
|
log.Error("Invalid ZIP File - Invalid File(s)")
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, "Invalid Restore ZIP - Invalid File(s)")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create Backup File
|
||||||
|
backupFilePath := filepath.Join(api.cfg.ConfigPath, fmt.Sprintf("backups/AnthoLumeBackup_%s.zip", time.Now().Format("20060102150405")))
|
||||||
|
backupFile, err := os.Create(backupFilePath)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Unable to create backup file: ", err)
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, "Unable to create backup file")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
defer backupFile.Close()
|
||||||
|
|
||||||
|
// Save Backup File
|
||||||
|
w := bufio.NewWriter(backupFile)
|
||||||
|
err = api.createBackup(c, w, []string{"covers", "documents"})
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Unable to save backup file: ", err)
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, "Unable to save backup file")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remove Data
|
||||||
|
err = api.removeData()
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Unable to delete data: ", err)
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, "Unable to delete data")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Restore Data
|
||||||
|
err = api.restoreData(zipReader)
|
||||||
|
if err != nil {
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, "Unable to restore data")
|
||||||
|
log.Panic("Unable to restore data: ", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Reinit DB
|
||||||
|
if err := api.db.Reload(c); err != nil {
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, "Unable to reload DB")
|
||||||
|
log.Panicf("Unable to reload DB: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rotate Auth Hashes
|
||||||
|
if err := api.rotateAllAuthHashes(c); err != nil {
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, "Unable to rotate hashes")
|
||||||
|
log.Panicf("Unable to rotate auth hashes: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Redirect to login page
|
||||||
|
c.Redirect(http.StatusFound, "/login")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) restoreData(zipReader *zip.Reader) error {
|
||||||
|
// Ensure Directories
|
||||||
|
api.cfg.EnsureDirectories()
|
||||||
|
|
||||||
|
// Restore Data
|
||||||
|
for _, file := range zipReader.File {
|
||||||
|
rc, err := file.Open()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer rc.Close()
|
||||||
|
|
||||||
|
destPath := filepath.Join(api.cfg.DataPath, file.Name)
|
||||||
|
destFile, err := os.Create(destPath)
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf("error creating destination file: %v", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer destFile.Close()
|
||||||
|
|
||||||
|
// Copy the contents from the zip file to the destination file.
|
||||||
|
if _, err := io.Copy(destFile, rc); err != nil {
|
||||||
|
log.Errorf("Error copying file contents: %v", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) removeData() error {
|
||||||
|
allPaths := []string{
|
||||||
|
"covers",
|
||||||
|
"documents",
|
||||||
|
"antholume.db",
|
||||||
|
"antholume.db-wal",
|
||||||
|
"antholume.db-shm",
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, name := range allPaths {
|
||||||
|
fullPath := filepath.Join(api.cfg.DataPath, name)
|
||||||
|
err := os.RemoveAll(fullPath)
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf("Unable to delete %s: %v", name, err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) createBackup(ctx context.Context, w io.Writer, directories []string) error {
|
||||||
|
// Vacuum DB
|
||||||
|
_, err := api.db.DB.ExecContext(ctx, "VACUUM;")
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("Unable to vacuum database: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
ar := zip.NewWriter(w)
|
||||||
|
exportWalker := func(currentPath string, f fs.DirEntry, err error) error {
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if f.IsDir() {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Open File on Disk
|
||||||
|
file, err := os.Open(currentPath)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer file.Close()
|
||||||
|
|
||||||
|
// Derive Export Structure
|
||||||
|
fileName := filepath.Base(currentPath)
|
||||||
|
folderName := filepath.Base(filepath.Dir(currentPath))
|
||||||
|
|
||||||
|
// Create File in Export
|
||||||
|
newF, err := ar.Create(filepath.Join(folderName, fileName))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Copy File in Export
|
||||||
|
_, err = io.Copy(newF, file)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get DB Path
|
||||||
|
fileName := fmt.Sprintf("%s.db", api.cfg.DBName)
|
||||||
|
dbLocation := filepath.Join(api.cfg.ConfigPath, fileName)
|
||||||
|
|
||||||
|
// Copy Database File
|
||||||
|
dbFile, err := os.Open(dbLocation)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer dbFile.Close()
|
||||||
|
|
||||||
|
newDbFile, err := ar.Create(fileName)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = io.Copy(newDbFile, dbFile)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Backup Covers & Documents
|
||||||
|
for _, dir := range directories {
|
||||||
|
err = filepath.WalkDir(filepath.Join(api.cfg.DataPath, dir), exportWalker)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
ar.Close()
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) isLastAdmin(ctx context.Context, userID string) (bool, error) {
|
||||||
|
allUsers, err := api.db.Queries.GetUsers(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return false, fmt.Errorf("GetUsers DB Error: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
hasAdmin := false
|
||||||
|
for _, user := range allUsers {
|
||||||
|
if user.Admin && user.ID != userID {
|
||||||
|
hasAdmin = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return !hasAdmin, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) createUser(ctx context.Context, user string, rawPassword *string, isAdmin *bool) error {
|
||||||
|
// Validate Necessary Parameters
|
||||||
|
if rawPassword == nil || *rawPassword == "" {
|
||||||
|
return fmt.Errorf("password can't be empty")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Base Params
|
||||||
|
createParams := database.CreateUserParams{
|
||||||
|
ID: user,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle Admin (Explicit or False)
|
||||||
|
if isAdmin != nil {
|
||||||
|
createParams.Admin = *isAdmin
|
||||||
|
} else {
|
||||||
|
createParams.Admin = false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse Password
|
||||||
|
password := fmt.Sprintf("%x", md5.Sum([]byte(*rawPassword)))
|
||||||
|
hashedPassword, err := argon2.CreateHash(password, argon2.DefaultParams)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("unable to create hashed password")
|
||||||
|
}
|
||||||
|
createParams.Pass = &hashedPassword
|
||||||
|
|
||||||
|
// Generate Auth Hash
|
||||||
|
rawAuthHash, err := utils.GenerateToken(64)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("unable to create token for user")
|
||||||
|
}
|
||||||
|
authHash := fmt.Sprintf("%x", rawAuthHash)
|
||||||
|
createParams.AuthHash = &authHash
|
||||||
|
|
||||||
|
// Create user in DB
|
||||||
|
if rows, err := api.db.Queries.CreateUser(ctx, createParams); err != nil {
|
||||||
|
log.Error("CreateUser DB Error:", err)
|
||||||
|
return fmt.Errorf("unable to create user")
|
||||||
|
} else if rows == 0 {
|
||||||
|
log.Warn("User Already Exists:", createParams.ID)
|
||||||
|
return fmt.Errorf("user already exists")
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) updateUser(ctx context.Context, user string, rawPassword *string, isAdmin *bool) error {
|
||||||
|
// Validate Necessary Parameters
|
||||||
|
if rawPassword == nil && isAdmin == nil {
|
||||||
|
return fmt.Errorf("nothing to update")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Base Params
|
||||||
|
updateParams := database.UpdateUserParams{
|
||||||
|
UserID: user,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle Admin (Update or Existing)
|
||||||
|
if isAdmin != nil {
|
||||||
|
updateParams.Admin = *isAdmin
|
||||||
|
} else {
|
||||||
|
user, err := api.db.Queries.GetUser(ctx, user)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("GetUser DB Error: %w", err)
|
||||||
|
}
|
||||||
|
updateParams.Admin = user.Admin
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check Admins - Disallow Demotion
|
||||||
|
if isLast, err := api.isLastAdmin(ctx, user); err != nil {
|
||||||
|
return err
|
||||||
|
} else if isLast && !updateParams.Admin {
|
||||||
|
return fmt.Errorf("unable to demote %s - last admin", user)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle Password
|
||||||
|
if rawPassword != nil {
|
||||||
|
if *rawPassword == "" {
|
||||||
|
return fmt.Errorf("password can't be empty")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse Password
|
||||||
|
password := fmt.Sprintf("%x", md5.Sum([]byte(*rawPassword)))
|
||||||
|
hashedPassword, err := argon2.CreateHash(password, argon2.DefaultParams)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("unable to create hashed password")
|
||||||
|
}
|
||||||
|
updateParams.Password = &hashedPassword
|
||||||
|
|
||||||
|
// Generate Auth Hash
|
||||||
|
rawAuthHash, err := utils.GenerateToken(64)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("unable to create token for user")
|
||||||
|
}
|
||||||
|
authHash := fmt.Sprintf("%x", rawAuthHash)
|
||||||
|
updateParams.AuthHash = &authHash
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update User
|
||||||
|
_, err := api.db.Queries.UpdateUser(ctx, updateParams)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("UpdateUser DB Error: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) deleteUser(ctx context.Context, user string) error {
|
||||||
|
// Check Admins
|
||||||
|
if isLast, err := api.isLastAdmin(ctx, user); err != nil {
|
||||||
|
return err
|
||||||
|
} else if isLast {
|
||||||
|
return fmt.Errorf("unable to delete %s - last admin", user)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create Backup File
|
||||||
|
backupFilePath := filepath.Join(api.cfg.ConfigPath, fmt.Sprintf("backups/AnthoLumeBackup_%s.zip", time.Now().Format("20060102150405")))
|
||||||
|
backupFile, err := os.Create(backupFilePath)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer backupFile.Close()
|
||||||
|
|
||||||
|
// Save Backup File (DB Only)
|
||||||
|
w := bufio.NewWriter(backupFile)
|
||||||
|
err = api.createBackup(ctx, w, []string{})
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete User
|
||||||
|
_, err = api.db.Queries.DeleteUser(ctx, user)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("DeleteUser DB Error: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
File diff suppressed because it is too large
Load Diff
388
api/auth.go
388
api/auth.go
@@ -1,6 +1,7 @@
|
|||||||
package api
|
package api
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"context"
|
||||||
"crypto/md5"
|
"crypto/md5"
|
||||||
"fmt"
|
"fmt"
|
||||||
"net/http"
|
"net/http"
|
||||||
@@ -11,39 +12,49 @@ import (
|
|||||||
"github.com/gin-contrib/sessions"
|
"github.com/gin-contrib/sessions"
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
log "github.com/sirupsen/logrus"
|
log "github.com/sirupsen/logrus"
|
||||||
"reichard.io/bbank/database"
|
"reichard.io/antholume/database"
|
||||||
|
"reichard.io/antholume/utils"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
// Authorization Data
|
||||||
|
type authData struct {
|
||||||
|
UserName string
|
||||||
|
IsAdmin bool
|
||||||
|
AuthHash string
|
||||||
|
}
|
||||||
|
|
||||||
// KOSync API Auth Headers
|
// KOSync API Auth Headers
|
||||||
type authKOHeader struct {
|
type authKOHeader struct {
|
||||||
AuthUser string `header:"x-auth-user"`
|
AuthUser string `header:"x-auth-user"`
|
||||||
AuthKey string `header:"x-auth-key"`
|
AuthKey string `header:"x-auth-key"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// OPDS Auth Headers
|
func (api *API) authorizeCredentials(ctx context.Context, username string, password string) (auth *authData) {
|
||||||
type authOPDSHeader struct {
|
user, err := api.db.Queries.GetUser(ctx, username)
|
||||||
Authorization string `header:"authorization"`
|
|
||||||
}
|
|
||||||
|
|
||||||
func (api *API) authorizeCredentials(username string, password string) (authorized bool) {
|
|
||||||
user, err := api.DB.Queries.GetUser(api.DB.Ctx, username)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return false
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if match, err := argon2.ComparePasswordAndHash(password, *user.Pass); err != nil || match != true {
|
if match, err := argon2.ComparePasswordAndHash(password, *user.Pass); err != nil || !match {
|
||||||
return false
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
return true
|
// Update auth cache
|
||||||
|
api.userAuthCache[user.ID] = *user.AuthHash
|
||||||
|
|
||||||
|
return &authData{
|
||||||
|
UserName: user.ID,
|
||||||
|
IsAdmin: user.Admin,
|
||||||
|
AuthHash: *user.AuthHash,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (api *API) authKOMiddleware(c *gin.Context) {
|
func (api *API) authKOMiddleware(c *gin.Context) {
|
||||||
session := sessions.Default(c)
|
session := sessions.Default(c)
|
||||||
|
|
||||||
// Check Session First
|
// Check Session First
|
||||||
if user, ok := getSession(session); ok == true {
|
if auth, ok := api.getSession(c, session); ok {
|
||||||
c.Set("AuthorizedUser", user)
|
c.Set("Authorization", auth)
|
||||||
c.Header("Cache-Control", "private")
|
c.Header("Cache-Control", "private")
|
||||||
c.Next()
|
c.Next()
|
||||||
return
|
return
|
||||||
@@ -61,17 +72,18 @@ func (api *API) authKOMiddleware(c *gin.Context) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if authorized := api.authorizeCredentials(rHeader.AuthUser, rHeader.AuthKey); authorized != true {
|
authData := api.authorizeCredentials(c, rHeader.AuthUser, rHeader.AuthKey)
|
||||||
|
if authData == nil {
|
||||||
c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"})
|
c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := setSession(session, rHeader.AuthUser); err != nil {
|
if err := api.setSession(session, *authData); err != nil {
|
||||||
c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"})
|
c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
c.Set("AuthorizedUser", rHeader.AuthUser)
|
c.Set("Authorization", *authData)
|
||||||
c.Header("Cache-Control", "private")
|
c.Header("Cache-Control", "private")
|
||||||
c.Next()
|
c.Next()
|
||||||
}
|
}
|
||||||
@@ -82,19 +94,20 @@ func (api *API) authOPDSMiddleware(c *gin.Context) {
|
|||||||
user, rawPassword, hasAuth := c.Request.BasicAuth()
|
user, rawPassword, hasAuth := c.Request.BasicAuth()
|
||||||
|
|
||||||
// Validate Auth Fields
|
// Validate Auth Fields
|
||||||
if hasAuth != true || user == "" || rawPassword == "" {
|
if !hasAuth || user == "" || rawPassword == "" {
|
||||||
c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Invalid Authorization Headers"})
|
c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Invalid Authorization Headers"})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Validate Auth
|
// Validate Auth
|
||||||
password := fmt.Sprintf("%x", md5.Sum([]byte(rawPassword)))
|
password := fmt.Sprintf("%x", md5.Sum([]byte(rawPassword)))
|
||||||
if authorized := api.authorizeCredentials(user, password); authorized != true {
|
authData := api.authorizeCredentials(c, user, password)
|
||||||
|
if authData == nil {
|
||||||
c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"})
|
c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
c.Set("AuthorizedUser", user)
|
c.Set("Authorization", *authData)
|
||||||
c.Header("Cache-Control", "private")
|
c.Header("Cache-Control", "private")
|
||||||
c.Next()
|
c.Next()
|
||||||
}
|
}
|
||||||
@@ -103,8 +116,8 @@ func (api *API) authWebAppMiddleware(c *gin.Context) {
|
|||||||
session := sessions.Default(c)
|
session := sessions.Default(c)
|
||||||
|
|
||||||
// Check Session
|
// Check Session
|
||||||
if user, ok := getSession(session); ok == true {
|
if auth, ok := api.getSession(c, session); ok {
|
||||||
c.Set("AuthorizedUser", user)
|
c.Set("Authorization", auth)
|
||||||
c.Header("Cache-Control", "private")
|
c.Header("Cache-Control", "private")
|
||||||
c.Next()
|
c.Next()
|
||||||
return
|
return
|
||||||
@@ -112,38 +125,47 @@ func (api *API) authWebAppMiddleware(c *gin.Context) {
|
|||||||
|
|
||||||
c.Redirect(http.StatusFound, "/login")
|
c.Redirect(http.StatusFound, "/login")
|
||||||
c.Abort()
|
c.Abort()
|
||||||
return
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (api *API) authFormLogin(c *gin.Context) {
|
func (api *API) authAdminWebAppMiddleware(c *gin.Context) {
|
||||||
|
if data, _ := c.Get("Authorization"); data != nil {
|
||||||
|
auth := data.(authData)
|
||||||
|
if auth.IsAdmin {
|
||||||
|
c.Next()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
appErrorPage(c, http.StatusUnauthorized, "Admin Permissions Required")
|
||||||
|
c.Abort()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) appAuthLogin(c *gin.Context) {
|
||||||
|
templateVars, _ := api.getBaseTemplateVars("login", c)
|
||||||
|
|
||||||
username := strings.TrimSpace(c.PostForm("username"))
|
username := strings.TrimSpace(c.PostForm("username"))
|
||||||
rawPassword := strings.TrimSpace(c.PostForm("password"))
|
rawPassword := strings.TrimSpace(c.PostForm("password"))
|
||||||
|
|
||||||
if username == "" || rawPassword == "" {
|
if username == "" || rawPassword == "" {
|
||||||
c.HTML(http.StatusUnauthorized, "login", gin.H{
|
templateVars["Error"] = "Invalid Credentials"
|
||||||
"RegistrationEnabled": api.Config.RegistrationEnabled,
|
c.HTML(http.StatusUnauthorized, "page/login", templateVars)
|
||||||
"Error": "Invalid Credentials",
|
|
||||||
})
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// MD5 - KOSync Compatiblity
|
// MD5 - KOSync Compatiblity
|
||||||
password := fmt.Sprintf("%x", md5.Sum([]byte(rawPassword)))
|
password := fmt.Sprintf("%x", md5.Sum([]byte(rawPassword)))
|
||||||
if authorized := api.authorizeCredentials(username, password); authorized != true {
|
authData := api.authorizeCredentials(c, username, password)
|
||||||
c.HTML(http.StatusUnauthorized, "login", gin.H{
|
if authData == nil {
|
||||||
"RegistrationEnabled": api.Config.RegistrationEnabled,
|
templateVars["Error"] = "Invalid Credentials"
|
||||||
"Error": "Invalid Credentials",
|
c.HTML(http.StatusUnauthorized, "page/login", templateVars)
|
||||||
})
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Set Session
|
// Set Session
|
||||||
session := sessions.Default(c)
|
session := sessions.Default(c)
|
||||||
if err := setSession(session, username); err != nil {
|
if err := api.setSession(session, *authData); err != nil {
|
||||||
c.HTML(http.StatusUnauthorized, "login", gin.H{
|
templateVars["Error"] = "Invalid Credentials"
|
||||||
"RegistrationEnabled": api.Config.RegistrationEnabled,
|
c.HTML(http.StatusUnauthorized, "page/login", templateVars)
|
||||||
"Error": "Unknown Error",
|
|
||||||
})
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -151,60 +173,93 @@ func (api *API) authFormLogin(c *gin.Context) {
|
|||||||
c.Redirect(http.StatusFound, "/")
|
c.Redirect(http.StatusFound, "/")
|
||||||
}
|
}
|
||||||
|
|
||||||
func (api *API) authFormRegister(c *gin.Context) {
|
func (api *API) appAuthRegister(c *gin.Context) {
|
||||||
if !api.Config.RegistrationEnabled {
|
if !api.cfg.RegistrationEnabled {
|
||||||
errorPage(c, http.StatusUnauthorized, "Nice try. Registration is disabled.")
|
appErrorPage(c, http.StatusUnauthorized, "Nice try. Registration is disabled.")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
templateVars, _ := api.getBaseTemplateVars("login", c)
|
||||||
|
templateVars["Register"] = true
|
||||||
|
|
||||||
username := strings.TrimSpace(c.PostForm("username"))
|
username := strings.TrimSpace(c.PostForm("username"))
|
||||||
rawPassword := strings.TrimSpace(c.PostForm("password"))
|
rawPassword := strings.TrimSpace(c.PostForm("password"))
|
||||||
|
|
||||||
if username == "" || rawPassword == "" {
|
if username == "" || rawPassword == "" {
|
||||||
c.HTML(http.StatusBadRequest, "login", gin.H{
|
templateVars["Error"] = "Invalid User or Password"
|
||||||
"Register": true,
|
c.HTML(http.StatusBadRequest, "page/login", templateVars)
|
||||||
"Error": "Registration Disabled or User Already Exists",
|
|
||||||
})
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
password := fmt.Sprintf("%x", md5.Sum([]byte(rawPassword)))
|
password := fmt.Sprintf("%x", md5.Sum([]byte(rawPassword)))
|
||||||
|
|
||||||
hashedPassword, err := argon2.CreateHash(password, argon2.DefaultParams)
|
hashedPassword, err := argon2.CreateHash(password, argon2.DefaultParams)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
c.HTML(http.StatusBadRequest, "login", gin.H{
|
templateVars["Error"] = "Registration Disabled or User Already Exists"
|
||||||
"Register": true,
|
c.HTML(http.StatusBadRequest, "page/login", templateVars)
|
||||||
"Error": "Registration Disabled or User Already Exists",
|
|
||||||
})
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
rows, err := api.DB.Queries.CreateUser(api.DB.Ctx, database.CreateUserParams{
|
// Generate auth hash
|
||||||
|
rawAuthHash, err := utils.GenerateToken(64)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Failed to generate user token: ", err)
|
||||||
|
templateVars["Error"] = "Failed to Create User"
|
||||||
|
c.HTML(http.StatusBadRequest, "page/login", templateVars)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get current users
|
||||||
|
currentUsers, err := api.db.Queries.GetUsers(c)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Failed to check all users: ", err)
|
||||||
|
templateVars["Error"] = "Failed to Create User"
|
||||||
|
c.HTML(http.StatusBadRequest, "page/login", templateVars)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Determine if we should be admin
|
||||||
|
isAdmin := false
|
||||||
|
if len(currentUsers) == 0 {
|
||||||
|
isAdmin = true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create user in DB
|
||||||
|
authHash := fmt.Sprintf("%x", rawAuthHash)
|
||||||
|
if rows, err := api.db.Queries.CreateUser(c, database.CreateUserParams{
|
||||||
ID: username,
|
ID: username,
|
||||||
Pass: &hashedPassword,
|
Pass: &hashedPassword,
|
||||||
})
|
AuthHash: &authHash,
|
||||||
|
Admin: isAdmin,
|
||||||
|
}); err != nil {
|
||||||
|
log.Error("CreateUser DB Error:", err)
|
||||||
|
templateVars["Error"] = "Registration Disabled or User Already Exists"
|
||||||
|
c.HTML(http.StatusBadRequest, "page/login", templateVars)
|
||||||
|
return
|
||||||
|
} else if rows == 0 {
|
||||||
|
log.Warn("User Already Exists:", username)
|
||||||
|
templateVars["Error"] = "Registration Disabled or User Already Exists"
|
||||||
|
c.HTML(http.StatusBadRequest, "page/login", templateVars)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
// SQL Error
|
// Get user
|
||||||
|
user, err := api.db.Queries.GetUser(c, username)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
c.HTML(http.StatusBadRequest, "login", gin.H{
|
log.Error("GetUser DB Error:", err)
|
||||||
"Register": true,
|
templateVars["Error"] = "Registration Disabled or User Already Exists"
|
||||||
"Error": "Registration Disabled or User Already Exists",
|
c.HTML(http.StatusBadRequest, "page/login", templateVars)
|
||||||
})
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// User Already Exists
|
// Set session
|
||||||
if rows == 0 {
|
auth := authData{
|
||||||
c.HTML(http.StatusBadRequest, "login", gin.H{
|
UserName: user.ID,
|
||||||
"Register": true,
|
IsAdmin: user.Admin,
|
||||||
"Error": "Registration Disabled or User Already Exists",
|
AuthHash: *user.AuthHash,
|
||||||
})
|
|
||||||
return
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Set Session
|
|
||||||
session := sessions.Default(c)
|
session := sessions.Default(c)
|
||||||
if err := setSession(session, username); err != nil {
|
if err := api.setSession(session, auth); err != nil {
|
||||||
errorPage(c, http.StatusUnauthorized, "Unauthorized.")
|
appErrorPage(c, http.StatusUnauthorized, "Unauthorized.")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -212,41 +267,206 @@ func (api *API) authFormRegister(c *gin.Context) {
|
|||||||
c.Redirect(http.StatusFound, "/")
|
c.Redirect(http.StatusFound, "/")
|
||||||
}
|
}
|
||||||
|
|
||||||
func (api *API) authLogout(c *gin.Context) {
|
func (api *API) appAuthLogout(c *gin.Context) {
|
||||||
session := sessions.Default(c)
|
session := sessions.Default(c)
|
||||||
session.Clear()
|
session.Clear()
|
||||||
session.Save()
|
if err := session.Save(); err != nil {
|
||||||
|
log.Error("unable to save session")
|
||||||
|
}
|
||||||
|
|
||||||
c.Redirect(http.StatusFound, "/login")
|
c.Redirect(http.StatusFound, "/login")
|
||||||
}
|
}
|
||||||
|
|
||||||
func (api *API) demoModeAppError(c *gin.Context) {
|
func (api *API) koAuthRegister(c *gin.Context) {
|
||||||
errorPage(c, http.StatusUnauthorized, "Not Allowed in Demo Mode")
|
if !api.cfg.RegistrationEnabled {
|
||||||
|
c.AbortWithStatus(http.StatusConflict)
|
||||||
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
func (api *API) demoModeJSONError(c *gin.Context) {
|
var rUser requestUser
|
||||||
c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Not Allowed in Demo Mode"})
|
if err := c.ShouldBindJSON(&rUser); err != nil {
|
||||||
|
log.Error("Invalid JSON Bind")
|
||||||
|
apiErrorPage(c, http.StatusBadRequest, "Invalid User Data")
|
||||||
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
func getSession(session sessions.Session) (user string, ok bool) {
|
if rUser.Username == "" || rUser.Password == "" {
|
||||||
// Check Session
|
log.Error("Invalid User - Empty Username or Password")
|
||||||
|
apiErrorPage(c, http.StatusBadRequest, "Invalid User Data")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate password hash
|
||||||
|
hashedPassword, err := argon2.CreateHash(rUser.Password, argon2.DefaultParams)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Argon2 Hash Failure:", err)
|
||||||
|
apiErrorPage(c, http.StatusBadRequest, "Unknown Error")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate auth hash
|
||||||
|
rawAuthHash, err := utils.GenerateToken(64)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Failed to generate user token: ", err)
|
||||||
|
apiErrorPage(c, http.StatusBadRequest, "Unknown Error")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get current users
|
||||||
|
currentUsers, err := api.db.Queries.GetUsers(c)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Failed to check all users: ", err)
|
||||||
|
apiErrorPage(c, http.StatusBadRequest, "Failed to Create User")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Determine if we should be admin
|
||||||
|
isAdmin := false
|
||||||
|
if len(currentUsers) == 0 {
|
||||||
|
isAdmin = true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create user
|
||||||
|
authHash := fmt.Sprintf("%x", rawAuthHash)
|
||||||
|
if rows, err := api.db.Queries.CreateUser(c, database.CreateUserParams{
|
||||||
|
ID: rUser.Username,
|
||||||
|
Pass: &hashedPassword,
|
||||||
|
AuthHash: &authHash,
|
||||||
|
Admin: isAdmin,
|
||||||
|
}); err != nil {
|
||||||
|
log.Error("CreateUser DB Error:", err)
|
||||||
|
apiErrorPage(c, http.StatusBadRequest, "Invalid User Data")
|
||||||
|
return
|
||||||
|
} else if rows == 0 {
|
||||||
|
log.Error("User Already Exists:", rUser.Username)
|
||||||
|
apiErrorPage(c, http.StatusBadRequest, "User Already Exists")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusCreated, gin.H{
|
||||||
|
"username": rUser.Username,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) getSession(ctx context.Context, session sessions.Session) (auth authData, ok bool) {
|
||||||
|
// Get Session
|
||||||
authorizedUser := session.Get("authorizedUser")
|
authorizedUser := session.Get("authorizedUser")
|
||||||
if authorizedUser == nil {
|
isAdmin := session.Get("isAdmin")
|
||||||
return "", false
|
expiresAt := session.Get("expiresAt")
|
||||||
|
authHash := session.Get("authHash")
|
||||||
|
if authorizedUser == nil || isAdmin == nil || expiresAt == nil || authHash == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create Auth Object
|
||||||
|
auth = authData{
|
||||||
|
UserName: authorizedUser.(string),
|
||||||
|
IsAdmin: isAdmin.(bool),
|
||||||
|
AuthHash: authHash.(string),
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate Auth Hash
|
||||||
|
correctAuthHash, err := api.getUserAuthHash(ctx, auth.UserName)
|
||||||
|
if err != nil || correctAuthHash != auth.AuthHash {
|
||||||
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Refresh
|
// Refresh
|
||||||
expiresAt := session.Get("expiresAt")
|
if expiresAt.(int64)-time.Now().Unix() < 60*60*24 {
|
||||||
if expiresAt != nil && expiresAt.(int64)-time.Now().Unix() < 60*60*24 {
|
log.Info("Refreshing Session")
|
||||||
log.Info("[getSession] Refreshing Session")
|
if err := api.setSession(session, auth); err != nil {
|
||||||
setSession(session, authorizedUser.(string))
|
log.Error("unable to get session")
|
||||||
|
return
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return authorizedUser.(string), true
|
// Authorized
|
||||||
|
return auth, true
|
||||||
}
|
}
|
||||||
|
|
||||||
func setSession(session sessions.Session, user string) error {
|
func (api *API) setSession(session sessions.Session, auth authData) error {
|
||||||
// Set Session Cookie
|
// Set Session Cookie
|
||||||
session.Set("authorizedUser", user)
|
session.Set("authorizedUser", auth.UserName)
|
||||||
|
session.Set("isAdmin", auth.IsAdmin)
|
||||||
session.Set("expiresAt", time.Now().Unix()+(60*60*24*7))
|
session.Set("expiresAt", time.Now().Unix()+(60*60*24*7))
|
||||||
|
session.Set("authHash", auth.AuthHash)
|
||||||
|
|
||||||
return session.Save()
|
return session.Save()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (api *API) getUserAuthHash(ctx context.Context, username string) (string, error) {
|
||||||
|
// Return Cache
|
||||||
|
if api.userAuthCache[username] != "" {
|
||||||
|
return api.userAuthCache[username], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get DB
|
||||||
|
user, err := api.db.Queries.GetUser(ctx, username)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("GetUser DB Error:", err)
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update Cache
|
||||||
|
api.userAuthCache[username] = *user.AuthHash
|
||||||
|
|
||||||
|
return api.userAuthCache[username], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) rotateAllAuthHashes(ctx context.Context) error {
|
||||||
|
// Do Transaction
|
||||||
|
tx, err := api.db.DB.Begin()
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Transaction Begin DB Error: ", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Defer & Start Transaction
|
||||||
|
defer func() {
|
||||||
|
if err := tx.Rollback(); err != nil {
|
||||||
|
log.Error("DB Rollback Error:", err)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
qtx := api.db.Queries.WithTx(tx)
|
||||||
|
|
||||||
|
users, err := qtx.GetUsers(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update Users
|
||||||
|
newAuthHashCache := make(map[string]string, 0)
|
||||||
|
for _, user := range users {
|
||||||
|
// Generate Auth Hash
|
||||||
|
rawAuthHash, err := utils.GenerateToken(64)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update User
|
||||||
|
authHash := fmt.Sprintf("%x", rawAuthHash)
|
||||||
|
if _, err = qtx.UpdateUser(ctx, database.UpdateUserParams{
|
||||||
|
UserID: user.ID,
|
||||||
|
AuthHash: &authHash,
|
||||||
|
Admin: user.Admin,
|
||||||
|
}); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save New Hash Cache
|
||||||
|
newAuthHashCache[user.ID] = fmt.Sprintf("%x", rawAuthHash)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Commit Transaction
|
||||||
|
if err := tx.Commit(); err != nil {
|
||||||
|
log.Error("Transaction Commit DB Error: ", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Transaction Succeeded -> Update Cache
|
||||||
|
for user, hash := range newAuthHashCache {
|
||||||
|
api.userAuthCache[user] = hash
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|||||||
151
api/common.go
Normal file
151
api/common.go
Normal file
@@ -0,0 +1,151 @@
|
|||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
log "github.com/sirupsen/logrus"
|
||||||
|
"reichard.io/antholume/database"
|
||||||
|
"reichard.io/antholume/metadata"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (api *API) createDownloadDocumentHandler(errorFunc func(*gin.Context, int, string)) func(*gin.Context) {
|
||||||
|
return func(c *gin.Context) {
|
||||||
|
var rDoc requestDocumentID
|
||||||
|
if err := c.ShouldBindUri(&rDoc); err != nil {
|
||||||
|
log.Error("Invalid URI Bind")
|
||||||
|
errorFunc(c, http.StatusBadRequest, "Invalid Request")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get Document
|
||||||
|
document, err := api.db.Queries.GetDocument(c, rDoc.DocumentID)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("GetDocument DB Error:", err)
|
||||||
|
errorFunc(c, http.StatusBadRequest, "Unknown Document")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if document.Filepath == nil {
|
||||||
|
log.Error("Document Doesn't Have File:", rDoc.DocumentID)
|
||||||
|
errorFunc(c, http.StatusBadRequest, "Document Doesn't Exist")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Derive Basepath
|
||||||
|
basepath := filepath.Join(api.cfg.DataPath, "documents")
|
||||||
|
if document.Basepath != nil && *document.Basepath != "" {
|
||||||
|
basepath = *document.Basepath
|
||||||
|
}
|
||||||
|
|
||||||
|
// Derive Storage Location
|
||||||
|
filePath := filepath.Join(basepath, *document.Filepath)
|
||||||
|
|
||||||
|
// Validate File Exists
|
||||||
|
_, err = os.Stat(filePath)
|
||||||
|
if os.IsNotExist(err) {
|
||||||
|
log.Error("File should but doesn't exist: ", err)
|
||||||
|
errorFunc(c, http.StatusBadRequest, "Document Doesn't Exist")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Force Download
|
||||||
|
c.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", filepath.Base(*document.Filepath)))
|
||||||
|
c.File(filePath)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) createGetCoverHandler(errorFunc func(*gin.Context, int, string)) func(*gin.Context) {
|
||||||
|
return func(c *gin.Context) {
|
||||||
|
var rDoc requestDocumentID
|
||||||
|
if err := c.ShouldBindUri(&rDoc); err != nil {
|
||||||
|
log.Error("Invalid URI Bind")
|
||||||
|
errorFunc(c, http.StatusNotFound, "Invalid cover.")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate Document Exists in DB
|
||||||
|
document, err := api.db.Queries.GetDocument(c, rDoc.DocumentID)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("GetDocument DB Error:", err)
|
||||||
|
errorFunc(c, http.StatusInternalServerError, fmt.Sprintf("GetDocument DB Error: %v", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle Identified Document
|
||||||
|
if document.Coverfile != nil {
|
||||||
|
if *document.Coverfile == "UNKNOWN" {
|
||||||
|
c.FileFromFS("assets/images/no-cover.jpg", http.FS(api.assets))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Derive Path
|
||||||
|
safePath := filepath.Join(api.cfg.DataPath, "covers", *document.Coverfile)
|
||||||
|
|
||||||
|
// Validate File Exists
|
||||||
|
_, err = os.Stat(safePath)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("File should but doesn't exist: ", err)
|
||||||
|
c.FileFromFS("assets/images/no-cover.jpg", http.FS(api.assets))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.File(safePath)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Attempt Metadata
|
||||||
|
var coverDir string = filepath.Join(api.cfg.DataPath, "covers")
|
||||||
|
var coverFile string = "UNKNOWN"
|
||||||
|
|
||||||
|
// Identify Documents & Save Covers
|
||||||
|
metadataResults, err := metadata.SearchMetadata(metadata.SOURCE_GBOOK, metadata.MetadataInfo{
|
||||||
|
Title: document.Title,
|
||||||
|
Author: document.Author,
|
||||||
|
})
|
||||||
|
|
||||||
|
if err == nil && len(metadataResults) > 0 && metadataResults[0].ID != nil {
|
||||||
|
firstResult := metadataResults[0]
|
||||||
|
|
||||||
|
// Save Cover
|
||||||
|
fileName, err := metadata.CacheCover(*firstResult.ID, coverDir, document.ID, false)
|
||||||
|
if err == nil {
|
||||||
|
coverFile = *fileName
|
||||||
|
}
|
||||||
|
|
||||||
|
// Store First Metadata Result
|
||||||
|
if _, err = api.db.Queries.AddMetadata(c, database.AddMetadataParams{
|
||||||
|
DocumentID: document.ID,
|
||||||
|
Title: firstResult.Title,
|
||||||
|
Author: firstResult.Author,
|
||||||
|
Description: firstResult.Description,
|
||||||
|
Gbid: firstResult.ID,
|
||||||
|
Olid: nil,
|
||||||
|
Isbn10: firstResult.ISBN10,
|
||||||
|
Isbn13: firstResult.ISBN13,
|
||||||
|
}); err != nil {
|
||||||
|
log.Error("AddMetadata DB Error:", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Upsert Document
|
||||||
|
if _, err = api.db.Queries.UpsertDocument(c, database.UpsertDocumentParams{
|
||||||
|
ID: document.ID,
|
||||||
|
Coverfile: &coverFile,
|
||||||
|
}); err != nil {
|
||||||
|
log.Warn("UpsertDocument DB Error:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return Unknown Cover
|
||||||
|
if coverFile == "UNKNOWN" {
|
||||||
|
c.FileFromFS("assets/images/no-cover.jpg", http.FS(api.assets))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
coverFilePath := filepath.Join(coverDir, coverFile)
|
||||||
|
c.File(coverFilePath)
|
||||||
|
}
|
||||||
|
}
|
||||||
444
api/ko-routes.go
444
api/ko-routes.go
@@ -10,16 +10,12 @@ import (
|
|||||||
"net/http"
|
"net/http"
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"strings"
|
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
argon2 "github.com/alexedwards/argon2id"
|
|
||||||
"github.com/gabriel-vasile/mimetype"
|
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
log "github.com/sirupsen/logrus"
|
log "github.com/sirupsen/logrus"
|
||||||
"golang.org/x/exp/slices"
|
"reichard.io/antholume/database"
|
||||||
"reichard.io/bbank/database"
|
"reichard.io/antholume/metadata"
|
||||||
"reichard.io/bbank/metadata"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type activityItem struct {
|
type activityItem struct {
|
||||||
@@ -75,139 +71,91 @@ type requestDocumentID struct {
|
|||||||
DocumentID string `uri:"document" binding:"required"`
|
DocumentID string `uri:"document" binding:"required"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (api *API) authorizeUser(c *gin.Context) {
|
func (api *API) koAuthorizeUser(c *gin.Context) {
|
||||||
c.JSON(200, gin.H{
|
koJSON(c, 200, gin.H{
|
||||||
"authorized": "OK",
|
"authorized": "OK",
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (api *API) createUser(c *gin.Context) {
|
func (api *API) koSetProgress(c *gin.Context) {
|
||||||
if !api.Config.RegistrationEnabled {
|
var auth authData
|
||||||
c.AbortWithStatus(http.StatusConflict)
|
if data, _ := c.Get("Authorization"); data != nil {
|
||||||
return
|
auth = data.(authData)
|
||||||
}
|
}
|
||||||
|
|
||||||
var rUser requestUser
|
|
||||||
if err := c.ShouldBindJSON(&rUser); err != nil {
|
|
||||||
log.Error("[createUser] Invalid JSON Bind")
|
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid User Data"})
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if rUser.Username == "" || rUser.Password == "" {
|
|
||||||
log.Error("[createUser] Invalid User - Empty Username or Password")
|
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid User Data"})
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
hashedPassword, err := argon2.CreateHash(rUser.Password, argon2.DefaultParams)
|
|
||||||
if err != nil {
|
|
||||||
log.Error("[createUser] Argon2 Hash Failure:", err)
|
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Error"})
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
rows, err := api.DB.Queries.CreateUser(api.DB.Ctx, database.CreateUserParams{
|
|
||||||
ID: rUser.Username,
|
|
||||||
Pass: &hashedPassword,
|
|
||||||
})
|
|
||||||
if err != nil {
|
|
||||||
log.Error("[createUser] CreateUser DB Error:", err)
|
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid User Data"})
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// User Exists
|
|
||||||
if rows == 0 {
|
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "User Already Exists"})
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
c.JSON(http.StatusCreated, gin.H{
|
|
||||||
"username": rUser.Username,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func (api *API) setProgress(c *gin.Context) {
|
|
||||||
rUser, _ := c.Get("AuthorizedUser")
|
|
||||||
|
|
||||||
var rPosition requestPosition
|
var rPosition requestPosition
|
||||||
if err := c.ShouldBindJSON(&rPosition); err != nil {
|
if err := c.ShouldBindJSON(&rPosition); err != nil {
|
||||||
log.Error("[setProgress] Invalid JSON Bind")
|
log.Error("Invalid JSON Bind")
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Progress Data"})
|
apiErrorPage(c, http.StatusBadRequest, "Invalid Progress Data")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Upsert Device
|
// Upsert Device
|
||||||
if _, err := api.DB.Queries.UpsertDevice(api.DB.Ctx, database.UpsertDeviceParams{
|
if _, err := api.db.Queries.UpsertDevice(c, database.UpsertDeviceParams{
|
||||||
ID: rPosition.DeviceID,
|
ID: rPosition.DeviceID,
|
||||||
UserID: rUser.(string),
|
UserID: auth.UserName,
|
||||||
DeviceName: rPosition.Device,
|
DeviceName: rPosition.Device,
|
||||||
LastSynced: time.Now().UTC().Format(time.RFC3339),
|
LastSynced: time.Now().UTC().Format(time.RFC3339),
|
||||||
}); err != nil {
|
}); err != nil {
|
||||||
log.Error("[setProgress] UpsertDevice DB Error:", err)
|
log.Error("UpsertDevice DB Error:", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Upsert Document
|
// Upsert Document
|
||||||
if _, err := api.DB.Queries.UpsertDocument(api.DB.Ctx, database.UpsertDocumentParams{
|
if _, err := api.db.Queries.UpsertDocument(c, database.UpsertDocumentParams{
|
||||||
ID: rPosition.DocumentID,
|
ID: rPosition.DocumentID,
|
||||||
}); err != nil {
|
}); err != nil {
|
||||||
log.Error("[setProgress] UpsertDocument DB Error:", err)
|
log.Error("UpsertDocument DB Error:", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create or Replace Progress
|
// Create or Replace Progress
|
||||||
progress, err := api.DB.Queries.UpdateProgress(api.DB.Ctx, database.UpdateProgressParams{
|
progress, err := api.db.Queries.UpdateProgress(c, database.UpdateProgressParams{
|
||||||
Percentage: rPosition.Percentage,
|
Percentage: rPosition.Percentage,
|
||||||
DocumentID: rPosition.DocumentID,
|
DocumentID: rPosition.DocumentID,
|
||||||
DeviceID: rPosition.DeviceID,
|
DeviceID: rPosition.DeviceID,
|
||||||
UserID: rUser.(string),
|
UserID: auth.UserName,
|
||||||
Progress: rPosition.Progress,
|
Progress: rPosition.Progress,
|
||||||
})
|
})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error("[setProgress] UpdateProgress DB Error:", err)
|
log.Error("UpdateProgress DB Error:", err)
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
|
apiErrorPage(c, http.StatusBadRequest, "Invalid Request")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Update Statistic
|
koJSON(c, http.StatusOK, gin.H{
|
||||||
log.Info("[setProgress] UpdateDocumentUserStatistic Running...")
|
|
||||||
if err := api.DB.UpdateDocumentUserStatistic(rPosition.DocumentID, rUser.(string)); err != nil {
|
|
||||||
log.Error("[setProgress] UpdateDocumentUserStatistic Error:", err)
|
|
||||||
}
|
|
||||||
log.Info("[setProgress] UpdateDocumentUserStatistic Complete")
|
|
||||||
|
|
||||||
c.JSON(http.StatusOK, gin.H{
|
|
||||||
"document": progress.DocumentID,
|
"document": progress.DocumentID,
|
||||||
"timestamp": progress.CreatedAt,
|
"timestamp": progress.CreatedAt,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (api *API) getProgress(c *gin.Context) {
|
func (api *API) koGetProgress(c *gin.Context) {
|
||||||
rUser, _ := c.Get("AuthorizedUser")
|
var auth authData
|
||||||
|
if data, _ := c.Get("Authorization"); data != nil {
|
||||||
|
auth = data.(authData)
|
||||||
|
}
|
||||||
|
|
||||||
var rDocID requestDocumentID
|
var rDocID requestDocumentID
|
||||||
if err := c.ShouldBindUri(&rDocID); err != nil {
|
if err := c.ShouldBindUri(&rDocID); err != nil {
|
||||||
log.Error("[getProgress] Invalid URI Bind")
|
log.Error("Invalid URI Bind")
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
|
apiErrorPage(c, http.StatusBadRequest, "Invalid Request")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
progress, err := api.DB.Queries.GetProgress(api.DB.Ctx, database.GetProgressParams{
|
progress, err := api.db.Queries.GetDocumentProgress(c, database.GetDocumentProgressParams{
|
||||||
DocumentID: rDocID.DocumentID,
|
DocumentID: rDocID.DocumentID,
|
||||||
UserID: rUser.(string),
|
UserID: auth.UserName,
|
||||||
})
|
})
|
||||||
|
|
||||||
if err == sql.ErrNoRows {
|
if err == sql.ErrNoRows {
|
||||||
// Not Found
|
// Not Found
|
||||||
c.JSON(http.StatusOK, gin.H{})
|
koJSON(c, http.StatusOK, gin.H{})
|
||||||
return
|
return
|
||||||
} else if err != nil {
|
} else if err != nil {
|
||||||
log.Error("[getProgress] GetProgress DB Error:", err)
|
log.Error("GetDocumentProgress DB Error:", err)
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Document"})
|
apiErrorPage(c, http.StatusBadRequest, "Invalid Document")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
c.JSON(http.StatusOK, gin.H{
|
koJSON(c, http.StatusOK, gin.H{
|
||||||
"document": progress.DocumentID,
|
"document": progress.DocumentID,
|
||||||
"percentage": progress.Percentage,
|
"percentage": progress.Percentage,
|
||||||
"progress": progress.Progress,
|
"progress": progress.Progress,
|
||||||
@@ -216,21 +164,24 @@ func (api *API) getProgress(c *gin.Context) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (api *API) addActivities(c *gin.Context) {
|
func (api *API) koAddActivities(c *gin.Context) {
|
||||||
rUser, _ := c.Get("AuthorizedUser")
|
var auth authData
|
||||||
|
if data, _ := c.Get("Authorization"); data != nil {
|
||||||
|
auth = data.(authData)
|
||||||
|
}
|
||||||
|
|
||||||
var rActivity requestActivity
|
var rActivity requestActivity
|
||||||
if err := c.ShouldBindJSON(&rActivity); err != nil {
|
if err := c.ShouldBindJSON(&rActivity); err != nil {
|
||||||
log.Error("[addActivity] Invalid JSON Bind")
|
log.Error("Invalid JSON Bind")
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Activity"})
|
apiErrorPage(c, http.StatusBadRequest, "Invalid Activity")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Do Transaction
|
// Do Transaction
|
||||||
tx, err := api.DB.DB.Begin()
|
tx, err := api.db.DB.Begin()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error("[addActivities] Transaction Begin DB Error:", err)
|
log.Error("Transaction Begin DB Error:", err)
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Error"})
|
apiErrorPage(c, http.StatusBadRequest, "Unknown Error")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -242,36 +193,40 @@ func (api *API) addActivities(c *gin.Context) {
|
|||||||
allDocuments := getKeys(allDocumentsMap)
|
allDocuments := getKeys(allDocumentsMap)
|
||||||
|
|
||||||
// Defer & Start Transaction
|
// Defer & Start Transaction
|
||||||
defer tx.Rollback()
|
defer func() {
|
||||||
qtx := api.DB.Queries.WithTx(tx)
|
if err := tx.Rollback(); err != nil {
|
||||||
|
log.Error("DB Rollback Error:", err)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
qtx := api.db.Queries.WithTx(tx)
|
||||||
|
|
||||||
// Upsert Documents
|
// Upsert Documents
|
||||||
for _, doc := range allDocuments {
|
for _, doc := range allDocuments {
|
||||||
if _, err := qtx.UpsertDocument(api.DB.Ctx, database.UpsertDocumentParams{
|
if _, err := qtx.UpsertDocument(c, database.UpsertDocumentParams{
|
||||||
ID: doc,
|
ID: doc,
|
||||||
}); err != nil {
|
}); err != nil {
|
||||||
log.Error("[addActivities] UpsertDocument DB Error:", err)
|
log.Error("UpsertDocument DB Error:", err)
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Document"})
|
apiErrorPage(c, http.StatusBadRequest, "Invalid Document")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Upsert Device
|
// Upsert Device
|
||||||
if _, err = qtx.UpsertDevice(api.DB.Ctx, database.UpsertDeviceParams{
|
if _, err = qtx.UpsertDevice(c, database.UpsertDeviceParams{
|
||||||
ID: rActivity.DeviceID,
|
ID: rActivity.DeviceID,
|
||||||
UserID: rUser.(string),
|
UserID: auth.UserName,
|
||||||
DeviceName: rActivity.Device,
|
DeviceName: rActivity.Device,
|
||||||
LastSynced: time.Now().UTC().Format(time.RFC3339),
|
LastSynced: time.Now().UTC().Format(time.RFC3339),
|
||||||
}); err != nil {
|
}); err != nil {
|
||||||
log.Error("[addActivities] UpsertDevice DB Error:", err)
|
log.Error("UpsertDevice DB Error:", err)
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Device"})
|
apiErrorPage(c, http.StatusBadRequest, "Invalid Device")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add All Activity
|
// Add All Activity
|
||||||
for _, item := range rActivity.Activity {
|
for _, item := range rActivity.Activity {
|
||||||
if _, err := qtx.AddActivity(api.DB.Ctx, database.AddActivityParams{
|
if _, err := qtx.AddActivity(c, database.AddActivityParams{
|
||||||
UserID: rUser.(string),
|
UserID: auth.UserName,
|
||||||
DocumentID: item.DocumentID,
|
DocumentID: item.DocumentID,
|
||||||
DeviceID: rActivity.DeviceID,
|
DeviceID: rActivity.DeviceID,
|
||||||
StartTime: time.Unix(int64(item.StartTime), 0).UTC().Format(time.RFC3339),
|
StartTime: time.Unix(int64(item.StartTime), 0).UTC().Format(time.RFC3339),
|
||||||
@@ -279,104 +234,102 @@ func (api *API) addActivities(c *gin.Context) {
|
|||||||
StartPercentage: float64(item.Page) / float64(item.Pages),
|
StartPercentage: float64(item.Page) / float64(item.Pages),
|
||||||
EndPercentage: float64(item.Page+1) / float64(item.Pages),
|
EndPercentage: float64(item.Page+1) / float64(item.Pages),
|
||||||
}); err != nil {
|
}); err != nil {
|
||||||
log.Error("[addActivities] AddActivity DB Error:", err)
|
log.Error("AddActivity DB Error:", err)
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Activity"})
|
apiErrorPage(c, http.StatusBadRequest, "Invalid Activity")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Commit Transaction
|
// Commit Transaction
|
||||||
if err := tx.Commit(); err != nil {
|
if err := tx.Commit(); err != nil {
|
||||||
log.Error("[addActivities] Transaction Commit DB Error:", err)
|
log.Error("Transaction Commit DB Error:", err)
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Error"})
|
apiErrorPage(c, http.StatusBadRequest, "Unknown Error")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Update Statistic
|
koJSON(c, http.StatusOK, gin.H{
|
||||||
for _, doc := range allDocuments {
|
|
||||||
log.Info("[addActivities] UpdateDocumentUserStatistic Running...")
|
|
||||||
if err := api.DB.UpdateDocumentUserStatistic(doc, rUser.(string)); err != nil {
|
|
||||||
log.Error("[addActivities] UpdateDocumentUserStatistic Error:", err)
|
|
||||||
}
|
|
||||||
log.Info("[addActivities] UpdateDocumentUserStatistic Complete")
|
|
||||||
}
|
|
||||||
|
|
||||||
c.JSON(http.StatusOK, gin.H{
|
|
||||||
"added": len(rActivity.Activity),
|
"added": len(rActivity.Activity),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (api *API) checkActivitySync(c *gin.Context) {
|
func (api *API) koCheckActivitySync(c *gin.Context) {
|
||||||
rUser, _ := c.Get("AuthorizedUser")
|
var auth authData
|
||||||
|
if data, _ := c.Get("Authorization"); data != nil {
|
||||||
|
auth = data.(authData)
|
||||||
|
}
|
||||||
|
|
||||||
var rCheckActivity requestCheckActivitySync
|
var rCheckActivity requestCheckActivitySync
|
||||||
if err := c.ShouldBindJSON(&rCheckActivity); err != nil {
|
if err := c.ShouldBindJSON(&rCheckActivity); err != nil {
|
||||||
log.Error("[checkActivitySync] Invalid JSON Bind")
|
log.Error("Invalid JSON Bind")
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
|
apiErrorPage(c, http.StatusBadRequest, "Invalid Request")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Upsert Device
|
// Upsert Device
|
||||||
if _, err := api.DB.Queries.UpsertDevice(api.DB.Ctx, database.UpsertDeviceParams{
|
if _, err := api.db.Queries.UpsertDevice(c, database.UpsertDeviceParams{
|
||||||
ID: rCheckActivity.DeviceID,
|
ID: rCheckActivity.DeviceID,
|
||||||
UserID: rUser.(string),
|
UserID: auth.UserName,
|
||||||
DeviceName: rCheckActivity.Device,
|
DeviceName: rCheckActivity.Device,
|
||||||
LastSynced: time.Now().UTC().Format(time.RFC3339),
|
LastSynced: time.Now().UTC().Format(time.RFC3339),
|
||||||
}); err != nil {
|
}); err != nil {
|
||||||
log.Error("[checkActivitySync] UpsertDevice DB Error", err)
|
log.Error("UpsertDevice DB Error", err)
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Device"})
|
apiErrorPage(c, http.StatusBadRequest, "Invalid Device")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get Last Device Activity
|
// Get Last Device Activity
|
||||||
lastActivity, err := api.DB.Queries.GetLastActivity(api.DB.Ctx, database.GetLastActivityParams{
|
lastActivity, err := api.db.Queries.GetLastActivity(c, database.GetLastActivityParams{
|
||||||
UserID: rUser.(string),
|
UserID: auth.UserName,
|
||||||
DeviceID: rCheckActivity.DeviceID,
|
DeviceID: rCheckActivity.DeviceID,
|
||||||
})
|
})
|
||||||
if err == sql.ErrNoRows {
|
if err == sql.ErrNoRows {
|
||||||
lastActivity = time.UnixMilli(0).Format(time.RFC3339)
|
lastActivity = time.UnixMilli(0).Format(time.RFC3339)
|
||||||
} else if err != nil {
|
} else if err != nil {
|
||||||
log.Error("[checkActivitySync] GetLastActivity DB Error:", err)
|
log.Error("GetLastActivity DB Error:", err)
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Error"})
|
apiErrorPage(c, http.StatusBadRequest, "Unknown Error")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Parse Time
|
// Parse Time
|
||||||
parsedTime, err := time.Parse(time.RFC3339, lastActivity)
|
parsedTime, err := time.Parse(time.RFC3339, lastActivity)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error("[checkActivitySync] Time Parse Error:", err)
|
log.Error("Time Parse Error:", err)
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Error"})
|
apiErrorPage(c, http.StatusBadRequest, "Unknown Error")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
c.JSON(http.StatusOK, gin.H{
|
koJSON(c, http.StatusOK, gin.H{
|
||||||
"last_sync": parsedTime.Unix(),
|
"last_sync": parsedTime.Unix(),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (api *API) addDocuments(c *gin.Context) {
|
func (api *API) koAddDocuments(c *gin.Context) {
|
||||||
var rNewDocs requestDocument
|
var rNewDocs requestDocument
|
||||||
if err := c.ShouldBindJSON(&rNewDocs); err != nil {
|
if err := c.ShouldBindJSON(&rNewDocs); err != nil {
|
||||||
log.Error("[addDocuments] Invalid JSON Bind")
|
log.Error("Invalid JSON Bind")
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Document(s)"})
|
apiErrorPage(c, http.StatusBadRequest, "Invalid Document(s)")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Do Transaction
|
// Do Transaction
|
||||||
tx, err := api.DB.DB.Begin()
|
tx, err := api.db.DB.Begin()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error("[addDocuments] Transaction Begin DB Error:", err)
|
log.Error("Transaction Begin DB Error:", err)
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Error"})
|
apiErrorPage(c, http.StatusBadRequest, "Unknown Error")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Defer & Start Transaction
|
// Defer & Start Transaction
|
||||||
defer tx.Rollback()
|
defer func() {
|
||||||
qtx := api.DB.Queries.WithTx(tx)
|
if err := tx.Rollback(); err != nil {
|
||||||
|
log.Error("DB Rollback Error:", err)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
qtx := api.db.Queries.WithTx(tx)
|
||||||
|
|
||||||
// Upsert Documents
|
// Upsert Documents
|
||||||
for _, doc := range rNewDocs.Documents {
|
for _, doc := range rNewDocs.Documents {
|
||||||
_, err := qtx.UpsertDocument(api.DB.Ctx, database.UpsertDocumentParams{
|
_, err := qtx.UpsertDocument(c, database.UpsertDocumentParams{
|
||||||
ID: doc.ID,
|
ID: doc.ID,
|
||||||
Title: api.sanitizeInput(doc.Title),
|
Title: api.sanitizeInput(doc.Title),
|
||||||
Author: api.sanitizeInput(doc.Author),
|
Author: api.sanitizeInput(doc.Author),
|
||||||
@@ -386,78 +339,78 @@ func (api *API) addDocuments(c *gin.Context) {
|
|||||||
Description: api.sanitizeInput(doc.Description),
|
Description: api.sanitizeInput(doc.Description),
|
||||||
})
|
})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error("[addDocuments] UpsertDocument DB Error:", err)
|
log.Error("UpsertDocument DB Error:", err)
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Document"})
|
apiErrorPage(c, http.StatusBadRequest, "Invalid Document")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Commit Transaction
|
// Commit Transaction
|
||||||
if err := tx.Commit(); err != nil {
|
if err := tx.Commit(); err != nil {
|
||||||
log.Error("[addDocuments] Transaction Commit DB Error:", err)
|
log.Error("Transaction Commit DB Error:", err)
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Error"})
|
apiErrorPage(c, http.StatusBadRequest, "Unknown Error")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
c.JSON(http.StatusOK, gin.H{
|
koJSON(c, http.StatusOK, gin.H{
|
||||||
"changed": len(rNewDocs.Documents),
|
"changed": len(rNewDocs.Documents),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (api *API) checkDocumentsSync(c *gin.Context) {
|
func (api *API) koCheckDocumentsSync(c *gin.Context) {
|
||||||
rUser, _ := c.Get("AuthorizedUser")
|
var auth authData
|
||||||
|
if data, _ := c.Get("Authorization"); data != nil {
|
||||||
|
auth = data.(authData)
|
||||||
|
}
|
||||||
|
|
||||||
var rCheckDocs requestCheckDocumentSync
|
var rCheckDocs requestCheckDocumentSync
|
||||||
if err := c.ShouldBindJSON(&rCheckDocs); err != nil {
|
if err := c.ShouldBindJSON(&rCheckDocs); err != nil {
|
||||||
log.Error("[checkDocumentsSync] Invalid JSON Bind")
|
log.Error("Invalid JSON Bind")
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
|
apiErrorPage(c, http.StatusBadRequest, "Invalid Request")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Upsert Device
|
// Upsert Device
|
||||||
_, err := api.DB.Queries.UpsertDevice(api.DB.Ctx, database.UpsertDeviceParams{
|
_, err := api.db.Queries.UpsertDevice(c, database.UpsertDeviceParams{
|
||||||
ID: rCheckDocs.DeviceID,
|
ID: rCheckDocs.DeviceID,
|
||||||
UserID: rUser.(string),
|
UserID: auth.UserName,
|
||||||
DeviceName: rCheckDocs.Device,
|
DeviceName: rCheckDocs.Device,
|
||||||
LastSynced: time.Now().UTC().Format(time.RFC3339),
|
LastSynced: time.Now().UTC().Format(time.RFC3339),
|
||||||
})
|
})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error("[checkDocumentsSync] UpsertDevice DB Error", err)
|
log.Error("UpsertDevice DB Error", err)
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Device"})
|
apiErrorPage(c, http.StatusBadRequest, "Invalid Device")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
missingDocs := []database.Document{}
|
|
||||||
deletedDocIDs := []string{}
|
|
||||||
|
|
||||||
// Get Missing Documents
|
// Get Missing Documents
|
||||||
missingDocs, err = api.DB.Queries.GetMissingDocuments(api.DB.Ctx, rCheckDocs.Have)
|
missingDocs, err := api.db.Queries.GetMissingDocuments(c, rCheckDocs.Have)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error("[checkDocumentsSync] GetMissingDocuments DB Error", err)
|
log.Error("GetMissingDocuments DB Error", err)
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
|
apiErrorPage(c, http.StatusBadRequest, "Invalid Request")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get Deleted Documents
|
// Get Deleted Documents
|
||||||
deletedDocIDs, err = api.DB.Queries.GetDeletedDocuments(api.DB.Ctx, rCheckDocs.Have)
|
deletedDocIDs, err := api.db.Queries.GetDeletedDocuments(c, rCheckDocs.Have)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error("[checkDocumentsSync] GetDeletedDocuments DB Error", err)
|
log.Error("GetDeletedDocuments DB Error", err)
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
|
apiErrorPage(c, http.StatusBadRequest, "Invalid Request")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get Wanted Documents
|
// Get Wanted Documents
|
||||||
jsonHaves, err := json.Marshal(rCheckDocs.Have)
|
jsonHaves, err := json.Marshal(rCheckDocs.Have)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error("[checkDocumentsSync] JSON Marshal Error", err)
|
log.Error("JSON Marshal Error", err)
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
|
apiErrorPage(c, http.StatusBadRequest, "Invalid Request")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
wantedDocs, err := api.DB.Queries.GetWantedDocuments(api.DB.Ctx, string(jsonHaves))
|
wantedDocs, err := api.db.Queries.GetWantedDocuments(c, string(jsonHaves))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error("[checkDocumentsSync] GetWantedDocuments DB Error", err)
|
log.Error("GetWantedDocuments DB Error", err)
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
|
apiErrorPage(c, http.StatusBadRequest, "Invalid Request")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -494,158 +447,116 @@ func (api *API) checkDocumentsSync(c *gin.Context) {
|
|||||||
rCheckDocSync.Delete = deletedDocIDs
|
rCheckDocSync.Delete = deletedDocIDs
|
||||||
}
|
}
|
||||||
|
|
||||||
c.JSON(http.StatusOK, rCheckDocSync)
|
koJSON(c, http.StatusOK, rCheckDocSync)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (api *API) uploadExistingDocument(c *gin.Context) {
|
func (api *API) koUploadExistingDocument(c *gin.Context) {
|
||||||
var rDoc requestDocumentID
|
var rDoc requestDocumentID
|
||||||
if err := c.ShouldBindUri(&rDoc); err != nil {
|
if err := c.ShouldBindUri(&rDoc); err != nil {
|
||||||
log.Error("[uploadExistingDocument] Invalid URI Bind")
|
log.Error("Invalid URI Bind")
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
|
apiErrorPage(c, http.StatusBadRequest, "Invalid Request")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Open Form File
|
||||||
fileData, err := c.FormFile("file")
|
fileData, err := c.FormFile("file")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error("[uploadExistingDocument] File Error:", err)
|
log.Error("File Error:", err)
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "File Error"})
|
apiErrorPage(c, http.StatusBadRequest, "File error")
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validate Type & Derive Extension on MIME
|
|
||||||
uploadedFile, err := fileData.Open()
|
|
||||||
fileMime, err := mimetype.DetectReader(uploadedFile)
|
|
||||||
fileExtension := fileMime.Extension()
|
|
||||||
|
|
||||||
if !slices.Contains([]string{".epub", ".html"}, fileExtension) {
|
|
||||||
log.Error("[uploadExistingDocument] Invalid FileType:", fileExtension)
|
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Filetype"})
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Validate Document Exists in DB
|
// Validate Document Exists in DB
|
||||||
document, err := api.DB.Queries.GetDocument(api.DB.Ctx, rDoc.DocumentID)
|
document, err := api.db.Queries.GetDocument(c, rDoc.DocumentID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error("[uploadExistingDocument] GetDocument DB Error:", err)
|
log.Error("GetDocument DB Error:", err)
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Document"})
|
apiErrorPage(c, http.StatusBadRequest, "Unknown Document")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Open File
|
||||||
|
uploadedFile, err := fileData.Open()
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Unable to open file")
|
||||||
|
apiErrorPage(c, http.StatusBadRequest, "Unable to open file")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check Support
|
||||||
|
docType, err := metadata.GetDocumentTypeReader(uploadedFile)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Unsupported file")
|
||||||
|
apiErrorPage(c, http.StatusBadRequest, "Unsupported file")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Derive Filename
|
// Derive Filename
|
||||||
var fileName string
|
fileName := deriveBaseFileName(&metadata.MetadataInfo{
|
||||||
if document.Author != nil {
|
Type: *docType,
|
||||||
fileName = fileName + *document.Author
|
PartialMD5: &document.ID,
|
||||||
} else {
|
Title: document.Title,
|
||||||
fileName = fileName + "Unknown"
|
Author: document.Author,
|
||||||
}
|
})
|
||||||
|
|
||||||
if document.Title != nil {
|
|
||||||
fileName = fileName + " - " + *document.Title
|
|
||||||
} else {
|
|
||||||
fileName = fileName + " - Unknown"
|
|
||||||
}
|
|
||||||
|
|
||||||
// Remove Slashes
|
|
||||||
fileName = strings.ReplaceAll(fileName, "/", "")
|
|
||||||
|
|
||||||
// Derive & Sanitize File Name
|
|
||||||
fileName = "." + filepath.Clean(fmt.Sprintf("/%s [%s]%s", fileName, document.ID, fileExtension))
|
|
||||||
|
|
||||||
// Generate Storage Path
|
// Generate Storage Path
|
||||||
safePath := filepath.Join(api.Config.DataPath, "documents", fileName)
|
basePath := filepath.Join(api.cfg.DataPath, "documents")
|
||||||
|
safePath := filepath.Join(basePath, fileName)
|
||||||
|
|
||||||
// Save & Prevent Overwrites
|
// Save & Prevent Overwrites
|
||||||
_, err = os.Stat(safePath)
|
_, err = os.Stat(safePath)
|
||||||
if os.IsNotExist(err) {
|
if os.IsNotExist(err) {
|
||||||
err = c.SaveUploadedFile(fileData, safePath)
|
err = c.SaveUploadedFile(fileData, safePath)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error("[uploadExistingDocument] Save Failure:", err)
|
log.Error("Save Failure:", err)
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "File Error"})
|
apiErrorPage(c, http.StatusBadRequest, "File Error")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get MD5 Hash
|
// Acquire Metadata
|
||||||
fileHash, err := getFileMD5(safePath)
|
metadataInfo, err := metadata.GetMetadata(safePath)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error("[uploadExistingDocument] Hash Failure:", err)
|
log.Errorf("Unable to acquire metadata: %v", err)
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "File Error"})
|
apiErrorPage(c, http.StatusBadRequest, "Unable to acquire metadata")
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get Word Count
|
|
||||||
wordCount, err := metadata.GetWordCount(safePath)
|
|
||||||
if err != nil {
|
|
||||||
log.Error("[uploadExistingDocument] Word Count Failure:", err)
|
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "File Error"})
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Upsert Document
|
// Upsert Document
|
||||||
if _, err = api.DB.Queries.UpsertDocument(api.DB.Ctx, database.UpsertDocumentParams{
|
if _, err = api.db.Queries.UpsertDocument(c, database.UpsertDocumentParams{
|
||||||
ID: document.ID,
|
ID: document.ID,
|
||||||
Md5: fileHash,
|
Md5: metadataInfo.MD5,
|
||||||
|
Words: metadataInfo.WordCount,
|
||||||
Filepath: &fileName,
|
Filepath: &fileName,
|
||||||
Words: &wordCount,
|
Basepath: &basePath,
|
||||||
}); err != nil {
|
}); err != nil {
|
||||||
log.Error("[uploadExistingDocument] UpsertDocument DB Error:", err)
|
log.Error("UpsertDocument DB Error:", err)
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Document Error"})
|
apiErrorPage(c, http.StatusBadRequest, "Document Error")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
c.JSON(http.StatusOK, gin.H{
|
koJSON(c, http.StatusOK, gin.H{
|
||||||
"status": "ok",
|
"status": "ok",
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (api *API) downloadDocument(c *gin.Context) {
|
func (api *API) koDemoModeJSONError(c *gin.Context) {
|
||||||
var rDoc requestDocumentID
|
apiErrorPage(c, http.StatusUnauthorized, "Not Allowed in Demo Mode")
|
||||||
if err := c.ShouldBindUri(&rDoc); err != nil {
|
|
||||||
log.Error("[downloadDocument] Invalid URI Bind")
|
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
|
|
||||||
return
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get Document
|
func apiErrorPage(c *gin.Context, errorCode int, errorMessage string) {
|
||||||
document, err := api.DB.Queries.GetDocument(api.DB.Ctx, rDoc.DocumentID)
|
c.AbortWithStatusJSON(errorCode, gin.H{"error": errorMessage})
|
||||||
if err != nil {
|
|
||||||
log.Error("[downloadDocument] GetDocument DB Error:", err)
|
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Document"})
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if document.Filepath == nil {
|
|
||||||
log.Error("[downloadDocument] Document Doesn't Have File:", rDoc.DocumentID)
|
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Document Doesn't Exist"})
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Derive Storage Location
|
|
||||||
filePath := filepath.Join(api.Config.DataPath, "documents", *document.Filepath)
|
|
||||||
|
|
||||||
// Validate File Exists
|
|
||||||
_, err = os.Stat(filePath)
|
|
||||||
if os.IsNotExist(err) {
|
|
||||||
log.Error("[downloadDocument] File Doesn't Exist:", rDoc.DocumentID)
|
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Document Doesn't Exists"})
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Force Download (Security)
|
|
||||||
c.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", filepath.Base(*document.Filepath)))
|
|
||||||
c.File(filePath)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (api *API) sanitizeInput(val any) *string {
|
func (api *API) sanitizeInput(val any) *string {
|
||||||
switch v := val.(type) {
|
switch v := val.(type) {
|
||||||
case *string:
|
case *string:
|
||||||
if v != nil {
|
if v != nil {
|
||||||
newString := html.UnescapeString(api.HTMLPolicy.Sanitize(string(*v)))
|
newString := html.UnescapeString(htmlPolicy.Sanitize(string(*v)))
|
||||||
return &newString
|
return &newString
|
||||||
}
|
}
|
||||||
case string:
|
case string:
|
||||||
if v != "" {
|
if v != "" {
|
||||||
newString := html.UnescapeString(api.HTMLPolicy.Sanitize(string(v)))
|
newString := html.UnescapeString(htmlPolicy.Sanitize(string(v)))
|
||||||
return &newString
|
return &newString
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -678,3 +589,10 @@ func getFileMD5(filePath string) (*string, error) {
|
|||||||
|
|
||||||
return &fileHash, nil
|
return &fileHash, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// koJSON forces koJSON Content-Type to only return `application/json`. This is addressing
|
||||||
|
// the following issue: https://github.com/koreader/koreader/issues/13629
|
||||||
|
func koJSON(c *gin.Context, code int, obj any) {
|
||||||
|
c.Header("Content-Type", "application/json")
|
||||||
|
c.JSON(code, obj)
|
||||||
|
}
|
||||||
|
|||||||
@@ -8,8 +8,9 @@ import (
|
|||||||
|
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
log "github.com/sirupsen/logrus"
|
log "github.com/sirupsen/logrus"
|
||||||
"reichard.io/bbank/database"
|
"reichard.io/antholume/database"
|
||||||
"reichard.io/bbank/opds"
|
"reichard.io/antholume/opds"
|
||||||
|
"reichard.io/antholume/pkg/ptr"
|
||||||
)
|
)
|
||||||
|
|
||||||
var mimeMapping map[string]string = map[string]string{
|
var mimeMapping map[string]string = map[string]string{
|
||||||
@@ -61,9 +62,9 @@ func (api *API) opdsEntry(c *gin.Context) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (api *API) opdsDocuments(c *gin.Context) {
|
func (api *API) opdsDocuments(c *gin.Context) {
|
||||||
var userID string
|
var auth authData
|
||||||
if rUser, _ := c.Get("AuthorizedUser"); rUser != nil {
|
if data, _ := c.Get("Authorization"); data != nil {
|
||||||
userID = rUser.(string)
|
auth = data.(authData)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Potential URL Parameters (Default Pagination - 100)
|
// Potential URL Parameters (Default Pagination - 100)
|
||||||
@@ -77,14 +78,15 @@ func (api *API) opdsDocuments(c *gin.Context) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Get Documents
|
// Get Documents
|
||||||
documents, err := api.DB.Queries.GetDocumentsWithStats(api.DB.Ctx, database.GetDocumentsWithStatsParams{
|
documents, err := api.db.Queries.GetDocumentsWithStats(c, database.GetDocumentsWithStatsParams{
|
||||||
UserID: userID,
|
UserID: auth.UserName,
|
||||||
Query: query,
|
Query: query,
|
||||||
|
Deleted: ptr.Of(false),
|
||||||
Offset: (*qParams.Page - 1) * *qParams.Limit,
|
Offset: (*qParams.Page - 1) * *qParams.Limit,
|
||||||
Limit: *qParams.Limit,
|
Limit: *qParams.Limit,
|
||||||
})
|
})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error("[opdsDocuments] GetDocumentsWithStats DB Error:", err)
|
log.Error("GetDocumentsWithStats DB Error:", err)
|
||||||
c.AbortWithStatus(http.StatusBadRequest)
|
c.AbortWithStatus(http.StatusBadRequest)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|||||||
76
api/streamer.go
Normal file
76
api/streamer.go
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"html/template"
|
||||||
|
"net/http"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
)
|
||||||
|
|
||||||
|
type streamer struct {
|
||||||
|
templates map[string]*template.Template
|
||||||
|
writer gin.ResponseWriter
|
||||||
|
mutex sync.Mutex
|
||||||
|
completeCh chan struct{}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) newStreamer(c *gin.Context, data string) *streamer {
|
||||||
|
stream := &streamer{
|
||||||
|
writer: c.Writer,
|
||||||
|
templates: api.templates,
|
||||||
|
completeCh: make(chan struct{}),
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set Headers
|
||||||
|
header := stream.writer.Header()
|
||||||
|
header.Set("Transfer-Encoding", "chunked")
|
||||||
|
header.Set("Content-Type", "text/html; charset=utf-8")
|
||||||
|
header.Set("X-Content-Type-Options", "nosniff")
|
||||||
|
stream.writer.WriteHeader(http.StatusOK)
|
||||||
|
|
||||||
|
// Send Open Element Tags
|
||||||
|
stream.write(data)
|
||||||
|
|
||||||
|
// Keep Alive
|
||||||
|
go func() {
|
||||||
|
closeCh := stream.writer.CloseNotify()
|
||||||
|
for {
|
||||||
|
select {
|
||||||
|
case <-stream.completeCh:
|
||||||
|
return
|
||||||
|
case <-closeCh:
|
||||||
|
return
|
||||||
|
default:
|
||||||
|
stream.write("<!-- ping -->")
|
||||||
|
time.Sleep(2 * time.Second)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
return stream
|
||||||
|
}
|
||||||
|
|
||||||
|
func (stream *streamer) write(str string) {
|
||||||
|
stream.mutex.Lock()
|
||||||
|
stream.writer.WriteString(str)
|
||||||
|
stream.writer.(http.Flusher).Flush()
|
||||||
|
stream.mutex.Unlock()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (stream *streamer) send(templateName string, templateVars gin.H) {
|
||||||
|
t := stream.templates[templateName]
|
||||||
|
buf := &bytes.Buffer{}
|
||||||
|
_ = t.ExecuteTemplate(buf, templateName, templateVars)
|
||||||
|
stream.write(buf.String())
|
||||||
|
}
|
||||||
|
|
||||||
|
func (stream *streamer) close(data string) {
|
||||||
|
// Send Close Element Tags
|
||||||
|
stream.write(data)
|
||||||
|
|
||||||
|
// Close
|
||||||
|
close(stream.completeCh)
|
||||||
|
}
|
||||||
181
api/utils.go
181
api/utils.go
@@ -4,61 +4,58 @@ import (
|
|||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"math"
|
"math"
|
||||||
|
"path/filepath"
|
||||||
|
"reflect"
|
||||||
|
"strings"
|
||||||
|
|
||||||
"reichard.io/bbank/database"
|
"reichard.io/antholume/database"
|
||||||
"reichard.io/bbank/graph"
|
"reichard.io/antholume/graph"
|
||||||
|
"reichard.io/antholume/metadata"
|
||||||
)
|
)
|
||||||
|
|
||||||
type UTCOffset struct {
|
// getTimeZones returns a string slice of IANA timezones.
|
||||||
Name string
|
func getTimeZones() []string {
|
||||||
Value string
|
return []string{
|
||||||
}
|
"Africa/Cairo",
|
||||||
|
"Africa/Johannesburg",
|
||||||
var UTC_OFFSETS = []UTCOffset{
|
"Africa/Lagos",
|
||||||
{Value: "-12 hours", Name: "UTC−12:00"},
|
"Africa/Nairobi",
|
||||||
{Value: "-11 hours", Name: "UTC−11:00"},
|
"America/Adak",
|
||||||
{Value: "-10 hours", Name: "UTC−10:00"},
|
"America/Anchorage",
|
||||||
{Value: "-9.5 hours", Name: "UTC−09:30"},
|
"America/Buenos_Aires",
|
||||||
{Value: "-9 hours", Name: "UTC−09:00"},
|
"America/Chicago",
|
||||||
{Value: "-8 hours", Name: "UTC−08:00"},
|
"America/Denver",
|
||||||
{Value: "-7 hours", Name: "UTC−07:00"},
|
"America/Los_Angeles",
|
||||||
{Value: "-6 hours", Name: "UTC−06:00"},
|
"America/Mexico_City",
|
||||||
{Value: "-5 hours", Name: "UTC−05:00"},
|
"America/New_York",
|
||||||
{Value: "-4 hours", Name: "UTC−04:00"},
|
"America/Nuuk",
|
||||||
{Value: "-3.5 hours", Name: "UTC−03:30"},
|
"America/Phoenix",
|
||||||
{Value: "-3 hours", Name: "UTC−03:00"},
|
"America/Puerto_Rico",
|
||||||
{Value: "-2 hours", Name: "UTC−02:00"},
|
"America/Sao_Paulo",
|
||||||
{Value: "-1 hours", Name: "UTC−01:00"},
|
"America/St_Johns",
|
||||||
{Value: "0 hours", Name: "UTC±00:00"},
|
"America/Toronto",
|
||||||
{Value: "+1 hours", Name: "UTC+01:00"},
|
"Asia/Dubai",
|
||||||
{Value: "+2 hours", Name: "UTC+02:00"},
|
"Asia/Hong_Kong",
|
||||||
{Value: "+3 hours", Name: "UTC+03:00"},
|
"Asia/Kolkata",
|
||||||
{Value: "+3.5 hours", Name: "UTC+03:30"},
|
"Asia/Seoul",
|
||||||
{Value: "+4 hours", Name: "UTC+04:00"},
|
"Asia/Shanghai",
|
||||||
{Value: "+4.5 hours", Name: "UTC+04:30"},
|
"Asia/Singapore",
|
||||||
{Value: "+5 hours", Name: "UTC+05:00"},
|
"Asia/Tokyo",
|
||||||
{Value: "+5.5 hours", Name: "UTC+05:30"},
|
"Atlantic/Azores",
|
||||||
{Value: "+5.75 hours", Name: "UTC+05:45"},
|
"Australia/Melbourne",
|
||||||
{Value: "+6 hours", Name: "UTC+06:00"},
|
"Australia/Sydney",
|
||||||
{Value: "+6.5 hours", Name: "UTC+06:30"},
|
"Europe/Berlin",
|
||||||
{Value: "+7 hours", Name: "UTC+07:00"},
|
"Europe/London",
|
||||||
{Value: "+8 hours", Name: "UTC+08:00"},
|
"Europe/Moscow",
|
||||||
{Value: "+8.75 hours", Name: "UTC+08:45"},
|
"Europe/Paris",
|
||||||
{Value: "+9 hours", Name: "UTC+09:00"},
|
"Pacific/Auckland",
|
||||||
{Value: "+9.5 hours", Name: "UTC+09:30"},
|
"Pacific/Honolulu",
|
||||||
{Value: "+10 hours", Name: "UTC+10:00"},
|
}
|
||||||
{Value: "+10.5 hours", Name: "UTC+10:30"},
|
|
||||||
{Value: "+11 hours", Name: "UTC+11:00"},
|
|
||||||
{Value: "+12 hours", Name: "UTC+12:00"},
|
|
||||||
{Value: "+12.75 hours", Name: "UTC+12:45"},
|
|
||||||
{Value: "+13 hours", Name: "UTC+13:00"},
|
|
||||||
{Value: "+14 hours", Name: "UTC+14:00"},
|
|
||||||
}
|
|
||||||
|
|
||||||
func getUTCOffsets() []UTCOffset {
|
|
||||||
return UTC_OFFSETS
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// niceSeconds takes in an int (in seconds) and returns a string readable
|
||||||
|
// representation. For example 1928371 -> "22d 7h 39m 31s".
|
||||||
|
// Deprecated: Use formatters.FormatDuration
|
||||||
func niceSeconds(input int64) (result string) {
|
func niceSeconds(input int64) (result string) {
|
||||||
if input == 0 {
|
if input == 0 {
|
||||||
return "N/A"
|
return "N/A"
|
||||||
@@ -87,7 +84,29 @@ func niceSeconds(input int64) (result string) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Convert Database Array -> Int64 Array
|
// niceNumbers takes in an int and returns a string representation. For example
|
||||||
|
// 19823 -> "19.8k".
|
||||||
|
// Deprecated: Use formatters.FormatNumber
|
||||||
|
func niceNumbers(input int64) string {
|
||||||
|
if input == 0 {
|
||||||
|
return "0"
|
||||||
|
}
|
||||||
|
|
||||||
|
abbreviations := []string{"", "k", "M", "B", "T"}
|
||||||
|
abbrevIndex := int(math.Log10(float64(input)) / 3)
|
||||||
|
scaledNumber := float64(input) / math.Pow(10, float64(abbrevIndex*3))
|
||||||
|
|
||||||
|
if scaledNumber >= 100 {
|
||||||
|
return fmt.Sprintf("%.0f%s", scaledNumber, abbreviations[abbrevIndex])
|
||||||
|
} else if scaledNumber >= 10 {
|
||||||
|
return fmt.Sprintf("%.1f%s", scaledNumber, abbreviations[abbrevIndex])
|
||||||
|
} else {
|
||||||
|
return fmt.Sprintf("%.2f%s", scaledNumber, abbreviations[abbrevIndex])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// getSVGGraphData builds SVGGraphData from the provided stats, width and height.
|
||||||
|
// It is used exclusively in templates to generate the daily read stats graph.
|
||||||
func getSVGGraphData(inputData []database.GetDailyReadStatsRow, svgWidth int, svgHeight int) graph.SVGGraphData {
|
func getSVGGraphData(inputData []database.GetDailyReadStatsRow, svgWidth int, svgHeight int) graph.SVGGraphData {
|
||||||
var intData []int64
|
var intData []int64
|
||||||
for _, item := range inputData {
|
for _, item := range inputData {
|
||||||
@@ -97,11 +116,13 @@ func getSVGGraphData(inputData []database.GetDailyReadStatsRow, svgWidth int, sv
|
|||||||
return graph.GetSVGGraphData(intData, svgWidth, svgHeight)
|
return graph.GetSVGGraphData(intData, svgWidth, svgHeight)
|
||||||
}
|
}
|
||||||
|
|
||||||
func dict(values ...interface{}) (map[string]interface{}, error) {
|
// dict returns a map[string]any dict. Each pair of two is a key & value
|
||||||
|
// respectively. It's primarily utilized in templates.
|
||||||
|
func dict(values ...any) (map[string]any, error) {
|
||||||
if len(values)%2 != 0 {
|
if len(values)%2 != 0 {
|
||||||
return nil, errors.New("invalid dict call")
|
return nil, errors.New("invalid dict call")
|
||||||
}
|
}
|
||||||
dict := make(map[string]interface{}, len(values)/2)
|
dict := make(map[string]any, len(values)/2)
|
||||||
for i := 0; i < len(values); i += 2 {
|
for i := 0; i < len(values); i += 2 {
|
||||||
key, ok := values[i].(string)
|
key, ok := values[i].(string)
|
||||||
if !ok {
|
if !ok {
|
||||||
@@ -111,3 +132,57 @@ func dict(values ...interface{}) (map[string]interface{}, error) {
|
|||||||
}
|
}
|
||||||
return dict, nil
|
return dict, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// fields returns a map[string]any of the provided struct. It's primarily
|
||||||
|
// utilized in templates.
|
||||||
|
func fields(value any) (map[string]any, error) {
|
||||||
|
v := reflect.Indirect(reflect.ValueOf(value))
|
||||||
|
if v.Kind() != reflect.Struct {
|
||||||
|
return nil, fmt.Errorf("%T is not a struct", value)
|
||||||
|
}
|
||||||
|
m := make(map[string]any)
|
||||||
|
t := v.Type()
|
||||||
|
for i := 0; i < t.NumField(); i++ {
|
||||||
|
sv := t.Field(i)
|
||||||
|
m[sv.Name] = v.Field(i).Interface()
|
||||||
|
}
|
||||||
|
return m, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// slice returns a slice of the provided arguments. It's primarily utilized in
|
||||||
|
// templates.
|
||||||
|
func slice(elements ...any) []any {
|
||||||
|
return elements
|
||||||
|
}
|
||||||
|
|
||||||
|
// deriveBaseFileName builds the base filename for a given MetadataInfo object.
|
||||||
|
func deriveBaseFileName(metadataInfo *metadata.MetadataInfo) string {
|
||||||
|
// Derive New FileName
|
||||||
|
var newFileName string
|
||||||
|
if *metadataInfo.Author != "" {
|
||||||
|
newFileName = newFileName + *metadataInfo.Author
|
||||||
|
} else {
|
||||||
|
newFileName = newFileName + "Unknown"
|
||||||
|
}
|
||||||
|
if *metadataInfo.Title != "" {
|
||||||
|
newFileName = newFileName + " - " + *metadataInfo.Title
|
||||||
|
} else {
|
||||||
|
newFileName = newFileName + " - Unknown"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remove Slashes
|
||||||
|
fileName := strings.ReplaceAll(newFileName, "/", "")
|
||||||
|
return "." + filepath.Clean(fmt.Sprintf("/%s [%s]%s", fileName, *metadataInfo.PartialMD5, metadataInfo.Type))
|
||||||
|
}
|
||||||
|
|
||||||
|
// importStatusPriority returns the order priority for import status in the UI.
|
||||||
|
func importStatusPriority(status importStatus) int {
|
||||||
|
switch status {
|
||||||
|
case importFailed:
|
||||||
|
return 1
|
||||||
|
case importExists:
|
||||||
|
return 2
|
||||||
|
default:
|
||||||
|
return 3
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,12 +1,35 @@
|
|||||||
package api
|
package api
|
||||||
|
|
||||||
import "testing"
|
import (
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
)
|
||||||
|
|
||||||
func TestNiceSeconds(t *testing.T) {
|
func TestNiceSeconds(t *testing.T) {
|
||||||
want := "22d 7h 39m 31s"
|
wantOne := "22d 7h 39m 31s"
|
||||||
nice := niceSeconds(1928371)
|
wantNA := "N/A"
|
||||||
|
|
||||||
if nice != want {
|
niceOne := niceSeconds(1928371)
|
||||||
t.Fatalf(`Expected: %v, Got: %v`, want, nice)
|
niceNA := niceSeconds(0)
|
||||||
|
|
||||||
|
assert.Equal(t, wantOne, niceOne, "should be nice seconds")
|
||||||
|
assert.Equal(t, wantNA, niceNA, "should be nice NA")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestNiceNumbers(t *testing.T) {
|
||||||
|
wantMillions := "198M"
|
||||||
|
wantThousands := "19.8k"
|
||||||
|
wantThousandsTwo := "1.98k"
|
||||||
|
wantZero := "0"
|
||||||
|
|
||||||
|
niceMillions := niceNumbers(198236461)
|
||||||
|
niceThousands := niceNumbers(19823)
|
||||||
|
niceThousandsTwo := niceNumbers(1984)
|
||||||
|
niceZero := niceNumbers(0)
|
||||||
|
|
||||||
|
assert.Equal(t, wantMillions, niceMillions, "should be nice millions")
|
||||||
|
assert.Equal(t, wantThousands, niceThousands, "should be nice thousands")
|
||||||
|
assert.Equal(t, wantThousandsTwo, niceThousandsTwo, "should be nice thousands")
|
||||||
|
assert.Equal(t, wantZero, niceZero, "should be nice zero")
|
||||||
}
|
}
|
||||||
|
|||||||
151
api/v1/activity.go
Normal file
151
api/v1/activity.go
Normal file
@@ -0,0 +1,151 @@
|
|||||||
|
package v1
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
log "github.com/sirupsen/logrus"
|
||||||
|
"reichard.io/antholume/database"
|
||||||
|
)
|
||||||
|
|
||||||
|
// GET /activity
|
||||||
|
func (s *Server) GetActivity(ctx context.Context, request GetActivityRequestObject) (GetActivityResponseObject, error) {
|
||||||
|
auth, ok := s.getSessionFromContext(ctx)
|
||||||
|
if !ok {
|
||||||
|
return GetActivity401JSONResponse{Code: 401, Message: "Unauthorized"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
docFilter := false
|
||||||
|
if request.Params.DocFilter != nil {
|
||||||
|
docFilter = *request.Params.DocFilter
|
||||||
|
}
|
||||||
|
|
||||||
|
documentID := ""
|
||||||
|
if request.Params.DocumentId != nil {
|
||||||
|
documentID = *request.Params.DocumentId
|
||||||
|
}
|
||||||
|
|
||||||
|
offset := int64(0)
|
||||||
|
if request.Params.Offset != nil {
|
||||||
|
offset = *request.Params.Offset
|
||||||
|
}
|
||||||
|
|
||||||
|
limit := int64(100)
|
||||||
|
if request.Params.Limit != nil {
|
||||||
|
limit = *request.Params.Limit
|
||||||
|
}
|
||||||
|
|
||||||
|
activities, err := s.db.Queries.GetActivity(ctx, database.GetActivityParams{
|
||||||
|
UserID: auth.UserName,
|
||||||
|
DocFilter: docFilter,
|
||||||
|
DocumentID: documentID,
|
||||||
|
Offset: offset,
|
||||||
|
Limit: limit,
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return GetActivity500JSONResponse{Code: 500, Message: err.Error()}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
apiActivities := make([]Activity, len(activities))
|
||||||
|
for i, a := range activities {
|
||||||
|
// Convert StartTime from interface{} to string
|
||||||
|
startTimeStr := ""
|
||||||
|
if a.StartTime != nil {
|
||||||
|
if str, ok := a.StartTime.(string); ok {
|
||||||
|
startTimeStr = str
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
apiActivities[i] = Activity{
|
||||||
|
DocumentId: a.DocumentID,
|
||||||
|
DeviceId: a.DeviceID,
|
||||||
|
StartTime: startTimeStr,
|
||||||
|
Title: a.Title,
|
||||||
|
Author: a.Author,
|
||||||
|
Duration: a.Duration,
|
||||||
|
StartPercentage: float32(a.StartPercentage),
|
||||||
|
EndPercentage: float32(a.EndPercentage),
|
||||||
|
ReadPercentage: float32(a.ReadPercentage),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
response := ActivityResponse{
|
||||||
|
Activities: apiActivities,
|
||||||
|
}
|
||||||
|
return GetActivity200JSONResponse(response), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// POST /activity
|
||||||
|
func (s *Server) CreateActivity(ctx context.Context, request CreateActivityRequestObject) (CreateActivityResponseObject, error) {
|
||||||
|
auth, ok := s.getSessionFromContext(ctx)
|
||||||
|
if !ok {
|
||||||
|
return CreateActivity401JSONResponse{Code: 401, Message: "Unauthorized"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if request.Body == nil {
|
||||||
|
return CreateActivity400JSONResponse{Code: 400, Message: "Request body is required"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
tx, err := s.db.DB.Begin()
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Transaction Begin DB Error:", err)
|
||||||
|
return CreateActivity500JSONResponse{Code: 500, Message: "Database error"}, nil
|
||||||
|
}
|
||||||
|
committed := false
|
||||||
|
defer func() {
|
||||||
|
if committed {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if rollbackErr := tx.Rollback(); rollbackErr != nil {
|
||||||
|
log.Debug("Transaction Rollback DB Error:", rollbackErr)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
qtx := s.db.Queries.WithTx(tx)
|
||||||
|
|
||||||
|
allDocumentsMap := make(map[string]struct{})
|
||||||
|
for _, item := range request.Body.Activity {
|
||||||
|
allDocumentsMap[item.DocumentId] = struct{}{}
|
||||||
|
}
|
||||||
|
|
||||||
|
for documentID := range allDocumentsMap {
|
||||||
|
if _, err := qtx.UpsertDocument(ctx, database.UpsertDocumentParams{ID: documentID}); err != nil {
|
||||||
|
log.Error("UpsertDocument DB Error:", err)
|
||||||
|
return CreateActivity400JSONResponse{Code: 400, Message: "Invalid document"}, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, err := qtx.UpsertDevice(ctx, database.UpsertDeviceParams{
|
||||||
|
ID: request.Body.DeviceId,
|
||||||
|
UserID: auth.UserName,
|
||||||
|
DeviceName: request.Body.DeviceName,
|
||||||
|
LastSynced: time.Now().UTC().Format(time.RFC3339),
|
||||||
|
}); err != nil {
|
||||||
|
log.Error("UpsertDevice DB Error:", err)
|
||||||
|
return CreateActivity400JSONResponse{Code: 400, Message: "Invalid device"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, item := range request.Body.Activity {
|
||||||
|
if _, err := qtx.AddActivity(ctx, database.AddActivityParams{
|
||||||
|
UserID: auth.UserName,
|
||||||
|
DocumentID: item.DocumentId,
|
||||||
|
DeviceID: request.Body.DeviceId,
|
||||||
|
StartTime: time.Unix(item.StartTime, 0).UTC().Format(time.RFC3339),
|
||||||
|
Duration: item.Duration,
|
||||||
|
StartPercentage: float64(item.Page) / float64(item.Pages),
|
||||||
|
EndPercentage: float64(item.Page+1) / float64(item.Pages),
|
||||||
|
}); err != nil {
|
||||||
|
log.Error("AddActivity DB Error:", err)
|
||||||
|
return CreateActivity400JSONResponse{Code: 400, Message: "Invalid activity"}, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := tx.Commit(); err != nil {
|
||||||
|
log.Error("Transaction Commit DB Error:", err)
|
||||||
|
return CreateActivity500JSONResponse{Code: 500, Message: "Database error"}, nil
|
||||||
|
}
|
||||||
|
committed = true
|
||||||
|
|
||||||
|
response := CreateActivityResponse{Added: int64(len(request.Body.Activity))}
|
||||||
|
return CreateActivity200JSONResponse(response), nil
|
||||||
|
}
|
||||||
1070
api/v1/admin.go
Normal file
1070
api/v1/admin.go
Normal file
File diff suppressed because it is too large
Load Diff
152
api/v1/admin_test.go
Normal file
152
api/v1/admin_test.go
Normal file
@@ -0,0 +1,152 @@
|
|||||||
|
package v1
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"context"
|
||||||
|
"crypto/md5"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
argon2 "github.com/alexedwards/argon2id"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
"reichard.io/antholume/config"
|
||||||
|
"reichard.io/antholume/database"
|
||||||
|
)
|
||||||
|
|
||||||
|
func createAdminTestUser(t *testing.T, db *database.DBManager, username, password string) {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
md5Hash := fmt.Sprintf("%x", md5.Sum([]byte(password)))
|
||||||
|
hashedPassword, err := argon2.CreateHash(md5Hash, argon2.DefaultParams)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
authHash := "test-auth-hash"
|
||||||
|
_, err = db.Queries.CreateUser(context.Background(), database.CreateUserParams{
|
||||||
|
ID: username,
|
||||||
|
Pass: &hashedPassword,
|
||||||
|
AuthHash: &authHash,
|
||||||
|
Admin: true,
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func loginAdminTestUser(t *testing.T, srv *Server, username, password string) *http.Cookie {
|
||||||
|
t.Helper()
|
||||||
|
|
||||||
|
body, err := json.Marshal(LoginRequest{Username: username, Password: password})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
req := httptest.NewRequest(http.MethodPost, "/api/v1/auth/login", bytes.NewReader(body))
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
srv.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
require.Equal(t, http.StatusOK, w.Code)
|
||||||
|
cookies := w.Result().Cookies()
|
||||||
|
require.Len(t, cookies, 1)
|
||||||
|
|
||||||
|
return cookies[0]
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestGetLogsPagination verifies GET /api/v1/admin/logs paging: with a
// four-line log file, page=2&limit=2 must return the last two lines together
// with correct page/limit/total metadata, a previous-page link, and no
// next-page link.
func TestGetLogsPagination(t *testing.T) {
	configPath := t.TempDir()
	require.NoError(t, os.MkdirAll(filepath.Join(configPath, "logs"), 0o755))
	// Mixed JSON and plain-text lines: both kinds must count toward the total.
	require.NoError(t, os.WriteFile(filepath.Join(configPath, "logs", "antholume.log"), []byte(
		"{\"level\":\"info\",\"msg\":\"one\"}\n"+
			"plain two\n"+
			"{\"level\":\"error\",\"msg\":\"three\"}\n"+
			"plain four\n",
	), 0o644))

	cfg := &config.Config{
		ListenPort:          "8080",
		DBType:              "memory",
		DBName:              "test",
		ConfigPath:          configPath,
		CookieAuthKey:       "test-auth-key-32-bytes-long-enough",
		CookieEncKey:        "0123456789abcdef", // 16 bytes — valid AES key length
		CookieSecure:        false,
		CookieHTTPOnly:      true,
		Version:             "test",
		DemoMode:            false,
		RegistrationEnabled: true,
	}

	db := database.NewMgr(cfg)
	srv := NewServer(db, cfg, nil)
	createAdminTestUser(t, db, "admin", "password")
	cookie := loginAdminTestUser(t, srv, "admin", "password")

	req := httptest.NewRequest(http.MethodGet, "/api/v1/admin/logs?page=2&limit=2", nil)
	req.AddCookie(cookie)
	w := httptest.NewRecorder()
	srv.ServeHTTP(w, req)

	require.Equal(t, http.StatusOK, w.Code)

	var resp LogsResponse
	require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
	require.NotNil(t, resp.Logs)
	require.Len(t, *resp.Logs, 2)
	require.NotNil(t, resp.Page)
	require.Equal(t, int64(2), *resp.Page)
	require.NotNil(t, resp.Limit)
	require.Equal(t, int64(2), *resp.Limit)
	require.NotNil(t, resp.Total)
	require.Equal(t, int64(4), *resp.Total)
	// Page 2 of 2 — there is a previous page but no next page.
	require.Nil(t, resp.NextPage)
	require.NotNil(t, resp.PreviousPage)
	require.Equal(t, int64(1), *resp.PreviousPage)
	// Second page holds the third and fourth log lines, in file order.
	require.Contains(t, (*resp.Logs)[0], "three")
	require.Contains(t, (*resp.Logs)[1], "plain four")
}
|
||||||
|
|
||||||
|
// TestGetLogsPaginationWithBasicFilter verifies that the `filter` query
// parameter narrows the log listing before pagination is applied: three of
// four lines match "match", so page=1&limit=2 returns two rows, total=3, and
// a next-page link.
func TestGetLogsPaginationWithBasicFilter(t *testing.T) {
	configPath := t.TempDir()
	require.NoError(t, os.MkdirAll(filepath.Join(configPath, "logs"), 0o755))
	// One non-matching line ("skip") sits between matches to prove filtering
	// happens per-line rather than by position.
	require.NoError(t, os.WriteFile(filepath.Join(configPath, "logs", "antholume.log"), []byte(
		"{\"level\":\"info\",\"msg\":\"match-1\"}\n"+
			"{\"level\":\"info\",\"msg\":\"skip\"}\n"+
			"plain match-2\n"+
			"{\"level\":\"info\",\"msg\":\"match-3\"}\n",
	), 0o644))

	cfg := &config.Config{
		ListenPort:          "8080",
		DBType:              "memory",
		DBName:              "test",
		ConfigPath:          configPath,
		CookieAuthKey:       "test-auth-key-32-bytes-long-enough",
		CookieEncKey:        "0123456789abcdef",
		CookieSecure:        false,
		CookieHTTPOnly:      true,
		Version:             "test",
		DemoMode:            false,
		RegistrationEnabled: true,
	}

	db := database.NewMgr(cfg)
	srv := NewServer(db, cfg, nil)
	createAdminTestUser(t, db, "admin", "password")
	cookie := loginAdminTestUser(t, srv, "admin", "password")

	// filter=%22match%22 is the URL-encoded quoted string "match".
	req := httptest.NewRequest(http.MethodGet, "/api/v1/admin/logs?filter=%22match%22&page=1&limit=2", nil)
	req.AddCookie(cookie)
	w := httptest.NewRecorder()
	srv.ServeHTTP(w, req)

	require.Equal(t, http.StatusOK, w.Code)

	var resp LogsResponse
	require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
	require.NotNil(t, resp.Logs)
	require.Len(t, *resp.Logs, 2)
	require.NotNil(t, resp.Total)
	require.Equal(t, int64(3), *resp.Total)
	// Three matches at limit 2 means a second page exists.
	require.NotNil(t, resp.NextPage)
	require.Equal(t, int64(2), *resp.NextPage)
}
|
||||||
4146
api/v1/api.gen.go
Normal file
4146
api/v1/api.gen.go
Normal file
File diff suppressed because it is too large
Load Diff
286
api/v1/auth.go
Normal file
286
api/v1/auth.go
Normal file
@@ -0,0 +1,286 @@
|
|||||||
|
package v1
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"crypto/md5"
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
argon2 "github.com/alexedwards/argon2id"
|
||||||
|
"github.com/gorilla/sessions"
|
||||||
|
log "github.com/sirupsen/logrus"
|
||||||
|
)
|
||||||
|
|
||||||
|
// POST /auth/login
|
||||||
|
func (s *Server) Login(ctx context.Context, request LoginRequestObject) (LoginResponseObject, error) {
|
||||||
|
if request.Body == nil {
|
||||||
|
return Login400JSONResponse{Code: 400, Message: "Invalid request body"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
req := *request.Body
|
||||||
|
if req.Username == "" || req.Password == "" {
|
||||||
|
return Login400JSONResponse{Code: 400, Message: "Invalid credentials"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// MD5 - KOSync compatibility
|
||||||
|
password := fmt.Sprintf("%x", md5.Sum([]byte(req.Password)))
|
||||||
|
|
||||||
|
// Verify credentials
|
||||||
|
user, err := s.db.Queries.GetUser(ctx, req.Username)
|
||||||
|
if err != nil {
|
||||||
|
return Login401JSONResponse{Code: 401, Message: "Invalid credentials"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if match, err := argon2.ComparePasswordAndHash(password, *user.Pass); err != nil || !match {
|
||||||
|
return Login401JSONResponse{Code: 401, Message: "Invalid credentials"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := s.saveUserSession(ctx, user.ID, user.Admin, *user.AuthHash); err != nil {
|
||||||
|
return Login500JSONResponse{Code: 500, Message: err.Error()}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return Login200JSONResponse{
|
||||||
|
Body: LoginResponse{
|
||||||
|
Username: user.ID,
|
||||||
|
IsAdmin: user.Admin,
|
||||||
|
},
|
||||||
|
Headers: Login200ResponseHeaders{
|
||||||
|
SetCookie: s.getSetCookieFromContext(ctx),
|
||||||
|
},
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Register creates a new account and immediately establishes a session for it.
// Registration can be globally disabled via config.
//
// POST /auth/register
func (s *Server) Register(ctx context.Context, request RegisterRequestObject) (RegisterResponseObject, error) {
	if !s.cfg.RegistrationEnabled {
		return Register403JSONResponse{Code: 403, Message: "Registration is disabled"}, nil
	}

	if request.Body == nil {
		return Register400JSONResponse{Code: 400, Message: "Invalid request body"}, nil
	}

	req := *request.Body
	if req.Username == "" || req.Password == "" {
		return Register400JSONResponse{Code: 400, Message: "Invalid user or password"}, nil
	}

	currentUsers, err := s.db.Queries.GetUsers(ctx)
	if err != nil {
		return Register500JSONResponse{Code: 500, Message: "Failed to create user"}, nil
	}

	// The very first registered user is bootstrapped as admin.
	// NOTE(review): this check-then-create is racy under concurrent first
	// registrations — two callers could both observe an empty table and both
	// become admin; confirm whether a DB-level guard exists.
	isAdmin := len(currentUsers) == 0
	if err := s.createUser(ctx, req.Username, &req.Password, &isAdmin); err != nil {
		// NOTE(review): internal createUser failures also surface as 400 here;
		// confirm whether a 400/500 split is desirable.
		return Register400JSONResponse{Code: 400, Message: err.Error()}, nil
	}

	// Re-read the user so the session and response reflect stored state
	// (including the generated AuthHash).
	user, err := s.db.Queries.GetUser(ctx, req.Username)
	if err != nil {
		return Register500JSONResponse{Code: 500, Message: "Failed to load created user"}, nil
	}

	if err := s.saveUserSession(ctx, user.ID, user.Admin, *user.AuthHash); err != nil {
		return Register500JSONResponse{Code: 500, Message: err.Error()}, nil
	}

	return Register201JSONResponse{
		Body: LoginResponse{
			Username: user.ID,
			IsAdmin:  user.Admin,
		},
		Headers: Register201ResponseHeaders{
			SetCookie: s.getSetCookieFromContext(ctx),
		},
	}, nil
}
|
||||||
|
|
||||||
|
// Logout invalidates the caller's session by clearing all session values and
// re-issuing the (now empty) cookie.
//
// POST /auth/logout
//
// NOTE(review): the internal-failure paths below ("Internal context error",
// "Failed to logout") respond 401; if the generated API exposes a Logout 500
// response variant, those paths should use it instead.
func (s *Server) Logout(ctx context.Context, request LogoutRequestObject) (LogoutResponseObject, error) {
	_, ok := s.getSessionFromContext(ctx)
	if !ok {
		return Logout401JSONResponse{Code: 401, Message: "Unauthorized"}, nil
	}

	r := s.getRequestFromContext(ctx)
	w := s.getResponseWriterFromContext(ctx)

	if r == nil || w == nil {
		return Logout401JSONResponse{Code: 401, Message: "Internal context error"}, nil
	}

	session, err := s.getCookieSession(r)
	if err != nil {
		return Logout401JSONResponse{Code: 401, Message: "Unauthorized"}, nil
	}

	// Dropping every value invalidates the session; Save writes the emptied
	// cookie back to the client.
	session.Values = make(map[any]any)

	if err := session.Save(r, w); err != nil {
		return Logout401JSONResponse{Code: 401, Message: "Failed to logout"}, nil
	}

	return Logout200Response{}, nil
}
|
||||||
|
|
||||||
|
// GET /auth/me
|
||||||
|
func (s *Server) GetMe(ctx context.Context, request GetMeRequestObject) (GetMeResponseObject, error) {
|
||||||
|
auth, ok := s.getSessionFromContext(ctx)
|
||||||
|
if !ok {
|
||||||
|
return GetMe401JSONResponse{Code: 401, Message: "Unauthorized"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return GetMe200JSONResponse{
|
||||||
|
Username: auth.UserName,
|
||||||
|
IsAdmin: auth.IsAdmin,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Server) saveUserSession(ctx context.Context, username string, isAdmin bool, authHash string) error {
|
||||||
|
r := s.getRequestFromContext(ctx)
|
||||||
|
w := s.getResponseWriterFromContext(ctx)
|
||||||
|
if r == nil || w == nil {
|
||||||
|
return fmt.Errorf("internal context error")
|
||||||
|
}
|
||||||
|
|
||||||
|
session, err := s.getCookieSession(r)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("unauthorized")
|
||||||
|
}
|
||||||
|
|
||||||
|
session.Values["authorizedUser"] = username
|
||||||
|
session.Values["isAdmin"] = isAdmin
|
||||||
|
session.Values["expiresAt"] = time.Now().Unix() + (60 * 60 * 24 * 7)
|
||||||
|
session.Values["authHash"] = authHash
|
||||||
|
|
||||||
|
if err := session.Save(r, w); err != nil {
|
||||||
|
return fmt.Errorf("failed to create session")
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Server) getCookieSession(r *http.Request) (*sessions.Session, error) {
|
||||||
|
store := sessions.NewCookieStore([]byte(s.cfg.CookieAuthKey))
|
||||||
|
if s.cfg.CookieEncKey != "" {
|
||||||
|
if len(s.cfg.CookieEncKey) == 16 || len(s.cfg.CookieEncKey) == 32 {
|
||||||
|
store = sessions.NewCookieStore([]byte(s.cfg.CookieAuthKey), []byte(s.cfg.CookieEncKey))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
session, err := store.Get(r, "token")
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to get session: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
session.Options.SameSite = http.SameSiteLaxMode
|
||||||
|
session.Options.HttpOnly = true
|
||||||
|
session.Options.Secure = s.cfg.CookieSecure
|
||||||
|
|
||||||
|
return session, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// getSessionFromContext extracts authData from context
|
||||||
|
func (s *Server) getSessionFromContext(ctx context.Context) (authData, bool) {
|
||||||
|
auth, ok := ctx.Value("auth").(authData)
|
||||||
|
if !ok {
|
||||||
|
return authData{}, false
|
||||||
|
}
|
||||||
|
return auth, true
|
||||||
|
}
|
||||||
|
|
||||||
|
// isAdmin checks if a user has admin privileges
|
||||||
|
func (s *Server) isAdmin(ctx context.Context) bool {
|
||||||
|
auth, ok := s.getSessionFromContext(ctx)
|
||||||
|
if !ok {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return auth.IsAdmin
|
||||||
|
}
|
||||||
|
|
||||||
|
// getRequestFromContext extracts the HTTP request from context
|
||||||
|
func (s *Server) getRequestFromContext(ctx context.Context) *http.Request {
|
||||||
|
r, ok := ctx.Value("request").(*http.Request)
|
||||||
|
if !ok {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return r
|
||||||
|
}
|
||||||
|
|
||||||
|
// getResponseWriterFromContext extracts the response writer from context
|
||||||
|
func (s *Server) getResponseWriterFromContext(ctx context.Context) http.ResponseWriter {
|
||||||
|
w, ok := ctx.Value("response").(http.ResponseWriter)
|
||||||
|
if !ok {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return w
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Server) getSetCookieFromContext(ctx context.Context) string {
|
||||||
|
w := s.getResponseWriterFromContext(ctx)
|
||||||
|
if w == nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
return w.Header().Get("Set-Cookie")
|
||||||
|
}
|
||||||
|
|
||||||
|
// getSession retrieves auth data from the session cookie
|
||||||
|
func (s *Server) getSession(r *http.Request) (auth authData, ok bool) {
|
||||||
|
// Get session from cookie store
|
||||||
|
store := sessions.NewCookieStore([]byte(s.cfg.CookieAuthKey))
|
||||||
|
if s.cfg.CookieEncKey != "" {
|
||||||
|
if len(s.cfg.CookieEncKey) == 16 || len(s.cfg.CookieEncKey) == 32 {
|
||||||
|
store = sessions.NewCookieStore([]byte(s.cfg.CookieAuthKey), []byte(s.cfg.CookieEncKey))
|
||||||
|
} else {
|
||||||
|
log.Error("invalid cookie encryption key (must be 16 or 32 bytes)")
|
||||||
|
return authData{}, false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
session, err := store.Get(r, "token")
|
||||||
|
if err != nil {
|
||||||
|
return authData{}, false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get session values
|
||||||
|
authorizedUser := session.Values["authorizedUser"]
|
||||||
|
isAdmin := session.Values["isAdmin"]
|
||||||
|
expiresAt := session.Values["expiresAt"]
|
||||||
|
authHash := session.Values["authHash"]
|
||||||
|
|
||||||
|
if authorizedUser == nil || isAdmin == nil || expiresAt == nil || authHash == nil {
|
||||||
|
return authData{}, false
|
||||||
|
}
|
||||||
|
|
||||||
|
auth = authData{
|
||||||
|
UserName: authorizedUser.(string),
|
||||||
|
IsAdmin: isAdmin.(bool),
|
||||||
|
AuthHash: authHash.(string),
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate auth hash
|
||||||
|
ctx := r.Context()
|
||||||
|
correctAuthHash, err := s.getUserAuthHash(ctx, auth.UserName)
|
||||||
|
if err != nil || correctAuthHash != auth.AuthHash {
|
||||||
|
return authData{}, false
|
||||||
|
}
|
||||||
|
|
||||||
|
return auth, true
|
||||||
|
}
|
||||||
|
|
||||||
|
// getUserAuthHash retrieves the user's auth hash from DB or cache
|
||||||
|
func (s *Server) getUserAuthHash(ctx context.Context, username string) (string, error) {
|
||||||
|
user, err := s.db.Queries.GetUser(ctx, username)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return *user.AuthHash, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// authData represents authenticated user information extracted from a
// validated session cookie.
type authData struct {
	UserName string // user identifier (database primary key)
	IsAdmin  bool   // whether the user holds admin privileges
	AuthHash string // per-user hash compared against the DB to invalidate stale sessions
}
|
||||||
228
api/v1/auth_test.go
Normal file
228
api/v1/auth_test.go
Normal file
@@ -0,0 +1,228 @@
|
|||||||
|
package v1
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"crypto/md5"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/suite"
|
||||||
|
|
||||||
|
argon2 "github.com/alexedwards/argon2id"
|
||||||
|
"reichard.io/antholume/config"
|
||||||
|
"reichard.io/antholume/database"
|
||||||
|
)
|
||||||
|
|
||||||
|
// AuthTestSuite exercises the /auth endpoints end-to-end against an in-memory
// database; SetupTest rebuilds every fixture before each test.
type AuthTestSuite struct {
	suite.Suite
	db  *database.DBManager // in-memory database manager
	cfg *config.Config      // server configuration fixture
	srv *Server             // server under test
}
|
||||||
|
|
||||||
|
// setupConfig returns the shared test configuration: in-memory DB, valid
// 16-byte cookie encryption key, and registration enabled.
func (suite *AuthTestSuite) setupConfig() *config.Config {
	return &config.Config{
		ListenPort:          "8080",
		DBType:              "memory",
		DBName:              "test",
		ConfigPath:          "/tmp",
		CookieAuthKey:       "test-auth-key-32-bytes-long-enough",
		CookieEncKey:        "0123456789abcdef", // 16 bytes — valid AES key length
		CookieSecure:        false,
		CookieHTTPOnly:      true,
		Version:             "test",
		DemoMode:            false,
		RegistrationEnabled: true,
	}
}
|
||||||
|
|
||||||
|
// TestAuth runs AuthTestSuite under testify's suite runner.
func TestAuth(t *testing.T) {
	suite.Run(t, new(AuthTestSuite))
}
|
||||||
|
|
||||||
|
// SetupTest builds a fresh config, database, and server before each test so
// no state leaks between tests.
func (suite *AuthTestSuite) SetupTest() {
	suite.cfg = suite.setupConfig()
	suite.db = database.NewMgr(suite.cfg)
	suite.srv = NewServer(suite.db, suite.cfg, nil)
}
|
||||||
|
|
||||||
|
// createTestUser inserts an admin user directly into the DB, storing the
// password the same way the login path expects it: MD5 pre-hashed (KOSync
// compatibility), then argon2id-hashed.
func (suite *AuthTestSuite) createTestUser(username, password string) {
	md5Hash := fmt.Sprintf("%x", md5.Sum([]byte(password)))

	hashedPassword, err := argon2.CreateHash(md5Hash, argon2.DefaultParams)
	suite.Require().NoError(err)

	// Fixed hash is fine here — only its round-trip consistency matters.
	authHash := "test-auth-hash"

	_, err = suite.db.Queries.CreateUser(suite.T().Context(), database.CreateUserParams{
		ID:       username,
		Pass:     &hashedPassword,
		AuthHash: &authHash,
		Admin:    true,
	})
	suite.Require().NoError(err)
}
|
||||||
|
|
||||||
|
// assertSessionCookie checks that cookie is a well-formed session cookie:
// named "token", non-empty, and HttpOnly.
func (suite *AuthTestSuite) assertSessionCookie(cookie *http.Cookie) {
	suite.Require().NotNil(cookie)
	suite.Equal("token", cookie.Name)
	suite.NotEmpty(cookie.Value)
	suite.True(cookie.HttpOnly)
}
|
||||||
|
|
||||||
|
// login performs a login request through the full HTTP stack and returns the
// validated session cookie for use by subsequent requests.
func (suite *AuthTestSuite) login(username, password string) *http.Cookie {
	reqBody := LoginRequest{
		Username: username,
		Password: password,
	}
	body, err := json.Marshal(reqBody)
	suite.Require().NoError(err)

	req := httptest.NewRequest(http.MethodPost, "/api/v1/auth/login", bytes.NewReader(body))
	w := httptest.NewRecorder()

	suite.srv.ServeHTTP(w, req)

	suite.Equal(http.StatusOK, w.Code, "login should return 200")

	var resp LoginResponse
	suite.Require().NoError(json.Unmarshal(w.Body.Bytes(), &resp))

	cookies := w.Result().Cookies()
	suite.Require().Len(cookies, 1, "should have session cookie")
	suite.assertSessionCookie(cookies[0])

	return cookies[0]
}
|
||||||
|
|
||||||
|
func (suite *AuthTestSuite) TestAPILogin() {
|
||||||
|
suite.createTestUser("testuser", "testpass")
|
||||||
|
|
||||||
|
reqBody := LoginRequest{
|
||||||
|
Username: "testuser",
|
||||||
|
Password: "testpass",
|
||||||
|
}
|
||||||
|
body, _ := json.Marshal(reqBody)
|
||||||
|
|
||||||
|
req := httptest.NewRequest(http.MethodPost, "/api/v1/auth/login", bytes.NewReader(body))
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
|
||||||
|
suite.srv.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
suite.Equal(http.StatusOK, w.Code)
|
||||||
|
|
||||||
|
var resp LoginResponse
|
||||||
|
suite.Require().NoError(json.Unmarshal(w.Body.Bytes(), &resp))
|
||||||
|
suite.Equal("testuser", resp.Username)
|
||||||
|
|
||||||
|
cookies := w.Result().Cookies()
|
||||||
|
suite.Require().Len(cookies, 1)
|
||||||
|
suite.assertSessionCookie(cookies[0])
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *AuthTestSuite) TestAPILoginInvalidCredentials() {
|
||||||
|
reqBody := LoginRequest{
|
||||||
|
Username: "testuser",
|
||||||
|
Password: "wrongpass",
|
||||||
|
}
|
||||||
|
body, _ := json.Marshal(reqBody)
|
||||||
|
|
||||||
|
req := httptest.NewRequest(http.MethodPost, "/api/v1/auth/login", bytes.NewReader(body))
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
|
||||||
|
suite.srv.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
suite.Equal(http.StatusUnauthorized, w.Code)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *AuthTestSuite) TestAPIRegister() {
|
||||||
|
reqBody := LoginRequest{
|
||||||
|
Username: "newuser",
|
||||||
|
Password: "newpass",
|
||||||
|
}
|
||||||
|
body, _ := json.Marshal(reqBody)
|
||||||
|
|
||||||
|
req := httptest.NewRequest(http.MethodPost, "/api/v1/auth/register", bytes.NewReader(body))
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
|
||||||
|
suite.srv.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
suite.Equal(http.StatusCreated, w.Code)
|
||||||
|
|
||||||
|
var resp LoginResponse
|
||||||
|
suite.Require().NoError(json.Unmarshal(w.Body.Bytes(), &resp))
|
||||||
|
suite.Equal("newuser", resp.Username)
|
||||||
|
suite.True(resp.IsAdmin, "first registered user should mirror legacy admin bootstrap behavior")
|
||||||
|
|
||||||
|
cookies := w.Result().Cookies()
|
||||||
|
suite.Require().Len(cookies, 1, "register should set a session cookie")
|
||||||
|
suite.assertSessionCookie(cookies[0])
|
||||||
|
|
||||||
|
user, err := suite.db.Queries.GetUser(suite.T().Context(), "newuser")
|
||||||
|
suite.Require().NoError(err)
|
||||||
|
suite.True(user.Admin)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *AuthTestSuite) TestAPIRegisterDisabled() {
|
||||||
|
suite.cfg.RegistrationEnabled = false
|
||||||
|
suite.srv = NewServer(suite.db, suite.cfg, nil)
|
||||||
|
|
||||||
|
reqBody := LoginRequest{
|
||||||
|
Username: "newuser",
|
||||||
|
Password: "newpass",
|
||||||
|
}
|
||||||
|
body, _ := json.Marshal(reqBody)
|
||||||
|
|
||||||
|
req := httptest.NewRequest(http.MethodPost, "/api/v1/auth/register", bytes.NewReader(body))
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
|
||||||
|
suite.srv.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
suite.Equal(http.StatusForbidden, w.Code)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestAPILogout verifies a logged-in user can log out (200) and that the
// server re-issues the cleared "token" cookie.
func (suite *AuthTestSuite) TestAPILogout() {
	suite.createTestUser("testuser", "testpass")
	cookie := suite.login("testuser", "testpass")

	req := httptest.NewRequest(http.MethodPost, "/api/v1/auth/logout", nil)
	req.AddCookie(cookie)
	w := httptest.NewRecorder()

	suite.srv.ServeHTTP(w, req)

	suite.Equal(http.StatusOK, w.Code)

	// Logout writes the emptied session cookie back to the client.
	cookies := w.Result().Cookies()
	suite.Require().Len(cookies, 1)
	suite.Equal("token", cookies[0].Name)
}
|
||||||
|
|
||||||
|
// TestAPIGetMe verifies an authenticated /auth/me request returns the
// session's user identity.
func (suite *AuthTestSuite) TestAPIGetMe() {
	suite.createTestUser("testuser", "testpass")
	cookie := suite.login("testuser", "testpass")

	req := httptest.NewRequest(http.MethodGet, "/api/v1/auth/me", nil)
	req.AddCookie(cookie)
	w := httptest.NewRecorder()

	suite.srv.ServeHTTP(w, req)

	suite.Equal(http.StatusOK, w.Code)

	var resp UserData
	suite.Require().NoError(json.Unmarshal(w.Body.Bytes(), &resp))
	suite.Equal("testuser", resp.Username)
}
|
||||||
|
|
||||||
|
// TestAPIGetMeUnauthenticated verifies /auth/me without a session cookie
// yields 401.
func (suite *AuthTestSuite) TestAPIGetMeUnauthenticated() {
	req := httptest.NewRequest(http.MethodGet, "/api/v1/auth/me", nil)
	w := httptest.NewRecorder()

	suite.srv.ServeHTTP(w, req)

	suite.Equal(http.StatusUnauthorized, w.Code)
}
|
||||||
827
api/v1/documents.go
Normal file
827
api/v1/documents.go
Normal file
@@ -0,0 +1,827 @@
|
|||||||
|
package v1
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"io/fs"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
log "github.com/sirupsen/logrus"
|
||||||
|
"reichard.io/antholume/database"
|
||||||
|
"reichard.io/antholume/metadata"
|
||||||
|
)
|
||||||
|
|
||||||
|
// GET /documents
|
||||||
|
func (s *Server) GetDocuments(ctx context.Context, request GetDocumentsRequestObject) (GetDocumentsResponseObject, error) {
|
||||||
|
auth, ok := s.getSessionFromContext(ctx)
|
||||||
|
if !ok {
|
||||||
|
return GetDocuments401JSONResponse{Code: 401, Message: "Unauthorized"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
page := int64(1)
|
||||||
|
if request.Params.Page != nil {
|
||||||
|
page = *request.Params.Page
|
||||||
|
}
|
||||||
|
|
||||||
|
limit := int64(9)
|
||||||
|
if request.Params.Limit != nil {
|
||||||
|
limit = *request.Params.Limit
|
||||||
|
}
|
||||||
|
|
||||||
|
search := ""
|
||||||
|
if request.Params.Search != nil {
|
||||||
|
search = "%" + *request.Params.Search + "%"
|
||||||
|
}
|
||||||
|
|
||||||
|
rows, err := s.db.Queries.GetDocumentsWithStats(
|
||||||
|
ctx,
|
||||||
|
database.GetDocumentsWithStatsParams{
|
||||||
|
UserID: auth.UserName,
|
||||||
|
Query: &search,
|
||||||
|
Deleted: ptrOf(false),
|
||||||
|
Offset: (page - 1) * limit,
|
||||||
|
Limit: limit,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
if err != nil {
|
||||||
|
return GetDocuments500JSONResponse{Code: 500, Message: err.Error()}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
total := int64(len(rows))
|
||||||
|
var nextPage *int64
|
||||||
|
var previousPage *int64
|
||||||
|
if page*limit < total {
|
||||||
|
nextPage = ptrOf(page + 1)
|
||||||
|
}
|
||||||
|
if page > 1 {
|
||||||
|
previousPage = ptrOf(page - 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
apiDocuments := make([]Document, len(rows))
|
||||||
|
for i, row := range rows {
|
||||||
|
apiDocuments[i] = Document{
|
||||||
|
Id: row.ID,
|
||||||
|
Title: *row.Title,
|
||||||
|
Author: *row.Author,
|
||||||
|
Description: row.Description,
|
||||||
|
Isbn10: row.Isbn10,
|
||||||
|
Isbn13: row.Isbn13,
|
||||||
|
Words: row.Words,
|
||||||
|
Filepath: row.Filepath,
|
||||||
|
Percentage: ptrOf(float32(row.Percentage)),
|
||||||
|
TotalTimeSeconds: ptrOf(row.TotalTimeSeconds),
|
||||||
|
Wpm: ptrOf(float32(row.Wpm)),
|
||||||
|
SecondsPerPercent: ptrOf(row.SecondsPerPercent),
|
||||||
|
LastRead: parseInterfaceTime(row.LastRead),
|
||||||
|
CreatedAt: time.Now(), // Will be overwritten if we had a proper created_at from DB
|
||||||
|
UpdatedAt: time.Now(), // Will be overwritten if we had a proper updated_at from DB
|
||||||
|
Deleted: false, // Default, should be overridden if available
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
response := DocumentsResponse{
|
||||||
|
Documents: apiDocuments,
|
||||||
|
Total: total,
|
||||||
|
Page: page,
|
||||||
|
Limit: limit,
|
||||||
|
NextPage: nextPage,
|
||||||
|
PreviousPage: previousPage,
|
||||||
|
Search: request.Params.Search,
|
||||||
|
}
|
||||||
|
return GetDocuments200JSONResponse(response), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GET /documents/{id}
|
||||||
|
func (s *Server) GetDocument(ctx context.Context, request GetDocumentRequestObject) (GetDocumentResponseObject, error) {
|
||||||
|
auth, ok := s.getSessionFromContext(ctx)
|
||||||
|
if !ok {
|
||||||
|
return GetDocument401JSONResponse{Code: 401, Message: "Unauthorized"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use GetDocumentsWithStats to get document with stats
|
||||||
|
docs, err := s.db.Queries.GetDocumentsWithStats(
|
||||||
|
ctx,
|
||||||
|
database.GetDocumentsWithStatsParams{
|
||||||
|
UserID: auth.UserName,
|
||||||
|
ID: &request.Id,
|
||||||
|
Deleted: ptrOf(false),
|
||||||
|
Offset: 0,
|
||||||
|
Limit: 1,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
if err != nil || len(docs) == 0 {
|
||||||
|
return GetDocument404JSONResponse{Code: 404, Message: "Document not found"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
doc := docs[0]
|
||||||
|
|
||||||
|
apiDoc := Document{
|
||||||
|
Id: doc.ID,
|
||||||
|
Title: *doc.Title,
|
||||||
|
Author: *doc.Author,
|
||||||
|
Description: doc.Description,
|
||||||
|
Isbn10: doc.Isbn10,
|
||||||
|
Isbn13: doc.Isbn13,
|
||||||
|
Words: doc.Words,
|
||||||
|
Filepath: doc.Filepath,
|
||||||
|
Percentage: ptrOf(float32(doc.Percentage)),
|
||||||
|
TotalTimeSeconds: ptrOf(doc.TotalTimeSeconds),
|
||||||
|
Wpm: ptrOf(float32(doc.Wpm)),
|
||||||
|
SecondsPerPercent: ptrOf(doc.SecondsPerPercent),
|
||||||
|
LastRead: parseInterfaceTime(doc.LastRead),
|
||||||
|
CreatedAt: time.Now(), // Will be overwritten if we had a proper created_at from DB
|
||||||
|
UpdatedAt: time.Now(), // Will be overwritten if we had a proper updated_at from DB
|
||||||
|
Deleted: false, // Default, should be overridden if available
|
||||||
|
}
|
||||||
|
|
||||||
|
response := DocumentResponse{
|
||||||
|
Document: apiDoc,
|
||||||
|
}
|
||||||
|
return GetDocument200JSONResponse(response), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// EditDocument updates a document's editable metadata (title, author,
// description, ISBNs, cover) and returns the refreshed document with stats.
//
// POST /documents/{id}
func (s *Server) EditDocument(ctx context.Context, request EditDocumentRequestObject) (EditDocumentResponseObject, error) {
	auth, ok := s.getSessionFromContext(ctx)
	if !ok {
		return EditDocument401JSONResponse{Code: 401, Message: "Unauthorized"}, nil
	}

	if request.Body == nil {
		return EditDocument400JSONResponse{Code: 400, Message: "Missing request body"}, nil
	}

	// Validate document exists and capture its current state so non-editable
	// fields can be preserved through the upsert below.
	currentDoc, err := s.db.Queries.GetDocument(ctx, request.Id)
	if err != nil {
		return EditDocument404JSONResponse{Code: 404, Message: "Document not found"}, nil
	}

	// At least one editable field must be provided.
	if request.Body.Title == nil &&
		request.Body.Author == nil &&
		request.Body.Description == nil &&
		request.Body.Isbn10 == nil &&
		request.Body.Isbn13 == nil &&
		request.Body.CoverGbid == nil {
		return EditDocument400JSONResponse{Code: 400, Message: "No editable fields provided"}, nil
	}

	// Best-effort cover download via Google Books ID.
	// NOTE(review): cache failures are silently dropped (coverFileName stays
	// nil) — the caller gets 200 with no indication the cover was not set;
	// confirm this is intended.
	var coverFileName *string
	if request.Body.CoverGbid != nil {
		coverDir := filepath.Join(s.cfg.DataPath, "covers")
		fileName, err := metadata.CacheCoverWithContext(ctx, *request.Body.CoverGbid, coverDir, request.Id, true)
		if err == nil {
			coverFileName = fileName
		}
	}

	// Update document with provided editable fields only.
	// NOTE(review): omitted editable fields are passed as nil — presumably the
	// upsert SQL COALESCEs nil into the existing value rather than clearing
	// it; confirm against the query definition.
	_, err = s.db.Queries.UpsertDocument(ctx, database.UpsertDocumentParams{
		ID:          request.Id,
		Title:       request.Body.Title,
		Author:      request.Body.Author,
		Description: request.Body.Description,
		Isbn10:      request.Body.Isbn10,
		Isbn13:      request.Body.Isbn13,
		Coverfile:   coverFileName,
		// Preserve existing values for non-editable fields.
		Md5:      currentDoc.Md5,
		Basepath: currentDoc.Basepath,
		Filepath: currentDoc.Filepath,
		Words:    currentDoc.Words,
	})
	if err != nil {
		log.Error("UpsertDocument DB Error:", err)
		return EditDocument500JSONResponse{Code: 500, Message: "Failed to update document"}, nil
	}

	// Re-read the document (with stats) for the response body.
	docs, err := s.db.Queries.GetDocumentsWithStats(
		ctx,
		database.GetDocumentsWithStatsParams{
			UserID:  auth.UserName,
			ID:      &request.Id,
			Deleted: ptrOf(false),
			Offset:  0,
			Limit:   1,
		},
	)
	if err != nil || len(docs) == 0 {
		return EditDocument404JSONResponse{Code: 404, Message: "Document not found"}, nil
	}

	doc := docs[0]

	apiDoc := Document{
		Id:                doc.ID,
		Title:             *doc.Title,
		Author:            *doc.Author,
		Description:       doc.Description,
		Isbn10:            doc.Isbn10,
		Isbn13:            doc.Isbn13,
		Words:             doc.Words,
		Filepath:          doc.Filepath,
		Percentage:        ptrOf(float32(doc.Percentage)),
		TotalTimeSeconds:  ptrOf(doc.TotalTimeSeconds),
		Wpm:               ptrOf(float32(doc.Wpm)),
		SecondsPerPercent: ptrOf(doc.SecondsPerPercent),
		LastRead:          parseInterfaceTime(doc.LastRead),
		CreatedAt:         time.Now(), // placeholder: created_at not exposed by this query
		UpdatedAt:         time.Now(), // placeholder: updated_at not exposed by this query
		Deleted:           false,      // query filters Deleted=false
	}

	response := DocumentResponse{
		Document: apiDoc,
	}
	return EditDocument200JSONResponse(response), nil
}
|
||||||
|
|
||||||
|
// deriveBaseFileName builds the base filename for a given MetadataInfo object.
|
||||||
|
func deriveBaseFileName(metadataInfo *metadata.MetadataInfo) string {
|
||||||
|
// Derive New FileName
|
||||||
|
var newFileName string
|
||||||
|
if metadataInfo.Author != nil && *metadataInfo.Author != "" {
|
||||||
|
newFileName = newFileName + *metadataInfo.Author
|
||||||
|
} else {
|
||||||
|
newFileName = newFileName + "Unknown"
|
||||||
|
}
|
||||||
|
if metadataInfo.Title != nil && *metadataInfo.Title != "" {
|
||||||
|
newFileName = newFileName + " - " + *metadataInfo.Title
|
||||||
|
} else {
|
||||||
|
newFileName = newFileName + " - Unknown"
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remove Slashes
|
||||||
|
fileName := strings.ReplaceAll(newFileName, "/", "")
|
||||||
|
return "." + filepath.Clean(fmt.Sprintf("/%s [%s]%s", fileName, *metadataInfo.PartialMD5, metadataInfo.Type))
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseInterfaceTime converts an interface{} to time.Time for SQLC queries
|
||||||
|
func parseInterfaceTime(t any) *time.Time {
|
||||||
|
if t == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
switch v := t.(type) {
|
||||||
|
case string:
|
||||||
|
parsed, err := time.Parse(time.RFC3339, v)
|
||||||
|
if err != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return &parsed
|
||||||
|
case time.Time:
|
||||||
|
return &v
|
||||||
|
default:
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// serveNoCover serves the default no-cover image from assets
|
||||||
|
func (s *Server) serveNoCover() (fs.File, string, int64, error) {
|
||||||
|
// Try to open the no-cover image from assets
|
||||||
|
file, err := s.assets.Open("assets/images/no-cover.jpg")
|
||||||
|
if err != nil {
|
||||||
|
return nil, "", 0, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get file info
|
||||||
|
info, err := file.Stat()
|
||||||
|
if err != nil {
|
||||||
|
file.Close()
|
||||||
|
return nil, "", 0, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return file, "image/jpeg", info.Size(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// openFileReader opens the file at path for reading and returns it as an
// *os.File (which satisfies io.ReadCloser); the caller must close it.
func openFileReader(path string) (*os.File, error) {
	return os.Open(path)
}
|
||||||
|
|
||||||
|
// GET /documents/{id}/cover
|
||||||
|
func (s *Server) GetDocumentCover(ctx context.Context, request GetDocumentCoverRequestObject) (GetDocumentCoverResponseObject, error) {
|
||||||
|
// Authentication is handled by middleware, which also adds auth data to context
|
||||||
|
// This endpoint just serves the cover image
|
||||||
|
|
||||||
|
// Validate Document Exists in DB
|
||||||
|
document, err := s.db.Queries.GetDocument(ctx, request.Id)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("GetDocument DB Error:", err)
|
||||||
|
return GetDocumentCover404JSONResponse{Code: 404, Message: "Document not found"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var coverFile fs.File
|
||||||
|
var contentType string
|
||||||
|
var contentLength int64
|
||||||
|
var needMetadataFetch bool
|
||||||
|
|
||||||
|
// Handle Identified Document
|
||||||
|
if document.Coverfile != nil {
|
||||||
|
if *document.Coverfile == "UNKNOWN" {
|
||||||
|
// Serve no-cover image
|
||||||
|
file, ct, size, err := s.serveNoCover()
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Failed to open no-cover image:", err)
|
||||||
|
return GetDocumentCover404JSONResponse{Code: 404, Message: "Cover not found"}, nil
|
||||||
|
}
|
||||||
|
coverFile = file
|
||||||
|
contentType = ct
|
||||||
|
contentLength = size
|
||||||
|
needMetadataFetch = true
|
||||||
|
} else {
|
||||||
|
// Derive Path
|
||||||
|
coverPath := filepath.Join(s.cfg.DataPath, "covers", *document.Coverfile)
|
||||||
|
|
||||||
|
// Validate File Exists
|
||||||
|
fileInfo, err := os.Stat(coverPath)
|
||||||
|
if os.IsNotExist(err) {
|
||||||
|
log.Error("Cover file should but doesn't exist: ", err)
|
||||||
|
// Serve no-cover image
|
||||||
|
file, ct, size, err := s.serveNoCover()
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Failed to open no-cover image:", err)
|
||||||
|
return GetDocumentCover404JSONResponse{Code: 404, Message: "Cover not found"}, nil
|
||||||
|
}
|
||||||
|
coverFile = file
|
||||||
|
contentType = ct
|
||||||
|
contentLength = size
|
||||||
|
needMetadataFetch = true
|
||||||
|
} else {
|
||||||
|
// Open the cover file
|
||||||
|
file, err := openFileReader(coverPath)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Failed to open cover file:", err)
|
||||||
|
return GetDocumentCover500JSONResponse{Code: 500, Message: "Failed to open cover"}, nil
|
||||||
|
}
|
||||||
|
coverFile = file
|
||||||
|
contentLength = fileInfo.Size()
|
||||||
|
|
||||||
|
// Determine content type based on file extension
|
||||||
|
contentType = "image/jpeg"
|
||||||
|
if strings.HasSuffix(coverPath, ".png") {
|
||||||
|
contentType = "image/png"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
needMetadataFetch = true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Attempt Metadata fetch if needed
|
||||||
|
var cachedCoverFile string = "UNKNOWN"
|
||||||
|
var coverDir string = filepath.Join(s.cfg.DataPath, "covers")
|
||||||
|
|
||||||
|
if needMetadataFetch {
|
||||||
|
// Create context with timeout for metadata service calls
|
||||||
|
metadataCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
|
||||||
|
defer cancel()
|
||||||
|
|
||||||
|
// Identify Documents & Save Covers
|
||||||
|
metadataResults, err := metadata.SearchMetadataWithContext(metadataCtx, metadata.SOURCE_GBOOK, metadata.MetadataInfo{
|
||||||
|
Title: document.Title,
|
||||||
|
Author: document.Author,
|
||||||
|
})
|
||||||
|
|
||||||
|
if err == nil && len(metadataResults) > 0 && metadataResults[0].ID != nil {
|
||||||
|
firstResult := metadataResults[0]
|
||||||
|
|
||||||
|
// Save Cover
|
||||||
|
fileName, err := metadata.CacheCoverWithContext(metadataCtx, *firstResult.ID, coverDir, document.ID, false)
|
||||||
|
if err == nil {
|
||||||
|
cachedCoverFile = *fileName
|
||||||
|
}
|
||||||
|
|
||||||
|
// Store First Metadata Result
|
||||||
|
if _, err = s.db.Queries.AddMetadata(ctx, database.AddMetadataParams{
|
||||||
|
DocumentID: document.ID,
|
||||||
|
Title: firstResult.Title,
|
||||||
|
Author: firstResult.Author,
|
||||||
|
Description: firstResult.Description,
|
||||||
|
Gbid: firstResult.ID,
|
||||||
|
Olid: nil,
|
||||||
|
Isbn10: firstResult.ISBN10,
|
||||||
|
Isbn13: firstResult.ISBN13,
|
||||||
|
}); err != nil {
|
||||||
|
log.Error("AddMetadata DB Error:", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Upsert Document
|
||||||
|
if _, err = s.db.Queries.UpsertDocument(ctx, database.UpsertDocumentParams{
|
||||||
|
ID: document.ID,
|
||||||
|
Coverfile: &cachedCoverFile,
|
||||||
|
}); err != nil {
|
||||||
|
log.Warn("UpsertDocument DB Error:", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update cover file if we got a new cover
|
||||||
|
if cachedCoverFile != "UNKNOWN" {
|
||||||
|
coverPath := filepath.Join(coverDir, cachedCoverFile)
|
||||||
|
fileInfo, err := os.Stat(coverPath)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Failed to stat cached cover:", err)
|
||||||
|
// Keep the no-cover image
|
||||||
|
} else {
|
||||||
|
file, err := openFileReader(coverPath)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Failed to open cached cover:", err)
|
||||||
|
// Keep the no-cover image
|
||||||
|
} else {
|
||||||
|
_ = coverFile.Close() // Close the previous file
|
||||||
|
coverFile = file
|
||||||
|
contentLength = fileInfo.Size()
|
||||||
|
|
||||||
|
// Determine content type based on file extension
|
||||||
|
contentType = "image/jpeg"
|
||||||
|
if strings.HasSuffix(coverPath, ".png") {
|
||||||
|
contentType = "image/png"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return &GetDocumentCover200Response{
|
||||||
|
Body: coverFile,
|
||||||
|
ContentLength: contentLength,
|
||||||
|
ContentType: contentType,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// POST /documents/{id}/cover
|
||||||
|
func (s *Server) UploadDocumentCover(ctx context.Context, request UploadDocumentCoverRequestObject) (UploadDocumentCoverResponseObject, error) {
|
||||||
|
auth, ok := s.getSessionFromContext(ctx)
|
||||||
|
if !ok {
|
||||||
|
return UploadDocumentCover401JSONResponse{Code: 401, Message: "Unauthorized"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if request.Body == nil {
|
||||||
|
return UploadDocumentCover400JSONResponse{Code: 400, Message: "Missing request body"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate document exists
|
||||||
|
_, err := s.db.Queries.GetDocument(ctx, request.Id)
|
||||||
|
if err != nil {
|
||||||
|
return UploadDocumentCover404JSONResponse{Code: 404, Message: "Document not found"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read multipart form
|
||||||
|
form, err := request.Body.ReadForm(32 << 20) // 32MB max
|
||||||
|
if err != nil {
|
||||||
|
log.Error("ReadForm error:", err)
|
||||||
|
return UploadDocumentCover500JSONResponse{Code: 500, Message: "Failed to read form"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get file from form
|
||||||
|
fileField := form.File["cover_file"]
|
||||||
|
if len(fileField) == 0 {
|
||||||
|
return UploadDocumentCover400JSONResponse{Code: 400, Message: "No file provided"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
file := fileField[0]
|
||||||
|
|
||||||
|
// Validate file extension
|
||||||
|
if !strings.HasSuffix(strings.ToLower(file.Filename), ".jpg") && !strings.HasSuffix(strings.ToLower(file.Filename), ".png") {
|
||||||
|
return UploadDocumentCover400JSONResponse{Code: 400, Message: "Only JPG and PNG files are allowed"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Open file
|
||||||
|
f, err := file.Open()
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Open file error:", err)
|
||||||
|
return UploadDocumentCover500JSONResponse{Code: 500, Message: "Failed to open file"}, nil
|
||||||
|
}
|
||||||
|
defer f.Close()
|
||||||
|
|
||||||
|
// Read file content
|
||||||
|
data, err := io.ReadAll(f)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Read file error:", err)
|
||||||
|
return UploadDocumentCover500JSONResponse{Code: 500, Message: "Failed to read file"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate actual content type
|
||||||
|
contentType := http.DetectContentType(data)
|
||||||
|
allowedTypes := map[string]bool{
|
||||||
|
"image/jpeg": true,
|
||||||
|
"image/png": true,
|
||||||
|
}
|
||||||
|
if !allowedTypes[contentType] {
|
||||||
|
return UploadDocumentCover400JSONResponse{
|
||||||
|
Code: 400,
|
||||||
|
Message: fmt.Sprintf("Invalid file type: %s. Only JPG and PNG files are allowed.", contentType),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Derive storage path
|
||||||
|
coverDir := filepath.Join(s.cfg.DataPath, "covers")
|
||||||
|
fileName := fmt.Sprintf("%s%s", request.Id, strings.ToLower(filepath.Ext(file.Filename)))
|
||||||
|
safePath := filepath.Join(coverDir, fileName)
|
||||||
|
|
||||||
|
// Save file
|
||||||
|
err = os.WriteFile(safePath, data, 0644)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Save file error:", err)
|
||||||
|
return UploadDocumentCover500JSONResponse{Code: 500, Message: "Unable to save cover"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Upsert document with new cover
|
||||||
|
_, err = s.db.Queries.UpsertDocument(ctx, database.UpsertDocumentParams{
|
||||||
|
ID: request.Id,
|
||||||
|
Coverfile: &fileName,
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
log.Error("UpsertDocument DB error:", err)
|
||||||
|
return UploadDocumentCover500JSONResponse{Code: 500, Message: "Failed to save cover"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use GetDocumentsWithStats to get document with stats for the response
|
||||||
|
docs, err := s.db.Queries.GetDocumentsWithStats(
|
||||||
|
ctx,
|
||||||
|
database.GetDocumentsWithStatsParams{
|
||||||
|
UserID: auth.UserName,
|
||||||
|
ID: &request.Id,
|
||||||
|
Deleted: ptrOf(false),
|
||||||
|
Offset: 0,
|
||||||
|
Limit: 1,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
if err != nil || len(docs) == 0 {
|
||||||
|
return UploadDocumentCover404JSONResponse{Code: 404, Message: "Document not found"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
doc := docs[0]
|
||||||
|
|
||||||
|
|
||||||
|
apiDoc := Document{
|
||||||
|
Id: doc.ID,
|
||||||
|
Title: *doc.Title,
|
||||||
|
Author: *doc.Author,
|
||||||
|
Description: doc.Description,
|
||||||
|
Isbn10: doc.Isbn10,
|
||||||
|
Isbn13: doc.Isbn13,
|
||||||
|
Words: doc.Words,
|
||||||
|
Filepath: doc.Filepath,
|
||||||
|
Percentage: ptrOf(float32(doc.Percentage)),
|
||||||
|
TotalTimeSeconds: ptrOf(doc.TotalTimeSeconds),
|
||||||
|
Wpm: ptrOf(float32(doc.Wpm)),
|
||||||
|
SecondsPerPercent: ptrOf(doc.SecondsPerPercent),
|
||||||
|
LastRead: parseInterfaceTime(doc.LastRead),
|
||||||
|
CreatedAt: time.Now(),
|
||||||
|
UpdatedAt: time.Now(),
|
||||||
|
Deleted: false,
|
||||||
|
}
|
||||||
|
|
||||||
|
response := DocumentResponse{
|
||||||
|
Document: apiDoc,
|
||||||
|
}
|
||||||
|
return UploadDocumentCover200JSONResponse(response), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GET /documents/{id}/file
|
||||||
|
func (s *Server) GetDocumentFile(ctx context.Context, request GetDocumentFileRequestObject) (GetDocumentFileResponseObject, error) {
|
||||||
|
// Authentication is handled by middleware, which also adds auth data to context
|
||||||
|
// This endpoint just serves the document file download
|
||||||
|
// Get Document
|
||||||
|
document, err := s.db.Queries.GetDocument(ctx, request.Id)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("GetDocument DB Error:", err)
|
||||||
|
return GetDocumentFile404JSONResponse{Code: 404, Message: "Document not found"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if document.Filepath == nil {
|
||||||
|
log.Error("Document Doesn't Have File:", request.Id)
|
||||||
|
return GetDocumentFile404JSONResponse{Code: 404, Message: "Document file not found"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Derive Basepath
|
||||||
|
basepath := filepath.Join(s.cfg.DataPath, "documents")
|
||||||
|
if document.Basepath != nil && *document.Basepath != "" {
|
||||||
|
basepath = *document.Basepath
|
||||||
|
}
|
||||||
|
|
||||||
|
// Derive Storage Location
|
||||||
|
filePath := filepath.Join(basepath, *document.Filepath)
|
||||||
|
|
||||||
|
// Validate File Exists
|
||||||
|
fileInfo, err := os.Stat(filePath)
|
||||||
|
if os.IsNotExist(err) {
|
||||||
|
log.Error("File should but doesn't exist:", err)
|
||||||
|
return GetDocumentFile404JSONResponse{Code: 404, Message: "Document file not found"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Open file
|
||||||
|
file, err := os.Open(filePath)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Failed to open document file:", err)
|
||||||
|
return GetDocumentFile500JSONResponse{Code: 500, Message: "Failed to open document"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return &GetDocumentFile200Response{
|
||||||
|
Body: file,
|
||||||
|
ContentLength: fileInfo.Size(),
|
||||||
|
Filename: filepath.Base(*document.Filepath),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// POST /documents
|
||||||
|
func (s *Server) CreateDocument(ctx context.Context, request CreateDocumentRequestObject) (CreateDocumentResponseObject, error) {
|
||||||
|
_, ok := s.getSessionFromContext(ctx)
|
||||||
|
if !ok {
|
||||||
|
return CreateDocument401JSONResponse{Code: 401, Message: "Unauthorized"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if request.Body == nil {
|
||||||
|
return CreateDocument400JSONResponse{Code: 400, Message: "Missing request body"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read multipart form
|
||||||
|
form, err := request.Body.ReadForm(32 << 20) // 32MB max memory
|
||||||
|
if err != nil {
|
||||||
|
log.Error("ReadForm error:", err)
|
||||||
|
return CreateDocument500JSONResponse{Code: 500, Message: "Failed to read form"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get file from form
|
||||||
|
fileField := form.File["document_file"]
|
||||||
|
if len(fileField) == 0 {
|
||||||
|
return CreateDocument400JSONResponse{Code: 400, Message: "No file provided"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
file := fileField[0]
|
||||||
|
|
||||||
|
// Validate file extension
|
||||||
|
if !strings.HasSuffix(strings.ToLower(file.Filename), ".epub") {
|
||||||
|
return CreateDocument400JSONResponse{Code: 400, Message: "Only EPUB files are allowed"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Open file
|
||||||
|
f, err := file.Open()
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Open file error:", err)
|
||||||
|
return CreateDocument500JSONResponse{Code: 500, Message: "Failed to open file"}, nil
|
||||||
|
}
|
||||||
|
defer f.Close()
|
||||||
|
|
||||||
|
// Read file content
|
||||||
|
data, err := io.ReadAll(f)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Read file error:", err)
|
||||||
|
return CreateDocument500JSONResponse{Code: 500, Message: "Failed to read file"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate actual content type
|
||||||
|
contentType := http.DetectContentType(data)
|
||||||
|
if contentType != "application/epub+zip" && contentType != "application/zip" {
|
||||||
|
return CreateDocument400JSONResponse{
|
||||||
|
Code: 400,
|
||||||
|
Message: fmt.Sprintf("Invalid file type: %s. Only EPUB files are allowed.", contentType),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create temp file to get metadata
|
||||||
|
tempFile, err := os.CreateTemp("", "book")
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Temp file create error:", err)
|
||||||
|
return CreateDocument500JSONResponse{Code: 500, Message: "Unable to create temp file"}, nil
|
||||||
|
}
|
||||||
|
defer os.Remove(tempFile.Name())
|
||||||
|
defer tempFile.Close()
|
||||||
|
|
||||||
|
// Write data to temp file
|
||||||
|
if _, err := tempFile.Write(data); err != nil {
|
||||||
|
log.Error("Write temp file error:", err)
|
||||||
|
return CreateDocument500JSONResponse{Code: 500, Message: "Unable to write temp file"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get metadata using metadata package
|
||||||
|
metadataInfo, err := metadata.GetMetadata(tempFile.Name())
|
||||||
|
if err != nil {
|
||||||
|
log.Error("GetMetadata error:", err)
|
||||||
|
return CreateDocument500JSONResponse{Code: 500, Message: "Unable to acquire metadata"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if already exists
|
||||||
|
_, err = s.db.Queries.GetDocument(ctx, *metadataInfo.PartialMD5)
|
||||||
|
if err == nil {
|
||||||
|
// Document already exists
|
||||||
|
existingDoc, _ := s.db.Queries.GetDocument(ctx, *metadataInfo.PartialMD5)
|
||||||
|
apiDoc := Document{
|
||||||
|
Id: existingDoc.ID,
|
||||||
|
Title: *existingDoc.Title,
|
||||||
|
Author: *existingDoc.Author,
|
||||||
|
Description: existingDoc.Description,
|
||||||
|
Isbn10: existingDoc.Isbn10,
|
||||||
|
Isbn13: existingDoc.Isbn13,
|
||||||
|
Words: existingDoc.Words,
|
||||||
|
Filepath: existingDoc.Filepath,
|
||||||
|
CreatedAt: parseTime(existingDoc.CreatedAt),
|
||||||
|
UpdatedAt: parseTime(existingDoc.UpdatedAt),
|
||||||
|
Deleted: existingDoc.Deleted,
|
||||||
|
}
|
||||||
|
response := DocumentResponse{
|
||||||
|
Document: apiDoc,
|
||||||
|
}
|
||||||
|
return CreateDocument200JSONResponse(response), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Derive & sanitize file name
|
||||||
|
fileName := deriveBaseFileName(metadataInfo)
|
||||||
|
basePath := filepath.Join(s.cfg.DataPath, "documents")
|
||||||
|
safePath := filepath.Join(basePath, fileName)
|
||||||
|
|
||||||
|
// Save file to storage
|
||||||
|
err = os.WriteFile(safePath, data, 0644)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Save file error:", err)
|
||||||
|
return CreateDocument500JSONResponse{Code: 500, Message: "Unable to save file"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Upsert document
|
||||||
|
doc, err := s.db.Queries.UpsertDocument(ctx, database.UpsertDocumentParams{
|
||||||
|
ID: *metadataInfo.PartialMD5,
|
||||||
|
Title: metadataInfo.Title,
|
||||||
|
Author: metadataInfo.Author,
|
||||||
|
Description: metadataInfo.Description,
|
||||||
|
Md5: metadataInfo.MD5,
|
||||||
|
Words: metadataInfo.WordCount,
|
||||||
|
Filepath: &fileName,
|
||||||
|
Basepath: &basePath,
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
log.Error("UpsertDocument DB error:", err)
|
||||||
|
return CreateDocument500JSONResponse{Code: 500, Message: "Failed to save document"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
apiDoc := Document{
|
||||||
|
Id: doc.ID,
|
||||||
|
Title: *doc.Title,
|
||||||
|
Author: *doc.Author,
|
||||||
|
Description: doc.Description,
|
||||||
|
Isbn10: doc.Isbn10,
|
||||||
|
Isbn13: doc.Isbn13,
|
||||||
|
Words: doc.Words,
|
||||||
|
Filepath: doc.Filepath,
|
||||||
|
CreatedAt: parseTime(doc.CreatedAt),
|
||||||
|
UpdatedAt: parseTime(doc.UpdatedAt),
|
||||||
|
Deleted: doc.Deleted,
|
||||||
|
}
|
||||||
|
|
||||||
|
response := DocumentResponse{
|
||||||
|
Document: apiDoc,
|
||||||
|
}
|
||||||
|
|
||||||
|
return CreateDocument200JSONResponse(response), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetDocumentCover200Response is a custom response type that allows setting content type
|
||||||
|
type GetDocumentCover200Response struct {
|
||||||
|
Body io.Reader
|
||||||
|
ContentLength int64
|
||||||
|
ContentType string
|
||||||
|
}
|
||||||
|
|
||||||
|
func (response GetDocumentCover200Response) VisitGetDocumentCoverResponse(w http.ResponseWriter) error {
|
||||||
|
w.Header().Set("Content-Type", response.ContentType)
|
||||||
|
if response.ContentLength != 0 {
|
||||||
|
w.Header().Set("Content-Length", fmt.Sprint(response.ContentLength))
|
||||||
|
}
|
||||||
|
w.WriteHeader(200)
|
||||||
|
|
||||||
|
if closer, ok := response.Body.(io.Closer); ok {
|
||||||
|
defer closer.Close()
|
||||||
|
}
|
||||||
|
_, err := io.Copy(w, response.Body)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetDocumentFile200Response is a custom response type that allows setting filename for download
|
||||||
|
type GetDocumentFile200Response struct {
|
||||||
|
Body io.Reader
|
||||||
|
ContentLength int64
|
||||||
|
Filename string
|
||||||
|
}
|
||||||
|
|
||||||
|
func (response GetDocumentFile200Response) VisitGetDocumentFileResponse(w http.ResponseWriter) error {
|
||||||
|
w.Header().Set("Content-Type", "application/octet-stream")
|
||||||
|
if response.ContentLength != 0 {
|
||||||
|
w.Header().Set("Content-Length", fmt.Sprint(response.ContentLength))
|
||||||
|
}
|
||||||
|
w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", response.Filename))
|
||||||
|
w.WriteHeader(200)
|
||||||
|
|
||||||
|
if closer, ok := response.Body.(io.Closer); ok {
|
||||||
|
defer closer.Close()
|
||||||
|
}
|
||||||
|
_, err := io.Copy(w, response.Body)
|
||||||
|
return err
|
||||||
|
}
|
||||||
178
api/v1/documents_test.go
Normal file
178
api/v1/documents_test.go
Normal file
@@ -0,0 +1,178 @@
|
|||||||
|
package v1
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"crypto/md5"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/suite"
|
||||||
|
|
||||||
|
argon2 "github.com/alexedwards/argon2id"
|
||||||
|
"reichard.io/antholume/config"
|
||||||
|
"reichard.io/antholume/database"
|
||||||
|
"reichard.io/antholume/pkg/ptr"
|
||||||
|
)
|
||||||
|
|
||||||
|
// DocumentsTestSuite exercises the /api/v1/documents endpoints end-to-end
// against an in-memory database.
type DocumentsTestSuite struct {
	suite.Suite
	// db is the in-memory database manager, rebuilt in SetupTest.
	db *database.DBManager
	// cfg is the test configuration produced by setupConfig.
	cfg *config.Config
	// srv is the API server under test.
	srv *Server
}
|
||||||
|
|
||||||
|
func (suite *DocumentsTestSuite) setupConfig() *config.Config {
|
||||||
|
return &config.Config{
|
||||||
|
ListenPort: "8080",
|
||||||
|
DBType: "memory",
|
||||||
|
DBName: "test",
|
||||||
|
ConfigPath: "/tmp",
|
||||||
|
CookieAuthKey: "test-auth-key-32-bytes-long-enough",
|
||||||
|
CookieEncKey: "0123456789abcdef",
|
||||||
|
CookieSecure: false,
|
||||||
|
CookieHTTPOnly: true,
|
||||||
|
Version: "test",
|
||||||
|
DemoMode: false,
|
||||||
|
RegistrationEnabled: true,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDocuments(t *testing.T) {
|
||||||
|
suite.Run(t, new(DocumentsTestSuite))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *DocumentsTestSuite) SetupTest() {
|
||||||
|
suite.cfg = suite.setupConfig()
|
||||||
|
suite.db = database.NewMgr(suite.cfg)
|
||||||
|
suite.srv = NewServer(suite.db, suite.cfg, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
// createTestUser registers a user directly in the database via the shared
// auth helper (argon2 over the KOSync-compatible MD5 of the password).
func (suite *DocumentsTestSuite) createTestUser(username, password string) {
	suite.authTestSuiteHelper(username, password)
}
|
||||||
|
|
||||||
|
// login performs the auth login flow for the given credentials and returns
// the resulting session cookie for use on subsequent requests.
func (suite *DocumentsTestSuite) login(username, password string) *http.Cookie {
	return suite.authLoginHelper(username, password)
}
|
||||||
|
|
||||||
|
func (suite *DocumentsTestSuite) authTestSuiteHelper(username, password string) {
|
||||||
|
// MD5 hash for KOSync compatibility (matches existing system)
|
||||||
|
md5Hash := fmt.Sprintf("%x", md5.Sum([]byte(password)))
|
||||||
|
|
||||||
|
// Then argon2 hash the MD5
|
||||||
|
hashedPassword, err := argon2.CreateHash(md5Hash, argon2.DefaultParams)
|
||||||
|
suite.Require().NoError(err)
|
||||||
|
|
||||||
|
_, err = suite.db.Queries.CreateUser(suite.T().Context(), database.CreateUserParams{
|
||||||
|
ID: username,
|
||||||
|
Pass: &hashedPassword,
|
||||||
|
AuthHash: ptr.Of("test-auth-hash"),
|
||||||
|
Admin: true,
|
||||||
|
})
|
||||||
|
suite.Require().NoError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *DocumentsTestSuite) authLoginHelper(username, password string) *http.Cookie {
|
||||||
|
reqBody := LoginRequest{Username: username, Password: password}
|
||||||
|
body, err := json.Marshal(reqBody)
|
||||||
|
suite.Require().NoError(err)
|
||||||
|
|
||||||
|
req := httptest.NewRequest(http.MethodPost, "/api/v1/auth/login", bytes.NewReader(body))
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
suite.srv.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
suite.Equal(http.StatusOK, w.Code)
|
||||||
|
|
||||||
|
cookies := w.Result().Cookies()
|
||||||
|
suite.Require().Len(cookies, 1)
|
||||||
|
|
||||||
|
return cookies[0]
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *DocumentsTestSuite) TestAPIGetDocuments() {
|
||||||
|
suite.createTestUser("testuser", "testpass")
|
||||||
|
cookie := suite.login("testuser", "testpass")
|
||||||
|
|
||||||
|
req := httptest.NewRequest(http.MethodGet, "/api/v1/documents?page=1&limit=9", nil)
|
||||||
|
req.AddCookie(cookie)
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
|
||||||
|
suite.srv.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
suite.Equal(http.StatusOK, w.Code)
|
||||||
|
|
||||||
|
var resp DocumentsResponse
|
||||||
|
suite.Require().NoError(json.Unmarshal(w.Body.Bytes(), &resp))
|
||||||
|
suite.Equal(int64(1), resp.Page)
|
||||||
|
suite.Equal(int64(9), resp.Limit)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *DocumentsTestSuite) TestAPIGetDocumentsUnauthenticated() {
|
||||||
|
req := httptest.NewRequest(http.MethodGet, "/api/v1/documents", nil)
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
|
||||||
|
suite.srv.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
suite.Equal(http.StatusUnauthorized, w.Code)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *DocumentsTestSuite) TestAPIGetDocument() {
|
||||||
|
suite.createTestUser("testuser", "testpass")
|
||||||
|
|
||||||
|
docID := "test-doc-1"
|
||||||
|
_, err := suite.db.Queries.UpsertDocument(suite.T().Context(), database.UpsertDocumentParams{
|
||||||
|
ID: docID,
|
||||||
|
Title: ptr.Of("Test Document"),
|
||||||
|
Author: ptr.Of("Test Author"),
|
||||||
|
})
|
||||||
|
suite.Require().NoError(err)
|
||||||
|
|
||||||
|
cookie := suite.login("testuser", "testpass")
|
||||||
|
|
||||||
|
req := httptest.NewRequest(http.MethodGet, "/api/v1/documents/"+docID, nil)
|
||||||
|
req.AddCookie(cookie)
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
|
||||||
|
suite.srv.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
suite.Equal(http.StatusOK, w.Code)
|
||||||
|
|
||||||
|
var resp DocumentResponse
|
||||||
|
suite.Require().NoError(json.Unmarshal(w.Body.Bytes(), &resp))
|
||||||
|
suite.Equal(docID, resp.Document.Id)
|
||||||
|
suite.Equal("Test Document", resp.Document.Title)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *DocumentsTestSuite) TestAPIGetDocumentNotFound() {
|
||||||
|
suite.createTestUser("testuser", "testpass")
|
||||||
|
cookie := suite.login("testuser", "testpass")
|
||||||
|
|
||||||
|
req := httptest.NewRequest(http.MethodGet, "/api/v1/documents/non-existent", nil)
|
||||||
|
req.AddCookie(cookie)
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
|
||||||
|
suite.srv.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
suite.Equal(http.StatusNotFound, w.Code)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *DocumentsTestSuite) TestAPIGetDocumentCoverUnauthenticated() {
|
||||||
|
req := httptest.NewRequest(http.MethodGet, "/api/v1/documents/test-id/cover", nil)
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
|
||||||
|
suite.srv.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
suite.Equal(http.StatusUnauthorized, w.Code)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *DocumentsTestSuite) TestAPIGetDocumentFileUnauthenticated() {
|
||||||
|
req := httptest.NewRequest(http.MethodGet, "/api/v1/documents/test-id/file", nil)
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
|
||||||
|
suite.srv.ServeHTTP(w, req)
|
||||||
|
|
||||||
|
suite.Equal(http.StatusUnauthorized, w.Code)
|
||||||
|
}
|
||||||
3
api/v1/generate.go
Normal file
3
api/v1/generate.go
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
package v1
|
||||||
|
|
||||||
|
//go:generate oapi-codegen -config oapi-codegen.yaml openapi.yaml
|
||||||
226
api/v1/home.go
Normal file
226
api/v1/home.go
Normal file
@@ -0,0 +1,226 @@
|
|||||||
|
package v1
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"sort"
|
||||||
|
|
||||||
|
log "github.com/sirupsen/logrus"
|
||||||
|
"reichard.io/antholume/database"
|
||||||
|
"reichard.io/antholume/graph"
|
||||||
|
)
|
||||||
|
|
||||||
|
// GET /home
|
||||||
|
func (s *Server) GetHome(ctx context.Context, request GetHomeRequestObject) (GetHomeResponseObject, error) {
|
||||||
|
auth, ok := s.getSessionFromContext(ctx)
|
||||||
|
if !ok {
|
||||||
|
return GetHome401JSONResponse{Code: 401, Message: "Unauthorized"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get database info
|
||||||
|
dbInfo, err := s.db.Queries.GetDatabaseInfo(ctx, auth.UserName)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("GetDatabaseInfo DB Error:", err)
|
||||||
|
return GetHome500JSONResponse{Code: 500, Message: "Database error"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get streaks
|
||||||
|
streaks, err := s.db.Queries.GetUserStreaks(ctx, auth.UserName)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("GetUserStreaks DB Error:", err)
|
||||||
|
return GetHome500JSONResponse{Code: 500, Message: "Database error"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get graph data
|
||||||
|
graphData, err := s.db.Queries.GetDailyReadStats(ctx, auth.UserName)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("GetDailyReadStats DB Error:", err)
|
||||||
|
return GetHome500JSONResponse{Code: 500, Message: "Database error"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get user statistics
|
||||||
|
userStats, err := s.db.Queries.GetUserStatistics(ctx)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("GetUserStatistics DB Error:", err)
|
||||||
|
return GetHome500JSONResponse{Code: 500, Message: "Database error"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build response
|
||||||
|
response := HomeResponse{
|
||||||
|
DatabaseInfo: DatabaseInfo{
|
||||||
|
DocumentsSize: dbInfo.DocumentsSize,
|
||||||
|
ActivitySize: dbInfo.ActivitySize,
|
||||||
|
ProgressSize: dbInfo.ProgressSize,
|
||||||
|
DevicesSize: dbInfo.DevicesSize,
|
||||||
|
},
|
||||||
|
Streaks: StreaksResponse{
|
||||||
|
Streaks: convertStreaks(streaks),
|
||||||
|
},
|
||||||
|
GraphData: GraphDataResponse{
|
||||||
|
GraphData: convertGraphData(graphData),
|
||||||
|
},
|
||||||
|
UserStatistics: arrangeUserStatistics(userStats),
|
||||||
|
}
|
||||||
|
|
||||||
|
return GetHome200JSONResponse(response), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GET /home/streaks
|
||||||
|
func (s *Server) GetStreaks(ctx context.Context, request GetStreaksRequestObject) (GetStreaksResponseObject, error) {
|
||||||
|
auth, ok := s.getSessionFromContext(ctx)
|
||||||
|
if !ok {
|
||||||
|
return GetStreaks401JSONResponse{Code: 401, Message: "Unauthorized"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
streaks, err := s.db.Queries.GetUserStreaks(ctx, auth.UserName)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("GetUserStreaks DB Error:", err)
|
||||||
|
return GetStreaks500JSONResponse{Code: 500, Message: "Database error"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
response := StreaksResponse{
|
||||||
|
Streaks: convertStreaks(streaks),
|
||||||
|
}
|
||||||
|
|
||||||
|
return GetStreaks200JSONResponse(response), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GET /home/graph
|
||||||
|
func (s *Server) GetGraphData(ctx context.Context, request GetGraphDataRequestObject) (GetGraphDataResponseObject, error) {
|
||||||
|
auth, ok := s.getSessionFromContext(ctx)
|
||||||
|
if !ok {
|
||||||
|
return GetGraphData401JSONResponse{Code: 401, Message: "Unauthorized"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
graphData, err := s.db.Queries.GetDailyReadStats(ctx, auth.UserName)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("GetDailyReadStats DB Error:", err)
|
||||||
|
return GetGraphData500JSONResponse{Code: 500, Message: "Database error"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
response := GraphDataResponse{
|
||||||
|
GraphData: convertGraphData(graphData),
|
||||||
|
}
|
||||||
|
|
||||||
|
return GetGraphData200JSONResponse(response), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GET /home/statistics
|
||||||
|
func (s *Server) GetUserStatistics(ctx context.Context, request GetUserStatisticsRequestObject) (GetUserStatisticsResponseObject, error) {
|
||||||
|
_, ok := s.getSessionFromContext(ctx)
|
||||||
|
if !ok {
|
||||||
|
return GetUserStatistics401JSONResponse{Code: 401, Message: "Unauthorized"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
userStats, err := s.db.Queries.GetUserStatistics(ctx)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("GetUserStatistics DB Error:", err)
|
||||||
|
return GetUserStatistics500JSONResponse{Code: 500, Message: "Database error"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
response := arrangeUserStatistics(userStats)
|
||||||
|
return GetUserStatistics200JSONResponse(response), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func convertStreaks(streaks []database.UserStreak) []UserStreak {
|
||||||
|
result := make([]UserStreak, len(streaks))
|
||||||
|
for i, streak := range streaks {
|
||||||
|
result[i] = UserStreak{
|
||||||
|
Window: streak.Window,
|
||||||
|
MaxStreak: streak.MaxStreak,
|
||||||
|
MaxStreakStartDate: streak.MaxStreakStartDate,
|
||||||
|
MaxStreakEndDate: streak.MaxStreakEndDate,
|
||||||
|
CurrentStreak: streak.CurrentStreak,
|
||||||
|
CurrentStreakStartDate: streak.CurrentStreakStartDate,
|
||||||
|
CurrentStreakEndDate: streak.CurrentStreakEndDate,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
func convertGraphData(graphData []database.GetDailyReadStatsRow) []GraphDataPoint {
|
||||||
|
result := make([]GraphDataPoint, len(graphData))
|
||||||
|
for i, data := range graphData {
|
||||||
|
result[i] = GraphDataPoint{
|
||||||
|
Date: data.Date,
|
||||||
|
MinutesRead: data.MinutesRead,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
func arrangeUserStatistics(userStatistics []database.GetUserStatisticsRow) UserStatisticsResponse {
|
||||||
|
// Sort by WPM for each period
|
||||||
|
sortByWPM := func(stats []database.GetUserStatisticsRow, getter func(database.GetUserStatisticsRow) float64) []LeaderboardEntry {
|
||||||
|
sorted := append([]database.GetUserStatisticsRow(nil), stats...)
|
||||||
|
sort.SliceStable(sorted, func(i, j int) bool {
|
||||||
|
return getter(sorted[i]) > getter(sorted[j])
|
||||||
|
})
|
||||||
|
|
||||||
|
result := make([]LeaderboardEntry, len(sorted))
|
||||||
|
for i, item := range sorted {
|
||||||
|
result[i] = LeaderboardEntry{UserId: item.UserID, Value: getter(item)}
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sort by duration (seconds) for each period
|
||||||
|
sortByDuration := func(stats []database.GetUserStatisticsRow, getter func(database.GetUserStatisticsRow) int64) []LeaderboardEntry {
|
||||||
|
sorted := append([]database.GetUserStatisticsRow(nil), stats...)
|
||||||
|
sort.SliceStable(sorted, func(i, j int) bool {
|
||||||
|
return getter(sorted[i]) > getter(sorted[j])
|
||||||
|
})
|
||||||
|
|
||||||
|
result := make([]LeaderboardEntry, len(sorted))
|
||||||
|
for i, item := range sorted {
|
||||||
|
result[i] = LeaderboardEntry{UserId: item.UserID, Value: float64(getter(item))}
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sort by words for each period
|
||||||
|
sortByWords := func(stats []database.GetUserStatisticsRow, getter func(database.GetUserStatisticsRow) int64) []LeaderboardEntry {
|
||||||
|
sorted := append([]database.GetUserStatisticsRow(nil), stats...)
|
||||||
|
sort.SliceStable(sorted, func(i, j int) bool {
|
||||||
|
return getter(sorted[i]) > getter(sorted[j])
|
||||||
|
})
|
||||||
|
|
||||||
|
result := make([]LeaderboardEntry, len(sorted))
|
||||||
|
for i, item := range sorted {
|
||||||
|
result[i] = LeaderboardEntry{UserId: item.UserID, Value: float64(getter(item))}
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
return UserStatisticsResponse{
|
||||||
|
Wpm: LeaderboardData{
|
||||||
|
All: sortByWPM(userStatistics, func(s database.GetUserStatisticsRow) float64 { return s.TotalWpm }),
|
||||||
|
Year: sortByWPM(userStatistics, func(s database.GetUserStatisticsRow) float64 { return s.YearlyWpm }),
|
||||||
|
Month: sortByWPM(userStatistics, func(s database.GetUserStatisticsRow) float64 { return s.MonthlyWpm }),
|
||||||
|
Week: sortByWPM(userStatistics, func(s database.GetUserStatisticsRow) float64 { return s.WeeklyWpm }),
|
||||||
|
},
|
||||||
|
Duration: LeaderboardData{
|
||||||
|
All: sortByDuration(userStatistics, func(s database.GetUserStatisticsRow) int64 { return s.TotalSeconds }),
|
||||||
|
Year: sortByDuration(userStatistics, func(s database.GetUserStatisticsRow) int64 { return s.YearlySeconds }),
|
||||||
|
Month: sortByDuration(userStatistics, func(s database.GetUserStatisticsRow) int64 { return s.MonthlySeconds }),
|
||||||
|
Week: sortByDuration(userStatistics, func(s database.GetUserStatisticsRow) int64 { return s.WeeklySeconds }),
|
||||||
|
},
|
||||||
|
Words: LeaderboardData{
|
||||||
|
All: sortByWords(userStatistics, func(s database.GetUserStatisticsRow) int64 { return s.TotalWordsRead }),
|
||||||
|
Year: sortByWords(userStatistics, func(s database.GetUserStatisticsRow) int64 { return s.YearlyWordsRead }),
|
||||||
|
Month: sortByWords(userStatistics, func(s database.GetUserStatisticsRow) int64 { return s.MonthlyWordsRead }),
|
||||||
|
Week: sortByWords(userStatistics, func(s database.GetUserStatisticsRow) int64 { return s.WeeklyWordsRead }),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetSVGGraphData generates SVG bezier path for graph visualization
|
||||||
|
func GetSVGGraphData(inputData []GraphDataPoint, svgWidth int, svgHeight int) graph.SVGGraphData {
|
||||||
|
// Convert to int64 slice expected by graph package
|
||||||
|
intData := make([]int64, len(inputData))
|
||||||
|
|
||||||
|
for i, data := range inputData {
|
||||||
|
intData[i] = int64(data.MinutesRead)
|
||||||
|
}
|
||||||
|
|
||||||
|
return graph.GetSVGGraphData(intData, svgWidth, svgHeight)
|
||||||
|
}
|
||||||
6
api/v1/oapi-codegen.yaml
Normal file
6
api/v1/oapi-codegen.yaml
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
# oapi-codegen configuration for the v1 API package.
# Generated strict-server handlers and model types are written to api.gen.go.
package: v1
generate:
  std-http-server: true # net/http server scaffolding
  strict-server: true   # typed request/response handler interface
  models: true          # schema model types
output: api.gen.go
|
||||||
1977
api/v1/openapi.yaml
Normal file
1977
api/v1/openapi.yaml
Normal file
File diff suppressed because it is too large
Load Diff
163
api/v1/progress.go
Normal file
163
api/v1/progress.go
Normal file
@@ -0,0 +1,163 @@
|
|||||||
|
package v1
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"math"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
log "github.com/sirupsen/logrus"
|
||||||
|
"reichard.io/antholume/database"
|
||||||
|
)
|
||||||
|
|
||||||
|
// GET /progress
|
||||||
|
func (s *Server) GetProgressList(ctx context.Context, request GetProgressListRequestObject) (GetProgressListResponseObject, error) {
|
||||||
|
auth, ok := s.getSessionFromContext(ctx)
|
||||||
|
if !ok {
|
||||||
|
return GetProgressList401JSONResponse{Code: 401, Message: "Unauthorized"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
page := int64(1)
|
||||||
|
if request.Params.Page != nil {
|
||||||
|
page = *request.Params.Page
|
||||||
|
}
|
||||||
|
|
||||||
|
limit := int64(15)
|
||||||
|
if request.Params.Limit != nil {
|
||||||
|
limit = *request.Params.Limit
|
||||||
|
}
|
||||||
|
|
||||||
|
filter := database.GetProgressParams{
|
||||||
|
UserID: auth.UserName,
|
||||||
|
Offset: (page - 1) * limit,
|
||||||
|
Limit: limit,
|
||||||
|
}
|
||||||
|
|
||||||
|
if request.Params.Document != nil && *request.Params.Document != "" {
|
||||||
|
filter.DocFilter = true
|
||||||
|
filter.DocumentID = *request.Params.Document
|
||||||
|
}
|
||||||
|
|
||||||
|
progress, err := s.db.Queries.GetProgress(ctx, filter)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("GetProgress DB Error:", err)
|
||||||
|
return GetProgressList500JSONResponse{Code: 500, Message: "Database error"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
total := int64(len(progress))
|
||||||
|
var nextPage *int64
|
||||||
|
var previousPage *int64
|
||||||
|
|
||||||
|
// Calculate total pages
|
||||||
|
totalPages := int64(math.Ceil(float64(total) / float64(limit)))
|
||||||
|
if page < totalPages {
|
||||||
|
nextPage = ptrOf(page + 1)
|
||||||
|
}
|
||||||
|
if page > 1 {
|
||||||
|
previousPage = ptrOf(page - 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
apiProgress := make([]Progress, len(progress))
|
||||||
|
for i, row := range progress {
|
||||||
|
apiProgress[i] = Progress{
|
||||||
|
Title: row.Title,
|
||||||
|
Author: row.Author,
|
||||||
|
DeviceName: &row.DeviceName,
|
||||||
|
Percentage: &row.Percentage,
|
||||||
|
DocumentId: &row.DocumentID,
|
||||||
|
UserId: &row.UserID,
|
||||||
|
CreatedAt: parseTimePtr(row.CreatedAt),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
response := ProgressListResponse{
|
||||||
|
Progress: &apiProgress,
|
||||||
|
Page: &page,
|
||||||
|
Limit: &limit,
|
||||||
|
NextPage: nextPage,
|
||||||
|
PreviousPage: previousPage,
|
||||||
|
Total: &total,
|
||||||
|
}
|
||||||
|
|
||||||
|
return GetProgressList200JSONResponse(response), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GET /progress/{id}
|
||||||
|
func (s *Server) GetProgress(ctx context.Context, request GetProgressRequestObject) (GetProgressResponseObject, error) {
|
||||||
|
auth, ok := s.getSessionFromContext(ctx)
|
||||||
|
if !ok {
|
||||||
|
return GetProgress401JSONResponse{Code: 401, Message: "Unauthorized"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
row, err := s.db.Queries.GetDocumentProgress(ctx, database.GetDocumentProgressParams{
|
||||||
|
UserID: auth.UserName,
|
||||||
|
DocumentID: request.Id,
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
log.Error("GetDocumentProgress DB Error:", err)
|
||||||
|
return GetProgress404JSONResponse{Code: 404, Message: "Progress not found"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
apiProgress := Progress{
|
||||||
|
DeviceName: &row.DeviceName,
|
||||||
|
DeviceId: &row.DeviceID,
|
||||||
|
Percentage: &row.Percentage,
|
||||||
|
Progress: &row.Progress,
|
||||||
|
DocumentId: &row.DocumentID,
|
||||||
|
UserId: &row.UserID,
|
||||||
|
CreatedAt: parseTimePtr(row.CreatedAt),
|
||||||
|
}
|
||||||
|
|
||||||
|
response := ProgressResponse{
|
||||||
|
Progress: &apiProgress,
|
||||||
|
}
|
||||||
|
|
||||||
|
return GetProgress200JSONResponse(response), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// PUT /progress
|
||||||
|
func (s *Server) UpdateProgress(ctx context.Context, request UpdateProgressRequestObject) (UpdateProgressResponseObject, error) {
|
||||||
|
auth, ok := s.getSessionFromContext(ctx)
|
||||||
|
if !ok {
|
||||||
|
return UpdateProgress401JSONResponse{Code: 401, Message: "Unauthorized"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if request.Body == nil {
|
||||||
|
return UpdateProgress400JSONResponse{Code: 400, Message: "Request body is required"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, err := s.db.Queries.UpsertDevice(ctx, database.UpsertDeviceParams{
|
||||||
|
ID: request.Body.DeviceId,
|
||||||
|
UserID: auth.UserName,
|
||||||
|
DeviceName: request.Body.DeviceName,
|
||||||
|
LastSynced: time.Now().UTC().Format(time.RFC3339),
|
||||||
|
}); err != nil {
|
||||||
|
log.Error("UpsertDevice DB Error:", err)
|
||||||
|
return UpdateProgress500JSONResponse{Code: 500, Message: "Database error"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, err := s.db.Queries.UpsertDocument(ctx, database.UpsertDocumentParams{
|
||||||
|
ID: request.Body.DocumentId,
|
||||||
|
}); err != nil {
|
||||||
|
log.Error("UpsertDocument DB Error:", err)
|
||||||
|
return UpdateProgress500JSONResponse{Code: 500, Message: "Database error"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
progress, err := s.db.Queries.UpdateProgress(ctx, database.UpdateProgressParams{
|
||||||
|
Percentage: request.Body.Percentage,
|
||||||
|
DocumentID: request.Body.DocumentId,
|
||||||
|
DeviceID: request.Body.DeviceId,
|
||||||
|
UserID: auth.UserName,
|
||||||
|
Progress: request.Body.Progress,
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
log.Error("UpdateProgress DB Error:", err)
|
||||||
|
return UpdateProgress400JSONResponse{Code: 400, Message: "Invalid request"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
response := UpdateProgressResponse{
|
||||||
|
DocumentId: progress.DocumentID,
|
||||||
|
Timestamp: parseTime(progress.CreatedAt),
|
||||||
|
}
|
||||||
|
|
||||||
|
return UpdateProgress200JSONResponse(response), nil
|
||||||
|
}
|
||||||
59
api/v1/search.go
Normal file
59
api/v1/search.go
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
package v1
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
|
||||||
|
"reichard.io/antholume/search"
|
||||||
|
log "github.com/sirupsen/logrus"
|
||||||
|
)
|
||||||
|
|
||||||
|
// GET /search
|
||||||
|
func (s *Server) GetSearch(ctx context.Context, request GetSearchRequestObject) (GetSearchResponseObject, error) {
|
||||||
|
|
||||||
|
if request.Params.Query == "" {
|
||||||
|
return GetSearch400JSONResponse{Code: 400, Message: "Invalid query"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
query := request.Params.Query
|
||||||
|
source := string(request.Params.Source)
|
||||||
|
|
||||||
|
// Validate source
|
||||||
|
if source != "LibGen" && source != "Annas Archive" {
|
||||||
|
return GetSearch400JSONResponse{Code: 400, Message: "Invalid source"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
searchResults, err := search.SearchBook(query, search.Source(source))
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Search Error:", err)
|
||||||
|
return GetSearch500JSONResponse{Code: 500, Message: "Search error"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
apiResults := make([]SearchItem, len(searchResults))
|
||||||
|
for i, item := range searchResults {
|
||||||
|
apiResults[i] = SearchItem{
|
||||||
|
Id: ptrOf(item.ID),
|
||||||
|
Title: ptrOf(item.Title),
|
||||||
|
Author: ptrOf(item.Author),
|
||||||
|
Language: ptrOf(item.Language),
|
||||||
|
Series: ptrOf(item.Series),
|
||||||
|
FileType: ptrOf(item.FileType),
|
||||||
|
FileSize: ptrOf(item.FileSize),
|
||||||
|
UploadDate: ptrOf(item.UploadDate),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
response := SearchResponse{
|
||||||
|
Results: apiResults,
|
||||||
|
Source: source,
|
||||||
|
Query: query,
|
||||||
|
}
|
||||||
|
|
||||||
|
return GetSearch200JSONResponse(response), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// POST /search
//
// PostSearch is intentionally a no-op for the JSON API: per the inline notes
// below, the SSR template uses this route to queue a download, while API
// clients trigger the actual download through POST /documents. It simply
// acknowledges the request with a 200.
func (s *Server) PostSearch(ctx context.Context, request PostSearchRequestObject) (PostSearchResponseObject, error) {
	// This endpoint is used by the SSR template to queue a download
	// For the API, we just return success - the actual download happens via /documents POST
	return PostSearch200Response{}, nil
}
|
||||||
99
api/v1/server.go
Normal file
99
api/v1/server.go
Normal file
@@ -0,0 +1,99 @@
|
|||||||
|
package v1
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"io/fs"
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"reichard.io/antholume/config"
|
||||||
|
"reichard.io/antholume/database"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Compile-time assertion that *Server satisfies the generated strict
// handler interface.
var _ StrictServerInterface = (*Server)(nil)

// Server is the native HTTP implementation of the v1 API.
type Server struct {
	mux    *http.ServeMux      // router; API routes are mounted under /api/v1 in NewServer
	db     *database.DBManager // database access layer (s.db.Queries)
	cfg    *config.Config      // runtime configuration (version, feature flags)
	assets fs.FS               // static assets filesystem — TODO confirm consumers; not used in this file's visible code
}
|
||||||
|
|
||||||
|
// NewServer creates a new native HTTP server
|
||||||
|
func NewServer(db *database.DBManager, cfg *config.Config, assets fs.FS) *Server {
|
||||||
|
s := &Server{
|
||||||
|
mux: http.NewServeMux(),
|
||||||
|
db: db,
|
||||||
|
cfg: cfg,
|
||||||
|
assets: assets,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create strict handler with authentication middleware
|
||||||
|
strictHandler := NewStrictHandler(s, []StrictMiddlewareFunc{s.authMiddleware})
|
||||||
|
|
||||||
|
s.mux = HandlerFromMuxWithBaseURL(strictHandler, s.mux, "/api/v1").(*http.ServeMux)
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
|
||||||
|
// ServeHTTP implements http.Handler by delegating to the internal mux,
// which serves every route mounted under /api/v1.
func (s *Server) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	s.mux.ServeHTTP(w, r)
}
|
||||||
|
|
||||||
|
// authMiddleware adds authentication context to requests
|
||||||
|
func (s *Server) authMiddleware(handler StrictHandlerFunc, operationID string) StrictHandlerFunc {
|
||||||
|
return func(ctx context.Context, w http.ResponseWriter, r *http.Request, request any) (any, error) {
|
||||||
|
// Store request and response in context for all handlers
|
||||||
|
ctx = context.WithValue(ctx, "request", r)
|
||||||
|
ctx = context.WithValue(ctx, "response", w)
|
||||||
|
|
||||||
|
// Skip auth for public auth and info endpoints - cover and file require auth via cookies
|
||||||
|
if operationID == "Login" || operationID == "Register" || operationID == "GetInfo" {
|
||||||
|
return handler(ctx, w, r, request)
|
||||||
|
}
|
||||||
|
|
||||||
|
auth, ok := s.getSession(r)
|
||||||
|
if !ok {
|
||||||
|
// Write 401 response directly
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
w.WriteHeader(401)
|
||||||
|
json.NewEncoder(w).Encode(ErrorResponse{Code: 401, Message: "Unauthorized"})
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check admin status for admin-only endpoints
|
||||||
|
adminEndpoints := []string{
|
||||||
|
"GetAdmin",
|
||||||
|
"PostAdminAction",
|
||||||
|
"GetUsers",
|
||||||
|
"UpdateUser",
|
||||||
|
"GetImportDirectory",
|
||||||
|
"PostImport",
|
||||||
|
"GetImportResults",
|
||||||
|
"GetLogs",
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, adminEndpoint := range adminEndpoints {
|
||||||
|
if operationID == adminEndpoint && !auth.IsAdmin {
|
||||||
|
// Write 403 response directly
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
w.WriteHeader(403)
|
||||||
|
json.NewEncoder(w).Encode(ErrorResponse{Code: 403, Message: "Admin privileges required"})
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Store auth in context for handlers to access
|
||||||
|
ctx = context.WithValue(ctx, "auth", auth)
|
||||||
|
|
||||||
|
return handler(ctx, w, r, request)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetInfo returns server information
|
||||||
|
func (s *Server) GetInfo(ctx context.Context, request GetInfoRequestObject) (GetInfoResponseObject, error) {
|
||||||
|
return GetInfo200JSONResponse{
|
||||||
|
Version: s.cfg.Version,
|
||||||
|
SearchEnabled: s.cfg.SearchEnabled,
|
||||||
|
RegistrationEnabled: s.cfg.RegistrationEnabled,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
58
api/v1/server_test.go
Normal file
58
api/v1/server_test.go
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
package v1
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/suite"
|
||||||
|
|
||||||
|
"reichard.io/antholume/config"
|
||||||
|
"reichard.io/antholume/database"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ServerTestSuite bundles the fixtures shared by the server tests: a
// database manager, a test configuration, and the server under test.
type ServerTestSuite struct {
	suite.Suite
	db  *database.DBManager // database manager, rebuilt per test in SetupTest
	cfg *config.Config      // test configuration used to build srv
	srv *Server             // server under test
}
|
||||||
|
|
||||||
|
// TestServer is the `go test` entry point that runs ServerTestSuite.
func TestServer(t *testing.T) {
	suite.Run(t, new(ServerTestSuite))
}
|
||||||
|
|
||||||
|
// SetupTest builds a fresh in-memory database and server before each test.
func (suite *ServerTestSuite) SetupTest() {
	suite.cfg = &config.Config{
		ListenPort:          "8080",
		DBType:              "memory",
		DBName:              "test",
		ConfigPath:          "/tmp",
		CookieAuthKey:       "test-auth-key-32-bytes-long-enough",
		CookieEncKey:        "0123456789abcdef",
		CookieSecure:        false,
		CookieHTTPOnly:      true,
		Version:             "test",
		DemoMode:            false,
		RegistrationEnabled: true,
	}

	suite.db = database.NewMgr(suite.cfg)
	suite.srv = NewServer(suite.db, suite.cfg, nil)
}
|
||||||
|
|
||||||
|
// TestNewServer checks that the constructor populates the router, database,
// and configuration fields.
func (suite *ServerTestSuite) TestNewServer() {
	suite.NotNil(suite.srv)
	suite.NotNil(suite.srv.mux)
	suite.NotNil(suite.srv.db)
	suite.NotNil(suite.srv.cfg)
}
|
||||||
|
|
||||||
|
// TestServerServeHTTP verifies that an unauthenticated request to a
// protected route is rejected with 401 by the auth middleware.
func (suite *ServerTestSuite) TestServerServeHTTP() {
	req := httptest.NewRequest(http.MethodGet, "/api/v1/auth/me", nil)
	w := httptest.NewRecorder()

	suite.srv.ServeHTTP(w, req)

	suite.Equal(http.StatusUnauthorized, w.Code)
}
|
||||||
157
api/v1/settings.go
Normal file
157
api/v1/settings.go
Normal file
@@ -0,0 +1,157 @@
|
|||||||
|
package v1
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"crypto/md5"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"reichard.io/antholume/database"
|
||||||
|
argon2id "github.com/alexedwards/argon2id"
|
||||||
|
)
|
||||||
|
|
||||||
|
// GET /settings
|
||||||
|
func (s *Server) GetSettings(ctx context.Context, request GetSettingsRequestObject) (GetSettingsResponseObject, error) {
|
||||||
|
auth, ok := s.getSessionFromContext(ctx)
|
||||||
|
if !ok {
|
||||||
|
return GetSettings401JSONResponse{Code: 401, Message: "Unauthorized"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
user, err := s.db.Queries.GetUser(ctx, auth.UserName)
|
||||||
|
if err != nil {
|
||||||
|
return GetSettings500JSONResponse{Code: 500, Message: err.Error()}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
devices, err := s.db.Queries.GetDevices(ctx, auth.UserName)
|
||||||
|
if err != nil {
|
||||||
|
return GetSettings500JSONResponse{Code: 500, Message: err.Error()}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
apiDevices := make([]Device, len(devices))
|
||||||
|
for i, device := range devices {
|
||||||
|
apiDevices[i] = Device{
|
||||||
|
Id: &device.ID,
|
||||||
|
DeviceName: &device.DeviceName,
|
||||||
|
CreatedAt: parseTimePtr(device.CreatedAt),
|
||||||
|
LastSynced: parseTimePtr(device.LastSynced),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
response := SettingsResponse{
|
||||||
|
User: UserData{Username: auth.UserName, IsAdmin: auth.IsAdmin},
|
||||||
|
Timezone: user.Timezone,
|
||||||
|
Devices: &apiDevices,
|
||||||
|
}
|
||||||
|
return GetSettings200JSONResponse(response), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// authorizeCredentials verifies if credentials are valid
//
// Looks up the user row by username and compares the supplied password
// string against the stored argon2id hash. Returns false on any lookup or
// comparison failure.
//
// NOTE(review): UpdateSettings stores hashes as argon2id over the MD5 hex
// digest of the plaintext, while this function passes `password` straight
// to ComparePasswordAndHash. If the caller does not pre-hash with MD5,
// passwords set via UpdateSettings will never match here — confirm which
// convention callers follow.
func (s *Server) authorizeCredentials(ctx context.Context, username string, password string) bool {
	user, err := s.db.Queries.GetUser(ctx, username)
	if err != nil {
		return false
	}

	// Try argon2 hash comparison
	if match, err := argon2id.ComparePasswordAndHash(password, *user.Pass); err == nil && match {
		return true
	}

	return false
}
|
||||||
|
|
||||||
|
// PUT /settings
|
||||||
|
func (s *Server) UpdateSettings(ctx context.Context, request UpdateSettingsRequestObject) (UpdateSettingsResponseObject, error) {
|
||||||
|
auth, ok := s.getSessionFromContext(ctx)
|
||||||
|
if !ok {
|
||||||
|
return UpdateSettings401JSONResponse{Code: 401, Message: "Unauthorized"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if request.Body == nil {
|
||||||
|
return UpdateSettings400JSONResponse{Code: 400, Message: "Request body is required"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
user, err := s.db.Queries.GetUser(ctx, auth.UserName)
|
||||||
|
if err != nil {
|
||||||
|
return UpdateSettings500JSONResponse{Code: 500, Message: err.Error()}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
updateParams := database.UpdateUserParams{
|
||||||
|
UserID: auth.UserName,
|
||||||
|
Admin: auth.IsAdmin,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update password if provided
|
||||||
|
if request.Body.NewPassword != nil {
|
||||||
|
if request.Body.Password == nil {
|
||||||
|
return UpdateSettings400JSONResponse{Code: 400, Message: "Current password is required to set new password"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify current password - first try bcrypt (new format), then argon2, then MD5 (legacy format)
|
||||||
|
currentPasswordMatched := false
|
||||||
|
|
||||||
|
// Try argon2 (current format)
|
||||||
|
if !currentPasswordMatched {
|
||||||
|
currentPassword := fmt.Sprintf("%x", md5.Sum([]byte(*request.Body.Password)))
|
||||||
|
if match, err := argon2id.ComparePasswordAndHash(currentPassword, *user.Pass); err == nil && match {
|
||||||
|
currentPasswordMatched = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if !currentPasswordMatched {
|
||||||
|
return UpdateSettings400JSONResponse{Code: 400, Message: "Invalid current password"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Hash new password with argon2
|
||||||
|
newPassword := fmt.Sprintf("%x", md5.Sum([]byte(*request.Body.NewPassword)))
|
||||||
|
hashedPassword, err := argon2id.CreateHash(newPassword, argon2id.DefaultParams)
|
||||||
|
if err != nil {
|
||||||
|
return UpdateSettings500JSONResponse{Code: 500, Message: "Failed to hash password"}, nil
|
||||||
|
}
|
||||||
|
updateParams.Password = &hashedPassword
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update timezone if provided
|
||||||
|
if request.Body.Timezone != nil {
|
||||||
|
updateParams.Timezone = request.Body.Timezone
|
||||||
|
}
|
||||||
|
|
||||||
|
// If nothing to update, return error
|
||||||
|
if request.Body.NewPassword == nil && request.Body.Timezone == nil {
|
||||||
|
return UpdateSettings400JSONResponse{Code: 400, Message: "At least one field must be provided"}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update user
|
||||||
|
_, err = s.db.Queries.UpdateUser(ctx, updateParams)
|
||||||
|
if err != nil {
|
||||||
|
return UpdateSettings500JSONResponse{Code: 500, Message: err.Error()}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get updated settings to return
|
||||||
|
user, err = s.db.Queries.GetUser(ctx, auth.UserName)
|
||||||
|
if err != nil {
|
||||||
|
return UpdateSettings500JSONResponse{Code: 500, Message: err.Error()}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
devices, err := s.db.Queries.GetDevices(ctx, auth.UserName)
|
||||||
|
if err != nil {
|
||||||
|
return UpdateSettings500JSONResponse{Code: 500, Message: err.Error()}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
apiDevices := make([]Device, len(devices))
|
||||||
|
for i, device := range devices {
|
||||||
|
apiDevices[i] = Device{
|
||||||
|
Id: &device.ID,
|
||||||
|
DeviceName: &device.DeviceName,
|
||||||
|
CreatedAt: parseTimePtr(device.CreatedAt),
|
||||||
|
LastSynced: parseTimePtr(device.LastSynced),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
response := SettingsResponse{
|
||||||
|
User: UserData{Username: auth.UserName, IsAdmin: auth.IsAdmin},
|
||||||
|
Timezone: user.Timezone,
|
||||||
|
Devices: &apiDevices,
|
||||||
|
}
|
||||||
|
return UpdateSettings200JSONResponse(response), nil
|
||||||
|
}
|
||||||
|
|
||||||
84
api/v1/utils.go
Normal file
84
api/v1/utils.go
Normal file
@@ -0,0 +1,84 @@
|
|||||||
|
package v1
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"net/http"
|
||||||
|
"net/url"
|
||||||
|
"strconv"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// writeJSON writes a JSON response (deprecated - used by tests only)
|
||||||
|
func writeJSON(w http.ResponseWriter, status int, data any) {
|
||||||
|
w.Header().Set("Content-Type", "application/json")
|
||||||
|
w.WriteHeader(status)
|
||||||
|
if err := json.NewEncoder(w).Encode(data); err != nil {
|
||||||
|
writeJSONError(w, http.StatusInternalServerError, "Failed to encode response")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// writeJSONError writes a JSON error response (deprecated - used by tests only)
|
||||||
|
func writeJSONError(w http.ResponseWriter, status int, message string) {
|
||||||
|
writeJSON(w, status, ErrorResponse{
|
||||||
|
Code: status,
|
||||||
|
Message: message,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// QueryParams represents parsed query parameters (deprecated - used by tests only)
|
||||||
|
type QueryParams struct {
|
||||||
|
Page int64
|
||||||
|
Limit int64
|
||||||
|
Search *string
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseQueryParams parses URL query parameters (deprecated - used by tests only)
|
||||||
|
func parseQueryParams(query url.Values, defaultLimit int64) QueryParams {
|
||||||
|
page, _ := strconv.ParseInt(query.Get("page"), 10, 64)
|
||||||
|
if page == 0 {
|
||||||
|
page = 1
|
||||||
|
}
|
||||||
|
limit, _ := strconv.ParseInt(query.Get("limit"), 10, 64)
|
||||||
|
if limit == 0 {
|
||||||
|
limit = defaultLimit
|
||||||
|
}
|
||||||
|
search := query.Get("search")
|
||||||
|
var searchPtr *string
|
||||||
|
if search != "" {
|
||||||
|
searchPtr = ptrOf("%" + search + "%")
|
||||||
|
}
|
||||||
|
return QueryParams{
|
||||||
|
Page: page,
|
||||||
|
Limit: limit,
|
||||||
|
Search: searchPtr,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ptrOf returns a pointer to the given value
//
// Useful for populating optional (pointer-typed) API fields from literals.
// The pointer refers to a copy of v, so mutating *ptrOf(v) never affects
// the caller's value.
func ptrOf[T any](v T) *T {
	return &v
}
|
||||||
|
|
||||||
|
// parseTime parses a string to time.Time
|
||||||
|
func parseTime(s string) time.Time {
|
||||||
|
t, _ := time.Parse(time.RFC3339, s)
|
||||||
|
if t.IsZero() {
|
||||||
|
t, _ = time.Parse("2006-01-02T15:04:05", s)
|
||||||
|
}
|
||||||
|
return t
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseTimePtr parses an interface{} (from SQL) to *time.Time
|
||||||
|
func parseTimePtr(v interface{}) *time.Time {
|
||||||
|
if v == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if s, ok := v.(string); ok {
|
||||||
|
t := parseTime(s)
|
||||||
|
if t.IsZero() {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return &t
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
76
api/v1/utils_test.go
Normal file
76
api/v1/utils_test.go
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
package v1
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/suite"
|
||||||
|
)
|
||||||
|
|
||||||
|
// UtilsTestSuite groups the tests for the JSON and query-parsing helpers.
type UtilsTestSuite struct {
	suite.Suite
}
|
||||||
|
|
||||||
|
// TestUtils is the `go test` entry point that runs UtilsTestSuite.
func TestUtils(t *testing.T) {
	suite.Run(t, new(UtilsTestSuite))
}
|
||||||
|
|
||||||
|
func (suite *UtilsTestSuite) TestWriteJSON() {
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
data := map[string]string{"test": "value"}
|
||||||
|
|
||||||
|
writeJSON(w, http.StatusOK, data)
|
||||||
|
|
||||||
|
suite.Equal("application/json", w.Header().Get("Content-Type"))
|
||||||
|
suite.Equal(http.StatusOK, w.Code)
|
||||||
|
|
||||||
|
var resp map[string]string
|
||||||
|
suite.Require().NoError(json.Unmarshal(w.Body.Bytes(), &resp))
|
||||||
|
suite.Equal("value", resp["test"])
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *UtilsTestSuite) TestWriteJSONError() {
|
||||||
|
w := httptest.NewRecorder()
|
||||||
|
|
||||||
|
writeJSONError(w, http.StatusBadRequest, "test error")
|
||||||
|
|
||||||
|
suite.Equal(http.StatusBadRequest, w.Code)
|
||||||
|
|
||||||
|
var resp ErrorResponse
|
||||||
|
suite.Require().NoError(json.Unmarshal(w.Body.Bytes(), &resp))
|
||||||
|
suite.Equal(http.StatusBadRequest, resp.Code)
|
||||||
|
suite.Equal("test error", resp.Message)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *UtilsTestSuite) TestParseQueryParams() {
|
||||||
|
query := make(map[string][]string)
|
||||||
|
query["page"] = []string{"2"}
|
||||||
|
query["limit"] = []string{"15"}
|
||||||
|
query["search"] = []string{"test"}
|
||||||
|
|
||||||
|
params := parseQueryParams(query, 9)
|
||||||
|
|
||||||
|
suite.Equal(int64(2), params.Page)
|
||||||
|
suite.Equal(int64(15), params.Limit)
|
||||||
|
suite.NotNil(params.Search)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *UtilsTestSuite) TestParseQueryParamsDefaults() {
|
||||||
|
query := make(map[string][]string)
|
||||||
|
|
||||||
|
params := parseQueryParams(query, 9)
|
||||||
|
|
||||||
|
suite.Equal(int64(1), params.Page)
|
||||||
|
suite.Equal(int64(9), params.Limit)
|
||||||
|
suite.Nil(params.Search)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *UtilsTestSuite) TestPtrOf() {
|
||||||
|
value := "test"
|
||||||
|
ptr := ptrOf(value)
|
||||||
|
|
||||||
|
suite.NotNil(ptr)
|
||||||
|
suite.Equal("test", *ptr)
|
||||||
|
}
|
||||||
2
assets/lib/epub.min.js
vendored
2
assets/lib/epub.min.js
vendored
File diff suppressed because one or more lines are too long
119
assets/reader/fonts.css
Normal file
119
assets/reader/fonts.css
Normal file
@@ -0,0 +1,119 @@
|
|||||||
|
/**
|
||||||
|
* Lato
|
||||||
|
* - Charsets: [latin,latin-ext]
|
||||||
|
* - Styles: [100,700,100italic,regular,italic,700italic]
|
||||||
|
**/
|
||||||
|
|
||||||
|
/* lato-100 - latin_latin-ext */
|
||||||
|
@font-face {
|
||||||
|
font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
|
||||||
|
font-family: "Lato";
|
||||||
|
font-style: normal;
|
||||||
|
font-weight: 100;
|
||||||
|
src: url("./fonts/lato-v24-latin_latin-ext-100.woff2") format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
|
||||||
|
}
|
||||||
|
|
||||||
|
/* lato-100italic - latin_latin-ext */
|
||||||
|
@font-face {
|
||||||
|
font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
|
||||||
|
font-family: "Lato";
|
||||||
|
font-style: italic;
|
||||||
|
font-weight: 100;
|
||||||
|
src: url("./fonts/lato-v24-latin_latin-ext-100italic.woff2") format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
|
||||||
|
}
|
||||||
|
|
||||||
|
/* lato-regular - latin_latin-ext */
|
||||||
|
@font-face {
|
||||||
|
font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
|
||||||
|
font-family: "Lato";
|
||||||
|
font-style: normal;
|
||||||
|
font-weight: 400;
|
||||||
|
src: url("./fonts/lato-v24-latin_latin-ext-regular.woff2") format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
|
||||||
|
}
|
||||||
|
|
||||||
|
/* lato-italic - latin_latin-ext */
|
||||||
|
@font-face {
|
||||||
|
font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
|
||||||
|
font-family: "Lato";
|
||||||
|
font-style: italic;
|
||||||
|
font-weight: 400;
|
||||||
|
src: url("./fonts/lato-v24-latin_latin-ext-italic.woff2") format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
|
||||||
|
}
|
||||||
|
|
||||||
|
/* lato-700 - latin_latin-ext */
|
||||||
|
@font-face {
|
||||||
|
font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
|
||||||
|
font-family: "Lato";
|
||||||
|
font-style: normal;
|
||||||
|
font-weight: 700;
|
||||||
|
src: url("./fonts/lato-v24-latin_latin-ext-700.woff2") format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
|
||||||
|
}
|
||||||
|
|
||||||
|
/* lato-700italic - latin_latin-ext */
|
||||||
|
@font-face {
|
||||||
|
font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
|
||||||
|
font-family: "Lato";
|
||||||
|
font-style: italic;
|
||||||
|
font-weight: 700;
|
||||||
|
src: url("./fonts/lato-v24-latin_latin-ext-700italic.woff2") format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Open Sans
|
||||||
|
* - Charsets: [latin,latin-ext]
|
||||||
|
* - Styles: [700,regular,italic,700italic]
|
||||||
|
**/
|
||||||
|
|
||||||
|
/* open-sans-regular - latin_latin-ext */
|
||||||
|
@font-face {
|
||||||
|
font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
|
||||||
|
font-family: "Open Sans";
|
||||||
|
font-style: normal;
|
||||||
|
font-weight: 400;
|
||||||
|
src: url("./fonts/open-sans-v36-latin_latin-ext-regular.woff2")
|
||||||
|
format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
|
||||||
|
}
|
||||||
|
|
||||||
|
/* open-sans-italic - latin_latin-ext */
|
||||||
|
@font-face {
|
||||||
|
font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
|
||||||
|
font-family: "Open Sans";
|
||||||
|
font-style: italic;
|
||||||
|
font-weight: 400;
|
||||||
|
src: url("./fonts/open-sans-v36-latin_latin-ext-italic.woff2") format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
|
||||||
|
}
|
||||||
|
|
||||||
|
/* open-sans-700 - latin_latin-ext */
|
||||||
|
@font-face {
|
||||||
|
font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
|
||||||
|
font-family: "Open Sans";
|
||||||
|
font-style: normal;
|
||||||
|
font-weight: 700;
|
||||||
|
src: url("./fonts/open-sans-v36-latin_latin-ext-700.woff2") format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
|
||||||
|
}
|
||||||
|
|
||||||
|
/* open-sans-700italic - latin_latin-ext */
|
||||||
|
@font-face {
|
||||||
|
font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
|
||||||
|
font-family: "Open Sans";
|
||||||
|
font-style: italic;
|
||||||
|
font-weight: 700;
|
||||||
|
src: url("./fonts/open-sans-v36-latin_latin-ext-700italic.woff2")
|
||||||
|
format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Arbutus Slab
|
||||||
|
* - Charsets: [latin,latin-ext]
|
||||||
|
* - Styles: [regular]
|
||||||
|
**/
|
||||||
|
|
||||||
|
/* arbutus-slab-regular - latin_latin-ext */
|
||||||
|
@font-face {
|
||||||
|
font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
|
||||||
|
font-family: "Arbutus Slab";
|
||||||
|
font-style: normal;
|
||||||
|
font-weight: 400;
|
||||||
|
src: url("./fonts/arbutus-slab-v16-latin_latin-ext-regular.woff2")
|
||||||
|
format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
|
||||||
|
}
|
||||||
Binary file not shown.
BIN
assets/reader/fonts/lato-v24-latin_latin-ext-100.woff2
Normal file
BIN
assets/reader/fonts/lato-v24-latin_latin-ext-100.woff2
Normal file
Binary file not shown.
BIN
assets/reader/fonts/lato-v24-latin_latin-ext-100italic.woff2
Normal file
BIN
assets/reader/fonts/lato-v24-latin_latin-ext-100italic.woff2
Normal file
Binary file not shown.
BIN
assets/reader/fonts/lato-v24-latin_latin-ext-700.woff2
Normal file
BIN
assets/reader/fonts/lato-v24-latin_latin-ext-700.woff2
Normal file
Binary file not shown.
BIN
assets/reader/fonts/lato-v24-latin_latin-ext-700italic.woff2
Normal file
BIN
assets/reader/fonts/lato-v24-latin_latin-ext-700italic.woff2
Normal file
Binary file not shown.
BIN
assets/reader/fonts/lato-v24-latin_latin-ext-italic.woff2
Normal file
BIN
assets/reader/fonts/lato-v24-latin_latin-ext-italic.woff2
Normal file
Binary file not shown.
BIN
assets/reader/fonts/lato-v24-latin_latin-ext-regular.woff2
Normal file
BIN
assets/reader/fonts/lato-v24-latin_latin-ext-regular.woff2
Normal file
Binary file not shown.
BIN
assets/reader/fonts/open-sans-v36-latin_latin-ext-700.woff2
Normal file
BIN
assets/reader/fonts/open-sans-v36-latin_latin-ext-700.woff2
Normal file
Binary file not shown.
Binary file not shown.
BIN
assets/reader/fonts/open-sans-v36-latin_latin-ext-italic.woff2
Normal file
BIN
assets/reader/fonts/open-sans-v36-latin_latin-ext-italic.woff2
Normal file
Binary file not shown.
BIN
assets/reader/fonts/open-sans-v36-latin_latin-ext-regular.woff2
Normal file
BIN
assets/reader/fonts/open-sans-v36-latin_latin-ext-regular.woff2
Normal file
Binary file not shown.
@@ -1,4 +1,4 @@
|
|||||||
<!DOCTYPE html>
|
<!doctype html>
|
||||||
<html lang="en">
|
<html lang="en">
|
||||||
<head>
|
<head>
|
||||||
<meta charset="utf-8" />
|
<meta charset="utf-8" />
|
||||||
@@ -82,7 +82,8 @@
|
|||||||
id="top-bar"
|
id="top-bar"
|
||||||
class="transition-all duration-200 absolute z-10 bg-gray-100 dark:bg-gray-800 w-full px-2"
|
class="transition-all duration-200 absolute z-10 bg-gray-100 dark:bg-gray-800 w-full px-2"
|
||||||
>
|
>
|
||||||
<div class="w-full h-32 flex items-center justify-around relative">
|
<div class="max-h-[75vh] w-full flex flex-col items-center justify-around relative dark:text-white">
|
||||||
|
<div class="h-32">
|
||||||
<div class="text-gray-500 absolute top-6 left-4 flex flex-col gap-4">
|
<div class="text-gray-500 absolute top-6 left-4 flex flex-col gap-4">
|
||||||
<a href="#">
|
<a href="#">
|
||||||
<svg
|
<svg
|
||||||
@@ -152,6 +153,8 @@
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
<div id="toc" class="w-full text-center max-h-[50%] overflow-scroll no-scrollbar"></div>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div
|
<div
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
const THEMES = ["light", "tan", "blue", "gray", "black"];
|
const THEMES = ["light", "tan", "blue", "gray", "black"];
|
||||||
const THEME_FILE = "/assets/reader/readerThemes.css";
|
const THEME_FILE = "/assets/reader/themes.css";
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Initial load handler. Gets called on DOMContentLoaded. Responsible for
|
* Initial load handler. Gets called on DOMContentLoaded. Responsible for
|
||||||
@@ -66,6 +66,56 @@ function populateMetadata(data) {
|
|||||||
authorEl.innerText = data.author;
|
authorEl.innerText = data.author;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Populate the Table of Contents
|
||||||
|
**/
|
||||||
|
function populateTOC() {
|
||||||
|
if (!currentReader.book.navigation.toc) {
|
||||||
|
console.warn("[populateTOC] No TOC");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let tocEl = document.querySelector("#toc");
|
||||||
|
if (!tocEl) {
|
||||||
|
console.warn("[populateTOC] No TOC Element");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse the Table of Contents
|
||||||
|
let parsedTOC = currentReader.book.navigation.toc.reduce((agg, item) => {
|
||||||
|
let sectionTitle = item.label.trim();
|
||||||
|
agg.push({ title: sectionTitle, href: item.href });
|
||||||
|
if (item.subitems.length == 0) {
|
||||||
|
return agg;
|
||||||
|
}
|
||||||
|
|
||||||
|
let allSubSections = item.subitems.map(item => {
|
||||||
|
let itemTitle = item.label.trim();
|
||||||
|
if (sectionTitle != "") {
|
||||||
|
itemTitle = sectionTitle + " - " + item.label.trim();
|
||||||
|
}
|
||||||
|
return { title: itemTitle, href: item.href };
|
||||||
|
});
|
||||||
|
agg.push(...allSubSections);
|
||||||
|
|
||||||
|
return agg;
|
||||||
|
}, [])
|
||||||
|
|
||||||
|
// Add Table of Contents to DOM
|
||||||
|
let listEl = document.createElement("ul");
|
||||||
|
listEl.classList.add("m-4")
|
||||||
|
parsedTOC.forEach(item => {
|
||||||
|
let listItem = document.createElement("li");
|
||||||
|
listItem.style.cursor = "pointer";
|
||||||
|
listItem.addEventListener("click", () => {
|
||||||
|
currentReader.rendition.display(item.href);
|
||||||
|
});
|
||||||
|
listItem.textContent = item.title;
|
||||||
|
listEl.appendChild(listItem);
|
||||||
|
});
|
||||||
|
tocEl.appendChild(listEl);
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This is the main reader class. All functionality is wrapped in this class.
|
* This is the main reader class. All functionality is wrapped in this class.
|
||||||
* Responsible for handling gesture / clicks, flushing progress & activity,
|
* Responsible for handling gesture / clicks, flushing progress & activity,
|
||||||
@@ -97,16 +147,18 @@ class EBookReader {
|
|||||||
flow: "paginated",
|
flow: "paginated",
|
||||||
width: "100%",
|
width: "100%",
|
||||||
height: "100%",
|
height: "100%",
|
||||||
|
allowScriptedContent: true,
|
||||||
});
|
});
|
||||||
|
|
||||||
// Setup Reader
|
// Setup Reader
|
||||||
this.book.ready.then(this.setupReader.bind(this));
|
this.book.ready.then(this.setupReader.bind(this));
|
||||||
|
|
||||||
// Initialize
|
// Initialize
|
||||||
|
this.initCSP();
|
||||||
this.initDevice();
|
this.initDevice();
|
||||||
this.initWakeLock();
|
this.initWakeLock();
|
||||||
this.initThemes();
|
this.initThemes();
|
||||||
this.initRenditionListeners();
|
this.initViewerListeners();
|
||||||
this.initDocumentListeners();
|
this.initDocumentListeners();
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -141,7 +193,7 @@ class EBookReader {
|
|||||||
return "00000000000000000000000000000000".replace(/[018]/g, (c) =>
|
return "00000000000000000000000000000000".replace(/[018]/g, (c) =>
|
||||||
(c ^ (crypto.getRandomValues(new Uint8Array(1))[0] & (15 >> (c / 4))))
|
(c ^ (crypto.getRandomValues(new Uint8Array(1))[0] & (15 >> (c / 4))))
|
||||||
.toString(16)
|
.toString(16)
|
||||||
.toUpperCase()
|
.toUpperCase(),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -244,7 +296,7 @@ class EBookReader {
|
|||||||
initThemes() {
|
initThemes() {
|
||||||
// Register Themes
|
// Register Themes
|
||||||
THEMES.forEach((theme) =>
|
THEMES.forEach((theme) =>
|
||||||
this.rendition.themes.register(theme, THEME_FILE)
|
this.rendition.themes.register(theme, THEME_FILE),
|
||||||
);
|
);
|
||||||
|
|
||||||
let themeLinkEl = document.createElement("link");
|
let themeLinkEl = document.createElement("link");
|
||||||
@@ -267,25 +319,48 @@ class EBookReader {
|
|||||||
// Restore Theme
|
// Restore Theme
|
||||||
this.setTheme();
|
this.setTheme();
|
||||||
|
|
||||||
// Set Fonts - TODO: Local
|
// Set Fonts
|
||||||
// https://gwfh.mranftl.com/fonts
|
|
||||||
this.rendition.getContents().forEach((c) => {
|
this.rendition.getContents().forEach((c) => {
|
||||||
[
|
|
||||||
"https://fonts.googleapis.com/css?family=Arbutus+Slab",
|
|
||||||
"https://fonts.googleapis.com/css?family=Open+Sans",
|
|
||||||
"https://fonts.googleapis.com/css?family=Lato:400,400i,700,700i",
|
|
||||||
].forEach((url) => {
|
|
||||||
let el = c.document.head.appendChild(
|
let el = c.document.head.appendChild(
|
||||||
c.document.createElement("link")
|
c.document.createElement("link"),
|
||||||
);
|
);
|
||||||
el.setAttribute("rel", "stylesheet");
|
el.setAttribute("rel", "stylesheet");
|
||||||
el.setAttribute("href", url);
|
el.setAttribute("href", "/assets/reader/fonts.css");
|
||||||
});
|
});
|
||||||
});
|
}.bind(this),
|
||||||
}.bind(this)
|
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* EpubJS will set iframe sandbox when settings "allowScriptedContent: false".
|
||||||
|
* However, Safari completely blocks us from attaching listeners to the iframe
|
||||||
|
* document. So instead we just inject a restrictive CSP rule.
|
||||||
|
*
|
||||||
|
* This effectively blocks all script content within the iframe while still
|
||||||
|
* allowing us to attach listeners to the iframe document.
|
||||||
|
**/
|
||||||
|
initCSP() {
|
||||||
|
// Derive CSP Host
|
||||||
|
var protocol = document.location.protocol;
|
||||||
|
var host = document.location.host;
|
||||||
|
var cspURL = `${protocol}//${host}`;
|
||||||
|
|
||||||
|
// Add CSP Policy
|
||||||
|
this.book.spine.hooks.content.register((output, section) => {
|
||||||
|
let cspWrapper = document.createElement("div");
|
||||||
|
cspWrapper.innerHTML = `
|
||||||
|
<meta
|
||||||
|
http-equiv="Content-Security-Policy"
|
||||||
|
content="require-trusted-types-for 'script';
|
||||||
|
style-src 'self' blob: 'unsafe-inline' ${cspURL};
|
||||||
|
object-src 'none';
|
||||||
|
script-src 'none';"
|
||||||
|
>`;
|
||||||
|
let cspMeta = cspWrapper.children[0];
|
||||||
|
output.head.append(cspMeta);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Set theme & meta theme color
|
* Set theme & meta theme color
|
||||||
**/
|
**/
|
||||||
@@ -311,7 +386,7 @@ class EBookReader {
|
|||||||
let themeColorEl = document.querySelector("[name='theme-color']");
|
let themeColorEl = document.querySelector("[name='theme-color']");
|
||||||
let themeStyleSheet = document.querySelector("#themes").sheet;
|
let themeStyleSheet = document.querySelector("#themes").sheet;
|
||||||
let themeStyleRule = Array.from(themeStyleSheet.cssRules).find(
|
let themeStyleRule = Array.from(themeStyleSheet.cssRules).find(
|
||||||
(item) => item.selectorText == "." + colorScheme
|
(item) => item.selectorText == "." + colorScheme,
|
||||||
);
|
);
|
||||||
|
|
||||||
// Match Reader Theme
|
// Match Reader Theme
|
||||||
@@ -325,13 +400,13 @@ class EBookReader {
|
|||||||
// Set Font Family
|
// Set Font Family
|
||||||
item.document.documentElement.style.setProperty(
|
item.document.documentElement.style.setProperty(
|
||||||
"--editor-font-family",
|
"--editor-font-family",
|
||||||
fontFamily
|
fontFamily,
|
||||||
);
|
);
|
||||||
|
|
||||||
// Set Font Size
|
// Set Font Size
|
||||||
item.document.documentElement.style.setProperty(
|
item.document.documentElement.style.setProperty(
|
||||||
"--editor-font-size",
|
"--editor-font-size",
|
||||||
fontSize + "em"
|
fontSize + "em",
|
||||||
);
|
);
|
||||||
|
|
||||||
// Set Highlight Style
|
// Set Highlight Style
|
||||||
@@ -364,7 +439,7 @@ class EBookReader {
|
|||||||
|
|
||||||
// Compute Style
|
// Compute Style
|
||||||
let backgroundColor = getComputedStyle(
|
let backgroundColor = getComputedStyle(
|
||||||
this.bookState.progressElement.ownerDocument.body
|
this.bookState.progressElement.ownerDocument.body,
|
||||||
).backgroundColor;
|
).backgroundColor;
|
||||||
|
|
||||||
// Set Style
|
// Set Style
|
||||||
@@ -378,9 +453,9 @@ class EBookReader {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Rendition hooks
|
* Viewer Listeners
|
||||||
**/
|
**/
|
||||||
initRenditionListeners() {
|
initViewerListeners() {
|
||||||
/**
|
/**
|
||||||
* Initiate the debounce when the given function returns true.
|
* Initiate the debounce when the given function returns true.
|
||||||
* Don't run it again until the timeout lapses.
|
* Don't run it again until the timeout lapses.
|
||||||
@@ -408,56 +483,18 @@ class EBookReader {
|
|||||||
let bottomBar = document.querySelector("#bottom-bar");
|
let bottomBar = document.querySelector("#bottom-bar");
|
||||||
|
|
||||||
// Local Functions
|
// Local Functions
|
||||||
let getCFIFromXPath = this.getCFIFromXPath.bind(this);
|
|
||||||
let setPosition = this.setPosition.bind(this);
|
|
||||||
let nextPage = this.nextPage.bind(this);
|
let nextPage = this.nextPage.bind(this);
|
||||||
let prevPage = this.prevPage.bind(this);
|
let prevPage = this.prevPage.bind(this);
|
||||||
let saveSettings = this.saveSettings.bind(this);
|
|
||||||
|
|
||||||
// Local Vars
|
|
||||||
let readerSettings = this.readerSettings;
|
|
||||||
let bookState = this.bookState;
|
|
||||||
|
|
||||||
this.rendition.hooks.render.register(function (doc, data) {
|
|
||||||
let renderDoc = doc.document;
|
|
||||||
|
|
||||||
// ------------------------------------------------ //
|
// ------------------------------------------------ //
|
||||||
// ---------------- Wake Lock Hack ---------------- //
|
// ----------------- Swipe Helpers ---------------- //
|
||||||
// ------------------------------------------------ //
|
|
||||||
let wakeLockListener = function () {
|
|
||||||
doc.window.parent.document.dispatchEvent(new CustomEvent("wakelock"));
|
|
||||||
};
|
|
||||||
renderDoc.addEventListener("click", wakeLockListener);
|
|
||||||
renderDoc.addEventListener("gesturechange", wakeLockListener);
|
|
||||||
renderDoc.addEventListener("touchstart", wakeLockListener);
|
|
||||||
|
|
||||||
// ------------------------------------------------ //
|
|
||||||
// --------------- Swipe Pagination --------------- //
|
|
||||||
// ------------------------------------------------ //
|
// ------------------------------------------------ //
|
||||||
|
let disablePagination = false;
|
||||||
let touchStartX,
|
let touchStartX,
|
||||||
touchStartY,
|
touchStartY,
|
||||||
touchEndX,
|
touchEndX,
|
||||||
touchEndY = undefined;
|
touchEndY = undefined;
|
||||||
|
|
||||||
renderDoc.addEventListener(
|
|
||||||
"touchstart",
|
|
||||||
function (event) {
|
|
||||||
touchStartX = event.changedTouches[0].screenX;
|
|
||||||
touchStartY = event.changedTouches[0].screenY;
|
|
||||||
},
|
|
||||||
false
|
|
||||||
);
|
|
||||||
|
|
||||||
renderDoc.addEventListener(
|
|
||||||
"touchend",
|
|
||||||
function (event) {
|
|
||||||
touchEndX = event.changedTouches[0].screenX;
|
|
||||||
touchEndY = event.changedTouches[0].screenY;
|
|
||||||
handleGesture(event);
|
|
||||||
},
|
|
||||||
false
|
|
||||||
);
|
|
||||||
|
|
||||||
function handleGesture(event) {
|
function handleGesture(event) {
|
||||||
let drasticity = 75;
|
let drasticity = 75;
|
||||||
|
|
||||||
@@ -473,18 +510,55 @@ class EBookReader {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Swipe Left
|
// Swipe Left
|
||||||
if (touchEndX + drasticity < touchStartX) {
|
if (!disablePagination && touchEndX + drasticity < touchStartX) {
|
||||||
nextPage();
|
nextPage();
|
||||||
}
|
}
|
||||||
|
|
||||||
// Swipe Right
|
// Swipe Right
|
||||||
if (touchEndX - drasticity > touchStartX) {
|
if (!disablePagination && touchEndX - drasticity > touchStartX) {
|
||||||
prevPage();
|
prevPage();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function handleSwipeDown() {
|
||||||
|
if (bottomBar.classList.contains("bottom-0")) {
|
||||||
|
bottomBar.classList.remove("bottom-0");
|
||||||
|
disablePagination = false;
|
||||||
|
} else {
|
||||||
|
topBar.classList.add("top-0");
|
||||||
|
populateTOC()
|
||||||
|
disablePagination = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function handleSwipeUp() {
|
||||||
|
if (topBar.classList.contains("top-0")) {
|
||||||
|
topBar.classList.remove("top-0");
|
||||||
|
disablePagination = false;
|
||||||
|
|
||||||
|
const tocEl = document.querySelector("#toc");
|
||||||
|
if (tocEl) tocEl.innerHTML = "";
|
||||||
|
} else {
|
||||||
|
bottomBar.classList.add("bottom-0");
|
||||||
|
disablePagination = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.rendition.hooks.render.register(function (doc, data) {
|
||||||
|
let renderDoc = doc.document;
|
||||||
|
|
||||||
// ------------------------------------------------ //
|
// ------------------------------------------------ //
|
||||||
// --------------- Bottom & Top Bar --------------- //
|
// ---------------- Wake Lock Hack ---------------- //
|
||||||
|
// ------------------------------------------------ //
|
||||||
|
let wakeLockListener = function () {
|
||||||
|
renderDoc.dispatchEvent(new CustomEvent("wakelock"));
|
||||||
|
};
|
||||||
|
renderDoc.addEventListener("click", wakeLockListener);
|
||||||
|
renderDoc.addEventListener("gesturechange", wakeLockListener);
|
||||||
|
renderDoc.addEventListener("touchstart", wakeLockListener);
|
||||||
|
|
||||||
|
// ------------------------------------------------ //
|
||||||
|
// --------------- Bars & Page Turn --------------- //
|
||||||
// ------------------------------------------------ //
|
// ------------------------------------------------ //
|
||||||
renderDoc.addEventListener(
|
renderDoc.addEventListener(
|
||||||
"click",
|
"click",
|
||||||
@@ -513,13 +587,13 @@ class EBookReader {
|
|||||||
// Handle Event
|
// Handle Event
|
||||||
if (yCoord < top) handleSwipeDown();
|
if (yCoord < top) handleSwipeDown();
|
||||||
else if (yCoord > bottom) handleSwipeUp();
|
else if (yCoord > bottom) handleSwipeUp();
|
||||||
else if (xCoord < left) prevPage();
|
else if (!disablePagination && xCoord < left) prevPage();
|
||||||
else if (xCoord > right) nextPage();
|
else if (!disablePagination && xCoord > right) nextPage();
|
||||||
else {
|
else {
|
||||||
bottomBar.classList.remove("bottom-0");
|
bottomBar.classList.remove("bottom-0");
|
||||||
topBar.classList.remove("top-0");
|
topBar.classList.remove("top-0");
|
||||||
}
|
}
|
||||||
}.bind(this)
|
}.bind(this),
|
||||||
);
|
);
|
||||||
|
|
||||||
renderDoc.addEventListener(
|
renderDoc.addEventListener(
|
||||||
@@ -533,50 +607,30 @@ class EBookReader {
|
|||||||
handleSwipeDown();
|
handleSwipeDown();
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
}, 400)
|
}, 400),
|
||||||
);
|
);
|
||||||
|
|
||||||
function handleSwipeDown() {
|
|
||||||
if (bottomBar.classList.contains("bottom-0"))
|
|
||||||
bottomBar.classList.remove("bottom-0");
|
|
||||||
else topBar.classList.add("top-0");
|
|
||||||
}
|
|
||||||
|
|
||||||
function handleSwipeUp() {
|
|
||||||
if (topBar.classList.contains("top-0"))
|
|
||||||
topBar.classList.remove("top-0");
|
|
||||||
else bottomBar.classList.add("bottom-0");
|
|
||||||
}
|
|
||||||
|
|
||||||
// ------------------------------------------------ //
|
// ------------------------------------------------ //
|
||||||
// -------------- Keyboard Shortcuts -------------- //
|
// ------------------- Gestures ------------------- //
|
||||||
// ------------------------------------------------ //
|
// ------------------------------------------------ //
|
||||||
|
|
||||||
renderDoc.addEventListener(
|
renderDoc.addEventListener(
|
||||||
"keyup",
|
"touchstart",
|
||||||
function (e) {
|
function (event) {
|
||||||
// Left Key (Previous Page)
|
touchStartX = event.changedTouches[0].screenX;
|
||||||
if ((e.keyCode || e.which) == 37) {
|
touchStartY = event.changedTouches[0].screenY;
|
||||||
prevPage();
|
|
||||||
}
|
|
||||||
|
|
||||||
// Right Key (Next Page)
|
|
||||||
if ((e.keyCode || e.which) == 39) {
|
|
||||||
nextPage();
|
|
||||||
}
|
|
||||||
|
|
||||||
// "t" Key (Theme Cycle)
|
|
||||||
if ((e.keyCode || e.which) == 84) {
|
|
||||||
let currentThemeIdx = THEMES.indexOf(
|
|
||||||
readerSettings.theme.colorScheme
|
|
||||||
);
|
|
||||||
let colorScheme =
|
|
||||||
THEMES.length == currentThemeIdx + 1
|
|
||||||
? THEMES[0]
|
|
||||||
: THEMES[currentThemeIdx + 1];
|
|
||||||
setTheme({ colorScheme });
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
false
|
false,
|
||||||
|
);
|
||||||
|
|
||||||
|
renderDoc.addEventListener(
|
||||||
|
"touchend",
|
||||||
|
function (event) {
|
||||||
|
touchEndX = event.changedTouches[0].screenX;
|
||||||
|
touchEndY = event.changedTouches[0].screenY;
|
||||||
|
handleGesture(event);
|
||||||
|
},
|
||||||
|
false,
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@@ -591,7 +645,9 @@ class EBookReader {
|
|||||||
let nextPage = this.nextPage.bind(this);
|
let nextPage = this.nextPage.bind(this);
|
||||||
let prevPage = this.prevPage.bind(this);
|
let prevPage = this.prevPage.bind(this);
|
||||||
|
|
||||||
// Keyboard Shortcuts
|
// ------------------------------------------------ //
|
||||||
|
// -------------- Keyboard Shortcuts -------------- //
|
||||||
|
// ------------------------------------------------ //
|
||||||
document.addEventListener(
|
document.addEventListener(
|
||||||
"keyup",
|
"keyup",
|
||||||
function (e) {
|
function (e) {
|
||||||
@@ -608,7 +664,7 @@ class EBookReader {
|
|||||||
// "t" Key (Theme Cycle)
|
// "t" Key (Theme Cycle)
|
||||||
if ((e.keyCode || e.which) == 84) {
|
if ((e.keyCode || e.which) == 84) {
|
||||||
let currentThemeIdx = THEMES.indexOf(
|
let currentThemeIdx = THEMES.indexOf(
|
||||||
this.readerSettings.theme.colorScheme
|
this.readerSettings.theme.colorScheme,
|
||||||
);
|
);
|
||||||
let colorScheme =
|
let colorScheme =
|
||||||
THEMES.length == currentThemeIdx + 1
|
THEMES.length == currentThemeIdx + 1
|
||||||
@@ -617,7 +673,7 @@ class EBookReader {
|
|||||||
this.setTheme({ colorScheme });
|
this.setTheme({ colorScheme });
|
||||||
}
|
}
|
||||||
}.bind(this),
|
}.bind(this),
|
||||||
false
|
false,
|
||||||
);
|
);
|
||||||
|
|
||||||
// Color Scheme Switcher
|
// Color Scheme Switcher
|
||||||
@@ -628,9 +684,9 @@ class EBookReader {
|
|||||||
function (event) {
|
function (event) {
|
||||||
let colorScheme = event.target.innerText;
|
let colorScheme = event.target.innerText;
|
||||||
this.setTheme({ colorScheme });
|
this.setTheme({ colorScheme });
|
||||||
}.bind(this)
|
}.bind(this),
|
||||||
);
|
);
|
||||||
}.bind(this)
|
}.bind(this),
|
||||||
);
|
);
|
||||||
|
|
||||||
// Font Switcher
|
// Font Switcher
|
||||||
@@ -645,9 +701,9 @@ class EBookReader {
|
|||||||
this.setTheme({ fontFamily });
|
this.setTheme({ fontFamily });
|
||||||
|
|
||||||
this.setPosition(cfi);
|
this.setPosition(cfi);
|
||||||
}.bind(this)
|
}.bind(this),
|
||||||
);
|
);
|
||||||
}.bind(this)
|
}.bind(this),
|
||||||
);
|
);
|
||||||
|
|
||||||
// Font Size
|
// Font Size
|
||||||
@@ -670,14 +726,17 @@ class EBookReader {
|
|||||||
|
|
||||||
// Restore CFI
|
// Restore CFI
|
||||||
this.setPosition(cfi);
|
this.setPosition(cfi);
|
||||||
}.bind(this)
|
}.bind(this),
|
||||||
);
|
);
|
||||||
}.bind(this)
|
}.bind(this),
|
||||||
);
|
);
|
||||||
|
|
||||||
// Close Top Bar
|
// Close Top Bar
|
||||||
document.querySelector(".close-top-bar").addEventListener("click", () => {
|
document.querySelector(".close-top-bar").addEventListener("click", () => {
|
||||||
topBar.classList.remove("top-0");
|
topBar.classList.remove("top-0");
|
||||||
|
|
||||||
|
const tocEl = document.querySelector("#toc");
|
||||||
|
if (tocEl) tocEl.innerHTML = "";
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -759,7 +818,7 @@ class EBookReader {
|
|||||||
if (pageWPM >= WPM_MAX)
|
if (pageWPM >= WPM_MAX)
|
||||||
return console.log(
|
return console.log(
|
||||||
"[createActivity] Page WPM Exceeds Max (2000):",
|
"[createActivity] Page WPM Exceeds Max (2000):",
|
||||||
pageWPM
|
pageWPM,
|
||||||
);
|
);
|
||||||
|
|
||||||
// Ensure WPM Minimum
|
// Ensure WPM Minimum
|
||||||
@@ -772,7 +831,7 @@ class EBookReader {
|
|||||||
return console.warn("[createActivity] Invalid Total Pages (0)");
|
return console.warn("[createActivity] Invalid Total Pages (0)");
|
||||||
|
|
||||||
let currentPage = Math.round(
|
let currentPage = Math.round(
|
||||||
(currentWord * totalPages) / this.bookState.words
|
(currentWord * totalPages) / this.bookState.words,
|
||||||
);
|
);
|
||||||
|
|
||||||
// Create Activity Event
|
// Create Activity Event
|
||||||
@@ -826,7 +885,7 @@ class EBookReader {
|
|||||||
response: r,
|
response: r,
|
||||||
json: await r.json(),
|
json: await r.json(),
|
||||||
data: activityEvent,
|
data: activityEvent,
|
||||||
})
|
}),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -887,7 +946,7 @@ class EBookReader {
|
|||||||
response: r,
|
response: r,
|
||||||
json: await r.json(),
|
json: await r.json(),
|
||||||
data: progressEvent,
|
data: progressEvent,
|
||||||
})
|
}),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -923,7 +982,7 @@ class EBookReader {
|
|||||||
let currentWord = await this.getBookWordPosition();
|
let currentWord = await this.getBookWordPosition();
|
||||||
|
|
||||||
let currentTOC = this.book.navigation.toc.find(
|
let currentTOC = this.book.navigation.toc.find(
|
||||||
(item) => item.href == currentLocation.start.href
|
(item) => item.href == currentLocation.start.href,
|
||||||
);
|
);
|
||||||
|
|
||||||
return {
|
return {
|
||||||
@@ -957,10 +1016,16 @@ class EBookReader {
|
|||||||
**/
|
**/
|
||||||
async getXPathFromCFI(cfi) {
|
async getXPathFromCFI(cfi) {
|
||||||
// Get DocFragment (Spine Index)
|
// Get DocFragment (Spine Index)
|
||||||
let startCFI = cfi.replace("epubcfi(", "");
|
let cfiBaseMatch = cfi.match(/\(([^!]+)/);
|
||||||
|
if (!cfiBaseMatch) {
|
||||||
|
console.error("[getXPathFromCFI] No CFI Match");
|
||||||
|
return {};
|
||||||
|
}
|
||||||
|
let startCFI = cfiBaseMatch[1];
|
||||||
|
|
||||||
let docFragmentIndex =
|
let docFragmentIndex =
|
||||||
this.book.spine.spineItems.find((item) =>
|
this.book.spine.spineItems.find((item) =>
|
||||||
startCFI.startsWith(item.cfiBase)
|
item.cfiBase == startCFI
|
||||||
).index + 1;
|
).index + 1;
|
||||||
|
|
||||||
// Base Progress
|
// Base Progress
|
||||||
@@ -1037,10 +1102,6 @@ class EBookReader {
|
|||||||
return {};
|
return {};
|
||||||
}
|
}
|
||||||
|
|
||||||
// Match Item Index
|
|
||||||
let indexMatch = xpath.match(/\.(\d+)$/);
|
|
||||||
let itemIndex = indexMatch ? parseInt(indexMatch[1]) : 0;
|
|
||||||
|
|
||||||
// Get Spine Item
|
// Get Spine Item
|
||||||
let spinePosition = parseInt(fragMatch[1]) - 1;
|
let spinePosition = parseInt(fragMatch[1]) - 1;
|
||||||
let sectionItem = this.book.spine.get(spinePosition);
|
let sectionItem = this.book.spine.get(spinePosition);
|
||||||
@@ -1108,7 +1169,7 @@ class EBookReader {
|
|||||||
} else {
|
} else {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
}
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -1132,6 +1193,11 @@ class EBookReader {
|
|||||||
let element = docSearch.iterateNext() || derivedSelectorElement;
|
let element = docSearch.iterateNext() || derivedSelectorElement;
|
||||||
let cfi = sectionItem.cfiFromElement(element);
|
let cfi = sectionItem.cfiFromElement(element);
|
||||||
|
|
||||||
|
// Hack - epub.js crashes sometimes when its a bare section with no element
|
||||||
|
// so just return the first.
|
||||||
|
if (cfi.endsWith("!/)"))
|
||||||
|
cfi = cfi.slice(0, -1) + "0)"
|
||||||
|
|
||||||
return { cfi, element };
|
return { cfi, element };
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1153,7 +1219,7 @@ class EBookReader {
|
|||||||
|
|
||||||
// Get CFI Range
|
// Get CFI Range
|
||||||
let firstCFI = spineItem.cfiFromElement(
|
let firstCFI = spineItem.cfiFromElement(
|
||||||
spineItem.document.body.children[0]
|
spineItem.document.body.children[0],
|
||||||
);
|
);
|
||||||
let currentLocation = await this.rendition.currentLocation();
|
let currentLocation = await this.rendition.currentLocation();
|
||||||
let cfiRange = this.getCFIRange(firstCFI, currentLocation.start.cfi);
|
let cfiRange = this.getCFIRange(firstCFI, currentLocation.start.cfi);
|
||||||
@@ -1251,10 +1317,10 @@ class EBookReader {
|
|||||||
let spineWC = await Promise.all(
|
let spineWC = await Promise.all(
|
||||||
this.book.spine.spineItems.map(async (item) => {
|
this.book.spine.spineItems.map(async (item) => {
|
||||||
let newDoc = await item.load(this.book.load.bind(this.book));
|
let newDoc = await item.load(this.book.load.bind(this.book));
|
||||||
let spineWords = newDoc.innerText.trim().split(/\s+/).length;
|
let spineWords = (newDoc.innerText || "").trim().split(/\s+/).length;
|
||||||
item.wordCount = spineWords;
|
item.wordCount = spineWords;
|
||||||
return spineWords;
|
return spineWords;
|
||||||
})
|
}),
|
||||||
);
|
);
|
||||||
|
|
||||||
return spineWC.reduce((totalCount, itemCount) => totalCount + itemCount, 0);
|
return spineWC.reduce((totalCount, itemCount) => totalCount + itemCount, 0);
|
||||||
@@ -1273,7 +1339,7 @@ class EBookReader {
|
|||||||
**/
|
**/
|
||||||
loadSettings() {
|
loadSettings() {
|
||||||
this.readerSettings = JSON.parse(
|
this.readerSettings = JSON.parse(
|
||||||
localStorage.getItem("readerSettings") || "{}"
|
localStorage.getItem("readerSettings") || "{}",
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
50
assets/sw.js
50
assets/sw.js
@@ -38,6 +38,7 @@ const ROUTES = [
|
|||||||
{ route: "/local", type: CACHE_UPDATE_ASYNC },
|
{ route: "/local", type: CACHE_UPDATE_ASYNC },
|
||||||
{ route: "/reader", type: CACHE_UPDATE_ASYNC },
|
{ route: "/reader", type: CACHE_UPDATE_ASYNC },
|
||||||
{ route: "/manifest.json", type: CACHE_UPDATE_ASYNC },
|
{ route: "/manifest.json", type: CACHE_UPDATE_ASYNC },
|
||||||
|
{ route: /^\/assets\/reader\/fonts\//, type: CACHE_ONLY },
|
||||||
{ route: /^\/assets\//, type: CACHE_UPDATE_ASYNC },
|
{ route: /^\/assets\//, type: CACHE_UPDATE_ASYNC },
|
||||||
{
|
{
|
||||||
route: /^\/documents\/[a-zA-Z0-9]{32}\/(cover|file)$/,
|
route: /^\/documents\/[a-zA-Z0-9]{32}\/(cover|file)$/,
|
||||||
@@ -63,9 +64,10 @@ const PRECACHE_ASSETS = [
|
|||||||
"/reader",
|
"/reader",
|
||||||
"/assets/local/index.js",
|
"/assets/local/index.js",
|
||||||
"/assets/reader/index.js",
|
"/assets/reader/index.js",
|
||||||
|
"/assets/reader/fonts.css",
|
||||||
|
"/assets/reader/themes.css",
|
||||||
"/assets/icons/icon512.png",
|
"/assets/icons/icon512.png",
|
||||||
"/assets/images/no-cover.jpg",
|
"/assets/images/no-cover.jpg",
|
||||||
"/assets/reader/readerThemes.css",
|
|
||||||
|
|
||||||
// Main App Assets
|
// Main App Assets
|
||||||
"/manifest.json",
|
"/manifest.json",
|
||||||
@@ -78,13 +80,26 @@ const PRECACHE_ASSETS = [
|
|||||||
"/assets/lib/epub.min.js",
|
"/assets/lib/epub.min.js",
|
||||||
"/assets/lib/no-sleep.min.js",
|
"/assets/lib/no-sleep.min.js",
|
||||||
"/assets/lib/idb-keyval.min.js",
|
"/assets/lib/idb-keyval.min.js",
|
||||||
|
|
||||||
|
// Fonts
|
||||||
|
"/assets/reader/fonts/arbutus-slab-v16-latin_latin-ext-regular.woff2",
|
||||||
|
"/assets/reader/fonts/lato-v24-latin_latin-ext-100.woff2",
|
||||||
|
"/assets/reader/fonts/lato-v24-latin_latin-ext-100italic.woff2",
|
||||||
|
"/assets/reader/fonts/lato-v24-latin_latin-ext-700.woff2",
|
||||||
|
"/assets/reader/fonts/lato-v24-latin_latin-ext-700italic.woff2",
|
||||||
|
"/assets/reader/fonts/lato-v24-latin_latin-ext-italic.woff2",
|
||||||
|
"/assets/reader/fonts/lato-v24-latin_latin-ext-regular.woff2",
|
||||||
|
"/assets/reader/fonts/open-sans-v36-latin_latin-ext-700.woff2",
|
||||||
|
"/assets/reader/fonts/open-sans-v36-latin_latin-ext-700italic.woff2",
|
||||||
|
"/assets/reader/fonts/open-sans-v36-latin_latin-ext-italic.woff2",
|
||||||
|
"/assets/reader/fonts/open-sans-v36-latin_latin-ext-regular.woff2",
|
||||||
];
|
];
|
||||||
|
|
||||||
// ------------------------------------------------------- //
|
// ------------------------------------------------------- //
|
||||||
// ----------------------- Helpers ----------------------- //
|
// ----------------------- Helpers ----------------------- //
|
||||||
// ------------------------------------------------------- //
|
// ------------------------------------------------------- //
|
||||||
|
|
||||||
function purgeCache() {
|
async function purgeCache() {
|
||||||
console.log("[purgeCache] Purging Cache");
|
console.log("[purgeCache] Purging Cache");
|
||||||
return caches.keys().then(function (names) {
|
return caches.keys().then(function (names) {
|
||||||
for (let name of names) caches.delete(name);
|
for (let name of names) caches.delete(name);
|
||||||
@@ -121,7 +136,7 @@ async function handleFetch(event) {
|
|||||||
const directive = ROUTES.find(
|
const directive = ROUTES.find(
|
||||||
(item) =>
|
(item) =>
|
||||||
(item.route instanceof RegExp && url.match(item.route)) ||
|
(item.route instanceof RegExp && url.match(item.route)) ||
|
||||||
url == item.route
|
url == item.route,
|
||||||
) || { type: CACHE_NEVER };
|
) || { type: CACHE_NEVER };
|
||||||
|
|
||||||
// Get Fallback
|
// Get Fallback
|
||||||
@@ -146,11 +161,11 @@ async function handleFetch(event) {
|
|||||||
);
|
);
|
||||||
case CACHE_UPDATE_SYNC:
|
case CACHE_UPDATE_SYNC:
|
||||||
return updateCache(event.request).catch(
|
return updateCache(event.request).catch(
|
||||||
(e) => currentCache || fallbackFunc(event)
|
(e) => currentCache || fallbackFunc(event),
|
||||||
);
|
);
|
||||||
case CACHE_UPDATE_ASYNC:
|
case CACHE_UPDATE_ASYNC:
|
||||||
let newResponse = updateCache(event.request).catch((e) =>
|
let newResponse = updateCache(event.request).catch((e) =>
|
||||||
fallbackFunc(event)
|
fallbackFunc(event),
|
||||||
);
|
);
|
||||||
|
|
||||||
return currentCache || newResponse;
|
return currentCache || newResponse;
|
||||||
@@ -177,7 +192,7 @@ function handleMessage(event) {
|
|||||||
.filter(
|
.filter(
|
||||||
(item) =>
|
(item) =>
|
||||||
item.startsWith("/documents/") ||
|
item.startsWith("/documents/") ||
|
||||||
item.startsWith("/reader/progress/")
|
item.startsWith("/reader/progress/"),
|
||||||
);
|
);
|
||||||
|
|
||||||
// Derive Unique IDs
|
// Derive Unique IDs
|
||||||
@@ -185,8 +200,8 @@ function handleMessage(event) {
|
|||||||
new Set(
|
new Set(
|
||||||
docResources
|
docResources
|
||||||
.filter((item) => item.startsWith("/documents/"))
|
.filter((item) => item.startsWith("/documents/"))
|
||||||
.map((item) => item.split("/")[2])
|
.map((item) => item.split("/")[2]),
|
||||||
)
|
),
|
||||||
);
|
);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -199,14 +214,14 @@ function handleMessage(event) {
|
|||||||
.filter(
|
.filter(
|
||||||
(id) =>
|
(id) =>
|
||||||
docResources.includes("/documents/" + id + "/file") &&
|
docResources.includes("/documents/" + id + "/file") &&
|
||||||
docResources.includes("/reader/progress/" + id)
|
docResources.includes("/reader/progress/" + id),
|
||||||
)
|
)
|
||||||
.map(async (id) => {
|
.map(async (id) => {
|
||||||
let url = "/reader/progress/" + id;
|
let url = "/reader/progress/" + id;
|
||||||
let currentCache = await caches.match(url);
|
let currentCache = await caches.match(url);
|
||||||
let resp = await updateCache(url).catch((e) => currentCache);
|
let resp = await updateCache(url).catch((e) => currentCache);
|
||||||
return resp.json();
|
return resp.json();
|
||||||
})
|
}),
|
||||||
);
|
);
|
||||||
|
|
||||||
event.source.postMessage({ id, data: cachedDocuments });
|
event.source.postMessage({ id, data: cachedDocuments });
|
||||||
@@ -218,7 +233,7 @@ function handleMessage(event) {
|
|||||||
Promise.all([
|
Promise.all([
|
||||||
cache.delete("/documents/" + data.id + "/file"),
|
cache.delete("/documents/" + data.id + "/file"),
|
||||||
cache.delete("/reader/progress/" + data.id),
|
cache.delete("/reader/progress/" + data.id),
|
||||||
])
|
]),
|
||||||
)
|
)
|
||||||
.then(() => event.source.postMessage({ id, data: "SUCCESS" }))
|
.then(() => event.source.postMessage({ id, data: "SUCCESS" }))
|
||||||
.catch(() => event.source.postMessage({ id, data: "FAILURE" }));
|
.catch(() => event.source.postMessage({ id, data: "FAILURE" }));
|
||||||
@@ -238,6 +253,13 @@ self.addEventListener("install", function (event) {
|
|||||||
event.waitUntil(handleInstall(event));
|
event.waitUntil(handleInstall(event));
|
||||||
});
|
});
|
||||||
|
|
||||||
self.addEventListener("fetch", (event) =>
|
self.addEventListener("fetch", (event) => {
|
||||||
event.respondWith(handleFetch(event))
|
/**
|
||||||
);
|
* Weird things happen when a service worker attempts to handle a request
|
||||||
|
* when the server responds with chunked transfer encoding. Right now we only
|
||||||
|
* use chunked encoding on POSTs. So this is to avoid processing those.
|
||||||
|
**/
|
||||||
|
|
||||||
|
if (event.request.method != "GET") return;
|
||||||
|
return event.respondWith(handleFetch(event));
|
||||||
|
});
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
104
config/config.go
104
config/config.go
@@ -1,7 +1,11 @@
|
|||||||
package config
|
package config
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"fmt"
|
||||||
"os"
|
"os"
|
||||||
|
"path"
|
||||||
|
"path/filepath"
|
||||||
|
"runtime"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
log "github.com/sirupsen/logrus"
|
log "github.com/sirupsen/logrus"
|
||||||
@@ -27,38 +31,99 @@ type Config struct {
|
|||||||
LogLevel string
|
LogLevel string
|
||||||
|
|
||||||
// Cookie Settings
|
// Cookie Settings
|
||||||
CookieSessionKey string
|
CookieAuthKey string
|
||||||
|
CookieEncKey string
|
||||||
CookieSecure bool
|
CookieSecure bool
|
||||||
CookieHTTPOnly bool
|
CookieHTTPOnly bool
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type customFormatter struct {
|
||||||
|
log.Formatter
|
||||||
|
}
|
||||||
|
|
||||||
|
// Force UTC & Set type (app)
|
||||||
|
func (cf customFormatter) Format(e *log.Entry) ([]byte, error) {
|
||||||
|
if e.Data["type"] == nil {
|
||||||
|
e.Data["type"] = "app"
|
||||||
|
}
|
||||||
|
e.Time = e.Time.UTC()
|
||||||
|
return cf.Formatter.Format(e)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set at runtime
|
||||||
|
var version string = "develop"
|
||||||
|
|
||||||
func Load() *Config {
|
func Load() *Config {
|
||||||
c := &Config{
|
c := &Config{
|
||||||
Version: "0.0.1",
|
Version: version,
|
||||||
DBType: trimLowerString(getEnv("DATABASE_TYPE", "SQLite")),
|
|
||||||
DBName: trimLowerString(getEnv("DATABASE_NAME", "antholume")),
|
|
||||||
ConfigPath: getEnv("CONFIG_PATH", "/config"),
|
ConfigPath: getEnv("CONFIG_PATH", "/config"),
|
||||||
DataPath: getEnv("DATA_PATH", "/data"),
|
DataPath: getEnv("DATA_PATH", "/data"),
|
||||||
ListenPort: getEnv("LISTEN_PORT", "8585"),
|
ListenPort: getEnv("LISTEN_PORT", "8585"),
|
||||||
|
DBType: trimLowerString(getEnv("DATABASE_TYPE", "SQLite")),
|
||||||
|
DBName: trimLowerString(getEnv("DATABASE_NAME", "antholume")),
|
||||||
RegistrationEnabled: trimLowerString(getEnv("REGISTRATION_ENABLED", "false")) == "true",
|
RegistrationEnabled: trimLowerString(getEnv("REGISTRATION_ENABLED", "false")) == "true",
|
||||||
DemoMode: trimLowerString(getEnv("DEMO_MODE", "false")) == "true",
|
DemoMode: trimLowerString(getEnv("DEMO_MODE", "false")) == "true",
|
||||||
SearchEnabled: trimLowerString(getEnv("SEARCH_ENABLED", "false")) == "true",
|
SearchEnabled: trimLowerString(getEnv("SEARCH_ENABLED", "false")) == "true",
|
||||||
CookieSessionKey: trimLowerString(getEnv("COOKIE_SESSION_KEY", "")),
|
CookieAuthKey: trimLowerString(getEnv("COOKIE_AUTH_KEY", "")),
|
||||||
|
CookieEncKey: trimLowerString(getEnv("COOKIE_ENC_KEY", "")),
|
||||||
LogLevel: trimLowerString(getEnv("LOG_LEVEL", "info")),
|
LogLevel: trimLowerString(getEnv("LOG_LEVEL", "info")),
|
||||||
CookieSecure: trimLowerString(getEnv("COOKIE_SECURE", "true")) == "true",
|
CookieSecure: trimLowerString(getEnv("COOKIE_SECURE", "true")) == "true",
|
||||||
CookieHTTPOnly: trimLowerString(getEnv("COOKIE_HTTP_ONLY", "true")) == "true",
|
CookieHTTPOnly: trimLowerString(getEnv("COOKIE_HTTP_ONLY", "true")) == "true",
|
||||||
}
|
}
|
||||||
|
|
||||||
// Log Level
|
// Parse log level
|
||||||
ll, err := log.ParseLevel(c.LogLevel)
|
logLevel, err := log.ParseLevel(c.LogLevel)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
ll = log.InfoLevel
|
logLevel = log.InfoLevel
|
||||||
}
|
}
|
||||||
log.SetLevel(ll)
|
|
||||||
|
// Create custom formatter
|
||||||
|
logFormatter := &customFormatter{&log.JSONFormatter{
|
||||||
|
CallerPrettyfier: prettyCaller,
|
||||||
|
}}
|
||||||
|
|
||||||
|
// Create log rotator
|
||||||
|
rotateFileHook, err := NewRotateFileHook(RotateFileConfig{
|
||||||
|
Filename: path.Join(c.ConfigPath, "logs/antholume.log"),
|
||||||
|
MaxSize: 50,
|
||||||
|
MaxBackups: 3,
|
||||||
|
MaxAge: 30,
|
||||||
|
Level: logLevel,
|
||||||
|
Formatter: logFormatter,
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal("Unable to initialize file rotate hook")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rotate now
|
||||||
|
rotateFileHook.Rotate()
|
||||||
|
|
||||||
|
// Set logger settings
|
||||||
|
log.SetLevel(logLevel)
|
||||||
|
log.SetFormatter(logFormatter)
|
||||||
|
log.SetReportCaller(true)
|
||||||
|
log.AddHook(rotateFileHook)
|
||||||
|
|
||||||
|
// Ensure directories exist
|
||||||
|
c.EnsureDirectories()
|
||||||
|
|
||||||
return c
|
return c
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Ensures needed directories exist
|
||||||
|
func (c *Config) EnsureDirectories() {
|
||||||
|
os.Mkdir(c.ConfigPath, 0755)
|
||||||
|
os.Mkdir(c.DataPath, 0755)
|
||||||
|
|
||||||
|
docDir := filepath.Join(c.DataPath, "documents")
|
||||||
|
coversDir := filepath.Join(c.DataPath, "covers")
|
||||||
|
backupDir := filepath.Join(c.DataPath, "backups")
|
||||||
|
|
||||||
|
os.Mkdir(docDir, 0755)
|
||||||
|
os.Mkdir(coversDir, 0755)
|
||||||
|
os.Mkdir(backupDir, 0755)
|
||||||
|
}
|
||||||
|
|
||||||
func getEnv(key, fallback string) string {
|
func getEnv(key, fallback string) string {
|
||||||
if value, ok := os.LookupEnv(key); ok {
|
if value, ok := os.LookupEnv(key); ok {
|
||||||
return value
|
return value
|
||||||
@@ -69,3 +134,24 @@ func getEnv(key, fallback string) string {
|
|||||||
func trimLowerString(val string) string {
|
func trimLowerString(val string) string {
|
||||||
return strings.ToLower(strings.TrimSpace(val))
|
return strings.ToLower(strings.TrimSpace(val))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func prettyCaller(f *runtime.Frame) (function string, file string) {
|
||||||
|
purgePrefix := "reichard.io/antholume/"
|
||||||
|
|
||||||
|
pathName := strings.Replace(f.Func.Name(), purgePrefix, "", 1)
|
||||||
|
parts := strings.Split(pathName, ".")
|
||||||
|
|
||||||
|
filepath, line := f.Func.FileLine(f.PC)
|
||||||
|
splitFilePath := strings.Split(filepath, "/")
|
||||||
|
|
||||||
|
fileName := fmt.Sprintf("%s/%s@%d", parts[0], splitFilePath[len(splitFilePath)-1], line)
|
||||||
|
functionName := strings.Replace(pathName, parts[0]+".", "", 1)
|
||||||
|
|
||||||
|
// Exclude GIN Logger
|
||||||
|
if functionName == "NewApi.apiLogger.func1" {
|
||||||
|
fileName = ""
|
||||||
|
functionName = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
return functionName, fileName
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,35 +1,37 @@
|
|||||||
package config
|
package config
|
||||||
|
|
||||||
import "testing"
|
import (
|
||||||
|
"runtime"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
)
|
||||||
|
|
||||||
func TestLoadConfig(t *testing.T) {
|
func TestLoadConfig(t *testing.T) {
|
||||||
conf := Load()
|
conf := Load()
|
||||||
want := "sqlite"
|
assert.Equal(t, "sqlite", conf.DBType)
|
||||||
if conf.DBType != want {
|
|
||||||
t.Fatalf(`Load().DBType = %q, want match for %#q, nil`, conf.DBType, want)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGetEnvDefault(t *testing.T) {
|
func TestGetEnvDefault(t *testing.T) {
|
||||||
want := "def_val"
|
desiredValue := "def_val"
|
||||||
envDefault := getEnv("DEFAULT_TEST", want)
|
envDefault := getEnv("DEFAULT_TEST", desiredValue)
|
||||||
if envDefault != want {
|
|
||||||
t.Fatalf(`getEnv("DEFAULT_TEST", "def_val") = %q, want match for %#q, nil`, envDefault, want)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestGetEnvSet(t *testing.T) {
|
assert.Equal(t, desiredValue, envDefault)
|
||||||
envDefault := getEnv("SET_TEST", "not_this")
|
|
||||||
want := "set_val"
|
|
||||||
if envDefault != want {
|
|
||||||
t.Fatalf(`getEnv("SET_TEST", "not_this") = %q, want match for %#q, nil`, envDefault, want)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTrimLowerString(t *testing.T) {
|
func TestTrimLowerString(t *testing.T) {
|
||||||
want := "trimtest"
|
desiredValue := "trimtest"
|
||||||
output := trimLowerString(" trimTest ")
|
outputValue := trimLowerString(" trimTest ")
|
||||||
if output != want {
|
|
||||||
t.Fatalf(`trimLowerString(" trimTest ") = %q, want match for %#q, nil`, output, want)
|
assert.Equal(t, desiredValue, outputValue)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestPrettyCaller(t *testing.T) {
|
||||||
|
p, _, _, _ := runtime.Caller(0)
|
||||||
|
result := runtime.CallersFrames([]uintptr{p})
|
||||||
|
f, _ := result.Next()
|
||||||
|
functionName, fileName := prettyCaller(&f)
|
||||||
|
|
||||||
|
assert.Equal(t, "TestPrettyCaller", functionName, "should have current function name")
|
||||||
|
assert.Equal(t, "config/config_test.go@30", fileName, "should have current file path and line number")
|
||||||
}
|
}
|
||||||
|
|||||||
54
config/logger.go
Normal file
54
config/logger.go
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
package config
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/sirupsen/logrus"
|
||||||
|
"gopkg.in/natefinch/lumberjack.v2"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Modified "snowzach/rotatefilehook" to support manual rotation
|
||||||
|
|
||||||
|
type RotateFileConfig struct {
|
||||||
|
Filename string
|
||||||
|
MaxSize int
|
||||||
|
MaxBackups int
|
||||||
|
MaxAge int
|
||||||
|
Compress bool
|
||||||
|
Level logrus.Level
|
||||||
|
Formatter logrus.Formatter
|
||||||
|
}
|
||||||
|
|
||||||
|
type RotateFileHook struct {
|
||||||
|
Config RotateFileConfig
|
||||||
|
logWriter *lumberjack.Logger
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewRotateFileHook(config RotateFileConfig) (*RotateFileHook, error) {
|
||||||
|
hook := RotateFileHook{
|
||||||
|
Config: config,
|
||||||
|
}
|
||||||
|
hook.logWriter = &lumberjack.Logger{
|
||||||
|
Filename: config.Filename,
|
||||||
|
MaxSize: config.MaxSize,
|
||||||
|
MaxBackups: config.MaxBackups,
|
||||||
|
MaxAge: config.MaxAge,
|
||||||
|
Compress: config.Compress,
|
||||||
|
}
|
||||||
|
return &hook, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (hook *RotateFileHook) Rotate() error {
|
||||||
|
return hook.logWriter.Rotate()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (hook *RotateFileHook) Levels() []logrus.Level {
|
||||||
|
return logrus.AllLevels[:hook.Config.Level+1]
|
||||||
|
}
|
||||||
|
|
||||||
|
func (hook *RotateFileHook) Fire(entry *logrus.Entry) (err error) {
|
||||||
|
b, err := hook.Config.Formatter.Format(entry)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
hook.logWriter.Write(b)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
// Code generated by sqlc. DO NOT EDIT.
|
// Code generated by sqlc. DO NOT EDIT.
|
||||||
// versions:
|
// versions:
|
||||||
// sqlc v1.21.0
|
// sqlc v1.29.0
|
||||||
|
|
||||||
package database
|
package database
|
||||||
|
|
||||||
|
|||||||
151
database/document_user_statistics.sql
Normal file
151
database/document_user_statistics.sql
Normal file
@@ -0,0 +1,151 @@
|
|||||||
|
WITH grouped_activity AS (
|
||||||
|
SELECT
|
||||||
|
ga.user_id,
|
||||||
|
ga.document_id,
|
||||||
|
MAX(ga.created_at) AS created_at,
|
||||||
|
MAX(ga.start_time) AS start_time,
|
||||||
|
MIN(ga.start_percentage) AS start_percentage,
|
||||||
|
MAX(ga.end_percentage) AS end_percentage,
|
||||||
|
|
||||||
|
-- Total Duration & Percentage
|
||||||
|
SUM(ga.duration) AS total_time_seconds,
|
||||||
|
SUM(ga.end_percentage - ga.start_percentage) AS total_read_percentage,
|
||||||
|
|
||||||
|
-- Yearly Duration
|
||||||
|
SUM(
|
||||||
|
CASE
|
||||||
|
WHEN
|
||||||
|
ga.start_time >= DATE('now', '-1 year')
|
||||||
|
THEN ga.duration
|
||||||
|
ELSE 0
|
||||||
|
END
|
||||||
|
)
|
||||||
|
AS yearly_time_seconds,
|
||||||
|
|
||||||
|
-- Yearly Percentage
|
||||||
|
SUM(
|
||||||
|
CASE
|
||||||
|
WHEN
|
||||||
|
ga.start_time >= DATE('now', '-1 year')
|
||||||
|
THEN ga.end_percentage - ga.start_percentage
|
||||||
|
ELSE 0
|
||||||
|
END
|
||||||
|
)
|
||||||
|
AS yearly_read_percentage,
|
||||||
|
|
||||||
|
-- Monthly Duration
|
||||||
|
SUM(
|
||||||
|
CASE
|
||||||
|
WHEN
|
||||||
|
ga.start_time >= DATE('now', '-1 month')
|
||||||
|
THEN ga.duration
|
||||||
|
ELSE 0
|
||||||
|
END
|
||||||
|
)
|
||||||
|
AS monthly_time_seconds,
|
||||||
|
|
||||||
|
-- Monthly Percentage
|
||||||
|
SUM(
|
||||||
|
CASE
|
||||||
|
WHEN
|
||||||
|
ga.start_time >= DATE('now', '-1 month')
|
||||||
|
THEN ga.end_percentage - ga.start_percentage
|
||||||
|
ELSE 0
|
||||||
|
END
|
||||||
|
)
|
||||||
|
AS monthly_read_percentage,
|
||||||
|
|
||||||
|
-- Weekly Duration
|
||||||
|
SUM(
|
||||||
|
CASE
|
||||||
|
WHEN
|
||||||
|
ga.start_time >= DATE('now', '-7 days')
|
||||||
|
THEN ga.duration
|
||||||
|
ELSE 0
|
||||||
|
END
|
||||||
|
)
|
||||||
|
AS weekly_time_seconds,
|
||||||
|
|
||||||
|
-- Weekly Percentage
|
||||||
|
SUM(
|
||||||
|
CASE
|
||||||
|
WHEN
|
||||||
|
ga.start_time >= DATE('now', '-7 days')
|
||||||
|
THEN ga.end_percentage - ga.start_percentage
|
||||||
|
ELSE 0
|
||||||
|
END
|
||||||
|
)
|
||||||
|
AS weekly_read_percentage
|
||||||
|
|
||||||
|
FROM activity AS ga
|
||||||
|
GROUP BY ga.user_id, ga.document_id
|
||||||
|
),
|
||||||
|
|
||||||
|
current_progress AS (
|
||||||
|
SELECT
|
||||||
|
user_id,
|
||||||
|
document_id,
|
||||||
|
COALESCE((
|
||||||
|
SELECT dp.percentage
|
||||||
|
FROM document_progress AS dp
|
||||||
|
WHERE
|
||||||
|
dp.user_id = iga.user_id
|
||||||
|
AND dp.document_id = iga.document_id
|
||||||
|
ORDER BY dp.created_at DESC
|
||||||
|
LIMIT 1
|
||||||
|
), end_percentage) AS percentage
|
||||||
|
FROM grouped_activity AS iga
|
||||||
|
)
|
||||||
|
|
||||||
|
INSERT INTO document_user_statistics
|
||||||
|
SELECT
|
||||||
|
ga.document_id,
|
||||||
|
ga.user_id,
|
||||||
|
cp.percentage,
|
||||||
|
MAX(ga.start_time) AS last_read,
|
||||||
|
MAX(ga.created_at) AS last_seen,
|
||||||
|
SUM(ga.total_read_percentage) AS read_percentage,
|
||||||
|
|
||||||
|
-- All Time WPM
|
||||||
|
SUM(ga.total_time_seconds) AS total_time_seconds,
|
||||||
|
(CAST(COALESCE(d.words, 0.0) AS REAL) * SUM(ga.total_read_percentage))
|
||||||
|
AS total_words_read,
|
||||||
|
(CAST(COALESCE(d.words, 0.0) AS REAL) * SUM(ga.total_read_percentage))
|
||||||
|
/ (SUM(ga.total_time_seconds) / 60.0) AS total_wpm,
|
||||||
|
|
||||||
|
-- Yearly WPM
|
||||||
|
ga.yearly_time_seconds,
|
||||||
|
CAST(COALESCE(d.words, 0.0) AS REAL) * ga.yearly_read_percentage
|
||||||
|
AS yearly_words_read,
|
||||||
|
COALESCE(
|
||||||
|
(CAST(COALESCE(d.words, 0.0) AS REAL) * ga.yearly_read_percentage)
|
||||||
|
/ (ga.yearly_time_seconds / 60), 0.0)
|
||||||
|
AS yearly_wpm,
|
||||||
|
|
||||||
|
-- Monthly WPM
|
||||||
|
ga.monthly_time_seconds,
|
||||||
|
CAST(COALESCE(d.words, 0.0) AS REAL) * ga.monthly_read_percentage
|
||||||
|
AS monthly_words_read,
|
||||||
|
COALESCE(
|
||||||
|
(CAST(COALESCE(d.words, 0.0) AS REAL) * ga.monthly_read_percentage)
|
||||||
|
/ (ga.monthly_time_seconds / 60), 0.0)
|
||||||
|
AS monthly_wpm,
|
||||||
|
|
||||||
|
-- Weekly WPM
|
||||||
|
ga.weekly_time_seconds,
|
||||||
|
CAST(COALESCE(d.words, 0.0) AS REAL) * ga.weekly_read_percentage
|
||||||
|
AS weekly_words_read,
|
||||||
|
COALESCE(
|
||||||
|
(CAST(COALESCE(d.words, 0.0) AS REAL) * ga.weekly_read_percentage)
|
||||||
|
/ (ga.weekly_time_seconds / 60), 0.0)
|
||||||
|
AS weekly_wpm
|
||||||
|
|
||||||
|
FROM grouped_activity AS ga
|
||||||
|
INNER JOIN
|
||||||
|
current_progress AS cp
|
||||||
|
ON ga.user_id = cp.user_id AND ga.document_id = cp.document_id
|
||||||
|
INNER JOIN
|
||||||
|
documents AS d
|
||||||
|
ON ga.document_id = d.id
|
||||||
|
GROUP BY ga.document_id, ga.user_id
|
||||||
|
ORDER BY total_wpm DESC;
|
||||||
27
database/documents.go
Normal file
27
database/documents.go
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
package database
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"reichard.io/antholume/pkg/ptr"
|
||||||
|
"reichard.io/antholume/pkg/sliceutils"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (d *DBManager) GetDocument(ctx context.Context, docID, userID string) (*GetDocumentsWithStatsRow, error) {
|
||||||
|
documents, err := d.Queries.GetDocumentsWithStats(ctx, GetDocumentsWithStatsParams{
|
||||||
|
ID: ptr.Of(docID),
|
||||||
|
UserID: userID,
|
||||||
|
Limit: 1,
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
document, found := sliceutils.First(documents)
|
||||||
|
if !found {
|
||||||
|
return nil, fmt.Errorf("document not found: %s", docID)
|
||||||
|
}
|
||||||
|
|
||||||
|
return &document, nil
|
||||||
|
}
|
||||||
115
database/documents_test.go
Normal file
115
database/documents_test.go
Normal file
@@ -0,0 +1,115 @@
|
|||||||
|
package database
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/suite"
|
||||||
|
|
||||||
|
"reichard.io/antholume/config"
|
||||||
|
)
|
||||||
|
|
||||||
|
type DocumentsTestSuite struct {
|
||||||
|
suite.Suite
|
||||||
|
dbm *DBManager
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDocuments(t *testing.T) {
|
||||||
|
suite.Run(t, new(DocumentsTestSuite))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *DocumentsTestSuite) SetupTest() {
|
||||||
|
cfg := config.Config{
|
||||||
|
DBType: "memory",
|
||||||
|
}
|
||||||
|
|
||||||
|
suite.dbm = NewMgr(&cfg)
|
||||||
|
|
||||||
|
// Create Document
|
||||||
|
_, err := suite.dbm.Queries.UpsertDocument(context.Background(), UpsertDocumentParams{
|
||||||
|
ID: documentID,
|
||||||
|
Title: &documentTitle,
|
||||||
|
Author: &documentAuthor,
|
||||||
|
Words: &documentWords,
|
||||||
|
})
|
||||||
|
suite.NoError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// DOCUMENT - TODO:
|
||||||
|
// - (q *Queries) GetDocumentProgress
|
||||||
|
// - (q *Queries) GetDocumentWithStats
|
||||||
|
// - (q *Queries) GetDocumentsSize
|
||||||
|
// - (q *Queries) GetDocumentsWithStats
|
||||||
|
// - (q *Queries) GetMissingDocuments
|
||||||
|
func (suite *DocumentsTestSuite) TestGetDocument() {
|
||||||
|
doc, err := suite.dbm.Queries.GetDocument(context.Background(), documentID)
|
||||||
|
suite.Nil(err, "should have nil err")
|
||||||
|
suite.Equal(documentID, doc.ID, "should have changed the document")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *DocumentsTestSuite) TestUpsertDocument() {
|
||||||
|
testDocID := "docid1"
|
||||||
|
|
||||||
|
doc, err := suite.dbm.Queries.UpsertDocument(context.Background(), UpsertDocumentParams{
|
||||||
|
ID: testDocID,
|
||||||
|
Title: &documentTitle,
|
||||||
|
Author: &documentAuthor,
|
||||||
|
})
|
||||||
|
|
||||||
|
suite.Nil(err, "should have nil err")
|
||||||
|
suite.Equal(testDocID, doc.ID, "should have document id")
|
||||||
|
suite.Equal(documentTitle, *doc.Title, "should have document title")
|
||||||
|
suite.Equal(documentAuthor, *doc.Author, "should have document author")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *DocumentsTestSuite) TestDeleteDocument() {
|
||||||
|
changed, err := suite.dbm.Queries.DeleteDocument(context.Background(), documentID)
|
||||||
|
suite.Nil(err, "should have nil err")
|
||||||
|
suite.Equal(int64(1), changed, "should have changed the document")
|
||||||
|
|
||||||
|
doc, err := suite.dbm.Queries.GetDocument(context.Background(), documentID)
|
||||||
|
suite.Nil(err, "should have nil err")
|
||||||
|
suite.True(doc.Deleted, "should have deleted the document")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *DocumentsTestSuite) TestGetDeletedDocuments() {
|
||||||
|
changed, err := suite.dbm.Queries.DeleteDocument(context.Background(), documentID)
|
||||||
|
suite.Nil(err, "should have nil err")
|
||||||
|
suite.Equal(int64(1), changed, "should have changed the document")
|
||||||
|
|
||||||
|
deletedDocs, err := suite.dbm.Queries.GetDeletedDocuments(context.Background(), []string{documentID})
|
||||||
|
suite.Nil(err, "should have nil err")
|
||||||
|
suite.Len(deletedDocs, 1, "should have one deleted document")
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO - Convert GetWantedDocuments -> (sqlc.slice('document_ids'));
|
||||||
|
func (suite *DocumentsTestSuite) TestGetWantedDocuments() {
|
||||||
|
wantedDocs, err := suite.dbm.Queries.GetWantedDocuments(context.Background(), fmt.Sprintf("[\"%s\"]", documentID))
|
||||||
|
suite.Nil(err, "should have nil err")
|
||||||
|
suite.Len(wantedDocs, 1, "should have one wanted document")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *DocumentsTestSuite) TestGetMissingDocuments() {
|
||||||
|
// Create Document
|
||||||
|
_, err := suite.dbm.Queries.UpsertDocument(context.Background(), UpsertDocumentParams{
|
||||||
|
ID: documentID,
|
||||||
|
Filepath: &documentFilepath,
|
||||||
|
})
|
||||||
|
suite.NoError(err)
|
||||||
|
|
||||||
|
missingDocs, err := suite.dbm.Queries.GetMissingDocuments(context.Background(), []string{documentID})
|
||||||
|
suite.Nil(err, "should have nil err")
|
||||||
|
suite.Len(missingDocs, 0, "should have no wanted document")
|
||||||
|
|
||||||
|
missingDocs, err = suite.dbm.Queries.GetMissingDocuments(context.Background(), []string{"other"})
|
||||||
|
suite.Nil(err, "should have nil err")
|
||||||
|
suite.Len(missingDocs, 1, "should have one missing document")
|
||||||
|
suite.Equal(documentID, missingDocs[0].ID, "should have missing doc")
|
||||||
|
|
||||||
|
// TODO - https://github.com/sqlc-dev/sqlc/issues/3451
|
||||||
|
// missingDocs, err = suite.dbm.Queries.GetMissingDocuments(context.Background(), []string{})
|
||||||
|
// suite.Nil(err, "should have nil err")
|
||||||
|
// suite.Len(missingDocs, 1, "should have one missing document")
|
||||||
|
// suite.Equal(documentID, missingDocs[0].ID, "should have missing doc")
|
||||||
|
}
|
||||||
@@ -3,84 +3,256 @@ package database
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"database/sql"
|
"database/sql"
|
||||||
_ "embed"
|
"database/sql/driver"
|
||||||
|
"embed"
|
||||||
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"path/filepath"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/pressly/goose/v3"
|
||||||
log "github.com/sirupsen/logrus"
|
log "github.com/sirupsen/logrus"
|
||||||
_ "modernc.org/sqlite"
|
sqlite "modernc.org/sqlite"
|
||||||
"path"
|
"reichard.io/antholume/config"
|
||||||
"reichard.io/bbank/config"
|
_ "reichard.io/antholume/database/migrations"
|
||||||
)
|
)
|
||||||
|
|
||||||
type DBManager struct {
|
type DBManager struct {
|
||||||
DB *sql.DB
|
DB *sql.DB
|
||||||
Ctx context.Context
|
|
||||||
Queries *Queries
|
Queries *Queries
|
||||||
|
cfg *config.Config
|
||||||
}
|
}
|
||||||
|
|
||||||
//go:embed schema.sql
|
//go:embed schema.sql
|
||||||
var ddl string
|
var ddl string
|
||||||
|
|
||||||
//go:embed update_temp_tables.sql
|
//go:embed user_streaks.sql
|
||||||
var tsql string
|
var user_streaks string
|
||||||
|
|
||||||
//go:embed update_document_user_statistics.sql
|
//go:embed document_user_statistics.sql
|
||||||
var doc_user_stat_sql string
|
var document_user_statistics string
|
||||||
|
|
||||||
func NewMgr(c *config.Config) *DBManager {
|
//go:embed migrations/*
|
||||||
// Create Manager
|
var migrations embed.FS
|
||||||
dbm := &DBManager{
|
|
||||||
Ctx: context.Background(),
|
// Register scalar sqlite function on init
|
||||||
|
func init() {
|
||||||
|
sqlite.MustRegisterFunction("LOCAL_TIME", &sqlite.FunctionImpl{
|
||||||
|
NArgs: 2,
|
||||||
|
Deterministic: true,
|
||||||
|
Scalar: localTime,
|
||||||
|
})
|
||||||
|
sqlite.MustRegisterFunction("LOCAL_DATE", &sqlite.FunctionImpl{
|
||||||
|
NArgs: 2,
|
||||||
|
Deterministic: true,
|
||||||
|
Scalar: localDate,
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create Database
|
// NewMgr Returns an initialized manager
|
||||||
if c.DBType == "sqlite" || c.DBType == "memory" {
|
func NewMgr(c *config.Config) *DBManager {
|
||||||
var dbLocation string = ":memory:"
|
// Create Manager
|
||||||
if c.DBType == "sqlite" {
|
dbm := &DBManager{cfg: c}
|
||||||
dbLocation = path.Join(c.ConfigPath, fmt.Sprintf("%s.db", c.DBName))
|
|
||||||
|
if err := dbm.init(context.Background()); err != nil {
|
||||||
|
log.Panic("Unable to init DB")
|
||||||
|
}
|
||||||
|
|
||||||
|
return dbm
|
||||||
|
}
|
||||||
|
|
||||||
|
// init loads the DB manager
|
||||||
|
func (dbm *DBManager) init(ctx context.Context) error {
|
||||||
|
// Build DB Location
|
||||||
|
var dbLocation string
|
||||||
|
switch dbm.cfg.DBType {
|
||||||
|
case "sqlite":
|
||||||
|
dbLocation = filepath.Join(dbm.cfg.ConfigPath, fmt.Sprintf("%s.db", dbm.cfg.DBName))
|
||||||
|
case "memory":
|
||||||
|
dbLocation = ":memory:"
|
||||||
|
default:
|
||||||
|
return fmt.Errorf("unsupported database")
|
||||||
}
|
}
|
||||||
|
|
||||||
var err error
|
var err error
|
||||||
dbm.DB, err = sql.Open("sqlite", dbLocation)
|
dbm.DB, err = sql.Open("sqlite", dbLocation)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatal(err)
|
log.Panicf("Unable to open DB: %v", err)
|
||||||
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
// Single Open Connection
|
// Single open connection
|
||||||
dbm.DB.SetMaxOpenConns(1)
|
dbm.DB.SetMaxOpenConns(1)
|
||||||
if _, err := dbm.DB.Exec(ddl, nil); err != nil {
|
|
||||||
log.Info("Exec Error:", err)
|
// Check if DB is new
|
||||||
}
|
isNew, err := isEmpty(dbm.DB)
|
||||||
} else {
|
if err != nil {
|
||||||
log.Fatal("Unsupported Database")
|
log.Panicf("Unable to determine db info: %v", err)
|
||||||
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Init SQLc
|
||||||
dbm.Queries = New(dbm.DB)
|
dbm.Queries = New(dbm.DB)
|
||||||
|
|
||||||
return dbm
|
// Execute schema
|
||||||
|
if _, err := dbm.DB.Exec(ddl, nil); err != nil {
|
||||||
|
log.Panicf("Error executing schema: %v", err)
|
||||||
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (dbm *DBManager) Shutdown() error {
|
// Perform migrations
|
||||||
return dbm.DB.Close()
|
err = dbm.performMigrations(isNew)
|
||||||
|
if err != nil && err != goose.ErrNoMigrationFiles {
|
||||||
|
log.Panicf("Error running DB migrations: %v", err)
|
||||||
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (dbm *DBManager) UpdateDocumentUserStatistic(documentID string, userID string) error {
|
// Update settings
|
||||||
// Prepare Statement
|
err = dbm.updateSettings(ctx)
|
||||||
stmt, err := dbm.DB.PrepareContext(dbm.Ctx, doc_user_stat_sql)
|
if err != nil {
|
||||||
|
log.Panicf("Error running DB settings update: %v", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Cache tables
|
||||||
|
if err := dbm.CacheTempTables(ctx); err != nil {
|
||||||
|
log.Warn("Refreshing temp table cache failed: ", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Reload closes the DB & reinits
|
||||||
|
func (dbm *DBManager) Reload(ctx context.Context) error {
|
||||||
|
// Close handle
|
||||||
|
err := dbm.DB.Close()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
defer stmt.Close()
|
|
||||||
|
|
||||||
// Execute
|
// Reinit DB
|
||||||
if _, err := stmt.ExecContext(dbm.Ctx, documentID, userID); err != nil {
|
if err := dbm.init(ctx); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (dbm *DBManager) CacheTempTables() error {
|
// CacheTempTables clears existing statistics and recalculates
|
||||||
if _, err := dbm.DB.ExecContext(dbm.Ctx, tsql); err != nil {
|
func (dbm *DBManager) CacheTempTables(ctx context.Context) error {
|
||||||
|
start := time.Now()
|
||||||
|
if _, err := dbm.DB.ExecContext(ctx, user_streaks); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
log.Debug("Cached 'user_streaks' in: ", time.Since(start))
|
||||||
|
|
||||||
|
start = time.Now()
|
||||||
|
if _, err := dbm.DB.ExecContext(ctx, document_user_statistics); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
log.Debug("Cached 'document_user_statistics' in: ", time.Since(start))
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// updateSettings ensures that we're enforcing foreign keys and enable journal
|
||||||
|
// mode.
|
||||||
|
func (dbm *DBManager) updateSettings(ctx context.Context) error {
|
||||||
|
// Set SQLite PRAGMA Settings
|
||||||
|
pragmaQuery := `
|
||||||
|
PRAGMA foreign_keys = ON;
|
||||||
|
PRAGMA journal_mode = WAL;
|
||||||
|
`
|
||||||
|
if _, err := dbm.DB.Exec(pragmaQuery, nil); err != nil {
|
||||||
|
log.Errorf("Error executing pragma: %v", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update Antholume Version in DB
|
||||||
|
if _, err := dbm.Queries.UpdateSettings(ctx, UpdateSettingsParams{
|
||||||
|
Name: "version",
|
||||||
|
Value: dbm.cfg.Version,
|
||||||
|
}); err != nil {
|
||||||
|
log.Errorf("Error updating DB settings: %v", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// performMigrations runs all migrations
|
||||||
|
func (dbm *DBManager) performMigrations(isNew bool) error {
|
||||||
|
// Create context
|
||||||
|
ctx := context.WithValue(context.Background(), "isNew", isNew) // nolint
|
||||||
|
|
||||||
|
// Set DB migration
|
||||||
|
goose.SetBaseFS(migrations)
|
||||||
|
|
||||||
|
// Run migrations
|
||||||
|
goose.SetLogger(log.StandardLogger())
|
||||||
|
if err := goose.SetDialect("sqlite"); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return goose.UpContext(ctx, dbm.DB, "migrations")
|
||||||
|
}
|
||||||
|
|
||||||
|
// isEmpty determines whether the database is empty
|
||||||
|
func isEmpty(db *sql.DB) (bool, error) {
|
||||||
|
var tableCount int
|
||||||
|
err := db.QueryRow("SELECT COUNT(*) FROM sqlite_master WHERE type='table';").Scan(&tableCount)
|
||||||
|
if err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
return tableCount == 0, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// localTime is a custom SQL function that is registered as LOCAL_TIME in the init function
|
||||||
|
func localTime(ctx *sqlite.FunctionContext, args []driver.Value) (driver.Value, error) {
|
||||||
|
timeStr, ok := args[0].(string)
|
||||||
|
if !ok {
|
||||||
|
return nil, errors.New("both arguments to TZTime must be strings")
|
||||||
|
}
|
||||||
|
|
||||||
|
timeZoneStr, ok := args[1].(string)
|
||||||
|
if !ok {
|
||||||
|
return nil, errors.New("both arguments to TZTime must be strings")
|
||||||
|
}
|
||||||
|
|
||||||
|
timeZone, err := time.LoadLocation(timeZoneStr)
|
||||||
|
if err != nil {
|
||||||
|
return nil, errors.New("unable to parse timezone")
|
||||||
|
}
|
||||||
|
|
||||||
|
formattedTime, err := time.ParseInLocation(time.RFC3339, timeStr, time.UTC)
|
||||||
|
if err != nil {
|
||||||
|
return nil, errors.New("unable to parse time")
|
||||||
|
}
|
||||||
|
|
||||||
|
return formattedTime.In(timeZone).Format(time.RFC3339), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// localDate is a custom SQL function that is registered as LOCAL_DATE in the init function
|
||||||
|
func localDate(ctx *sqlite.FunctionContext, args []driver.Value) (driver.Value, error) {
|
||||||
|
timeStr, ok := args[0].(string)
|
||||||
|
if !ok {
|
||||||
|
return nil, errors.New("both arguments to TZTime must be strings")
|
||||||
|
}
|
||||||
|
|
||||||
|
timeZoneStr, ok := args[1].(string)
|
||||||
|
if !ok {
|
||||||
|
return nil, errors.New("both arguments to TZTime must be strings")
|
||||||
|
}
|
||||||
|
|
||||||
|
timeZone, err := time.LoadLocation(timeZoneStr)
|
||||||
|
if err != nil {
|
||||||
|
return nil, errors.New("unable to parse timezone")
|
||||||
|
}
|
||||||
|
|
||||||
|
formattedTime, err := time.ParseInLocation(time.RFC3339, timeStr, time.UTC)
|
||||||
|
if err != nil {
|
||||||
|
return nil, errors.New("unable to parse time")
|
||||||
|
}
|
||||||
|
|
||||||
|
return formattedTime.In(timeZone).Format("2006-01-02"), nil
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,118 +1,78 @@
|
|||||||
package database
|
package database
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"reichard.io/bbank/config"
|
"github.com/stretchr/testify/suite"
|
||||||
|
|
||||||
|
"reichard.io/antholume/config"
|
||||||
|
"reichard.io/antholume/utils"
|
||||||
)
|
)
|
||||||
|
|
||||||
type databaseTest struct {
|
var (
|
||||||
*testing.T
|
userID string = "testUser"
|
||||||
|
userPass string = "testPass"
|
||||||
|
deviceID string = "testDevice"
|
||||||
|
deviceName string = "testDeviceName"
|
||||||
|
documentID string = "testDocument"
|
||||||
|
documentTitle string = "testTitle"
|
||||||
|
documentAuthor string = "testAuthor"
|
||||||
|
documentFilepath string = "./testPath.epub"
|
||||||
|
documentWords int64 = 5000
|
||||||
|
)
|
||||||
|
|
||||||
|
type DatabaseTestSuite struct {
|
||||||
|
suite.Suite
|
||||||
dbm *DBManager
|
dbm *DBManager
|
||||||
}
|
}
|
||||||
|
|
||||||
var userID string = "testUser"
|
func TestDatabase(t *testing.T) {
|
||||||
var userPass string = "testPass"
|
suite.Run(t, new(DatabaseTestSuite))
|
||||||
var deviceID string = "testDevice"
|
}
|
||||||
var deviceName string = "testDeviceName"
|
|
||||||
var documentID string = "testDocument"
|
|
||||||
var documentTitle string = "testTitle"
|
|
||||||
var documentAuthor string = "testAuthor"
|
|
||||||
|
|
||||||
func TestNewMgr(t *testing.T) {
|
// PROGRESS - TODO:
|
||||||
|
// - (q *Queries) GetProgress
|
||||||
|
// - (q *Queries) UpdateProgress
|
||||||
|
|
||||||
|
func (suite *DatabaseTestSuite) SetupTest() {
|
||||||
cfg := config.Config{
|
cfg := config.Config{
|
||||||
DBType: "memory",
|
DBType: "memory",
|
||||||
}
|
}
|
||||||
|
|
||||||
dbm := NewMgr(&cfg)
|
suite.dbm = NewMgr(&cfg)
|
||||||
if dbm == nil {
|
|
||||||
t.Fatalf(`Expected: *DBManager, Got: nil`)
|
|
||||||
}
|
|
||||||
|
|
||||||
t.Run("Database", func(t *testing.T) {
|
// Create User
|
||||||
dt := databaseTest{t, dbm}
|
rawAuthHash, _ := utils.GenerateToken(64)
|
||||||
dt.TestUser()
|
authHash := fmt.Sprintf("%x", rawAuthHash)
|
||||||
dt.TestDocument()
|
_, err := suite.dbm.Queries.CreateUser(context.Background(), CreateUserParams{
|
||||||
dt.TestDevice()
|
|
||||||
dt.TestActivity()
|
|
||||||
dt.TestDailyReadStats()
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func (dt *databaseTest) TestUser() {
|
|
||||||
dt.Run("User", func(t *testing.T) {
|
|
||||||
changed, err := dt.dbm.Queries.CreateUser(dt.dbm.Ctx, CreateUserParams{
|
|
||||||
ID: userID,
|
ID: userID,
|
||||||
Pass: &userPass,
|
Pass: &userPass,
|
||||||
|
AuthHash: &authHash,
|
||||||
})
|
})
|
||||||
|
suite.NoError(err)
|
||||||
|
|
||||||
if err != nil || changed != 1 {
|
// Create Document
|
||||||
t.Fatalf(`Expected: %v, Got: %v, Error: %v`, 1, changed, err)
|
_, err = suite.dbm.Queries.UpsertDocument(context.Background(), UpsertDocumentParams{
|
||||||
}
|
|
||||||
|
|
||||||
user, err := dt.dbm.Queries.GetUser(dt.dbm.Ctx, userID)
|
|
||||||
if err != nil || *user.Pass != userPass {
|
|
||||||
t.Fatalf(`Expected: %v, Got: %v, Error: %v`, userPass, *user.Pass, err)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func (dt *databaseTest) TestDocument() {
|
|
||||||
dt.Run("Document", func(t *testing.T) {
|
|
||||||
doc, err := dt.dbm.Queries.UpsertDocument(dt.dbm.Ctx, UpsertDocumentParams{
|
|
||||||
ID: documentID,
|
ID: documentID,
|
||||||
Title: &documentTitle,
|
Title: &documentTitle,
|
||||||
Author: &documentAuthor,
|
Author: &documentAuthor,
|
||||||
|
Filepath: &documentFilepath,
|
||||||
|
Words: &documentWords,
|
||||||
})
|
})
|
||||||
|
suite.NoError(err)
|
||||||
|
|
||||||
if err != nil {
|
// Create Device
|
||||||
t.Fatalf(`Expected: Document, Got: %v, Error: %v`, doc, err)
|
_, err = suite.dbm.Queries.UpsertDevice(context.Background(), UpsertDeviceParams{
|
||||||
}
|
|
||||||
|
|
||||||
if doc.ID != documentID {
|
|
||||||
t.Fatalf(`Expected: %v, Got: %v`, documentID, doc.ID)
|
|
||||||
}
|
|
||||||
|
|
||||||
if *doc.Title != documentTitle {
|
|
||||||
t.Fatalf(`Expected: %v, Got: %v`, documentTitle, *doc.Title)
|
|
||||||
}
|
|
||||||
|
|
||||||
if *doc.Author != documentAuthor {
|
|
||||||
t.Fatalf(`Expected: %v, Got: %v`, documentAuthor, *doc.Author)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func (dt *databaseTest) TestDevice() {
|
|
||||||
dt.Run("Device", func(t *testing.T) {
|
|
||||||
device, err := dt.dbm.Queries.UpsertDevice(dt.dbm.Ctx, UpsertDeviceParams{
|
|
||||||
ID: deviceID,
|
ID: deviceID,
|
||||||
UserID: userID,
|
UserID: userID,
|
||||||
DeviceName: deviceName,
|
DeviceName: deviceName,
|
||||||
})
|
})
|
||||||
|
suite.NoError(err)
|
||||||
|
|
||||||
if err != nil {
|
// Create Activity
|
||||||
t.Fatalf(`Expected: Device, Got: %v, Error: %v`, device, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if device.ID != deviceID {
|
|
||||||
t.Fatalf(`Expected: %v, Got: %v`, deviceID, device.ID)
|
|
||||||
}
|
|
||||||
|
|
||||||
if device.UserID != userID {
|
|
||||||
t.Fatalf(`Expected: %v, Got: %v`, userID, device.UserID)
|
|
||||||
}
|
|
||||||
|
|
||||||
if device.DeviceName != deviceName {
|
|
||||||
t.Fatalf(`Expected: %v, Got: %v`, deviceName, device.DeviceName)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func (dt *databaseTest) TestActivity() {
|
|
||||||
dt.Run("Progress", func(t *testing.T) {
|
|
||||||
// 10 Activities, 10 Days
|
|
||||||
end := time.Now()
|
end := time.Now()
|
||||||
start := end.AddDate(0, 0, -9)
|
start := end.AddDate(0, 0, -9)
|
||||||
var counter int64 = 0
|
var counter int64 = 0
|
||||||
@@ -121,7 +81,7 @@ func (dt *databaseTest) TestActivity() {
|
|||||||
counter += 1
|
counter += 1
|
||||||
|
|
||||||
// Add Item
|
// Add Item
|
||||||
activity, err := dt.dbm.Queries.AddActivity(dt.dbm.Ctx, AddActivityParams{
|
activity, err := suite.dbm.Queries.AddActivity(context.Background(), AddActivityParams{
|
||||||
DocumentID: documentID,
|
DocumentID: documentID,
|
||||||
DeviceID: deviceID,
|
DeviceID: deviceID,
|
||||||
UserID: userID,
|
UserID: userID,
|
||||||
@@ -131,37 +91,50 @@ func (dt *databaseTest) TestActivity() {
|
|||||||
EndPercentage: float64(counter+1) / 100.0,
|
EndPercentage: float64(counter+1) / 100.0,
|
||||||
})
|
})
|
||||||
|
|
||||||
// Validate No Error
|
suite.Nil(err, fmt.Sprintf("[%d] should have nil err for add activity", counter))
|
||||||
if err != nil {
|
suite.Equal(counter, activity.ID, fmt.Sprintf("[%d] should have correct id for add activity", counter))
|
||||||
t.Fatalf(`expected: rawactivity, got: %v, error: %v`, activity, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validate Auto Increment Working
|
|
||||||
if activity.ID != counter {
|
|
||||||
t.Fatalf(`Expected: %v, Got: %v`, counter, activity.ID)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Initiate Cache
|
// Initiate Cache
|
||||||
dt.dbm.CacheTempTables()
|
err = suite.dbm.CacheTempTables(context.Background())
|
||||||
|
suite.NoError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// DEVICES - TODO:
|
||||||
|
// - (q *Queries) GetDevice
|
||||||
|
// - (q *Queries) GetDevices
|
||||||
|
// - (q *Queries) UpsertDevice
|
||||||
|
func (suite *DatabaseTestSuite) TestDevice() {
|
||||||
|
testDevice := "dev123"
|
||||||
|
device, err := suite.dbm.Queries.UpsertDevice(context.Background(), UpsertDeviceParams{
|
||||||
|
ID: testDevice,
|
||||||
|
UserID: userID,
|
||||||
|
DeviceName: deviceName,
|
||||||
|
})
|
||||||
|
|
||||||
|
suite.Nil(err, "should have nil err")
|
||||||
|
suite.Equal(testDevice, device.ID, "should have device id")
|
||||||
|
suite.Equal(userID, device.UserID, "should have user id")
|
||||||
|
suite.Equal(deviceName, device.DeviceName, "should have device name")
|
||||||
|
}
|
||||||
|
|
||||||
|
// ACTIVITY - TODO:
|
||||||
|
// - (q *Queries) AddActivity
|
||||||
|
// - (q *Queries) GetActivity
|
||||||
|
// - (q *Queries) GetLastActivity
|
||||||
|
func (suite *DatabaseTestSuite) TestActivity() {
|
||||||
// Validate Exists
|
// Validate Exists
|
||||||
existsRows, err := dt.dbm.Queries.GetActivity(dt.dbm.Ctx, GetActivityParams{
|
existsRows, err := suite.dbm.Queries.GetActivity(context.Background(), GetActivityParams{
|
||||||
UserID: userID,
|
UserID: userID,
|
||||||
Offset: 0,
|
Offset: 0,
|
||||||
Limit: 50,
|
Limit: 50,
|
||||||
})
|
})
|
||||||
|
|
||||||
if err != nil {
|
suite.Nil(err, "should have nil err for get activity")
|
||||||
t.Fatalf(`Expected: []GetActivityRow, Got: %v, Error: %v`, existsRows, err)
|
suite.Len(existsRows, 10, "should have correct number of rows get activity")
|
||||||
}
|
|
||||||
|
|
||||||
if len(existsRows) != 10 {
|
|
||||||
t.Fatalf(`Expected: %v, Got: %v`, 10, len(existsRows))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validate Doesn't Exist
|
// Validate Doesn't Exist
|
||||||
doesntExistsRows, err := dt.dbm.Queries.GetActivity(dt.dbm.Ctx, GetActivityParams{
|
doesntExistsRows, err := suite.dbm.Queries.GetActivity(context.Background(), GetActivityParams{
|
||||||
UserID: userID,
|
UserID: userID,
|
||||||
DocumentID: "unknownDoc",
|
DocumentID: "unknownDoc",
|
||||||
DocFilter: true,
|
DocFilter: true,
|
||||||
@@ -169,43 +142,30 @@ func (dt *databaseTest) TestActivity() {
|
|||||||
Limit: 50,
|
Limit: 50,
|
||||||
})
|
})
|
||||||
|
|
||||||
if err != nil {
|
suite.Nil(err, "should have nil err for get activity")
|
||||||
t.Fatalf(`Expected: []GetActivityRow, Got: %v, Error: %v`, doesntExistsRows, err)
|
suite.Len(doesntExistsRows, 0, "should have no rows")
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(doesntExistsRows) != 0 {
|
// MISC - TODO:
|
||||||
t.Fatalf(`Expected: %v, Got: %v`, 0, len(doesntExistsRows))
|
// - (q *Queries) AddMetadata
|
||||||
}
|
// - (q *Queries) GetDailyReadStats
|
||||||
})
|
// - (q *Queries) GetDatabaseInfo
|
||||||
}
|
// - (q *Queries) UpdateSettings
|
||||||
|
func (suite *DatabaseTestSuite) TestGetDailyReadStats() {
|
||||||
|
readStats, err := suite.dbm.Queries.GetDailyReadStats(context.Background(), userID)
|
||||||
|
|
||||||
func (dt *databaseTest) TestDailyReadStats() {
|
suite.Nil(err, "should have nil err")
|
||||||
dt.Run("DailyReadStats", func(t *testing.T) {
|
suite.Len(readStats, 30, "should have length of 30")
|
||||||
readStats, err := dt.dbm.Queries.GetDailyReadStats(dt.dbm.Ctx, userID)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf(`Expected: []GetDailyReadStatsRow, Got: %v, Error: %v`, readStats, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validate 30 Days Stats
|
|
||||||
if len(readStats) != 30 {
|
|
||||||
t.Fatalf(`Expected: %v, Got: %v`, 30, len(readStats))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validate 1 Minute / Day - Last 10 Days
|
// Validate 1 Minute / Day - Last 10 Days
|
||||||
for i := 0; i < 10; i++ {
|
for i := 0; i < 10; i++ {
|
||||||
stat := readStats[i]
|
stat := readStats[i]
|
||||||
if stat.MinutesRead != 1 {
|
suite.Equal(int64(1), stat.MinutesRead, "should have one minute read")
|
||||||
t.Fatalf(`Day: %v, Expected: %v, Got: %v`, stat.Date, 1, stat.MinutesRead)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Validate 0 Minute / Day - Remaining 20 Days
|
// Validate 0 Minute / Day - Remaining 20 Days
|
||||||
for i := 10; i < 30; i++ {
|
for i := 10; i < 30; i++ {
|
||||||
stat := readStats[i]
|
stat := readStats[i]
|
||||||
if stat.MinutesRead != 0 {
|
suite.Equal(int64(0), stat.MinutesRead, "should have zero minutes read")
|
||||||
t.Fatalf(`Day: %v, Expected: %v, Got: %v`, stat.Date, 0, stat.MinutesRead)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|||||||
89
database/migrations/20240128012356_user_auth_hash.go
Normal file
89
database/migrations/20240128012356_user_auth_hash.go
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
package migrations
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/pressly/goose/v3"
|
||||||
|
"reichard.io/antholume/utils"
|
||||||
|
)
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
goose.AddMigrationContext(upUserAuthHash, downUserAuthHash)
|
||||||
|
}
|
||||||
|
|
||||||
|
func upUserAuthHash(ctx context.Context, tx *sql.Tx) error {
|
||||||
|
// Determine if we have a new DB or not
|
||||||
|
isNew := ctx.Value("isNew").(bool)
|
||||||
|
if isNew {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Copy table & create column
|
||||||
|
_, err := tx.Exec(`
|
||||||
|
-- Create Copy Table
|
||||||
|
CREATE TABLE temp_users AS SELECT * FROM users;
|
||||||
|
ALTER TABLE temp_users ADD COLUMN auth_hash TEXT;
|
||||||
|
|
||||||
|
-- Update Schema
|
||||||
|
DELETE FROM users;
|
||||||
|
ALTER TABLE users ADD COLUMN auth_hash TEXT NOT NULL;
|
||||||
|
`)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get current users
|
||||||
|
rows, err := tx.Query("SELECT id FROM temp_users")
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Query existing users
|
||||||
|
var users []string
|
||||||
|
for rows.Next() {
|
||||||
|
var user string
|
||||||
|
if err := rows.Scan(&user); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
users = append(users, user)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create auth hash per user
|
||||||
|
for _, user := range users {
|
||||||
|
rawAuthHash, err := utils.GenerateToken(64)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
authHash := fmt.Sprintf("%x", rawAuthHash)
|
||||||
|
_, err = tx.Exec("UPDATE temp_users SET auth_hash = ? WHERE id = ?", authHash, user)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Copy from temp to true table
|
||||||
|
_, err = tx.Exec(`
|
||||||
|
-- Copy Into New
|
||||||
|
INSERT INTO users SELECT * FROM temp_users;
|
||||||
|
|
||||||
|
-- Drop Temp Table
|
||||||
|
DROP TABLE temp_users;
|
||||||
|
`)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func downUserAuthHash(ctx context.Context, tx *sql.Tx) error {
|
||||||
|
// Drop column
|
||||||
|
_, err := tx.Exec("ALTER users DROP COLUMN auth_hash")
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
58
database/migrations/20240311121111_user_timezone.go
Normal file
58
database/migrations/20240311121111_user_timezone.go
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
package migrations
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
|
||||||
|
"github.com/pressly/goose/v3"
|
||||||
|
)
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
goose.AddMigrationContext(upUserTimezone, downUserTimezone)
|
||||||
|
}
|
||||||
|
|
||||||
|
func upUserTimezone(ctx context.Context, tx *sql.Tx) error {
|
||||||
|
// Determine if we have a new DB or not
|
||||||
|
isNew := ctx.Value("isNew").(bool)
|
||||||
|
if isNew {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Copy table & create column
|
||||||
|
_, err := tx.Exec(`
|
||||||
|
-- Copy Table
|
||||||
|
CREATE TABLE temp_users AS SELECT * FROM users;
|
||||||
|
ALTER TABLE temp_users DROP COLUMN time_offset;
|
||||||
|
ALTER TABLE temp_users ADD COLUMN timezone TEXT;
|
||||||
|
UPDATE temp_users SET timezone = 'Europe/London';
|
||||||
|
|
||||||
|
-- Clean Table
|
||||||
|
DELETE FROM users;
|
||||||
|
ALTER TABLE users DROP COLUMN time_offset;
|
||||||
|
ALTER TABLE users ADD COLUMN timezone TEXT NOT NULL DEFAULT 'Europe/London';
|
||||||
|
|
||||||
|
-- Copy Temp Table -> Clean Table
|
||||||
|
INSERT INTO users SELECT * FROM temp_users;
|
||||||
|
|
||||||
|
-- Drop Temp Table
|
||||||
|
DROP TABLE temp_users;
|
||||||
|
`)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func downUserTimezone(ctx context.Context, tx *sql.Tx) error {
|
||||||
|
// Update column name & value
|
||||||
|
_, err := tx.Exec(`
|
||||||
|
ALTER TABLE users RENAME COLUMN timezone TO time_offset;
|
||||||
|
UPDATE users SET time_offset = '0 hours';
|
||||||
|
`)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
38
database/migrations/20240510123707_import_basepath.go
Normal file
38
database/migrations/20240510123707_import_basepath.go
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
package migrations
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
|
||||||
|
"github.com/pressly/goose/v3"
|
||||||
|
)
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
goose.AddMigrationContext(upImportBasepath, downImportBasepath)
|
||||||
|
}
|
||||||
|
|
||||||
|
func upImportBasepath(ctx context.Context, tx *sql.Tx) error {
|
||||||
|
// Determine if we have a new DB or not
|
||||||
|
isNew := ctx.Value("isNew").(bool)
|
||||||
|
if isNew {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add basepath column
|
||||||
|
_, err := tx.Exec(`ALTER TABLE documents ADD COLUMN basepath TEXT;`)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// This code is executed when the migration is applied.
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func downImportBasepath(ctx context.Context, tx *sql.Tx) error {
|
||||||
|
// Drop basepath column
|
||||||
|
_, err := tx.Exec("ALTER documents DROP COLUMN basepath;")
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
9
database/migrations/README.md
Normal file
9
database/migrations/README.md
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
# DB Migrations
|
||||||
|
|
||||||
|
```bash
|
||||||
|
goose create migration_name
|
||||||
|
```
|
||||||
|
|
||||||
|
## Note
|
||||||
|
|
||||||
|
Since we update both the `schema.sql`, as well as the migration files, when we create a new DB it will inherently be up-to-date. We don't want to run the migrations if it's already up-to-date. Instead each migration checks if we have a new DB (via a value passed into the context), and if we do we simply return.
|
||||||
@@ -1,13 +1,9 @@
|
|||||||
// Code generated by sqlc. DO NOT EDIT.
|
// Code generated by sqlc. DO NOT EDIT.
|
||||||
// versions:
|
// versions:
|
||||||
// sqlc v1.21.0
|
// sqlc v1.29.0
|
||||||
|
|
||||||
package database
|
package database
|
||||||
|
|
||||||
import (
|
|
||||||
"database/sql"
|
|
||||||
)
|
|
||||||
|
|
||||||
type Activity struct {
|
type Activity struct {
|
||||||
ID int64 `json:"id"`
|
ID int64 `json:"id"`
|
||||||
UserID string `json:"user_id"`
|
UserID string `json:"user_id"`
|
||||||
@@ -32,6 +28,7 @@ type Device struct {
|
|||||||
type Document struct {
|
type Document struct {
|
||||||
ID string `json:"id"`
|
ID string `json:"id"`
|
||||||
Md5 *string `json:"md5"`
|
Md5 *string `json:"md5"`
|
||||||
|
Basepath *string `json:"basepath"`
|
||||||
Filepath *string `json:"filepath"`
|
Filepath *string `json:"filepath"`
|
||||||
Coverfile *string `json:"coverfile"`
|
Coverfile *string `json:"coverfile"`
|
||||||
Title *string `json:"title"`
|
Title *string `json:"title"`
|
||||||
@@ -63,15 +60,25 @@ type DocumentProgress struct {
|
|||||||
type DocumentUserStatistic struct {
|
type DocumentUserStatistic struct {
|
||||||
DocumentID string `json:"document_id"`
|
DocumentID string `json:"document_id"`
|
||||||
UserID string `json:"user_id"`
|
UserID string `json:"user_id"`
|
||||||
LastRead string `json:"last_read"`
|
|
||||||
TotalTimeSeconds int64 `json:"total_time_seconds"`
|
|
||||||
ReadPercentage float64 `json:"read_percentage"`
|
|
||||||
Percentage float64 `json:"percentage"`
|
Percentage float64 `json:"percentage"`
|
||||||
WordsRead int64 `json:"words_read"`
|
LastRead string `json:"last_read"`
|
||||||
Wpm float64 `json:"wpm"`
|
LastSeen string `json:"last_seen"`
|
||||||
|
ReadPercentage float64 `json:"read_percentage"`
|
||||||
|
TotalTimeSeconds int64 `json:"total_time_seconds"`
|
||||||
|
TotalWordsRead int64 `json:"total_words_read"`
|
||||||
|
TotalWpm float64 `json:"total_wpm"`
|
||||||
|
YearlyTimeSeconds int64 `json:"yearly_time_seconds"`
|
||||||
|
YearlyWordsRead int64 `json:"yearly_words_read"`
|
||||||
|
YearlyWpm float64 `json:"yearly_wpm"`
|
||||||
|
MonthlyTimeSeconds int64 `json:"monthly_time_seconds"`
|
||||||
|
MonthlyWordsRead int64 `json:"monthly_words_read"`
|
||||||
|
MonthlyWpm float64 `json:"monthly_wpm"`
|
||||||
|
WeeklyTimeSeconds int64 `json:"weekly_time_seconds"`
|
||||||
|
WeeklyWordsRead int64 `json:"weekly_words_read"`
|
||||||
|
WeeklyWpm float64 `json:"weekly_wpm"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type Metadatum struct {
|
type Metadata struct {
|
||||||
ID int64 `json:"id"`
|
ID int64 `json:"id"`
|
||||||
DocumentID string `json:"document_id"`
|
DocumentID string `json:"document_id"`
|
||||||
Title *string `json:"title"`
|
Title *string `json:"title"`
|
||||||
@@ -84,11 +91,19 @@ type Metadatum struct {
|
|||||||
CreatedAt string `json:"created_at"`
|
CreatedAt string `json:"created_at"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type Setting struct {
|
||||||
|
ID int64 `json:"id"`
|
||||||
|
Name string `json:"name"`
|
||||||
|
Value string `json:"value"`
|
||||||
|
CreatedAt string `json:"created_at"`
|
||||||
|
}
|
||||||
|
|
||||||
type User struct {
|
type User struct {
|
||||||
ID string `json:"id"`
|
ID string `json:"id"`
|
||||||
Pass *string `json:"-"`
|
Pass *string `json:"-"`
|
||||||
|
AuthHash *string `json:"auth_hash"`
|
||||||
Admin bool `json:"-"`
|
Admin bool `json:"-"`
|
||||||
TimeOffset *string `json:"time_offset"`
|
Timezone *string `json:"timezone"`
|
||||||
CreatedAt string `json:"created_at"`
|
CreatedAt string `json:"created_at"`
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -101,26 +116,8 @@ type UserStreak struct {
|
|||||||
CurrentStreak int64 `json:"current_streak"`
|
CurrentStreak int64 `json:"current_streak"`
|
||||||
CurrentStreakStartDate string `json:"current_streak_start_date"`
|
CurrentStreakStartDate string `json:"current_streak_start_date"`
|
||||||
CurrentStreakEndDate string `json:"current_streak_end_date"`
|
CurrentStreakEndDate string `json:"current_streak_end_date"`
|
||||||
}
|
LastTimezone string `json:"last_timezone"`
|
||||||
|
LastSeen string `json:"last_seen"`
|
||||||
type ViewDocumentUserStatistic struct {
|
LastRecord string `json:"last_record"`
|
||||||
DocumentID string `json:"document_id"`
|
LastCalculated string `json:"last_calculated"`
|
||||||
UserID string `json:"user_id"`
|
|
||||||
LastRead interface{} `json:"last_read"`
|
|
||||||
TotalTimeSeconds sql.NullFloat64 `json:"total_time_seconds"`
|
|
||||||
ReadPercentage sql.NullFloat64 `json:"read_percentage"`
|
|
||||||
Percentage float64 `json:"percentage"`
|
|
||||||
WordsRead interface{} `json:"words_read"`
|
|
||||||
Wpm int64 `json:"wpm"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type ViewUserStreak struct {
|
|
||||||
UserID string `json:"user_id"`
|
|
||||||
Window string `json:"window"`
|
|
||||||
MaxStreak interface{} `json:"max_streak"`
|
|
||||||
MaxStreakStartDate interface{} `json:"max_streak_start_date"`
|
|
||||||
MaxStreakEndDate interface{} `json:"max_streak_end_date"`
|
|
||||||
CurrentStreak interface{} `json:"current_streak"`
|
|
||||||
CurrentStreakStartDate interface{} `json:"current_streak_start_date"`
|
|
||||||
CurrentStreakEndDate interface{} `json:"current_streak_end_date"`
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -26,10 +26,13 @@ VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
|||||||
RETURNING *;
|
RETURNING *;
|
||||||
|
|
||||||
-- name: CreateUser :execrows
|
-- name: CreateUser :execrows
|
||||||
INSERT INTO users (id, pass)
|
INSERT INTO users (id, pass, auth_hash, admin)
|
||||||
VALUES (?, ?)
|
VALUES (?, ?, ?, ?)
|
||||||
ON CONFLICT DO NOTHING;
|
ON CONFLICT DO NOTHING;
|
||||||
|
|
||||||
|
-- name: DeleteUser :execrows
|
||||||
|
DELETE FROM users WHERE id = $id;
|
||||||
|
|
||||||
-- name: DeleteDocument :execrows
|
-- name: DeleteDocument :execrows
|
||||||
UPDATE documents
|
UPDATE documents
|
||||||
SET
|
SET
|
||||||
@@ -64,7 +67,7 @@ WITH filtered_activity AS (
|
|||||||
SELECT
|
SELECT
|
||||||
document_id,
|
document_id,
|
||||||
device_id,
|
device_id,
|
||||||
CAST(STRFTIME('%Y-%m-%d %H:%M:%S', activity.start_time, users.time_offset) AS TEXT) AS start_time,
|
LOCAL_TIME(activity.start_time, users.timezone) AS start_time,
|
||||||
title,
|
title,
|
||||||
author,
|
author,
|
||||||
duration,
|
duration,
|
||||||
@@ -77,7 +80,7 @@ LEFT JOIN users ON users.id = activity.user_id;
|
|||||||
|
|
||||||
-- name: GetDailyReadStats :many
|
-- name: GetDailyReadStats :many
|
||||||
WITH RECURSIVE last_30_days AS (
|
WITH RECURSIVE last_30_days AS (
|
||||||
SELECT DATE('now', time_offset) AS date
|
SELECT LOCAL_DATE(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone) AS date
|
||||||
FROM users WHERE users.id = $user_id
|
FROM users WHERE users.id = $user_id
|
||||||
UNION ALL
|
UNION ALL
|
||||||
SELECT DATE(date, '-1 days')
|
SELECT DATE(date, '-1 days')
|
||||||
@@ -96,11 +99,10 @@ filtered_activity AS (
|
|||||||
activity_days AS (
|
activity_days AS (
|
||||||
SELECT
|
SELECT
|
||||||
SUM(duration) AS seconds_read,
|
SUM(duration) AS seconds_read,
|
||||||
DATE(start_time, time_offset) AS day
|
LOCAL_DATE(start_time, timezone) AS day
|
||||||
FROM filtered_activity AS activity
|
FROM filtered_activity AS activity
|
||||||
LEFT JOIN users ON users.id = activity.user_id
|
LEFT JOIN users ON users.id = activity.user_id
|
||||||
GROUP BY day
|
GROUP BY day
|
||||||
LIMIT 30
|
|
||||||
)
|
)
|
||||||
SELECT
|
SELECT
|
||||||
CAST(date AS TEXT),
|
CAST(date AS TEXT),
|
||||||
@@ -136,8 +138,8 @@ WHERE id = $device_id LIMIT 1;
|
|||||||
SELECT
|
SELECT
|
||||||
devices.id,
|
devices.id,
|
||||||
devices.device_name,
|
devices.device_name,
|
||||||
CAST(STRFTIME('%Y-%m-%d %H:%M:%S', devices.created_at, users.time_offset) AS TEXT) AS created_at,
|
LOCAL_TIME(devices.created_at, users.timezone) AS created_at,
|
||||||
CAST(STRFTIME('%Y-%m-%d %H:%M:%S', devices.last_synced, users.time_offset) AS TEXT) AS last_synced
|
LOCAL_TIME(devices.last_synced, users.timezone) AS last_synced
|
||||||
FROM devices
|
FROM devices
|
||||||
JOIN users ON users.id = devices.user_id
|
JOIN users ON users.id = devices.user_id
|
||||||
WHERE users.id = $user_id
|
WHERE users.id = $user_id
|
||||||
@@ -147,40 +149,18 @@ ORDER BY devices.last_synced DESC;
|
|||||||
SELECT * FROM documents
|
SELECT * FROM documents
|
||||||
WHERE id = $document_id LIMIT 1;
|
WHERE id = $document_id LIMIT 1;
|
||||||
|
|
||||||
-- name: GetDocumentWithStats :one
|
-- name: GetDocumentProgress :one
|
||||||
SELECT
|
SELECT
|
||||||
docs.id,
|
document_progress.*,
|
||||||
docs.title,
|
devices.device_name
|
||||||
docs.author,
|
FROM document_progress
|
||||||
docs.description,
|
JOIN devices ON document_progress.device_id = devices.id
|
||||||
docs.isbn10,
|
WHERE
|
||||||
docs.isbn13,
|
document_progress.user_id = $user_id
|
||||||
docs.filepath,
|
AND document_progress.document_id = $document_id
|
||||||
docs.words,
|
ORDER BY
|
||||||
|
document_progress.created_at
|
||||||
CAST(COALESCE(dus.wpm, 0.0) AS INTEGER) AS wpm,
|
DESC
|
||||||
COALESCE(dus.read_percentage, 0) AS read_percentage,
|
|
||||||
COALESCE(dus.total_time_seconds, 0) AS total_time_seconds,
|
|
||||||
STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01"), users.time_offset)
|
|
||||||
AS last_read,
|
|
||||||
ROUND(CAST(CASE
|
|
||||||
WHEN dus.percentage IS NULL THEN 0.0
|
|
||||||
WHEN (dus.percentage * 100.0) > 97.0 THEN 100.0
|
|
||||||
ELSE dus.percentage * 100.0
|
|
||||||
END AS REAL), 2) AS percentage,
|
|
||||||
CAST(CASE
|
|
||||||
WHEN dus.total_time_seconds IS NULL THEN 0.0
|
|
||||||
ELSE
|
|
||||||
CAST(dus.total_time_seconds AS REAL)
|
|
||||||
/ (dus.read_percentage * 100.0)
|
|
||||||
END AS INTEGER) AS seconds_per_percent
|
|
||||||
FROM documents AS docs
|
|
||||||
LEFT JOIN users ON users.id = $user_id
|
|
||||||
LEFT JOIN
|
|
||||||
document_user_statistics AS dus
|
|
||||||
ON dus.document_id = docs.id AND dus.user_id = $user_id
|
|
||||||
WHERE users.id = $user_id
|
|
||||||
AND docs.id = $document_id
|
|
||||||
LIMIT 1;
|
LIMIT 1;
|
||||||
|
|
||||||
-- name: GetDocuments :many
|
-- name: GetDocuments :many
|
||||||
@@ -210,36 +190,35 @@ SELECT
|
|||||||
docs.filepath,
|
docs.filepath,
|
||||||
docs.words,
|
docs.words,
|
||||||
|
|
||||||
CAST(COALESCE(dus.wpm, 0.0) AS INTEGER) AS wpm,
|
CAST(COALESCE(dus.total_wpm, 0.0) AS INTEGER) AS wpm,
|
||||||
COALESCE(dus.read_percentage, 0) AS read_percentage,
|
COALESCE(dus.read_percentage, 0) AS read_percentage,
|
||||||
COALESCE(dus.total_time_seconds, 0) AS total_time_seconds,
|
COALESCE(dus.total_time_seconds, 0) AS total_time_seconds,
|
||||||
STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01"), users.time_offset)
|
STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(COALESCE(dus.last_read, STRFTIME('%Y-%m-%dT%H:%M:%SZ', 0, 'unixepoch')), users.timezone))
|
||||||
AS last_read,
|
AS last_read,
|
||||||
ROUND(CAST(CASE
|
ROUND(CAST(CASE
|
||||||
WHEN dus.percentage IS NULL THEN 0.0
|
WHEN dus.percentage IS NULL THEN 0.0
|
||||||
WHEN (dus.percentage * 100.0) > 97.0 THEN 100.0
|
WHEN (dus.percentage * 100.0) > 97.0 THEN 100.0
|
||||||
ELSE dus.percentage * 100.0
|
ELSE dus.percentage * 100.0
|
||||||
END AS REAL), 2) AS percentage,
|
END AS REAL), 2) AS percentage,
|
||||||
|
CAST(CASE
|
||||||
CASE
|
|
||||||
WHEN dus.total_time_seconds IS NULL THEN 0.0
|
WHEN dus.total_time_seconds IS NULL THEN 0.0
|
||||||
ELSE
|
ELSE
|
||||||
ROUND(
|
|
||||||
CAST(dus.total_time_seconds AS REAL)
|
CAST(dus.total_time_seconds AS REAL)
|
||||||
/ (dus.read_percentage * 100.0)
|
/ (dus.read_percentage * 100.0)
|
||||||
)
|
END AS INTEGER) AS seconds_per_percent
|
||||||
END AS seconds_per_percent
|
|
||||||
FROM documents AS docs
|
FROM documents AS docs
|
||||||
LEFT JOIN users ON users.id = $user_id
|
LEFT JOIN users ON users.id = $user_id
|
||||||
LEFT JOIN
|
LEFT JOIN
|
||||||
document_user_statistics AS dus
|
document_user_statistics AS dus
|
||||||
ON dus.document_id = docs.id AND dus.user_id = $user_id
|
ON dus.document_id = docs.id AND dus.user_id = $user_id
|
||||||
WHERE
|
WHERE
|
||||||
docs.deleted = false AND (
|
(docs.id = sqlc.narg('id') OR $id IS NULL)
|
||||||
$query IS NULL OR (
|
AND (docs.deleted = sqlc.narg(deleted) OR $deleted IS NULL)
|
||||||
docs.title LIKE $query OR
|
AND (
|
||||||
|
(
|
||||||
|
docs.title LIKE sqlc.narg('query') OR
|
||||||
docs.author LIKE $query
|
docs.author LIKE $query
|
||||||
)
|
) OR $query IS NULL
|
||||||
)
|
)
|
||||||
ORDER BY dus.last_read DESC, docs.created_at DESC
|
ORDER BY dus.last_read DESC, docs.created_at DESC
|
||||||
LIMIT $limit
|
LIMIT $limit
|
||||||
@@ -259,19 +238,30 @@ WHERE
|
|||||||
AND documents.deleted = false
|
AND documents.deleted = false
|
||||||
AND documents.id NOT IN (sqlc.slice('document_ids'));
|
AND documents.id NOT IN (sqlc.slice('document_ids'));
|
||||||
|
|
||||||
-- name: GetProgress :one
|
-- name: GetProgress :many
|
||||||
SELECT
|
SELECT
|
||||||
document_progress.*,
|
documents.title,
|
||||||
devices.device_name
|
documents.author,
|
||||||
FROM document_progress
|
devices.device_name,
|
||||||
JOIN devices ON document_progress.device_id = devices.id
|
ROUND(CAST(progress.percentage AS REAL) * 100, 2) AS percentage,
|
||||||
|
progress.document_id,
|
||||||
|
progress.user_id,
|
||||||
|
LOCAL_TIME(progress.created_at, users.timezone) AS created_at
|
||||||
|
FROM document_progress AS progress
|
||||||
|
LEFT JOIN users ON progress.user_id = users.id
|
||||||
|
LEFT JOIN devices ON progress.device_id = devices.id
|
||||||
|
LEFT JOIN documents ON progress.document_id = documents.id
|
||||||
WHERE
|
WHERE
|
||||||
document_progress.user_id = $user_id
|
progress.user_id = $user_id
|
||||||
AND document_progress.document_id = $document_id
|
AND (
|
||||||
ORDER BY
|
(
|
||||||
document_progress.created_at
|
CAST($doc_filter AS BOOLEAN) = TRUE
|
||||||
DESC
|
AND document_id = $document_id
|
||||||
LIMIT 1;
|
) OR $doc_filter = FALSE
|
||||||
|
)
|
||||||
|
ORDER BY created_at DESC
|
||||||
|
LIMIT $limit
|
||||||
|
OFFSET $offset;
|
||||||
|
|
||||||
-- name: GetUser :one
|
-- name: GetUser :one
|
||||||
SELECT * FROM users
|
SELECT * FROM users
|
||||||
@@ -281,17 +271,37 @@ WHERE id = $user_id LIMIT 1;
|
|||||||
SELECT * FROM user_streaks
|
SELECT * FROM user_streaks
|
||||||
WHERE user_id = $user_id;
|
WHERE user_id = $user_id;
|
||||||
|
|
||||||
-- name: GetWPMLeaderboard :many
|
-- name: GetUsers :many
|
||||||
|
SELECT * FROM users;
|
||||||
|
|
||||||
|
-- name: GetUserStatistics :many
|
||||||
SELECT
|
SELECT
|
||||||
user_id,
|
user_id,
|
||||||
CAST(SUM(words_read) AS INTEGER) AS total_words_read,
|
|
||||||
|
CAST(SUM(total_words_read) AS INTEGER) AS total_words_read,
|
||||||
CAST(SUM(total_time_seconds) AS INTEGER) AS total_seconds,
|
CAST(SUM(total_time_seconds) AS INTEGER) AS total_seconds,
|
||||||
ROUND(CAST(SUM(words_read) AS REAL) / (SUM(total_time_seconds) / 60.0), 2)
|
ROUND(COALESCE(CAST(SUM(total_words_read) AS REAL) / (SUM(total_time_seconds) / 60.0), 0.0), 2)
|
||||||
AS wpm
|
AS total_wpm,
|
||||||
|
|
||||||
|
CAST(SUM(yearly_words_read) AS INTEGER) AS yearly_words_read,
|
||||||
|
CAST(SUM(yearly_time_seconds) AS INTEGER) AS yearly_seconds,
|
||||||
|
ROUND(COALESCE(CAST(SUM(yearly_words_read) AS REAL) / (SUM(yearly_time_seconds) / 60.0), 0.0), 2)
|
||||||
|
AS yearly_wpm,
|
||||||
|
|
||||||
|
CAST(SUM(monthly_words_read) AS INTEGER) AS monthly_words_read,
|
||||||
|
CAST(SUM(monthly_time_seconds) AS INTEGER) AS monthly_seconds,
|
||||||
|
ROUND(COALESCE(CAST(SUM(monthly_words_read) AS REAL) / (SUM(monthly_time_seconds) / 60.0), 0.0), 2)
|
||||||
|
AS monthly_wpm,
|
||||||
|
|
||||||
|
CAST(SUM(weekly_words_read) AS INTEGER) AS weekly_words_read,
|
||||||
|
CAST(SUM(weekly_time_seconds) AS INTEGER) AS weekly_seconds,
|
||||||
|
ROUND(COALESCE(CAST(SUM(weekly_words_read) AS REAL) / (SUM(weekly_time_seconds) / 60.0), 0.0), 2)
|
||||||
|
AS weekly_wpm
|
||||||
|
|
||||||
FROM document_user_statistics
|
FROM document_user_statistics
|
||||||
WHERE words_read > 0
|
WHERE total_words_read > 0
|
||||||
GROUP BY user_id
|
GROUP BY user_id
|
||||||
ORDER BY wpm DESC;
|
ORDER BY total_wpm DESC;
|
||||||
|
|
||||||
-- name: GetWantedDocuments :many
|
-- name: GetWantedDocuments :many
|
||||||
SELECT
|
SELECT
|
||||||
@@ -324,10 +334,21 @@ RETURNING *;
|
|||||||
UPDATE users
|
UPDATE users
|
||||||
SET
|
SET
|
||||||
pass = COALESCE($password, pass),
|
pass = COALESCE($password, pass),
|
||||||
time_offset = COALESCE($time_offset, time_offset)
|
auth_hash = COALESCE($auth_hash, auth_hash),
|
||||||
|
timezone = COALESCE($timezone, timezone),
|
||||||
|
admin = COALESCE($admin, admin)
|
||||||
WHERE id = $user_id
|
WHERE id = $user_id
|
||||||
RETURNING *;
|
RETURNING *;
|
||||||
|
|
||||||
|
-- name: UpdateSettings :one
|
||||||
|
INSERT INTO settings (name, value)
|
||||||
|
VALUES (?, ?)
|
||||||
|
ON CONFLICT DO UPDATE
|
||||||
|
SET
|
||||||
|
name = COALESCE(excluded.name, name),
|
||||||
|
value = COALESCE(excluded.value, value)
|
||||||
|
RETURNING *;
|
||||||
|
|
||||||
-- name: UpsertDevice :one
|
-- name: UpsertDevice :one
|
||||||
INSERT INTO devices (id, user_id, last_synced, device_name)
|
INSERT INTO devices (id, user_id, last_synced, device_name)
|
||||||
VALUES (?, ?, ?, ?)
|
VALUES (?, ?, ?, ?)
|
||||||
@@ -341,6 +362,7 @@ RETURNING *;
|
|||||||
INSERT INTO documents (
|
INSERT INTO documents (
|
||||||
id,
|
id,
|
||||||
md5,
|
md5,
|
||||||
|
basepath,
|
||||||
filepath,
|
filepath,
|
||||||
coverfile,
|
coverfile,
|
||||||
title,
|
title,
|
||||||
@@ -355,10 +377,11 @@ INSERT INTO documents (
|
|||||||
isbn10,
|
isbn10,
|
||||||
isbn13
|
isbn13
|
||||||
)
|
)
|
||||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||||
ON CONFLICT DO UPDATE
|
ON CONFLICT DO UPDATE
|
||||||
SET
|
SET
|
||||||
md5 = COALESCE(excluded.md5, md5),
|
md5 = COALESCE(excluded.md5, md5),
|
||||||
|
basepath = COALESCE(excluded.basepath, basepath),
|
||||||
filepath = COALESCE(excluded.filepath, filepath),
|
filepath = COALESCE(excluded.filepath, filepath),
|
||||||
coverfile = COALESCE(excluded.coverfile, coverfile),
|
coverfile = COALESCE(excluded.coverfile, coverfile),
|
||||||
title = COALESCE(excluded.title, title),
|
title = COALESCE(excluded.title, title),
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
// Code generated by sqlc. DO NOT EDIT.
|
// Code generated by sqlc. DO NOT EDIT.
|
||||||
// versions:
|
// versions:
|
||||||
// sqlc v1.21.0
|
// sqlc v1.29.0
|
||||||
// source: query.sql
|
// source: query.sql
|
||||||
|
|
||||||
package database
|
package database
|
||||||
@@ -85,7 +85,7 @@ type AddMetadataParams struct {
|
|||||||
Isbn13 *string `json:"isbn13"`
|
Isbn13 *string `json:"isbn13"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (q *Queries) AddMetadata(ctx context.Context, arg AddMetadataParams) (Metadatum, error) {
|
func (q *Queries) AddMetadata(ctx context.Context, arg AddMetadataParams) (Metadata, error) {
|
||||||
row := q.db.QueryRowContext(ctx, addMetadata,
|
row := q.db.QueryRowContext(ctx, addMetadata,
|
||||||
arg.DocumentID,
|
arg.DocumentID,
|
||||||
arg.Title,
|
arg.Title,
|
||||||
@@ -96,7 +96,7 @@ func (q *Queries) AddMetadata(ctx context.Context, arg AddMetadataParams) (Metad
|
|||||||
arg.Isbn10,
|
arg.Isbn10,
|
||||||
arg.Isbn13,
|
arg.Isbn13,
|
||||||
)
|
)
|
||||||
var i Metadatum
|
var i Metadata
|
||||||
err := row.Scan(
|
err := row.Scan(
|
||||||
&i.ID,
|
&i.ID,
|
||||||
&i.DocumentID,
|
&i.DocumentID,
|
||||||
@@ -113,18 +113,25 @@ func (q *Queries) AddMetadata(ctx context.Context, arg AddMetadataParams) (Metad
|
|||||||
}
|
}
|
||||||
|
|
||||||
const createUser = `-- name: CreateUser :execrows
|
const createUser = `-- name: CreateUser :execrows
|
||||||
INSERT INTO users (id, pass)
|
INSERT INTO users (id, pass, auth_hash, admin)
|
||||||
VALUES (?, ?)
|
VALUES (?, ?, ?, ?)
|
||||||
ON CONFLICT DO NOTHING
|
ON CONFLICT DO NOTHING
|
||||||
`
|
`
|
||||||
|
|
||||||
type CreateUserParams struct {
|
type CreateUserParams struct {
|
||||||
ID string `json:"id"`
|
ID string `json:"id"`
|
||||||
Pass *string `json:"-"`
|
Pass *string `json:"-"`
|
||||||
|
AuthHash *string `json:"auth_hash"`
|
||||||
|
Admin bool `json:"-"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (q *Queries) CreateUser(ctx context.Context, arg CreateUserParams) (int64, error) {
|
func (q *Queries) CreateUser(ctx context.Context, arg CreateUserParams) (int64, error) {
|
||||||
result, err := q.db.ExecContext(ctx, createUser, arg.ID, arg.Pass)
|
result, err := q.db.ExecContext(ctx, createUser,
|
||||||
|
arg.ID,
|
||||||
|
arg.Pass,
|
||||||
|
arg.AuthHash,
|
||||||
|
arg.Admin,
|
||||||
|
)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return 0, err
|
return 0, err
|
||||||
}
|
}
|
||||||
@@ -146,6 +153,18 @@ func (q *Queries) DeleteDocument(ctx context.Context, id string) (int64, error)
|
|||||||
return result.RowsAffected()
|
return result.RowsAffected()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const deleteUser = `-- name: DeleteUser :execrows
|
||||||
|
DELETE FROM users WHERE id = ?1
|
||||||
|
`
|
||||||
|
|
||||||
|
func (q *Queries) DeleteUser(ctx context.Context, id string) (int64, error) {
|
||||||
|
result, err := q.db.ExecContext(ctx, deleteUser, id)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
return result.RowsAffected()
|
||||||
|
}
|
||||||
|
|
||||||
const getActivity = `-- name: GetActivity :many
|
const getActivity = `-- name: GetActivity :many
|
||||||
WITH filtered_activity AS (
|
WITH filtered_activity AS (
|
||||||
SELECT
|
SELECT
|
||||||
@@ -174,7 +193,7 @@ WITH filtered_activity AS (
|
|||||||
SELECT
|
SELECT
|
||||||
document_id,
|
document_id,
|
||||||
device_id,
|
device_id,
|
||||||
CAST(STRFTIME('%Y-%m-%d %H:%M:%S', activity.start_time, users.time_offset) AS TEXT) AS start_time,
|
LOCAL_TIME(activity.start_time, users.timezone) AS start_time,
|
||||||
title,
|
title,
|
||||||
author,
|
author,
|
||||||
duration,
|
duration,
|
||||||
@@ -197,7 +216,7 @@ type GetActivityParams struct {
|
|||||||
type GetActivityRow struct {
|
type GetActivityRow struct {
|
||||||
DocumentID string `json:"document_id"`
|
DocumentID string `json:"document_id"`
|
||||||
DeviceID string `json:"device_id"`
|
DeviceID string `json:"device_id"`
|
||||||
StartTime string `json:"start_time"`
|
StartTime interface{} `json:"start_time"`
|
||||||
Title *string `json:"title"`
|
Title *string `json:"title"`
|
||||||
Author *string `json:"author"`
|
Author *string `json:"author"`
|
||||||
Duration int64 `json:"duration"`
|
Duration int64 `json:"duration"`
|
||||||
@@ -247,7 +266,7 @@ func (q *Queries) GetActivity(ctx context.Context, arg GetActivityParams) ([]Get
|
|||||||
|
|
||||||
const getDailyReadStats = `-- name: GetDailyReadStats :many
|
const getDailyReadStats = `-- name: GetDailyReadStats :many
|
||||||
WITH RECURSIVE last_30_days AS (
|
WITH RECURSIVE last_30_days AS (
|
||||||
SELECT DATE('now', time_offset) AS date
|
SELECT LOCAL_DATE(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone) AS date
|
||||||
FROM users WHERE users.id = ?1
|
FROM users WHERE users.id = ?1
|
||||||
UNION ALL
|
UNION ALL
|
||||||
SELECT DATE(date, '-1 days')
|
SELECT DATE(date, '-1 days')
|
||||||
@@ -266,11 +285,10 @@ filtered_activity AS (
|
|||||||
activity_days AS (
|
activity_days AS (
|
||||||
SELECT
|
SELECT
|
||||||
SUM(duration) AS seconds_read,
|
SUM(duration) AS seconds_read,
|
||||||
DATE(start_time, time_offset) AS day
|
LOCAL_DATE(start_time, timezone) AS day
|
||||||
FROM filtered_activity AS activity
|
FROM filtered_activity AS activity
|
||||||
LEFT JOIN users ON users.id = activity.user_id
|
LEFT JOIN users ON users.id = activity.user_id
|
||||||
GROUP BY day
|
GROUP BY day
|
||||||
LIMIT 30
|
|
||||||
)
|
)
|
||||||
SELECT
|
SELECT
|
||||||
CAST(date AS TEXT),
|
CAST(date AS TEXT),
|
||||||
@@ -404,8 +422,8 @@ const getDevices = `-- name: GetDevices :many
|
|||||||
SELECT
|
SELECT
|
||||||
devices.id,
|
devices.id,
|
||||||
devices.device_name,
|
devices.device_name,
|
||||||
CAST(STRFTIME('%Y-%m-%d %H:%M:%S', devices.created_at, users.time_offset) AS TEXT) AS created_at,
|
LOCAL_TIME(devices.created_at, users.timezone) AS created_at,
|
||||||
CAST(STRFTIME('%Y-%m-%d %H:%M:%S', devices.last_synced, users.time_offset) AS TEXT) AS last_synced
|
LOCAL_TIME(devices.last_synced, users.timezone) AS last_synced
|
||||||
FROM devices
|
FROM devices
|
||||||
JOIN users ON users.id = devices.user_id
|
JOIN users ON users.id = devices.user_id
|
||||||
WHERE users.id = ?1
|
WHERE users.id = ?1
|
||||||
@@ -415,8 +433,8 @@ ORDER BY devices.last_synced DESC
|
|||||||
type GetDevicesRow struct {
|
type GetDevicesRow struct {
|
||||||
ID string `json:"id"`
|
ID string `json:"id"`
|
||||||
DeviceName string `json:"device_name"`
|
DeviceName string `json:"device_name"`
|
||||||
CreatedAt string `json:"created_at"`
|
CreatedAt interface{} `json:"created_at"`
|
||||||
LastSynced string `json:"last_synced"`
|
LastSynced interface{} `json:"last_synced"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (q *Queries) GetDevices(ctx context.Context, userID string) ([]GetDevicesRow, error) {
|
func (q *Queries) GetDevices(ctx context.Context, userID string) ([]GetDevicesRow, error) {
|
||||||
@@ -448,7 +466,7 @@ func (q *Queries) GetDevices(ctx context.Context, userID string) ([]GetDevicesRo
|
|||||||
}
|
}
|
||||||
|
|
||||||
const getDocument = `-- name: GetDocument :one
|
const getDocument = `-- name: GetDocument :one
|
||||||
SELECT id, md5, filepath, coverfile, title, author, series, series_index, lang, description, words, gbid, olid, isbn10, isbn13, synced, deleted, updated_at, created_at FROM documents
|
SELECT id, md5, basepath, filepath, coverfile, title, author, series, series_index, lang, description, words, gbid, olid, isbn10, isbn13, synced, deleted, updated_at, created_at FROM documents
|
||||||
WHERE id = ?1 LIMIT 1
|
WHERE id = ?1 LIMIT 1
|
||||||
`
|
`
|
||||||
|
|
||||||
@@ -458,6 +476,7 @@ func (q *Queries) GetDocument(ctx context.Context, documentID string) (Document,
|
|||||||
err := row.Scan(
|
err := row.Scan(
|
||||||
&i.ID,
|
&i.ID,
|
||||||
&i.Md5,
|
&i.Md5,
|
||||||
|
&i.Basepath,
|
||||||
&i.Filepath,
|
&i.Filepath,
|
||||||
&i.Coverfile,
|
&i.Coverfile,
|
||||||
&i.Title,
|
&i.Title,
|
||||||
@@ -479,89 +498,53 @@ func (q *Queries) GetDocument(ctx context.Context, documentID string) (Document,
|
|||||||
return i, err
|
return i, err
|
||||||
}
|
}
|
||||||
|
|
||||||
const getDocumentWithStats = `-- name: GetDocumentWithStats :one
|
const getDocumentProgress = `-- name: GetDocumentProgress :one
|
||||||
SELECT
|
SELECT
|
||||||
docs.id,
|
document_progress.user_id, document_progress.document_id, document_progress.device_id, document_progress.percentage, document_progress.progress, document_progress.created_at,
|
||||||
docs.title,
|
devices.device_name
|
||||||
docs.author,
|
FROM document_progress
|
||||||
docs.description,
|
JOIN devices ON document_progress.device_id = devices.id
|
||||||
docs.isbn10,
|
WHERE
|
||||||
docs.isbn13,
|
document_progress.user_id = ?1
|
||||||
docs.filepath,
|
AND document_progress.document_id = ?2
|
||||||
docs.words,
|
ORDER BY
|
||||||
|
document_progress.created_at
|
||||||
CAST(COALESCE(dus.wpm, 0.0) AS INTEGER) AS wpm,
|
DESC
|
||||||
COALESCE(dus.read_percentage, 0) AS read_percentage,
|
|
||||||
COALESCE(dus.total_time_seconds, 0) AS total_time_seconds,
|
|
||||||
STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01"), users.time_offset)
|
|
||||||
AS last_read,
|
|
||||||
ROUND(CAST(CASE
|
|
||||||
WHEN dus.percentage IS NULL THEN 0.0
|
|
||||||
WHEN (dus.percentage * 100.0) > 97.0 THEN 100.0
|
|
||||||
ELSE dus.percentage * 100.0
|
|
||||||
END AS REAL), 2) AS percentage,
|
|
||||||
CAST(CASE
|
|
||||||
WHEN dus.total_time_seconds IS NULL THEN 0.0
|
|
||||||
ELSE
|
|
||||||
CAST(dus.total_time_seconds AS REAL)
|
|
||||||
/ (dus.read_percentage * 100.0)
|
|
||||||
END AS INTEGER) AS seconds_per_percent
|
|
||||||
FROM documents AS docs
|
|
||||||
LEFT JOIN users ON users.id = ?1
|
|
||||||
LEFT JOIN
|
|
||||||
document_user_statistics AS dus
|
|
||||||
ON dus.document_id = docs.id AND dus.user_id = ?1
|
|
||||||
WHERE users.id = ?1
|
|
||||||
AND docs.id = ?2
|
|
||||||
LIMIT 1
|
LIMIT 1
|
||||||
`
|
`
|
||||||
|
|
||||||
type GetDocumentWithStatsParams struct {
|
type GetDocumentProgressParams struct {
|
||||||
UserID string `json:"user_id"`
|
UserID string `json:"user_id"`
|
||||||
DocumentID string `json:"document_id"`
|
DocumentID string `json:"document_id"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type GetDocumentWithStatsRow struct {
|
type GetDocumentProgressRow struct {
|
||||||
ID string `json:"id"`
|
UserID string `json:"user_id"`
|
||||||
Title *string `json:"title"`
|
DocumentID string `json:"document_id"`
|
||||||
Author *string `json:"author"`
|
DeviceID string `json:"device_id"`
|
||||||
Description *string `json:"description"`
|
|
||||||
Isbn10 *string `json:"isbn10"`
|
|
||||||
Isbn13 *string `json:"isbn13"`
|
|
||||||
Filepath *string `json:"filepath"`
|
|
||||||
Words *int64 `json:"words"`
|
|
||||||
Wpm int64 `json:"wpm"`
|
|
||||||
ReadPercentage float64 `json:"read_percentage"`
|
|
||||||
TotalTimeSeconds int64 `json:"total_time_seconds"`
|
|
||||||
LastRead interface{} `json:"last_read"`
|
|
||||||
Percentage float64 `json:"percentage"`
|
Percentage float64 `json:"percentage"`
|
||||||
SecondsPerPercent int64 `json:"seconds_per_percent"`
|
Progress string `json:"progress"`
|
||||||
|
CreatedAt string `json:"created_at"`
|
||||||
|
DeviceName string `json:"device_name"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (q *Queries) GetDocumentWithStats(ctx context.Context, arg GetDocumentWithStatsParams) (GetDocumentWithStatsRow, error) {
|
func (q *Queries) GetDocumentProgress(ctx context.Context, arg GetDocumentProgressParams) (GetDocumentProgressRow, error) {
|
||||||
row := q.db.QueryRowContext(ctx, getDocumentWithStats, arg.UserID, arg.DocumentID)
|
row := q.db.QueryRowContext(ctx, getDocumentProgress, arg.UserID, arg.DocumentID)
|
||||||
var i GetDocumentWithStatsRow
|
var i GetDocumentProgressRow
|
||||||
err := row.Scan(
|
err := row.Scan(
|
||||||
&i.ID,
|
&i.UserID,
|
||||||
&i.Title,
|
&i.DocumentID,
|
||||||
&i.Author,
|
&i.DeviceID,
|
||||||
&i.Description,
|
|
||||||
&i.Isbn10,
|
|
||||||
&i.Isbn13,
|
|
||||||
&i.Filepath,
|
|
||||||
&i.Words,
|
|
||||||
&i.Wpm,
|
|
||||||
&i.ReadPercentage,
|
|
||||||
&i.TotalTimeSeconds,
|
|
||||||
&i.LastRead,
|
|
||||||
&i.Percentage,
|
&i.Percentage,
|
||||||
&i.SecondsPerPercent,
|
&i.Progress,
|
||||||
|
&i.CreatedAt,
|
||||||
|
&i.DeviceName,
|
||||||
)
|
)
|
||||||
return i, err
|
return i, err
|
||||||
}
|
}
|
||||||
|
|
||||||
const getDocuments = `-- name: GetDocuments :many
|
const getDocuments = `-- name: GetDocuments :many
|
||||||
SELECT id, md5, filepath, coverfile, title, author, series, series_index, lang, description, words, gbid, olid, isbn10, isbn13, synced, deleted, updated_at, created_at FROM documents
|
SELECT id, md5, basepath, filepath, coverfile, title, author, series, series_index, lang, description, words, gbid, olid, isbn10, isbn13, synced, deleted, updated_at, created_at FROM documents
|
||||||
ORDER BY created_at DESC
|
ORDER BY created_at DESC
|
||||||
LIMIT ?2
|
LIMIT ?2
|
||||||
OFFSET ?1
|
OFFSET ?1
|
||||||
@@ -584,6 +567,7 @@ func (q *Queries) GetDocuments(ctx context.Context, arg GetDocumentsParams) ([]D
|
|||||||
if err := rows.Scan(
|
if err := rows.Scan(
|
||||||
&i.ID,
|
&i.ID,
|
||||||
&i.Md5,
|
&i.Md5,
|
||||||
|
&i.Basepath,
|
||||||
&i.Filepath,
|
&i.Filepath,
|
||||||
&i.Coverfile,
|
&i.Coverfile,
|
||||||
&i.Title,
|
&i.Title,
|
||||||
@@ -644,45 +628,46 @@ SELECT
|
|||||||
docs.filepath,
|
docs.filepath,
|
||||||
docs.words,
|
docs.words,
|
||||||
|
|
||||||
CAST(COALESCE(dus.wpm, 0.0) AS INTEGER) AS wpm,
|
CAST(COALESCE(dus.total_wpm, 0.0) AS INTEGER) AS wpm,
|
||||||
COALESCE(dus.read_percentage, 0) AS read_percentage,
|
COALESCE(dus.read_percentage, 0) AS read_percentage,
|
||||||
COALESCE(dus.total_time_seconds, 0) AS total_time_seconds,
|
COALESCE(dus.total_time_seconds, 0) AS total_time_seconds,
|
||||||
STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01"), users.time_offset)
|
STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(COALESCE(dus.last_read, STRFTIME('%Y-%m-%dT%H:%M:%SZ', 0, 'unixepoch')), users.timezone))
|
||||||
AS last_read,
|
AS last_read,
|
||||||
ROUND(CAST(CASE
|
ROUND(CAST(CASE
|
||||||
WHEN dus.percentage IS NULL THEN 0.0
|
WHEN dus.percentage IS NULL THEN 0.0
|
||||||
WHEN (dus.percentage * 100.0) > 97.0 THEN 100.0
|
WHEN (dus.percentage * 100.0) > 97.0 THEN 100.0
|
||||||
ELSE dus.percentage * 100.0
|
ELSE dus.percentage * 100.0
|
||||||
END AS REAL), 2) AS percentage,
|
END AS REAL), 2) AS percentage,
|
||||||
|
CAST(CASE
|
||||||
CASE
|
|
||||||
WHEN dus.total_time_seconds IS NULL THEN 0.0
|
WHEN dus.total_time_seconds IS NULL THEN 0.0
|
||||||
ELSE
|
ELSE
|
||||||
ROUND(
|
|
||||||
CAST(dus.total_time_seconds AS REAL)
|
CAST(dus.total_time_seconds AS REAL)
|
||||||
/ (dus.read_percentage * 100.0)
|
/ (dus.read_percentage * 100.0)
|
||||||
)
|
END AS INTEGER) AS seconds_per_percent
|
||||||
END AS seconds_per_percent
|
|
||||||
FROM documents AS docs
|
FROM documents AS docs
|
||||||
LEFT JOIN users ON users.id = ?1
|
LEFT JOIN users ON users.id = ?1
|
||||||
LEFT JOIN
|
LEFT JOIN
|
||||||
document_user_statistics AS dus
|
document_user_statistics AS dus
|
||||||
ON dus.document_id = docs.id AND dus.user_id = ?1
|
ON dus.document_id = docs.id AND dus.user_id = ?1
|
||||||
WHERE
|
WHERE
|
||||||
docs.deleted = false AND (
|
(docs.id = ?2 OR ?2 IS NULL)
|
||||||
?2 IS NULL OR (
|
AND (docs.deleted = ?3 OR ?3 IS NULL)
|
||||||
docs.title LIKE ?2 OR
|
AND (
|
||||||
docs.author LIKE ?2
|
(
|
||||||
)
|
docs.title LIKE ?4 OR
|
||||||
|
docs.author LIKE ?4
|
||||||
|
) OR ?4 IS NULL
|
||||||
)
|
)
|
||||||
ORDER BY dus.last_read DESC, docs.created_at DESC
|
ORDER BY dus.last_read DESC, docs.created_at DESC
|
||||||
LIMIT ?4
|
LIMIT ?6
|
||||||
OFFSET ?3
|
OFFSET ?5
|
||||||
`
|
`
|
||||||
|
|
||||||
type GetDocumentsWithStatsParams struct {
|
type GetDocumentsWithStatsParams struct {
|
||||||
UserID string `json:"user_id"`
|
UserID string `json:"user_id"`
|
||||||
Query interface{} `json:"query"`
|
ID *string `json:"id"`
|
||||||
|
Deleted *bool `json:"-"`
|
||||||
|
Query *string `json:"query"`
|
||||||
Offset int64 `json:"offset"`
|
Offset int64 `json:"offset"`
|
||||||
Limit int64 `json:"limit"`
|
Limit int64 `json:"limit"`
|
||||||
}
|
}
|
||||||
@@ -701,12 +686,14 @@ type GetDocumentsWithStatsRow struct {
|
|||||||
TotalTimeSeconds int64 `json:"total_time_seconds"`
|
TotalTimeSeconds int64 `json:"total_time_seconds"`
|
||||||
LastRead interface{} `json:"last_read"`
|
LastRead interface{} `json:"last_read"`
|
||||||
Percentage float64 `json:"percentage"`
|
Percentage float64 `json:"percentage"`
|
||||||
SecondsPerPercent interface{} `json:"seconds_per_percent"`
|
SecondsPerPercent int64 `json:"seconds_per_percent"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (q *Queries) GetDocumentsWithStats(ctx context.Context, arg GetDocumentsWithStatsParams) ([]GetDocumentsWithStatsRow, error) {
|
func (q *Queries) GetDocumentsWithStats(ctx context.Context, arg GetDocumentsWithStatsParams) ([]GetDocumentsWithStatsRow, error) {
|
||||||
rows, err := q.db.QueryContext(ctx, getDocumentsWithStats,
|
rows, err := q.db.QueryContext(ctx, getDocumentsWithStats,
|
||||||
arg.UserID,
|
arg.UserID,
|
||||||
|
arg.ID,
|
||||||
|
arg.Deleted,
|
||||||
arg.Query,
|
arg.Query,
|
||||||
arg.Offset,
|
arg.Offset,
|
||||||
arg.Limit,
|
arg.Limit,
|
||||||
@@ -768,7 +755,7 @@ func (q *Queries) GetLastActivity(ctx context.Context, arg GetLastActivityParams
|
|||||||
}
|
}
|
||||||
|
|
||||||
const getMissingDocuments = `-- name: GetMissingDocuments :many
|
const getMissingDocuments = `-- name: GetMissingDocuments :many
|
||||||
SELECT documents.id, documents.md5, documents.filepath, documents.coverfile, documents.title, documents.author, documents.series, documents.series_index, documents.lang, documents.description, documents.words, documents.gbid, documents.olid, documents.isbn10, documents.isbn13, documents.synced, documents.deleted, documents.updated_at, documents.created_at FROM documents
|
SELECT documents.id, documents.md5, documents.basepath, documents.filepath, documents.coverfile, documents.title, documents.author, documents.series, documents.series_index, documents.lang, documents.description, documents.words, documents.gbid, documents.olid, documents.isbn10, documents.isbn13, documents.synced, documents.deleted, documents.updated_at, documents.created_at FROM documents
|
||||||
WHERE
|
WHERE
|
||||||
documents.filepath IS NOT NULL
|
documents.filepath IS NOT NULL
|
||||||
AND documents.deleted = false
|
AND documents.deleted = false
|
||||||
@@ -797,6 +784,7 @@ func (q *Queries) GetMissingDocuments(ctx context.Context, documentIds []string)
|
|||||||
if err := rows.Scan(
|
if err := rows.Scan(
|
||||||
&i.ID,
|
&i.ID,
|
||||||
&i.Md5,
|
&i.Md5,
|
||||||
|
&i.Basepath,
|
||||||
&i.Filepath,
|
&i.Filepath,
|
||||||
&i.Coverfile,
|
&i.Coverfile,
|
||||||
&i.Title,
|
&i.Title,
|
||||||
@@ -828,53 +816,89 @@ func (q *Queries) GetMissingDocuments(ctx context.Context, documentIds []string)
|
|||||||
return items, nil
|
return items, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
const getProgress = `-- name: GetProgress :one
|
const getProgress = `-- name: GetProgress :many
|
||||||
SELECT
|
SELECT
|
||||||
document_progress.user_id, document_progress.document_id, document_progress.device_id, document_progress.percentage, document_progress.progress, document_progress.created_at,
|
documents.title,
|
||||||
devices.device_name
|
documents.author,
|
||||||
FROM document_progress
|
devices.device_name,
|
||||||
JOIN devices ON document_progress.device_id = devices.id
|
ROUND(CAST(progress.percentage AS REAL) * 100, 2) AS percentage,
|
||||||
|
progress.document_id,
|
||||||
|
progress.user_id,
|
||||||
|
LOCAL_TIME(progress.created_at, users.timezone) AS created_at
|
||||||
|
FROM document_progress AS progress
|
||||||
|
LEFT JOIN users ON progress.user_id = users.id
|
||||||
|
LEFT JOIN devices ON progress.device_id = devices.id
|
||||||
|
LEFT JOIN documents ON progress.document_id = documents.id
|
||||||
WHERE
|
WHERE
|
||||||
document_progress.user_id = ?1
|
progress.user_id = ?1
|
||||||
AND document_progress.document_id = ?2
|
AND (
|
||||||
ORDER BY
|
(
|
||||||
document_progress.created_at
|
CAST(?2 AS BOOLEAN) = TRUE
|
||||||
DESC
|
AND document_id = ?3
|
||||||
LIMIT 1
|
) OR ?2 = FALSE
|
||||||
|
)
|
||||||
|
ORDER BY created_at DESC
|
||||||
|
LIMIT ?5
|
||||||
|
OFFSET ?4
|
||||||
`
|
`
|
||||||
|
|
||||||
type GetProgressParams struct {
|
type GetProgressParams struct {
|
||||||
UserID string `json:"user_id"`
|
UserID string `json:"user_id"`
|
||||||
|
DocFilter bool `json:"doc_filter"`
|
||||||
DocumentID string `json:"document_id"`
|
DocumentID string `json:"document_id"`
|
||||||
|
Offset int64 `json:"offset"`
|
||||||
|
Limit int64 `json:"limit"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type GetProgressRow struct {
|
type GetProgressRow struct {
|
||||||
UserID string `json:"user_id"`
|
Title *string `json:"title"`
|
||||||
DocumentID string `json:"document_id"`
|
Author *string `json:"author"`
|
||||||
DeviceID string `json:"device_id"`
|
|
||||||
Percentage float64 `json:"percentage"`
|
|
||||||
Progress string `json:"progress"`
|
|
||||||
CreatedAt string `json:"created_at"`
|
|
||||||
DeviceName string `json:"device_name"`
|
DeviceName string `json:"device_name"`
|
||||||
|
Percentage float64 `json:"percentage"`
|
||||||
|
DocumentID string `json:"document_id"`
|
||||||
|
UserID string `json:"user_id"`
|
||||||
|
CreatedAt interface{} `json:"created_at"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (q *Queries) GetProgress(ctx context.Context, arg GetProgressParams) (GetProgressRow, error) {
|
func (q *Queries) GetProgress(ctx context.Context, arg GetProgressParams) ([]GetProgressRow, error) {
|
||||||
row := q.db.QueryRowContext(ctx, getProgress, arg.UserID, arg.DocumentID)
|
rows, err := q.db.QueryContext(ctx, getProgress,
|
||||||
var i GetProgressRow
|
arg.UserID,
|
||||||
err := row.Scan(
|
arg.DocFilter,
|
||||||
&i.UserID,
|
arg.DocumentID,
|
||||||
&i.DocumentID,
|
arg.Offset,
|
||||||
&i.DeviceID,
|
arg.Limit,
|
||||||
&i.Percentage,
|
|
||||||
&i.Progress,
|
|
||||||
&i.CreatedAt,
|
|
||||||
&i.DeviceName,
|
|
||||||
)
|
)
|
||||||
return i, err
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
var items []GetProgressRow
|
||||||
|
for rows.Next() {
|
||||||
|
var i GetProgressRow
|
||||||
|
if err := rows.Scan(
|
||||||
|
&i.Title,
|
||||||
|
&i.Author,
|
||||||
|
&i.DeviceName,
|
||||||
|
&i.Percentage,
|
||||||
|
&i.DocumentID,
|
||||||
|
&i.UserID,
|
||||||
|
&i.CreatedAt,
|
||||||
|
); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
items = append(items, i)
|
||||||
|
}
|
||||||
|
if err := rows.Close(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return items, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
const getUser = `-- name: GetUser :one
|
const getUser = `-- name: GetUser :one
|
||||||
SELECT id, pass, admin, time_offset, created_at FROM users
|
SELECT id, pass, auth_hash, admin, timezone, created_at FROM users
|
||||||
WHERE id = ?1 LIMIT 1
|
WHERE id = ?1 LIMIT 1
|
||||||
`
|
`
|
||||||
|
|
||||||
@@ -884,15 +908,99 @@ func (q *Queries) GetUser(ctx context.Context, userID string) (User, error) {
|
|||||||
err := row.Scan(
|
err := row.Scan(
|
||||||
&i.ID,
|
&i.ID,
|
||||||
&i.Pass,
|
&i.Pass,
|
||||||
|
&i.AuthHash,
|
||||||
&i.Admin,
|
&i.Admin,
|
||||||
&i.TimeOffset,
|
&i.Timezone,
|
||||||
&i.CreatedAt,
|
&i.CreatedAt,
|
||||||
)
|
)
|
||||||
return i, err
|
return i, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const getUserStatistics = `-- name: GetUserStatistics :many
|
||||||
|
SELECT
|
||||||
|
user_id,
|
||||||
|
|
||||||
|
CAST(SUM(total_words_read) AS INTEGER) AS total_words_read,
|
||||||
|
CAST(SUM(total_time_seconds) AS INTEGER) AS total_seconds,
|
||||||
|
ROUND(COALESCE(CAST(SUM(total_words_read) AS REAL) / (SUM(total_time_seconds) / 60.0), 0.0), 2)
|
||||||
|
AS total_wpm,
|
||||||
|
|
||||||
|
CAST(SUM(yearly_words_read) AS INTEGER) AS yearly_words_read,
|
||||||
|
CAST(SUM(yearly_time_seconds) AS INTEGER) AS yearly_seconds,
|
||||||
|
ROUND(COALESCE(CAST(SUM(yearly_words_read) AS REAL) / (SUM(yearly_time_seconds) / 60.0), 0.0), 2)
|
||||||
|
AS yearly_wpm,
|
||||||
|
|
||||||
|
CAST(SUM(monthly_words_read) AS INTEGER) AS monthly_words_read,
|
||||||
|
CAST(SUM(monthly_time_seconds) AS INTEGER) AS monthly_seconds,
|
||||||
|
ROUND(COALESCE(CAST(SUM(monthly_words_read) AS REAL) / (SUM(monthly_time_seconds) / 60.0), 0.0), 2)
|
||||||
|
AS monthly_wpm,
|
||||||
|
|
||||||
|
CAST(SUM(weekly_words_read) AS INTEGER) AS weekly_words_read,
|
||||||
|
CAST(SUM(weekly_time_seconds) AS INTEGER) AS weekly_seconds,
|
||||||
|
ROUND(COALESCE(CAST(SUM(weekly_words_read) AS REAL) / (SUM(weekly_time_seconds) / 60.0), 0.0), 2)
|
||||||
|
AS weekly_wpm
|
||||||
|
|
||||||
|
FROM document_user_statistics
|
||||||
|
WHERE total_words_read > 0
|
||||||
|
GROUP BY user_id
|
||||||
|
ORDER BY total_wpm DESC
|
||||||
|
`
|
||||||
|
|
||||||
|
type GetUserStatisticsRow struct {
|
||||||
|
UserID string `json:"user_id"`
|
||||||
|
TotalWordsRead int64 `json:"total_words_read"`
|
||||||
|
TotalSeconds int64 `json:"total_seconds"`
|
||||||
|
TotalWpm float64 `json:"total_wpm"`
|
||||||
|
YearlyWordsRead int64 `json:"yearly_words_read"`
|
||||||
|
YearlySeconds int64 `json:"yearly_seconds"`
|
||||||
|
YearlyWpm float64 `json:"yearly_wpm"`
|
||||||
|
MonthlyWordsRead int64 `json:"monthly_words_read"`
|
||||||
|
MonthlySeconds int64 `json:"monthly_seconds"`
|
||||||
|
MonthlyWpm float64 `json:"monthly_wpm"`
|
||||||
|
WeeklyWordsRead int64 `json:"weekly_words_read"`
|
||||||
|
WeeklySeconds int64 `json:"weekly_seconds"`
|
||||||
|
WeeklyWpm float64 `json:"weekly_wpm"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q *Queries) GetUserStatistics(ctx context.Context) ([]GetUserStatisticsRow, error) {
|
||||||
|
rows, err := q.db.QueryContext(ctx, getUserStatistics)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
var items []GetUserStatisticsRow
|
||||||
|
for rows.Next() {
|
||||||
|
var i GetUserStatisticsRow
|
||||||
|
if err := rows.Scan(
|
||||||
|
&i.UserID,
|
||||||
|
&i.TotalWordsRead,
|
||||||
|
&i.TotalSeconds,
|
||||||
|
&i.TotalWpm,
|
||||||
|
&i.YearlyWordsRead,
|
||||||
|
&i.YearlySeconds,
|
||||||
|
&i.YearlyWpm,
|
||||||
|
&i.MonthlyWordsRead,
|
||||||
|
&i.MonthlySeconds,
|
||||||
|
&i.MonthlyWpm,
|
||||||
|
&i.WeeklyWordsRead,
|
||||||
|
&i.WeeklySeconds,
|
||||||
|
&i.WeeklyWpm,
|
||||||
|
); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
items = append(items, i)
|
||||||
|
}
|
||||||
|
if err := rows.Close(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return items, nil
|
||||||
|
}
|
||||||
|
|
||||||
const getUserStreaks = `-- name: GetUserStreaks :many
|
const getUserStreaks = `-- name: GetUserStreaks :many
|
||||||
SELECT user_id, "window", max_streak, max_streak_start_date, max_streak_end_date, current_streak, current_streak_start_date, current_streak_end_date FROM user_streaks
|
SELECT user_id, "window", max_streak, max_streak_start_date, max_streak_end_date, current_streak, current_streak_start_date, current_streak_end_date, last_timezone, last_seen, last_record, last_calculated FROM user_streaks
|
||||||
WHERE user_id = ?1
|
WHERE user_id = ?1
|
||||||
`
|
`
|
||||||
|
|
||||||
@@ -914,6 +1022,10 @@ func (q *Queries) GetUserStreaks(ctx context.Context, userID string) ([]UserStre
|
|||||||
&i.CurrentStreak,
|
&i.CurrentStreak,
|
||||||
&i.CurrentStreakStartDate,
|
&i.CurrentStreakStartDate,
|
||||||
&i.CurrentStreakEndDate,
|
&i.CurrentStreakEndDate,
|
||||||
|
&i.LastTimezone,
|
||||||
|
&i.LastSeen,
|
||||||
|
&i.LastRecord,
|
||||||
|
&i.LastCalculated,
|
||||||
); err != nil {
|
); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -928,40 +1040,26 @@ func (q *Queries) GetUserStreaks(ctx context.Context, userID string) ([]UserStre
|
|||||||
return items, nil
|
return items, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
const getWPMLeaderboard = `-- name: GetWPMLeaderboard :many
|
const getUsers = `-- name: GetUsers :many
|
||||||
SELECT
|
SELECT id, pass, auth_hash, admin, timezone, created_at FROM users
|
||||||
user_id,
|
|
||||||
CAST(SUM(words_read) AS INTEGER) AS total_words_read,
|
|
||||||
CAST(SUM(total_time_seconds) AS INTEGER) AS total_seconds,
|
|
||||||
ROUND(CAST(SUM(words_read) AS REAL) / (SUM(total_time_seconds) / 60.0), 2)
|
|
||||||
AS wpm
|
|
||||||
FROM document_user_statistics
|
|
||||||
WHERE words_read > 0
|
|
||||||
GROUP BY user_id
|
|
||||||
ORDER BY wpm DESC
|
|
||||||
`
|
`
|
||||||
|
|
||||||
type GetWPMLeaderboardRow struct {
|
func (q *Queries) GetUsers(ctx context.Context) ([]User, error) {
|
||||||
UserID string `json:"user_id"`
|
rows, err := q.db.QueryContext(ctx, getUsers)
|
||||||
TotalWordsRead int64 `json:"total_words_read"`
|
|
||||||
TotalSeconds int64 `json:"total_seconds"`
|
|
||||||
Wpm float64 `json:"wpm"`
|
|
||||||
}
|
|
||||||
|
|
||||||
func (q *Queries) GetWPMLeaderboard(ctx context.Context) ([]GetWPMLeaderboardRow, error) {
|
|
||||||
rows, err := q.db.QueryContext(ctx, getWPMLeaderboard)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
defer rows.Close()
|
defer rows.Close()
|
||||||
var items []GetWPMLeaderboardRow
|
var items []User
|
||||||
for rows.Next() {
|
for rows.Next() {
|
||||||
var i GetWPMLeaderboardRow
|
var i User
|
||||||
if err := rows.Scan(
|
if err := rows.Scan(
|
||||||
&i.UserID,
|
&i.ID,
|
||||||
&i.TotalWordsRead,
|
&i.Pass,
|
||||||
&i.TotalSeconds,
|
&i.AuthHash,
|
||||||
&i.Wpm,
|
&i.Admin,
|
||||||
|
&i.Timezone,
|
||||||
|
&i.CreatedAt,
|
||||||
); err != nil {
|
); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -1062,29 +1160,67 @@ func (q *Queries) UpdateProgress(ctx context.Context, arg UpdateProgressParams)
|
|||||||
return i, err
|
return i, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const updateSettings = `-- name: UpdateSettings :one
|
||||||
|
INSERT INTO settings (name, value)
|
||||||
|
VALUES (?, ?)
|
||||||
|
ON CONFLICT DO UPDATE
|
||||||
|
SET
|
||||||
|
name = COALESCE(excluded.name, name),
|
||||||
|
value = COALESCE(excluded.value, value)
|
||||||
|
RETURNING id, name, value, created_at
|
||||||
|
`
|
||||||
|
|
||||||
|
type UpdateSettingsParams struct {
|
||||||
|
Name string `json:"name"`
|
||||||
|
Value string `json:"value"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (q *Queries) UpdateSettings(ctx context.Context, arg UpdateSettingsParams) (Setting, error) {
|
||||||
|
row := q.db.QueryRowContext(ctx, updateSettings, arg.Name, arg.Value)
|
||||||
|
var i Setting
|
||||||
|
err := row.Scan(
|
||||||
|
&i.ID,
|
||||||
|
&i.Name,
|
||||||
|
&i.Value,
|
||||||
|
&i.CreatedAt,
|
||||||
|
)
|
||||||
|
return i, err
|
||||||
|
}
|
||||||
|
|
||||||
const updateUser = `-- name: UpdateUser :one
|
const updateUser = `-- name: UpdateUser :one
|
||||||
UPDATE users
|
UPDATE users
|
||||||
SET
|
SET
|
||||||
pass = COALESCE(?1, pass),
|
pass = COALESCE(?1, pass),
|
||||||
time_offset = COALESCE(?2, time_offset)
|
auth_hash = COALESCE(?2, auth_hash),
|
||||||
WHERE id = ?3
|
timezone = COALESCE(?3, timezone),
|
||||||
RETURNING id, pass, admin, time_offset, created_at
|
admin = COALESCE(?4, admin)
|
||||||
|
WHERE id = ?5
|
||||||
|
RETURNING id, pass, auth_hash, admin, timezone, created_at
|
||||||
`
|
`
|
||||||
|
|
||||||
type UpdateUserParams struct {
|
type UpdateUserParams struct {
|
||||||
Password *string `json:"-"`
|
Password *string `json:"-"`
|
||||||
TimeOffset *string `json:"time_offset"`
|
AuthHash *string `json:"auth_hash"`
|
||||||
|
Timezone *string `json:"timezone"`
|
||||||
|
Admin bool `json:"-"`
|
||||||
UserID string `json:"user_id"`
|
UserID string `json:"user_id"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (q *Queries) UpdateUser(ctx context.Context, arg UpdateUserParams) (User, error) {
|
func (q *Queries) UpdateUser(ctx context.Context, arg UpdateUserParams) (User, error) {
|
||||||
row := q.db.QueryRowContext(ctx, updateUser, arg.Password, arg.TimeOffset, arg.UserID)
|
row := q.db.QueryRowContext(ctx, updateUser,
|
||||||
|
arg.Password,
|
||||||
|
arg.AuthHash,
|
||||||
|
arg.Timezone,
|
||||||
|
arg.Admin,
|
||||||
|
arg.UserID,
|
||||||
|
)
|
||||||
var i User
|
var i User
|
||||||
err := row.Scan(
|
err := row.Scan(
|
||||||
&i.ID,
|
&i.ID,
|
||||||
&i.Pass,
|
&i.Pass,
|
||||||
|
&i.AuthHash,
|
||||||
&i.Admin,
|
&i.Admin,
|
||||||
&i.TimeOffset,
|
&i.Timezone,
|
||||||
&i.CreatedAt,
|
&i.CreatedAt,
|
||||||
)
|
)
|
||||||
return i, err
|
return i, err
|
||||||
@@ -1130,6 +1266,7 @@ const upsertDocument = `-- name: UpsertDocument :one
|
|||||||
INSERT INTO documents (
|
INSERT INTO documents (
|
||||||
id,
|
id,
|
||||||
md5,
|
md5,
|
||||||
|
basepath,
|
||||||
filepath,
|
filepath,
|
||||||
coverfile,
|
coverfile,
|
||||||
title,
|
title,
|
||||||
@@ -1144,10 +1281,11 @@ INSERT INTO documents (
|
|||||||
isbn10,
|
isbn10,
|
||||||
isbn13
|
isbn13
|
||||||
)
|
)
|
||||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||||
ON CONFLICT DO UPDATE
|
ON CONFLICT DO UPDATE
|
||||||
SET
|
SET
|
||||||
md5 = COALESCE(excluded.md5, md5),
|
md5 = COALESCE(excluded.md5, md5),
|
||||||
|
basepath = COALESCE(excluded.basepath, basepath),
|
||||||
filepath = COALESCE(excluded.filepath, filepath),
|
filepath = COALESCE(excluded.filepath, filepath),
|
||||||
coverfile = COALESCE(excluded.coverfile, coverfile),
|
coverfile = COALESCE(excluded.coverfile, coverfile),
|
||||||
title = COALESCE(excluded.title, title),
|
title = COALESCE(excluded.title, title),
|
||||||
@@ -1161,12 +1299,13 @@ SET
|
|||||||
gbid = COALESCE(excluded.gbid, gbid),
|
gbid = COALESCE(excluded.gbid, gbid),
|
||||||
isbn10 = COALESCE(excluded.isbn10, isbn10),
|
isbn10 = COALESCE(excluded.isbn10, isbn10),
|
||||||
isbn13 = COALESCE(excluded.isbn13, isbn13)
|
isbn13 = COALESCE(excluded.isbn13, isbn13)
|
||||||
RETURNING id, md5, filepath, coverfile, title, author, series, series_index, lang, description, words, gbid, olid, isbn10, isbn13, synced, deleted, updated_at, created_at
|
RETURNING id, md5, basepath, filepath, coverfile, title, author, series, series_index, lang, description, words, gbid, olid, isbn10, isbn13, synced, deleted, updated_at, created_at
|
||||||
`
|
`
|
||||||
|
|
||||||
type UpsertDocumentParams struct {
|
type UpsertDocumentParams struct {
|
||||||
ID string `json:"id"`
|
ID string `json:"id"`
|
||||||
Md5 *string `json:"md5"`
|
Md5 *string `json:"md5"`
|
||||||
|
Basepath *string `json:"basepath"`
|
||||||
Filepath *string `json:"filepath"`
|
Filepath *string `json:"filepath"`
|
||||||
Coverfile *string `json:"coverfile"`
|
Coverfile *string `json:"coverfile"`
|
||||||
Title *string `json:"title"`
|
Title *string `json:"title"`
|
||||||
@@ -1186,6 +1325,7 @@ func (q *Queries) UpsertDocument(ctx context.Context, arg UpsertDocumentParams)
|
|||||||
row := q.db.QueryRowContext(ctx, upsertDocument,
|
row := q.db.QueryRowContext(ctx, upsertDocument,
|
||||||
arg.ID,
|
arg.ID,
|
||||||
arg.Md5,
|
arg.Md5,
|
||||||
|
arg.Basepath,
|
||||||
arg.Filepath,
|
arg.Filepath,
|
||||||
arg.Coverfile,
|
arg.Coverfile,
|
||||||
arg.Title,
|
arg.Title,
|
||||||
@@ -1204,6 +1344,7 @@ func (q *Queries) UpsertDocument(ctx context.Context, arg UpsertDocumentParams)
|
|||||||
err := row.Scan(
|
err := row.Scan(
|
||||||
&i.ID,
|
&i.ID,
|
||||||
&i.Md5,
|
&i.Md5,
|
||||||
|
&i.Basepath,
|
||||||
&i.Filepath,
|
&i.Filepath,
|
||||||
&i.Coverfile,
|
&i.Coverfile,
|
||||||
&i.Title,
|
&i.Title,
|
||||||
|
|||||||
@@ -1,6 +1,3 @@
|
|||||||
PRAGMA foreign_keys = ON;
|
|
||||||
PRAGMA journal_mode = WAL;
|
|
||||||
|
|
||||||
---------------------------------------------------------------
|
---------------------------------------------------------------
|
||||||
------------------------ Normal Tables ------------------------
|
------------------------ Normal Tables ------------------------
|
||||||
---------------------------------------------------------------
|
---------------------------------------------------------------
|
||||||
@@ -10,8 +7,9 @@ CREATE TABLE IF NOT EXISTS users (
|
|||||||
id TEXT NOT NULL PRIMARY KEY,
|
id TEXT NOT NULL PRIMARY KEY,
|
||||||
|
|
||||||
pass TEXT NOT NULL,
|
pass TEXT NOT NULL,
|
||||||
|
auth_hash TEXT NOT NULL,
|
||||||
admin BOOLEAN NOT NULL DEFAULT 0 CHECK (admin IN (0, 1)),
|
admin BOOLEAN NOT NULL DEFAULT 0 CHECK (admin IN (0, 1)),
|
||||||
time_offset TEXT NOT NULL DEFAULT '0 hours',
|
timezone TEXT NOT NULL DEFAULT 'Europe/London',
|
||||||
|
|
||||||
created_at DATETIME NOT NULL DEFAULT (STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'))
|
created_at DATETIME NOT NULL DEFAULT (STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'))
|
||||||
);
|
);
|
||||||
@@ -21,6 +19,7 @@ CREATE TABLE IF NOT EXISTS documents (
|
|||||||
id TEXT NOT NULL PRIMARY KEY,
|
id TEXT NOT NULL PRIMARY KEY,
|
||||||
|
|
||||||
md5 TEXT,
|
md5 TEXT,
|
||||||
|
basepath TEXT,
|
||||||
filepath TEXT,
|
filepath TEXT,
|
||||||
coverfile TEXT,
|
coverfile TEXT,
|
||||||
title TEXT,
|
title TEXT,
|
||||||
@@ -46,7 +45,6 @@ CREATE TABLE IF NOT EXISTS documents (
|
|||||||
-- Metadata
|
-- Metadata
|
||||||
CREATE TABLE IF NOT EXISTS metadata (
|
CREATE TABLE IF NOT EXISTS metadata (
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
|
||||||
document_id TEXT NOT NULL,
|
document_id TEXT NOT NULL,
|
||||||
|
|
||||||
title TEXT,
|
title TEXT,
|
||||||
@@ -110,12 +108,46 @@ CREATE TABLE IF NOT EXISTS activity (
|
|||||||
FOREIGN KEY (device_id) REFERENCES devices (id)
|
FOREIGN KEY (device_id) REFERENCES devices (id)
|
||||||
);
|
);
|
||||||
|
|
||||||
---------------------------------------------------------------
|
-- Settings
|
||||||
----------------------- Temporary Tables ----------------------
|
CREATE TABLE IF NOT EXISTS settings (
|
||||||
---------------------------------------------------------------
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
|
||||||
-- Temporary User Streaks Table (Cached from View)
|
name TEXT NOT NULL,
|
||||||
CREATE TEMPORARY TABLE IF NOT EXISTS user_streaks (
|
value TEXT NOT NULL,
|
||||||
|
|
||||||
|
created_at DATETIME NOT NULL DEFAULT (STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'))
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Document User Statistics Table
|
||||||
|
CREATE TABLE IF NOT EXISTS document_user_statistics (
|
||||||
|
document_id TEXT NOT NULL,
|
||||||
|
user_id TEXT NOT NULL,
|
||||||
|
percentage REAL NOT NULL,
|
||||||
|
last_read DATETIME NOT NULL,
|
||||||
|
last_seen DATETIME NOT NULL,
|
||||||
|
read_percentage REAL NOT NULL,
|
||||||
|
|
||||||
|
total_time_seconds INTEGER NOT NULL,
|
||||||
|
total_words_read INTEGER NOT NULL,
|
||||||
|
total_wpm REAL NOT NULL,
|
||||||
|
|
||||||
|
yearly_time_seconds INTEGER NOT NULL,
|
||||||
|
yearly_words_read INTEGER NOT NULL,
|
||||||
|
yearly_wpm REAL NOT NULL,
|
||||||
|
|
||||||
|
monthly_time_seconds INTEGER NOT NULL,
|
||||||
|
monthly_words_read INTEGER NOT NULL,
|
||||||
|
monthly_wpm REAL NOT NULL,
|
||||||
|
|
||||||
|
weekly_time_seconds INTEGER NOT NULL,
|
||||||
|
weekly_words_read INTEGER NOT NULL,
|
||||||
|
weekly_wpm REAL NOT NULL,
|
||||||
|
|
||||||
|
UNIQUE(document_id, user_id) ON CONFLICT REPLACE
|
||||||
|
);
|
||||||
|
|
||||||
|
-- User Streaks Table
|
||||||
|
CREATE TABLE IF NOT EXISTS user_streaks (
|
||||||
user_id TEXT NOT NULL,
|
user_id TEXT NOT NULL,
|
||||||
window TEXT NOT NULL,
|
window TEXT NOT NULL,
|
||||||
|
|
||||||
@@ -125,245 +157,28 @@ CREATE TEMPORARY TABLE IF NOT EXISTS user_streaks (
|
|||||||
|
|
||||||
current_streak INTEGER NOT NULL,
|
current_streak INTEGER NOT NULL,
|
||||||
current_streak_start_date TEXT NOT NULL,
|
current_streak_start_date TEXT NOT NULL,
|
||||||
current_streak_end_date TEXT NOT NULL
|
current_streak_end_date TEXT NOT NULL,
|
||||||
|
|
||||||
|
last_timezone TEXT NOT NULL,
|
||||||
|
last_seen TEXT NOT NULL,
|
||||||
|
last_record TEXT NOT NULL,
|
||||||
|
last_calculated TEXT NOT NULL,
|
||||||
|
|
||||||
|
UNIQUE(user_id, window) ON CONFLICT REPLACE
|
||||||
);
|
);
|
||||||
|
|
||||||
CREATE TEMPORARY TABLE IF NOT EXISTS document_user_statistics (
|
|
||||||
document_id TEXT NOT NULL,
|
|
||||||
user_id TEXT NOT NULL,
|
|
||||||
last_read TEXT NOT NULL,
|
|
||||||
total_time_seconds INTEGER NOT NULL,
|
|
||||||
read_percentage REAL NOT NULL,
|
|
||||||
percentage REAL NOT NULL,
|
|
||||||
words_read INTEGER NOT NULL,
|
|
||||||
wpm REAL NOT NULL,
|
|
||||||
|
|
||||||
UNIQUE(document_id, user_id) ON CONFLICT REPLACE
|
|
||||||
);
|
|
||||||
|
|
||||||
|
|
||||||
---------------------------------------------------------------
|
---------------------------------------------------------------
|
||||||
--------------------------- Indexes ---------------------------
|
--------------------------- Indexes ---------------------------
|
||||||
---------------------------------------------------------------
|
---------------------------------------------------------------
|
||||||
|
|
||||||
CREATE INDEX IF NOT EXISTS activity_start_time ON activity (start_time);
|
CREATE INDEX IF NOT EXISTS activity_start_time ON activity (start_time);
|
||||||
|
CREATE INDEX IF NOT EXISTS activity_created_at ON activity (created_at);
|
||||||
CREATE INDEX IF NOT EXISTS activity_user_id ON activity (user_id);
|
CREATE INDEX IF NOT EXISTS activity_user_id ON activity (user_id);
|
||||||
CREATE INDEX IF NOT EXISTS activity_user_id_document_id ON activity (
|
CREATE INDEX IF NOT EXISTS activity_user_id_document_id ON activity (
|
||||||
user_id,
|
user_id,
|
||||||
document_id
|
document_id
|
||||||
);
|
);
|
||||||
|
|
||||||
---------------------------------------------------------------
|
|
||||||
---------------------------- Views ----------------------------
|
|
||||||
---------------------------------------------------------------
|
|
||||||
|
|
||||||
--------------------------------
|
|
||||||
--------- User Streaks ---------
|
|
||||||
--------------------------------
|
|
||||||
|
|
||||||
CREATE VIEW IF NOT EXISTS view_user_streaks AS
|
|
||||||
|
|
||||||
WITH document_windows AS (
|
|
||||||
SELECT
|
|
||||||
activity.user_id,
|
|
||||||
users.time_offset,
|
|
||||||
DATE(
|
|
||||||
activity.start_time,
|
|
||||||
users.time_offset,
|
|
||||||
'weekday 0', '-7 day'
|
|
||||||
) AS weekly_read,
|
|
||||||
DATE(activity.start_time, users.time_offset) AS daily_read
|
|
||||||
FROM activity
|
|
||||||
LEFT JOIN users ON users.id = activity.user_id
|
|
||||||
GROUP BY activity.user_id, weekly_read, daily_read
|
|
||||||
),
|
|
||||||
|
|
||||||
weekly_partitions AS (
|
|
||||||
SELECT
|
|
||||||
user_id,
|
|
||||||
time_offset,
|
|
||||||
'WEEK' AS "window",
|
|
||||||
weekly_read AS read_window,
|
|
||||||
row_number() OVER (
|
|
||||||
PARTITION BY user_id ORDER BY weekly_read DESC
|
|
||||||
) AS seqnum
|
|
||||||
FROM document_windows
|
|
||||||
GROUP BY user_id, weekly_read
|
|
||||||
),
|
|
||||||
|
|
||||||
daily_partitions AS (
|
|
||||||
SELECT
|
|
||||||
user_id,
|
|
||||||
time_offset,
|
|
||||||
'DAY' AS "window",
|
|
||||||
daily_read AS read_window,
|
|
||||||
row_number() OVER (
|
|
||||||
PARTITION BY user_id ORDER BY daily_read DESC
|
|
||||||
) AS seqnum
|
|
||||||
FROM document_windows
|
|
||||||
GROUP BY user_id, daily_read
|
|
||||||
),
|
|
||||||
|
|
||||||
streaks AS (
|
|
||||||
SELECT
|
|
||||||
COUNT(*) AS streak,
|
|
||||||
MIN(read_window) AS start_date,
|
|
||||||
MAX(read_window) AS end_date,
|
|
||||||
window,
|
|
||||||
user_id,
|
|
||||||
time_offset
|
|
||||||
FROM daily_partitions
|
|
||||||
GROUP BY
|
|
||||||
time_offset,
|
|
||||||
user_id,
|
|
||||||
DATE(read_window, '+' || seqnum || ' day')
|
|
||||||
|
|
||||||
UNION ALL
|
|
||||||
|
|
||||||
SELECT
|
|
||||||
COUNT(*) AS streak,
|
|
||||||
MIN(read_window) AS start_date,
|
|
||||||
MAX(read_window) AS end_date,
|
|
||||||
window,
|
|
||||||
user_id,
|
|
||||||
time_offset
|
|
||||||
FROM weekly_partitions
|
|
||||||
GROUP BY
|
|
||||||
time_offset,
|
|
||||||
user_id,
|
|
||||||
DATE(read_window, '+' || (seqnum * 7) || ' day')
|
|
||||||
),
|
|
||||||
max_streak AS (
|
|
||||||
SELECT
|
|
||||||
MAX(streak) AS max_streak,
|
|
||||||
start_date AS max_streak_start_date,
|
|
||||||
end_date AS max_streak_end_date,
|
|
||||||
window,
|
|
||||||
user_id
|
|
||||||
FROM streaks
|
|
||||||
GROUP BY user_id, window
|
|
||||||
),
|
|
||||||
current_streak AS (
|
|
||||||
SELECT
|
|
||||||
streak AS current_streak,
|
|
||||||
start_date AS current_streak_start_date,
|
|
||||||
end_date AS current_streak_end_date,
|
|
||||||
window,
|
|
||||||
user_id
|
|
||||||
FROM streaks
|
|
||||||
WHERE CASE
|
|
||||||
WHEN window = "WEEK" THEN
|
|
||||||
DATE('now', time_offset, 'weekday 0', '-14 day') = current_streak_end_date
|
|
||||||
OR DATE('now', time_offset, 'weekday 0', '-7 day') = current_streak_end_date
|
|
||||||
WHEN window = "DAY" THEN
|
|
||||||
DATE('now', time_offset, '-1 day') = current_streak_end_date
|
|
||||||
OR DATE('now', time_offset) = current_streak_end_date
|
|
||||||
END
|
|
||||||
GROUP BY user_id, window
|
|
||||||
)
|
|
||||||
SELECT
|
|
||||||
max_streak.user_id,
|
|
||||||
max_streak.window,
|
|
||||||
IFNULL(max_streak, 0) AS max_streak,
|
|
||||||
IFNULL(max_streak_start_date, "N/A") AS max_streak_start_date,
|
|
||||||
IFNULL(max_streak_end_date, "N/A") AS max_streak_end_date,
|
|
||||||
IFNULL(current_streak, 0) AS current_streak,
|
|
||||||
IFNULL(current_streak_start_date, "N/A") AS current_streak_start_date,
|
|
||||||
IFNULL(current_streak_end_date, "N/A") AS current_streak_end_date
|
|
||||||
FROM max_streak
|
|
||||||
LEFT JOIN current_streak ON
|
|
||||||
current_streak.user_id = max_streak.user_id
|
|
||||||
AND current_streak.window = max_streak.window;
|
|
||||||
|
|
||||||
--------------------------------
|
|
||||||
------- Document Stats ---------
|
|
||||||
--------------------------------
|
|
||||||
|
|
||||||
CREATE VIEW IF NOT EXISTS view_document_user_statistics AS
|
|
||||||
|
|
||||||
WITH intermediate_ga AS (
|
|
||||||
SELECT
|
|
||||||
ga1.id AS row_id,
|
|
||||||
ga1.user_id,
|
|
||||||
ga1.document_id,
|
|
||||||
ga1.duration,
|
|
||||||
ga1.start_time,
|
|
||||||
ga1.start_percentage,
|
|
||||||
ga1.end_percentage,
|
|
||||||
|
|
||||||
-- Find Overlapping Events (Assign Unique ID)
|
|
||||||
(
|
|
||||||
SELECT MIN(id)
|
|
||||||
FROM activity AS ga2
|
|
||||||
WHERE
|
|
||||||
ga1.document_id = ga2.document_id
|
|
||||||
AND ga1.user_id = ga2.user_id
|
|
||||||
AND ga1.start_percentage <= ga2.end_percentage
|
|
||||||
AND ga1.end_percentage >= ga2.start_percentage
|
|
||||||
) AS group_leader
|
|
||||||
FROM activity AS ga1
|
|
||||||
),
|
|
||||||
|
|
||||||
grouped_activity AS (
|
|
||||||
SELECT
|
|
||||||
user_id,
|
|
||||||
document_id,
|
|
||||||
MAX(start_time) AS start_time,
|
|
||||||
MIN(start_percentage) AS start_percentage,
|
|
||||||
MAX(end_percentage) AS end_percentage,
|
|
||||||
MAX(end_percentage) - MIN(start_percentage) AS read_percentage,
|
|
||||||
SUM(duration) AS duration
|
|
||||||
FROM intermediate_ga
|
|
||||||
GROUP BY group_leader
|
|
||||||
),
|
|
||||||
|
|
||||||
current_progress AS (
|
|
||||||
SELECT
|
|
||||||
user_id,
|
|
||||||
document_id,
|
|
||||||
COALESCE((
|
|
||||||
SELECT percentage
|
|
||||||
FROM document_progress AS dp
|
|
||||||
WHERE
|
|
||||||
dp.user_id = iga.user_id
|
|
||||||
AND dp.document_id = iga.document_id
|
|
||||||
ORDER BY created_at DESC
|
|
||||||
LIMIT 1
|
|
||||||
), end_percentage) AS percentage
|
|
||||||
FROM intermediate_ga AS iga
|
|
||||||
GROUP BY user_id, document_id
|
|
||||||
HAVING MAX(start_time)
|
|
||||||
)
|
|
||||||
|
|
||||||
SELECT
|
|
||||||
ga.document_id,
|
|
||||||
ga.user_id,
|
|
||||||
MAX(start_time) AS last_read,
|
|
||||||
SUM(duration) AS total_time_seconds,
|
|
||||||
SUM(read_percentage) AS read_percentage,
|
|
||||||
cp.percentage,
|
|
||||||
|
|
||||||
(CAST(COALESCE(d.words, 0.0) AS REAL) * SUM(read_percentage))
|
|
||||||
AS words_read,
|
|
||||||
|
|
||||||
(CAST(COALESCE(d.words, 0.0) AS REAL) * SUM(read_percentage))
|
|
||||||
/ (SUM(duration) / 60.0) AS wpm
|
|
||||||
FROM grouped_activity AS ga
|
|
||||||
INNER JOIN
|
|
||||||
current_progress AS cp
|
|
||||||
ON ga.user_id = cp.user_id AND ga.document_id = cp.document_id
|
|
||||||
INNER JOIN
|
|
||||||
documents AS d
|
|
||||||
ON d.id = ga.document_id
|
|
||||||
GROUP BY ga.document_id, ga.user_id
|
|
||||||
ORDER BY wpm DESC;
|
|
||||||
|
|
||||||
---------------------------------------------------------------
|
|
||||||
------------------ Populate Temporary Tables ------------------
|
|
||||||
---------------------------------------------------------------
|
|
||||||
INSERT INTO user_streaks SELECT * FROM view_user_streaks;
|
|
||||||
INSERT INTO document_user_statistics SELECT * FROM view_document_user_statistics;
|
|
||||||
|
|
||||||
---------------------------------------------------------------
|
---------------------------------------------------------------
|
||||||
--------------------------- Triggers --------------------------
|
--------------------------- Triggers --------------------------
|
||||||
---------------------------------------------------------------
|
---------------------------------------------------------------
|
||||||
@@ -375,3 +190,11 @@ UPDATE documents
|
|||||||
SET updated_at = STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now')
|
SET updated_at = STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now')
|
||||||
WHERE id = old.id;
|
WHERE id = old.id;
|
||||||
END;
|
END;
|
||||||
|
|
||||||
|
-- Delete User
|
||||||
|
CREATE TRIGGER IF NOT EXISTS user_deleted
|
||||||
|
BEFORE DELETE ON users BEGIN
|
||||||
|
DELETE FROM activity WHERE activity.user_id=OLD.id;
|
||||||
|
DELETE FROM devices WHERE devices.user_id=OLD.id;
|
||||||
|
DELETE FROM document_progress WHERE document_progress.user_id=OLD.id;
|
||||||
|
END;
|
||||||
|
|||||||
@@ -1,77 +0,0 @@
|
|||||||
INSERT INTO document_user_statistics
|
|
||||||
WITH intermediate_ga AS (
|
|
||||||
SELECT
|
|
||||||
ga1.id AS row_id,
|
|
||||||
ga1.user_id,
|
|
||||||
ga1.document_id,
|
|
||||||
ga1.duration,
|
|
||||||
ga1.start_time,
|
|
||||||
ga1.start_percentage,
|
|
||||||
ga1.end_percentage,
|
|
||||||
|
|
||||||
-- Find Overlapping Events (Assign Unique ID)
|
|
||||||
(
|
|
||||||
SELECT MIN(id)
|
|
||||||
FROM activity AS ga2
|
|
||||||
WHERE
|
|
||||||
ga1.document_id = ga2.document_id
|
|
||||||
AND ga1.user_id = ga2.user_id
|
|
||||||
AND ga1.start_percentage <= ga2.end_percentage
|
|
||||||
AND ga1.end_percentage >= ga2.start_percentage
|
|
||||||
) AS group_leader
|
|
||||||
FROM activity AS ga1
|
|
||||||
WHERE
|
|
||||||
document_id = ?
|
|
||||||
AND user_id = ?
|
|
||||||
),
|
|
||||||
grouped_activity AS (
|
|
||||||
SELECT
|
|
||||||
user_id,
|
|
||||||
document_id,
|
|
||||||
MAX(start_time) AS start_time,
|
|
||||||
MIN(start_percentage) AS start_percentage,
|
|
||||||
MAX(end_percentage) AS end_percentage,
|
|
||||||
MAX(end_percentage) - MIN(start_percentage) AS read_percentage,
|
|
||||||
SUM(duration) AS duration
|
|
||||||
FROM intermediate_ga
|
|
||||||
GROUP BY group_leader
|
|
||||||
),
|
|
||||||
current_progress AS (
|
|
||||||
SELECT
|
|
||||||
user_id,
|
|
||||||
document_id,
|
|
||||||
COALESCE((
|
|
||||||
SELECT percentage
|
|
||||||
FROM document_progress AS dp
|
|
||||||
WHERE
|
|
||||||
dp.user_id = iga.user_id
|
|
||||||
AND dp.document_id = iga.document_id
|
|
||||||
ORDER BY created_at DESC
|
|
||||||
LIMIT 1
|
|
||||||
), end_percentage) AS percentage
|
|
||||||
FROM intermediate_ga AS iga
|
|
||||||
GROUP BY user_id, document_id
|
|
||||||
HAVING MAX(start_time)
|
|
||||||
)
|
|
||||||
SELECT
|
|
||||||
ga.document_id,
|
|
||||||
ga.user_id,
|
|
||||||
MAX(start_time) AS last_read,
|
|
||||||
SUM(duration) AS total_time_seconds,
|
|
||||||
SUM(read_percentage) AS read_percentage,
|
|
||||||
cp.percentage,
|
|
||||||
|
|
||||||
(CAST(COALESCE(d.words, 0.0) AS REAL) * SUM(read_percentage))
|
|
||||||
AS words_read,
|
|
||||||
|
|
||||||
(CAST(COALESCE(d.words, 0.0) AS REAL) * SUM(read_percentage))
|
|
||||||
/ (SUM(duration) / 60.0) AS wpm
|
|
||||||
FROM grouped_activity AS ga
|
|
||||||
INNER JOIN
|
|
||||||
current_progress AS cp
|
|
||||||
ON ga.user_id = cp.user_id AND ga.document_id = cp.document_id
|
|
||||||
INNER JOIN
|
|
||||||
documents AS d
|
|
||||||
ON d.id = ga.document_id
|
|
||||||
GROUP BY ga.document_id, ga.user_id
|
|
||||||
ORDER BY wpm DESC;
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
DELETE FROM user_streaks;
|
|
||||||
INSERT INTO user_streaks SELECT * FROM view_user_streaks;
|
|
||||||
DELETE FROM document_user_statistics;
|
|
||||||
INSERT INTO document_user_statistics
|
|
||||||
SELECT *
|
|
||||||
FROM view_document_user_statistics;
|
|
||||||
154
database/user_streaks.sql
Normal file
154
database/user_streaks.sql
Normal file
@@ -0,0 +1,154 @@
|
|||||||
|
WITH updated_users AS (
|
||||||
|
SELECT a.user_id
|
||||||
|
FROM activity AS a
|
||||||
|
LEFT JOIN users AS u ON u.id = a.user_id
|
||||||
|
LEFT JOIN user_streaks AS s ON a.user_id = s.user_id AND s.window = 'DAY'
|
||||||
|
WHERE
|
||||||
|
a.created_at > COALESCE(s.last_seen, '1970-01-01')
|
||||||
|
AND LOCAL_DATE(s.last_record, u.timezone) != LOCAL_DATE(a.start_time, u.timezone)
|
||||||
|
GROUP BY a.user_id
|
||||||
|
),
|
||||||
|
|
||||||
|
outdated_users AS (
|
||||||
|
SELECT
|
||||||
|
a.user_id,
|
||||||
|
u.timezone AS last_timezone,
|
||||||
|
MAX(a.created_at) AS last_seen,
|
||||||
|
MAX(a.start_time) AS last_record,
|
||||||
|
STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now') AS last_calculated
|
||||||
|
FROM activity AS a
|
||||||
|
LEFT JOIN users AS u ON u.id = a.user_id
|
||||||
|
LEFT JOIN user_streaks AS s ON a.user_id = s.user_id AND s.window = 'DAY'
|
||||||
|
GROUP BY a.user_id
|
||||||
|
HAVING
|
||||||
|
-- User Changed Timezones
|
||||||
|
s.last_timezone != u.timezone
|
||||||
|
|
||||||
|
-- Users Date Changed
|
||||||
|
OR LOCAL_DATE(COALESCE(s.last_calculated, '1970-01-01T00:00:00Z'), u.timezone) !=
|
||||||
|
LOCAL_DATE(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), u.timezone)
|
||||||
|
|
||||||
|
-- User Added New Data
|
||||||
|
OR a.user_id IN updated_users
|
||||||
|
),
|
||||||
|
|
||||||
|
document_windows AS (
|
||||||
|
SELECT
|
||||||
|
activity.user_id,
|
||||||
|
users.timezone,
|
||||||
|
DATE(
|
||||||
|
LOCAL_DATE(activity.start_time, users.timezone),
|
||||||
|
'weekday 0', '-7 day'
|
||||||
|
) AS weekly_read,
|
||||||
|
LOCAL_DATE(activity.start_time, users.timezone) AS daily_read
|
||||||
|
FROM activity
|
||||||
|
INNER JOIN outdated_users ON outdated_users.user_id = activity.user_id
|
||||||
|
LEFT JOIN users ON users.id = activity.user_id
|
||||||
|
GROUP BY activity.user_id, weekly_read, daily_read
|
||||||
|
),
|
||||||
|
|
||||||
|
weekly_partitions AS (
|
||||||
|
SELECT
|
||||||
|
user_id,
|
||||||
|
timezone,
|
||||||
|
'WEEK' AS "window",
|
||||||
|
weekly_read AS read_window,
|
||||||
|
ROW_NUMBER() OVER (
|
||||||
|
PARTITION BY user_id ORDER BY weekly_read DESC
|
||||||
|
) AS seqnum
|
||||||
|
FROM document_windows
|
||||||
|
GROUP BY user_id, weekly_read
|
||||||
|
),
|
||||||
|
|
||||||
|
daily_partitions AS (
|
||||||
|
SELECT
|
||||||
|
user_id,
|
||||||
|
timezone,
|
||||||
|
'DAY' AS "window",
|
||||||
|
daily_read AS read_window,
|
||||||
|
ROW_NUMBER() OVER (
|
||||||
|
PARTITION BY user_id ORDER BY daily_read DESC
|
||||||
|
) AS seqnum
|
||||||
|
FROM document_windows
|
||||||
|
GROUP BY user_id, daily_read
|
||||||
|
),
|
||||||
|
|
||||||
|
streaks AS (
|
||||||
|
SELECT
|
||||||
|
COUNT(*) AS streak,
|
||||||
|
MIN(read_window) AS start_date,
|
||||||
|
MAX(read_window) AS end_date,
|
||||||
|
window,
|
||||||
|
user_id,
|
||||||
|
timezone
|
||||||
|
FROM daily_partitions
|
||||||
|
GROUP BY
|
||||||
|
timezone,
|
||||||
|
user_id,
|
||||||
|
DATE(read_window, '+' || seqnum || ' day')
|
||||||
|
|
||||||
|
UNION ALL
|
||||||
|
|
||||||
|
SELECT
|
||||||
|
COUNT(*) AS streak,
|
||||||
|
MIN(read_window) AS start_date,
|
||||||
|
MAX(read_window) AS end_date,
|
||||||
|
window,
|
||||||
|
user_id,
|
||||||
|
timezone
|
||||||
|
FROM weekly_partitions
|
||||||
|
GROUP BY
|
||||||
|
timezone,
|
||||||
|
user_id,
|
||||||
|
DATE(read_window, '+' || (seqnum * 7) || ' day')
|
||||||
|
),
|
||||||
|
|
||||||
|
max_streak AS (
|
||||||
|
SELECT
|
||||||
|
MAX(streak) AS max_streak,
|
||||||
|
start_date AS max_streak_start_date,
|
||||||
|
end_date AS max_streak_end_date,
|
||||||
|
window,
|
||||||
|
user_id
|
||||||
|
FROM streaks
|
||||||
|
GROUP BY user_id, window
|
||||||
|
),
|
||||||
|
|
||||||
|
current_streak AS (
|
||||||
|
SELECT
|
||||||
|
streak AS current_streak,
|
||||||
|
start_date AS current_streak_start_date,
|
||||||
|
end_date AS current_streak_end_date,
|
||||||
|
window,
|
||||||
|
user_id
|
||||||
|
FROM streaks
|
||||||
|
WHERE CASE
|
||||||
|
WHEN window = "WEEK" THEN
|
||||||
|
DATE(LOCAL_DATE(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone), 'weekday 0', '-14 day') = current_streak_end_date
|
||||||
|
OR DATE(LOCAL_DATE(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone), 'weekday 0', '-7 day') = current_streak_end_date
|
||||||
|
WHEN window = "DAY" THEN
|
||||||
|
DATE(LOCAL_DATE(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone), '-1 day') = current_streak_end_date
|
||||||
|
OR DATE(LOCAL_DATE(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone)) = current_streak_end_date
|
||||||
|
END
|
||||||
|
GROUP BY user_id, window
|
||||||
|
)
|
||||||
|
|
||||||
|
INSERT INTO user_streaks
|
||||||
|
SELECT
|
||||||
|
max_streak.user_id,
|
||||||
|
max_streak.window,
|
||||||
|
IFNULL(max_streak, 0) AS max_streak,
|
||||||
|
IFNULL(max_streak_start_date, "N/A") AS max_streak_start_date,
|
||||||
|
IFNULL(max_streak_end_date, "N/A") AS max_streak_end_date,
|
||||||
|
IFNULL(current_streak.current_streak, 0) AS current_streak,
|
||||||
|
IFNULL(current_streak.current_streak_start_date, "N/A") AS current_streak_start_date,
|
||||||
|
IFNULL(current_streak.current_streak_end_date, "N/A") AS current_streak_end_date,
|
||||||
|
outdated_users.last_timezone AS last_timezone,
|
||||||
|
outdated_users.last_seen AS last_seen,
|
||||||
|
outdated_users.last_record AS last_record,
|
||||||
|
outdated_users.last_calculated AS last_calculated
|
||||||
|
FROM max_streak
|
||||||
|
JOIN outdated_users ON max_streak.user_id = outdated_users.user_id
|
||||||
|
LEFT JOIN current_streak ON
|
||||||
|
current_streak.user_id = max_streak.user_id
|
||||||
|
AND current_streak.window = max_streak.window;
|
||||||
205
database/users_test.go
Normal file
205
database/users_test.go
Normal file
@@ -0,0 +1,205 @@
|
|||||||
|
package database
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
"fmt"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/suite"
|
||||||
|
|
||||||
|
"reichard.io/antholume/config"
|
||||||
|
"reichard.io/antholume/utils"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
testUserID string = "testUser"
|
||||||
|
testUserPass string = "testPass"
|
||||||
|
)
|
||||||
|
|
||||||
|
type UsersTestSuite struct {
|
||||||
|
suite.Suite
|
||||||
|
dbm *DBManager
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestUsers(t *testing.T) {
|
||||||
|
suite.Run(t, new(UsersTestSuite))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *UsersTestSuite) SetupTest() {
|
||||||
|
cfg := config.Config{
|
||||||
|
DBType: "memory",
|
||||||
|
}
|
||||||
|
|
||||||
|
suite.dbm = NewMgr(&cfg)
|
||||||
|
|
||||||
|
// Create User
|
||||||
|
rawAuthHash, _ := utils.GenerateToken(64)
|
||||||
|
authHash := fmt.Sprintf("%x", rawAuthHash)
|
||||||
|
_, err := suite.dbm.Queries.CreateUser(context.Background(), CreateUserParams{
|
||||||
|
ID: testUserID,
|
||||||
|
Pass: &testUserPass,
|
||||||
|
AuthHash: &authHash,
|
||||||
|
})
|
||||||
|
suite.NoError(err)
|
||||||
|
|
||||||
|
// Create Document
|
||||||
|
_, err = suite.dbm.Queries.UpsertDocument(context.Background(), UpsertDocumentParams{
|
||||||
|
ID: documentID,
|
||||||
|
Title: &documentTitle,
|
||||||
|
Author: &documentAuthor,
|
||||||
|
Words: &documentWords,
|
||||||
|
})
|
||||||
|
suite.NoError(err)
|
||||||
|
|
||||||
|
// Create Device
|
||||||
|
_, err = suite.dbm.Queries.UpsertDevice(context.Background(), UpsertDeviceParams{
|
||||||
|
ID: deviceID,
|
||||||
|
UserID: testUserID,
|
||||||
|
DeviceName: deviceName,
|
||||||
|
})
|
||||||
|
suite.NoError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *UsersTestSuite) TestGetUser() {
|
||||||
|
user, err := suite.dbm.Queries.GetUser(context.Background(), testUserID)
|
||||||
|
suite.Nil(err, "should have nil err")
|
||||||
|
suite.Equal(testUserPass, *user.Pass)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *UsersTestSuite) TestCreateUser() {
|
||||||
|
testUser := "user1"
|
||||||
|
testPass := "pass1"
|
||||||
|
|
||||||
|
// Generate Auth Hash
|
||||||
|
rawAuthHash, err := utils.GenerateToken(64)
|
||||||
|
suite.Nil(err, "should have nil err")
|
||||||
|
|
||||||
|
authHash := fmt.Sprintf("%x", rawAuthHash)
|
||||||
|
changed, err := suite.dbm.Queries.CreateUser(context.Background(), CreateUserParams{
|
||||||
|
ID: testUser,
|
||||||
|
Pass: &testPass,
|
||||||
|
AuthHash: &authHash,
|
||||||
|
})
|
||||||
|
|
||||||
|
suite.Nil(err, "should have nil err")
|
||||||
|
suite.Equal(int64(1), changed)
|
||||||
|
|
||||||
|
user, err := suite.dbm.Queries.GetUser(context.Background(), testUser)
|
||||||
|
suite.Nil(err, "should have nil err")
|
||||||
|
suite.Equal(testPass, *user.Pass)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *UsersTestSuite) TestDeleteUser() {
|
||||||
|
changed, err := suite.dbm.Queries.DeleteUser(context.Background(), testUserID)
|
||||||
|
suite.Nil(err, "should have nil err")
|
||||||
|
suite.Equal(int64(1), changed, "should have one changed row")
|
||||||
|
|
||||||
|
_, err = suite.dbm.Queries.GetUser(context.Background(), testUserID)
|
||||||
|
suite.ErrorIs(err, sql.ErrNoRows, "should have no rows error")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *UsersTestSuite) TestGetUsers() {
|
||||||
|
users, err := suite.dbm.Queries.GetUsers(context.Background())
|
||||||
|
suite.Nil(err, "should have nil err")
|
||||||
|
suite.Len(users, 1, "should have single user")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *UsersTestSuite) TestUpdateUser() {
|
||||||
|
newPassword := "newPass123"
|
||||||
|
user, err := suite.dbm.Queries.UpdateUser(context.Background(), UpdateUserParams{
|
||||||
|
UserID: testUserID,
|
||||||
|
Password: &newPassword,
|
||||||
|
})
|
||||||
|
suite.Nil(err, "should have nil err")
|
||||||
|
suite.Equal(newPassword, *user.Pass, "should have new password")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *UsersTestSuite) TestGetUserStatistics() {
|
||||||
|
err := suite.dbm.CacheTempTables(context.Background())
|
||||||
|
suite.NoError(err)
|
||||||
|
|
||||||
|
// Ensure Zero Items
|
||||||
|
userStats, err := suite.dbm.Queries.GetUserStatistics(context.Background())
|
||||||
|
suite.Nil(err, "should have nil err")
|
||||||
|
suite.Empty(userStats, "should be empty")
|
||||||
|
|
||||||
|
// Create Activity
|
||||||
|
end := time.Now()
|
||||||
|
start := end.AddDate(0, 0, -9)
|
||||||
|
var counter int64 = 0
|
||||||
|
|
||||||
|
for d := start; d.After(end) == false; d = d.AddDate(0, 0, 1) {
|
||||||
|
counter += 1
|
||||||
|
|
||||||
|
// Add Item
|
||||||
|
activity, err := suite.dbm.Queries.AddActivity(context.Background(), AddActivityParams{
|
||||||
|
DocumentID: documentID,
|
||||||
|
DeviceID: deviceID,
|
||||||
|
UserID: testUserID,
|
||||||
|
StartTime: d.UTC().Format(time.RFC3339),
|
||||||
|
Duration: 60,
|
||||||
|
StartPercentage: float64(counter) / 100.0,
|
||||||
|
EndPercentage: float64(counter+1) / 100.0,
|
||||||
|
})
|
||||||
|
|
||||||
|
suite.Nil(err, fmt.Sprintf("[%d] should have nil err for add activity", counter))
|
||||||
|
suite.Equal(counter, activity.ID, fmt.Sprintf("[%d] should have correct id for add activity", counter))
|
||||||
|
}
|
||||||
|
|
||||||
|
err = suite.dbm.CacheTempTables(context.Background())
|
||||||
|
suite.NoError(err)
|
||||||
|
|
||||||
|
// Ensure One Item
|
||||||
|
userStats, err = suite.dbm.Queries.GetUserStatistics(context.Background())
|
||||||
|
suite.Nil(err, "should have nil err")
|
||||||
|
suite.Len(userStats, 1, "should have length of one")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *UsersTestSuite) TestGetUsersStreaks() {
|
||||||
|
err := suite.dbm.CacheTempTables(context.Background())
|
||||||
|
suite.NoError(err)
|
||||||
|
|
||||||
|
// Ensure Zero Items
|
||||||
|
userStats, err := suite.dbm.Queries.GetUserStreaks(context.Background(), testUserID)
|
||||||
|
suite.Nil(err, "should have nil err")
|
||||||
|
suite.Empty(userStats, "should be empty")
|
||||||
|
|
||||||
|
// Create Activity
|
||||||
|
end := time.Now()
|
||||||
|
start := end.AddDate(0, 0, -9)
|
||||||
|
var counter int64 = 0
|
||||||
|
|
||||||
|
for d := start; d.After(end) == false; d = d.AddDate(0, 0, 1) {
|
||||||
|
counter += 1
|
||||||
|
|
||||||
|
// Add Item
|
||||||
|
activity, err := suite.dbm.Queries.AddActivity(context.Background(), AddActivityParams{
|
||||||
|
DocumentID: documentID,
|
||||||
|
DeviceID: deviceID,
|
||||||
|
UserID: testUserID,
|
||||||
|
StartTime: d.UTC().Format(time.RFC3339),
|
||||||
|
Duration: 60,
|
||||||
|
StartPercentage: float64(counter) / 100.0,
|
||||||
|
EndPercentage: float64(counter+1) / 100.0,
|
||||||
|
})
|
||||||
|
|
||||||
|
suite.Nil(err, fmt.Sprintf("[%d] should have nil err for add activity", counter))
|
||||||
|
suite.Equal(counter, activity.ID, fmt.Sprintf("[%d] should have correct id for add activity", counter))
|
||||||
|
}
|
||||||
|
|
||||||
|
err = suite.dbm.CacheTempTables(context.Background())
|
||||||
|
suite.NoError(err)
|
||||||
|
|
||||||
|
// Ensure Two Item
|
||||||
|
userStats, err = suite.dbm.Queries.GetUserStreaks(context.Background(), testUserID)
|
||||||
|
suite.Nil(err, "should have nil err")
|
||||||
|
suite.Len(userStats, 2, "should have length of two")
|
||||||
|
|
||||||
|
// Ensure Streak Stats
|
||||||
|
dayStats := userStats[0]
|
||||||
|
weekStats := userStats[1]
|
||||||
|
suite.Equal(int64(10), dayStats.CurrentStreak, "should be 10 days")
|
||||||
|
suite.Greater(weekStats.CurrentStreak, int64(1), "should be 2 or 3")
|
||||||
|
}
|
||||||
61
flake.lock
generated
Normal file
61
flake.lock
generated
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
{
|
||||||
|
"nodes": {
|
||||||
|
"flake-utils": {
|
||||||
|
"inputs": {
|
||||||
|
"systems": "systems"
|
||||||
|
},
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1731533236,
|
||||||
|
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
|
||||||
|
"owner": "numtide",
|
||||||
|
"repo": "flake-utils",
|
||||||
|
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "numtide",
|
||||||
|
"repo": "flake-utils",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"nixpkgs": {
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1773524153,
|
||||||
|
"narHash": "sha256-Jms57zzlFf64ayKzzBWSE2SGvJmK+NGt8Gli71d9kmY=",
|
||||||
|
"owner": "NixOS",
|
||||||
|
"repo": "nixpkgs",
|
||||||
|
"rev": "e9f278faa1d0c2fc835bd331d4666b59b505a410",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "NixOS",
|
||||||
|
"ref": "nixos-25.11",
|
||||||
|
"repo": "nixpkgs",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"root": {
|
||||||
|
"inputs": {
|
||||||
|
"flake-utils": "flake-utils",
|
||||||
|
"nixpkgs": "nixpkgs"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"systems": {
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1681028828,
|
||||||
|
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
|
||||||
|
"owner": "nix-systems",
|
||||||
|
"repo": "default",
|
||||||
|
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "nix-systems",
|
||||||
|
"repo": "default",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"root": "root",
|
||||||
|
"version": 7
|
||||||
|
}
|
||||||
37
flake.nix
Normal file
37
flake.nix
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
{
|
||||||
|
description = "Development Environment";
|
||||||
|
|
||||||
|
inputs = {
|
||||||
|
nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.11";
|
||||||
|
flake-utils.url = "github:numtide/flake-utils";
|
||||||
|
};
|
||||||
|
|
||||||
|
outputs =
|
||||||
|
{ self
|
||||||
|
, nixpkgs
|
||||||
|
, flake-utils
|
||||||
|
,
|
||||||
|
}:
|
||||||
|
flake-utils.lib.eachDefaultSystem (
|
||||||
|
system:
|
||||||
|
let
|
||||||
|
pkgs = nixpkgs.legacyPackages.${system};
|
||||||
|
in
|
||||||
|
{
|
||||||
|
devShells.default = pkgs.mkShell {
|
||||||
|
packages = with pkgs; [
|
||||||
|
go
|
||||||
|
golangci-lint
|
||||||
|
gopls
|
||||||
|
|
||||||
|
bun
|
||||||
|
nodejs
|
||||||
|
tailwindcss
|
||||||
|
];
|
||||||
|
shellHook = ''
|
||||||
|
export PATH=$PATH:~/go/bin
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
2
frontend/.gitignore
vendored
Normal file
2
frontend/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
node_modules
|
||||||
|
dist
|
||||||
2
frontend/.prettierignore
Normal file
2
frontend/.prettierignore
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
# Generated API code
|
||||||
|
src/generated/**/*
|
||||||
11
frontend/.prettierrc
Normal file
11
frontend/.prettierrc
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
{
|
||||||
|
"semi": true,
|
||||||
|
"singleQuote": true,
|
||||||
|
"tabWidth": 2,
|
||||||
|
"useTabs": false,
|
||||||
|
"trailingComma": "es5",
|
||||||
|
"printWidth": 100,
|
||||||
|
"bracketSpacing": true,
|
||||||
|
"arrowParens": "avoid",
|
||||||
|
"endOfLine": "lf"
|
||||||
|
}
|
||||||
76
frontend/AGENTS.md
Normal file
76
frontend/AGENTS.md
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
# AnthoLume Frontend Agent Guide
|
||||||
|
|
||||||
|
Read this file for work in `frontend/`.
|
||||||
|
Also follow the repository root guide at `../AGENTS.md`.
|
||||||
|
|
||||||
|
## 1) Stack
|
||||||
|
|
||||||
|
- Package manager: `bun`
|
||||||
|
- Framework: React + Vite
|
||||||
|
- Data fetching: React Query
|
||||||
|
- API generation: Orval
|
||||||
|
- Linting: ESLint + Tailwind plugin
|
||||||
|
- Formatting: Prettier
|
||||||
|
|
||||||
|
## 2) Conventions
|
||||||
|
|
||||||
|
- Use local icon components from `src/icons/`.
|
||||||
|
- Do not add external icon libraries.
|
||||||
|
- Prefer generated types from `src/generated/model/` over `any`.
|
||||||
|
- Avoid custom class names in JSX `className` values unless the Tailwind lint config already allows them.
|
||||||
|
- For decorative icons in inputs or labels, disable hover styling via the icon component API rather than overriding it ad hoc.
|
||||||
|
- Prefer `LoadingState` for result-area loading indicators; avoid early returns that unmount search/filter forms during fetches.
|
||||||
|
- Use theme tokens from `tailwind.config.js` / `src/index.css` (`bg-surface`, `text-content`, `border-border`, `primary`, etc.) for new UI work instead of adding raw light/dark color pairs.
|
||||||
|
- Store frontend-only preferences in `src/utils/localSettings.ts` so appearance and view settings share one local-storage shape.
|
||||||
|
|
||||||
|
## 3) Generated API client
|
||||||
|
|
||||||
|
- Do not edit `src/generated/**` directly.
|
||||||
|
- Edit `../api/v1/openapi.yaml` and regenerate instead.
|
||||||
|
- Regenerate with: `bun run generate:api`
|
||||||
|
|
||||||
|
### Important behavior
|
||||||
|
|
||||||
|
- The generated client returns `{ data, status, headers }` for both success and error responses.
|
||||||
|
- Do not assume non-2xx responses throw.
|
||||||
|
- Check `response.status` and response shape before treating a request as successful.
|
||||||
|
|
||||||
|
## 4) Auth / Query State
|
||||||
|
|
||||||
|
- When changing auth flows, account for React Query cache state.
|
||||||
|
- Pay special attention to `/api/v1/auth/me`.
|
||||||
|
- A local auth state update may not be enough if cached query data still reflects a previous auth state.
|
||||||
|
|
||||||
|
## 5) Commands
|
||||||
|
|
||||||
|
- Lint: `bun run lint`
|
||||||
|
- Typecheck: `bun run typecheck`
|
||||||
|
- Lint fix: `bun run lint:fix`
|
||||||
|
- Format check: `bun run format`
|
||||||
|
- Format fix: `bun run format:fix`
|
||||||
|
- Build: `bun run build`
|
||||||
|
- Generate API client: `bun run generate:api`
|
||||||
|
|
||||||
|
## 6) Validation Notes
|
||||||
|
|
||||||
|
- ESLint ignores `src/generated/**`.
|
||||||
|
- Frontend unit tests use Vitest and live alongside source as `src/**/*.test.ts(x)`.
|
||||||
|
- Read `TESTING_STRATEGY.md` before adding or expanding frontend tests.
|
||||||
|
- Prefer tests for meaningful app behavior, branching logic, side effects, and user-visible outcomes.
|
||||||
|
- Avoid low-value tests that mainly assert exact styling classes, duplicate existing coverage, or re-test framework/library behavior.
|
||||||
|
- `bun run lint` includes test files but does not typecheck.
|
||||||
|
- Use `bun run typecheck` to run TypeScript validation for app code and colocated tests without a full production build.
|
||||||
|
- Run frontend tests with `bun run test`.
|
||||||
|
- `bun run build` still runs `tsc && vite build`, so unrelated TypeScript issues elsewhere in `src/` can fail the build.
|
||||||
|
- When possible, validate changed files directly before escalating to full-project fixes.
|
||||||
|
|
||||||
|
## 7) Updating This File
|
||||||
|
|
||||||
|
After completing a frontend task, update this file if you learned something general that would help future frontend agents.
|
||||||
|
|
||||||
|
Rules for updates:
|
||||||
|
|
||||||
|
- Add only frontend-wide guidance.
|
||||||
|
- Do not record one-off task history.
|
||||||
|
- Keep updates concise and action-oriented.
|
||||||
|
- Prefer notes that prevent repeated mistakes.
|
||||||
111
frontend/README.md
Normal file
111
frontend/README.md
Normal file
@@ -0,0 +1,111 @@
|
|||||||
|
# AnthoLume Frontend
|
||||||
|
|
||||||
|
A React + TypeScript frontend for AnthoLume, replacing the server-side rendering (SSR) templates.
|
||||||
|
|
||||||
|
## Tech Stack
|
||||||
|
|
||||||
|
- **React 19** - UI framework
|
||||||
|
- **TypeScript** - Type safety
|
||||||
|
- **React Query (TanStack Query)** - Server state management
|
||||||
|
- **Orval** - API client generation from OpenAPI spec
|
||||||
|
- **React Router** - Navigation
|
||||||
|
- **Tailwind CSS** - Styling
|
||||||
|
- **Vite** - Build tool
|
||||||
|
- **Axios** - HTTP client with auth interceptors
|
||||||
|
|
||||||
|
## Authentication
|
||||||
|
|
||||||
|
The frontend includes a complete authentication system:
|
||||||
|
|
||||||
|
### Auth Context
|
||||||
|
- `AuthProvider` - Manages authentication state globally
|
||||||
|
- `useAuth()` - Hook to access auth state and methods
|
||||||
|
- Token stored in `localStorage`
|
||||||
|
- Axios interceptors automatically attach Bearer token to API requests
|
||||||
|
|
||||||
|
### Protected Routes
|
||||||
|
- All main routes are wrapped in `ProtectedRoute`
|
||||||
|
- Unauthenticated users are redirected to `/login`
|
||||||
|
- Layout redirects to login if not authenticated
|
||||||
|
|
||||||
|
### Login Flow
|
||||||
|
1. User enters credentials on `/login`
|
||||||
|
2. POST to `/api/v1/auth/login`
|
||||||
|
3. Token stored in localStorage
|
||||||
|
4. Redirect to home page
|
||||||
|
5. Axios interceptor includes token in subsequent requests
|
||||||
|
|
||||||
|
### Logout Flow
|
||||||
|
1. User clicks "Logout" in dropdown menu
|
||||||
|
2. POST to `/api/v1/auth/logout`
|
||||||
|
3. Token cleared from localStorage
|
||||||
|
4. Redirect to `/login`
|
||||||
|
|
||||||
|
### 401 Handling
|
||||||
|
- Axios response interceptor clears token on 401 errors
|
||||||
|
- Prevents stale auth state
|
||||||
|
|
||||||
|
## Architecture
|
||||||
|
|
||||||
|
The frontend mirrors the existing SSR templates structure:
|
||||||
|
|
||||||
|
### Pages
|
||||||
|
- `HomePage` - Landing page with recent documents
|
||||||
|
- `DocumentsPage` - Document listing with search and pagination
|
||||||
|
- `DocumentPage` - Single document view with details
|
||||||
|
- `ProgressPage` - Reading progress table
|
||||||
|
- `ActivityPage` - User activity log
|
||||||
|
- `SearchPage` - Search interface
|
||||||
|
- `SettingsPage` - User settings
|
||||||
|
- `LoginPage` - Authentication
|
||||||
|
|
||||||
|
### Components
|
||||||
|
- `Layout` - Main layout with navigation sidebar and header
|
||||||
|
- Generated API hooks from `api/v1/openapi.yaml`
|
||||||
|
|
||||||
|
## API Integration
|
||||||
|
|
||||||
|
The frontend uses **Orval** to generate TypeScript types and React Query hooks from the OpenAPI spec:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm run generate:api
|
||||||
|
```
|
||||||
|
|
||||||
|
This generates:
|
||||||
|
- Type definitions for all API schemas
|
||||||
|
- React Query hooks (`useGetDocuments`, `useGetDocument`, etc.)
|
||||||
|
- Mutation hooks (`useLogin`, `useLogout`)
|
||||||
|
|
||||||
|
## Development
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Install dependencies
|
||||||
|
npm install
|
||||||
|
|
||||||
|
# Generate API types (if OpenAPI spec changes)
|
||||||
|
npm run generate:api
|
||||||
|
|
||||||
|
# Start development server
|
||||||
|
npm run dev
|
||||||
|
|
||||||
|
# Build for production
|
||||||
|
npm run build
|
||||||
|
```
|
||||||
|
|
||||||
|
## Deployment
|
||||||
|
|
||||||
|
The built output is in `dist/` and can be served by the Go backend or deployed separately.
|
||||||
|
|
||||||
|
## Migration from SSR
|
||||||
|
|
||||||
|
The frontend replicates the functionality of the following SSR templates:
|
||||||
|
- `templates/pages/home.tmpl` → `HomePage.tsx`
|
||||||
|
- `templates/pages/documents.tmpl` → `DocumentsPage.tsx`
|
||||||
|
- `templates/pages/document.tmpl` → `DocumentPage.tsx`
|
||||||
|
- `templates/pages/progress.tmpl` → `ProgressPage.tsx`
|
||||||
|
- `templates/pages/activity.tmpl` → `ActivityPage.tsx`
|
||||||
|
- `templates/pages/search.tmpl` → `SearchPage.tsx`
|
||||||
|
- `templates/pages/settings.tmpl` → `SettingsPage.tsx`
|
||||||
|
- `templates/pages/login.tmpl` → `LoginPage.tsx`
|
||||||
|
|
||||||
|
The styling follows the same Tailwind CSS classes as the original templates for consistency.
|
||||||
73
frontend/TESTING_STRATEGY.md
Normal file
73
frontend/TESTING_STRATEGY.md
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
# Frontend Testing Strategy
|
||||||
|
|
||||||
|
This project prefers meaningful frontend tests over high test counts.
|
||||||
|
|
||||||
|
## What we want to test
|
||||||
|
|
||||||
|
Prioritize tests for app-owned behavior such as:
|
||||||
|
|
||||||
|
- user-visible page and component behavior
|
||||||
|
- auth and routing behavior
|
||||||
|
- branching logic and business rules
|
||||||
|
- data normalization and error handling
|
||||||
|
- timing behavior with real app logic
|
||||||
|
- side effects that could regress, such as token handling or redirects
|
||||||
|
- algorithmic or formatting logic that defines product behavior
|
||||||
|
|
||||||
|
Good examples in this repo:
|
||||||
|
|
||||||
|
- login and registration flows
|
||||||
|
- protected-route behavior
|
||||||
|
- auth interceptor token injection and cleanup
|
||||||
|
- error message extraction
|
||||||
|
- debounce timing
|
||||||
|
- human-readable formatting logic
|
||||||
|
- graph/algorithm output where exact parity matters
|
||||||
|
|
||||||
|
## What we usually do not want to test
|
||||||
|
|
||||||
|
Avoid tests that mostly prove:
|
||||||
|
|
||||||
|
- the language/runtime works
|
||||||
|
- React forwards basic props correctly
|
||||||
|
- a third-party library behaves as documented
|
||||||
|
- exact Tailwind class strings with no product meaning
|
||||||
|
- implementation details not observable in behavior
|
||||||
|
- duplicated examples that re-assert the same logic
|
||||||
|
|
||||||
|
In other words, do not add tests equivalent to checking that JavaScript can compute `1 + 1`.
|
||||||
|
|
||||||
|
## Preferred test style
|
||||||
|
|
||||||
|
- Prefer behavior-focused assertions over implementation-detail assertions.
|
||||||
|
- Prefer user-visible outcomes over internal state inspection.
|
||||||
|
- Mock at module boundaries when needed.
|
||||||
|
- Keep test setup small and local.
|
||||||
|
- Use exact-output assertions only when the output itself is the contract.
|
||||||
|
|
||||||
|
## When exact assertions are appropriate
|
||||||
|
|
||||||
|
Exact assertions are appropriate when they protect a real contract, for example:
|
||||||
|
|
||||||
|
- a formatter's exact human-readable output
|
||||||
|
- auth decision outcomes for a given API response shape
|
||||||
|
- exact algorithm output that must remain stable
|
||||||
|
|
||||||
|
Exact assertions are usually not appropriate for:
|
||||||
|
|
||||||
|
- incidental class names
|
||||||
|
- framework internals
|
||||||
|
- non-observable React keys
|
||||||
|
|
||||||
|
## Cleanup rule of thumb
|
||||||
|
|
||||||
|
Keep tests that would catch meaningful regressions in product behavior.
|
||||||
|
Trim or remove tests that are brittle, duplicated, or mostly validate tooling rather than app logic.
|
||||||
|
|
||||||
|
## Validation
|
||||||
|
|
||||||
|
For frontend test work, validate with:
|
||||||
|
|
||||||
|
- `cd frontend && bun run lint`
|
||||||
|
- `cd frontend && bun run typecheck`
|
||||||
|
- `cd frontend && bun run test`
|
||||||
1350
frontend/bun.lock
Normal file
1350
frontend/bun.lock
Normal file
File diff suppressed because it is too large
Load Diff
82
frontend/eslint.config.js
Normal file
82
frontend/eslint.config.js
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|
import js from "@eslint/js";
|
||||||
|
import typescriptParser from "@typescript-eslint/parser";
|
||||||
|
import typescriptPlugin from "@typescript-eslint/eslint-plugin";
|
||||||
|
import reactPlugin from "eslint-plugin-react";
|
||||||
|
import reactHooksPlugin from "eslint-plugin-react-hooks";
|
||||||
|
import tailwindcss from "eslint-plugin-tailwindcss";
|
||||||
|
import prettier from "eslint-plugin-prettier";
|
||||||
|
import eslintConfigPrettier from "eslint-config-prettier";
|
||||||
|
|
||||||
|
export default [
|
||||||
|
js.configs.recommended,
|
||||||
|
{
|
||||||
|
files: ["**/*.ts", "**/*.tsx"],
|
||||||
|
ignores: ["**/generated/**"],
|
||||||
|
languageOptions: {
|
||||||
|
parser: typescriptParser,
|
||||||
|
parserOptions: {
|
||||||
|
ecmaVersion: "latest",
|
||||||
|
sourceType: "module",
|
||||||
|
ecmaFeatures: {
|
||||||
|
jsx: true,
|
||||||
|
},
|
||||||
|
projectService: true,
|
||||||
|
},
|
||||||
|
globals: {
|
||||||
|
localStorage: "readonly",
|
||||||
|
sessionStorage: "readonly",
|
||||||
|
document: "readonly",
|
||||||
|
window: "readonly",
|
||||||
|
setTimeout: "readonly",
|
||||||
|
clearTimeout: "readonly",
|
||||||
|
setInterval: "readonly",
|
||||||
|
clearInterval: "readonly",
|
||||||
|
HTMLElement: "readonly",
|
||||||
|
HTMLDivElement: "readonly",
|
||||||
|
HTMLButtonElement: "readonly",
|
||||||
|
HTMLAnchorElement: "readonly",
|
||||||
|
MouseEvent: "readonly",
|
||||||
|
Node: "readonly",
|
||||||
|
File: "readonly",
|
||||||
|
Blob: "readonly",
|
||||||
|
FormData: "readonly",
|
||||||
|
alert: "readonly",
|
||||||
|
confirm: "readonly",
|
||||||
|
prompt: "readonly",
|
||||||
|
React: "readonly",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
plugins: {
|
||||||
|
"@typescript-eslint": typescriptPlugin,
|
||||||
|
react: reactPlugin,
|
||||||
|
"react-hooks": reactHooksPlugin,
|
||||||
|
tailwindcss,
|
||||||
|
prettier,
|
||||||
|
},
|
||||||
|
rules: {
|
||||||
|
...eslintConfigPrettier.rules,
|
||||||
|
...tailwindcss.configs.recommended.rules,
|
||||||
|
"react/react-in-jsx-scope": "off",
|
||||||
|
"react/prop-types": "off",
|
||||||
|
"no-console": ["warn", { allow: ["warn", "error"] }],
|
||||||
|
"no-undef": "off",
|
||||||
|
"@typescript-eslint/no-explicit-any": "warn",
|
||||||
|
"no-unused-vars": "off",
|
||||||
|
"@typescript-eslint/no-unused-vars": [
|
||||||
|
"error",
|
||||||
|
{
|
||||||
|
argsIgnorePattern: "^_",
|
||||||
|
varsIgnorePattern: "^_",
|
||||||
|
caughtErrorsIgnorePattern: "^_",
|
||||||
|
ignoreRestSiblings: true,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
"no-useless-catch": "off",
|
||||||
|
},
|
||||||
|
settings: {
|
||||||
|
react: {
|
||||||
|
version: "detect",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
];
|
||||||
31
frontend/index.html
Normal file
31
frontend/index.html
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
<!doctype html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="utf-8" />
|
||||||
|
<meta
|
||||||
|
name="viewport"
|
||||||
|
content="width=device-width, initial-scale=0.90, user-scalable=no, viewport-fit=cover"
|
||||||
|
/>
|
||||||
|
<meta name="apple-mobile-web-app-capable" content="yes" />
|
||||||
|
<meta
|
||||||
|
name="apple-mobile-web-app-status-bar-style"
|
||||||
|
content="black-translucent"
|
||||||
|
/>
|
||||||
|
<meta
|
||||||
|
name="theme-color"
|
||||||
|
content="#F3F4F6"
|
||||||
|
media="(prefers-color-scheme: light)"
|
||||||
|
/>
|
||||||
|
<meta
|
||||||
|
name="theme-color"
|
||||||
|
content="#1F2937"
|
||||||
|
media="(prefers-color-scheme: dark)"
|
||||||
|
/>
|
||||||
|
<title>AnthoLume</title>
|
||||||
|
<link rel="manifest" href="/manifest.json" />
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<div id="root"></div>
|
||||||
|
<script type="module" src="/src/main.tsx"></script>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
21
frontend/orval.config.ts
Normal file
21
frontend/orval.config.ts
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
import { defineConfig } from 'orval';
|
||||||
|
|
||||||
|
export default defineConfig({
|
||||||
|
antholume: {
|
||||||
|
output: {
|
||||||
|
mode: 'split',
|
||||||
|
baseUrl: '/api/v1',
|
||||||
|
target: 'src/generated',
|
||||||
|
schemas: 'src/generated/model',
|
||||||
|
client: 'react-query',
|
||||||
|
mock: false,
|
||||||
|
override: {
|
||||||
|
useQuery: true,
|
||||||
|
mutations: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
input: {
|
||||||
|
target: '../api/v1/openapi.yaml',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
56
frontend/package.json
Normal file
56
frontend/package.json
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
{
|
||||||
|
"name": "antholume-frontend",
|
||||||
|
"private": true,
|
||||||
|
"version": "1.0.0",
|
||||||
|
"type": "module",
|
||||||
|
"scripts": {
|
||||||
|
"dev": "vite",
|
||||||
|
"typecheck": "tsc --noEmit",
|
||||||
|
"build": "tsc && vite build",
|
||||||
|
"preview": "vite preview",
|
||||||
|
"generate:api": "orval",
|
||||||
|
"lint": "eslint src --max-warnings=0",
|
||||||
|
"lint:fix": "eslint src --fix",
|
||||||
|
"format": "prettier --check src",
|
||||||
|
"format:fix": "prettier --write src",
|
||||||
|
"test": "vitest run"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@tanstack/react-query": "^5.62.16",
|
||||||
|
"ajv": "^8.18.0",
|
||||||
|
"axios": "^1.13.6",
|
||||||
|
"clsx": "^2.1.1",
|
||||||
|
"epubjs": "^0.3.93",
|
||||||
|
"nosleep.js": "^0.12.0",
|
||||||
|
"orval": "8.5.3",
|
||||||
|
"react": "^19.0.0",
|
||||||
|
"react-dom": "^19.0.0",
|
||||||
|
"react-router-dom": "^7.1.1",
|
||||||
|
"tailwind-merge": "^3.5.0"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@eslint/js": "^9.17.0",
|
||||||
|
"@testing-library/jest-dom": "^6.9.1",
|
||||||
|
"@testing-library/react": "^16.3.2",
|
||||||
|
"@testing-library/user-event": "^14.6.1",
|
||||||
|
"@types/react": "^19.0.8",
|
||||||
|
"@types/react-dom": "^19.0.8",
|
||||||
|
"@typescript-eslint/eslint-plugin": "^8.13.0",
|
||||||
|
"@typescript-eslint/parser": "^8.13.0",
|
||||||
|
"@vitejs/plugin-react": "^4.3.4",
|
||||||
|
"autoprefixer": "^10.4.20",
|
||||||
|
"eslint": "^9.17.0",
|
||||||
|
"eslint-config-prettier": "^9.1.0",
|
||||||
|
"eslint-plugin-prettier": "^5.2.1",
|
||||||
|
"eslint-plugin-react": "^7.37.5",
|
||||||
|
"eslint-plugin-react-hooks": "^5.0.0",
|
||||||
|
"eslint-plugin-tailwindcss": "^3.18.2",
|
||||||
|
"jsdom": "^29.0.1",
|
||||||
|
"postcss": "^8.4.49",
|
||||||
|
"prettier": "^3.3.3",
|
||||||
|
"tailwindcss": "^3.4.17",
|
||||||
|
"typescript": "~5.6.2",
|
||||||
|
"vite": "^6.0.5",
|
||||||
|
"vitest": "^4.1.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
6
frontend/postcss.config.js
Normal file
6
frontend/postcss.config.js
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
export default {
|
||||||
|
plugins: {
|
||||||
|
tailwindcss: {},
|
||||||
|
autoprefixer: {},
|
||||||
|
},
|
||||||
|
};
|
||||||
12
frontend/src/App.tsx
Normal file
12
frontend/src/App.tsx
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
import { AuthProvider } from './auth/AuthContext';
|
||||||
|
import { Routes } from './Routes';
|
||||||
|
|
||||||
|
function App() {
|
||||||
|
return (
|
||||||
|
<AuthProvider>
|
||||||
|
<Routes />
|
||||||
|
</AuthProvider>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
export default App;
|
||||||
134
frontend/src/Routes.tsx
Normal file
134
frontend/src/Routes.tsx
Normal file
@@ -0,0 +1,134 @@
|
|||||||
|
import { Route, Routes as ReactRoutes } from 'react-router-dom';
|
||||||
|
import Layout from './components/Layout';
|
||||||
|
import HomePage from './pages/HomePage';
|
||||||
|
import DocumentsPage from './pages/DocumentsPage';
|
||||||
|
import DocumentPage from './pages/DocumentPage';
|
||||||
|
import ProgressPage from './pages/ProgressPage';
|
||||||
|
import ActivityPage from './pages/ActivityPage';
|
||||||
|
import SearchPage from './pages/SearchPage';
|
||||||
|
import SettingsPage from './pages/SettingsPage';
|
||||||
|
import LoginPage from './pages/LoginPage';
|
||||||
|
import RegisterPage from './pages/RegisterPage';
|
||||||
|
import AdminPage from './pages/AdminPage';
|
||||||
|
import AdminImportPage from './pages/AdminImportPage';
|
||||||
|
import AdminImportResultsPage from './pages/AdminImportResultsPage';
|
||||||
|
import AdminUsersPage from './pages/AdminUsersPage';
|
||||||
|
import AdminLogsPage from './pages/AdminLogsPage';
|
||||||
|
import ReaderPage from './pages/ReaderPage';
|
||||||
|
import { ProtectedRoute } from './auth/ProtectedRoute';
|
||||||
|
|
||||||
|
export function Routes() {
|
||||||
|
return (
|
||||||
|
<ReactRoutes>
|
||||||
|
<Route path="/" element={<Layout />}>
|
||||||
|
<Route
|
||||||
|
index
|
||||||
|
element={
|
||||||
|
<ProtectedRoute>
|
||||||
|
<HomePage />
|
||||||
|
</ProtectedRoute>
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
<Route
|
||||||
|
path="documents"
|
||||||
|
element={
|
||||||
|
<ProtectedRoute>
|
||||||
|
<DocumentsPage />
|
||||||
|
</ProtectedRoute>
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
<Route
|
||||||
|
path="documents/:id"
|
||||||
|
element={
|
||||||
|
<ProtectedRoute>
|
||||||
|
<DocumentPage />
|
||||||
|
</ProtectedRoute>
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
<Route
|
||||||
|
path="progress"
|
||||||
|
element={
|
||||||
|
<ProtectedRoute>
|
||||||
|
<ProgressPage />
|
||||||
|
</ProtectedRoute>
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
<Route
|
||||||
|
path="activity"
|
||||||
|
element={
|
||||||
|
<ProtectedRoute>
|
||||||
|
<ActivityPage />
|
||||||
|
</ProtectedRoute>
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
<Route
|
||||||
|
path="search"
|
||||||
|
element={
|
||||||
|
<ProtectedRoute>
|
||||||
|
<SearchPage />
|
||||||
|
</ProtectedRoute>
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
<Route
|
||||||
|
path="settings"
|
||||||
|
element={
|
||||||
|
<ProtectedRoute>
|
||||||
|
<SettingsPage />
|
||||||
|
</ProtectedRoute>
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
{/* Admin routes */}
|
||||||
|
<Route
|
||||||
|
path="admin"
|
||||||
|
element={
|
||||||
|
<ProtectedRoute>
|
||||||
|
<AdminPage />
|
||||||
|
</ProtectedRoute>
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
<Route
|
||||||
|
path="admin/import"
|
||||||
|
element={
|
||||||
|
<ProtectedRoute>
|
||||||
|
<AdminImportPage />
|
||||||
|
</ProtectedRoute>
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
<Route
|
||||||
|
path="admin/import-results"
|
||||||
|
element={
|
||||||
|
<ProtectedRoute>
|
||||||
|
<AdminImportResultsPage />
|
||||||
|
</ProtectedRoute>
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
<Route
|
||||||
|
path="admin/users"
|
||||||
|
element={
|
||||||
|
<ProtectedRoute>
|
||||||
|
<AdminUsersPage />
|
||||||
|
</ProtectedRoute>
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
<Route
|
||||||
|
path="admin/logs"
|
||||||
|
element={
|
||||||
|
<ProtectedRoute>
|
||||||
|
<AdminLogsPage />
|
||||||
|
</ProtectedRoute>
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
</Route>
|
||||||
|
<Route
|
||||||
|
path="/reader/:id"
|
||||||
|
element={
|
||||||
|
<ProtectedRoute>
|
||||||
|
<ReaderPage />
|
||||||
|
</ProtectedRoute>
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
<Route path="/login" element={<LoginPage />} />
|
||||||
|
<Route path="/register" element={<RegisterPage />} />
|
||||||
|
</ReactRoutes>
|
||||||
|
);
|
||||||
|
}
|
||||||
135
frontend/src/auth/AuthContext.tsx
Normal file
135
frontend/src/auth/AuthContext.tsx
Normal file
@@ -0,0 +1,135 @@
|
|||||||
|
import { createContext, useContext, useState, useEffect, ReactNode, useCallback } from 'react';
|
||||||
|
import { useQueryClient } from '@tanstack/react-query';
|
||||||
|
import { useNavigate } from 'react-router-dom';
|
||||||
|
import {
|
||||||
|
getGetMeQueryKey,
|
||||||
|
useLogin,
|
||||||
|
useLogout,
|
||||||
|
useGetMe,
|
||||||
|
useRegister,
|
||||||
|
} from '../generated/anthoLumeAPIV1';
|
||||||
|
import {
|
||||||
|
type AuthState,
|
||||||
|
getAuthenticatedAuthState,
|
||||||
|
getUnauthenticatedAuthState,
|
||||||
|
resolveAuthStateFromMe,
|
||||||
|
validateAuthMutationResponse,
|
||||||
|
} from './authHelpers';
|
||||||
|
|
||||||
|
interface AuthContextType extends AuthState {
|
||||||
|
login: (_username: string, _password: string) => Promise<void>;
|
||||||
|
register: (_username: string, _password: string) => Promise<void>;
|
||||||
|
logout: () => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
const AuthContext = createContext<AuthContextType | undefined>(undefined);
|
||||||
|
|
||||||
|
const initialAuthState: AuthState = {
|
||||||
|
isAuthenticated: false,
|
||||||
|
user: null,
|
||||||
|
isCheckingAuth: true,
|
||||||
|
};
|
||||||
|
|
||||||
|
export function AuthProvider({ children }: { children: ReactNode }) {
|
||||||
|
const [authState, setAuthState] = useState<AuthState>(initialAuthState);
|
||||||
|
|
||||||
|
const loginMutation = useLogin();
|
||||||
|
const registerMutation = useRegister();
|
||||||
|
const logoutMutation = useLogout();
|
||||||
|
|
||||||
|
const { data: meData, error: meError, isLoading: meLoading } = useGetMe();
|
||||||
|
|
||||||
|
const queryClient = useQueryClient();
|
||||||
|
const navigate = useNavigate();
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
setAuthState(prev =>
|
||||||
|
resolveAuthStateFromMe({
|
||||||
|
meData,
|
||||||
|
meError,
|
||||||
|
meLoading,
|
||||||
|
previousState: prev,
|
||||||
|
})
|
||||||
|
);
|
||||||
|
}, [meData, meError, meLoading]);
|
||||||
|
|
||||||
|
const login = useCallback(
|
||||||
|
async (username: string, password: string) => {
|
||||||
|
try {
|
||||||
|
const response = await loginMutation.mutateAsync({
|
||||||
|
data: {
|
||||||
|
username,
|
||||||
|
password,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const user = validateAuthMutationResponse(response, 200);
|
||||||
|
if (!user) {
|
||||||
|
setAuthState(getUnauthenticatedAuthState());
|
||||||
|
throw new Error('Login failed');
|
||||||
|
}
|
||||||
|
|
||||||
|
setAuthState(getAuthenticatedAuthState(user));
|
||||||
|
|
||||||
|
await queryClient.invalidateQueries({ queryKey: getGetMeQueryKey() });
|
||||||
|
navigate('/');
|
||||||
|
} catch (_error) {
|
||||||
|
setAuthState(getUnauthenticatedAuthState());
|
||||||
|
throw new Error('Login failed');
|
||||||
|
}
|
||||||
|
},
|
||||||
|
[loginMutation, navigate, queryClient]
|
||||||
|
);
|
||||||
|
|
||||||
|
const register = useCallback(
|
||||||
|
async (username: string, password: string) => {
|
||||||
|
try {
|
||||||
|
const response = await registerMutation.mutateAsync({
|
||||||
|
data: {
|
||||||
|
username,
|
||||||
|
password,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const user = validateAuthMutationResponse(response, 201);
|
||||||
|
if (!user) {
|
||||||
|
setAuthState(getUnauthenticatedAuthState());
|
||||||
|
throw new Error('Registration failed');
|
||||||
|
}
|
||||||
|
|
||||||
|
setAuthState(getAuthenticatedAuthState(user));
|
||||||
|
|
||||||
|
await queryClient.invalidateQueries({ queryKey: getGetMeQueryKey() });
|
||||||
|
navigate('/');
|
||||||
|
} catch (_error) {
|
||||||
|
setAuthState(getUnauthenticatedAuthState());
|
||||||
|
throw new Error('Registration failed');
|
||||||
|
}
|
||||||
|
},
|
||||||
|
[navigate, queryClient, registerMutation]
|
||||||
|
);
|
||||||
|
|
||||||
|
const logout = useCallback(() => {
|
||||||
|
logoutMutation.mutate(undefined, {
|
||||||
|
onSuccess: async () => {
|
||||||
|
setAuthState(getUnauthenticatedAuthState());
|
||||||
|
await queryClient.removeQueries({ queryKey: getGetMeQueryKey() });
|
||||||
|
navigate('/login');
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}, [logoutMutation, navigate, queryClient]);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<AuthContext.Provider value={{ ...authState, login, register, logout }}>
|
||||||
|
{children}
|
||||||
|
</AuthContext.Provider>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function useAuth() {
|
||||||
|
const context = useContext(AuthContext);
|
||||||
|
if (context === undefined) {
|
||||||
|
throw new Error('useAuth must be used within an AuthProvider');
|
||||||
|
}
|
||||||
|
return context;
|
||||||
|
}
|
||||||
90
frontend/src/auth/ProtectedRoute.test.tsx
Normal file
90
frontend/src/auth/ProtectedRoute.test.tsx
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
import { describe, expect, it, vi, beforeEach } from 'vitest';
|
||||||
|
import { render, screen } from '@testing-library/react';
|
||||||
|
import { MemoryRouter, Route, Routes } from 'react-router-dom';
|
||||||
|
import { ProtectedRoute } from './ProtectedRoute';
|
||||||
|
import { useAuth } from './AuthContext';
|
||||||
|
|
||||||
|
vi.mock('./AuthContext', () => ({
|
||||||
|
useAuth: vi.fn(),
|
||||||
|
}));
|
||||||
|
|
||||||
|
const mockedUseAuth = vi.mocked(useAuth);
|
||||||
|
|
||||||
|
describe('ProtectedRoute', () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.clearAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('shows a loading state while auth is being checked', () => {
|
||||||
|
mockedUseAuth.mockReturnValue({
|
||||||
|
isAuthenticated: false,
|
||||||
|
isCheckingAuth: true,
|
||||||
|
user: null,
|
||||||
|
login: vi.fn(),
|
||||||
|
register: vi.fn(),
|
||||||
|
logout: vi.fn(),
|
||||||
|
});
|
||||||
|
|
||||||
|
render(
|
||||||
|
<MemoryRouter initialEntries={['/private']}>
|
||||||
|
<ProtectedRoute>
|
||||||
|
<div>Secret</div>
|
||||||
|
</ProtectedRoute>
|
||||||
|
</MemoryRouter>
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(screen.getByText('Loading...')).toBeInTheDocument();
|
||||||
|
expect(screen.queryByText('Secret')).not.toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('redirects unauthenticated users to the login page', () => {
|
||||||
|
mockedUseAuth.mockReturnValue({
|
||||||
|
isAuthenticated: false,
|
||||||
|
isCheckingAuth: false,
|
||||||
|
user: null,
|
||||||
|
login: vi.fn(),
|
||||||
|
register: vi.fn(),
|
||||||
|
logout: vi.fn(),
|
||||||
|
});
|
||||||
|
|
||||||
|
render(
|
||||||
|
<MemoryRouter initialEntries={['/private']}>
|
||||||
|
<Routes>
|
||||||
|
<Route
|
||||||
|
path="/private"
|
||||||
|
element={
|
||||||
|
<ProtectedRoute>
|
||||||
|
<div>Secret</div>
|
||||||
|
</ProtectedRoute>
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
<Route path="/login" element={<div>Login Page</div>} />
|
||||||
|
</Routes>
|
||||||
|
</MemoryRouter>
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(screen.getByText('Login Page')).toBeInTheDocument();
|
||||||
|
expect(screen.queryByText('Secret')).not.toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('renders children for authenticated users', () => {
|
||||||
|
mockedUseAuth.mockReturnValue({
|
||||||
|
isAuthenticated: true,
|
||||||
|
isCheckingAuth: false,
|
||||||
|
user: { username: 'evan', is_admin: false },
|
||||||
|
login: vi.fn(),
|
||||||
|
register: vi.fn(),
|
||||||
|
logout: vi.fn(),
|
||||||
|
});
|
||||||
|
|
||||||
|
render(
|
||||||
|
<MemoryRouter>
|
||||||
|
<ProtectedRoute>
|
||||||
|
<div>Secret</div>
|
||||||
|
</ProtectedRoute>
|
||||||
|
</MemoryRouter>
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(screen.getByText('Secret')).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
});
|
||||||
21
frontend/src/auth/ProtectedRoute.tsx
Normal file
21
frontend/src/auth/ProtectedRoute.tsx
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
import { Navigate, useLocation } from 'react-router-dom';
|
||||||
|
import { useAuth } from './AuthContext';
|
||||||
|
|
||||||
|
interface ProtectedRouteProps {
|
||||||
|
children: React.ReactNode;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function ProtectedRoute({ children }: ProtectedRouteProps) {
|
||||||
|
const { isAuthenticated, isCheckingAuth } = useAuth();
|
||||||
|
const location = useLocation();
|
||||||
|
|
||||||
|
if (isCheckingAuth) {
|
||||||
|
return <div className="text-content-muted">Loading...</div>;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!isAuthenticated) {
|
||||||
|
return <Navigate to="/login" state={{ from: location }} replace />;
|
||||||
|
}
|
||||||
|
|
||||||
|
return children;
|
||||||
|
}
|
||||||
157
frontend/src/auth/authHelpers.test.ts
Normal file
157
frontend/src/auth/authHelpers.test.ts
Normal file
@@ -0,0 +1,157 @@
|
|||||||
|
import { describe, expect, it } from 'vitest';
|
||||||
|
import {
|
||||||
|
getCheckingAuthState,
|
||||||
|
getUnauthenticatedAuthState,
|
||||||
|
normalizeAuthenticatedUser,
|
||||||
|
resolveAuthStateFromMe,
|
||||||
|
validateAuthMutationResponse,
|
||||||
|
type AuthState,
|
||||||
|
} from './authHelpers';
|
||||||
|
|
||||||
|
const previousState: AuthState = {
|
||||||
|
isAuthenticated: false,
|
||||||
|
user: null,
|
||||||
|
isCheckingAuth: true,
|
||||||
|
};
|
||||||
|
|
||||||
|
describe('authHelpers', () => {
|
||||||
|
it('normalizes a valid authenticated user payload', () => {
|
||||||
|
expect(normalizeAuthenticatedUser({ username: 'evan', is_admin: true })).toEqual({
|
||||||
|
username: 'evan',
|
||||||
|
is_admin: true,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('rejects invalid authenticated user payloads', () => {
|
||||||
|
expect(normalizeAuthenticatedUser(null)).toBeNull();
|
||||||
|
expect(normalizeAuthenticatedUser({ username: 'evan' })).toBeNull();
|
||||||
|
expect(normalizeAuthenticatedUser({ username: 123, is_admin: true })).toBeNull();
|
||||||
|
expect(normalizeAuthenticatedUser({ username: 'evan', is_admin: 'yes' })).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns a checking state while preserving previous auth information', () => {
|
||||||
|
expect(
|
||||||
|
getCheckingAuthState({
|
||||||
|
isAuthenticated: true,
|
||||||
|
user: { username: 'evan', is_admin: false },
|
||||||
|
isCheckingAuth: false,
|
||||||
|
})
|
||||||
|
).toEqual({
|
||||||
|
isAuthenticated: true,
|
||||||
|
user: { username: 'evan', is_admin: false },
|
||||||
|
isCheckingAuth: true,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('resolves auth state from a successful /auth/me response', () => {
|
||||||
|
expect(
|
||||||
|
resolveAuthStateFromMe({
|
||||||
|
meData: {
|
||||||
|
status: 200,
|
||||||
|
data: { username: 'evan', is_admin: false },
|
||||||
|
},
|
||||||
|
meError: undefined,
|
||||||
|
meLoading: false,
|
||||||
|
previousState,
|
||||||
|
})
|
||||||
|
).toEqual({
|
||||||
|
isAuthenticated: true,
|
||||||
|
user: { username: 'evan', is_admin: false },
|
||||||
|
isCheckingAuth: false,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('resolves auth state to unauthenticated on 401 or query error', () => {
|
||||||
|
expect(
|
||||||
|
resolveAuthStateFromMe({
|
||||||
|
meData: {
|
||||||
|
status: 401,
|
||||||
|
},
|
||||||
|
meError: undefined,
|
||||||
|
meLoading: false,
|
||||||
|
previousState,
|
||||||
|
})
|
||||||
|
).toEqual(getUnauthenticatedAuthState());
|
||||||
|
|
||||||
|
expect(
|
||||||
|
resolveAuthStateFromMe({
|
||||||
|
meData: undefined,
|
||||||
|
meError: new Error('failed'),
|
||||||
|
meLoading: false,
|
||||||
|
previousState,
|
||||||
|
})
|
||||||
|
).toEqual(getUnauthenticatedAuthState());
|
||||||
|
});
|
||||||
|
|
||||||
|
it('keeps checking state while /auth/me is still loading', () => {
|
||||||
|
expect(
|
||||||
|
resolveAuthStateFromMe({
|
||||||
|
meData: undefined,
|
||||||
|
meError: undefined,
|
||||||
|
meLoading: true,
|
||||||
|
previousState: {
|
||||||
|
isAuthenticated: true,
|
||||||
|
user: { username: 'evan', is_admin: true },
|
||||||
|
isCheckingAuth: false,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
).toEqual({
|
||||||
|
isAuthenticated: true,
|
||||||
|
user: { username: 'evan', is_admin: true },
|
||||||
|
isCheckingAuth: true,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns the previous state with checking disabled when there is no decisive me result', () => {
|
||||||
|
expect(
|
||||||
|
resolveAuthStateFromMe({
|
||||||
|
meData: {
|
||||||
|
status: 204,
|
||||||
|
},
|
||||||
|
meError: undefined,
|
||||||
|
meLoading: false,
|
||||||
|
previousState: {
|
||||||
|
isAuthenticated: false,
|
||||||
|
user: null,
|
||||||
|
isCheckingAuth: true,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
).toEqual({
|
||||||
|
isAuthenticated: false,
|
||||||
|
user: null,
|
||||||
|
isCheckingAuth: false,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('validates auth mutation responses by expected status and payload shape', () => {
|
||||||
|
expect(
|
||||||
|
validateAuthMutationResponse(
|
||||||
|
{
|
||||||
|
status: 200,
|
||||||
|
data: { username: 'evan', is_admin: false },
|
||||||
|
},
|
||||||
|
200
|
||||||
|
)
|
||||||
|
).toEqual({ username: 'evan', is_admin: false });
|
||||||
|
|
||||||
|
expect(
|
||||||
|
validateAuthMutationResponse(
|
||||||
|
{
|
||||||
|
status: 201,
|
||||||
|
data: { username: 'evan', is_admin: false },
|
||||||
|
},
|
||||||
|
200
|
||||||
|
)
|
||||||
|
).toBeNull();
|
||||||
|
|
||||||
|
expect(
|
||||||
|
validateAuthMutationResponse(
|
||||||
|
{
|
||||||
|
status: 200,
|
||||||
|
data: { username: 'evan' },
|
||||||
|
},
|
||||||
|
200
|
||||||
|
)
|
||||||
|
).toBeNull();
|
||||||
|
});
|
||||||
|
});
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user