Compare commits

78 commits (e9f2e3a5a0 ...)
| SHA1 |
| --- |
| c20aa88574 |
| de909b0af7 |
| c4f4dcf51e |
| 99e55ff568 |
| 8fd2aeb6a2 |
| bc076a4f44 |
| f9f23f2d3f |
| 3cff965393 |
| 7937890acd |
| 938dd69e5e |
| 7c92c346fa |
| 456b6e457c |
| d304421798 |
| 0fe52bc541 |
| 49f3d53170 |
| 57f81e5dd7 |
| 162adfbe16 |
| e2cfdb3a0c |
| acf4119d9a |
| f6dd8cee50 |
| a981d98ba5 |
| a193f97d29 |
| 841b29c425 |
| 3d61d0f5ef |
| 5e388730a5 |
| 0a1dfeab65 |
| d4c8e4d2da |
| bbd3a00102 |
| 3a633235ea |
| 9809a09d2e |
| f37bff365f |
| 77527bfb05 |
| 8de6fed5df |
| f9277d3b32 |
| db9629a618 |
| 546600db93 |
| 7c6acad689 |
| 5482899075 |
| 5a64ff7029 |
| a7ecb1a6f8 |
| 2d206826d6 |
| f1414e3e4e |
| 8e81acd381 |
| 6c6a6dd329 |
| c4602c8c3b |
| fe81b57a34 |
| a69b7452ce |
| 75ed394f8d |
| 803c187a00 |
| da1baeb4cd |
| 5865fe3c13 |
| 4a5464853b |
| 622dcd5702 |
| a86e2520ef |
| b1cfd16627 |
| 015ca30ac5 |
| 9792a6ff19 |
| 8c4c1022c3 |
| fd8b6bcdc1 |
| 0bbd5986cb |
| 45cef2f4af |
| e33a64db96 |
| 35ca021649 |
| 760b9ca0a0 |
| c9edcd8f5a |
| 2d63a7d109 |
| 9bd6bf7727 |
| f0a2d2cf69 |
| a65750ae21 |
| 14b930781e |
| 8a8f12c07a |
| c5b181dda4 |
| d3d89b36f6 |
| a69f20d5a9 |
| c66a6c8499 |
| 3057b86002 |
| 2c240f2f5c |
| 39fd7ab1f1 |
.djlintrc (new file; 14 lines)

@@ -0,0 +1,14 @@
+{
+  "profile": "golang",
+  "indent": 2,
+  "close_void_tags": true,
+  "format_attribute_template_tags": true,
+  "format_js": true,
+  "js": {
+    "indent_size": 2
+  },
+  "format_css": true,
+  "css": {
+    "indent_size": 2
+  }
+}
.drone.yml (29 lines changed)

@@ -1,33 +1,34 @@
 kind: pipeline
-type: kubernetes
+type: docker
 name: default
 
+trigger:
+  branch:
+    - master
+
 steps:
   # Unit Tests
-  - name: unit test
+  - name: tests
     image: golang
     commands:
-      - make tests_unit
+      - make tests
 
-  # Integration Tests (Every Month)
-  - name: integration test
-    image: golang
+  # Fetch tags
+  - name: fetch tags
+    image: alpine/git
     commands:
-      - make tests_integration
-    when:
-      event:
-        - cron
-      cron:
-        - integration-test
+      - git fetch --tags
 
-  # Publish Dev Docker Image
-  - name: publish_docker
+  # Publish docker image
+  - name: publish docker
    image: plugins/docker
    settings:
      repo: gitea.va.reichard.io/evan/antholume
      registry: gitea.va.reichard.io
      tags:
        - dev
+      custom_dns:
+        - 8.8.8.8
      username:
        from_secret: docker_username
      password:
.gitignore (vendored; 2 lines changed)

@@ -3,3 +3,5 @@ TODO.md
 data/
 build/
 .direnv/
+cover.html
+node_modules
.golangci.toml (new file; 6 lines)

@@ -0,0 +1,6 @@
+#:schema https://golangci-lint.run/jsonschema/golangci.jsonschema.json
+version = "2"
+
+[[linters.exclusions.rules]]
+linters = [ "errcheck" ]
+source = "^\\s*defer\\s+"
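The `source` rule above suppresses errcheck findings on any line beginning with `defer`. A minimal sketch of the pattern it permits (the file name is illustrative, not from this diff):

```go
package main

import (
    "log"
    "os"
)

func main() {
    f, err := os.Open("antholume.db") // illustrative path
    if err != nil {
        log.Fatal(err)
    }
    // errcheck would normally flag the discarded error from Close;
    // the `^\s*defer\s+` source exclusion suppresses exactly this line.
    defer f.Close()
}
```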
.prettierrc (new file; 3 lines)

@@ -0,0 +1,3 @@
+{
+  "plugins": ["prettier-plugin-go-template"]
+}
Dockerfile (21 lines changed)

@@ -1,23 +1,26 @@
-# Certificate Store
-FROM alpine AS certs
-RUN apk update && apk add ca-certificates
+# Certificates & Timezones
+FROM alpine AS alpine
+RUN apk update && apk add --no-cache ca-certificates tzdata
 
 # Build Image
-FROM golang:1.20 AS build
+FROM golang:1.24 AS build
+
+# Create Package Directory
+RUN mkdir -p /opt/antholume
 
 # Copy Source
 WORKDIR /src
 COPY . .
 
-# Create Package Directory
-RUN mkdir -p /opt/antholume
-
 # Compile
-RUN go build -o /opt/antholume/server
+RUN go build \
+    -ldflags "-X reichard.io/antholume/config.version=`git describe --tags`" \
+    -o /opt/antholume/server
 
 # Create Image
 FROM busybox:1.36
-COPY --from=certs /etc/ssl/certs /etc/ssl/certs
+COPY --from=alpine /etc/ssl/certs /etc/ssl/certs
+COPY --from=alpine /usr/share/zoneinfo /usr/share/zoneinfo
 COPY --from=build /opt/antholume /opt/antholume
 WORKDIR /opt/antholume
 EXPOSE 8585

@@ -1,9 +1,9 @@
-# Certificate Store
-FROM alpine AS certs
-RUN apk update && apk add ca-certificates
+# Certificates & Timezones
+FROM alpine AS alpine
+RUN apk update && apk add --no-cache ca-certificates tzdata
 
 # Build Image
-FROM --platform=$BUILDPLATFORM golang:1.20 AS build
+FROM --platform=$BUILDPLATFORM golang:1.21 AS build
 
 # Create Package Directory
 WORKDIR /src

@@ -15,11 +15,14 @@ ARG TARGETARCH
 RUN --mount=target=. \
     --mount=type=cache,target=/root/.cache/go-build \
     --mount=type=cache,target=/go/pkg \
-    GOOS=$TARGETOS GOARCH=$TARGETARCH go build -o /opt/antholume/server
+    GOOS=$TARGETOS GOARCH=$TARGETARCH go build \
+    -ldflags "-X reichard.io/antholume/config.version=`git describe --tags`" \
+    -o /opt/antholume/server
 
 # Create Image
 FROM busybox:1.36
-COPY --from=certs /etc/ssl/certs /etc/ssl/certs
+COPY --from=alpine /etc/ssl/certs /etc/ssl/certs
+COPY --from=alpine /usr/share/zoneinfo /usr/share/zoneinfo
 COPY --from=build /opt/antholume /opt/antholume
 WORKDIR /opt/antholume
 EXPOSE 8585
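Both build stages now stamp the binary with `-ldflags "-X reichard.io/antholume/config.version=..."`. For `-X` to take effect, the target must be a plain package-level string variable; a minimal sketch of what `config.version` presumably looks like (the variable name comes from the flag itself, the default value "develop" is inferred from the `c.Version == "develop"` check in api.go below):

```go
// Package config: the -X linker flag above overwrites this default.
package config

// version defaults to "develop" for local `go run`; release builds
// replace it at link time with the output of `git describe --tags`.
var version = "develop"
```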
Makefile (29 lines changed)

@@ -3,10 +3,10 @@ build_local: build_tailwind
    rm -r ./build || true
    mkdir -p ./build
 
-   env GOOS=linux GOARCH=amd64 go build -o ./build/server_linux_amd64
-   env GOOS=linux GOARCH=arm64 go build -o ./build/server_linux_arm64
-   env GOOS=darwin GOARCH=arm64 go build -o ./build/server_darwin_arm64
-   env GOOS=darwin GOARCH=amd64 go build -o ./build/server_darwin_amd64
+   env GOOS=linux GOARCH=amd64 go build -ldflags "-X reichard.io/antholume/config.version=`git describe --tags`" -o ./build/server_linux_amd64
+   env GOOS=linux GOARCH=arm64 go build -ldflags "-X reichard.io/antholume/config.version=`git describe --tags`" -o ./build/server_linux_arm64
+   env GOOS=darwin GOARCH=arm64 go build -ldflags "-X reichard.io/antholume/config.version=`git describe --tags`" -o ./build/server_darwin_arm64
+   env GOOS=darwin GOARCH=amd64 go build -ldflags "-X reichard.io/antholume/config.version=`git describe --tags`" -o ./build/server_darwin_amd64
 
 docker_build_local: build_tailwind
    docker build -t antholume:latest .

@@ -27,13 +27,22 @@ docker_build_release_latest: build_tailwind
    --push .
 
 build_tailwind:
-   tailwind build -o ./assets/style.css --minify
+   tailwindcss build -o ./assets/tailwind.css --minify
 
+dev: build_tailwind
+   GIN_MODE=release \
+   CONFIG_PATH=./data \
+   DATA_PATH=./data \
+   SEARCH_ENABLED=true \
+   REGISTRATION_ENABLED=true \
+   COOKIE_SECURE=false \
+   COOKIE_AUTH_KEY=1234 \
+   LOG_LEVEL=debug go run main.go serve
+
 clean:
    rm -rf ./build
 
-tests_integration:
-   go test -v -tags=integration -coverpkg=./... ./metadata
-
-tests_unit:
-   SET_TEST=set_val go test -v -coverpkg=./... ./...
+tests:
+   SET_TEST=set_val go test -coverpkg=./... ./... -coverprofile=./cover.out
+   go tool cover -html=./cover.out -o ./cover.html
+   rm ./cover.out
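The consolidated `tests` target exports `SET_TEST=set_val` before invoking `go test`, which implies some test asserts that environment-driven configuration is actually picked up. A hypothetical sketch of such an assertion (the package and test names are assumptions, not from this diff):

```go
package config_test

import (
    "os"
    "testing"
)

// TestEnvOverride illustrates why the Makefile exports SET_TEST=set_val:
// the suite expects the value to be visible in the process environment.
func TestEnvOverride(t *testing.T) {
    if got := os.Getenv("SET_TEST"); got != "set_val" {
        t.Skipf("SET_TEST not set (got %q); run via `make tests`", got)
    }
}
```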
README.md (16 lines changed)

@@ -64,6 +64,8 @@ The OPDS API endpoint is located at: `http(s)://<SERVER>/api/opds`
 
 ### Quick Start
 
+**NOTE**: If you're accessing your instance over HTTP (not HTTPS), you must set `COOKIE_SECURE=false`, otherwise you will not be able to login.
+
 ```bash
 # Make Data Directory
 mkdir -p antholume_data

@@ -71,6 +73,7 @@ mkdir -p antholume_data
 # Run Server
 docker run \
   -p 8585:8585 \
+  -e COOKIE_SECURE=false \
   -e REGISTRATION_ENABLED=true \
   -v ./antholume_data:/config \
   -v ./antholume_data:/data \

@@ -82,7 +85,7 @@ The service is now accessible at: `http://localhost:8585`. I recommend registeri
 ### Configuration
 
 | Environment Variable | Default Value | Description |
-| -------------------- | ------------- | ------------------------------------------------------------------- |
+| -------------------- | ------------- | -------------------------------------------------------------------------- |
 | DATABASE_TYPE | SQLite | Currently only "SQLite" is supported |
 | DATABASE_NAME | antholume | The database name, or in SQLite's case, the filename |
 | CONFIG_PATH | /config | Directory where to store SQLite's DB |

@@ -90,7 +93,8 @@ The service is now accessible at: `http://localhost:8585`. I recommend registeri
 | LISTEN_PORT | 8585 | Port the server listens at |
 | LOG_LEVEL | info | Set server log level |
 | REGISTRATION_ENABLED | false | Whether to allow registration (applies to both WebApp & KOSync API) |
-| COOKIE_SESSION_KEY | <EMPTY> | Optional secret cookie session key (auto generated if not provided) |
+| COOKIE_AUTH_KEY | <EMPTY> | Optional secret cookie authentication key (auto generated if not provided) |
+| COOKIE_ENC_KEY | <EMPTY> | Optional secret cookie encryption key (16 or 32 bytes) |
 | COOKIE_SECURE | true | Set Cookie `Secure` attribute (i.e. only works over HTTPS) |
 | COOKIE_HTTP_ONLY | true | Set Cookie `HttpOnly` attribute (i.e. inaccessible via JavaScript) |
 

@@ -114,13 +118,19 @@ See documentation in the `client` subfolder: [SyncNinja](https://gitea.va.reicha
 
 ## Development
 
-SQLC Generation (v1.21.0):
+SQLC Generation (v1.26.0):
 
 ```bash
 go install github.com/sqlc-dev/sqlc/cmd/sqlc@latest
 ~/go/bin/sqlc generate
 ```
 
+Goose Migrations:
+
+```bash
+go install github.com/pressly/goose/v3/cmd/goose@latest
+```
+
 Run Development:
 
 ```bash
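The README now documents installing Goose for migrations. Beyond the CLI, goose/v3 can also be driven programmatically from Go; a minimal sketch against the project's SQLite database (the database path, migrations directory, and driver choice are assumptions, not from this diff):

```go
package main

import (
    "database/sql"
    "log"

    "github.com/pressly/goose/v3"
    _ "modernc.org/sqlite" // driver choice is an assumption
)

func main() {
    db, err := sql.Open("sqlite", "./data/antholume.db")
    if err != nil {
        log.Fatal(err)
    }
    defer db.Close()

    // Apply all pending migrations from ./migrations.
    if err := goose.SetDialect("sqlite3"); err != nil {
        log.Fatal(err)
    }
    if err := goose.Up(db, "./migrations"); err != nil {
        log.Fatal(err)
    }
}
```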
api/api.go (420 lines changed)

@@ -1,228 +1,378 @@
 package api
 
 import (
-    "crypto/rand"
-    "embed"
+    "context"
     "fmt"
     "html/template"
     "io/fs"
     "net/http"
+    "path"
     "path/filepath"
     "strings"
+    "time"
 
     "github.com/gin-contrib/multitemplate"
     "github.com/gin-contrib/sessions"
     "github.com/gin-contrib/sessions/cookie"
     "github.com/gin-gonic/gin"
     "github.com/microcosm-cc/bluemonday"
+    "github.com/pkg/errors"
    log "github.com/sirupsen/logrus"
-    "reichard.io/bbank/config"
-    "reichard.io/bbank/database"
+    "reichard.io/antholume/config"
+    "reichard.io/antholume/database"
+    "reichard.io/antholume/utils"
 )
 
 type API struct {
-    Router     *gin.Engine
-    Config     *config.Config
-    DB         *database.DBManager
-    HTMLPolicy *bluemonday.Policy
-    Assets     *embed.FS
+    db            *database.DBManager
+    cfg           *config.Config
+    assets        fs.FS
+    httpServer    *http.Server
+    templates     map[string]*template.Template
+    userAuthCache map[string]string
 }
 
-func NewApi(db *database.DBManager, c *config.Config, assets embed.FS) *API {
+var htmlPolicy = bluemonday.StrictPolicy()
+
+func NewApi(db *database.DBManager, c *config.Config, assets fs.FS) *API {
    api := &API{
-        HTMLPolicy: bluemonday.StrictPolicy(),
-        Router:     gin.Default(),
-        Config:     c,
-        DB:         db,
-        Assets:     &assets,
+        db:            db,
+        cfg:           c,
+        assets:        assets,
+        templates:     make(map[string]*template.Template),
+        userAuthCache: make(map[string]string),
    }
 
-    // Assets & Web App Templates
-    assetsDir, _ := fs.Sub(assets, "assets")
-    api.Router.StaticFS("/assets", http.FS(assetsDir))
+    // Create router
+    router := gin.New()
 
-    // Generate Secure Token
+    // Add server
+    api.httpServer = &http.Server{
+        Handler: router,
+        Addr:    (":" + c.ListenPort),
+    }
+
+    // Add global logging middleware
+    router.Use(loggingMiddleware)
+
+    // Add global template loader middleware (develop)
+    if c.Version == "develop" {
+        log.Info("utilizing debug template loader")
+        router.Use(api.templateMiddleware(router))
+    }
+
+    // Assets & web app templates
+    assetsDir, _ := fs.Sub(assets, "assets")
+    router.StaticFS("/assets", http.FS(assetsDir))
+
+    // Generate auth token
    var newToken []byte
    var err error
-    if c.CookieSessionKey != "" {
-        log.Info("[NewApi] Utilizing Environment Cookie Session Key")
-        newToken = []byte(c.CookieSessionKey)
+    if c.CookieAuthKey != "" {
+        log.Info("utilizing environment cookie auth key")
+        newToken = []byte(c.CookieAuthKey)
    } else {
-        log.Info("[NewApi] Generating Cookie Session Key")
-        newToken, err = generateToken(64)
+        log.Info("generating cookie auth key")
+        newToken, err = utils.GenerateToken(64)
        if err != nil {
-            panic("Unable to generate secure token")
+            log.Panic("unable to generate cookie auth key")
        }
    }
 
-    // Configure Cookie Session Store
+    // Set enc token
    store := cookie.NewStore(newToken)
+    if c.CookieEncKey != "" {
+        if len(c.CookieEncKey) == 16 || len(c.CookieEncKey) == 32 {
+            log.Info("utilizing environment cookie encryption key")
+            store = cookie.NewStore(newToken, []byte(c.CookieEncKey))
+        } else {
+            log.Panic("invalid cookie encryption key (must be 16 or 32 bytes)")
+        }
+    }
+
+    // Configure cookie session store
    store.Options(sessions.Options{
        MaxAge:   60 * 60 * 24 * 7,
        Secure:   c.CookieSecure,
        HttpOnly: c.CookieHTTPOnly,
        SameSite: http.SameSiteStrictMode,
    })
-    api.Router.Use(sessions.Sessions("token", store))
+    router.Use(sessions.Sessions("token", store))
 
-    // Register Web App Route
-    api.registerWebAppRoutes()
+    // Register web app route
+    api.registerWebAppRoutes(router)
 
-    // Register API Routes
-    apiGroup := api.Router.Group("/api")
+    // Register API routes
+    apiGroup := router.Group("/api")
    api.registerKOAPIRoutes(apiGroup)
    api.registerOPDSRoutes(apiGroup)
 
    return api
 }
 
-func (api *API) registerWebAppRoutes() {
-    // Generate Templates
-    api.Router.HTMLRender = *api.generateTemplates()
-
-    // Static Assets (Required @ Root)
-    api.Router.GET("/manifest.json", api.webManifest)
-    api.Router.GET("/favicon.ico", api.faviconIcon)
-    api.Router.GET("/sw.js", api.serviceWorker)
-
-    // Local / Offline Static Pages (No Template, No Auth)
-    api.Router.GET("/local", api.localDocuments)
-
-    // Reader (Reader Page, Document Progress, Devices)
-    api.Router.GET("/reader", api.documentReader)
-    api.Router.GET("/reader/devices", api.authWebAppMiddleware, api.getDevices)
-    api.Router.GET("/reader/progress/:document", api.authWebAppMiddleware, api.getDocumentProgress)
-
-    // Web App
-    api.Router.GET("/", api.authWebAppMiddleware, api.createAppResourcesRoute("home"))
-    api.Router.GET("/activity", api.authWebAppMiddleware, api.createAppResourcesRoute("activity"))
-    api.Router.GET("/documents", api.authWebAppMiddleware, api.createAppResourcesRoute("documents"))
-    api.Router.GET("/documents/:document", api.authWebAppMiddleware, api.createAppResourcesRoute("document"))
-    api.Router.GET("/documents/:document/cover", api.authWebAppMiddleware, api.getDocumentCover)
-    api.Router.GET("/documents/:document/file", api.authWebAppMiddleware, api.downloadDocument)
-    api.Router.GET("/login", api.createAppResourcesRoute("login"))
-    api.Router.GET("/logout", api.authWebAppMiddleware, api.authLogout)
-    api.Router.GET("/register", api.createAppResourcesRoute("login", gin.H{"Register": true}))
-    api.Router.GET("/settings", api.authWebAppMiddleware, api.createAppResourcesRoute("settings"))
-    api.Router.POST("/login", api.authFormLogin)
-    api.Router.POST("/register", api.authFormRegister)
-
-    // Demo Mode Enabled Configuration
-    if api.Config.DemoMode {
-        api.Router.POST("/documents", api.authWebAppMiddleware, api.demoModeAppError)
-        api.Router.POST("/documents/:document/delete", api.authWebAppMiddleware, api.demoModeAppError)
-        api.Router.POST("/documents/:document/edit", api.authWebAppMiddleware, api.demoModeAppError)
-        api.Router.POST("/documents/:document/identify", api.authWebAppMiddleware, api.demoModeAppError)
-        api.Router.POST("/settings", api.authWebAppMiddleware, api.demoModeAppError)
-    } else {
-        api.Router.POST("/documents", api.authWebAppMiddleware, api.uploadNewDocument)
-        api.Router.POST("/documents/:document/delete", api.authWebAppMiddleware, api.deleteDocument)
-        api.Router.POST("/documents/:document/edit", api.authWebAppMiddleware, api.editDocument)
-        api.Router.POST("/documents/:document/identify", api.authWebAppMiddleware, api.identifyDocument)
-        api.Router.POST("/settings", api.authWebAppMiddleware, api.editSettings)
-    }
-
-    // Search Enabled Configuration
-    if api.Config.SearchEnabled {
-        api.Router.GET("/search", api.authWebAppMiddleware, api.createAppResourcesRoute("search"))
-        api.Router.POST("/search", api.authWebAppMiddleware, api.saveNewDocument)
-    }
-}
+func (api *API) Start() error {
+    return api.httpServer.ListenAndServe()
+}
+
+func (api *API) Stop() error {
+    // Stop server
+    ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+    defer cancel()
+    err := api.httpServer.Shutdown(ctx)
+    if err != nil {
+        return err
+    }
+
+    // Close DB
+    return api.db.DB.Close()
+}
+
+func (api *API) registerWebAppRoutes(router *gin.Engine) {
+    // Generate templates
+    router.HTMLRender = *api.generateTemplates()
+
+    // Static assets (required @ root)
+    router.GET("/manifest.json", api.appWebManifest)
+    router.GET("/favicon.ico", api.appFaviconIcon)
+    router.GET("/sw.js", api.appServiceWorker)
+
+    // Web App - Offline
+    router.GET("/local", api.appLocalDocuments)
+
+    // Web App - Reader
+    router.GET("/reader", api.appDocumentReader)
+    router.GET("/reader/devices", api.authWebAppMiddleware, api.appGetDevices)
+    router.GET("/reader/progress/:document", api.authWebAppMiddleware, api.appGetDocumentProgress)
+
+    // Web App - Templates
+    router.GET("/", api.authWebAppMiddleware, api.appGetHome) // DONE
+    router.GET("/activity", api.authWebAppMiddleware, api.appGetActivity) // DONE
+    router.GET("/progress", api.authWebAppMiddleware, api.appGetProgress) // DONE
+    router.GET("/documents", api.authWebAppMiddleware, api.appGetDocuments) // DONE
+    router.GET("/documents/:document", api.authWebAppMiddleware, api.appGetDocument) // DONE
+
+    // Web App - Other Routes
+    router.GET("/documents/:document/cover", api.authWebAppMiddleware, api.createGetCoverHandler(appErrorPage)) // DONE
+    router.GET("/documents/:document/file", api.authWebAppMiddleware, api.createDownloadDocumentHandler(appErrorPage)) // DONE
+    router.GET("/logout", api.authWebAppMiddleware, api.appAuthLogout) // DONE
+    router.POST("/login", api.appAuthLogin) // DONE
+    router.POST("/register", api.appAuthRegister) // DONE
+    router.GET("/settings", api.authWebAppMiddleware, api.appGetSettings) // DONE
+
+    // TODO
+    router.GET("/login", api.appGetLogin)
+    router.GET("/register", api.appGetRegister)
+
+    // DONE
+    router.GET("/admin", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appGetAdmin)
+    router.POST("/admin", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appPerformAdminAction)
+
+    // TODO - WIP
+    router.GET("/admin/logs", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appGetAdminLogs)
+    router.GET("/admin/import", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appGetAdminImport)
+    router.POST("/admin/import", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appPerformAdminImport)
+    router.GET("/admin/users", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appGetAdminUsers)
+    router.POST("/admin/users", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appUpdateAdminUsers)
+
+    // Demo mode enabled configuration
+    if api.cfg.DemoMode {
+        router.POST("/documents", api.authWebAppMiddleware, api.appDemoModeError)
+        router.POST("/documents/:document/delete", api.authWebAppMiddleware, api.appDemoModeError)
+        router.POST("/documents/:document/edit", api.authWebAppMiddleware, api.appDemoModeError)
+        router.POST("/documents/:document/identify", api.authWebAppMiddleware, api.appDemoModeError)
+        router.POST("/settings", api.authWebAppMiddleware, api.appDemoModeError)
+    } else {
+        router.POST("/documents", api.authWebAppMiddleware, api.appUploadNewDocument) // DONE
+        router.POST("/documents/:document/delete", api.authWebAppMiddleware, api.appDeleteDocument) // DONE
+        router.POST("/documents/:document/edit", api.authWebAppMiddleware, api.appEditDocument) // DONE
+        router.POST("/documents/:document/identify", api.authWebAppMiddleware, api.appIdentifyDocumentNew) // DONE
+        router.POST("/settings", api.authWebAppMiddleware, api.appEditSettings) // DONE
+    }
+
+    // Search enabled configuration
+    if api.cfg.SearchEnabled {
+        router.GET("/search", api.authWebAppMiddleware, api.appGetSearch) // DONE
+        router.POST("/search", api.authWebAppMiddleware, api.appSaveNewDocument) // TODO
+    }
+}
 
 func (api *API) registerKOAPIRoutes(apiGroup *gin.RouterGroup) {
    koGroup := apiGroup.Group("/ko")
 
-    // KO Sync Routes (WebApp Uses - Progress & Activity)
-    koGroup.GET("/documents/:document/file", api.authKOMiddleware, api.downloadDocument)
-    koGroup.GET("/syncs/progress/:document", api.authKOMiddleware, api.getProgress)
-    koGroup.GET("/users/auth", api.authKOMiddleware, api.authorizeUser)
-    koGroup.POST("/activity", api.authKOMiddleware, api.addActivities)
-    koGroup.POST("/syncs/activity", api.authKOMiddleware, api.checkActivitySync)
-    koGroup.POST("/users/create", api.createUser)
-    koGroup.PUT("/syncs/progress", api.authKOMiddleware, api.setProgress)
+    // KO sync routes (webapp uses - progress & activity)
+    koGroup.GET("/documents/:document/file", api.authKOMiddleware, api.createDownloadDocumentHandler(apiErrorPage))
+    koGroup.GET("/syncs/progress/:document", api.authKOMiddleware, api.koGetProgress)
+    koGroup.GET("/users/auth", api.authKOMiddleware, api.koAuthorizeUser)
+    koGroup.POST("/activity", api.authKOMiddleware, api.koAddActivities)
+    koGroup.POST("/syncs/activity", api.authKOMiddleware, api.koCheckActivitySync)
+    koGroup.POST("/users/create", api.koAuthRegister)
+    koGroup.PUT("/syncs/progress", api.authKOMiddleware, api.koSetProgress)
 
-    // Demo Mode Enabled Configuration
-    if api.Config.DemoMode {
-        koGroup.POST("/documents", api.authKOMiddleware, api.demoModeJSONError)
-        koGroup.POST("/syncs/documents", api.authKOMiddleware, api.demoModeJSONError)
-        koGroup.PUT("/documents/:document/file", api.authKOMiddleware, api.demoModeJSONError)
+    // Demo mode enabled configuration
+    if api.cfg.DemoMode {
+        koGroup.POST("/documents", api.authKOMiddleware, api.koDemoModeJSONError)
+        koGroup.POST("/syncs/documents", api.authKOMiddleware, api.koDemoModeJSONError)
+        koGroup.PUT("/documents/:document/file", api.authKOMiddleware, api.koDemoModeJSONError)
    } else {
-        koGroup.POST("/documents", api.authKOMiddleware, api.addDocuments)
-        koGroup.POST("/syncs/documents", api.authKOMiddleware, api.checkDocumentsSync)
-        koGroup.PUT("/documents/:document/file", api.authKOMiddleware, api.uploadExistingDocument)
+        koGroup.POST("/documents", api.authKOMiddleware, api.koAddDocuments)
+        koGroup.POST("/syncs/documents", api.authKOMiddleware, api.koCheckDocumentsSync)
+        koGroup.PUT("/documents/:document/file", api.authKOMiddleware, api.koUploadExistingDocument)
    }
 }
 
 func (api *API) registerOPDSRoutes(apiGroup *gin.RouterGroup) {
    opdsGroup := apiGroup.Group("/opds")
 
-    // OPDS Routes
+    // OPDS routes
    opdsGroup.GET("", api.authOPDSMiddleware, api.opdsEntry)
    opdsGroup.GET("/", api.authOPDSMiddleware, api.opdsEntry)
    opdsGroup.GET("/search.xml", api.authOPDSMiddleware, api.opdsSearchDescription)
    opdsGroup.GET("/documents", api.authOPDSMiddleware, api.opdsDocuments)
-    opdsGroup.GET("/documents/:document/cover", api.authOPDSMiddleware, api.getDocumentCover)
-    opdsGroup.GET("/documents/:document/file", api.authOPDSMiddleware, api.downloadDocument)
+    opdsGroup.GET("/documents/:document/cover", api.authOPDSMiddleware, api.createGetCoverHandler(apiErrorPage))
+    opdsGroup.GET("/documents/:document/file", api.authOPDSMiddleware, api.createDownloadDocumentHandler(apiErrorPage))
 }
 
 func (api *API) generateTemplates() *multitemplate.Renderer {
-    // Define Templates & Helper Functions
+    // Define templates & helper functions
    render := multitemplate.NewRenderer()
+    templates := make(map[string]*template.Template)
    helperFuncs := template.FuncMap{
-        "GetSVGGraphData": getSVGGraphData,
-        "GetUTCOffsets":   getUTCOffsets,
-        "NiceSeconds":     niceSeconds,
        "dict":            dict,
+        "slice":           slice,
+        "fields":          fields,
+        "getSVGGraphData": getSVGGraphData,
+        "getTimeZones":    getTimeZones,
+        "hasPrefix":       strings.HasPrefix,
+        "niceNumbers":     niceNumbers,
+        "niceSeconds":     niceSeconds,
    }
 
    // Load Base
-    b, _ := api.Assets.ReadFile("templates/base.html")
-    baseTemplate := template.Must(template.New("base").Funcs(helperFuncs).Parse(string(b)))
+    b, err := fs.ReadFile(api.assets, "templates/base.tmpl")
+    if err != nil {
+        log.Errorf("error reading base template: %v", err)
+        return &render
+    }
+
+    // Parse Base
+    baseTemplate, err := template.New("base").Funcs(helperFuncs).Parse(string(b))
+    if err != nil {
+        log.Errorf("error parsing base template: %v", err)
+        return &render
+    }
 
    // Load SVGs
-    svgs, _ := api.Assets.ReadDir("templates/svgs")
-    for _, item := range svgs {
-        basename := item.Name()
-        path := fmt.Sprintf("templates/svgs/%s", basename)
-        name := strings.TrimSuffix(basename, filepath.Ext(basename))
-
-        b, _ := api.Assets.ReadFile(path)
-        baseTemplate = template.Must(baseTemplate.New("svg/" + name).Parse(string(b)))
+    err = api.loadTemplates("svg", baseTemplate, templates, false)
+    if err != nil {
+        log.Errorf("error loading svg templates: %v", err)
+        return &render
    }
 
    // Load Components
-    components, _ := api.Assets.ReadDir("templates/components")
-    for _, item := range components {
-        basename := item.Name()
-        path := fmt.Sprintf("templates/components/%s", basename)
-        name := strings.TrimSuffix(basename, filepath.Ext(basename))
-
-        b, _ := api.Assets.ReadFile(path)
-        baseTemplate = template.Must(baseTemplate.New("component/" + name).Parse(string(b)))
+    err = api.loadTemplates("component", baseTemplate, templates, false)
+    if err != nil {
+        log.Errorf("error loading component templates: %v", err)
+        return &render
    }
 
    // Load Pages
-    pages, _ := api.Assets.ReadDir("templates/pages")
-    for _, item := range pages {
-        basename := item.Name()
-        path := fmt.Sprintf("templates/pages/%s", basename)
-        name := strings.TrimSuffix(basename, filepath.Ext(basename))
-
-        // Clone Base Template
-        b, _ := api.Assets.ReadFile(path)
-        pageTemplate, _ := template.Must(baseTemplate.Clone()).New("page/" + name).Parse(string(b))
-        render.Add("page/"+name, pageTemplate)
+    err = api.loadTemplates("page", baseTemplate, templates, true)
+    if err != nil {
+        log.Errorf("error loading page templates: %v", err)
+        return &render
+    }
+
+    // Populate Renderer
+    api.templates = templates
+    for templateName, templateValue := range templates {
+        render.Add(templateName, templateValue)
    }
 
    return &render
 }
 
-func generateToken(n int) ([]byte, error) {
-    b := make([]byte, n)
-    _, err := rand.Read(b)
-    if err != nil {
-        return nil, err
-    }
-    return b, nil
+func (api *API) loadTemplates(
+    basePath string,
+    baseTemplate *template.Template,
+    allTemplates map[string]*template.Template,
+    cloneBase bool,
+) error {
+    // Load Templates (Pluralize)
+    templateDirectory := fmt.Sprintf("templates/%ss", basePath)
+    allFiles, err := fs.ReadDir(api.assets, templateDirectory)
+    if err != nil {
+        return errors.Wrap(err, fmt.Sprintf("unable to read template dir: %s", templateDirectory))
+    }
+
+    // Generate Templates
+    for _, item := range allFiles {
+        templateFile := item.Name()
+        templatePath := path.Join(templateDirectory, templateFile)
+        templateName := fmt.Sprintf("%s/%s", basePath, strings.TrimSuffix(templateFile, filepath.Ext(templateFile)))
+
+        // Read Template
+        b, err := fs.ReadFile(api.assets, templatePath)
+        if err != nil {
+            return errors.Wrap(err, fmt.Sprintf("unable to read template: %s", templateName))
+        }
+
+        // Clone? (Pages - Don't Stomp)
+        if cloneBase {
+            baseTemplate = template.Must(baseTemplate.Clone())
+        }
+
+        // Parse Template
+        baseTemplate, err = baseTemplate.New(templateName).Parse(string(b))
+        if err != nil {
+            return errors.Wrap(err, fmt.Sprintf("unable to parse template: %s", templateName))
+        }
+
+        allTemplates[templateName] = baseTemplate
+    }
+
+    return nil
+}
+
+func (api *API) templateMiddleware(router *gin.Engine) gin.HandlerFunc {
+    return func(c *gin.Context) {
+        router.HTMLRender = *api.generateTemplates()
+        c.Next()
+    }
+}
+
+func loggingMiddleware(c *gin.Context) {
+    // Start timer
+    startTime := time.Now()
+
+    // Process request
+    c.Next()
+
+    // End timer
+    endTime := time.Now()
+    latency := endTime.Sub(startTime).Round(time.Microsecond)
+
+    // Log data
+    logData := log.Fields{
+        "type":    "access",
+        "ip":      c.ClientIP(),
+        "latency": latency.String(),
+        "status":  c.Writer.Status(),
+        "method":  c.Request.Method,
+        "path":    c.Request.URL.Path,
+    }
+
+    // Get username
+    var auth *authData
+    if data, _ := c.Get("Authorization"); data != nil {
+        auth = data.(*authData)
+    }
+
+    // Log user
+    if auth != nil && auth.UserName != "" {
+        logData["user"] = auth.UserName
+    }
+
+    // Log result
+    log.WithFields(logData).Info(fmt.Sprintf("%s %s", c.Request.Method, c.Request.URL.Path))
 }
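With this refactor the router and `http.Server` are private to the package, and the caller drives the lifecycle through `Start` and `Stop` (graceful shutdown with a 10s timeout, then DB close). A sketch of a plausible caller, using only the `NewApi`, `Start`, and `Stop` signatures visible in the diff above (the surrounding wiring is assumed):

```go
package api

import (
    "io/fs"
    "net/http"

    log "github.com/sirupsen/logrus"

    "reichard.io/antholume/config"
    "reichard.io/antholume/database"
)

// serve sketches the new lifecycle: Start blocks inside a goroutine,
// Stop performs the graceful shutdown and closes the database.
func serve(db *database.DBManager, cfg *config.Config, assets fs.FS) {
    api := NewApi(db, cfg, assets)

    go func() {
        if err := api.Start(); err != nil && err != http.ErrServerClosed {
            log.Error("server error: ", err)
        }
    }()

    // ... block here until a shutdown signal arrives, then:
    if err := api.Stop(); err != nil {
        log.Error("shutdown error: ", err)
    }
}
```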
961
api/app-admin-routes.go
Normal file
961
api/app-admin-routes.go
Normal file
@@ -0,0 +1,961 @@
|
|||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"archive/zip"
|
||||||
|
"bufio"
|
||||||
|
"context"
|
||||||
|
"crypto/md5"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"io/fs"
|
||||||
|
"mime/multipart"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"slices"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
argon2 "github.com/alexedwards/argon2id"
|
||||||
|
"github.com/gabriel-vasile/mimetype"
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/itchyny/gojq"
|
||||||
|
"github.com/pkg/errors"
|
||||||
|
log "github.com/sirupsen/logrus"
|
||||||
|
"reichard.io/antholume/database"
|
||||||
|
"reichard.io/antholume/metadata"
|
||||||
|
"reichard.io/antholume/utils"
|
||||||
|
"reichard.io/antholume/web/models"
|
||||||
|
"reichard.io/antholume/web/pages"
|
||||||
|
)
|
||||||
|
|
||||||
|
type adminAction string
|
||||||
|
|
||||||
|
const (
|
||||||
|
adminBackup adminAction = "BACKUP"
|
||||||
|
adminRestore adminAction = "RESTORE"
|
||||||
|
adminMetadataMatch adminAction = "METADATA_MATCH"
|
||||||
|
adminCacheTables adminAction = "CACHE_TABLES"
|
||||||
|
)
|
||||||
|
|
||||||
|
type requestAdminAction struct {
|
||||||
|
Action adminAction `form:"action"`
|
||||||
|
|
||||||
|
// Backup Action
|
||||||
|
BackupTypes []backupType `form:"backup_types"`
|
||||||
|
|
||||||
|
// Restore Action
|
||||||
|
RestoreFile *multipart.FileHeader `form:"restore_file"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type importType string
|
||||||
|
|
||||||
|
const (
|
||||||
|
importDirect importType = "DIRECT"
|
||||||
|
importCopy importType = "COPY"
|
||||||
|
)
|
||||||
|
|
||||||
|
type requestAdminImport struct {
|
||||||
|
Directory string `form:"directory"`
|
||||||
|
Select string `form:"select"`
|
||||||
|
Type importType `form:"type"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type operationType string
|
||||||
|
|
||||||
|
const (
|
||||||
|
opUpdate operationType = "UPDATE"
|
||||||
|
opCreate operationType = "CREATE"
|
||||||
|
opDelete operationType = "DELETE"
|
||||||
|
)
|
||||||
|
|
||||||
|
type requestAdminUpdateUser struct {
|
||||||
|
User string `form:"user"`
|
||||||
|
Password *string `form:"password"`
|
||||||
|
IsAdmin *bool `form:"is_admin"`
|
||||||
|
Operation operationType `form:"operation"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type requestAdminLogs struct {
|
||||||
|
Filter string `form:"filter"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type importStatus string
|
||||||
|
|
||||||
|
const (
|
||||||
|
importFailed importStatus = "FAILED"
|
||||||
|
importSuccess importStatus = "SUCCESS"
|
||||||
|
importExists importStatus = "EXISTS"
|
||||||
|
)
|
||||||
|
|
||||||
|
type importResult struct {
|
||||||
|
ID string
|
||||||
|
Name string
|
||||||
|
Path string
|
||||||
|
Status importStatus
|
||||||
|
Error error
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) appGetAdmin(c *gin.Context) {
|
||||||
|
api.renderPage(c, &pages.AdminGeneral{})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) appPerformAdminAction(c *gin.Context) {
|
||||||
|
var rAdminAction requestAdminAction
|
||||||
|
if err := c.ShouldBind(&rAdminAction); err != nil {
|
||||||
|
log.Error("invalid or missing form values")
|
||||||
|
appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var allNotifications []*models.Notification
|
||||||
|
switch rAdminAction.Action {
|
||||||
|
case adminRestore:
|
||||||
|
api.processRestoreFile(rAdminAction, c)
|
||||||
|
return
|
||||||
|
case adminBackup:
|
||||||
|
api.processBackup(c, rAdminAction.BackupTypes)
|
||||||
|
return
|
||||||
|
case adminMetadataMatch:
|
||||||
|
allNotifications = append(allNotifications, &models.Notification{
|
||||||
|
Type: models.NotificationTypeError,
|
||||||
|
Content: "Metadata match not implemented",
|
||||||
|
})
|
||||||
|
case adminCacheTables:
|
||||||
|
go func() {
|
||||||
|
err := api.db.CacheTempTables(c)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Unable to cache temp tables: ", err)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
allNotifications = append(allNotifications, &models.Notification{
|
||||||
|
Type: models.NotificationTypeSuccess,
|
||||||
|
Content: "Initiated table cache",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
api.renderPage(c, &pages.AdminGeneral{}, allNotifications...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) appGetAdminLogs(c *gin.Context) {
|
||||||
|
templateVars, _ := api.getBaseTemplateVars("admin-logs", c)
|
||||||
|
|
||||||
|
var rAdminLogs requestAdminLogs
|
||||||
|
if err := c.ShouldBindQuery(&rAdminLogs); err != nil {
|
||||||
|
log.Error("Invalid URI Bind")
|
||||||
|
appErrorPage(c, http.StatusNotFound, "Invalid URI parameters")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
rAdminLogs.Filter = strings.TrimSpace(rAdminLogs.Filter)
|
||||||
|
|
||||||
|
var jqFilter *gojq.Code
|
||||||
|
var basicFilter string
|
||||||
|
if strings.HasPrefix(rAdminLogs.Filter, "\"") && strings.HasSuffix(rAdminLogs.Filter, "\"") {
|
||||||
|
basicFilter = rAdminLogs.Filter[1 : len(rAdminLogs.Filter)-1]
|
||||||
|
} else if rAdminLogs.Filter != "" {
|
||||||
|
parsed, err := gojq.Parse(rAdminLogs.Filter)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Unable to parse JQ filter")
|
||||||
|
appErrorPage(c, http.StatusNotFound, "Unable to parse JQ filter")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
jqFilter, err = gojq.Compile(parsed)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Unable to compile JQ filter")
|
||||||
|
appErrorPage(c, http.StatusNotFound, "Unable to compile JQ filter")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Open Log File
|
||||||
|
logPath := filepath.Join(api.cfg.ConfigPath, "logs/antholume.log")
|
||||||
|
logFile, err := os.Open(logPath)
|
||||||
|
if err != nil {
|
||||||
|
appErrorPage(c, http.StatusBadRequest, "Missing AnthoLume log file")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
defer logFile.Close()
|
||||||
|
|
||||||
|
// Log Lines
|
||||||
|
var logLines []string
|
||||||
|
scanner := bufio.NewScanner(logFile)
|
||||||
|
for scanner.Scan() {
|
||||||
|
rawLog := scanner.Text()
|
||||||
|
|
||||||
|
// Attempt JSON Pretty
|
||||||
|
var jsonMap map[string]any
|
||||||
|
err := json.Unmarshal([]byte(rawLog), &jsonMap)
|
||||||
|
if err != nil {
|
||||||
|
logLines = append(logLines, scanner.Text())
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse JSON
|
||||||
|
rawData, err := json.MarshalIndent(jsonMap, "", " ")
|
||||||
|
if err != nil {
|
||||||
|
logLines = append(logLines, scanner.Text())
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Basic Filter
|
||||||
|
if basicFilter != "" && strings.Contains(string(rawData), basicFilter) {
|
||||||
|
logLines = append(logLines, string(rawData))
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// No JQ Filter
|
||||||
|
if jqFilter == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Error or nil
|
||||||
|
result, _ := jqFilter.Run(jsonMap).Next()
|
||||||
|
if _, ok := result.(error); ok {
|
||||||
|
logLines = append(logLines, string(rawData))
|
||||||
|
continue
|
||||||
|
} else if result == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Attempt filtered json
|
||||||
|
filteredData, err := json.MarshalIndent(result, "", " ")
|
||||||
|
if err == nil {
|
||||||
|
rawData = filteredData
|
||||||
|
}
|
||||||
|
|
||||||
|
logLines = append(logLines, string(rawData))
|
||||||
|
}
|
||||||
|
|
||||||
|
templateVars["Data"] = logLines
|
||||||
|
templateVars["Filter"] = rAdminLogs.Filter
|
||||||
|
|
||||||
|
c.HTML(http.StatusOK, "page/admin-logs", templateVars)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) appGetAdminUsers(c *gin.Context) {
|
||||||
|
templateVars, _ := api.getBaseTemplateVars("admin-users", c)
|
||||||
|
|
||||||
|
users, err := api.db.Queries.GetUsers(c)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("GetUsers DB Error: ", err)
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("GetUsers DB Error: %v", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
templateVars["Data"] = users
|
||||||
|
|
||||||
|
c.HTML(http.StatusOK, "page/admin-users", templateVars)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) appUpdateAdminUsers(c *gin.Context) {
|
||||||
|
templateVars, _ := api.getBaseTemplateVars("admin-users", c)
|
||||||
|
|
||||||
|
var rUpdate requestAdminUpdateUser
|
||||||
|
if err := c.ShouldBind(&rUpdate); err != nil {
|
||||||
|
log.Error("Invalid URI Bind")
|
||||||
|
appErrorPage(c, http.StatusNotFound, "Invalid user parameters")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure Username
|
||||||
|
if rUpdate.User == "" {
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, "User cannot be empty")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var err error
|
||||||
|
switch rUpdate.Operation {
|
||||||
|
case opCreate:
|
||||||
|
err = api.createUser(c, rUpdate.User, rUpdate.Password, rUpdate.IsAdmin)
|
||||||
|
case opUpdate:
|
||||||
|
err = api.updateUser(c, rUpdate.User, rUpdate.Password, rUpdate.IsAdmin)
|
||||||
|
case opDelete:
|
||||||
|
err = api.deleteUser(c, rUpdate.User)
|
||||||
|
default:
|
||||||
|
appErrorPage(c, http.StatusNotFound, "Unknown user operation")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("Unable to create or update user: %v", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
users, err := api.db.Queries.GetUsers(c)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("GetUsers DB Error: ", err)
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("GetUsers DB Error: %v", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
templateVars["Data"] = users
|
||||||
|
|
||||||
|
c.HTML(http.StatusOK, "page/admin-users", templateVars)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) appGetAdminImport(c *gin.Context) {
|
||||||
|
templateVars, _ := api.getBaseTemplateVars("admin-import", c)
|
||||||
|
|
||||||
|
var rImportFolder requestAdminImport
|
||||||
|
if err := c.ShouldBindQuery(&rImportFolder); err != nil {
|
||||||
|
log.Error("Invalid URI Bind")
|
||||||
|
appErrorPage(c, http.StatusNotFound, "Invalid directory")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if rImportFolder.Select != "" {
|
||||||
|
templateVars["SelectedDirectory"] = rImportFolder.Select
|
||||||
|
c.HTML(http.StatusOK, "page/admin-import", templateVars)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default Path
|
||||||
|
if rImportFolder.Directory == "" {
|
||||||
|
dPath, err := filepath.Abs(api.cfg.DataPath)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Absolute filepath error: ", rImportFolder.Directory)
|
||||||
|
appErrorPage(c, http.StatusNotFound, "Unable to get data directory absolute path")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
rImportFolder.Directory = dPath
|
||||||
|
}
|
||||||
|
|
||||||
|
entries, err := os.ReadDir(rImportFolder.Directory)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Invalid directory: ", rImportFolder.Directory)
|
||||||
|
appErrorPage(c, http.StatusNotFound, "Invalid directory")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
allDirectories := []string{}
|
||||||
|
for _, e := range entries {
|
||||||
|
if !e.IsDir() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
allDirectories = append(allDirectories, e.Name())
|
||||||
|
}
|
||||||
|
|
||||||
|
templateVars["CurrentPath"] = filepath.Clean(rImportFolder.Directory)
|
||||||
|
templateVars["Data"] = allDirectories
|
||||||
|
|
||||||
|
c.HTML(http.StatusOK, "page/admin-import", templateVars)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) appPerformAdminImport(c *gin.Context) {
|
||||||
|
templateVars, _ := api.getBaseTemplateVars("admin-import", c)
|
||||||
|
|
||||||
|
var rAdminImport requestAdminImport
|
||||||
|
if err := c.ShouldBind(&rAdminImport); err != nil {
|
||||||
|
log.Error("Invalid URI Bind")
|
||||||
|
appErrorPage(c, http.StatusNotFound, "Invalid directory")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get import directory
|
||||||
|
importDirectory := filepath.Clean(rAdminImport.Directory)
|
||||||
|
|
||||||
|
// Get data directory
|
||||||
|
absoluteDataPath, _ := filepath.Abs(filepath.Join(api.cfg.DataPath, "documents"))
|
||||||
|
|
||||||
|
// Validate different path
|
||||||
|
if absoluteDataPath == importDirectory {
|
||||||
|
appErrorPage(c, http.StatusBadRequest, "Directory is the same as data path")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Do Transaction
|
||||||
|
tx, err := api.db.DB.Begin()
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Transaction Begin DB Error:", err)
|
||||||
|
apiErrorPage(c, http.StatusBadRequest, "Unknown error")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Defer & Start Transaction
|
||||||
|
defer func() {
|
||||||
|
if err := tx.Rollback(); err != nil {
|
||||||
|
log.Error("DB Rollback Error:", err)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
qtx := api.db.Queries.WithTx(tx)
|
||||||
|
|
||||||
|
// Track imports
|
||||||
|
importResults := make([]importResult, 0)
|
||||||
|
|
||||||
|
// Walk Directory & Import
|
||||||
|
err = filepath.WalkDir(importDirectory, func(importPath string, f fs.DirEntry, err error) error {
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if f.IsDir() {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get relative path
|
||||||
|
basePath := importDirectory
|
||||||
|
relFilePath, err := filepath.Rel(importDirectory, importPath)
|
||||||
|
if err != nil {
|
||||||
|
log.Warnf("path error: %v", err)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Track imports
|
||||||
|
iResult := importResult{
|
||||||
|
Path: relFilePath,
|
||||||
|
Status: importFailed,
|
||||||
|
}
|
||||||
|
defer func() {
|
||||||
|
importResults = append(importResults, iResult)
|
||||||
|
}()
|
||||||
|
|
||||||
|
// Get metadata
|
||||||
|
fileMeta, err := metadata.GetMetadata(importPath)
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf("metadata error: %v", err)
|
||||||
|
iResult.Error = err
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
iResult.ID = *fileMeta.PartialMD5
|
||||||
|
iResult.Name = fmt.Sprintf("%s - %s", *fileMeta.Author, *fileMeta.Title)
|
||||||
|
|
||||||
|
// Check already exists
|
||||||
|
_, err = qtx.GetDocument(c, *fileMeta.PartialMD5)
|
||||||
|
if err == nil {
|
||||||
|
log.Warnf("document already exists: %s", *fileMeta.PartialMD5)
|
||||||
|
iResult.Status = importExists
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Import Copy
|
||||||
|
if rAdminImport.Type == importCopy {
|
||||||
|
// Derive & Sanitize File Name
|
||||||
|
relFilePath = deriveBaseFileName(fileMeta)
|
||||||
|
safePath := filepath.Join(api.cfg.DataPath, "documents", relFilePath)
|
||||||
|
|
||||||
|
// Open Source File
|
||||||
|
srcFile, err := os.Open(importPath)
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf("unable to open current file: %v", err)
|
||||||
|
iResult.Error = err
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
defer srcFile.Close()
|
||||||
|
|
||||||
|
// Open Destination File
|
||||||
|
destFile, err := os.Create(safePath)
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf("unable to open destination file: %v", err)
|
||||||
|
iResult.Error = err
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
defer destFile.Close()
|
||||||
|
|
||||||
|
// Copy File
|
||||||
|
if _, err = io.Copy(destFile, srcFile); err != nil {
|
||||||
|
log.Errorf("unable to save file: %v", err)
|
||||||
|
iResult.Error = err
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update Base & Path
|
||||||
|
basePath = filepath.Join(api.cfg.DataPath, "documents")
|
||||||
|
iResult.Path = relFilePath
|
||||||
|
}
|
||||||
|
|
||||||
|
// Upsert document
|
||||||
|
if _, err = qtx.UpsertDocument(c, database.UpsertDocumentParams{
|
||||||
|
ID: *fileMeta.PartialMD5,
|
||||||
|
Title: fileMeta.Title,
|
||||||
|
Author: fileMeta.Author,
|
||||||
|
Description: fileMeta.Description,
|
||||||
|
Md5: fileMeta.MD5,
|
||||||
|
Words: fileMeta.WordCount,
|
||||||
|
Filepath: &relFilePath,
|
||||||
|
Basepath: &basePath,
|
||||||
|
}); err != nil {
|
||||||
|
log.Errorf("UpsertDocument DB Error: %v", err)
|
||||||
|
iResult.Error = err
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
iResult.Status = importSuccess
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("Import Failed: %v", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Commit transaction
|
||||||
|
if err := tx.Commit(); err != nil {
|
||||||
|
log.Error("Transaction Commit DB Error: ", err)
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("Import DB Error: %v", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sort import results
|
||||||
|
sort.Slice(importResults, func(i int, j int) bool {
|
||||||
|
return importStatusPriority(importResults[i].Status) <
|
||||||
|
importStatusPriority(importResults[j].Status)
|
||||||
|
})
|
||||||
|
|
||||||
|
templateVars["Data"] = importResults
|
||||||
|
c.HTML(http.StatusOK, "page/admin-import-results", templateVars)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) processBackup(c *gin.Context, backupTypes []backupType) {
|
||||||
|
// Vacuum
|
||||||
|
_, err := api.db.DB.ExecContext(c, "VACUUM;")
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Unable to vacuum DB: ", err)
|
||||||
|
appErrorPage(c, http.StatusInternalServerError, "Unable to vacuum database")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set Headers
|
||||||
|
c.Header("Content-type", "application/octet-stream")
|
||||||
|
c.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"AnthoLumeBackup_%s.zip\"", time.Now().Format("20060102150405")))
|
||||||
|
|
||||||
|
// Stream Backup ZIP Archive
|
||||||
|
c.Stream(func(w io.Writer) bool {
|
||||||
|
var directories []string
|
||||||
|
for _, item := range backupTypes {
|
||||||
|
switch item {
|
||||||
|
case backupCovers:
|
||||||
|
directories = append(directories, "covers")
|
||||||
|
case backupDocuments:
|
||||||
|
directories = append(directories, "documents")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
err := api.createBackup(c, w, directories)
|
||||||
|
if err != nil {
|
||||||
|
log.Error("Backup Error: ", err)
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
})
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
func (api *API) processRestoreFile(rAdminAction requestAdminAction, c *gin.Context) {
	// Validate Type & Derive Extension on MIME
	uploadedFile, err := rAdminAction.RestoreFile.Open()
	if err != nil {
		log.Error("File Error: ", err)
		appErrorPage(c, http.StatusInternalServerError, "Unable to open file")
		return
	}

	fileMime, err := mimetype.DetectReader(uploadedFile)
	if err != nil {
		log.Error("MIME Error")
		appErrorPage(c, http.StatusInternalServerError, "Unable to detect filetype")
		return
	}
	fileExtension := fileMime.Extension()

	// Validate Extension
	if !slices.Contains([]string{".zip"}, fileExtension) {
		log.Error("Invalid FileType: ", fileExtension)
		appErrorPage(c, http.StatusBadRequest, "Invalid filetype")
		return
	}

	// Create Temp File
	tempFile, err := os.CreateTemp("", "restore")
	if err != nil {
		log.Warn("Temp File Create Error: ", err)
		appErrorPage(c, http.StatusInternalServerError, "Unable to create temp file")
		return
	}
	defer os.Remove(tempFile.Name())
	defer tempFile.Close()

	// Save Temp
	err = c.SaveUploadedFile(rAdminAction.RestoreFile, tempFile.Name())
	if err != nil {
		log.Error("File Error: ", err)
		appErrorPage(c, http.StatusInternalServerError, "Unable to save file")
		return
	}

	// ZIP Info
	fileInfo, err := tempFile.Stat()
	if err != nil {
		log.Error("File Error: ", err)
		appErrorPage(c, http.StatusInternalServerError, "Unable to read file")
		return
	}

	// Create ZIP Reader
	zipReader, err := zip.NewReader(tempFile, fileInfo.Size())
	if err != nil {
		log.Error("ZIP Error: ", err)
		appErrorPage(c, http.StatusInternalServerError, "Unable to read zip")
		return
	}

	// Validate ZIP Contents
	hasDBFile := false
	hasUnknownFile := false
	for _, file := range zipReader.File {
		fileName := strings.TrimPrefix(file.Name, "/")
		if fileName == "antholume.db" {
			hasDBFile = true
			break
		} else if !strings.HasPrefix(fileName, "covers/") && !strings.HasPrefix(fileName, "documents/") {
			hasUnknownFile = true
			break
		}
	}

	// Invalid ZIP
	if !hasDBFile {
		log.Error("Invalid ZIP File - Missing DB")
		appErrorPage(c, http.StatusInternalServerError, "Invalid Restore ZIP - Missing DB")
		return
	} else if hasUnknownFile {
		log.Error("Invalid ZIP File - Invalid File(s)")
		appErrorPage(c, http.StatusInternalServerError, "Invalid Restore ZIP - Invalid File(s)")
		return
	}

	// Create Backup File
	backupFilePath := filepath.Join(api.cfg.ConfigPath, fmt.Sprintf("backups/AnthoLumeBackup_%s.zip", time.Now().Format("20060102150405")))
	backupFile, err := os.Create(backupFilePath)
	if err != nil {
		log.Error("Unable to create backup file: ", err)
		appErrorPage(c, http.StatusInternalServerError, "Unable to create backup file")
		return
	}
	defer backupFile.Close()

	// Save Backup File
	w := bufio.NewWriter(backupFile)
	err = api.createBackup(c, w, []string{"covers", "documents"})
	if err != nil {
		log.Error("Unable to save backup file: ", err)
		appErrorPage(c, http.StatusInternalServerError, "Unable to save backup file")
		return
	}

	// Flush the buffered writer so the backup archive on disk is complete
	if err := w.Flush(); err != nil {
		log.Error("Unable to flush backup file: ", err)
		appErrorPage(c, http.StatusInternalServerError, "Unable to save backup file")
		return
	}

	// Remove Data
	err = api.removeData()
	if err != nil {
		log.Error("Unable to delete data: ", err)
		appErrorPage(c, http.StatusInternalServerError, "Unable to delete data")
		return
	}

	// Restore Data
	err = api.restoreData(zipReader)
	if err != nil {
		appErrorPage(c, http.StatusInternalServerError, "Unable to restore data")
		log.Panic("Unable to restore data: ", err)
	}

	// Reinit DB
	if err := api.db.Reload(c); err != nil {
		appErrorPage(c, http.StatusInternalServerError, "Unable to reload DB")
		log.Panicf("Unable to reload DB: %v", err)
	}

	// Rotate Auth Hashes
	if err := api.rotateAllAuthHashes(c); err != nil {
		appErrorPage(c, http.StatusInternalServerError, "Unable to rotate hashes")
		log.Panicf("Unable to rotate auth hashes: %v", err)
	}

	// Redirect to login page
	c.Redirect(http.StatusFound, "/login")
}

func (api *API) restoreData(zipReader *zip.Reader) error {
	// Ensure Directories
	api.cfg.EnsureDirectories()

	// Restore Data
	for _, file := range zipReader.File {
		rc, err := file.Open()
		if err != nil {
			return err
		}
		defer rc.Close()

		destPath := filepath.Join(api.cfg.DataPath, file.Name)
		destFile, err := os.Create(destPath)
		if err != nil {
			log.Errorf("error creating destination file: %v", err)
			return err
		}
		defer destFile.Close()

		// Copy the contents from the zip file to the destination file.
		if _, err := io.Copy(destFile, rc); err != nil {
			log.Errorf("Error copying file contents: %v", err)
			return err
		}
	}

	return nil
}

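// Note on restoreData above: the deferred rc.Close() and destFile.Close()
// calls only run when the function returns, so every archive entry stays open
// until the whole restore finishes. A sketch of one way to bound that, by
// moving each copy into its own function so the defers run per entry
// (illustrative only, not part of this file):
//
//	func restoreEntry(dataPath string, file *zip.File) error {
//		rc, err := file.Open()
//		if err != nil {
//			return err
//		}
//		defer rc.Close()
//
//		destFile, err := os.Create(filepath.Join(dataPath, file.Name))
//		if err != nil {
//			return err
//		}
//		defer destFile.Close()
//
//		_, err = io.Copy(destFile, rc)
//		return err
//	}
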
func (api *API) removeData() error {
	allPaths := []string{
		"covers",
		"documents",
		"antholume.db",
		"antholume.db-wal",
		"antholume.db-shm",
	}

	for _, name := range allPaths {
		fullPath := filepath.Join(api.cfg.DataPath, name)
		err := os.RemoveAll(fullPath)
		if err != nil {
			log.Errorf("Unable to delete %s: %v", name, err)
			return err
		}
	}

	return nil
}

func (api *API) createBackup(ctx context.Context, w io.Writer, directories []string) error {
	// Vacuum DB
	_, err := api.db.DB.ExecContext(ctx, "VACUUM;")
	if err != nil {
		return errors.Wrap(err, "Unable to vacuum database")
	}

	ar := zip.NewWriter(w)
	exportWalker := func(currentPath string, f fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		if f.IsDir() {
			return nil
		}

		// Open File on Disk
		file, err := os.Open(currentPath)
		if err != nil {
			return err
		}
		defer file.Close()

		// Derive Export Structure
		fileName := filepath.Base(currentPath)
		folderName := filepath.Base(filepath.Dir(currentPath))

		// Create File in Export
		newF, err := ar.Create(filepath.Join(folderName, fileName))
		if err != nil {
			return err
		}

		// Copy File in Export
		_, err = io.Copy(newF, file)
		if err != nil {
			return err
		}

		return nil
	}

	// Get DB Path
	fileName := fmt.Sprintf("%s.db", api.cfg.DBName)
	dbLocation := filepath.Join(api.cfg.ConfigPath, fileName)

	// Copy Database File
	dbFile, err := os.Open(dbLocation)
	if err != nil {
		return err
	}
	defer dbFile.Close()

	newDbFile, err := ar.Create(fileName)
	if err != nil {
		return err
	}

	_, err = io.Copy(newDbFile, dbFile)
	if err != nil {
		return err
	}

	// Backup Covers & Documents
	for _, dir := range directories {
		err = filepath.WalkDir(filepath.Join(api.cfg.DataPath, dir), exportWalker)
		if err != nil {
			return err
		}
	}

	_ = ar.Close()
	return nil
}

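// Illustrative sketch: createBackup only requires an io.Writer, which is why
// processBackup can stream to the HTTP response while processRestoreFile and
// deleteUser write to files. A hypothetical in-memory usage:
//
//	var buf bytes.Buffer
//	if err := api.createBackup(context.Background(), &buf, []string{"covers"}); err != nil {
//		log.Fatal(err)
//	}
//	// buf now holds a ZIP containing the database file plus the covers directory.
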
func (api *API) isLastAdmin(ctx context.Context, userID string) (bool, error) {
	allUsers, err := api.db.Queries.GetUsers(ctx)
	if err != nil {
		return false, errors.Wrap(err, fmt.Sprintf("GetUsers DB Error: %v", err))
	}

	hasAdmin := false
	for _, user := range allUsers {
		if user.Admin && user.ID != userID {
			hasAdmin = true
			break
		}
	}

	return !hasAdmin, nil
}

func (api *API) createUser(ctx context.Context, user string, rawPassword *string, isAdmin *bool) error {
	// Validate Necessary Parameters
	if rawPassword == nil || *rawPassword == "" {
		return fmt.Errorf("password can't be empty")
	}

	// Base Params
	createParams := database.CreateUserParams{
		ID: user,
	}

	// Handle Admin (Explicit or False)
	if isAdmin != nil {
		createParams.Admin = *isAdmin
	} else {
		createParams.Admin = false
	}

	// Parse Password
	password := fmt.Sprintf("%x", md5.Sum([]byte(*rawPassword)))
	hashedPassword, err := argon2.CreateHash(password, argon2.DefaultParams)
	if err != nil {
		return fmt.Errorf("unable to create hashed password")
	}
	createParams.Pass = &hashedPassword

	// Generate Auth Hash
	rawAuthHash, err := utils.GenerateToken(64)
	if err != nil {
		return fmt.Errorf("unable to create token for user")
	}
	authHash := fmt.Sprintf("%x", rawAuthHash)
	createParams.AuthHash = &authHash

	// Create user in DB
	if rows, err := api.db.Queries.CreateUser(ctx, createParams); err != nil {
		log.Error("CreateUser DB Error:", err)
		return fmt.Errorf("unable to create user")
	} else if rows == 0 {
		log.Warn("User Already Exists:", createParams.ID)
		return fmt.Errorf("user already exists")
	}

	return nil
}

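// Illustrative sketch: both trailing parameters are optional pointers, so
// callers can omit the admin flag entirely. Hypothetical invocation:
//
//	pass := "hunter2"
//	admin := true
//	if err := api.createUser(ctx, "evan", &pass, &admin); err != nil {
//		log.Error("unable to create user: ", err)
//	}
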
func (api *API) updateUser(ctx context.Context, user string, rawPassword *string, isAdmin *bool) error {
	// Validate Necessary Parameters
	if rawPassword == nil && isAdmin == nil {
		return fmt.Errorf("nothing to update")
	}

	// Base Params
	updateParams := database.UpdateUserParams{
		UserID: user,
	}

	// Handle Admin (Update or Existing)
	if isAdmin != nil {
		updateParams.Admin = *isAdmin
	} else {
		user, err := api.db.Queries.GetUser(ctx, user)
		if err != nil {
			return errors.Wrap(err, fmt.Sprintf("GetUser DB Error: %v", err))
		}
		updateParams.Admin = user.Admin
	}

	// Check Admins - Disallow Demotion
	if isLast, err := api.isLastAdmin(ctx, user); err != nil {
		return err
	} else if isLast && !updateParams.Admin {
		return fmt.Errorf("unable to demote %s - last admin", user)
	}

	// Handle Password
	if rawPassword != nil {
		if *rawPassword == "" {
			return fmt.Errorf("password can't be empty")
		}

		// Parse Password
		password := fmt.Sprintf("%x", md5.Sum([]byte(*rawPassword)))
		hashedPassword, err := argon2.CreateHash(password, argon2.DefaultParams)
		if err != nil {
			return fmt.Errorf("unable to create hashed password")
		}
		updateParams.Password = &hashedPassword

		// Generate Auth Hash
		rawAuthHash, err := utils.GenerateToken(64)
		if err != nil {
			return fmt.Errorf("unable to create token for user")
		}
		authHash := fmt.Sprintf("%x", rawAuthHash)
		updateParams.AuthHash = &authHash
	}

	// Update User
	_, err := api.db.Queries.UpdateUser(ctx, updateParams)
	if err != nil {
		return errors.Wrap(err, fmt.Sprintf("UpdateUser DB Error: %v", err))
	}

	return nil
}

func (api *API) deleteUser(ctx context.Context, user string) error {
	// Check Admins
	if isLast, err := api.isLastAdmin(ctx, user); err != nil {
		return err
	} else if isLast {
		return fmt.Errorf("unable to delete %s - last admin", user)
	}

	// Create Backup File
	backupFilePath := filepath.Join(api.cfg.ConfigPath, fmt.Sprintf("backups/AnthoLumeBackup_%s.zip", time.Now().Format("20060102150405")))
	backupFile, err := os.Create(backupFilePath)
	if err != nil {
		return err
	}
	defer backupFile.Close()

	// Save Backup File (DB Only)
	w := bufio.NewWriter(backupFile)
	err = api.createBackup(ctx, w, []string{})
	if err != nil {
		return err
	}

	// Flush the buffered writer so the backup archive on disk is complete
	if err := w.Flush(); err != nil {
		return err
	}

	// Delete User
	_, err = api.db.Queries.DeleteUser(ctx, user)
	if err != nil {
		return errors.Wrap(err, fmt.Sprintf("DeleteUser DB Error: %v", err))
	}

	return nil
}

513 api/app-routes-new.go Normal file
@@ -0,0 +1,513 @@
package api

import (
	"cmp"
	"crypto/md5"
	"fmt"
	"math"
	"net/http"
	"sort"
	"strings"
	"time"

	argon2 "github.com/alexedwards/argon2id"
	"github.com/gin-gonic/gin"
	log "github.com/sirupsen/logrus"
	"reichard.io/antholume/database"
	"reichard.io/antholume/metadata"
	"reichard.io/antholume/pkg/formatters"
	"reichard.io/antholume/pkg/ptr"
	"reichard.io/antholume/pkg/sliceutils"
	"reichard.io/antholume/pkg/utils"
	"reichard.io/antholume/search"
	"reichard.io/antholume/web/components/stats"
	"reichard.io/antholume/web/models"
	"reichard.io/antholume/web/pages"
)

func (api *API) appGetHome(c *gin.Context) {
	_, auth := api.getBaseTemplateVars("home", c)

	start := time.Now()
	dailyStats, err := api.db.Queries.GetDailyReadStats(c, auth.UserName)
	if err != nil {
		log.WithError(err).Error("failed to get daily read stats")
		appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("failed to get daily read stats: %s", err))
		return
	}
	log.Debug("GetDailyReadStats DB Performance: ", time.Since(start))

	start = time.Now()
	databaseInfo, err := api.db.Queries.GetDatabaseInfo(c, auth.UserName)
	if err != nil {
		log.WithError(err).Error("failed to get database info")
		appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("failed to get database info: %s", err))
		return
	}
	log.Debug("GetDatabaseInfo DB Performance: ", time.Since(start))

	start = time.Now()
	streaks, err := api.db.Queries.GetUserStreaks(c, auth.UserName)
	if err != nil {
		log.WithError(err).Error("failed to get user streaks")
		appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("failed to get user streaks: %s", err))
		return
	}
	log.Debug("GetUserStreaks DB Performance: ", time.Since(start))

	start = time.Now()
	userStatistics, err := api.db.Queries.GetUserStatistics(c)
	if err != nil {
		log.WithError(err).Error("failed to get user statistics")
		appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("failed to get user statistics: %s", err))
		return
	}
	log.Debug("GetUserStatistics DB Performance: ", time.Since(start))

	api.renderPage(c, &pages.Home{
		Leaderboard: arrangeUserStatistic(userStatistics),
		Streaks:     streaks,
		DailyStats:  dailyStats,
		RecordInfo:  &databaseInfo,
	})
}

func (api *API) appGetDocuments(c *gin.Context) {
	qParams, err := bindQueryParams(c, 9)
	if err != nil {
		log.WithError(err).Error("failed to bind query params")
		appErrorPage(c, http.StatusBadRequest, fmt.Sprintf("failed to bind query params: %s", err))
		return
	}

	var query *string
	if qParams.Search != nil && *qParams.Search != "" {
		search := "%" + *qParams.Search + "%"
		query = &search
	}

	_, auth := api.getBaseTemplateVars("documents", c)
	documents, err := api.db.Queries.GetDocumentsWithStats(c, database.GetDocumentsWithStatsParams{
		UserID:  auth.UserName,
		Query:   query,
		Deleted: ptr.Of(false),
		Offset:  (*qParams.Page - 1) * *qParams.Limit,
		Limit:   *qParams.Limit,
	})
	if err != nil {
		log.WithError(err).Error("failed to get documents with stats")
		appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("failed to get documents with stats: %s", err))
		return
	}

	length, err := api.db.Queries.GetDocumentsSize(c, query)
	if err != nil {
		log.WithError(err).Error("failed to get document sizes")
		appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("failed to get document sizes: %s", err))
		return
	}

	if err = api.getDocumentsWordCount(c, documents); err != nil {
		log.WithError(err).Error("failed to get word counts")
	}

	totalPages := int64(math.Ceil(float64(length) / float64(*qParams.Limit)))
	nextPage := *qParams.Page + 1
	previousPage := *qParams.Page - 1

	api.renderPage(c, pages.Documents{
		Data:     sliceutils.Map(documents, convertDBDocToUI),
		Previous: utils.Ternary(previousPage >= 0, int(previousPage), 0),
		Next:     utils.Ternary(nextPage <= totalPages, int(nextPage), 0),
		Limit:    int(ptr.Deref(qParams.Limit)),
	})
}

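// Worked example of the pagination above (illustrative): with length = 25
// documents and *qParams.Limit = 9, totalPages = ceil(25/9) = 3. On page 3,
// nextPage = 4 exceeds totalPages, so Next renders as 0 (disabled), while
// previousPage = 2 is kept as Previous.
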
func (api *API) appGetDocument(c *gin.Context) {
	var rDocID requestDocumentID
	if err := c.ShouldBindUri(&rDocID); err != nil {
		log.WithError(err).Error("failed to bind URI")
		appErrorPage(c, http.StatusNotFound, "Invalid document")
		return
	}

	_, auth := api.getBaseTemplateVars("document", c)
	document, err := api.db.GetDocument(c, rDocID.DocumentID, auth.UserName)
	if err != nil {
		log.WithError(err).Error("failed to get document")
		appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("failed to get document: %s", err))
		return
	}

	api.renderPage(c, &pages.Document{Data: convertDBDocToUI(*document)})
}

func (api *API) appGetActivity(c *gin.Context) {
	qParams, err := bindQueryParams(c, 15)
	if err != nil {
		log.WithError(err).Error("failed to bind query params")
		appErrorPage(c, http.StatusBadRequest, fmt.Sprintf("failed to bind query params: %s", err))
		return
	}

	_, auth := api.getBaseTemplateVars("activity", c)
	activity, err := api.db.Queries.GetActivity(c, database.GetActivityParams{
		UserID:     auth.UserName,
		Offset:     (*qParams.Page - 1) * *qParams.Limit,
		Limit:      *qParams.Limit,
		DocFilter:  qParams.Document != nil,
		DocumentID: ptr.Deref(qParams.Document),
	})
	if err != nil {
		log.WithError(err).Error("failed to get activity")
		appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("failed to get activity: %s", err))
		return
	}

	api.renderPage(c, &pages.Activity{Data: sliceutils.Map(activity, convertDBActivityToUI)})
}

func (api *API) appGetProgress(c *gin.Context) {
	qParams, err := bindQueryParams(c, 15)
	if err != nil {
		log.WithError(err).Error("failed to bind query params")
		appErrorPage(c, http.StatusBadRequest, fmt.Sprintf("failed to bind query params: %s", err))
		return
	}

	_, auth := api.getBaseTemplateVars("progress", c)
	progress, err := api.db.Queries.GetProgress(c, database.GetProgressParams{
		UserID:     auth.UserName,
		Offset:     (*qParams.Page - 1) * *qParams.Limit,
		Limit:      *qParams.Limit,
		DocFilter:  qParams.Document != nil,
		DocumentID: ptr.Deref(qParams.Document),
	})
	if err != nil {
		log.WithError(err).Error("failed to get progress")
		appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("failed to get progress: %s", err))
		return
	}

	api.renderPage(c, &pages.Progress{Data: sliceutils.Map(progress, convertDBProgressToUI)})
}

func (api *API) appIdentifyDocumentNew(c *gin.Context) {
	var rDocID requestDocumentID
	if err := c.ShouldBindUri(&rDocID); err != nil {
		log.WithError(err).Error("failed to bind URI")
		appErrorPage(c, http.StatusNotFound, "Invalid document")
		return
	}

	var rDocIdentify requestDocumentIdentify
	if err := c.ShouldBind(&rDocIdentify); err != nil {
		log.WithError(err).Error("failed to bind form")
		appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values")
		return
	}

	// Disallow Empty Strings
	if rDocIdentify.Title != nil && strings.TrimSpace(*rDocIdentify.Title) == "" {
		rDocIdentify.Title = nil
	}
	if rDocIdentify.Author != nil && strings.TrimSpace(*rDocIdentify.Author) == "" {
		rDocIdentify.Author = nil
	}
	if rDocIdentify.ISBN != nil && strings.TrimSpace(*rDocIdentify.ISBN) == "" {
		rDocIdentify.ISBN = nil
	}

	// Validate Values
	if rDocIdentify.ISBN == nil && rDocIdentify.Title == nil && rDocIdentify.Author == nil {
		log.Error("invalid or missing form values")
		appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values")
		return
	}

	// Get Metadata
	var searchResult *models.DocumentMetadata
	var allNotifications []*models.Notification
	metadataResults, err := metadata.SearchMetadata(metadata.SourceGoogleBooks, metadata.MetadataInfo{
		Title:  rDocIdentify.Title,
		Author: rDocIdentify.Author,
		ISBN10: rDocIdentify.ISBN,
		ISBN13: rDocIdentify.ISBN,
	})
	if err != nil {
		log.WithError(err).Error("failed to search metadata")
		appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("failed to search metadata: %s", err))
		return
	} else if firstResult, found := sliceutils.First(metadataResults); found {
		searchResult = convertMetaToUI(firstResult)

		// Store First Metadata Result
		if _, err = api.db.Queries.AddMetadata(c, database.AddMetadataParams{
			DocumentID:  rDocID.DocumentID,
			Title:       firstResult.Title,
			Author:      firstResult.Author,
			Description: firstResult.Description,
			Gbid:        firstResult.SourceID,
			Isbn10:      firstResult.ISBN10,
			Isbn13:      firstResult.ISBN13,
		}); err != nil {
			log.WithError(err).Error("failed to add metadata")
		}
	} else {
		allNotifications = append(allNotifications, &models.Notification{
			Type:    models.NotificationTypeError,
			Content: "No Metadata Found",
		})
	}

	// Get Auth
	_, auth := api.getBaseTemplateVars("document", c)
	document, err := api.db.GetDocument(c, rDocID.DocumentID, auth.UserName)
	if err != nil {
		log.WithError(err).Error("failed to get document")
		appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("failed to get document: %s", err))
		return
	}

	api.renderPage(c, &pages.Document{
		Data:   convertDBDocToUI(*document),
		Search: searchResult,
	}, allNotifications...)
}

// Tabs:
// - General (Import, Backup & Restore, Version (githash?), Stats?)
// - Users
// - Metadata
func (api *API) appGetSearch(c *gin.Context) {
	var sParams searchParams
	if err := c.BindQuery(&sParams); err != nil {
		log.WithError(err).Error("failed to bind form")
		appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values")
		return
	}

	// Only Handle Query
	var searchResults []models.SearchResult
	var searchError string
	if sParams.Query != nil && sParams.Source != nil {
		results, err := search.SearchBook(*sParams.Query, *sParams.Source)
		if err != nil {
			log.WithError(err).Error("failed to search book")
			appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("Search Error: %v", err))
			return
		}
		searchResults = sliceutils.Map(results, convertSearchToUI)
	} else if sParams.Query != nil || sParams.Source != nil {
		searchError = "Invalid Query"
	}

	api.renderPage(c, &pages.Search{
		Results: searchResults,
		Source:  ptr.Deref(sParams.Source),
		Query:   ptr.Deref(sParams.Query),
		Error:   searchError,
	})
}

func (api *API) appGetSettings(c *gin.Context) {
	_, auth := api.getBaseTemplateVars("settings", c)

	user, err := api.db.Queries.GetUser(c, auth.UserName)
	if err != nil {
		log.WithError(err).Error("failed to get user")
		appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("failed to get user: %s", err))
		return
	}

	devices, err := api.db.Queries.GetDevices(c, auth.UserName)
	if err != nil {
		log.WithError(err).Error("failed to get devices")
		appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("failed to get devices: %s", err))
		return
	}

	api.renderPage(c, &pages.Settings{
		Timezone: ptr.Deref(user.Timezone),
		Devices:  sliceutils.Map(devices, convertDBDeviceToUI),
	})
}

func (api *API) appEditSettings(c *gin.Context) {
	var rUserSettings requestSettingsEdit
	if err := c.ShouldBind(&rUserSettings); err != nil {
		log.WithError(err).Error("failed to bind form")
		appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values")
		return
	}

	// Validate Something Exists
	if rUserSettings.Password == nil && rUserSettings.NewPassword == nil && rUserSettings.Timezone == nil {
		log.Error("invalid or missing form values")
		appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values")
		return
	}

	_, auth := api.getBaseTemplateVars("settings", c)

	newUserSettings := database.UpdateUserParams{
		UserID: auth.UserName,
		Admin:  auth.IsAdmin,
	}

	// Set New Password
	var allNotifications []*models.Notification
	if rUserSettings.Password != nil && rUserSettings.NewPassword != nil {
		password := fmt.Sprintf("%x", md5.Sum([]byte(*rUserSettings.Password)))
		if _, err := api.authorizeCredentials(c, auth.UserName, password); err != nil {
			allNotifications = append(allNotifications, &models.Notification{
				Type:    models.NotificationTypeError,
				Content: "Invalid Password",
			})
		} else {
			password := fmt.Sprintf("%x", md5.Sum([]byte(*rUserSettings.NewPassword)))
			hashedPassword, err := argon2.CreateHash(password, argon2.DefaultParams)
			if err != nil {
				allNotifications = append(allNotifications, &models.Notification{
					Type:    models.NotificationTypeError,
					Content: "Unknown Error",
				})
			} else {
				allNotifications = append(allNotifications, &models.Notification{
					Type:    models.NotificationTypeSuccess,
					Content: "Password Updated",
				})
				newUserSettings.Password = &hashedPassword
			}
		}
	}

	// Set Time Offset
	if rUserSettings.Timezone != nil {
		allNotifications = append(allNotifications, &models.Notification{
			Type:    models.NotificationTypeSuccess,
			Content: "Time Offset Updated",
		})
		newUserSettings.Timezone = rUserSettings.Timezone
	}

	// Update User
	_, err := api.db.Queries.UpdateUser(c, newUserSettings)
	if err != nil {
		log.WithError(err).Error("failed to update user")
		appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("failed to update user: %s", err))
		return
	}

	// Get User
	user, err := api.db.Queries.GetUser(c, auth.UserName)
	if err != nil {
		log.WithError(err).Error("failed to get user")
		appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("failed to get user: %s", err))
		return
	}

	// Get Devices
	devices, err := api.db.Queries.GetDevices(c, auth.UserName)
	if err != nil {
		log.WithError(err).Error("failed to get devices")
		appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("failed to get devices: %s", err))
		return
	}

	api.renderPage(c, &pages.Settings{
		Devices:  sliceutils.Map(devices, convertDBDeviceToUI),
		Timezone: ptr.Deref(user.Timezone),
	}, allNotifications...)
}

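// Note on the password handling above: the browser-submitted password is
// first MD5-hashed (for KOSync client compatibility) and that hex digest is
// what argon2 wraps and verifies. A minimal sketch of the chain:
//
//	md5Hex := fmt.Sprintf("%x", md5.Sum([]byte(rawPassword)))     // KOSync-compatible digest
//	stored, _ := argon2.CreateHash(md5Hex, argon2.DefaultParams)  // what the DB keeps
//	match, _ := argon2.ComparePasswordAndHash(md5Hex, stored)     // true on login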
func (api *API) renderPage(c *gin.Context, page pages.Page, notifications ...*models.Notification) {
	// Get Authentication Data
	auth, err := getAuthData(c)
	if err != nil {
		log.WithError(err).Error("failed to acquire auth data")
		appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("failed to acquire auth data: %s", err))
		return
	}

	// Generate Page
	pageNode, err := page.Generate(models.PageContext{
		UserInfo: &models.UserInfo{
			Username: auth.UserName,
			IsAdmin:  auth.IsAdmin,
		},
		ServerInfo: &models.ServerInfo{
			RegistrationEnabled: api.cfg.RegistrationEnabled,
			SearchEnabled:       api.cfg.SearchEnabled,
			Version:             api.cfg.Version,
		},
		Notifications: notifications,
	})
	if err != nil {
		log.WithError(err).Error("failed to generate page")
		appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("failed to generate page: %s", err))
		return
	}

	// Render Page
	err = pageNode.Render(c.Writer)
	if err != nil {
		log.WithError(err).Error("failed to render page")
		appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("failed to render page: %s", err))
		return
	}
}

func sortItem[T cmp.Ordered](
	data []database.GetUserStatisticsRow,
	accessor func(s database.GetUserStatisticsRow) T,
	formatter func(s T) string,
) []stats.LeaderboardItem {
	sort.SliceStable(data, func(i, j int) bool {
		return accessor(data[i]) > accessor(data[j])
	})

	var items []stats.LeaderboardItem
	for _, s := range data {
		items = append(items, stats.LeaderboardItem{
			UserID: s.UserID,
			Value:  formatter(accessor(s)),
		})
	}
	return items
}

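// Illustrative sketch: sortItem is generic over any cmp.Ordered column, so a
// new leaderboard needs only an accessor and a formatter. Hypothetical usage:
//
//	items := sortItem(rows,
//		func(r database.GetUserStatisticsRow) int64 { return r.TotalWordsRead },
//		formatters.FormatNumber)
//	// items is sorted descending and formatted for display.
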
func arrangeUserStatistic(data []database.GetUserStatisticsRow) []stats.LeaderboardData {
	wpmFormatter := func(v float64) string { return fmt.Sprintf("%.2f WPM", v) }
	return []stats.LeaderboardData{
		{
			Name:  "WPM",
			All:   sortItem(data, func(r database.GetUserStatisticsRow) float64 { return r.TotalWpm }, wpmFormatter),
			Year:  sortItem(data, func(r database.GetUserStatisticsRow) float64 { return r.YearlyWpm }, wpmFormatter),
			Month: sortItem(data, func(r database.GetUserStatisticsRow) float64 { return r.MonthlyWpm }, wpmFormatter),
			Week:  sortItem(data, func(r database.GetUserStatisticsRow) float64 { return r.WeeklyWpm }, wpmFormatter),
		},
		{
			Name:  "Words",
			All:   sortItem(data, func(r database.GetUserStatisticsRow) int64 { return r.TotalWordsRead }, formatters.FormatNumber),
			Year:  sortItem(data, func(r database.GetUserStatisticsRow) int64 { return r.YearlyWordsRead }, formatters.FormatNumber),
			Month: sortItem(data, func(r database.GetUserStatisticsRow) int64 { return r.MonthlyWordsRead }, formatters.FormatNumber),
			Week:  sortItem(data, func(r database.GetUserStatisticsRow) int64 { return r.WeeklyWordsRead }, formatters.FormatNumber),
		},
		{
			Name: "Duration",
			All: sortItem(data, func(r database.GetUserStatisticsRow) time.Duration {
				return time.Duration(r.TotalSeconds) * time.Second
			}, formatters.FormatDuration),
			Year: sortItem(data, func(r database.GetUserStatisticsRow) time.Duration {
				return time.Duration(r.YearlySeconds) * time.Second
			}, formatters.FormatDuration),
			Month: sortItem(data, func(r database.GetUserStatisticsRow) time.Duration {
				return time.Duration(r.MonthlySeconds) * time.Second
			}, formatters.FormatDuration),
			Week: sortItem(data, func(r database.GetUserStatisticsRow) time.Duration {
				return time.Duration(r.WeeklySeconds) * time.Second
			}, formatters.FormatDuration),
		},
	}
}

File diff suppressed because it is too large

399 api/auth.go
@@ -1,8 +1,10 @@
 package api
 
 import (
+	"context"
 	"crypto/md5"
 	"fmt"
+	"maps"
 	"net/http"
 	"strings"
 	"time"
@@ -11,39 +13,49 @@ import (
 	"github.com/gin-contrib/sessions"
 	"github.com/gin-gonic/gin"
 	log "github.com/sirupsen/logrus"
-	"reichard.io/bbank/database"
+	"reichard.io/antholume/database"
+	"reichard.io/antholume/utils"
 )
 
+// Authorization Data
+type authData struct {
+	UserName string
+	IsAdmin  bool
+	AuthHash string
+}
+
 // KOSync API Auth Headers
 type authKOHeader struct {
 	AuthUser string `header:"x-auth-user"`
 	AuthKey  string `header:"x-auth-key"`
 }
 
-// OPDS Auth Headers
-type authOPDSHeader struct {
-	Authorization string `header:"authorization"`
-}
-
-func (api *API) authorizeCredentials(username string, password string) (authorized bool) {
-	user, err := api.DB.Queries.GetUser(api.DB.Ctx, username)
+func (api *API) authorizeCredentials(ctx context.Context, username string, password string) (*authData, error) {
+	user, err := api.db.Queries.GetUser(ctx, username)
 	if err != nil {
-		return false
+		return nil, err
 	}
 
-	if match, err := argon2.ComparePasswordAndHash(password, *user.Pass); err != nil || match != true {
-		return false
+	if match, err := argon2.ComparePasswordAndHash(password, *user.Pass); err != nil || !match {
+		return nil, err
 	}
 
-	return true
+	// Update Auth Cache
+	api.userAuthCache[user.ID] = *user.AuthHash
+
+	return &authData{
+		UserName: user.ID,
+		IsAdmin:  user.Admin,
+		AuthHash: *user.AuthHash,
+	}, nil
 }
 
 func (api *API) authKOMiddleware(c *gin.Context) {
 	session := sessions.Default(c)
 
 	// Check Session First
-	if user, ok := getSession(session); ok == true {
-		c.Set("AuthorizedUser", user)
+	if auth, ok := api.authorizeSession(c, session); ok {
+		c.Set("Authorization", auth)
 		c.Header("Cache-Control", "private")
 		c.Next()
 		return
@@ -53,25 +65,30 @@ func (api *API) authKOMiddleware(c *gin.Context) {
 
 	var rHeader authKOHeader
 	if err := c.ShouldBindHeader(&rHeader); err != nil {
+		log.WithError(err).Error("failed to bind auth headers")
 		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Incorrect Headers"})
 		return
 	}
 	if rHeader.AuthUser == "" || rHeader.AuthKey == "" {
+		log.Error("invalid authentication headers")
 		c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Invalid Authorization Headers"})
 		return
 	}
 
-	if authorized := api.authorizeCredentials(rHeader.AuthUser, rHeader.AuthKey); authorized != true {
+	authData, err := api.authorizeCredentials(c, rHeader.AuthUser, rHeader.AuthKey)
+	if err != nil {
+		log.WithField("user", rHeader.AuthUser).WithError(err).Error("failed to authorize credentials")
 		c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"})
 		return
 	}
 
-	if err := setSession(session, rHeader.AuthUser); err != nil {
+	if err := api.setSession(session, authData); err != nil {
+		log.WithField("user", rHeader.AuthUser).WithError(err).Error("failed to set session")
 		c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"})
 		return
 	}
 
-	c.Set("AuthorizedUser", rHeader.AuthUser)
+	c.Set("Authorization", *authData)
 	c.Header("Cache-Control", "private")
 	c.Next()
 }
@@ -82,19 +99,22 @@ func (api *API) authOPDSMiddleware(c *gin.Context) {
 	user, rawPassword, hasAuth := c.Request.BasicAuth()
 
 	// Validate Auth Fields
-	if hasAuth != true || user == "" || rawPassword == "" {
+	if !hasAuth || user == "" || rawPassword == "" {
+		log.Error("invalid authorization headers")
 		c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Invalid Authorization Headers"})
 		return
 	}
 
 	// Validate Auth
 	password := fmt.Sprintf("%x", md5.Sum([]byte(rawPassword)))
-	if authorized := api.authorizeCredentials(user, password); authorized != true {
+	authData, err := api.authorizeCredentials(c, user, password)
+	if err != nil {
+		log.WithField("user", user).WithError(err).Error("failed to authorize credentials")
 		c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"})
 		return
 	}
 
-	c.Set("AuthorizedUser", user)
+	c.Set("Authorization", *authData)
 	c.Header("Cache-Control", "private")
 	c.Next()
 }
@@ -103,8 +123,8 @@ func (api *API) authWebAppMiddleware(c *gin.Context) {
 	session := sessions.Default(c)
 
 	// Check Session
-	if user, ok := getSession(session); ok == true {
-		c.Set("AuthorizedUser", user)
+	if auth, ok := api.authorizeSession(c, session); ok {
+		c.Set("Authorization", auth)
 		c.Header("Cache-Control", "private")
 		c.Next()
 		return
@@ -112,38 +132,48 @@ func (api *API) authWebAppMiddleware(c *gin.Context) {
 
 	c.Redirect(http.StatusFound, "/login")
 	c.Abort()
-	return
 }
 
-func (api *API) authFormLogin(c *gin.Context) {
+func (api *API) authAdminWebAppMiddleware(c *gin.Context) {
+	if data, _ := c.Get("Authorization"); data != nil {
+		auth := data.(*authData)
+		if auth.IsAdmin {
+			c.Next()
+			return
+		}
+	}
+
+	appErrorPage(c, http.StatusUnauthorized, "Admin Permissions Required")
+	c.Abort()
+}
+
+func (api *API) appAuthLogin(c *gin.Context) {
+	templateVars, _ := api.getBaseTemplateVars("login", c)
+
 	username := strings.TrimSpace(c.PostForm("username"))
 	rawPassword := strings.TrimSpace(c.PostForm("password"))
 
 	if username == "" || rawPassword == "" {
-		c.HTML(http.StatusUnauthorized, "login", gin.H{
-			"RegistrationEnabled": api.Config.RegistrationEnabled,
-			"Error":               "Invalid Credentials",
-		})
+		templateVars["Error"] = "Invalid Credentials"
+		c.HTML(http.StatusUnauthorized, "page/login", templateVars)
 		return
 	}
 
 	// MD5 - KOSync Compatibility
 	password := fmt.Sprintf("%x", md5.Sum([]byte(rawPassword)))
-	if authorized := api.authorizeCredentials(username, password); authorized != true {
-		c.HTML(http.StatusUnauthorized, "login", gin.H{
-			"RegistrationEnabled": api.Config.RegistrationEnabled,
-			"Error":               "Invalid Credentials",
-		})
+	authData, err := api.authorizeCredentials(c, username, password)
+	if err != nil {
+		log.WithField("user", username).WithError(err).Error("failed to authorize credentials")
+		templateVars["Error"] = "Invalid Credentials"
+		c.HTML(http.StatusUnauthorized, "page/login", templateVars)
 		return
 	}
 
 	// Set Session
 	session := sessions.Default(c)
-	if err := setSession(session, username); err != nil {
-		c.HTML(http.StatusUnauthorized, "login", gin.H{
-			"RegistrationEnabled": api.Config.RegistrationEnabled,
-			"Error":               "Unknown Error",
-		})
+	if err := api.setSession(session, authData); err != nil {
+		templateVars["Error"] = "Invalid Credentials"
+		c.HTML(http.StatusUnauthorized, "page/login", templateVars)
 		return
 	}
 
@@ -151,60 +181,93 @@ func (api *API) authFormLogin(c *gin.Context) {
 	c.Redirect(http.StatusFound, "/")
 }
 
-func (api *API) authFormRegister(c *gin.Context) {
-	if !api.Config.RegistrationEnabled {
-		errorPage(c, http.StatusUnauthorized, "Nice try. Registration is disabled.")
+func (api *API) appAuthRegister(c *gin.Context) {
+	if !api.cfg.RegistrationEnabled {
+		appErrorPage(c, http.StatusUnauthorized, "Nice try. Registration is disabled.")
 		return
 	}
 
+	templateVars, _ := api.getBaseTemplateVars("login", c)
+	templateVars["Register"] = true
+
 	username := strings.TrimSpace(c.PostForm("username"))
 	rawPassword := strings.TrimSpace(c.PostForm("password"))
 
 	if username == "" || rawPassword == "" {
-		c.HTML(http.StatusBadRequest, "login", gin.H{
-			"Register": true,
-			"Error":    "Registration Disabled or User Already Exists",
-		})
+		templateVars["Error"] = "Invalid User or Password"
+		c.HTML(http.StatusBadRequest, "page/login", templateVars)
 		return
 	}
 	password := fmt.Sprintf("%x", md5.Sum([]byte(rawPassword)))
 
 	hashedPassword, err := argon2.CreateHash(password, argon2.DefaultParams)
 	if err != nil {
-		c.HTML(http.StatusBadRequest, "login", gin.H{
-			"Register": true,
-			"Error":    "Registration Disabled or User Already Exists",
-		})
+		templateVars["Error"] = "Registration Disabled or User Already Exists"
+		c.HTML(http.StatusBadRequest, "page/login", templateVars)
 		return
 	}
 
-	rows, err := api.DB.Queries.CreateUser(api.DB.Ctx, database.CreateUserParams{
+	// Generate auth hash
+	rawAuthHash, err := utils.GenerateToken(64)
+	if err != nil {
+		log.Error("Failed to generate user token: ", err)
+		templateVars["Error"] = "Failed to Create User"
+		c.HTML(http.StatusBadRequest, "page/login", templateVars)
+		return
+	}
+
+	// Get current users
+	currentUsers, err := api.db.Queries.GetUsers(c)
+	if err != nil {
+		log.Error("Failed to check all users: ", err)
+		templateVars["Error"] = "Failed to Create User"
+		c.HTML(http.StatusBadRequest, "page/login", templateVars)
+		return
+	}
+
+	// Determine if we should be admin
+	isAdmin := false
+	if len(currentUsers) == 0 {
+		isAdmin = true
+	}
+
+	// Create user in DB
+	authHash := fmt.Sprintf("%x", rawAuthHash)
+	if rows, err := api.db.Queries.CreateUser(c, database.CreateUserParams{
 		ID:   username,
 		Pass: &hashedPassword,
-	})
+		AuthHash: &authHash,
+		Admin:    isAdmin,
+	}); err != nil {
+		log.Error("CreateUser DB Error:", err)
+		templateVars["Error"] = "Registration Disabled or User Already Exists"
+		c.HTML(http.StatusBadRequest, "page/login", templateVars)
+		return
+	} else if rows == 0 {
+		log.Warn("User Already Exists:", username)
+		templateVars["Error"] = "Registration Disabled or User Already Exists"
+		c.HTML(http.StatusBadRequest, "page/login", templateVars)
+		return
+	}
 
-	// SQL Error
+	// Get user
+	user, err := api.db.Queries.GetUser(c, username)
 	if err != nil {
-		c.HTML(http.StatusBadRequest, "login", gin.H{
-			"Register": true,
-			"Error":    "Registration Disabled or User Already Exists",
-		})
+		log.Error("GetUser DB Error:", err)
+		templateVars["Error"] = "Registration Disabled or User Already Exists"
+		c.HTML(http.StatusBadRequest, "page/login", templateVars)
 		return
 	}
 
-	// User Already Exists
-	if rows == 0 {
-		c.HTML(http.StatusBadRequest, "login", gin.H{
-			"Register": true,
-			"Error":    "Registration Disabled or User Already Exists",
-		})
-		return
-	}
+	// Set session
	auth := &authData{
+		UserName: user.ID,
+		IsAdmin:  user.Admin,
+		AuthHash: *user.AuthHash,
+	}
 
-	// Set Session
 	session := sessions.Default(c)
-	if err := setSession(session, username); err != nil {
-		errorPage(c, http.StatusUnauthorized, "Unauthorized.")
+	if err := api.setSession(session, auth); err != nil {
+		appErrorPage(c, http.StatusUnauthorized, "Unauthorized.")
 		return
 	}
 
@@ -212,41 +275,209 @@ func (api *API) authFormRegister(c *gin.Context) {
 	c.Redirect(http.StatusFound, "/")
 }
 
-func (api *API) authLogout(c *gin.Context) {
+func (api *API) appAuthLogout(c *gin.Context) {
 	session := sessions.Default(c)
 	session.Clear()
-	session.Save()
+	if err := session.Save(); err != nil {
+		log.Error("unable to save session")
+	}
+
 	c.Redirect(http.StatusFound, "/login")
 }
 
-func (api *API) demoModeAppError(c *gin.Context) {
-	errorPage(c, http.StatusUnauthorized, "Not Allowed in Demo Mode")
-}
-
-func (api *API) demoModeJSONError(c *gin.Context) {
-	c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Not Allowed in Demo Mode"})
-}
-
-func getSession(session sessions.Session) (user string, ok bool) {
-	// Check Session
+func (api *API) koAuthRegister(c *gin.Context) {
+	if !api.cfg.RegistrationEnabled {
+		c.AbortWithStatus(http.StatusConflict)
+		return
+	}
+
+	var rUser requestUser
+	if err := c.ShouldBindJSON(&rUser); err != nil {
+		log.Error("Invalid JSON Bind")
+		apiErrorPage(c, http.StatusBadRequest, "Invalid User Data")
+		return
+	}
+
+	if rUser.Username == "" || rUser.Password == "" {
+		log.Error("Invalid User - Empty Username or Password")
+		apiErrorPage(c, http.StatusBadRequest, "Invalid User Data")
+		return
+	}
+
+	// Generate password hash
+	hashedPassword, err := argon2.CreateHash(rUser.Password, argon2.DefaultParams)
+	if err != nil {
+		log.Error("Argon2 Hash Failure:", err)
+		apiErrorPage(c, http.StatusBadRequest, "Unknown Error")
+		return
+	}
+
+	// Generate auth hash
+	rawAuthHash, err := utils.GenerateToken(64)
+	if err != nil {
+		log.Error("Failed to generate user token: ", err)
+		apiErrorPage(c, http.StatusBadRequest, "Unknown Error")
+		return
+	}
+
+	// Get current users
+	currentUsers, err := api.db.Queries.GetUsers(c)
+	if err != nil {
+		log.Error("Failed to check all users: ", err)
+		apiErrorPage(c, http.StatusBadRequest, "Failed to Create User")
+		return
+	}
+
+	// Determine if we should be admin
+	isAdmin := false
+	if len(currentUsers) == 0 {
+		isAdmin = true
+	}
+
+	// Create user
+	authHash := fmt.Sprintf("%x", rawAuthHash)
+	if rows, err := api.db.Queries.CreateUser(c, database.CreateUserParams{
+		ID:       rUser.Username,
+		Pass:     &hashedPassword,
+		AuthHash: &authHash,
+		Admin:    isAdmin,
+	}); err != nil {
+		log.Error("CreateUser DB Error:", err)
+		apiErrorPage(c, http.StatusBadRequest, "Invalid User Data")
+		return
+	} else if rows == 0 {
+		log.Error("User Already Exists:", rUser.Username)
+		apiErrorPage(c, http.StatusBadRequest, "User Already Exists")
+		return
+	}
+
+	c.JSON(http.StatusCreated, gin.H{
+		"username": rUser.Username,
+	})
+}
+
+func (api *API) authorizeSession(ctx context.Context, session sessions.Session) (*authData, bool) {
+	// Get Session
 	authorizedUser := session.Get("authorizedUser")
-	if authorizedUser == nil {
-		return "", false
+	isAdmin := session.Get("isAdmin")
+	expiresAt := session.Get("expiresAt")
+	authHash := session.Get("authHash")
+	if authorizedUser == nil || isAdmin == nil || expiresAt == nil || authHash == nil {
+		return nil, false
+	}
+
+	// Create Auth Object
+	auth := &authData{
+		UserName: authorizedUser.(string),
+		IsAdmin:  isAdmin.(bool),
+		AuthHash: authHash.(string),
+	}
+	logger := log.WithField("user", auth.UserName)
+
+	// Validate Auth Hash
+	correctAuthHash, err := api.getUserAuthHash(ctx, auth.UserName)
+	if err != nil {
+		logger.WithError(err).Error("failed to get auth hash")
+		return nil, false
+	} else if correctAuthHash != auth.AuthHash {
+		logger.Warn("user auth hash mismatch")
+		return nil, false
 	}
 
 	// Refresh
-	expiresAt := session.Get("expiresAt")
-	if expiresAt != nil && expiresAt.(int64)-time.Now().Unix() < 60*60*24 {
-		log.Info("[getSession] Refreshing Session")
-		setSession(session, authorizedUser.(string))
+	if expiresAt.(int64)-time.Now().Unix() < 60*60*24 {
+		logger.Info("refreshing session")
+		if err := api.setSession(session, auth); err != nil {
+			logger.WithError(err).Error("failed to refresh session")
+			return nil, false
+		}
 	}
 
-	return authorizedUser.(string), true
+	// Authorized
+	return auth, true
 }
 
-func setSession(session sessions.Session, user string) error {
+func (api *API) setSession(session sessions.Session, auth *authData) error {
 	// Set Session Cookie
-	session.Set("authorizedUser", user)
+	session.Set("authorizedUser", auth.UserName)
+	session.Set("isAdmin", auth.IsAdmin)
 	session.Set("expiresAt", time.Now().Unix()+(60*60*24*7))
+	session.Set("authHash", auth.AuthHash)
 
 	return session.Save()
 }
+
+func (api *API) getUserAuthHash(ctx context.Context, username string) (string, error) {
+	// Return Cache
+	if api.userAuthCache[username] != "" {
+		return api.userAuthCache[username], nil
+	}
+
+	// Get DB
+	user, err := api.db.Queries.GetUser(ctx, username)
+	if err != nil {
+		log.Error("GetUser DB Error:", err)
+		return "", err
+	}
+
+	// Update Cache
+	api.userAuthCache[username] = *user.AuthHash
+
+	return api.userAuthCache[username], nil
+}
+
+func (api *API) rotateAllAuthHashes(ctx context.Context) error {
+	// Do Transaction
+	tx, err := api.db.DB.Begin()
+	if err != nil {
+		log.Error("Transaction Begin DB Error: ", err)
+		return err
+	}
+
+	// Defer & Start Transaction
+	defer func() {
+		if err := tx.Rollback(); err != nil {
+			log.Error("DB Rollback Error:", err)
+		}
+	}()
+	qtx := api.db.Queries.WithTx(tx)
+
+	users, err := qtx.GetUsers(ctx)
+	if err != nil {
+		return err
+	}
+
+	// Update Users
+	newAuthHashCache := make(map[string]string, 0)
+	for _, user := range users {
+		// Generate Auth Hash
+		rawAuthHash, err := utils.GenerateToken(64)
+		if err != nil {
+			return err
+		}
+
+		// Update User
+		authHash := fmt.Sprintf("%x", rawAuthHash)
+		if _, err = qtx.UpdateUser(ctx, database.UpdateUserParams{
+			UserID:   user.ID,
+			AuthHash: &authHash,
+			Admin:    user.Admin,
+		}); err != nil {
+			return err
+		}
+
+		// Save New Hash Cache
+		newAuthHashCache[user.ID] = fmt.Sprintf("%x", rawAuthHash)
+	}
+
+	// Commit Transaction
+	if err := tx.Commit(); err != nil {
+		log.Error("Transaction Commit DB Error: ", err)
+		return err
+	}
+
+	// Transaction Succeeded -> Update Cache
+	maps.Copy(api.userAuthCache, newAuthHashCache)
+
+	return nil
+}
|||||||
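A quick sketch of how the session pieces above compose end-to-end. The middleware name and the gin-contrib/sessions wiring are illustrative assumptions; only authorizeSession, setSession, and the "Authorization" context key come from this diff:

	// Hypothetical auth middleware wiring authorizeSession into gin.
	func (api *API) requireAuth(c *gin.Context) {
		session := sessions.Default(c)

		// Validates user, admin flag & auth hash, refreshing the session
		// when it expires within 24 hours.
		auth, ok := api.authorizeSession(c, session)
		if !ok {
			c.AbortWithStatus(http.StatusUnauthorized)
			return
		}

		// Downstream handlers read this via c.Get("Authorization").
		c.Set("Authorization", *auth)
		c.Next()
	}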
151 api/common.go Normal file
@@ -0,0 +1,151 @@
package api

import (
	"fmt"
	"net/http"
	"os"
	"path/filepath"

	"github.com/gin-gonic/gin"
	log "github.com/sirupsen/logrus"
	"reichard.io/antholume/database"
	"reichard.io/antholume/metadata"
)

func (api *API) createDownloadDocumentHandler(errorFunc func(*gin.Context, int, string)) func(*gin.Context) {
	return func(c *gin.Context) {
		var rDoc requestDocumentID
		if err := c.ShouldBindUri(&rDoc); err != nil {
			log.Error("Invalid URI Bind")
			errorFunc(c, http.StatusBadRequest, "Invalid Request")
			return
		}

		// Get Document
		document, err := api.db.Queries.GetDocument(c, rDoc.DocumentID)
		if err != nil {
			log.Error("GetDocument DB Error:", err)
			errorFunc(c, http.StatusBadRequest, "Unknown Document")
			return
		}

		if document.Filepath == nil {
			log.Error("Document Doesn't Have File:", rDoc.DocumentID)
			errorFunc(c, http.StatusBadRequest, "Document Doesn't Exist")
			return
		}

		// Derive Basepath
		basepath := filepath.Join(api.cfg.DataPath, "documents")
		if document.Basepath != nil && *document.Basepath != "" {
			basepath = *document.Basepath
		}

		// Derive Storage Location
		filePath := filepath.Join(basepath, *document.Filepath)

		// Validate File Exists
		_, err = os.Stat(filePath)
		if os.IsNotExist(err) {
			log.Error("File should but doesn't exist: ", err)
			errorFunc(c, http.StatusBadRequest, "Document Doesn't Exist")
			return
		}

		// Force Download
		c.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", filepath.Base(*document.Filepath)))
		c.File(filePath)
	}
}

func (api *API) createGetCoverHandler(errorFunc func(*gin.Context, int, string)) func(*gin.Context) {
	return func(c *gin.Context) {
		var rDoc requestDocumentID
		if err := c.ShouldBindUri(&rDoc); err != nil {
			log.Error("Invalid URI Bind")
			errorFunc(c, http.StatusNotFound, "Invalid cover.")
			return
		}

		// Validate Document Exists in DB
		document, err := api.db.Queries.GetDocument(c, rDoc.DocumentID)
		if err != nil {
			log.Error("GetDocument DB Error:", err)
			errorFunc(c, http.StatusInternalServerError, fmt.Sprintf("GetDocument DB Error: %v", err))
			return
		}

		// Handle Identified Document
		if document.Coverfile != nil {
			if *document.Coverfile == "UNKNOWN" {
				c.FileFromFS("assets/images/no-cover.jpg", http.FS(api.assets))
				return
			}

			// Derive Path
			safePath := filepath.Join(api.cfg.DataPath, "covers", *document.Coverfile)

			// Validate File Exists
			_, err = os.Stat(safePath)
			if err != nil {
				log.Error("File should but doesn't exist: ", err)
				c.FileFromFS("assets/images/no-cover.jpg", http.FS(api.assets))
				return
			}

			c.File(safePath)
			return
		}

		// Attempt Metadata
		coverDir := filepath.Join(api.cfg.DataPath, "covers")
		coverFile := "UNKNOWN"

		// Identify Documents & Save Covers
		metadataResults, err := metadata.SearchMetadata(metadata.SourceGoogleBooks, metadata.MetadataInfo{
			Title:  document.Title,
			Author: document.Author,
		})

		if err == nil && len(metadataResults) > 0 && metadataResults[0].SourceID != nil {
			firstResult := metadataResults[0]

			// Save Cover
			fileName, err := metadata.CacheCover(*firstResult.SourceID, coverDir, document.ID, false)
			if err == nil {
				coverFile = *fileName
			}

			// Store First Metadata Result
			if _, err = api.db.Queries.AddMetadata(c, database.AddMetadataParams{
				DocumentID:  document.ID,
				Title:       firstResult.Title,
				Author:      firstResult.Author,
				Description: firstResult.Description,
				Gbid:        firstResult.SourceID,
				Olid:        nil,
				Isbn10:      firstResult.ISBN10,
				Isbn13:      firstResult.ISBN13,
			}); err != nil {
				log.Error("AddMetadata DB Error:", err)
			}
		}

		// Upsert Document
		if _, err = api.db.Queries.UpsertDocument(c, database.UpsertDocumentParams{
			ID:        document.ID,
			Coverfile: &coverFile,
		}); err != nil {
			log.Warn("UpsertDocument DB Error:", err)
		}

		// Return Unknown Cover
		if coverFile == "UNKNOWN" {
			c.FileFromFS("assets/images/no-cover.jpg", http.FS(api.assets))
			return
		}

		coverFilePath := filepath.Join(coverDir, coverFile)
		c.File(coverFilePath)
	}
}
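The factory-style handlers above take the error renderer as a parameter so the same download/cover logic can serve both JSON and HTML consumers. A hypothetical wiring sketch (route groups and the appErrorPage renderer are assumptions; apiErrorPage is defined later in this diff):

	// KOReader API routes render JSON errors...
	koGroup.GET("/documents/:document/file", api.createDownloadDocumentHandler(apiErrorPage))
	koGroup.GET("/documents/:document/cover", api.createGetCoverHandler(apiErrorPage))

	// ...while the web UI reuses the identical handlers with an HTML renderer.
	appGroup.GET("/documents/:document/file", api.createDownloadDocumentHandler(appErrorPage))
	appGroup.GET("/documents/:document/cover", api.createGetCoverHandler(appErrorPage))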
83 api/convert.go Normal file
@@ -0,0 +1,83 @@
package api

import (
	"time"

	"reichard.io/antholume/database"
	"reichard.io/antholume/metadata"
	"reichard.io/antholume/pkg/ptr"
	"reichard.io/antholume/pkg/utils"
	"reichard.io/antholume/search"
	"reichard.io/antholume/web/models"
)

func convertDBDocToUI(r database.GetDocumentsWithStatsRow) models.Document {
	return models.Document{
		ID:             r.ID,
		Title:          ptr.Deref(r.Title),
		Author:         ptr.Deref(r.Author),
		ISBN10:         ptr.Deref(r.Isbn10),
		ISBN13:         ptr.Deref(r.Isbn13),
		Description:    ptr.Deref(r.Description),
		Percentage:     r.Percentage,
		WPM:            r.Wpm,
		Words:          r.Words,
		TotalTimeRead:  time.Duration(r.TotalTimeSeconds) * time.Second,
		TimePerPercent: time.Duration(r.SecondsPerPercent) * time.Second,
		HasFile:        ptr.Deref(r.Filepath) != "",
	}
}

func convertMetaToUI(m metadata.MetadataInfo) *models.DocumentMetadata {
	return &models.DocumentMetadata{
		SourceID:    ptr.Deref(m.SourceID),
		ISBN10:      ptr.Deref(m.ISBN10),
		ISBN13:      ptr.Deref(m.ISBN13),
		Title:       ptr.Deref(m.Title),
		Author:      ptr.Deref(m.Author),
		Description: ptr.Deref(m.Description),
		Source:      m.Source,
	}
}

func convertDBActivityToUI(r database.GetActivityRow) models.Activity {
	return models.Activity{
		ID:         r.DocumentID,
		Author:     utils.FirstNonZero(ptr.Deref(r.Author), "N/A"),
		Title:      utils.FirstNonZero(ptr.Deref(r.Title), "N/A"),
		StartTime:  r.StartTime,
		Duration:   time.Duration(r.Duration) * time.Second,
		Percentage: r.EndPercentage,
	}
}

func convertDBProgressToUI(r database.GetProgressRow) models.Progress {
	return models.Progress{
		ID:         r.DocumentID,
		Author:     utils.FirstNonZero(ptr.Deref(r.Author), "N/A"),
		Title:      utils.FirstNonZero(ptr.Deref(r.Title), "N/A"),
		DeviceName: r.DeviceName,
		Percentage: r.Percentage,
		CreatedAt:  r.CreatedAt,
	}
}

func convertDBDeviceToUI(r database.GetDevicesRow) models.Device {
	return models.Device{
		DeviceName: r.DeviceName,
		LastSynced: r.LastSynced,
		CreatedAt:  r.CreatedAt,
	}
}

func convertSearchToUI(r search.SearchItem) models.SearchResult {
	return models.SearchResult{
		ID:         r.ID,
		Title:      r.Title,
		Author:     r.Author,
		Series:     r.Series,
		FileType:   r.FileType,
		FileSize:   r.FileSize,
		UploadDate: r.UploadDate,
	}
}
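The converters above lean on reichard.io/antholume/pkg/ptr, whose contents are not part of this diff. Inferred from the call sites (ptr.Deref(r.Title), ptr.Of(false)), the helpers are presumably the usual generic pointer utilities, roughly:

	package ptr

	// Deref returns the pointed-to value, or the zero value when p is nil.
	func Deref[T any](p *T) T {
		var zero T
		if p == nil {
			return zero
		}
		return *p
	}

	// Of returns a pointer to the provided value.
	func Of[T any](v T) *T {
		return &v
	}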
444 api/ko-routes.go
@@ -10,16 +10,12 @@ import (
 	"net/http"
 	"os"
 	"path/filepath"
-	"strings"
 	"time"
 
-	argon2 "github.com/alexedwards/argon2id"
-	"github.com/gabriel-vasile/mimetype"
 	"github.com/gin-gonic/gin"
 	log "github.com/sirupsen/logrus"
-	"golang.org/x/exp/slices"
-	"reichard.io/bbank/database"
-	"reichard.io/bbank/metadata"
+	"reichard.io/antholume/database"
+	"reichard.io/antholume/metadata"
 )
 
 type activityItem struct {
@@ -75,139 +71,91 @@ type requestDocumentID struct {
 	DocumentID string `uri:"document" binding:"required"`
 }
 
-func (api *API) authorizeUser(c *gin.Context) {
-	c.JSON(200, gin.H{
+func (api *API) koAuthorizeUser(c *gin.Context) {
+	koJSON(c, 200, gin.H{
 		"authorized": "OK",
 	})
 }
 
-func (api *API) createUser(c *gin.Context) {
-	if !api.Config.RegistrationEnabled {
-		c.AbortWithStatus(http.StatusConflict)
-		return
-	}
-
-	var rUser requestUser
-	if err := c.ShouldBindJSON(&rUser); err != nil {
-		log.Error("[createUser] Invalid JSON Bind")
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid User Data"})
-		return
-	}
-
-	if rUser.Username == "" || rUser.Password == "" {
-		log.Error("[createUser] Invalid User - Empty Username or Password")
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid User Data"})
-		return
-	}
-
-	hashedPassword, err := argon2.CreateHash(rUser.Password, argon2.DefaultParams)
-	if err != nil {
-		log.Error("[createUser] Argon2 Hash Failure:", err)
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Error"})
-		return
-	}
-
-	rows, err := api.DB.Queries.CreateUser(api.DB.Ctx, database.CreateUserParams{
-		ID:   rUser.Username,
-		Pass: &hashedPassword,
-	})
-	if err != nil {
-		log.Error("[createUser] CreateUser DB Error:", err)
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid User Data"})
-		return
-	}
-
-	// User Exists
-	if rows == 0 {
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "User Already Exists"})
-		return
-	}
-
-	c.JSON(http.StatusCreated, gin.H{
-		"username": rUser.Username,
-	})
-}
-
-func (api *API) setProgress(c *gin.Context) {
-	rUser, _ := c.Get("AuthorizedUser")
+func (api *API) koSetProgress(c *gin.Context) {
+	var auth authData
+	if data, _ := c.Get("Authorization"); data != nil {
+		auth = data.(authData)
+	}
 
 	var rPosition requestPosition
 	if err := c.ShouldBindJSON(&rPosition); err != nil {
-		log.Error("[setProgress] Invalid JSON Bind")
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Progress Data"})
+		log.Error("Invalid JSON Bind")
+		apiErrorPage(c, http.StatusBadRequest, "Invalid Progress Data")
 		return
 	}
 
 	// Upsert Device
-	if _, err := api.DB.Queries.UpsertDevice(api.DB.Ctx, database.UpsertDeviceParams{
+	if _, err := api.db.Queries.UpsertDevice(c, database.UpsertDeviceParams{
 		ID:         rPosition.DeviceID,
-		UserID:     rUser.(string),
+		UserID:     auth.UserName,
 		DeviceName: rPosition.Device,
 		LastSynced: time.Now().UTC().Format(time.RFC3339),
 	}); err != nil {
-		log.Error("[setProgress] UpsertDevice DB Error:", err)
+		log.Error("UpsertDevice DB Error:", err)
 	}
 
 	// Upsert Document
-	if _, err := api.DB.Queries.UpsertDocument(api.DB.Ctx, database.UpsertDocumentParams{
+	if _, err := api.db.Queries.UpsertDocument(c, database.UpsertDocumentParams{
 		ID: rPosition.DocumentID,
 	}); err != nil {
-		log.Error("[setProgress] UpsertDocument DB Error:", err)
+		log.Error("UpsertDocument DB Error:", err)
 	}
 
 	// Create or Replace Progress
-	progress, err := api.DB.Queries.UpdateProgress(api.DB.Ctx, database.UpdateProgressParams{
+	progress, err := api.db.Queries.UpdateProgress(c, database.UpdateProgressParams{
 		Percentage: rPosition.Percentage,
 		DocumentID: rPosition.DocumentID,
 		DeviceID:   rPosition.DeviceID,
-		UserID:     rUser.(string),
+		UserID:     auth.UserName,
 		Progress:   rPosition.Progress,
 	})
 	if err != nil {
-		log.Error("[setProgress] UpdateProgress DB Error:", err)
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
+		log.Error("UpdateProgress DB Error:", err)
+		apiErrorPage(c, http.StatusBadRequest, "Invalid Request")
 		return
 	}
 
-	// Update Statistic
-	log.Info("[setProgress] UpdateDocumentUserStatistic Running...")
-	if err := api.DB.UpdateDocumentUserStatistic(rPosition.DocumentID, rUser.(string)); err != nil {
-		log.Error("[setProgress] UpdateDocumentUserStatistic Error:", err)
-	}
-	log.Info("[setProgress] UpdateDocumentUserStatistic Complete")
-
-	c.JSON(http.StatusOK, gin.H{
+	koJSON(c, http.StatusOK, gin.H{
 		"document":  progress.DocumentID,
 		"timestamp": progress.CreatedAt,
 	})
 }
 
-func (api *API) getProgress(c *gin.Context) {
-	rUser, _ := c.Get("AuthorizedUser")
+func (api *API) koGetProgress(c *gin.Context) {
+	var auth authData
+	if data, _ := c.Get("Authorization"); data != nil {
+		auth = data.(authData)
+	}
 
 	var rDocID requestDocumentID
 	if err := c.ShouldBindUri(&rDocID); err != nil {
-		log.Error("[getProgress] Invalid URI Bind")
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
+		log.Error("Invalid URI Bind")
+		apiErrorPage(c, http.StatusBadRequest, "Invalid Request")
 		return
 	}
 
-	progress, err := api.DB.Queries.GetProgress(api.DB.Ctx, database.GetProgressParams{
+	progress, err := api.db.Queries.GetDocumentProgress(c, database.GetDocumentProgressParams{
 		DocumentID: rDocID.DocumentID,
-		UserID:     rUser.(string),
+		UserID:     auth.UserName,
 	})
 
 	if err == sql.ErrNoRows {
 		// Not Found
-		c.JSON(http.StatusOK, gin.H{})
+		koJSON(c, http.StatusOK, gin.H{})
 		return
 	} else if err != nil {
-		log.Error("[getProgress] GetProgress DB Error:", err)
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Document"})
+		log.Error("GetDocumentProgress DB Error:", err)
+		apiErrorPage(c, http.StatusBadRequest, "Invalid Document")
 		return
 	}
 
-	c.JSON(http.StatusOK, gin.H{
+	koJSON(c, http.StatusOK, gin.H{
 		"document":   progress.DocumentID,
 		"percentage": progress.Percentage,
 		"progress":   progress.Progress,
@@ -216,21 +164,24 @@ func (api *API) getProgress(c *gin.Context) {
 	})
 }
 
-func (api *API) addActivities(c *gin.Context) {
-	rUser, _ := c.Get("AuthorizedUser")
+func (api *API) koAddActivities(c *gin.Context) {
+	var auth authData
+	if data, _ := c.Get("Authorization"); data != nil {
+		auth = data.(authData)
+	}
 
 	var rActivity requestActivity
 	if err := c.ShouldBindJSON(&rActivity); err != nil {
-		log.Error("[addActivity] Invalid JSON Bind")
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Activity"})
+		log.Error("Invalid JSON Bind")
+		apiErrorPage(c, http.StatusBadRequest, "Invalid Activity")
 		return
 	}
 
 	// Do Transaction
-	tx, err := api.DB.DB.Begin()
+	tx, err := api.db.DB.Begin()
 	if err != nil {
-		log.Error("[addActivities] Transaction Begin DB Error:", err)
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Error"})
+		log.Error("Transaction Begin DB Error:", err)
+		apiErrorPage(c, http.StatusBadRequest, "Unknown Error")
 		return
 	}
 
@@ -242,36 +193,40 @@ func (api *API) addActivities(c *gin.Context) {
 	allDocuments := getKeys(allDocumentsMap)
 
 	// Defer & Start Transaction
-	defer tx.Rollback()
-	qtx := api.DB.Queries.WithTx(tx)
+	defer func() {
+		if err := tx.Rollback(); err != nil {
+			log.Error("DB Rollback Error:", err)
+		}
+	}()
+	qtx := api.db.Queries.WithTx(tx)
 
 	// Upsert Documents
 	for _, doc := range allDocuments {
-		if _, err := qtx.UpsertDocument(api.DB.Ctx, database.UpsertDocumentParams{
+		if _, err := qtx.UpsertDocument(c, database.UpsertDocumentParams{
 			ID: doc,
 		}); err != nil {
-			log.Error("[addActivities] UpsertDocument DB Error:", err)
-			c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Document"})
+			log.Error("UpsertDocument DB Error:", err)
+			apiErrorPage(c, http.StatusBadRequest, "Invalid Document")
 			return
 		}
 	}
 
 	// Upsert Device
-	if _, err = qtx.UpsertDevice(api.DB.Ctx, database.UpsertDeviceParams{
+	if _, err = qtx.UpsertDevice(c, database.UpsertDeviceParams{
 		ID:         rActivity.DeviceID,
-		UserID:     rUser.(string),
+		UserID:     auth.UserName,
 		DeviceName: rActivity.Device,
 		LastSynced: time.Now().UTC().Format(time.RFC3339),
 	}); err != nil {
-		log.Error("[addActivities] UpsertDevice DB Error:", err)
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Device"})
+		log.Error("UpsertDevice DB Error:", err)
+		apiErrorPage(c, http.StatusBadRequest, "Invalid Device")
 		return
 	}
 
 	// Add All Activity
 	for _, item := range rActivity.Activity {
-		if _, err := qtx.AddActivity(api.DB.Ctx, database.AddActivityParams{
-			UserID:     rUser.(string),
+		if _, err := qtx.AddActivity(c, database.AddActivityParams{
+			UserID:     auth.UserName,
 			DocumentID: item.DocumentID,
 			DeviceID:   rActivity.DeviceID,
 			StartTime:  time.Unix(int64(item.StartTime), 0).UTC().Format(time.RFC3339),
@@ -279,104 +234,102 @@ func (api *API) addActivities(c *gin.Context) {
 			StartPercentage: float64(item.Page) / float64(item.Pages),
 			EndPercentage:   float64(item.Page+1) / float64(item.Pages),
 		}); err != nil {
-			log.Error("[addActivities] AddActivity DB Error:", err)
-			c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Activity"})
+			log.Error("AddActivity DB Error:", err)
+			apiErrorPage(c, http.StatusBadRequest, "Invalid Activity")
 			return
 		}
 	}
 
 	// Commit Transaction
 	if err := tx.Commit(); err != nil {
-		log.Error("[addActivities] Transaction Commit DB Error:", err)
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Error"})
+		log.Error("Transaction Commit DB Error:", err)
+		apiErrorPage(c, http.StatusBadRequest, "Unknown Error")
 		return
 	}
 
-	// Update Statistic
-	for _, doc := range allDocuments {
-		log.Info("[addActivities] UpdateDocumentUserStatistic Running...")
-		if err := api.DB.UpdateDocumentUserStatistic(doc, rUser.(string)); err != nil {
-			log.Error("[addActivities] UpdateDocumentUserStatistic Error:", err)
-		}
-		log.Info("[addActivities] UpdateDocumentUserStatistic Complete")
-	}
-
-	c.JSON(http.StatusOK, gin.H{
+	koJSON(c, http.StatusOK, gin.H{
 		"added": len(rActivity.Activity),
 	})
 }
 
-func (api *API) checkActivitySync(c *gin.Context) {
-	rUser, _ := c.Get("AuthorizedUser")
+func (api *API) koCheckActivitySync(c *gin.Context) {
+	var auth authData
+	if data, _ := c.Get("Authorization"); data != nil {
+		auth = data.(authData)
+	}
 
 	var rCheckActivity requestCheckActivitySync
 	if err := c.ShouldBindJSON(&rCheckActivity); err != nil {
-		log.Error("[checkActivitySync] Invalid JSON Bind")
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
+		log.Error("Invalid JSON Bind")
+		apiErrorPage(c, http.StatusBadRequest, "Invalid Request")
 		return
 	}
 
 	// Upsert Device
-	if _, err := api.DB.Queries.UpsertDevice(api.DB.Ctx, database.UpsertDeviceParams{
+	if _, err := api.db.Queries.UpsertDevice(c, database.UpsertDeviceParams{
 		ID:         rCheckActivity.DeviceID,
-		UserID:     rUser.(string),
+		UserID:     auth.UserName,
 		DeviceName: rCheckActivity.Device,
 		LastSynced: time.Now().UTC().Format(time.RFC3339),
 	}); err != nil {
-		log.Error("[checkActivitySync] UpsertDevice DB Error", err)
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Device"})
+		log.Error("UpsertDevice DB Error", err)
+		apiErrorPage(c, http.StatusBadRequest, "Invalid Device")
 		return
 	}
 
 	// Get Last Device Activity
-	lastActivity, err := api.DB.Queries.GetLastActivity(api.DB.Ctx, database.GetLastActivityParams{
-		UserID:   rUser.(string),
+	lastActivity, err := api.db.Queries.GetLastActivity(c, database.GetLastActivityParams{
+		UserID:   auth.UserName,
 		DeviceID: rCheckActivity.DeviceID,
 	})
 	if err == sql.ErrNoRows {
 		lastActivity = time.UnixMilli(0).Format(time.RFC3339)
 	} else if err != nil {
-		log.Error("[checkActivitySync] GetLastActivity DB Error:", err)
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Error"})
+		log.Error("GetLastActivity DB Error:", err)
+		apiErrorPage(c, http.StatusBadRequest, "Unknown Error")
 		return
 	}
 
 	// Parse Time
 	parsedTime, err := time.Parse(time.RFC3339, lastActivity)
 	if err != nil {
-		log.Error("[checkActivitySync] Time Parse Error:", err)
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Error"})
+		log.Error("Time Parse Error:", err)
+		apiErrorPage(c, http.StatusBadRequest, "Unknown Error")
 		return
 	}
 
-	c.JSON(http.StatusOK, gin.H{
+	koJSON(c, http.StatusOK, gin.H{
 		"last_sync": parsedTime.Unix(),
 	})
 }
 
-func (api *API) addDocuments(c *gin.Context) {
+func (api *API) koAddDocuments(c *gin.Context) {
 	var rNewDocs requestDocument
 	if err := c.ShouldBindJSON(&rNewDocs); err != nil {
-		log.Error("[addDocuments] Invalid JSON Bind")
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Document(s)"})
+		log.Error("Invalid JSON Bind")
+		apiErrorPage(c, http.StatusBadRequest, "Invalid Document(s)")
 		return
 	}
 
 	// Do Transaction
-	tx, err := api.DB.DB.Begin()
+	tx, err := api.db.DB.Begin()
 	if err != nil {
-		log.Error("[addDocuments] Transaction Begin DB Error:", err)
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Error"})
+		log.Error("Transaction Begin DB Error:", err)
+		apiErrorPage(c, http.StatusBadRequest, "Unknown Error")
 		return
 	}
 
 	// Defer & Start Transaction
-	defer tx.Rollback()
-	qtx := api.DB.Queries.WithTx(tx)
+	defer func() {
+		if err := tx.Rollback(); err != nil {
+			log.Error("DB Rollback Error:", err)
		}
+	}()
+	qtx := api.db.Queries.WithTx(tx)
 
 	// Upsert Documents
 	for _, doc := range rNewDocs.Documents {
-		_, err := qtx.UpsertDocument(api.DB.Ctx, database.UpsertDocumentParams{
+		_, err := qtx.UpsertDocument(c, database.UpsertDocumentParams{
 			ID:     doc.ID,
 			Title:  api.sanitizeInput(doc.Title),
 			Author: api.sanitizeInput(doc.Author),
@@ -386,78 +339,78 @@ func (api *API) addDocuments(c *gin.Context) {
 			Description: api.sanitizeInput(doc.Description),
 		})
 		if err != nil {
-			log.Error("[addDocuments] UpsertDocument DB Error:", err)
-			c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Document"})
+			log.Error("UpsertDocument DB Error:", err)
+			apiErrorPage(c, http.StatusBadRequest, "Invalid Document")
 			return
 		}
 	}
 
 	// Commit Transaction
 	if err := tx.Commit(); err != nil {
-		log.Error("[addDocuments] Transaction Commit DB Error:", err)
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Error"})
+		log.Error("Transaction Commit DB Error:", err)
+		apiErrorPage(c, http.StatusBadRequest, "Unknown Error")
 		return
 	}
 
-	c.JSON(http.StatusOK, gin.H{
+	koJSON(c, http.StatusOK, gin.H{
 		"changed": len(rNewDocs.Documents),
 	})
 }
 
-func (api *API) checkDocumentsSync(c *gin.Context) {
-	rUser, _ := c.Get("AuthorizedUser")
+func (api *API) koCheckDocumentsSync(c *gin.Context) {
+	var auth authData
+	if data, _ := c.Get("Authorization"); data != nil {
+		auth = data.(authData)
+	}
 
 	var rCheckDocs requestCheckDocumentSync
 	if err := c.ShouldBindJSON(&rCheckDocs); err != nil {
-		log.Error("[checkDocumentsSync] Invalid JSON Bind")
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
+		log.Error("Invalid JSON Bind")
+		apiErrorPage(c, http.StatusBadRequest, "Invalid Request")
 		return
 	}
 
 	// Upsert Device
-	_, err := api.DB.Queries.UpsertDevice(api.DB.Ctx, database.UpsertDeviceParams{
+	_, err := api.db.Queries.UpsertDevice(c, database.UpsertDeviceParams{
 		ID:         rCheckDocs.DeviceID,
-		UserID:     rUser.(string),
+		UserID:     auth.UserName,
 		DeviceName: rCheckDocs.Device,
 		LastSynced: time.Now().UTC().Format(time.RFC3339),
 	})
 	if err != nil {
-		log.Error("[checkDocumentsSync] UpsertDevice DB Error", err)
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Device"})
+		log.Error("UpsertDevice DB Error", err)
+		apiErrorPage(c, http.StatusBadRequest, "Invalid Device")
 		return
 	}
 
-	missingDocs := []database.Document{}
-	deletedDocIDs := []string{}
-
 	// Get Missing Documents
-	missingDocs, err = api.DB.Queries.GetMissingDocuments(api.DB.Ctx, rCheckDocs.Have)
+	missingDocs, err := api.db.Queries.GetMissingDocuments(c, rCheckDocs.Have)
 	if err != nil {
-		log.Error("[checkDocumentsSync] GetMissingDocuments DB Error", err)
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
+		log.Error("GetMissingDocuments DB Error", err)
+		apiErrorPage(c, http.StatusBadRequest, "Invalid Request")
 		return
 	}
 
 	// Get Deleted Documents
-	deletedDocIDs, err = api.DB.Queries.GetDeletedDocuments(api.DB.Ctx, rCheckDocs.Have)
+	deletedDocIDs, err := api.db.Queries.GetDeletedDocuments(c, rCheckDocs.Have)
 	if err != nil {
-		log.Error("[checkDocumentsSync] GetDeletedDocuments DB Error", err)
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
+		log.Error("GetDeletedDocuments DB Error", err)
+		apiErrorPage(c, http.StatusBadRequest, "Invalid Request")
 		return
 	}
 
 	// Get Wanted Documents
 	jsonHaves, err := json.Marshal(rCheckDocs.Have)
 	if err != nil {
-		log.Error("[checkDocumentsSync] JSON Marshal Error", err)
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
+		log.Error("JSON Marshal Error", err)
+		apiErrorPage(c, http.StatusBadRequest, "Invalid Request")
 		return
 	}
 
-	wantedDocs, err := api.DB.Queries.GetWantedDocuments(api.DB.Ctx, string(jsonHaves))
+	wantedDocs, err := api.db.Queries.GetWantedDocuments(c, string(jsonHaves))
 	if err != nil {
-		log.Error("[checkDocumentsSync] GetWantedDocuments DB Error", err)
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
+		log.Error("GetWantedDocuments DB Error", err)
+		apiErrorPage(c, http.StatusBadRequest, "Invalid Request")
 		return
 	}
 
@@ -494,158 +447,116 @@ func (api *API) checkDocumentsSync(c *gin.Context) {
 		rCheckDocSync.Delete = deletedDocIDs
 	}
 
-	c.JSON(http.StatusOK, rCheckDocSync)
+	koJSON(c, http.StatusOK, rCheckDocSync)
 }
 
-func (api *API) uploadExistingDocument(c *gin.Context) {
+func (api *API) koUploadExistingDocument(c *gin.Context) {
 	var rDoc requestDocumentID
 	if err := c.ShouldBindUri(&rDoc); err != nil {
-		log.Error("[uploadExistingDocument] Invalid URI Bind")
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
+		log.Error("Invalid URI Bind")
		apiErrorPage(c, http.StatusBadRequest, "Invalid Request")
 		return
 	}
 
+	// Open Form File
 	fileData, err := c.FormFile("file")
 	if err != nil {
-		log.Error("[uploadExistingDocument] File Error:", err)
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "File Error"})
-		return
-	}
-
-	// Validate Type & Derive Extension on MIME
-	uploadedFile, err := fileData.Open()
-	fileMime, err := mimetype.DetectReader(uploadedFile)
-	fileExtension := fileMime.Extension()
-
-	if !slices.Contains([]string{".epub", ".html"}, fileExtension) {
-		log.Error("[uploadExistingDocument] Invalid FileType:", fileExtension)
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Filetype"})
+		log.Error("File Error:", err)
+		apiErrorPage(c, http.StatusBadRequest, "File error")
 		return
 	}
 
 	// Validate Document Exists in DB
-	document, err := api.DB.Queries.GetDocument(api.DB.Ctx, rDoc.DocumentID)
+	document, err := api.db.Queries.GetDocument(c, rDoc.DocumentID)
 	if err != nil {
-		log.Error("[uploadExistingDocument] GetDocument DB Error:", err)
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Document"})
+		log.Error("GetDocument DB Error:", err)
+		apiErrorPage(c, http.StatusBadRequest, "Unknown Document")
+		return
+	}
+
+	// Open File
+	uploadedFile, err := fileData.Open()
+	if err != nil {
+		log.Error("Unable to open file")
+		apiErrorPage(c, http.StatusBadRequest, "Unable to open file")
+		return
+	}
+
+	// Check Support
+	docType, err := metadata.GetDocumentTypeReader(uploadedFile)
+	if err != nil {
+		log.Error("Unsupported file")
+		apiErrorPage(c, http.StatusBadRequest, "Unsupported file")
 		return
 	}
 
 	// Derive Filename
-	var fileName string
-	if document.Author != nil {
-		fileName = fileName + *document.Author
-	} else {
-		fileName = fileName + "Unknown"
-	}
-
-	if document.Title != nil {
-		fileName = fileName + " - " + *document.Title
-	} else {
-		fileName = fileName + " - Unknown"
-	}
-
-	// Remove Slashes
-	fileName = strings.ReplaceAll(fileName, "/", "")
-
-	// Derive & Sanitize File Name
-	fileName = "." + filepath.Clean(fmt.Sprintf("/%s [%s]%s", fileName, document.ID, fileExtension))
+	fileName := deriveBaseFileName(&metadata.MetadataInfo{
+		Type:       *docType,
+		PartialMD5: &document.ID,
+		Title:      document.Title,
+		Author:     document.Author,
+	})
 
 	// Generate Storage Path
-	safePath := filepath.Join(api.Config.DataPath, "documents", fileName)
+	basePath := filepath.Join(api.cfg.DataPath, "documents")
+	safePath := filepath.Join(basePath, fileName)
 
 	// Save & Prevent Overwrites
 	_, err = os.Stat(safePath)
 	if os.IsNotExist(err) {
 		err = c.SaveUploadedFile(fileData, safePath)
 		if err != nil {
-			log.Error("[uploadExistingDocument] Save Failure:", err)
-			c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "File Error"})
+			log.Error("Save Failure:", err)
+			apiErrorPage(c, http.StatusBadRequest, "File Error")
 			return
 		}
 	}
 
-	// Get MD5 Hash
-	fileHash, err := getFileMD5(safePath)
+	// Acquire Metadata
+	metadataInfo, err := metadata.GetMetadata(safePath)
 	if err != nil {
-		log.Error("[uploadExistingDocument] Hash Failure:", err)
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "File Error"})
-		return
-	}
-
-	// Get Word Count
-	wordCount, err := metadata.GetWordCount(safePath)
-	if err != nil {
-		log.Error("[uploadExistingDocument] Word Count Failure:", err)
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "File Error"})
+		log.Errorf("Unable to acquire metadata: %v", err)
+		apiErrorPage(c, http.StatusBadRequest, "Unable to acquire metadata")
 		return
 	}
 
 	// Upsert Document
-	if _, err = api.DB.Queries.UpsertDocument(api.DB.Ctx, database.UpsertDocumentParams{
+	if _, err = api.db.Queries.UpsertDocument(c, database.UpsertDocumentParams{
 		ID:       document.ID,
-		Md5:      fileHash,
+		Md5:      metadataInfo.MD5,
+		Words:    metadataInfo.WordCount,
 		Filepath: &fileName,
-		Words:    &wordCount,
+		Basepath: &basePath,
 	}); err != nil {
-		log.Error("[uploadExistingDocument] UpsertDocument DB Error:", err)
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Document Error"})
+		log.Error("UpsertDocument DB Error:", err)
+		apiErrorPage(c, http.StatusBadRequest, "Document Error")
 		return
 	}
 
-	c.JSON(http.StatusOK, gin.H{
+	koJSON(c, http.StatusOK, gin.H{
 		"status": "ok",
 	})
 }
 
-func (api *API) downloadDocument(c *gin.Context) {
-	var rDoc requestDocumentID
-	if err := c.ShouldBindUri(&rDoc); err != nil {
-		log.Error("[downloadDocument] Invalid URI Bind")
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
-		return
-	}
-
-	// Get Document
-	document, err := api.DB.Queries.GetDocument(api.DB.Ctx, rDoc.DocumentID)
-	if err != nil {
-		log.Error("[downloadDocument] GetDocument DB Error:", err)
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Document"})
-		return
-	}
-
-	if document.Filepath == nil {
-		log.Error("[downloadDocument] Document Doesn't Have File:", rDoc.DocumentID)
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Document Doesn't Exist"})
-		return
-	}
-
-	// Derive Storage Location
-	filePath := filepath.Join(api.Config.DataPath, "documents", *document.Filepath)
-
-	// Validate File Exists
-	_, err = os.Stat(filePath)
-	if os.IsNotExist(err) {
-		log.Error("[downloadDocument] File Doesn't Exist:", rDoc.DocumentID)
-		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Document Doesn't Exists"})
-		return
-	}
-
-	// Force Download (Security)
-	c.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", filepath.Base(*document.Filepath)))
-	c.File(filePath)
+func (api *API) koDemoModeJSONError(c *gin.Context) {
+	apiErrorPage(c, http.StatusUnauthorized, "Not Allowed in Demo Mode")
+}
+
+func apiErrorPage(c *gin.Context, errorCode int, errorMessage string) {
+	c.AbortWithStatusJSON(errorCode, gin.H{"error": errorMessage})
 }
 
 func (api *API) sanitizeInput(val any) *string {
 	switch v := val.(type) {
 	case *string:
 		if v != nil {
-			newString := html.UnescapeString(api.HTMLPolicy.Sanitize(string(*v)))
+			newString := html.UnescapeString(htmlPolicy.Sanitize(string(*v)))
 			return &newString
 		}
 	case string:
 		if v != "" {
-			newString := html.UnescapeString(api.HTMLPolicy.Sanitize(string(v)))
+			newString := html.UnescapeString(htmlPolicy.Sanitize(string(v)))
 			return &newString
 		}
 	}
@@ -678,3 +589,10 @@ func getFileMD5(filePath string) (*string, error) {
 
 	return &fileHash, nil
 }
+
+// koJSON forces the Content-Type to only return `application/json`. This is
+// addressing the following issue: https://github.com/koreader/koreader/issues/13629
+func koJSON(c *gin.Context, code int, obj any) {
+	c.Header("Content-Type", "application/json")
+	c.JSON(code, obj)
+}
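A note on koJSON: gin's c.JSON emits `application/json; charset=utf-8`, and gin only fills in the Content-Type header when none is set yet, so pre-setting the bare media type is enough to drop the charset parameter that trips up KOReader (per the linked issue). Handlers then swap c.JSON for koJSON one-for-one; the handler below is illustrative, not from this diff:

	func (api *API) koPing(c *gin.Context) { // hypothetical example
		// Responds with Content-Type: application/json (no charset parameter).
		koJSON(c, http.StatusOK, gin.H{"status": "ok"})
	}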
@@ -8,8 +8,9 @@ import (
 
 	"github.com/gin-gonic/gin"
 	log "github.com/sirupsen/logrus"
-	"reichard.io/bbank/database"
-	"reichard.io/bbank/opds"
+	"reichard.io/antholume/database"
+	"reichard.io/antholume/opds"
+	"reichard.io/antholume/pkg/ptr"
 )
 
 var mimeMapping map[string]string = map[string]string{
@@ -61,13 +62,19 @@ func (api *API) opdsEntry(c *gin.Context) {
 }
 
 func (api *API) opdsDocuments(c *gin.Context) {
-	var userID string
-	if rUser, _ := c.Get("AuthorizedUser"); rUser != nil {
-		userID = rUser.(string)
+	auth, err := getAuthData(c)
+	if err != nil {
+		log.WithError(err).Error("failed to acquire auth data")
+		c.AbortWithStatus(http.StatusInternalServerError)
 	}
 
 	// Potential URL Parameters (Default Pagination - 100)
-	qParams := bindQueryParams(c, 100)
+	qParams, err := bindQueryParams(c, 100)
+	if err != nil {
+		log.WithError(err).Error("failed to bind query params")
+		c.AbortWithStatus(http.StatusBadRequest)
+		return
+	}
 
 	// Possible Query
 	var query *string
@@ -77,14 +84,15 @@ func (api *API) opdsDocuments(c *gin.Context) {
 	}
 
 	// Get Documents
-	documents, err := api.DB.Queries.GetDocumentsWithStats(api.DB.Ctx, database.GetDocumentsWithStatsParams{
-		UserID: userID,
-		Query:  query,
-		Offset: (*qParams.Page - 1) * *qParams.Limit,
-		Limit:  *qParams.Limit,
+	documents, err := api.db.Queries.GetDocumentsWithStats(c, database.GetDocumentsWithStatsParams{
+		UserID:  auth.UserName,
+		Query:   query,
+		Deleted: ptr.Of(false),
+		Offset:  (*qParams.Page - 1) * *qParams.Limit,
+		Limit:   *qParams.Limit,
 	})
 	if err != nil {
-		log.Error("[opdsDocuments] GetDocumentsWithStats DB Error:", err)
+		log.WithError(err).Error("failed to get documents with stats")
 		c.AbortWithStatus(http.StatusBadRequest)
 		return
 	}
76 api/streamer.go Normal file
@@ -0,0 +1,76 @@
package api

import (
	"bytes"
	"html/template"
	"net/http"
	"sync"
	"time"

	"github.com/gin-gonic/gin"
)

type streamer struct {
	templates  map[string]*template.Template
	writer     gin.ResponseWriter
	mutex      sync.Mutex
	completeCh chan struct{}
}

func (api *API) newStreamer(c *gin.Context, data string) *streamer {
	stream := &streamer{
		writer:     c.Writer,
		templates:  api.templates,
		completeCh: make(chan struct{}),
	}

	// Set Headers
	header := stream.writer.Header()
	header.Set("Transfer-Encoding", "chunked")
	header.Set("Content-Type", "text/html; charset=utf-8")
	header.Set("X-Content-Type-Options", "nosniff")
	stream.writer.WriteHeader(http.StatusOK)

	// Send Open Element Tags
	stream.write(data)

	// Keep Alive
	go func() {
		closeCh := stream.writer.CloseNotify()
		for {
			select {
			case <-stream.completeCh:
				return
			case <-closeCh:
				return
			default:
				stream.write("<!-- ping -->")
				time.Sleep(2 * time.Second)
			}
		}
	}()

	return stream
}

func (stream *streamer) write(str string) {
	stream.mutex.Lock()
	stream.writer.WriteString(str)
	stream.writer.(http.Flusher).Flush()
	stream.mutex.Unlock()
}

func (stream *streamer) send(templateName string, templateVars gin.H) {
	t := stream.templates[templateName]
	buf := &bytes.Buffer{}
	_ = t.ExecuteTemplate(buf, templateName, templateVars)
	stream.write(buf.String())
}

func (stream *streamer) close(data string) {
	// Send Close Element Tags
	stream.write(data)

	// Close
	close(stream.completeCh)
}
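Usage of the streamer follows an open/send/close shape: newStreamer flushes the opening HTML and starts the keep-alive pinger, send renders template chunks as they become available, and close writes the trailing tags and stops the goroutine. A hypothetical handler sketch (the template name and HTML fragments are illustrative, not from this diff):

	func (api *API) demoProgress(c *gin.Context) {
		stream := api.newStreamer(c, "<div id='progress'>") // open tags flushed immediately
		defer stream.close("</div>")                        // close tags + stop keep-alive pings

		for i := 0; i <= 100; i += 25 {
			// Each send renders a template chunk and flushes it to the client.
			stream.send("progress-row", gin.H{"Percent": i})
			time.Sleep(1 * time.Second)
		}
	}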
188
api/utils.go
188
api/utils.go
@@ -4,61 +4,69 @@ import (
|
|||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"math"
|
"math"
|
||||||
|
"path/filepath"
|
||||||
|
"reflect"
|
||||||
|
"strings"
|
||||||
|
|
||||||
"reichard.io/bbank/database"
|
"github.com/gin-gonic/gin"
|
||||||
"reichard.io/bbank/graph"
|
"reichard.io/antholume/database"
|
||||||
|
"reichard.io/antholume/graph"
|
||||||
|
"reichard.io/antholume/metadata"
|
||||||
)
|
)
|
||||||
|
|
||||||
type UTCOffset struct {
|
func getAuthData(ctx *gin.Context) (*authData, error) {
|
||||||
Name string
|
if data, ok := ctx.Get("Authorization"); ok {
|
||||||
Value string
|
var auth *authData
|
||||||
|
if auth, ok = data.(*authData); ok {
|
||||||
|
return auth, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil, errors.New("could not acquire auth data")
|
||||||
}
|
}
|
||||||
|
|
||||||
var UTC_OFFSETS = []UTCOffset{
|
// getTimeZones returns a string slice of IANA timezones.
|
||||||
{Value: "-12 hours", Name: "UTC−12:00"},
|
func getTimeZones() []string {
|
||||||
{Value: "-11 hours", Name: "UTC−11:00"},
|
return []string{
|
||||||
{Value: "-10 hours", Name: "UTC−10:00"},
|
"Africa/Cairo",
|
||||||
{Value: "-9.5 hours", Name: "UTC−09:30"},
|
"Africa/Johannesburg",
|
||||||
{Value: "-9 hours", Name: "UTC−09:00"},
|
"Africa/Lagos",
|
||||||
{Value: "-8 hours", Name: "UTC−08:00"},
|
"Africa/Nairobi",
|
||||||
{Value: "-7 hours", Name: "UTC−07:00"},
|
"America/Adak",
|
||||||
{Value: "-6 hours", Name: "UTC−06:00"},
|
"America/Anchorage",
|
||||||
{Value: "-5 hours", Name: "UTC−05:00"},
|
"America/Buenos_Aires",
|
||||||
{Value: "-4 hours", Name: "UTC−04:00"},
|
"America/Chicago",
|
||||||
{Value: "-3.5 hours", Name: "UTC−03:30"},
|
"America/Denver",
|
||||||
{Value: "-3 hours", Name: "UTC−03:00"},
|
"America/Los_Angeles",
|
||||||
{Value: "-2 hours", Name: "UTC−02:00"},
|
"America/Mexico_City",
|
||||||
{Value: "-1 hours", Name: "UTC−01:00"},
|
"America/New_York",
|
||||||
{Value: "0 hours", Name: "UTC±00:00"},
|
"America/Nuuk",
|
||||||
{Value: "+1 hours", Name: "UTC+01:00"},
|
"America/Phoenix",
|
||||||
{Value: "+2 hours", Name: "UTC+02:00"},
|
"America/Puerto_Rico",
|
||||||
{Value: "+3 hours", Name: "UTC+03:00"},
|
"America/Sao_Paulo",
|
||||||
{Value: "+3.5 hours", Name: "UTC+03:30"},
|
"America/St_Johns",
|
||||||
{Value: "+4 hours", Name: "UTC+04:00"},
|
"America/Toronto",
|
||||||
{Value: "+4.5 hours", Name: "UTC+04:30"},
|
"Asia/Dubai",
|
||||||
{Value: "+5 hours", Name: "UTC+05:00"},
|
"Asia/Hong_Kong",
|
||||||
{Value: "+5.5 hours", Name: "UTC+05:30"},
|
"Asia/Kolkata",
|
||||||
{Value: "+5.75 hours", Name: "UTC+05:45"},
|
"Asia/Seoul",
|
||||||
{Value: "+6 hours", Name: "UTC+06:00"},
|
"Asia/Shanghai",
|
||||||
{Value: "+6.5 hours", Name: "UTC+06:30"},
|
"Asia/Singapore",
|
||||||
{Value: "+7 hours", Name: "UTC+07:00"},
|
"Asia/Tokyo",
|
||||||
{Value: "+8 hours", Name: "UTC+08:00"},
|
"Atlantic/Azores",
|
||||||
{Value: "+8.75 hours", Name: "UTC+08:45"},
|
"Australia/Melbourne",
|
||||||
{Value: "+9 hours", Name: "UTC+09:00"},
|
"Australia/Sydney",
|
||||||
{Value: "+9.5 hours", Name: "UTC+09:30"},
|
"Europe/Berlin",
|
||||||
{Value: "+10 hours", Name: "UTC+10:00"},
|
"Europe/London",
|
||||||
{Value: "+10.5 hours", Name: "UTC+10:30"},
|
"Europe/Moscow",
|
||||||
{Value: "+11 hours", Name: "UTC+11:00"},
|
"Europe/Paris",
|
||||||
{Value: "+12 hours", Name: "UTC+12:00"},
|
"Pacific/Auckland",
|
||||||
{Value: "+12.75 hours", Name: "UTC+12:45"},
|
"Pacific/Honolulu",
|
||||||
{Value: "+13 hours", Name: "UTC+13:00"},
|
}
|
||||||
{Value: "+14 hours", Name: "UTC+14:00"},
|
|
||||||
}
|
|
||||||
|
|
||||||
func getUTCOffsets() []UTCOffset {
|
|
||||||
return UTC_OFFSETS
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// niceSeconds takes in an int (in seconds) and returns a string readable
|
||||||
|
// representation. For example 1928371 -> "22d 7h 39m 31s".
|
||||||
|
// Deprecated: Use formatters.FormatDuration
|
||||||
func niceSeconds(input int64) (result string) {
|
func niceSeconds(input int64) (result string) {
|
||||||
if input == 0 {
|
if input == 0 {
|
||||||
return "N/A"
|
return "N/A"
|
||||||
@@ -87,7 +95,29 @@ func niceSeconds(input int64) (result string) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Convert Database Array -> Int64 Array
|
// niceNumbers takes in an int and returns a string representation. For example
|
||||||
|
// 19823 -> "19.8k".
|
||||||
|
// Deprecated: Use formatters.FormatNumber
|
||||||
|
func niceNumbers(input int64) string {
|
||||||
|
if input == 0 {
|
||||||
|
return "0"
|
||||||
|
}
|
||||||
|
|
||||||
|
abbreviations := []string{"", "k", "M", "B", "T"}
|
||||||
|
abbrevIndex := int(math.Log10(float64(input)) / 3)
|
||||||
|
scaledNumber := float64(input) / math.Pow(10, float64(abbrevIndex*3))
|
||||||
|
|
||||||
|
if scaledNumber >= 100 {
|
||||||
|
return fmt.Sprintf("%.0f%s", scaledNumber, abbreviations[abbrevIndex])
|
||||||
|
} else if scaledNumber >= 10 {
|
||||||
|
return fmt.Sprintf("%.1f%s", scaledNumber, abbreviations[abbrevIndex])
|
||||||
|
} else {
|
||||||
|
return fmt.Sprintf("%.2f%s", scaledNumber, abbreviations[abbrevIndex])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
// getSVGGraphData builds SVGGraphData from the provided stats, width and height.
// It is used exclusively in templates to generate the daily read stats graph.
func getSVGGraphData(inputData []database.GetDailyReadStatsRow, svgWidth int, svgHeight int) graph.SVGGraphData {
    var intData []int64
    for _, item := range inputData {
@@ -97,11 +127,13 @@ func getSVGGraphData(inputData []database.GetDailyReadStatsRow, svgWidth int, sv
    return graph.GetSVGGraphData(intData, svgWidth, svgHeight)
}

func dict(values ...interface{}) (map[string]interface{}, error) {
// dict returns a map[string]any dict. Each pair of arguments is a key and its
// value, respectively. It's primarily utilized in templates.
func dict(values ...any) (map[string]any, error) {
    if len(values)%2 != 0 {
        return nil, errors.New("invalid dict call")
    }
    dict := make(map[string]interface{}, len(values)/2)
    dict := make(map[string]any, len(values)/2)
    for i := 0; i < len(values); i += 2 {
        key, ok := values[i].(string)
        if !ok {
@@ -111,3 +143,57 @@ func dict(values ...interface{}) (map[string]interface{}, error) {
    }
    return dict, nil
}
// fields returns a map[string]any of the provided struct. It's primarily
// utilized in templates.
func fields(value any) (map[string]any, error) {
    v := reflect.Indirect(reflect.ValueOf(value))
    if v.Kind() != reflect.Struct {
        return nil, fmt.Errorf("%T is not a struct", value)
    }
    m := make(map[string]any)
    t := v.Type()
    for i := 0; i < t.NumField(); i++ {
        sv := t.Field(i)
        m[sv.Name] = v.Field(i).Interface()
    }
    return m, nil
}

// slice returns a slice of the provided arguments. It's primarily utilized in
// templates.
func slice(elements ...any) []any {
    return elements
}

// deriveBaseFileName builds the base filename for a given MetadataInfo object.
func deriveBaseFileName(metadataInfo *metadata.MetadataInfo) string {
    // Derive New FileName
    var newFileName string
    if *metadataInfo.Author != "" {
        newFileName = newFileName + *metadataInfo.Author
    } else {
        newFileName = newFileName + "Unknown"
    }
    if *metadataInfo.Title != "" {
        newFileName = newFileName + " - " + *metadataInfo.Title
    } else {
        newFileName = newFileName + " - Unknown"
    }

    // Remove Slashes
    fileName := strings.ReplaceAll(newFileName, "/", "")
    return "." + filepath.Clean(fmt.Sprintf("/%s [%s]%s", fileName, *metadataInfo.PartialMD5, metadataInfo.Type))
}
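A worked example of the filename derivation above (all field values hypothetical; MetadataInfo's field shapes are assumed from the usage in this function):

    author, title := "Jane Doe", "Some/Book"
    partialMD5 := "0123456789abcdef0123456789abcdef"
    info := metadata.MetadataInfo{Author: &author, Title: &title, PartialMD5: &partialMD5, Type: ".epub"}

    // Slashes are stripped before filepath.Clean, so this prints:
    //   ./Jane Doe - SomeBook [0123456789abcdef0123456789abcdef].epub
    fmt.Println(deriveBaseFileName(&info))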
// importStatusPriority returns the order priority for import status in the UI.
func importStatusPriority(status importStatus) int {
    switch status {
    case importFailed:
        return 1
    case importExists:
        return 2
    default:
        return 3
    }
}
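The dict, fields, and slice helpers above are template-side utilities; a minimal sketch of how they might be registered with html/template (the FuncMap keys and template names here are assumptions, not taken from this diff):

    // import "html/template"
    tmpl := template.New("base").Funcs(template.FuncMap{
        "dict":   dict,
        "fields": fields,
        "slice":  slice,
    })
    // In a template, e.g.: {{ template "svg-graph" (dict "Width" 750 "Height" 125) }}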
@@ -1,12 +1,35 @@
package api

import "testing"
import (
    "testing"

    "github.com/stretchr/testify/assert"
)

func TestNiceSeconds(t *testing.T) {
    want := "22d 7h 39m 31s"
    wantOne := "22d 7h 39m 31s"
    nice := niceSeconds(1928371)
    wantNA := "N/A"

    if nice != want {
    niceOne := niceSeconds(1928371)
        t.Fatalf(`Expected: %v, Got: %v`, want, nice)
    niceNA := niceSeconds(0)

    assert.Equal(t, wantOne, niceOne, "should be nice seconds")
    assert.Equal(t, wantNA, niceNA, "should be nice NA")
}

func TestNiceNumbers(t *testing.T) {
    wantMillions := "198M"
    wantThousands := "19.8k"
    wantThousandsTwo := "1.98k"
    wantZero := "0"

    niceMillions := niceNumbers(198236461)
    niceThousands := niceNumbers(19823)
    niceThousandsTwo := niceNumbers(1984)
    niceZero := niceNumbers(0)

    assert.Equal(t, wantMillions, niceMillions, "should be nice millions")
    assert.Equal(t, wantThousands, niceThousands, "should be nice thousands")
    assert.Equal(t, wantThousandsTwo, niceThousandsTwo, "should be nice thousands")
    assert.Equal(t, wantZero, niceZero, "should be nice zero")
}
116
assets/index.css
Normal file
@@ -0,0 +1,116 @@
/* ----------------------------- */
/* -------- PWA Styling -------- */
/* ----------------------------- */
html,
body {
  overscroll-behavior-y: none;
  margin: 0px;
}

html {
  height: calc(100% + env(safe-area-inset-bottom));
  padding: env(safe-area-inset-top) env(safe-area-inset-right) 0
    env(safe-area-inset-left);
}

main {
  height: calc(100dvh - 4rem - env(safe-area-inset-top));
}

#container {
  padding-bottom: calc(5em + env(safe-area-inset-bottom) * 2);
}

/* No Scrollbar - IE, Edge, Firefox */
* {
  -ms-overflow-style: none;
  scrollbar-width: none;
}

/* No Scrollbar - WebKit */
*::-webkit-scrollbar {
  display: none;
}

/* ----------------------------- */
/* -------- CSS Button -------- */
/* ----------------------------- */
.css-button:checked + div {
  visibility: visible;
  opacity: 1;
}

.css-button + div {
  visibility: hidden;
  opacity: 0;
}

/* ----------------------------- */
/* ------- User Dropdown ------- */
/* ----------------------------- */
#user-dropdown-button:checked + #user-dropdown {
  visibility: visible;
  opacity: 1;
}

#user-dropdown {
  visibility: hidden;
  opacity: 0;
}

/* ----------------------------- */
/* ----- Mobile Navigation ----- */
/* ----------------------------- */
#mobile-nav-button span {
  transform-origin: 5px 0px;
  transition:
    transform 0.5s cubic-bezier(0.77, 0.2, 0.05, 1),
    background 0.5s cubic-bezier(0.77, 0.2, 0.05, 1),
    opacity 0.55s ease;
}

#mobile-nav-button span:first-child {
  transform-origin: 0% 0%;
}

#mobile-nav-button span:nth-last-child(2) {
  transform-origin: 0% 100%;
}

#mobile-nav-button input:checked ~ span {
  opacity: 1;
  transform: rotate(45deg) translate(2px, -2px);
}

#mobile-nav-button input:checked ~ span:nth-last-child(3) {
  opacity: 0;
  transform: rotate(0deg) scale(0.2, 0.2);
}

#mobile-nav-button input:checked ~ span:nth-last-child(2) {
  transform: rotate(-45deg) translate(0, 6px);
}

#mobile-nav-button input:checked ~ div {
  transform: none;
}

@media (min-width: 1024px) {
  #mobile-nav-button input ~ div {
    transform: none;
  }
}

#menu {
  top: 0;
  padding-top: env(safe-area-inset-top);
  transform-origin: 0% 0%;
  transform: translate(-100%, 0);
  transition: transform 0.5s cubic-bezier(0.77, 0.2, 0.05, 1);
}

@media (orientation: landscape) {
  #menu {
    transform: translate(calc(-1 * (env(safe-area-inset-left) + 100%)), 0);
  }
}
2
assets/lib/epub.min.js
vendored
File diff suppressed because one or more lines are too long
@@ -25,7 +25,7 @@
<title>AnthoLume - Local</title>

<link rel="manifest" href="/manifest.json" />
<link rel="stylesheet" href="/assets/style.css" />
<link rel="stylesheet" href="/assets/tailwind.css" />

<!-- Libraries -->
<script src="/assets/lib/jszip.min.js"></script>
119
assets/reader/fonts.css
Normal file
@@ -0,0 +1,119 @@
/**
 * Lato
 * - Charsets: [latin,latin-ext]
 * - Styles: [100,700,100italic,regular,italic,700italic]
 **/

/* lato-100 - latin_latin-ext */
@font-face {
  font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
  font-family: "Lato";
  font-style: normal;
  font-weight: 100;
  src: url("./fonts/lato-v24-latin_latin-ext-100.woff2") format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
}

/* lato-100italic - latin_latin-ext */
@font-face {
  font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
  font-family: "Lato";
  font-style: italic;
  font-weight: 100;
  src: url("./fonts/lato-v24-latin_latin-ext-100italic.woff2") format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
}

/* lato-regular - latin_latin-ext */
@font-face {
  font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
  font-family: "Lato";
  font-style: normal;
  font-weight: 400;
  src: url("./fonts/lato-v24-latin_latin-ext-regular.woff2") format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
}

/* lato-italic - latin_latin-ext */
@font-face {
  font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
  font-family: "Lato";
  font-style: italic;
  font-weight: 400;
  src: url("./fonts/lato-v24-latin_latin-ext-italic.woff2") format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
}

/* lato-700 - latin_latin-ext */
@font-face {
  font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
  font-family: "Lato";
  font-style: normal;
  font-weight: 700;
  src: url("./fonts/lato-v24-latin_latin-ext-700.woff2") format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
}

/* lato-700italic - latin_latin-ext */
@font-face {
  font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
  font-family: "Lato";
  font-style: italic;
  font-weight: 700;
  src: url("./fonts/lato-v24-latin_latin-ext-700italic.woff2") format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
}

/**
 * Open Sans
 * - Charsets: [latin,latin-ext]
 * - Styles: [700,regular,italic,700italic]
 **/

/* open-sans-regular - latin_latin-ext */
@font-face {
  font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
  font-family: "Open Sans";
  font-style: normal;
  font-weight: 400;
  src: url("./fonts/open-sans-v36-latin_latin-ext-regular.woff2")
    format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
}

/* open-sans-italic - latin_latin-ext */
@font-face {
  font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
  font-family: "Open Sans";
  font-style: italic;
  font-weight: 400;
  src: url("./fonts/open-sans-v36-latin_latin-ext-italic.woff2") format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
}

/* open-sans-700 - latin_latin-ext */
@font-face {
  font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
  font-family: "Open Sans";
  font-style: normal;
  font-weight: 700;
  src: url("./fonts/open-sans-v36-latin_latin-ext-700.woff2") format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
}

/* open-sans-700italic - latin_latin-ext */
@font-face {
  font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
  font-family: "Open Sans";
  font-style: italic;
  font-weight: 700;
  src: url("./fonts/open-sans-v36-latin_latin-ext-700italic.woff2")
    format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
}

/**
 * Arbutus Slab
 * - Charsets: [latin,latin-ext]
 * - Styles: [regular]
 **/

/* arbutus-slab-regular - latin_latin-ext */
@font-face {
  font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
  font-family: "Arbutus Slab";
  font-style: normal;
  font-weight: 400;
  src: url("./fonts/arbutus-slab-v16-latin_latin-ext-regular.woff2")
    format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
}
BIN
assets/reader/fonts/arbutus-slab-v16-latin_latin-ext-regular.woff2
Normal file
Binary file not shown.
BIN
assets/reader/fonts/lato-v24-latin_latin-ext-100.woff2
Normal file
Binary file not shown.
BIN
assets/reader/fonts/lato-v24-latin_latin-ext-100italic.woff2
Normal file
Binary file not shown.
BIN
assets/reader/fonts/lato-v24-latin_latin-ext-700.woff2
Normal file
Binary file not shown.
BIN
assets/reader/fonts/lato-v24-latin_latin-ext-700italic.woff2
Normal file
Binary file not shown.
BIN
assets/reader/fonts/lato-v24-latin_latin-ext-italic.woff2
Normal file
Binary file not shown.
BIN
assets/reader/fonts/lato-v24-latin_latin-ext-regular.woff2
Normal file
Binary file not shown.
BIN
assets/reader/fonts/open-sans-v36-latin_latin-ext-700.woff2
Normal file
Binary file not shown.
BIN
assets/reader/fonts/open-sans-v36-latin_latin-ext-700italic.woff2
Normal file
Binary file not shown.
BIN
assets/reader/fonts/open-sans-v36-latin_latin-ext-italic.woff2
Normal file
Binary file not shown.
BIN
assets/reader/fonts/open-sans-v36-latin_latin-ext-regular.woff2
Normal file
Binary file not shown.
@@ -1,4 +1,4 @@
<!DOCTYPE html>
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8" />
@@ -17,7 +17,7 @@
<title>AnthoLume - Reader</title>

<link rel="manifest" href="/manifest.json" />
<link rel="stylesheet" href="/assets/style.css" />
<link rel="stylesheet" href="/assets/tailwind.css" />

<!-- Libraries -->
<script src="/assets/lib/jszip.min.js"></script>
@@ -82,8 +82,13 @@
  id="top-bar"
  class="transition-all duration-200 absolute z-10 bg-gray-100 dark:bg-gray-800 w-full px-2"
>
  <div class="w-full h-32 flex items-center justify-around relative">
  <div
    <div class="text-gray-500 absolute top-6 left-4 flex flex-col gap-4">
    class="max-h-[75vh] w-full flex flex-col items-center justify-around relative dark:text-white"
  >
    <div class="h-32">
      <div
        class="text-gray-500 absolute top-6 left-4 flex flex-col gap-4"
      >
        <a href="#">
          <svg
            width="32"
@@ -152,6 +157,11 @@
        </div>
      </div>
    </div>
    <div
      id="toc"
      class="w-full text-center max-h-[50%] overflow-scroll no-scrollbar"
    ></div>
  </div>
</div>

<div
@@ -1,5 +1,5 @@
const THEMES = ["light", "tan", "blue", "gray", "black"];
const THEME_FILE = "/assets/reader/readerThemes.css";
const THEME_FILE = "/assets/reader/themes.css";

/**
 * Initial load handler. Gets called on DOMContentLoaded. Responsible for
@@ -66,6 +66,56 @@ function populateMetadata(data) {
  authorEl.innerText = data.author;
}

/**
 * Populate the Table of Contents
 **/
function populateTOC() {
  if (!currentReader.book.navigation.toc) {
    console.warn("[populateTOC] No TOC");
    return;
  }

  let tocEl = document.querySelector("#toc");
  if (!tocEl) {
    console.warn("[populateTOC] No TOC Element");
    return;
  }

  // Parse the Table of Contents
  let parsedTOC = currentReader.book.navigation.toc.reduce((agg, item) => {
    let sectionTitle = item.label.trim();
    agg.push({ title: sectionTitle, href: item.href });
    if (item.subitems.length == 0) {
      return agg;
    }

    let allSubSections = item.subitems.map(item => {
      let itemTitle = item.label.trim();
      if (sectionTitle != "") {
        itemTitle = sectionTitle + " - " + item.label.trim();
      }
      return { title: itemTitle, href: item.href };
    });
    agg.push(...allSubSections);

    return agg;
  }, [])

  // Add Table of Contents to DOM
  let listEl = document.createElement("ul");
  listEl.classList.add("m-4")
  parsedTOC.forEach(item => {
    let listItem = document.createElement("li");
    listItem.style.cursor = "pointer";
    listItem.addEventListener("click", () => {
      currentReader.rendition.display(item.href);
    });
    listItem.textContent = item.title;
    listEl.appendChild(listItem);
  });
  tocEl.appendChild(listEl);
}
/**
 * This is the main reader class. All functionality is wrapped in this class.
 * Responsible for handling gesture / clicks, flushing progress & activity,
@@ -97,16 +147,18 @@ class EBookReader {
      flow: "paginated",
      width: "100%",
      height: "100%",
      allowScriptedContent: true,
    });

    // Setup Reader
    this.book.ready.then(this.setupReader.bind(this));

    // Initialize
    this.initCSP();
    this.initDevice();
    this.initWakeLock();
    this.initThemes();
    this.initRenditionListeners();
    this.initViewerListeners();
    this.initDocumentListeners();
  }

@@ -141,7 +193,7 @@ class EBookReader {
    return "00000000000000000000000000000000".replace(/[018]/g, (c) =>
      (c ^ (crypto.getRandomValues(new Uint8Array(1))[0] & (15 >> (c / 4))))
        .toString(16)
        .toUpperCase()
        .toUpperCase(),
    );
  }

@@ -244,7 +296,7 @@ class EBookReader {
  initThemes() {
    // Register Themes
    THEMES.forEach((theme) =>
      this.rendition.themes.register(theme, THEME_FILE)
      this.rendition.themes.register(theme, THEME_FILE),
    );

    let themeLinkEl = document.createElement("link");
@@ -267,25 +319,48 @@ class EBookReader {
    // Restore Theme
    this.setTheme();

    // Set Fonts - TODO: Local
    // Set Fonts
    // https://gwfh.mranftl.com/fonts
    this.rendition.getContents().forEach((c) => {
      [
        "https://fonts.googleapis.com/css?family=Arbutus+Slab",
        "https://fonts.googleapis.com/css?family=Open+Sans",
        "https://fonts.googleapis.com/css?family=Lato:400,400i,700,700i",
      ].forEach((url) => {
        let el = c.document.head.appendChild(
          c.document.createElement("link")
          c.document.createElement("link"),
        );
        el.setAttribute("rel", "stylesheet");
        el.setAttribute("href", url);
        el.setAttribute("href", "/assets/reader/fonts.css");
      });
      });
      }.bind(this),
      }.bind(this)
    );
  }

  /**
   * EpubJS will set iframe sandbox when setting "allowScriptedContent: false".
   * However, Safari completely blocks us from attaching listeners to the iframe
   * document. So instead we just inject a restrictive CSP rule.
   *
   * This effectively blocks all script content within the iframe while still
   * allowing us to attach listeners to the iframe document.
   **/
  initCSP() {
    // Derive CSP Host
    var protocol = document.location.protocol;
    var host = document.location.host;
    var cspURL = `${protocol}//${host}`;

    // Add CSP Policy
    this.book.spine.hooks.content.register((output, section) => {
      let cspWrapper = document.createElement("div");
      cspWrapper.innerHTML = `
        <meta
          http-equiv="Content-Security-Policy"
          content="require-trusted-types-for 'script';
            style-src 'self' blob: 'unsafe-inline' ${cspURL};
            object-src 'none';
            script-src 'none';"
        >`;
      let cspMeta = cspWrapper.children[0];
      output.head.append(cspMeta);
    });
  }
/**
 * Set theme & meta theme color
 **/
@@ -311,7 +386,7 @@ class EBookReader {
    let themeColorEl = document.querySelector("[name='theme-color']");
    let themeStyleSheet = document.querySelector("#themes").sheet;
    let themeStyleRule = Array.from(themeStyleSheet.cssRules).find(
      (item) => item.selectorText == "." + colorScheme
      (item) => item.selectorText == "." + colorScheme,
    );

    // Match Reader Theme
@@ -325,13 +400,13 @@ class EBookReader {
    // Set Font Family
    item.document.documentElement.style.setProperty(
      "--editor-font-family",
      fontFamily
      fontFamily,
    );

    // Set Font Size
    item.document.documentElement.style.setProperty(
      "--editor-font-size",
      fontSize + "em"
      fontSize + "em",
    );

    // Set Highlight Style
@@ -364,7 +439,7 @@ class EBookReader {

    // Compute Style
    let backgroundColor = getComputedStyle(
      this.bookState.progressElement.ownerDocument.body
      this.bookState.progressElement.ownerDocument.body,
    ).backgroundColor;

    // Set Style
@@ -378,9 +453,9 @@ class EBookReader {
  }

  /**
   * Rendition hooks
   * Viewer Listeners
   **/
  initRenditionListeners() {
  initViewerListeners() {
    /**
     * Initiate the debounce when the given function returns true.
     * Don't run it again until the timeout lapses.
@@ -408,56 +483,18 @@ class EBookReader {
    let bottomBar = document.querySelector("#bottom-bar");

    // Local Functions
    let getCFIFromXPath = this.getCFIFromXPath.bind(this);
    let setPosition = this.setPosition.bind(this);
    let nextPage = this.nextPage.bind(this);
    let prevPage = this.prevPage.bind(this);
    let saveSettings = this.saveSettings.bind(this);

    // Local Vars
    let readerSettings = this.readerSettings;
    let bookState = this.bookState;

    this.rendition.hooks.render.register(function (doc, data) {
      let renderDoc = doc.document;

      // ------------------------------------------------ //
      // ---------------- Wake Lock Hack ---------------- //
      // ----------------- Swipe Helpers ---------------- //
      // ------------------------------------------------ //
      let wakeLockListener = function () {
        doc.window.parent.document.dispatchEvent(new CustomEvent("wakelock"));
      };
      renderDoc.addEventListener("click", wakeLockListener);
      renderDoc.addEventListener("gesturechange", wakeLockListener);
      renderDoc.addEventListener("touchstart", wakeLockListener);

      // ------------------------------------------------ //
      // --------------- Swipe Pagination --------------- //
      // ------------------------------------------------ //
    let disablePagination = false;
    let touchStartX,
      touchStartY,
      touchEndX,
      touchEndY = undefined;

      renderDoc.addEventListener(
        "touchstart",
        function (event) {
          touchStartX = event.changedTouches[0].screenX;
          touchStartY = event.changedTouches[0].screenY;
        },
        false
      );

      renderDoc.addEventListener(
        "touchend",
        function (event) {
          touchEndX = event.changedTouches[0].screenX;
          touchEndY = event.changedTouches[0].screenY;
          handleGesture(event);
        },
        false
      );

    function handleGesture(event) {
      let drasticity = 75;

@@ -473,18 +510,55 @@ class EBookReader {
      }

      // Swipe Left
      if (touchEndX + drasticity < touchStartX) {
      if (!disablePagination && touchEndX + drasticity < touchStartX) {
        nextPage();
      }

      // Swipe Right
      if (touchEndX - drasticity > touchStartX) {
      if (!disablePagination && touchEndX - drasticity > touchStartX) {
        prevPage();
      }
    }

    function handleSwipeDown() {
      if (bottomBar.classList.contains("bottom-0")) {
        bottomBar.classList.remove("bottom-0");
        disablePagination = false;
      } else {
        topBar.classList.add("top-0");
        populateTOC()
        disablePagination = true;
      }
    }

    function handleSwipeUp() {
      if (topBar.classList.contains("top-0")) {
        topBar.classList.remove("top-0");
        disablePagination = false;

        const tocEl = document.querySelector("#toc");
        if (tocEl) tocEl.innerHTML = "";
      } else {
        bottomBar.classList.add("bottom-0");
        disablePagination = true;
      }
    }

    this.rendition.hooks.render.register(function (doc, data) {
      let renderDoc = doc.document;

      // ------------------------------------------------ //
      // --------------- Bottom & Top Bar --------------- //
      // ---------------- Wake Lock Hack ---------------- //
      // ------------------------------------------------ //
      let wakeLockListener = function () {
        renderDoc.dispatchEvent(new CustomEvent("wakelock"));
      };
      renderDoc.addEventListener("click", wakeLockListener);
      renderDoc.addEventListener("gesturechange", wakeLockListener);
      renderDoc.addEventListener("touchstart", wakeLockListener);

      // ------------------------------------------------ //
      // --------------- Bars & Page Turn --------------- //
      // ------------------------------------------------ //
      renderDoc.addEventListener(
        "click",
@@ -513,13 +587,13 @@ class EBookReader {
          // Handle Event
          if (yCoord < top) handleSwipeDown();
          else if (yCoord > bottom) handleSwipeUp();
          else if (xCoord < left) prevPage();
          else if (!disablePagination && xCoord < left) prevPage();
          else if (xCoord > right) nextPage();
          else if (!disablePagination && xCoord > right) nextPage();
          else {
            bottomBar.classList.remove("bottom-0");
            topBar.classList.remove("top-0");
          }
        }.bind(this)
        }.bind(this),
      );

      renderDoc.addEventListener(
@@ -533,50 +607,30 @@ class EBookReader {
            handleSwipeDown();
            return true;
          }
        }, 400)
        }, 400),
      );

      function handleSwipeDown() {
        if (bottomBar.classList.contains("bottom-0"))
          bottomBar.classList.remove("bottom-0");
        else topBar.classList.add("top-0");
      }

      function handleSwipeUp() {
        if (topBar.classList.contains("top-0"))
          topBar.classList.remove("top-0");
        else bottomBar.classList.add("bottom-0");
      }

      // ------------------------------------------------ //
      // -------------- Keyboard Shortcuts -------------- //
      // ------------------- Gestures ------------------- //
      // ------------------------------------------------ //

      renderDoc.addEventListener(
        "keyup",
        function (e) {
          // Left Key (Previous Page)
          if ((e.keyCode || e.which) == 37) {
            prevPage();
          }

          // Right Key (Next Page)
          if ((e.keyCode || e.which) == 39) {
            nextPage();
          }

          // "t" Key (Theme Cycle)
          if ((e.keyCode || e.which) == 84) {
            let currentThemeIdx = THEMES.indexOf(
              readerSettings.theme.colorScheme
            );
            let colorScheme =
              THEMES.length == currentThemeIdx + 1
                ? THEMES[0]
                : THEMES[currentThemeIdx + 1];
            setTheme({ colorScheme });
          }
        },
        false
      );
      renderDoc.addEventListener(
        "touchstart",
        function (event) {
          touchStartX = event.changedTouches[0].screenX;
          touchStartY = event.changedTouches[0].screenY;
        },
        false,
      );

      renderDoc.addEventListener(
        "touchend",
        function (event) {
          touchEndX = event.changedTouches[0].screenX;
          touchEndY = event.changedTouches[0].screenY;
          handleGesture(event);
        },
        false,
      );
    });
  }
@@ -591,7 +645,9 @@ class EBookReader {
    let nextPage = this.nextPage.bind(this);
    let prevPage = this.prevPage.bind(this);

    // Keyboard Shortcuts
    // ------------------------------------------------ //
    // -------------- Keyboard Shortcuts -------------- //
    // ------------------------------------------------ //
    document.addEventListener(
      "keyup",
      function (e) {
@@ -608,7 +664,7 @@ class EBookReader {
        // "t" Key (Theme Cycle)
        if ((e.keyCode || e.which) == 84) {
          let currentThemeIdx = THEMES.indexOf(
            this.readerSettings.theme.colorScheme
            this.readerSettings.theme.colorScheme,
          );
          let colorScheme =
            THEMES.length == currentThemeIdx + 1
@@ -617,7 +673,7 @@ class EBookReader {
          this.setTheme({ colorScheme });
        }
      }.bind(this),
      false
      false,
    );

    // Color Scheme Switcher
@@ -628,9 +684,9 @@ class EBookReader {
        function (event) {
          let colorScheme = event.target.innerText;
          this.setTheme({ colorScheme });
        }.bind(this)
        }.bind(this),
      );
    }.bind(this)
    }.bind(this),
    );

    // Font Switcher
@@ -645,9 +701,9 @@ class EBookReader {
          this.setTheme({ fontFamily });

          this.setPosition(cfi);
        }.bind(this)
        }.bind(this),
      );
    }.bind(this)
    }.bind(this),
    );

    // Font Size
@@ -670,14 +726,17 @@ class EBookReader {

          // Restore CFI
          this.setPosition(cfi);
        }.bind(this)
        }.bind(this),
      );
    }.bind(this)
    }.bind(this),
    );

    // Close Top Bar
    document.querySelector(".close-top-bar").addEventListener("click", () => {
      topBar.classList.remove("top-0");

      const tocEl = document.querySelector("#toc");
      if (tocEl) tocEl.innerHTML = "";
    });
  }
|
|
||||||
@@ -759,7 +818,7 @@ class EBookReader {
|
|||||||
if (pageWPM >= WPM_MAX)
|
if (pageWPM >= WPM_MAX)
|
||||||
return console.log(
|
return console.log(
|
||||||
"[createActivity] Page WPM Exceeds Max (2000):",
|
"[createActivity] Page WPM Exceeds Max (2000):",
|
||||||
pageWPM
|
pageWPM,
|
||||||
);
|
);
|
||||||
|
|
||||||
// Ensure WPM Minimum
|
// Ensure WPM Minimum
|
||||||
@@ -772,7 +831,7 @@ class EBookReader {
|
|||||||
return console.warn("[createActivity] Invalid Total Pages (0)");
|
return console.warn("[createActivity] Invalid Total Pages (0)");
|
||||||
|
|
||||||
let currentPage = Math.round(
|
let currentPage = Math.round(
|
||||||
(currentWord * totalPages) / this.bookState.words
|
(currentWord * totalPages) / this.bookState.words,
|
||||||
);
|
);
|
||||||
|
|
||||||
// Create Activity Event
|
// Create Activity Event
|
||||||
@@ -826,7 +885,7 @@ class EBookReader {
|
|||||||
response: r,
|
response: r,
|
||||||
json: await r.json(),
|
json: await r.json(),
|
||||||
data: activityEvent,
|
data: activityEvent,
|
||||||
})
|
}),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -887,7 +946,7 @@ class EBookReader {
|
|||||||
response: r,
|
response: r,
|
||||||
json: await r.json(),
|
json: await r.json(),
|
||||||
data: progressEvent,
|
data: progressEvent,
|
||||||
})
|
}),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -923,7 +982,7 @@ class EBookReader {
    let currentWord = await this.getBookWordPosition();

    let currentTOC = this.book.navigation.toc.find(
      (item) => item.href == currentLocation.start.href
      (item) => item.href == currentLocation.start.href,
    );

    return {
@@ -957,10 +1016,16 @@ class EBookReader {
   **/
  async getXPathFromCFI(cfi) {
    // Get DocFragment (Spine Index)
    let startCFI = cfi.replace("epubcfi(", "");
    let cfiBaseMatch = cfi.match(/\(([^!]+)/);
    if (!cfiBaseMatch) {
      console.error("[getXPathFromCFI] No CFI Match");
      return {};
    }
    let startCFI = cfiBaseMatch[1];

    let docFragmentIndex =
      this.book.spine.spineItems.find((item) =>
        startCFI.startsWith(item.cfiBase)
        item.cfiBase == startCFI
      ).index + 1;

    // Base Progress
@@ -1037,10 +1102,6 @@ class EBookReader {
      return {};
    }

    // Match Item Index
    let indexMatch = xpath.match(/\.(\d+)$/);
    let itemIndex = indexMatch ? parseInt(indexMatch[1]) : 0;

    // Get Spine Item
    let spinePosition = parseInt(fragMatch[1]) - 1;
    let sectionItem = this.book.spine.get(spinePosition);
@@ -1108,7 +1169,7 @@ class EBookReader {
      } else {
        return null;
      }
    }
    },
    );

  /**
@@ -1132,6 +1193,11 @@ class EBookReader {
    let element = docSearch.iterateNext() || derivedSelectorElement;
    let cfi = sectionItem.cfiFromElement(element);

    // Hack - epub.js crashes sometimes when it's a bare section with no element
    // so just return the first.
    if (cfi.endsWith("!/)"))
      cfi = cfi.slice(0, -1) + "0)"

    return { cfi, element };
  }

@@ -1153,7 +1219,7 @@ class EBookReader {

    // Get CFI Range
    let firstCFI = spineItem.cfiFromElement(
      spineItem.document.body.children[0]
      spineItem.document.body.children[0],
    );
    let currentLocation = await this.rendition.currentLocation();
    let cfiRange = this.getCFIRange(firstCFI, currentLocation.start.cfi);
@@ -1254,7 +1320,7 @@ class EBookReader {
        let spineWords = newDoc.innerText.trim().split(/\s+/).length;
        item.wordCount = spineWords;
        return spineWords;
      })
      }),
    );

    return spineWC.reduce((totalCount, itemCount) => totalCount + itemCount, 0);
@@ -1273,7 +1339,7 @@ class EBookReader {
   **/
  loadSettings() {
    this.readerSettings = JSON.parse(
      localStorage.getItem("readerSettings") || "{}"
      localStorage.getItem("readerSettings") || "{}",
    );
  }
}
File diff suppressed because one or more lines are too long
53
assets/sw.js
@@ -38,6 +38,7 @@ const ROUTES = [
  { route: "/local", type: CACHE_UPDATE_ASYNC },
  { route: "/reader", type: CACHE_UPDATE_ASYNC },
  { route: "/manifest.json", type: CACHE_UPDATE_ASYNC },
  { route: /^\/assets\/reader\/fonts\//, type: CACHE_ONLY },
  { route: /^\/assets\//, type: CACHE_UPDATE_ASYNC },
  {
    route: /^\/documents\/[a-zA-Z0-9]{32}\/(cover|file)$/,
@@ -63,14 +64,16 @@ const PRECACHE_ASSETS = [
  "/reader",
  "/assets/local/index.js",
  "/assets/reader/index.js",
  "/assets/reader/fonts.css",
  "/assets/reader/themes.css",
  "/assets/icons/icon512.png",
  "/assets/images/no-cover.jpg",
  "/assets/reader/readerThemes.css",

  // Main App Assets
  "/manifest.json",
  "/assets/index.js",
  "/assets/style.css",
  "/assets/index.css",
  "/assets/tailwind.css",
  "/assets/common.js",

  // Library Assets
@@ -78,13 +81,26 @@ const PRECACHE_ASSETS = [
  "/assets/lib/epub.min.js",
  "/assets/lib/no-sleep.min.js",
  "/assets/lib/idb-keyval.min.js",

  // Fonts
  "/assets/reader/fonts/arbutus-slab-v16-latin_latin-ext-regular.woff2",
  "/assets/reader/fonts/lato-v24-latin_latin-ext-100.woff2",
  "/assets/reader/fonts/lato-v24-latin_latin-ext-100italic.woff2",
  "/assets/reader/fonts/lato-v24-latin_latin-ext-700.woff2",
  "/assets/reader/fonts/lato-v24-latin_latin-ext-700italic.woff2",
  "/assets/reader/fonts/lato-v24-latin_latin-ext-italic.woff2",
  "/assets/reader/fonts/lato-v24-latin_latin-ext-regular.woff2",
  "/assets/reader/fonts/open-sans-v36-latin_latin-ext-700.woff2",
  "/assets/reader/fonts/open-sans-v36-latin_latin-ext-700italic.woff2",
  "/assets/reader/fonts/open-sans-v36-latin_latin-ext-italic.woff2",
  "/assets/reader/fonts/open-sans-v36-latin_latin-ext-regular.woff2",
];

// ------------------------------------------------------- //
// ----------------------- Helpers ----------------------- //
// ------------------------------------------------------- //

function purgeCache() {
async function purgeCache() {
  console.log("[purgeCache] Purging Cache");
  return caches.keys().then(function (names) {
    for (let name of names) caches.delete(name);
@@ -121,7 +137,7 @@ async function handleFetch(event) {
  const directive = ROUTES.find(
    (item) =>
      (item.route instanceof RegExp && url.match(item.route)) ||
      url == item.route
      url == item.route,
  ) || { type: CACHE_NEVER };

  // Get Fallback
@@ -146,11 +162,11 @@ async function handleFetch(event) {
      );
    case CACHE_UPDATE_SYNC:
      return updateCache(event.request).catch(
        (e) => currentCache || fallbackFunc(event)
        (e) => currentCache || fallbackFunc(event),
      );
    case CACHE_UPDATE_ASYNC:
      let newResponse = updateCache(event.request).catch((e) =>
        fallbackFunc(event)
        fallbackFunc(event),
      );

      return currentCache || newResponse;
@@ -177,7 +193,7 @@ function handleMessage(event) {
    .filter(
      (item) =>
        item.startsWith("/documents/") ||
        item.startsWith("/reader/progress/")
        item.startsWith("/reader/progress/"),
    );

  // Derive Unique IDs
@@ -185,8 +201,8 @@ function handleMessage(event) {
    new Set(
      docResources
        .filter((item) => item.startsWith("/documents/"))
        .map((item) => item.split("/")[2])
        .map((item) => item.split("/")[2]),
    )
    ),
  );

  /**
@@ -199,14 +215,14 @@ function handleMessage(event) {
    .filter(
      (id) =>
        docResources.includes("/documents/" + id + "/file") &&
        docResources.includes("/reader/progress/" + id)
        docResources.includes("/reader/progress/" + id),
    )
    .map(async (id) => {
      let url = "/reader/progress/" + id;
      let currentCache = await caches.match(url);
      let resp = await updateCache(url).catch((e) => currentCache);
      return resp.json();
    })
    }),
  );

  event.source.postMessage({ id, data: cachedDocuments });
@@ -218,7 +234,7 @@ function handleMessage(event) {
    Promise.all([
      cache.delete("/documents/" + data.id + "/file"),
      cache.delete("/reader/progress/" + data.id),
    ])
    ]),
  )
    .then(() => event.source.postMessage({ id, data: "SUCCESS" }))
    .catch(() => event.source.postMessage({ id, data: "FAILURE" }));
@@ -238,6 +254,13 @@ self.addEventListener("install", function (event) {
  event.waitUntil(handleInstall(event));
});

self.addEventListener("fetch", (event) =>
  event.respondWith(handleFetch(event))
);
self.addEventListener("fetch", (event) => {
  /**
   * Weird things happen when a service worker attempts to handle a request
   * when the server responds with chunked transfer encoding. Right now we only
   * use chunked encoding on POSTs. So this is to avoid processing those.
   **/

  if (event.request.method != "GET") return;
  return event.respondWith(handleFetch(event));
});
1
assets/tailwind.css
Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
104
config/config.go
@@ -1,7 +1,11 @@
package config

import (
    "fmt"
    "os"
    "path"
    "path/filepath"
    "runtime"
    "strings"

    log "github.com/sirupsen/logrus"
@@ -27,38 +31,99 @@ type Config struct {
    LogLevel string

    // Cookie Settings
    CookieSessionKey string
    CookieAuthKey string
    CookieEncKey string
    CookieSecure bool
    CookieHTTPOnly bool
}

type customFormatter struct {
    log.Formatter
}

// Force UTC & Set type (app)
func (cf customFormatter) Format(e *log.Entry) ([]byte, error) {
    if e.Data["type"] == nil {
        e.Data["type"] = "app"
    }
    e.Time = e.Time.UTC()
    return cf.Formatter.Format(e)
}
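A minimal sketch of the formatter in use, assuming it runs inside this package; the emitted field names are logrus's standard JSON fields plus the injected type:

    log.SetFormatter(customFormatter{&log.JSONFormatter{}})

    // log.Info("started") would then emit roughly (time forced to UTC,
    // type defaulted to "app"; the timestamp here is illustrative):
    //   {"level":"info","msg":"started","time":"2024-01-01T00:00:00Z","type":"app"}
    // while log.WithField("type", "http").Info("request") keeps its explicit type.
    log.Info("started")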
// Set at runtime
var version string = "develop"

func Load() *Config {
    c := &Config{
        Version: "0.0.1",
        Version: version,
        DBType: trimLowerString(getEnv("DATABASE_TYPE", "SQLite")),
        DBName: trimLowerString(getEnv("DATABASE_NAME", "antholume")),
        ConfigPath: getEnv("CONFIG_PATH", "/config"),
        DataPath: getEnv("DATA_PATH", "/data"),
        ListenPort: getEnv("LISTEN_PORT", "8585"),
        DBType: trimLowerString(getEnv("DATABASE_TYPE", "SQLite")),
        DBName: trimLowerString(getEnv("DATABASE_NAME", "antholume")),
        RegistrationEnabled: trimLowerString(getEnv("REGISTRATION_ENABLED", "false")) == "true",
        DemoMode: trimLowerString(getEnv("DEMO_MODE", "false")) == "true",
        SearchEnabled: trimLowerString(getEnv("SEARCH_ENABLED", "false")) == "true",
        CookieSessionKey: trimLowerString(getEnv("COOKIE_SESSION_KEY", "")),
        CookieAuthKey: trimLowerString(getEnv("COOKIE_AUTH_KEY", "")),
        CookieEncKey: trimLowerString(getEnv("COOKIE_ENC_KEY", "")),
        LogLevel: trimLowerString(getEnv("LOG_LEVEL", "info")),
        CookieSecure: trimLowerString(getEnv("COOKIE_SECURE", "true")) == "true",
        CookieHTTPOnly: trimLowerString(getEnv("COOKIE_HTTP_ONLY", "true")) == "true",
    }

    // Log Level
    // Parse log level
    ll, err := log.ParseLevel(c.LogLevel)
    logLevel, err := log.ParseLevel(c.LogLevel)
    if err != nil {
        ll = log.InfoLevel
        logLevel = log.InfoLevel
    }
    log.SetLevel(ll)

    // Create custom formatter
    logFormatter := &customFormatter{&log.JSONFormatter{
        CallerPrettyfier: prettyCaller,
    }}

    // Create log rotator
    rotateFileHook, err := NewRotateFileHook(RotateFileConfig{
        Filename:   path.Join(c.ConfigPath, "logs/antholume.log"),
        MaxSize:    50,
        MaxBackups: 3,
        MaxAge:     30,
        Level:      logLevel,
        Formatter:  logFormatter,
    })
    if err != nil {
        log.Fatal("Unable to initialize file rotate hook")
    }

    // Rotate now
    rotateFileHook.Rotate()

    // Set logger settings
    log.SetLevel(logLevel)
    log.SetFormatter(logFormatter)
    log.SetReportCaller(true)
    log.AddHook(rotateFileHook)

    // Ensure directories exist
    c.EnsureDirectories()

    return c
}

// Ensures needed directories exist
func (c *Config) EnsureDirectories() {
    os.Mkdir(c.ConfigPath, 0755)
    os.Mkdir(c.DataPath, 0755)

    docDir := filepath.Join(c.DataPath, "documents")
    coversDir := filepath.Join(c.DataPath, "covers")
    backupDir := filepath.Join(c.DataPath, "backups")

    os.Mkdir(docDir, 0755)
    os.Mkdir(coversDir, 0755)
    os.Mkdir(backupDir, 0755)
}

func getEnv(key, fallback string) string {
    if value, ok := os.LookupEnv(key); ok {
        return value
@@ -69,3 +134,24 @@ func getEnv(key, fallback string) string {
func trimLowerString(val string) string {
    return strings.ToLower(strings.TrimSpace(val))
}

func prettyCaller(f *runtime.Frame) (function string, file string) {
    purgePrefix := "reichard.io/antholume/"

    pathName := strings.Replace(f.Func.Name(), purgePrefix, "", 1)
    parts := strings.Split(pathName, ".")

    filepath, line := f.Func.FileLine(f.PC)
    splitFilePath := strings.Split(filepath, "/")

    fileName := fmt.Sprintf("%s/%s@%d", parts[0], splitFilePath[len(splitFilePath)-1], line)
    functionName := strings.Replace(pathName, parts[0]+".", "", 1)

    // Exclude GIN Logger
    if functionName == "NewApi.apiLogger.func1" {
        fileName = ""
        functionName = ""
    }

    return functionName, fileName
}
|||||||
@@ -1,35 +1,37 @@
 package config

-import "testing"
+import (
+	"runtime"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)

 func TestLoadConfig(t *testing.T) {
 	conf := Load()
-	want := "sqlite"
-	if conf.DBType != want {
-		t.Fatalf(`Load().DBType = %q, want match for %#q, nil`, conf.DBType, want)
-	}
+	assert.Equal(t, "sqlite", conf.DBType)
 }

 func TestGetEnvDefault(t *testing.T) {
-	want := "def_val"
-	envDefault := getEnv("DEFAULT_TEST", want)
-	if envDefault != want {
-		t.Fatalf(`getEnv("DEFAULT_TEST", "def_val") = %q, want match for %#q, nil`, envDefault, want)
-	}
-}
-
-func TestGetEnvSet(t *testing.T) {
-	envDefault := getEnv("SET_TEST", "not_this")
-	want := "set_val"
-	if envDefault != want {
-		t.Fatalf(`getEnv("SET_TEST", "not_this") = %q, want match for %#q, nil`, envDefault, want)
-	}
+	desiredValue := "def_val"
+	envDefault := getEnv("DEFAULT_TEST", desiredValue)
+	assert.Equal(t, desiredValue, envDefault)
 }

 func TestTrimLowerString(t *testing.T) {
-	want := "trimtest"
-	output := trimLowerString(" trimTest ")
-	if output != want {
-		t.Fatalf(`trimLowerString(" trimTest ") = %q, want match for %#q, nil`, output, want)
-	}
+	desiredValue := "trimtest"
+	outputValue := trimLowerString(" trimTest ")
+	assert.Equal(t, desiredValue, outputValue)
 }
+
+func TestPrettyCaller(t *testing.T) {
+	p, _, _, _ := runtime.Caller(0)
+	result := runtime.CallersFrames([]uintptr{p})
+	f, _ := result.Next()
+	functionName, fileName := prettyCaller(&f)
+
+	assert.Equal(t, "TestPrettyCaller", functionName, "should have current function name")
+	assert.Equal(t, "config/config_test.go@30", fileName, "should have current file path and line number")
+}
54
config/logger.go
Normal file
@@ -0,0 +1,54 @@
package config

import (
	"github.com/sirupsen/logrus"
	"gopkg.in/natefinch/lumberjack.v2"
)

// Modified "snowzach/rotatefilehook" to support manual rotation

type RotateFileConfig struct {
	Filename   string
	MaxSize    int
	MaxBackups int
	MaxAge     int
	Compress   bool
	Level      logrus.Level
	Formatter  logrus.Formatter
}

type RotateFileHook struct {
	Config    RotateFileConfig
	logWriter *lumberjack.Logger
}

func NewRotateFileHook(config RotateFileConfig) (*RotateFileHook, error) {
	hook := RotateFileHook{
		Config: config,
	}
	hook.logWriter = &lumberjack.Logger{
		Filename:   config.Filename,
		MaxSize:    config.MaxSize,
		MaxBackups: config.MaxBackups,
		MaxAge:     config.MaxAge,
		Compress:   config.Compress,
	}
	return &hook, nil
}

func (hook *RotateFileHook) Rotate() error {
	return hook.logWriter.Rotate()
}

func (hook *RotateFileHook) Levels() []logrus.Level {
	return logrus.AllLevels[:hook.Config.Level+1]
}

func (hook *RotateFileHook) Fire(entry *logrus.Entry) (err error) {
	b, err := hook.Config.Formatter.Format(entry)
	if err != nil {
		return err
	}
	hook.logWriter.Write(b)
	return nil
}
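The hook above implements logrus's Hook interface (Levels/Fire) on top of a lumberjack rotating writer, with Rotate exposed so callers can force a rollover. A minimal wiring sketch, assuming the same package; the values below are illustrative, not the application's actual configuration (that lives in config.go above):

    // Sketch: wiring RotateFileHook into logrus (values are illustrative).
    hook, err := NewRotateFileHook(RotateFileConfig{
    	Filename:   "/config/logs/app.log", // hypothetical path
    	MaxSize:    50,                     // MB per file before rotation
    	MaxBackups: 3,                      // rotated files to keep
    	MaxAge:     30,                     // days to keep rotated files
    	Level:      logrus.InfoLevel,
    	Formatter:  &logrus.JSONFormatter{},
    })
    if err != nil {
    	logrus.Fatal("unable to initialize file rotate hook")
    }
    logrus.AddHook(hook) // entries at InfoLevel or more severe are mirrored to the file
    _ = hook.Rotate()    // manual rotation, the reason for forking the upstream hook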
@@ -1,6 +1,6 @@
 // Code generated by sqlc. DO NOT EDIT.
 // versions:
-//   sqlc v1.21.0
+//   sqlc v1.29.0

 package database
151
database/document_user_statistics.sql
Normal file
@@ -0,0 +1,151 @@
WITH grouped_activity AS (
    SELECT
        ga.user_id,
        ga.document_id,
        MAX(ga.created_at) AS created_at,
        MAX(ga.start_time) AS start_time,
        MIN(ga.start_percentage) AS start_percentage,
        MAX(ga.end_percentage) AS end_percentage,

        -- Total Duration & Percentage
        SUM(ga.duration) AS total_time_seconds,
        SUM(ga.end_percentage - ga.start_percentage) AS total_read_percentage,

        -- Yearly Duration
        SUM(
            CASE
                WHEN ga.start_time >= DATE('now', '-1 year')
                    THEN ga.duration
                ELSE 0
            END
        ) AS yearly_time_seconds,

        -- Yearly Percentage
        SUM(
            CASE
                WHEN ga.start_time >= DATE('now', '-1 year')
                    THEN ga.end_percentage - ga.start_percentage
                ELSE 0
            END
        ) AS yearly_read_percentage,

        -- Monthly Duration
        SUM(
            CASE
                WHEN ga.start_time >= DATE('now', '-1 month')
                    THEN ga.duration
                ELSE 0
            END
        ) AS monthly_time_seconds,

        -- Monthly Percentage
        SUM(
            CASE
                WHEN ga.start_time >= DATE('now', '-1 month')
                    THEN ga.end_percentage - ga.start_percentage
                ELSE 0
            END
        ) AS monthly_read_percentage,

        -- Weekly Duration
        SUM(
            CASE
                WHEN ga.start_time >= DATE('now', '-7 days')
                    THEN ga.duration
                ELSE 0
            END
        ) AS weekly_time_seconds,

        -- Weekly Percentage
        SUM(
            CASE
                WHEN ga.start_time >= DATE('now', '-7 days')
                    THEN ga.end_percentage - ga.start_percentage
                ELSE 0
            END
        ) AS weekly_read_percentage

    FROM activity AS ga
    GROUP BY ga.user_id, ga.document_id
),

current_progress AS (
    SELECT
        user_id,
        document_id,
        COALESCE((
            SELECT dp.percentage
            FROM document_progress AS dp
            WHERE
                dp.user_id = iga.user_id
                AND dp.document_id = iga.document_id
            ORDER BY dp.created_at DESC
            LIMIT 1
        ), end_percentage) AS percentage
    FROM grouped_activity AS iga
)

INSERT INTO document_user_statistics
SELECT
    ga.document_id,
    ga.user_id,
    cp.percentage,
    MAX(ga.start_time) AS last_read,
    MAX(ga.created_at) AS last_seen,
    SUM(ga.total_read_percentage) AS read_percentage,

    -- All Time WPM
    SUM(ga.total_time_seconds) AS total_time_seconds,
    (CAST(COALESCE(d.words, 0.0) AS REAL) * SUM(ga.total_read_percentage))
        AS total_words_read,
    (CAST(COALESCE(d.words, 0.0) AS REAL) * SUM(ga.total_read_percentage))
        / (SUM(ga.total_time_seconds) / 60.0) AS total_wpm,

    -- Yearly WPM
    ga.yearly_time_seconds,
    CAST(COALESCE(d.words, 0.0) AS REAL) * ga.yearly_read_percentage
        AS yearly_words_read,
    COALESCE(
        (CAST(COALESCE(d.words, 0.0) AS REAL) * ga.yearly_read_percentage)
        / (ga.yearly_time_seconds / 60), 0.0)
        AS yearly_wpm,

    -- Monthly WPM
    ga.monthly_time_seconds,
    CAST(COALESCE(d.words, 0.0) AS REAL) * ga.monthly_read_percentage
        AS monthly_words_read,
    COALESCE(
        (CAST(COALESCE(d.words, 0.0) AS REAL) * ga.monthly_read_percentage)
        / (ga.monthly_time_seconds / 60), 0.0)
        AS monthly_wpm,

    -- Weekly WPM
    ga.weekly_time_seconds,
    CAST(COALESCE(d.words, 0.0) AS REAL) * ga.weekly_read_percentage
        AS weekly_words_read,
    COALESCE(
        (CAST(COALESCE(d.words, 0.0) AS REAL) * ga.weekly_read_percentage)
        / (ga.weekly_time_seconds / 60), 0.0)
        AS weekly_wpm

FROM grouped_activity AS ga
INNER JOIN
    current_progress AS cp
    ON ga.user_id = cp.user_id AND ga.document_id = cp.document_id
INNER JOIN
    documents AS d
    ON ga.document_id = d.id
GROUP BY ga.document_id, ga.user_id
ORDER BY total_wpm DESC;
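For orientation, every WPM column above follows the same formula at a different horizon: words read = document words × read-percentage delta, and WPM = words read / (seconds / 60). With illustrative numbers, a 50,000-word document read from 0% to 20% over 1,500 seconds gives 50,000 × 0.20 = 10,000 words over 25 minutes, i.e. 400 WPM.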
27
database/documents.go
Normal file
@@ -0,0 +1,27 @@
package database

import (
	"context"
	"fmt"

	"reichard.io/antholume/pkg/ptr"
	"reichard.io/antholume/pkg/sliceutils"
)

func (d *DBManager) GetDocument(ctx context.Context, docID, userID string) (*GetDocumentsWithStatsRow, error) {
	documents, err := d.Queries.GetDocumentsWithStats(ctx, GetDocumentsWithStatsParams{
		ID:     ptr.Of(docID),
		UserID: userID,
		Limit:  1,
	})
	if err != nil {
		return nil, err
	}

	document, found := sliceutils.First(documents)
	if !found {
		return nil, fmt.Errorf("document not found: %s", docID)
	}

	return &document, nil
}
115
database/documents_test.go
Normal file
@@ -0,0 +1,115 @@
package database

import (
	"context"
	"fmt"
	"testing"

	"github.com/stretchr/testify/suite"

	"reichard.io/antholume/config"
)

type DocumentsTestSuite struct {
	suite.Suite
	dbm *DBManager
}

func TestDocuments(t *testing.T) {
	suite.Run(t, new(DocumentsTestSuite))
}

func (suite *DocumentsTestSuite) SetupTest() {
	cfg := config.Config{
		DBType: "memory",
	}

	suite.dbm = NewMgr(&cfg)

	// Create Document
	_, err := suite.dbm.Queries.UpsertDocument(context.Background(), UpsertDocumentParams{
		ID:     documentID,
		Title:  &documentTitle,
		Author: &documentAuthor,
		Words:  &documentWords,
	})
	suite.NoError(err)
}

// DOCUMENT - TODO:
// - (q *Queries) GetDocumentProgress
// - (q *Queries) GetDocumentWithStats
// - (q *Queries) GetDocumentsSize
// - (q *Queries) GetDocumentsWithStats
// - (q *Queries) GetMissingDocuments
func (suite *DocumentsTestSuite) TestGetDocument() {
	doc, err := suite.dbm.Queries.GetDocument(context.Background(), documentID)
	suite.Nil(err, "should have nil err")
	suite.Equal(documentID, doc.ID, "should have the requested document")
}

func (suite *DocumentsTestSuite) TestUpsertDocument() {
	testDocID := "docid1"

	doc, err := suite.dbm.Queries.UpsertDocument(context.Background(), UpsertDocumentParams{
		ID:     testDocID,
		Title:  &documentTitle,
		Author: &documentAuthor,
	})

	suite.Nil(err, "should have nil err")
	suite.Equal(testDocID, doc.ID, "should have document id")
	suite.Equal(documentTitle, *doc.Title, "should have document title")
	suite.Equal(documentAuthor, *doc.Author, "should have document author")
}

func (suite *DocumentsTestSuite) TestDeleteDocument() {
	changed, err := suite.dbm.Queries.DeleteDocument(context.Background(), documentID)
	suite.Nil(err, "should have nil err")
	suite.Equal(int64(1), changed, "should have changed the document")

	doc, err := suite.dbm.Queries.GetDocument(context.Background(), documentID)
	suite.Nil(err, "should have nil err")
	suite.True(doc.Deleted, "should have deleted the document")
}

func (suite *DocumentsTestSuite) TestGetDeletedDocuments() {
	changed, err := suite.dbm.Queries.DeleteDocument(context.Background(), documentID)
	suite.Nil(err, "should have nil err")
	suite.Equal(int64(1), changed, "should have changed the document")

	deletedDocs, err := suite.dbm.Queries.GetDeletedDocuments(context.Background(), []string{documentID})
	suite.Nil(err, "should have nil err")
	suite.Len(deletedDocs, 1, "should have one deleted document")
}

// TODO - Convert GetWantedDocuments -> (sqlc.slice('document_ids'));
func (suite *DocumentsTestSuite) TestGetWantedDocuments() {
	wantedDocs, err := suite.dbm.Queries.GetWantedDocuments(context.Background(), fmt.Sprintf("[\"%s\"]", documentID))
	suite.Nil(err, "should have nil err")
	suite.Len(wantedDocs, 1, "should have one wanted document")
}

func (suite *DocumentsTestSuite) TestGetMissingDocuments() {
	// Create Document
	_, err := suite.dbm.Queries.UpsertDocument(context.Background(), UpsertDocumentParams{
		ID:       documentID,
		Filepath: &documentFilepath,
	})
	suite.NoError(err)

	missingDocs, err := suite.dbm.Queries.GetMissingDocuments(context.Background(), []string{documentID})
	suite.Nil(err, "should have nil err")
	suite.Len(missingDocs, 0, "should have no missing documents")

	missingDocs, err = suite.dbm.Queries.GetMissingDocuments(context.Background(), []string{"other"})
	suite.Nil(err, "should have nil err")
	suite.Len(missingDocs, 1, "should have one missing document")
	suite.Equal(documentID, missingDocs[0].ID, "should have missing doc")

	// TODO - https://github.com/sqlc-dev/sqlc/issues/3451
	// missingDocs, err = suite.dbm.Queries.GetMissingDocuments(context.Background(), []string{})
	// suite.Nil(err, "should have nil err")
	// suite.Len(missingDocs, 1, "should have one missing document")
	// suite.Equal(documentID, missingDocs[0].ID, "should have missing doc")
}
@@ -3,84 +3,256 @@ package database
 import (
 	"context"
 	"database/sql"
-	_ "embed"
+	"database/sql/driver"
+	"embed"
+	"errors"
 	"fmt"
+	"path/filepath"
+	"time"
+
+	"github.com/pressly/goose/v3"
 	log "github.com/sirupsen/logrus"
-	_ "modernc.org/sqlite"
-	"path"
-	"reichard.io/bbank/config"
+	sqlite "modernc.org/sqlite"
+
+	"reichard.io/antholume/config"
+	_ "reichard.io/antholume/database/migrations"
 )

 type DBManager struct {
 	DB      *sql.DB
-	Ctx     context.Context
 	Queries *Queries
+	cfg     *config.Config
 }

 //go:embed schema.sql
 var ddl string

-//go:embed update_temp_tables.sql
-var tsql string
+//go:embed user_streaks.sql
+var user_streaks string

-//go:embed update_document_user_statistics.sql
-var doc_user_stat_sql string
+//go:embed document_user_statistics.sql
+var document_user_statistics string

-func NewMgr(c *config.Config) *DBManager {
-	// Create Manager
-	dbm := &DBManager{
-		Ctx: context.Background(),
-	}
+//go:embed migrations/*
+var migrations embed.FS
+
+// Register scalar sqlite function on init
+func init() {
+	sqlite.MustRegisterFunction("LOCAL_TIME", &sqlite.FunctionImpl{
+		NArgs:         2,
+		Deterministic: true,
+		Scalar:        localTime,
+	})
+	sqlite.MustRegisterFunction("LOCAL_DATE", &sqlite.FunctionImpl{
+		NArgs:         2,
+		Deterministic: true,
+		Scalar:        localDate,
+	})
+}

-	// Create Database
-	if c.DBType == "sqlite" || c.DBType == "memory" {
-		var dbLocation string = ":memory:"
-		if c.DBType == "sqlite" {
-			dbLocation = path.Join(c.ConfigPath, fmt.Sprintf("%s.db", c.DBName))
-		}
+// NewMgr Returns an initialized manager
+func NewMgr(c *config.Config) *DBManager {
+	// Create Manager
+	dbm := &DBManager{cfg: c}
+
+	if err := dbm.init(context.Background()); err != nil {
+		log.Panic("Unable to init DB")
+	}
+
+	return dbm
+}
+
+// init loads the DB manager
+func (dbm *DBManager) init(ctx context.Context) error {
+	// Build DB Location
+	var dbLocation string
+	switch dbm.cfg.DBType {
+	case "sqlite":
+		dbLocation = filepath.Join(dbm.cfg.ConfigPath, fmt.Sprintf("%s.db", dbm.cfg.DBName))
+	case "memory":
+		dbLocation = ":memory:"
+	default:
+		return fmt.Errorf("unsupported database")
+	}

 	var err error
 	dbm.DB, err = sql.Open("sqlite", dbLocation)
 	if err != nil {
-		log.Fatal(err)
+		log.Panicf("Unable to open DB: %v", err)
+		return err
 	}

-	// Single Open Connection
+	// Single open connection
 	dbm.DB.SetMaxOpenConns(1)
-		if _, err := dbm.DB.Exec(ddl, nil); err != nil {
-			log.Info("Exec Error:", err)
-		}
-	} else {
-		log.Fatal("Unsupported Database")
+
+	// Check if DB is new
+	isNew, err := isEmpty(dbm.DB)
+	if err != nil {
+		log.Panicf("Unable to determine db info: %v", err)
+		return err
 	}

+	// Init SQLc
 	dbm.Queries = New(dbm.DB)

-	return dbm
-}
+	// Execute schema
+	if _, err := dbm.DB.Exec(ddl, nil); err != nil {
+		log.Panicf("Error executing schema: %v", err)
+		return err
+	}

-func (dbm *DBManager) Shutdown() error {
-	return dbm.DB.Close()
-}
+	// Perform migrations
+	err = dbm.performMigrations(isNew)
+	if err != nil && err != goose.ErrNoMigrationFiles {
+		log.Panicf("Error running DB migrations: %v", err)
+		return err
+	}

-func (dbm *DBManager) UpdateDocumentUserStatistic(documentID string, userID string) error {
-	// Prepare Statement
-	stmt, err := dbm.DB.PrepareContext(dbm.Ctx, doc_user_stat_sql)
-	if err != nil {
-		return err
-	}
-	defer stmt.Close()
+	// Update settings
+	err = dbm.updateSettings(ctx)
+	if err != nil {
+		log.Panicf("Error running DB settings update: %v", err)
+		return err
+	}
+
+	// Cache tables
+	if err := dbm.CacheTempTables(ctx); err != nil {
+		log.Warn("Refreshing temp table cache failed: ", err)
+	}
+
+	return nil
+}

-	// Execute
-	if _, err := stmt.ExecContext(dbm.Ctx, documentID, userID); err != nil {
-		return err
-	}
+// Reload closes the DB & reinits
+func (dbm *DBManager) Reload(ctx context.Context) error {
+	// Close handle
+	err := dbm.DB.Close()
+	if err != nil {
+		return err
+	}
+
+	// Reinit DB
+	if err := dbm.init(ctx); err != nil {
+		return err
+	}

 	return nil
 }

-func (dbm *DBManager) CacheTempTables() error {
-	if _, err := dbm.DB.ExecContext(dbm.Ctx, tsql); err != nil {
+// CacheTempTables clears existing statistics and recalculates
+func (dbm *DBManager) CacheTempTables(ctx context.Context) error {
+	start := time.Now()
+	if _, err := dbm.DB.ExecContext(ctx, user_streaks); err != nil {
 		return err
 	}
+	log.Debug("Cached 'user_streaks' in: ", time.Since(start))
+
+	start = time.Now()
+	if _, err := dbm.DB.ExecContext(ctx, document_user_statistics); err != nil {
+		return err
+	}
+	log.Debug("Cached 'document_user_statistics' in: ", time.Since(start))
+
 	return nil
 }
+
+// updateSettings ensures that we're enforcing foreign keys and enable journal
+// mode.
+func (dbm *DBManager) updateSettings(ctx context.Context) error {
+	// Set SQLite PRAGMA Settings
+	pragmaQuery := `
+		PRAGMA foreign_keys = ON;
+		PRAGMA journal_mode = WAL;
+	`
+	if _, err := dbm.DB.Exec(pragmaQuery, nil); err != nil {
+		log.Errorf("Error executing pragma: %v", err)
+		return err
+	}
+
+	// Update Antholume Version in DB
+	if _, err := dbm.Queries.UpdateSettings(ctx, UpdateSettingsParams{
+		Name:  "version",
+		Value: dbm.cfg.Version,
+	}); err != nil {
+		log.Errorf("Error updating DB settings: %v", err)
+		return err
+	}
+
+	return nil
+}
+
+// performMigrations runs all migrations
+func (dbm *DBManager) performMigrations(isNew bool) error {
+	// Create context
+	ctx := context.WithValue(context.Background(), "isNew", isNew) // nolint
+
+	// Set DB migration
+	goose.SetBaseFS(migrations)
+
+	// Run migrations
+	goose.SetLogger(log.StandardLogger())
+	if err := goose.SetDialect("sqlite"); err != nil {
+		return err
+	}
+
+	return goose.UpContext(ctx, dbm.DB, "migrations")
+}
+
+// isEmpty determines whether the database is empty
+func isEmpty(db *sql.DB) (bool, error) {
+	var tableCount int
+	err := db.QueryRow("SELECT COUNT(*) FROM sqlite_master WHERE type='table';").Scan(&tableCount)
+	if err != nil {
+		return false, err
+	}
+	return tableCount == 0, nil
+}
+
+// localTime is a custom SQL function that is registered as LOCAL_TIME in the init function
+func localTime(ctx *sqlite.FunctionContext, args []driver.Value) (driver.Value, error) {
+	timeStr, ok := args[0].(string)
+	if !ok {
+		return nil, errors.New("both arguments to LOCAL_TIME must be strings")
+	}
+
+	timeZoneStr, ok := args[1].(string)
+	if !ok {
+		return nil, errors.New("both arguments to LOCAL_TIME must be strings")
+	}
+
+	timeZone, err := time.LoadLocation(timeZoneStr)
+	if err != nil {
+		return nil, errors.New("unable to parse timezone")
+	}
+
+	formattedTime, err := time.ParseInLocation(time.RFC3339, timeStr, time.UTC)
+	if err != nil {
+		return nil, errors.New("unable to parse time")
+	}
+
+	return formattedTime.In(timeZone).Format(time.RFC3339), nil
+}
+
+// localDate is a custom SQL function that is registered as LOCAL_DATE in the init function
+func localDate(ctx *sqlite.FunctionContext, args []driver.Value) (driver.Value, error) {
+	timeStr, ok := args[0].(string)
+	if !ok {
+		return nil, errors.New("both arguments to LOCAL_DATE must be strings")
+	}
+
+	timeZoneStr, ok := args[1].(string)
+	if !ok {
+		return nil, errors.New("both arguments to LOCAL_DATE must be strings")
+	}
+
+	timeZone, err := time.LoadLocation(timeZoneStr)
+	if err != nil {
+		return nil, errors.New("unable to parse timezone")
+	}
+
+	formattedTime, err := time.ParseInLocation(time.RFC3339, timeStr, time.UTC)
+	if err != nil {
+		return nil, errors.New("unable to parse time")
+	}
+
+	return formattedTime.In(timeZone).Format("2006-01-02"), nil
+}
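Because LOCAL_TIME and LOCAL_DATE are registered process-wide in init, any statement issued through these connections can call them. A small illustrative check, assuming the manager above; the timestamp and zone are arbitrary:

    // Sketch: exercising the registered scalar function (values are arbitrary).
    var localized string
    err := dbm.DB.QueryRow(
    	`SELECT LOCAL_TIME('2024-01-01T12:00:00Z', 'America/New_York')`,
    ).Scan(&localized)
    if err != nil {
    	log.Fatal(err)
    }
    log.Info(localized) // expected: "2024-01-01T07:00:00-05:00"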
@@ -1,118 +1,78 @@
 package database

 import (
+	"context"
+	"fmt"
 	"testing"
 	"time"

-	"reichard.io/bbank/config"
+	"github.com/stretchr/testify/suite"
+
+	"reichard.io/antholume/config"
+	"reichard.io/antholume/utils"
 )

-type databaseTest struct {
-	*testing.T
+var (
+	userID           string = "testUser"
+	userPass         string = "testPass"
+	deviceID         string = "testDevice"
+	deviceName       string = "testDeviceName"
+	documentID       string = "testDocument"
+	documentTitle    string = "testTitle"
+	documentAuthor   string = "testAuthor"
+	documentFilepath string = "./testPath.epub"
+	documentWords    int64  = 5000
+)
+
+type DatabaseTestSuite struct {
+	suite.Suite
 	dbm *DBManager
 }

-var userID string = "testUser"
-var userPass string = "testPass"
-var deviceID string = "testDevice"
-var deviceName string = "testDeviceName"
-var documentID string = "testDocument"
-var documentTitle string = "testTitle"
-var documentAuthor string = "testAuthor"
+func TestDatabase(t *testing.T) {
+	suite.Run(t, new(DatabaseTestSuite))
+}

-func TestNewMgr(t *testing.T) {
+// PROGRESS - TODO:
+// - (q *Queries) GetProgress
+// - (q *Queries) UpdateProgress
+func (suite *DatabaseTestSuite) SetupTest() {
 	cfg := config.Config{
 		DBType: "memory",
 	}
+	suite.dbm = NewMgr(&cfg)

-	dbm := NewMgr(&cfg)
-	if dbm == nil {
-		t.Fatalf(`Expected: *DBManager, Got: nil`)
-	}
-
-	t.Run("Database", func(t *testing.T) {
-		dt := databaseTest{t, dbm}
-		dt.TestUser()
-		dt.TestDocument()
-		dt.TestDevice()
-		dt.TestActivity()
-		dt.TestDailyReadStats()
-	})
-}
-
-func (dt *databaseTest) TestUser() {
-	dt.Run("User", func(t *testing.T) {
-		changed, err := dt.dbm.Queries.CreateUser(dt.dbm.Ctx, CreateUserParams{
-			ID:   userID,
-			Pass: &userPass,
-		})
-
-		if err != nil || changed != 1 {
-			t.Fatalf(`Expected: %v, Got: %v, Error: %v`, 1, changed, err)
-		}
-
-		user, err := dt.dbm.Queries.GetUser(dt.dbm.Ctx, userID)
-		if err != nil || *user.Pass != userPass {
-			t.Fatalf(`Expected: %v, Got: %v, Error: %v`, userPass, *user.Pass, err)
-		}
-	})
-}
-
-func (dt *databaseTest) TestDocument() {
-	dt.Run("Document", func(t *testing.T) {
-		doc, err := dt.dbm.Queries.UpsertDocument(dt.dbm.Ctx, UpsertDocumentParams{
-			ID:     documentID,
-			Title:  &documentTitle,
-			Author: &documentAuthor,
-		})
-
-		if err != nil {
-			t.Fatalf(`Expected: Document, Got: %v, Error: %v`, doc, err)
-		}
-		if doc.ID != documentID {
-			t.Fatalf(`Expected: %v, Got: %v`, documentID, doc.ID)
-		}
-		if *doc.Title != documentTitle {
-			t.Fatalf(`Expected: %v, Got: %v`, documentTitle, *doc.Title)
-		}
-		if *doc.Author != documentAuthor {
-			t.Fatalf(`Expected: %v, Got: %v`, documentAuthor, *doc.Author)
-		}
-	})
-}
-
-func (dt *databaseTest) TestDevice() {
-	dt.Run("Device", func(t *testing.T) {
-		device, err := dt.dbm.Queries.UpsertDevice(dt.dbm.Ctx, UpsertDeviceParams{
-			ID:         deviceID,
-			UserID:     userID,
-			DeviceName: deviceName,
-		})
-
-		if err != nil {
-			t.Fatalf(`Expected: Device, Got: %v, Error: %v`, device, err)
-		}
-		if device.ID != deviceID {
-			t.Fatalf(`Expected: %v, Got: %v`, deviceID, device.ID)
-		}
-		if device.UserID != userID {
-			t.Fatalf(`Expected: %v, Got: %v`, userID, device.UserID)
-		}
-		if device.DeviceName != deviceName {
-			t.Fatalf(`Expected: %v, Got: %v`, deviceName, device.DeviceName)
-		}
-	})
-}
-
-func (dt *databaseTest) TestActivity() {
-	dt.Run("Progress", func(t *testing.T) {
-		// 10 Activities, 10 Days
-		end := time.Now()
-		start := end.AddDate(0, 0, -9)
-		var counter int64 = 0
+	// Create User
+	rawAuthHash, _ := utils.GenerateToken(64)
+	authHash := fmt.Sprintf("%x", rawAuthHash)
+	_, err := suite.dbm.Queries.CreateUser(context.Background(), CreateUserParams{
+		ID:       userID,
+		Pass:     &userPass,
+		AuthHash: &authHash,
+	})
+	suite.NoError(err)
+
+	// Create Document
+	_, err = suite.dbm.Queries.UpsertDocument(context.Background(), UpsertDocumentParams{
+		ID:       documentID,
+		Title:    &documentTitle,
+		Author:   &documentAuthor,
+		Filepath: &documentFilepath,
+		Words:    &documentWords,
+	})
+	suite.NoError(err)
+
+	// Create Device
+	_, err = suite.dbm.Queries.UpsertDevice(context.Background(), UpsertDeviceParams{
+		ID:         deviceID,
+		UserID:     userID,
+		DeviceName: deviceName,
+	})
+	suite.NoError(err)
+
+	// Create Activity
+	end := time.Now()
+	start := end.AddDate(0, 0, -9)
+	var counter int64 = 0
@@ -121,7 +81,7 @@ func (dt *databaseTest) TestActivity() {
 		counter += 1

 		// Add Item
-		activity, err := dt.dbm.Queries.AddActivity(dt.dbm.Ctx, AddActivityParams{
+		activity, err := suite.dbm.Queries.AddActivity(context.Background(), AddActivityParams{
 			DocumentID: documentID,
 			DeviceID:   deviceID,
 			UserID:     userID,
@@ -131,37 +91,50 @@ func (dt *databaseTest) TestActivity() {
 			EndPercentage: float64(counter+1) / 100.0,
 		})

-		// Validate No Error
-		if err != nil {
-			t.Fatalf(`expected: rawactivity, got: %v, error: %v`, activity, err)
-		}
-
-		// Validate Auto Increment Working
-		if activity.ID != counter {
-			t.Fatalf(`Expected: %v, Got: %v`, counter, activity.ID)
-		}
+		suite.Nil(err, fmt.Sprintf("[%d] should have nil err for add activity", counter))
+		suite.Equal(counter, activity.ID, fmt.Sprintf("[%d] should have correct id for add activity", counter))
 	}

 	// Initiate Cache
-	dt.dbm.CacheTempTables()
+	err = suite.dbm.CacheTempTables(context.Background())
+	suite.NoError(err)
+}
+
+// DEVICES - TODO:
+// - (q *Queries) GetDevice
+// - (q *Queries) GetDevices
+// - (q *Queries) UpsertDevice
+func (suite *DatabaseTestSuite) TestDevice() {
+	testDevice := "dev123"
+	device, err := suite.dbm.Queries.UpsertDevice(context.Background(), UpsertDeviceParams{
+		ID:         testDevice,
+		UserID:     userID,
+		DeviceName: deviceName,
+	})
+
+	suite.Nil(err, "should have nil err")
+	suite.Equal(testDevice, device.ID, "should have device id")
+	suite.Equal(userID, device.UserID, "should have user id")
+	suite.Equal(deviceName, device.DeviceName, "should have device name")
+}
+
+// ACTIVITY - TODO:
+// - (q *Queries) AddActivity
+// - (q *Queries) GetActivity
+// - (q *Queries) GetLastActivity
+func (suite *DatabaseTestSuite) TestActivity() {
 	// Validate Exists
-	existsRows, err := dt.dbm.Queries.GetActivity(dt.dbm.Ctx, GetActivityParams{
+	existsRows, err := suite.dbm.Queries.GetActivity(context.Background(), GetActivityParams{
 		UserID: userID,
 		Offset: 0,
 		Limit:  50,
 	})

-	if err != nil {
-		t.Fatalf(`Expected: []GetActivityRow, Got: %v, Error: %v`, existsRows, err)
-	}
-	if len(existsRows) != 10 {
-		t.Fatalf(`Expected: %v, Got: %v`, 10, len(existsRows))
-	}
+	suite.Nil(err, "should have nil err for get activity")
+	suite.Len(existsRows, 10, "should have correct number of rows get activity")

 	// Validate Doesn't Exist
-	doesntExistsRows, err := dt.dbm.Queries.GetActivity(dt.dbm.Ctx, GetActivityParams{
+	doesntExistsRows, err := suite.dbm.Queries.GetActivity(context.Background(), GetActivityParams{
 		UserID:     userID,
 		DocumentID: "unknownDoc",
 		DocFilter:  true,
@@ -169,43 +142,30 @@ func (dt *databaseTest) TestActivity() {
 		Limit: 50,
 	})

-	if err != nil {
-		t.Fatalf(`Expected: []GetActivityRow, Got: %v, Error: %v`, doesntExistsRows, err)
-	}
-	if len(doesntExistsRows) != 0 {
-		t.Fatalf(`Expected: %v, Got: %v`, 0, len(doesntExistsRows))
-	}
-	})
-}
+	suite.Nil(err, "should have nil err for get activity")
+	suite.Len(doesntExistsRows, 0, "should have no rows")
+}

-func (dt *databaseTest) TestDailyReadStats() {
-	dt.Run("DailyReadStats", func(t *testing.T) {
-		readStats, err := dt.dbm.Queries.GetDailyReadStats(dt.dbm.Ctx, userID)
-
-		if err != nil {
-			t.Fatalf(`Expected: []GetDailyReadStatsRow, Got: %v, Error: %v`, readStats, err)
-		}
-
-		// Validate 30 Days Stats
-		if len(readStats) != 30 {
-			t.Fatalf(`Expected: %v, Got: %v`, 30, len(readStats))
-		}
+// MISC - TODO:
+// - (q *Queries) AddMetadata
+// - (q *Queries) GetDailyReadStats
+// - (q *Queries) GetDatabaseInfo
+// - (q *Queries) UpdateSettings
+func (suite *DatabaseTestSuite) TestGetDailyReadStats() {
+	readStats, err := suite.dbm.Queries.GetDailyReadStats(context.Background(), userID)
+
+	suite.Nil(err, "should have nil err")
+	suite.Len(readStats, 30, "should have length of 30")

 	// Validate 1 Minute / Day - Last 10 Days
 	for i := 0; i < 10; i++ {
 		stat := readStats[i]
-		if stat.MinutesRead != 1 {
-			t.Fatalf(`Day: %v, Expected: %v, Got: %v`, stat.Date, 1, stat.MinutesRead)
-		}
+		suite.Equal(int64(1), stat.MinutesRead, "should have one minute read")
 	}

 	// Validate 0 Minute / Day - Remaining 20 Days
 	for i := 10; i < 30; i++ {
 		stat := readStats[i]
-		if stat.MinutesRead != 0 {
-			t.Fatalf(`Day: %v, Expected: %v, Got: %v`, stat.Date, 0, stat.MinutesRead)
-		}
+		suite.Equal(int64(0), stat.MinutesRead, "should have zero minutes read")
 	}
-	})
 }
89
database/migrations/20240128012356_user_auth_hash.go
Normal file
@@ -0,0 +1,89 @@
package migrations

import (
	"context"
	"database/sql"
	"fmt"

	"github.com/pressly/goose/v3"
	"reichard.io/antholume/utils"
)

func init() {
	goose.AddMigrationContext(upUserAuthHash, downUserAuthHash)
}

func upUserAuthHash(ctx context.Context, tx *sql.Tx) error {
	// Determine if we have a new DB or not
	isNew := ctx.Value("isNew").(bool)
	if isNew {
		return nil
	}

	// Copy table & create column
	_, err := tx.Exec(`
		-- Create Copy Table
		CREATE TABLE temp_users AS SELECT * FROM users;
		ALTER TABLE temp_users ADD COLUMN auth_hash TEXT;

		-- Update Schema
		DELETE FROM users;
		ALTER TABLE users ADD COLUMN auth_hash TEXT NOT NULL;
	`)
	if err != nil {
		return err
	}

	// Get current users
	rows, err := tx.Query("SELECT id FROM temp_users")
	if err != nil {
		return err
	}

	// Query existing users
	var users []string
	for rows.Next() {
		var user string
		if err := rows.Scan(&user); err != nil {
			return err
		}
		users = append(users, user)
	}

	// Create auth hash per user
	for _, user := range users {
		rawAuthHash, err := utils.GenerateToken(64)
		if err != nil {
			return err
		}

		authHash := fmt.Sprintf("%x", rawAuthHash)
		_, err = tx.Exec("UPDATE temp_users SET auth_hash = ? WHERE id = ?", authHash, user)
		if err != nil {
			return err
		}
	}

	// Copy from temp to true table
	_, err = tx.Exec(`
		-- Copy Into New
		INSERT INTO users SELECT * FROM temp_users;

		-- Drop Temp Table
		DROP TABLE temp_users;
	`)
	if err != nil {
		return err
	}

	return nil
}

func downUserAuthHash(ctx context.Context, tx *sql.Tx) error {
	// Drop column
	_, err := tx.Exec("ALTER TABLE users DROP COLUMN auth_hash")
	if err != nil {
		return err
	}
	return nil
}
58
database/migrations/20240311121111_user_timezone.go
Normal file
@@ -0,0 +1,58 @@
package migrations

import (
	"context"
	"database/sql"

	"github.com/pressly/goose/v3"
)

func init() {
	goose.AddMigrationContext(upUserTimezone, downUserTimezone)
}

func upUserTimezone(ctx context.Context, tx *sql.Tx) error {
	// Determine if we have a new DB or not
	isNew := ctx.Value("isNew").(bool)
	if isNew {
		return nil
	}

	// Copy table & create column
	_, err := tx.Exec(`
		-- Copy Table
		CREATE TABLE temp_users AS SELECT * FROM users;
		ALTER TABLE temp_users DROP COLUMN time_offset;
		ALTER TABLE temp_users ADD COLUMN timezone TEXT;
		UPDATE temp_users SET timezone = 'Europe/London';

		-- Clean Table
		DELETE FROM users;
		ALTER TABLE users DROP COLUMN time_offset;
		ALTER TABLE users ADD COLUMN timezone TEXT NOT NULL DEFAULT 'Europe/London';

		-- Copy Temp Table -> Clean Table
		INSERT INTO users SELECT * FROM temp_users;

		-- Drop Temp Table
		DROP TABLE temp_users;
	`)
	if err != nil {
		return err
	}

	return nil
}

func downUserTimezone(ctx context.Context, tx *sql.Tx) error {
	// Update column name & value
	_, err := tx.Exec(`
		ALTER TABLE users RENAME COLUMN timezone TO time_offset;
		UPDATE users SET time_offset = '0 hours';
	`)
	if err != nil {
		return err
	}

	return nil
}
38
database/migrations/20240510123707_import_basepath.go
Normal file
@@ -0,0 +1,38 @@
package migrations

import (
	"context"
	"database/sql"

	"github.com/pressly/goose/v3"
)

func init() {
	goose.AddMigrationContext(upImportBasepath, downImportBasepath)
}

func upImportBasepath(ctx context.Context, tx *sql.Tx) error {
	// Determine if we have a new DB or not
	isNew := ctx.Value("isNew").(bool)
	if isNew {
		return nil
	}

	// Add basepath column
	_, err := tx.Exec(`ALTER TABLE documents ADD COLUMN basepath TEXT;`)
	if err != nil {
		return err
	}

	return nil
}

func downImportBasepath(ctx context.Context, tx *sql.Tx) error {
	// Drop basepath column
	_, err := tx.Exec("ALTER TABLE documents DROP COLUMN basepath;")
	if err != nil {
		return err
	}
	return nil
}
9
database/migrations/README.md
Normal file
@@ -0,0 +1,9 @@
# DB Migrations

```bash
goose create migration_name
```

## Note

Since we update both `schema.sql` and the migration files, a freshly created DB is already up-to-date, and the migrations must not run against it. Each migration therefore checks whether the DB is new (via a value passed into the context) and, if so, simply returns.
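A minimal sketch of that per-migration guard (the migration name and the ALTER statements are hypothetical; the real migrations follow below):

```go
package migrations

import (
	"context"
	"database/sql"

	"github.com/pressly/goose/v3"
)

func init() {
	goose.AddMigrationContext(upExample, downExample)
}

func upExample(ctx context.Context, tx *sql.Tx) error {
	// Fresh DB: schema.sql already contains this change, so skip.
	if isNew, ok := ctx.Value("isNew").(bool); ok && isNew {
		return nil
	}
	// Hypothetical migration body.
	_, err := tx.Exec(`ALTER TABLE users ADD COLUMN example_column TEXT;`)
	return err
}

func downExample(ctx context.Context, tx *sql.Tx) error {
	// Hypothetical reverse of the up migration.
	_, err := tx.Exec(`ALTER TABLE users DROP COLUMN example_column;`)
	return err
}
```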
@@ -1,13 +1,9 @@
 // Code generated by sqlc. DO NOT EDIT.
 // versions:
-//   sqlc v1.21.0
+//   sqlc v1.29.0

 package database

-import (
-	"database/sql"
-)
-
 type Activity struct {
 	ID     int64  `json:"id"`
 	UserID string `json:"user_id"`
@@ -32,6 +28,7 @@ type Device struct {
 type Document struct {
 	ID        string  `json:"id"`
 	Md5       *string `json:"md5"`
+	Basepath  *string `json:"basepath"`
 	Filepath  *string `json:"filepath"`
 	Coverfile *string `json:"coverfile"`
 	Title     *string `json:"title"`
@@ -63,15 +60,25 @@ type DocumentProgress struct {
 type DocumentUserStatistic struct {
 	DocumentID         string  `json:"document_id"`
 	UserID             string  `json:"user_id"`
-	LastRead           string  `json:"last_read"`
-	TotalTimeSeconds   int64   `json:"total_time_seconds"`
-	ReadPercentage     float64 `json:"read_percentage"`
 	Percentage         float64 `json:"percentage"`
-	WordsRead          int64   `json:"words_read"`
-	Wpm                float64 `json:"wpm"`
+	LastRead           string  `json:"last_read"`
+	LastSeen           string  `json:"last_seen"`
+	ReadPercentage     float64 `json:"read_percentage"`
+	TotalTimeSeconds   int64   `json:"total_time_seconds"`
+	TotalWordsRead     int64   `json:"total_words_read"`
+	TotalWpm           float64 `json:"total_wpm"`
+	YearlyTimeSeconds  int64   `json:"yearly_time_seconds"`
+	YearlyWordsRead    int64   `json:"yearly_words_read"`
+	YearlyWpm          float64 `json:"yearly_wpm"`
+	MonthlyTimeSeconds int64   `json:"monthly_time_seconds"`
+	MonthlyWordsRead   int64   `json:"monthly_words_read"`
+	MonthlyWpm         float64 `json:"monthly_wpm"`
+	WeeklyTimeSeconds  int64   `json:"weekly_time_seconds"`
+	WeeklyWordsRead    int64   `json:"weekly_words_read"`
+	WeeklyWpm          float64 `json:"weekly_wpm"`
 }

-type Metadatum struct {
+type Metadata struct {
 	ID         int64   `json:"id"`
 	DocumentID string  `json:"document_id"`
 	Title      *string `json:"title"`
@@ -84,11 +91,19 @@ type Metadatum struct {
 	CreatedAt string `json:"created_at"`
 }

+type Setting struct {
+	ID        int64  `json:"id"`
+	Name      string `json:"name"`
+	Value     string `json:"value"`
+	CreatedAt string `json:"created_at"`
+}
+
 type User struct {
 	ID         string  `json:"id"`
 	Pass       *string `json:"-"`
+	AuthHash   *string `json:"auth_hash"`
 	Admin      bool    `json:"-"`
-	TimeOffset *string `json:"time_offset"`
+	Timezone   *string `json:"timezone"`
 	CreatedAt  string  `json:"created_at"`
 }
@@ -101,26 +116,8 @@ type UserStreak struct {
 	CurrentStreak          int64  `json:"current_streak"`
 	CurrentStreakStartDate string `json:"current_streak_start_date"`
 	CurrentStreakEndDate   string `json:"current_streak_end_date"`
+	LastTimezone           string `json:"last_timezone"`
+	LastSeen               string `json:"last_seen"`
+	LastRecord             string `json:"last_record"`
+	LastCalculated         string `json:"last_calculated"`
 }
-
-type ViewDocumentUserStatistic struct {
-	DocumentID       string          `json:"document_id"`
-	UserID           string          `json:"user_id"`
-	LastRead         interface{}     `json:"last_read"`
-	TotalTimeSeconds sql.NullFloat64 `json:"total_time_seconds"`
-	ReadPercentage   sql.NullFloat64 `json:"read_percentage"`
-	Percentage       float64         `json:"percentage"`
-	WordsRead        interface{}     `json:"words_read"`
-	Wpm              int64           `json:"wpm"`
-}
-
-type ViewUserStreak struct {
-	UserID                 string      `json:"user_id"`
-	Window                 string      `json:"window"`
-	MaxStreak              interface{} `json:"max_streak"`
-	MaxStreakStartDate     interface{} `json:"max_streak_start_date"`
-	MaxStreakEndDate       interface{} `json:"max_streak_end_date"`
-	CurrentStreak          interface{} `json:"current_streak"`
-	CurrentStreakStartDate interface{} `json:"current_streak_start_date"`
-	CurrentStreakEndDate   interface{} `json:"current_streak_end_date"`
-}
@@ -26,10 +26,13 @@ VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
|||||||
RETURNING *;
|
RETURNING *;
|
||||||
|
|
||||||
-- name: CreateUser :execrows
|
-- name: CreateUser :execrows
|
||||||
INSERT INTO users (id, pass)
|
INSERT INTO users (id, pass, auth_hash, admin)
|
||||||
VALUES (?, ?)
|
VALUES (?, ?, ?, ?)
|
||||||
ON CONFLICT DO NOTHING;
|
ON CONFLICT DO NOTHING;
|
||||||
|
|
||||||
|
-- name: DeleteUser :execrows
|
||||||
|
DELETE FROM users WHERE id = $id;
|
||||||
|
|
||||||
-- name: DeleteDocument :execrows
|
-- name: DeleteDocument :execrows
|
||||||
UPDATE documents
|
UPDATE documents
|
||||||
SET
|
SET
|
||||||
@@ -64,7 +67,7 @@ WITH filtered_activity AS (
|
|||||||
SELECT
|
SELECT
|
||||||
document_id,
|
document_id,
|
||||||
device_id,
|
device_id,
|
||||||
CAST(STRFTIME('%Y-%m-%d %H:%M:%S', activity.start_time, users.time_offset) AS TEXT) AS start_time,
|
CAST(LOCAL_TIME(activity.start_time, users.timezone) AS TEXT) AS start_time,
|
||||||
title,
|
title,
|
||||||
author,
|
author,
|
||||||
duration,
|
duration,
|
||||||
@@ -77,7 +80,7 @@ LEFT JOIN users ON users.id = activity.user_id;
|
|||||||
|
|
||||||
-- name: GetDailyReadStats :many
|
-- name: GetDailyReadStats :many
|
||||||
WITH RECURSIVE last_30_days AS (
|
WITH RECURSIVE last_30_days AS (
|
||||||
SELECT DATE('now', time_offset) AS date
|
SELECT LOCAL_DATE(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone) AS date
|
||||||
FROM users WHERE users.id = $user_id
|
FROM users WHERE users.id = $user_id
|
||||||
UNION ALL
|
UNION ALL
|
||||||
SELECT DATE(date, '-1 days')
|
SELECT DATE(date, '-1 days')
|
||||||
@@ -96,11 +99,10 @@ filtered_activity AS (
|
|||||||
activity_days AS (
|
activity_days AS (
|
||||||
SELECT
|
SELECT
|
||||||
SUM(duration) AS seconds_read,
|
SUM(duration) AS seconds_read,
|
||||||
DATE(start_time, time_offset) AS day
|
LOCAL_DATE(start_time, timezone) AS day
|
||||||
FROM filtered_activity AS activity
|
FROM filtered_activity AS activity
|
||||||
LEFT JOIN users ON users.id = activity.user_id
|
LEFT JOIN users ON users.id = activity.user_id
|
||||||
GROUP BY day
|
GROUP BY day
|
||||||
LIMIT 30
|
|
||||||
)
|
)
|
||||||
SELECT
|
SELECT
|
||||||
CAST(date AS TEXT),
|
CAST(date AS TEXT),
|
||||||
@@ -136,8 +138,8 @@ WHERE id = $device_id LIMIT 1;
|
|||||||
SELECT
|
SELECT
|
||||||
devices.id,
|
devices.id,
|
||||||
devices.device_name,
|
devices.device_name,
|
||||||
CAST(STRFTIME('%Y-%m-%d %H:%M:%S', devices.created_at, users.time_offset) AS TEXT) AS created_at,
|
CAST(LOCAL_TIME(devices.created_at, users.timezone) AS TEXT) AS created_at,
|
||||||
CAST(STRFTIME('%Y-%m-%d %H:%M:%S', devices.last_synced, users.time_offset) AS TEXT) AS last_synced
|
CAST(LOCAL_TIME(devices.last_synced, users.timezone) AS TEXT) AS last_synced
|
||||||
FROM devices
|
FROM devices
|
||||||
JOIN users ON users.id = devices.user_id
|
JOIN users ON users.id = devices.user_id
|
||||||
WHERE users.id = $user_id
|
WHERE users.id = $user_id
|
||||||
@@ -147,40 +149,18 @@ ORDER BY devices.last_synced DESC;
|
|||||||
SELECT * FROM documents
|
SELECT * FROM documents
|
||||||
WHERE id = $document_id LIMIT 1;
|
WHERE id = $document_id LIMIT 1;
|
||||||
|
|
||||||
-- name: GetDocumentWithStats :one
|
-- name: GetDocumentProgress :one
|
||||||
SELECT
|
SELECT
|
||||||
docs.id,
|
document_progress.*,
|
||||||
docs.title,
|
devices.device_name
|
||||||
docs.author,
|
FROM document_progress
|
||||||
docs.description,
|
JOIN devices ON document_progress.device_id = devices.id
|
||||||
docs.isbn10,
|
WHERE
|
||||||
docs.isbn13,
|
document_progress.user_id = $user_id
|
||||||
docs.filepath,
|
AND document_progress.document_id = $document_id
|
||||||
docs.words,
|
ORDER BY
|
||||||
|
document_progress.created_at
|
||||||
CAST(COALESCE(dus.wpm, 0.0) AS INTEGER) AS wpm,
|
DESC
|
||||||
COALESCE(dus.read_percentage, 0) AS read_percentage,
|
|
||||||
COALESCE(dus.total_time_seconds, 0) AS total_time_seconds,
|
|
||||||
STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01"), users.time_offset)
|
|
||||||
AS last_read,
|
|
||||||
ROUND(CAST(CASE
|
|
||||||
WHEN dus.percentage IS NULL THEN 0.0
|
|
||||||
WHEN (dus.percentage * 100.0) > 97.0 THEN 100.0
|
|
||||||
ELSE dus.percentage * 100.0
|
|
||||||
END AS REAL), 2) AS percentage,
|
|
||||||
CAST(CASE
|
|
||||||
WHEN dus.total_time_seconds IS NULL THEN 0.0
|
|
||||||
ELSE
|
|
||||||
CAST(dus.total_time_seconds AS REAL)
|
|
||||||
/ (dus.read_percentage * 100.0)
|
|
||||||
END AS INTEGER) AS seconds_per_percent
|
|
||||||
FROM documents AS docs
|
|
||||||
LEFT JOIN users ON users.id = $user_id
|
|
||||||
LEFT JOIN
|
|
||||||
document_user_statistics AS dus
|
|
||||||
ON dus.document_id = docs.id AND dus.user_id = $user_id
|
|
||||||
WHERE users.id = $user_id
|
|
||||||
AND docs.id = $document_id
|
|
||||||
LIMIT 1;
|
LIMIT 1;
|
||||||
|
|
||||||
-- name: GetDocuments :many
|
-- name: GetDocuments :many
|
@@ -210,36 +190,35 @@ SELECT
     docs.filepath,
     docs.words,

-    CAST(COALESCE(dus.wpm, 0.0) AS INTEGER) AS wpm,
+    CAST(COALESCE(dus.total_wpm, 0.0) AS INTEGER) AS wpm,
     COALESCE(dus.read_percentage, 0) AS read_percentage,
     COALESCE(dus.total_time_seconds, 0) AS total_time_seconds,
-    STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01"), users.time_offset)
+    STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(COALESCE(dus.last_read, STRFTIME('%Y-%m-%dT%H:%M:%SZ', 0, 'unixepoch')), users.timezone))
         AS last_read,
     ROUND(CAST(CASE
         WHEN dus.percentage IS NULL THEN 0.0
         WHEN (dus.percentage * 100.0) > 97.0 THEN 100.0
         ELSE dus.percentage * 100.0
     END AS REAL), 2) AS percentage,
-    CASE
+    CAST(CASE
         WHEN dus.total_time_seconds IS NULL THEN 0.0
         ELSE
-            ROUND(
-                CAST(dus.total_time_seconds AS REAL)
-                / (dus.read_percentage * 100.0)
-            )
-    END AS seconds_per_percent
+            CAST(dus.total_time_seconds AS REAL)
+            / (dus.read_percentage * 100.0)
+    END AS INTEGER) AS seconds_per_percent
 FROM documents AS docs
 LEFT JOIN users ON users.id = $user_id
 LEFT JOIN
     document_user_statistics AS dus
     ON dus.document_id = docs.id AND dus.user_id = $user_id
 WHERE
-    docs.deleted = false AND (
-        $query IS NULL OR (
-            docs.title LIKE $query OR
-            docs.author LIKE $query
-        )
-    )
+    (docs.id = sqlc.narg('id') OR $id IS NULL)
+    AND (docs.deleted = sqlc.narg(deleted) OR $deleted IS NULL)
+    AND (
+        (
+            docs.title LIKE sqlc.narg('query') OR
+            docs.author LIKE $query
+        ) OR $query IS NULL
+    )
 ORDER BY dus.last_read DESC, docs.created_at DESC
 LIMIT $limit
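The rewritten WHERE clause above uses sqlc's nullable parameters (sqlc.narg), so every filter is optional: passing NULL disables the corresponding clause. A minimal usage sketch against the generated Go API that appears later in this diff; the import path and the helper function are assumptions for illustration, not project code:

    package docs

    import (
        "context"
        "fmt"

        "reichard.io/antholume/database" // assumed import path
    )

    func ptr[T any](v T) *T { return &v }

    // ListMatching prints every non-deleted document whose title or author
    // matches the LIKE pattern. Nil pointer fields leave a filter disabled
    // (the "$x IS NULL" branch); non-nil values switch it on.
    func ListMatching(ctx context.Context, q *database.Queries, userID, pattern string) error {
        rows, err := q.GetDocumentsWithStats(ctx, database.GetDocumentsWithStatsParams{
            UserID:  userID,
            Deleted: ptr(false),
            Query:   ptr("%" + pattern + "%"),
            Offset:  0,
            Limit:   50,
        })
        if err != nil {
            return err
        }
        for _, r := range rows {
            fmt.Printf("%+v\n", r)
        }
        return nil
    }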
@@ -259,19 +238,30 @@ WHERE
     AND documents.deleted = false
     AND documents.id NOT IN (sqlc.slice('document_ids'));

--- name: GetProgress :one
+-- name: GetProgress :many
 SELECT
-    document_progress.*,
-    devices.device_name
-FROM document_progress
-JOIN devices ON document_progress.device_id = devices.id
+    documents.title,
+    documents.author,
+    devices.device_name,
+    ROUND(CAST(progress.percentage AS REAL) * 100, 2) AS percentage,
+    progress.document_id,
+    progress.user_id,
+    CAST(LOCAL_TIME(progress.created_at, users.timezone) AS TEXT) AS created_at
+FROM document_progress AS progress
+LEFT JOIN users ON progress.user_id = users.id
+LEFT JOIN devices ON progress.device_id = devices.id
+LEFT JOIN documents ON progress.document_id = documents.id
 WHERE
-    document_progress.user_id = $user_id
-    AND document_progress.document_id = $document_id
-ORDER BY
-    document_progress.created_at
-    DESC
-LIMIT 1;
+    progress.user_id = $user_id
+    AND (
+        (
+            CAST($doc_filter AS BOOLEAN) = TRUE
+            AND document_id = $document_id
+        ) OR $doc_filter = FALSE
+    )
+ORDER BY created_at DESC
+LIMIT $limit
+OFFSET $offset;

 -- name: GetUser :one
 SELECT * FROM users
@@ -281,17 +271,37 @@ WHERE id = $user_id LIMIT 1;
 SELECT * FROM user_streaks
 WHERE user_id = $user_id;

--- name: GetWPMLeaderboard :many
+-- name: GetUsers :many
+SELECT * FROM users;
+
+-- name: GetUserStatistics :many
 SELECT
     user_id,
-    CAST(SUM(words_read) AS INTEGER) AS total_words_read,
+
+    CAST(SUM(total_words_read) AS INTEGER) AS total_words_read,
     CAST(SUM(total_time_seconds) AS INTEGER) AS total_seconds,
-    ROUND(CAST(SUM(words_read) AS REAL) / (SUM(total_time_seconds) / 60.0), 2)
-        AS wpm
+    ROUND(COALESCE(CAST(SUM(total_words_read) AS REAL) / (SUM(total_time_seconds) / 60.0), 0.0), 2)
+        AS total_wpm,
+
+    CAST(SUM(yearly_words_read) AS INTEGER) AS yearly_words_read,
+    CAST(SUM(yearly_time_seconds) AS INTEGER) AS yearly_seconds,
+    ROUND(COALESCE(CAST(SUM(yearly_words_read) AS REAL) / (SUM(yearly_time_seconds) / 60.0), 0.0), 2)
+        AS yearly_wpm,
+
+    CAST(SUM(monthly_words_read) AS INTEGER) AS monthly_words_read,
+    CAST(SUM(monthly_time_seconds) AS INTEGER) AS monthly_seconds,
+    ROUND(COALESCE(CAST(SUM(monthly_words_read) AS REAL) / (SUM(monthly_time_seconds) / 60.0), 0.0), 2)
+        AS monthly_wpm,
+
+    CAST(SUM(weekly_words_read) AS INTEGER) AS weekly_words_read,
+    CAST(SUM(weekly_time_seconds) AS INTEGER) AS weekly_seconds,
+    ROUND(COALESCE(CAST(SUM(weekly_words_read) AS REAL) / (SUM(weekly_time_seconds) / 60.0), 0.0), 2)
+        AS weekly_wpm
+
 FROM document_user_statistics
-WHERE words_read > 0
+WHERE total_words_read > 0
 GROUP BY user_id
-ORDER BY wpm DESC;
+ORDER BY total_wpm DESC;

 -- name: GetWantedDocuments :many
 SELECT
@@ -324,10 +334,21 @@ RETURNING *;
 UPDATE users
 SET
     pass = COALESCE($password, pass),
-    time_offset = COALESCE($time_offset, time_offset)
+    auth_hash = COALESCE($auth_hash, auth_hash),
+    timezone = COALESCE($timezone, timezone),
+    admin = COALESCE($admin, admin)
 WHERE id = $user_id
 RETURNING *;

+-- name: UpdateSettings :one
+INSERT INTO settings (name, value)
+VALUES (?, ?)
+ON CONFLICT DO UPDATE
+SET
+    name = COALESCE(excluded.name, name),
+    value = COALESCE(excluded.value, value)
+RETURNING *;
+
 -- name: UpsertDevice :one
 INSERT INTO devices (id, user_id, last_synced, device_name)
 VALUES (?, ?, ?, ?)
@@ -341,6 +362,7 @@ RETURNING *;
 INSERT INTO documents (
     id,
     md5,
+    basepath,
     filepath,
     coverfile,
     title,
@@ -355,10 +377,11 @@ INSERT INTO documents (
     isbn10,
     isbn13
 )
-VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
 ON CONFLICT DO UPDATE
 SET
     md5 = COALESCE(excluded.md5, md5),
+    basepath = COALESCE(excluded.basepath, basepath),
     filepath = COALESCE(excluded.filepath, filepath),
     coverfile = COALESCE(excluded.coverfile, coverfile),
     title = COALESCE(excluded.title, title),

@@ -1,6 +1,6 @@
 // Code generated by sqlc. DO NOT EDIT.
 // versions:
-//   sqlc v1.21.0
+//   sqlc v1.29.0
 // source: query.sql

 package database
@@ -85,7 +85,7 @@ type AddMetadataParams struct {
     Isbn13     *string `json:"isbn13"`
 }

-func (q *Queries) AddMetadata(ctx context.Context, arg AddMetadataParams) (Metadatum, error) {
+func (q *Queries) AddMetadata(ctx context.Context, arg AddMetadataParams) (Metadata, error) {
     row := q.db.QueryRowContext(ctx, addMetadata,
         arg.DocumentID,
         arg.Title,
@@ -96,7 +96,7 @@ func (q *Queries) AddMetadata(ctx context.Context, arg AddMetadataParams) (Metad
         arg.Isbn10,
         arg.Isbn13,
     )
-    var i Metadatum
+    var i Metadata
     err := row.Scan(
         &i.ID,
         &i.DocumentID,
@@ -113,18 +113,25 @@ func (q *Queries) AddMetadata(ctx context.Context, arg AddMetadataParams) (Metad
 }

 const createUser = `-- name: CreateUser :execrows
-INSERT INTO users (id, pass)
-VALUES (?, ?)
+INSERT INTO users (id, pass, auth_hash, admin)
+VALUES (?, ?, ?, ?)
 ON CONFLICT DO NOTHING
 `

 type CreateUserParams struct {
     ID   string  `json:"id"`
     Pass *string `json:"-"`
+    AuthHash *string `json:"auth_hash"`
+    Admin    bool    `json:"-"`
 }

 func (q *Queries) CreateUser(ctx context.Context, arg CreateUserParams) (int64, error) {
-    result, err := q.db.ExecContext(ctx, createUser, arg.ID, arg.Pass)
+    result, err := q.db.ExecContext(ctx, createUser,
+        arg.ID,
+        arg.Pass,
+        arg.AuthHash,
+        arg.Admin,
+    )
     if err != nil {
         return 0, err
     }
@@ -146,6 +153,18 @@ func (q *Queries) DeleteDocument(ctx context.Context, id string) (int64, error)
     return result.RowsAffected()
 }

+const deleteUser = `-- name: DeleteUser :execrows
+DELETE FROM users WHERE id = ?1
+`
+
+func (q *Queries) DeleteUser(ctx context.Context, id string) (int64, error) {
+    result, err := q.db.ExecContext(ctx, deleteUser, id)
+    if err != nil {
+        return 0, err
+    }
+    return result.RowsAffected()
+}
+
 const getActivity = `-- name: GetActivity :many
 WITH filtered_activity AS (
     SELECT
@@ -174,7 +193,7 @@ WITH filtered_activity AS (
 SELECT
     document_id,
     device_id,
-    CAST(STRFTIME('%Y-%m-%d %H:%M:%S', activity.start_time, users.time_offset) AS TEXT) AS start_time,
+    CAST(LOCAL_TIME(activity.start_time, users.timezone) AS TEXT) AS start_time,
     title,
     author,
     duration,
@@ -247,7 +266,7 @@ func (q *Queries) GetActivity(ctx context.Context, arg GetActivityParams) ([]Get

 const getDailyReadStats = `-- name: GetDailyReadStats :many
 WITH RECURSIVE last_30_days AS (
-    SELECT DATE('now', time_offset) AS date
+    SELECT LOCAL_DATE(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone) AS date
     FROM users WHERE users.id = ?1
     UNION ALL
     SELECT DATE(date, '-1 days')
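LOCAL_TIME (and LOCAL_DATE below) are not built-in SQLite functions, so the application has to register them on each connection before these queries can run. A sketch of one way to do that, assuming the mattn/go-sqlite3 driver and the RFC 3339 UTC timestamps this schema stores; the driver, function bodies, and formats actually used by this repository may differ:

    package database

    import (
        "database/sql"
        "time"

        sqlite "github.com/mattn/go-sqlite3" // assumed driver
    )

    func init() {
        sql.Register("sqlite3_antholume", &sqlite.SQLiteDriver{
            ConnectHook: func(conn *sqlite.SQLiteConn) error {
                // LOCAL_TIME(ts, tz): shift a stored UTC timestamp into tz.
                localTime := func(ts, tz string) (string, error) {
                    loc, err := time.LoadLocation(tz)
                    if err != nil {
                        return "", err
                    }
                    t, err := time.Parse(time.RFC3339, ts)
                    if err != nil {
                        return "", err
                    }
                    return t.In(loc).Format("2006-01-02 15:04:05"), nil
                }
                if err := conn.RegisterFunc("LOCAL_TIME", localTime, true); err != nil {
                    return err
                }
                // LOCAL_DATE(ts, tz): the date portion of LOCAL_TIME.
                localDate := func(ts, tz string) (string, error) {
                    s, err := localTime(ts, tz)
                    if err != nil {
                        return "", err
                    }
                    return s[:10], nil
                }
                return conn.RegisterFunc("LOCAL_DATE", localDate, true)
            },
        })
    }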
@@ -266,11 +285,10 @@ filtered_activity AS (
 activity_days AS (
     SELECT
         SUM(duration) AS seconds_read,
-        DATE(start_time, time_offset) AS day
+        LOCAL_DATE(start_time, timezone) AS day
     FROM filtered_activity AS activity
     LEFT JOIN users ON users.id = activity.user_id
     GROUP BY day
-    LIMIT 30
 )
 SELECT
     CAST(date AS TEXT),
@@ -404,8 +422,8 @@ const getDevices = `-- name: GetDevices :many
 SELECT
     devices.id,
     devices.device_name,
-    CAST(STRFTIME('%Y-%m-%d %H:%M:%S', devices.created_at, users.time_offset) AS TEXT) AS created_at,
-    CAST(STRFTIME('%Y-%m-%d %H:%M:%S', devices.last_synced, users.time_offset) AS TEXT) AS last_synced
+    CAST(LOCAL_TIME(devices.created_at, users.timezone) AS TEXT) AS created_at,
+    CAST(LOCAL_TIME(devices.last_synced, users.timezone) AS TEXT) AS last_synced
 FROM devices
 JOIN users ON users.id = devices.user_id
 WHERE users.id = ?1
@@ -448,7 +466,7 @@ func (q *Queries) GetDevices(ctx context.Context, userID string) ([]GetDevicesRo
 }

 const getDocument = `-- name: GetDocument :one
-SELECT id, md5, filepath, coverfile, title, author, series, series_index, lang, description, words, gbid, olid, isbn10, isbn13, synced, deleted, updated_at, created_at FROM documents
+SELECT id, md5, basepath, filepath, coverfile, title, author, series, series_index, lang, description, words, gbid, olid, isbn10, isbn13, synced, deleted, updated_at, created_at FROM documents
 WHERE id = ?1 LIMIT 1
 `

@@ -458,6 +476,7 @@ func (q *Queries) GetDocument(ctx context.Context, documentID string) (Document,
     err := row.Scan(
         &i.ID,
         &i.Md5,
+        &i.Basepath,
         &i.Filepath,
         &i.Coverfile,
         &i.Title,
@@ -479,89 +498,53 @@ func (q *Queries) GetDocument(ctx context.Context, documentID string) (Document,
     return i, err
 }

-const getDocumentWithStats = `-- name: GetDocumentWithStats :one
+const getDocumentProgress = `-- name: GetDocumentProgress :one
 SELECT
-    docs.id,
-    docs.title,
-    docs.author,
-    docs.description,
-    docs.isbn10,
-    docs.isbn13,
-    docs.filepath,
-    docs.words,
-
-    CAST(COALESCE(dus.wpm, 0.0) AS INTEGER) AS wpm,
-    COALESCE(dus.read_percentage, 0) AS read_percentage,
-    COALESCE(dus.total_time_seconds, 0) AS total_time_seconds,
-    STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01"), users.time_offset)
-        AS last_read,
-    ROUND(CAST(CASE
-        WHEN dus.percentage IS NULL THEN 0.0
-        WHEN (dus.percentage * 100.0) > 97.0 THEN 100.0
-        ELSE dus.percentage * 100.0
-    END AS REAL), 2) AS percentage,
-    CAST(CASE
-        WHEN dus.total_time_seconds IS NULL THEN 0.0
-        ELSE
-            CAST(dus.total_time_seconds AS REAL)
-            / (dus.read_percentage * 100.0)
-    END AS INTEGER) AS seconds_per_percent
-FROM documents AS docs
-LEFT JOIN users ON users.id = ?1
-LEFT JOIN
-    document_user_statistics AS dus
-    ON dus.document_id = docs.id AND dus.user_id = ?1
-WHERE users.id = ?1
-    AND docs.id = ?2
+    document_progress.user_id, document_progress.document_id, document_progress.device_id, document_progress.percentage, document_progress.progress, document_progress.created_at,
+    devices.device_name
+FROM document_progress
+JOIN devices ON document_progress.device_id = devices.id
+WHERE
+    document_progress.user_id = ?1
+    AND document_progress.document_id = ?2
+ORDER BY
+    document_progress.created_at
+    DESC
 LIMIT 1
 `

-type GetDocumentWithStatsParams struct {
+type GetDocumentProgressParams struct {
     UserID     string `json:"user_id"`
     DocumentID string `json:"document_id"`
 }

-type GetDocumentWithStatsRow struct {
-    ID                string      `json:"id"`
-    Title             *string     `json:"title"`
-    Author            *string     `json:"author"`
-    Description       *string     `json:"description"`
-    Isbn10            *string     `json:"isbn10"`
-    Isbn13            *string     `json:"isbn13"`
-    Filepath          *string     `json:"filepath"`
-    Words             *int64      `json:"words"`
-    Wpm               int64       `json:"wpm"`
-    ReadPercentage    float64     `json:"read_percentage"`
-    TotalTimeSeconds  int64       `json:"total_time_seconds"`
-    LastRead          interface{} `json:"last_read"`
+type GetDocumentProgressRow struct {
+    UserID     string  `json:"user_id"`
+    DocumentID string  `json:"document_id"`
+    DeviceID   string  `json:"device_id"`
     Percentage float64 `json:"percentage"`
-    SecondsPerPercent int64 `json:"seconds_per_percent"`
+    Progress   string  `json:"progress"`
+    CreatedAt  string  `json:"created_at"`
+    DeviceName string  `json:"device_name"`
 }

-func (q *Queries) GetDocumentWithStats(ctx context.Context, arg GetDocumentWithStatsParams) (GetDocumentWithStatsRow, error) {
-    row := q.db.QueryRowContext(ctx, getDocumentWithStats, arg.UserID, arg.DocumentID)
-    var i GetDocumentWithStatsRow
+func (q *Queries) GetDocumentProgress(ctx context.Context, arg GetDocumentProgressParams) (GetDocumentProgressRow, error) {
+    row := q.db.QueryRowContext(ctx, getDocumentProgress, arg.UserID, arg.DocumentID)
+    var i GetDocumentProgressRow
     err := row.Scan(
-        &i.ID,
-        &i.Title,
-        &i.Author,
-        &i.Description,
-        &i.Isbn10,
-        &i.Isbn13,
-        &i.Filepath,
-        &i.Words,
-        &i.Wpm,
-        &i.ReadPercentage,
-        &i.TotalTimeSeconds,
-        &i.LastRead,
+        &i.UserID,
+        &i.DocumentID,
+        &i.DeviceID,
         &i.Percentage,
-        &i.SecondsPerPercent,
+        &i.Progress,
+        &i.CreatedAt,
+        &i.DeviceName,
     )
     return i, err
 }

 const getDocuments = `-- name: GetDocuments :many
-SELECT id, md5, filepath, coverfile, title, author, series, series_index, lang, description, words, gbid, olid, isbn10, isbn13, synced, deleted, updated_at, created_at FROM documents
+SELECT id, md5, basepath, filepath, coverfile, title, author, series, series_index, lang, description, words, gbid, olid, isbn10, isbn13, synced, deleted, updated_at, created_at FROM documents
 ORDER BY created_at DESC
 LIMIT ?2
 OFFSET ?1
@@ -584,6 +567,7 @@ func (q *Queries) GetDocuments(ctx context.Context, arg GetDocumentsParams) ([]D
         if err := rows.Scan(
             &i.ID,
             &i.Md5,
+            &i.Basepath,
             &i.Filepath,
             &i.Coverfile,
             &i.Title,
@@ -644,45 +628,46 @@ SELECT
     docs.filepath,
     docs.words,

-    CAST(COALESCE(dus.wpm, 0.0) AS INTEGER) AS wpm,
+    CAST(COALESCE(dus.total_wpm, 0.0) AS INTEGER) AS wpm,
     COALESCE(dus.read_percentage, 0) AS read_percentage,
     COALESCE(dus.total_time_seconds, 0) AS total_time_seconds,
-    STRFTIME('%Y-%m-%d %H:%M:%S', COALESCE(dus.last_read, "1970-01-01"), users.time_offset)
+    STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(COALESCE(dus.last_read, STRFTIME('%Y-%m-%dT%H:%M:%SZ', 0, 'unixepoch')), users.timezone))
         AS last_read,
     ROUND(CAST(CASE
         WHEN dus.percentage IS NULL THEN 0.0
         WHEN (dus.percentage * 100.0) > 97.0 THEN 100.0
         ELSE dus.percentage * 100.0
     END AS REAL), 2) AS percentage,
-    CASE
+    CAST(CASE
         WHEN dus.total_time_seconds IS NULL THEN 0.0
         ELSE
-            ROUND(
-                CAST(dus.total_time_seconds AS REAL)
-                / (dus.read_percentage * 100.0)
-            )
-    END AS seconds_per_percent
+            CAST(dus.total_time_seconds AS REAL)
+            / (dus.read_percentage * 100.0)
+    END AS INTEGER) AS seconds_per_percent
 FROM documents AS docs
 LEFT JOIN users ON users.id = ?1
 LEFT JOIN
     document_user_statistics AS dus
     ON dus.document_id = docs.id AND dus.user_id = ?1
 WHERE
-    docs.deleted = false AND (
-        ?2 IS NULL OR (
-            docs.title LIKE ?2 OR
-            docs.author LIKE ?2
-        )
-    )
+    (docs.id = ?2 OR ?2 IS NULL)
+    AND (docs.deleted = ?3 OR ?3 IS NULL)
+    AND (
+        (
+            docs.title LIKE ?4 OR
+            docs.author LIKE ?4
+        ) OR ?4 IS NULL
+    )
 ORDER BY dus.last_read DESC, docs.created_at DESC
-LIMIT ?4
-OFFSET ?3
+LIMIT ?6
+OFFSET ?5
 `

 type GetDocumentsWithStatsParams struct {
     UserID  string  `json:"user_id"`
-    Query   interface{} `json:"query"`
+    ID      *string `json:"id"`
+    Deleted *bool   `json:"-"`
+    Query   *string `json:"query"`
     Offset  int64   `json:"offset"`
     Limit   int64   `json:"limit"`
 }
@@ -701,12 +686,14 @@ type GetDocumentsWithStatsRow struct {
     TotalTimeSeconds  int64       `json:"total_time_seconds"`
     LastRead          interface{} `json:"last_read"`
     Percentage        float64     `json:"percentage"`
-    SecondsPerPercent interface{} `json:"seconds_per_percent"`
+    SecondsPerPercent int64       `json:"seconds_per_percent"`
 }

 func (q *Queries) GetDocumentsWithStats(ctx context.Context, arg GetDocumentsWithStatsParams) ([]GetDocumentsWithStatsRow, error) {
     rows, err := q.db.QueryContext(ctx, getDocumentsWithStats,
         arg.UserID,
+        arg.ID,
+        arg.Deleted,
         arg.Query,
         arg.Offset,
         arg.Limit,
@@ -768,7 +755,7 @@ func (q *Queries) GetLastActivity(ctx context.Context, arg GetLastActivityParams
 }

 const getMissingDocuments = `-- name: GetMissingDocuments :many
-SELECT documents.id, documents.md5, documents.filepath, documents.coverfile, documents.title, documents.author, documents.series, documents.series_index, documents.lang, documents.description, documents.words, documents.gbid, documents.olid, documents.isbn10, documents.isbn13, documents.synced, documents.deleted, documents.updated_at, documents.created_at FROM documents
+SELECT documents.id, documents.md5, documents.basepath, documents.filepath, documents.coverfile, documents.title, documents.author, documents.series, documents.series_index, documents.lang, documents.description, documents.words, documents.gbid, documents.olid, documents.isbn10, documents.isbn13, documents.synced, documents.deleted, documents.updated_at, documents.created_at FROM documents
 WHERE
     documents.filepath IS NOT NULL
     AND documents.deleted = false
@@ -797,6 +784,7 @@ func (q *Queries) GetMissingDocuments(ctx context.Context, documentIds []string)
         if err := rows.Scan(
             &i.ID,
             &i.Md5,
+            &i.Basepath,
             &i.Filepath,
             &i.Coverfile,
             &i.Title,
@@ -828,53 +816,89 @@ func (q *Queries) GetMissingDocuments(ctx context.Context, documentIds []string)
     return items, nil
 }

-const getProgress = `-- name: GetProgress :one
+const getProgress = `-- name: GetProgress :many
 SELECT
-    document_progress.user_id, document_progress.document_id, document_progress.device_id, document_progress.percentage, document_progress.progress, document_progress.created_at,
-    devices.device_name
-FROM document_progress
-JOIN devices ON document_progress.device_id = devices.id
+    documents.title,
+    documents.author,
+    devices.device_name,
+    ROUND(CAST(progress.percentage AS REAL) * 100, 2) AS percentage,
+    progress.document_id,
+    progress.user_id,
+    CAST(LOCAL_TIME(progress.created_at, users.timezone) AS TEXT) AS created_at
+FROM document_progress AS progress
+LEFT JOIN users ON progress.user_id = users.id
+LEFT JOIN devices ON progress.device_id = devices.id
+LEFT JOIN documents ON progress.document_id = documents.id
 WHERE
-    document_progress.user_id = ?1
-    AND document_progress.document_id = ?2
-ORDER BY
-    document_progress.created_at
-    DESC
-LIMIT 1
+    progress.user_id = ?1
+    AND (
+        (
+            CAST(?2 AS BOOLEAN) = TRUE
+            AND document_id = ?3
+        ) OR ?2 = FALSE
+    )
+ORDER BY created_at DESC
+LIMIT ?5
+OFFSET ?4
 `

 type GetProgressParams struct {
     UserID     string `json:"user_id"`
+    DocFilter  bool   `json:"doc_filter"`
     DocumentID string `json:"document_id"`
+    Offset     int64  `json:"offset"`
+    Limit      int64  `json:"limit"`
 }

 type GetProgressRow struct {
-    UserID     string  `json:"user_id"`
-    DocumentID string  `json:"document_id"`
-    DeviceID   string  `json:"device_id"`
-    Percentage float64 `json:"percentage"`
-    Progress   string  `json:"progress"`
-    CreatedAt  string  `json:"created_at"`
+    Title      *string `json:"title"`
+    Author     *string `json:"author"`
     DeviceName string  `json:"device_name"`
+    Percentage float64 `json:"percentage"`
+    DocumentID string  `json:"document_id"`
+    UserID     string  `json:"user_id"`
+    CreatedAt  string  `json:"created_at"`
 }

-func (q *Queries) GetProgress(ctx context.Context, arg GetProgressParams) (GetProgressRow, error) {
-    row := q.db.QueryRowContext(ctx, getProgress, arg.UserID, arg.DocumentID)
-    var i GetProgressRow
-    err := row.Scan(
-        &i.UserID,
-        &i.DocumentID,
-        &i.DeviceID,
-        &i.Percentage,
-        &i.Progress,
-        &i.CreatedAt,
-        &i.DeviceName,
-    )
-    return i, err
+func (q *Queries) GetProgress(ctx context.Context, arg GetProgressParams) ([]GetProgressRow, error) {
+    rows, err := q.db.QueryContext(ctx, getProgress,
+        arg.UserID,
+        arg.DocFilter,
+        arg.DocumentID,
+        arg.Offset,
+        arg.Limit,
+    )
+    if err != nil {
+        return nil, err
+    }
+    defer rows.Close()
+    var items []GetProgressRow
+    for rows.Next() {
+        var i GetProgressRow
+        if err := rows.Scan(
+            &i.Title,
+            &i.Author,
+            &i.DeviceName,
+            &i.Percentage,
+            &i.DocumentID,
+            &i.UserID,
+            &i.CreatedAt,
+        ); err != nil {
+            return nil, err
+        }
+        items = append(items, i)
+    }
+    if err := rows.Close(); err != nil {
+        return nil, err
+    }
+    if err := rows.Err(); err != nil {
+        return nil, err
+    }
+    return items, nil
 }

 const getUser = `-- name: GetUser :one
-SELECT id, pass, admin, time_offset, created_at FROM users
+SELECT id, pass, auth_hash, admin, timezone, created_at FROM users
 WHERE id = ?1 LIMIT 1
 `

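A short usage sketch for the reworked GetProgress, under the same assumed import path as the earlier sketches: with DocFilter true, the ?2/?3 branch restricts results to a single document; with DocFilter false, DocumentID is ignored and the user's whole history pages through Limit/Offset.

    package docs

    import (
        "context"
        "fmt"

        "reichard.io/antholume/database" // assumed import path
    )

    // recentProgress fetches the 25 most recent progress records for one document.
    func recentProgress(ctx context.Context, q *database.Queries, userID, docID string) error {
        recs, err := q.GetProgress(ctx, database.GetProgressParams{
            UserID:     userID,
            DocFilter:  true, // false would page progress across all documents
            DocumentID: docID,
            Offset:     0,
            Limit:      25,
        })
        if err != nil {
            return err
        }
        for _, r := range recs {
            fmt.Printf("%s  %s  %.2f%%\n", r.CreatedAt, r.DeviceName, r.Percentage)
        }
        return nil
    }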
@@ -884,15 +908,99 @@ func (q *Queries) GetUser(ctx context.Context, userID string) (User, error) {
     err := row.Scan(
         &i.ID,
         &i.Pass,
+        &i.AuthHash,
         &i.Admin,
-        &i.TimeOffset,
+        &i.Timezone,
         &i.CreatedAt,
     )
     return i, err
 }

+const getUserStatistics = `-- name: GetUserStatistics :many
+SELECT
+    user_id,
+
+    CAST(SUM(total_words_read) AS INTEGER) AS total_words_read,
+    CAST(SUM(total_time_seconds) AS INTEGER) AS total_seconds,
+    ROUND(COALESCE(CAST(SUM(total_words_read) AS REAL) / (SUM(total_time_seconds) / 60.0), 0.0), 2)
+        AS total_wpm,
+
+    CAST(SUM(yearly_words_read) AS INTEGER) AS yearly_words_read,
+    CAST(SUM(yearly_time_seconds) AS INTEGER) AS yearly_seconds,
+    ROUND(COALESCE(CAST(SUM(yearly_words_read) AS REAL) / (SUM(yearly_time_seconds) / 60.0), 0.0), 2)
+        AS yearly_wpm,
+
+    CAST(SUM(monthly_words_read) AS INTEGER) AS monthly_words_read,
+    CAST(SUM(monthly_time_seconds) AS INTEGER) AS monthly_seconds,
+    ROUND(COALESCE(CAST(SUM(monthly_words_read) AS REAL) / (SUM(monthly_time_seconds) / 60.0), 0.0), 2)
+        AS monthly_wpm,
+
+    CAST(SUM(weekly_words_read) AS INTEGER) AS weekly_words_read,
+    CAST(SUM(weekly_time_seconds) AS INTEGER) AS weekly_seconds,
+    ROUND(COALESCE(CAST(SUM(weekly_words_read) AS REAL) / (SUM(weekly_time_seconds) / 60.0), 0.0), 2)
+        AS weekly_wpm
+
+FROM document_user_statistics
+WHERE total_words_read > 0
+GROUP BY user_id
+ORDER BY total_wpm DESC
+`
+
+type GetUserStatisticsRow struct {
+    UserID           string  `json:"user_id"`
+    TotalWordsRead   int64   `json:"total_words_read"`
+    TotalSeconds     int64   `json:"total_seconds"`
+    TotalWpm         float64 `json:"total_wpm"`
+    YearlyWordsRead  int64   `json:"yearly_words_read"`
+    YearlySeconds    int64   `json:"yearly_seconds"`
+    YearlyWpm        float64 `json:"yearly_wpm"`
+    MonthlyWordsRead int64   `json:"monthly_words_read"`
+    MonthlySeconds   int64   `json:"monthly_seconds"`
+    MonthlyWpm       float64 `json:"monthly_wpm"`
+    WeeklyWordsRead  int64   `json:"weekly_words_read"`
+    WeeklySeconds    int64   `json:"weekly_seconds"`
+    WeeklyWpm        float64 `json:"weekly_wpm"`
+}
+
+func (q *Queries) GetUserStatistics(ctx context.Context) ([]GetUserStatisticsRow, error) {
+    rows, err := q.db.QueryContext(ctx, getUserStatistics)
+    if err != nil {
+        return nil, err
+    }
+    defer rows.Close()
+    var items []GetUserStatisticsRow
+    for rows.Next() {
+        var i GetUserStatisticsRow
+        if err := rows.Scan(
+            &i.UserID,
+            &i.TotalWordsRead,
+            &i.TotalSeconds,
+            &i.TotalWpm,
+            &i.YearlyWordsRead,
+            &i.YearlySeconds,
+            &i.YearlyWpm,
+            &i.MonthlyWordsRead,
+            &i.MonthlySeconds,
+            &i.MonthlyWpm,
+            &i.WeeklyWordsRead,
+            &i.WeeklySeconds,
+            &i.WeeklyWpm,
+        ); err != nil {
+            return nil, err
+        }
+        items = append(items, i)
+    }
+    if err := rows.Close(); err != nil {
+        return nil, err
+    }
+    if err := rows.Err(); err != nil {
+        return nil, err
+    }
+    return items, nil
+}
+
 const getUserStreaks = `-- name: GetUserStreaks :many
-SELECT user_id, "window", max_streak, max_streak_start_date, max_streak_end_date, current_streak, current_streak_start_date, current_streak_end_date FROM user_streaks
+SELECT user_id, "window", max_streak, max_streak_start_date, max_streak_end_date, current_streak, current_streak_start_date, current_streak_end_date, last_timezone, last_seen, last_record, last_calculated FROM user_streaks
 WHERE user_id = ?1
 `

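As a quick sanity check on the WPM arithmetic used throughout these aggregates: 12,000 words over 3,600 seconds is 12000 / (3600 / 60.0) = 200.00 wpm. The same computation in Go, an illustration mirroring the query rather than project code:

    package stats

    import "math"

    // wpm mirrors ROUND(COALESCE(words / (seconds / 60.0), 0.0), 2).
    func wpm(wordsRead, seconds int64) float64 {
        if seconds == 0 {
            return 0.0 // the COALESCE fallback: no recorded reading time
        }
        v := float64(wordsRead) / (float64(seconds) / 60.0)
        return math.Round(v*100) / 100 // two decimal places
    }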
@@ -914,6 +1022,10 @@ func (q *Queries) GetUserStreaks(ctx context.Context, userID string) ([]UserStre
         &i.CurrentStreak,
         &i.CurrentStreakStartDate,
         &i.CurrentStreakEndDate,
+        &i.LastTimezone,
+        &i.LastSeen,
+        &i.LastRecord,
+        &i.LastCalculated,
     ); err != nil {
         return nil, err
     }
@@ -928,40 +1040,26 @@ func (q *Queries) GetUserStreaks(ctx context.Context, userID string) ([]UserStre
     return items, nil
 }

-const getWPMLeaderboard = `-- name: GetWPMLeaderboard :many
-SELECT
-    user_id,
-    CAST(SUM(words_read) AS INTEGER) AS total_words_read,
-    CAST(SUM(total_time_seconds) AS INTEGER) AS total_seconds,
-    ROUND(CAST(SUM(words_read) AS REAL) / (SUM(total_time_seconds) / 60.0), 2)
-        AS wpm
-FROM document_user_statistics
-WHERE words_read > 0
-GROUP BY user_id
-ORDER BY wpm DESC
+const getUsers = `-- name: GetUsers :many
+SELECT id, pass, auth_hash, admin, timezone, created_at FROM users
 `

-type GetWPMLeaderboardRow struct {
-    UserID         string  `json:"user_id"`
-    TotalWordsRead int64   `json:"total_words_read"`
-    TotalSeconds   int64   `json:"total_seconds"`
-    Wpm            float64 `json:"wpm"`
-}
-
-func (q *Queries) GetWPMLeaderboard(ctx context.Context) ([]GetWPMLeaderboardRow, error) {
-    rows, err := q.db.QueryContext(ctx, getWPMLeaderboard)
+func (q *Queries) GetUsers(ctx context.Context) ([]User, error) {
+    rows, err := q.db.QueryContext(ctx, getUsers)
     if err != nil {
         return nil, err
     }
     defer rows.Close()
-    var items []GetWPMLeaderboardRow
+    var items []User
     for rows.Next() {
-        var i GetWPMLeaderboardRow
+        var i User
         if err := rows.Scan(
-            &i.UserID,
-            &i.TotalWordsRead,
-            &i.TotalSeconds,
-            &i.Wpm,
+            &i.ID,
+            &i.Pass,
+            &i.AuthHash,
+            &i.Admin,
+            &i.Timezone,
+            &i.CreatedAt,
         ); err != nil {
             return nil, err
         }
@@ -1062,29 +1160,67 @@ func (q *Queries) UpdateProgress(ctx context.Context, arg UpdateProgressParams)
     return i, err
 }

+const updateSettings = `-- name: UpdateSettings :one
+INSERT INTO settings (name, value)
+VALUES (?, ?)
+ON CONFLICT DO UPDATE
+SET
+    name = COALESCE(excluded.name, name),
+    value = COALESCE(excluded.value, value)
+RETURNING id, name, value, created_at
+`
+
+type UpdateSettingsParams struct {
+    Name  string `json:"name"`
+    Value string `json:"value"`
+}
+
+func (q *Queries) UpdateSettings(ctx context.Context, arg UpdateSettingsParams) (Setting, error) {
+    row := q.db.QueryRowContext(ctx, updateSettings, arg.Name, arg.Value)
+    var i Setting
+    err := row.Scan(
+        &i.ID,
+        &i.Name,
+        &i.Value,
+        &i.CreatedAt,
+    )
+    return i, err
+}
+
 const updateUser = `-- name: UpdateUser :one
 UPDATE users
 SET
     pass = COALESCE(?1, pass),
-    time_offset = COALESCE(?2, time_offset)
-WHERE id = ?3
-RETURNING id, pass, admin, time_offset, created_at
+    auth_hash = COALESCE(?2, auth_hash),
+    timezone = COALESCE(?3, timezone),
+    admin = COALESCE(?4, admin)
+WHERE id = ?5
+RETURNING id, pass, auth_hash, admin, timezone, created_at
 `

 type UpdateUserParams struct {
     Password *string `json:"-"`
-    TimeOffset *string `json:"time_offset"`
+    AuthHash *string `json:"auth_hash"`
+    Timezone *string `json:"timezone"`
+    Admin    bool    `json:"-"`
     UserID   string  `json:"user_id"`
 }

 func (q *Queries) UpdateUser(ctx context.Context, arg UpdateUserParams) (User, error) {
-    row := q.db.QueryRowContext(ctx, updateUser, arg.Password, arg.TimeOffset, arg.UserID)
+    row := q.db.QueryRowContext(ctx, updateUser,
+        arg.Password,
+        arg.AuthHash,
+        arg.Timezone,
+        arg.Admin,
+        arg.UserID,
+    )
     var i User
     err := row.Scan(
         &i.ID,
         &i.Pass,
+        &i.AuthHash,
         &i.Admin,
-        &i.TimeOffset,
+        &i.Timezone,
         &i.CreatedAt,
     )
     return i, err
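UpdateSettings is an insert-or-update: a fresh name/value pair is inserted, and on conflict the COALESCE in the DO UPDATE keeps whichever side is non-null. A hedged usage sketch; the setting name is hypothetical and the import path is assumed as in the earlier sketches:

    package docs

    import (
        "context"
        "strconv"

        "reichard.io/antholume/database" // assumed import path
    )

    // setRegistration persists a single named setting, overwriting any
    // previous value for the same row.
    func setRegistration(ctx context.Context, q *database.Queries, enabled bool) error {
        _, err := q.UpdateSettings(ctx, database.UpdateSettingsParams{
            Name:  "registration_enabled", // hypothetical setting name
            Value: strconv.FormatBool(enabled),
        })
        return err
    }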
@@ -1130,6 +1266,7 @@ const upsertDocument = `-- name: UpsertDocument :one
 INSERT INTO documents (
     id,
     md5,
+    basepath,
     filepath,
     coverfile,
     title,
@@ -1144,10 +1281,11 @@ INSERT INTO documents (
     isbn10,
     isbn13
 )
-VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
 ON CONFLICT DO UPDATE
 SET
     md5 = COALESCE(excluded.md5, md5),
+    basepath = COALESCE(excluded.basepath, basepath),
     filepath = COALESCE(excluded.filepath, filepath),
     coverfile = COALESCE(excluded.coverfile, coverfile),
     title = COALESCE(excluded.title, title),
@@ -1161,12 +1299,13 @@ SET
     gbid = COALESCE(excluded.gbid, gbid),
     isbn10 = COALESCE(excluded.isbn10, isbn10),
     isbn13 = COALESCE(excluded.isbn13, isbn13)
-RETURNING id, md5, filepath, coverfile, title, author, series, series_index, lang, description, words, gbid, olid, isbn10, isbn13, synced, deleted, updated_at, created_at
+RETURNING id, md5, basepath, filepath, coverfile, title, author, series, series_index, lang, description, words, gbid, olid, isbn10, isbn13, synced, deleted, updated_at, created_at
 `

 type UpsertDocumentParams struct {
     ID  string  `json:"id"`
     Md5 *string `json:"md5"`
+    Basepath *string `json:"basepath"`
     Filepath  *string `json:"filepath"`
     Coverfile *string `json:"coverfile"`
     Title     *string `json:"title"`
@@ -1186,6 +1325,7 @@ func (q *Queries) UpsertDocument(ctx context.Context, arg UpsertDocumentParams)
     row := q.db.QueryRowContext(ctx, upsertDocument,
         arg.ID,
         arg.Md5,
+        arg.Basepath,
         arg.Filepath,
         arg.Coverfile,
         arg.Title,
@@ -1204,6 +1344,7 @@ func (q *Queries) UpsertDocument(ctx context.Context, arg UpsertDocumentParams)
     err := row.Scan(
         &i.ID,
         &i.Md5,
+        &i.Basepath,
         &i.Filepath,
         &i.Coverfile,
         &i.Title,

@@ -1,6 +1,3 @@
-PRAGMA foreign_keys = ON;
-PRAGMA journal_mode = WAL;
-
 ---------------------------------------------------------------
 ------------------------ Normal Tables ------------------------
 ---------------------------------------------------------------
@@ -10,8 +7,9 @@ CREATE TABLE IF NOT EXISTS users (
     id TEXT NOT NULL PRIMARY KEY,

     pass TEXT NOT NULL,
+    auth_hash TEXT NOT NULL,
     admin BOOLEAN NOT NULL DEFAULT 0 CHECK (admin IN (0, 1)),
-    time_offset TEXT NOT NULL DEFAULT '0 hours',
+    timezone TEXT NOT NULL DEFAULT 'Europe/London',

     created_at DATETIME NOT NULL DEFAULT (STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'))
 );
@@ -21,6 +19,7 @@ CREATE TABLE IF NOT EXISTS documents (
     id TEXT NOT NULL PRIMARY KEY,

     md5 TEXT,
+    basepath TEXT,
     filepath TEXT,
     coverfile TEXT,
     title TEXT,
@@ -46,7 +45,6 @@ CREATE TABLE IF NOT EXISTS documents (
 -- Metadata
 CREATE TABLE IF NOT EXISTS metadata (
     id INTEGER PRIMARY KEY AUTOINCREMENT,
-
     document_id TEXT NOT NULL,

     title TEXT,
@@ -110,12 +108,46 @@ CREATE TABLE IF NOT EXISTS activity (
     FOREIGN KEY (device_id) REFERENCES devices (id)
 );

----------------------------------------------------------------
------------------------ Temporary Tables ----------------------
----------------------------------------------------------------
-
--- Temporary User Streaks Table (Cached from View)
-CREATE TEMPORARY TABLE IF NOT EXISTS user_streaks (
+-- Settings
+CREATE TABLE IF NOT EXISTS settings (
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+
+    name TEXT NOT NULL,
+    value TEXT NOT NULL,
+
+    created_at DATETIME NOT NULL DEFAULT (STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'))
+);
+
+-- Document User Statistics Table
+CREATE TABLE IF NOT EXISTS document_user_statistics (
+    document_id TEXT NOT NULL,
+    user_id TEXT NOT NULL,
+    percentage REAL NOT NULL,
+    last_read DATETIME NOT NULL,
+    last_seen DATETIME NOT NULL,
+    read_percentage REAL NOT NULL,
+
+    total_time_seconds INTEGER NOT NULL,
+    total_words_read INTEGER NOT NULL,
+    total_wpm REAL NOT NULL,
+
+    yearly_time_seconds INTEGER NOT NULL,
+    yearly_words_read INTEGER NOT NULL,
+    yearly_wpm REAL NOT NULL,
+
+    monthly_time_seconds INTEGER NOT NULL,
+    monthly_words_read INTEGER NOT NULL,
+    monthly_wpm REAL NOT NULL,
+
+    weekly_time_seconds INTEGER NOT NULL,
+    weekly_words_read INTEGER NOT NULL,
+    weekly_wpm REAL NOT NULL,
+
+    UNIQUE(document_id, user_id) ON CONFLICT REPLACE
+);
+
+-- User Streaks Table
+CREATE TABLE IF NOT EXISTS user_streaks (
     user_id TEXT NOT NULL,
     window TEXT NOT NULL,

@@ -125,245 +157,28 @@ CREATE TEMPORARY TABLE IF NOT EXISTS user_streaks (

     current_streak INTEGER NOT NULL,
     current_streak_start_date TEXT NOT NULL,
-    current_streak_end_date TEXT NOT NULL
+    current_streak_end_date TEXT NOT NULL,
+
+    last_timezone TEXT NOT NULL,
+    last_seen TEXT NOT NULL,
+    last_record TEXT NOT NULL,
+    last_calculated TEXT NOT NULL,
+
+    UNIQUE(user_id, window) ON CONFLICT REPLACE
 );

-CREATE TEMPORARY TABLE IF NOT EXISTS document_user_statistics (
-    document_id TEXT NOT NULL,
-    user_id TEXT NOT NULL,
-    last_read TEXT NOT NULL,
-    total_time_seconds INTEGER NOT NULL,
-    read_percentage REAL NOT NULL,
-    percentage REAL NOT NULL,
-    words_read INTEGER NOT NULL,
-    wpm REAL NOT NULL,
-
-    UNIQUE(document_id, user_id) ON CONFLICT REPLACE
-);
-
-
 ---------------------------------------------------------------
 --------------------------- Indexes ---------------------------
 ---------------------------------------------------------------

 CREATE INDEX IF NOT EXISTS activity_start_time ON activity (start_time);
+CREATE INDEX IF NOT EXISTS activity_created_at ON activity (created_at);
 CREATE INDEX IF NOT EXISTS activity_user_id ON activity (user_id);
 CREATE INDEX IF NOT EXISTS activity_user_id_document_id ON activity (
     user_id,
     document_id
 );

----------------------------------------------------------------
----------------------------- Views ----------------------------
----------------------------------------------------------------
-
---------------------------------
---------- User Streaks ---------
---------------------------------
-
-CREATE VIEW IF NOT EXISTS view_user_streaks AS
-
-WITH document_windows AS (
-    SELECT
-        activity.user_id,
-        users.time_offset,
-        DATE(
-            activity.start_time,
-            users.time_offset,
-            'weekday 0', '-7 day'
-        ) AS weekly_read,
-        DATE(activity.start_time, users.time_offset) AS daily_read
-    FROM activity
-    LEFT JOIN users ON users.id = activity.user_id
-    GROUP BY activity.user_id, weekly_read, daily_read
-),
-
-weekly_partitions AS (
-    SELECT
-        user_id,
-        time_offset,
-        'WEEK' AS "window",
-        weekly_read AS read_window,
-        row_number() OVER (
-            PARTITION BY user_id ORDER BY weekly_read DESC
-        ) AS seqnum
-    FROM document_windows
-    GROUP BY user_id, weekly_read
-),
-
-daily_partitions AS (
-    SELECT
-        user_id,
-        time_offset,
-        'DAY' AS "window",
-        daily_read AS read_window,
-        row_number() OVER (
-            PARTITION BY user_id ORDER BY daily_read DESC
-        ) AS seqnum
-    FROM document_windows
-    GROUP BY user_id, daily_read
-),
-
-streaks AS (
-    SELECT
-        COUNT(*) AS streak,
-        MIN(read_window) AS start_date,
-        MAX(read_window) AS end_date,
-        window,
-        user_id,
-        time_offset
-    FROM daily_partitions
-    GROUP BY
-        time_offset,
-        user_id,
-        DATE(read_window, '+' || seqnum || ' day')
-
-    UNION ALL
-
-    SELECT
-        COUNT(*) AS streak,
-        MIN(read_window) AS start_date,
-        MAX(read_window) AS end_date,
-        window,
-        user_id,
-        time_offset
-    FROM weekly_partitions
-    GROUP BY
-        time_offset,
-        user_id,
-        DATE(read_window, '+' || (seqnum * 7) || ' day')
-),
-max_streak AS (
-    SELECT
-        MAX(streak) AS max_streak,
-        start_date AS max_streak_start_date,
-        end_date AS max_streak_end_date,
-        window,
-        user_id
-    FROM streaks
-    GROUP BY user_id, window
-),
-current_streak AS (
-    SELECT
-        streak AS current_streak,
-        start_date AS current_streak_start_date,
-        end_date AS current_streak_end_date,
-        window,
-        user_id
-    FROM streaks
-    WHERE CASE
-        WHEN window = "WEEK" THEN
-            DATE('now', time_offset, 'weekday 0', '-14 day') = current_streak_end_date
-            OR DATE('now', time_offset, 'weekday 0', '-7 day') = current_streak_end_date
-        WHEN window = "DAY" THEN
-            DATE('now', time_offset, '-1 day') = current_streak_end_date
-            OR DATE('now', time_offset) = current_streak_end_date
-    END
-    GROUP BY user_id, window
-)
-SELECT
-    max_streak.user_id,
-    max_streak.window,
-    IFNULL(max_streak, 0) AS max_streak,
-    IFNULL(max_streak_start_date, "N/A") AS max_streak_start_date,
-    IFNULL(max_streak_end_date, "N/A") AS max_streak_end_date,
-    IFNULL(current_streak, 0) AS current_streak,
-    IFNULL(current_streak_start_date, "N/A") AS current_streak_start_date,
-    IFNULL(current_streak_end_date, "N/A") AS current_streak_end_date
-FROM max_streak
-LEFT JOIN current_streak ON
-    current_streak.user_id = max_streak.user_id
-    AND current_streak.window = max_streak.window;
-
---------------------------------
-------- Document Stats ---------
---------------------------------
-
-CREATE VIEW IF NOT EXISTS view_document_user_statistics AS
-
-WITH intermediate_ga AS (
-    SELECT
-        ga1.id AS row_id,
-        ga1.user_id,
-        ga1.document_id,
-        ga1.duration,
-        ga1.start_time,
-        ga1.start_percentage,
-        ga1.end_percentage,
-
-        -- Find Overlapping Events (Assign Unique ID)
-        (
-            SELECT MIN(id)
-            FROM activity AS ga2
-            WHERE
-                ga1.document_id = ga2.document_id
-                AND ga1.user_id = ga2.user_id
-                AND ga1.start_percentage <= ga2.end_percentage
-                AND ga1.end_percentage >= ga2.start_percentage
-        ) AS group_leader
-    FROM activity AS ga1
-),
-
-grouped_activity AS (
-    SELECT
-        user_id,
-        document_id,
-        MAX(start_time) AS start_time,
-        MIN(start_percentage) AS start_percentage,
-        MAX(end_percentage) AS end_percentage,
-        MAX(end_percentage) - MIN(start_percentage) AS read_percentage,
-        SUM(duration) AS duration
-    FROM intermediate_ga
-    GROUP BY group_leader
-),
-
-current_progress AS (
-    SELECT
-        user_id,
-        document_id,
-        COALESCE((
-            SELECT percentage
-            FROM document_progress AS dp
-            WHERE
-                dp.user_id = iga.user_id
-                AND dp.document_id = iga.document_id
-            ORDER BY created_at DESC
-            LIMIT 1
-        ), end_percentage) AS percentage
-    FROM intermediate_ga AS iga
-    GROUP BY user_id, document_id
-    HAVING MAX(start_time)
-)
-
-SELECT
-    ga.document_id,
-    ga.user_id,
-    MAX(start_time) AS last_read,
-    SUM(duration) AS total_time_seconds,
-    SUM(read_percentage) AS read_percentage,
-    cp.percentage,
-
-    (CAST(COALESCE(d.words, 0.0) AS REAL) * SUM(read_percentage))
-        AS words_read,
-
-    (CAST(COALESCE(d.words, 0.0) AS REAL) * SUM(read_percentage))
-    / (SUM(duration) / 60.0) AS wpm
-FROM grouped_activity AS ga
-INNER JOIN
-    current_progress AS cp
-    ON ga.user_id = cp.user_id AND ga.document_id = cp.document_id
-INNER JOIN
-    documents AS d
-    ON d.id = ga.document_id
-GROUP BY ga.document_id, ga.user_id
-ORDER BY wpm DESC;
-
----------------------------------------------------------------
------------------- Populate Temporary Tables ------------------
----------------------------------------------------------------
-INSERT INTO user_streaks SELECT * FROM view_user_streaks;
-INSERT INTO document_user_statistics SELECT * FROM view_document_user_statistics;
-
 ---------------------------------------------------------------
 --------------------------- Triggers --------------------------
 ---------------------------------------------------------------
@@ -375,3 +190,11 @@ UPDATE documents
 SET updated_at = STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now')
 WHERE id = old.id;
 END;
+
+-- Delete User
+CREATE TRIGGER IF NOT EXISTS user_deleted
+    BEFORE DELETE ON users BEGIN
+    DELETE FROM activity WHERE activity.user_id=OLD.id;
+    DELETE FROM devices WHERE devices.user_id=OLD.id;
+    DELETE FROM document_progress WHERE document_progress.user_id=OLD.id;
+END;

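This trigger is what lets the new DeleteUser query earlier in this diff stay a single statement: the user's activity, device, and progress rows are removed with the user, while document rows remain shared. A usage sketch under the same assumptions as the earlier examples:

    package docs

    import (
        "context"
        "fmt"

        "reichard.io/antholume/database" // assumed import path
    )

    func removeUser(ctx context.Context, q *database.Queries, id string) error {
        n, err := q.DeleteUser(ctx, id) // fires the user_deleted trigger
        if err != nil {
            return err
        }
        if n == 0 {
            return fmt.Errorf("user %q not found", id)
        }
        return nil
    }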
@@ -1,77 +0,0 @@
-INSERT INTO document_user_statistics
-WITH intermediate_ga AS (
-    SELECT
-        ga1.id AS row_id,
-        ga1.user_id,
-        ga1.document_id,
-        ga1.duration,
-        ga1.start_time,
-        ga1.start_percentage,
-        ga1.end_percentage,
-
-        -- Find Overlapping Events (Assign Unique ID)
-        (
-            SELECT MIN(id)
-            FROM activity AS ga2
-            WHERE
-                ga1.document_id = ga2.document_id
-                AND ga1.user_id = ga2.user_id
-                AND ga1.start_percentage <= ga2.end_percentage
-                AND ga1.end_percentage >= ga2.start_percentage
-        ) AS group_leader
-    FROM activity AS ga1
-    WHERE
-        document_id = ?
-        AND user_id = ?
-),
-grouped_activity AS (
-    SELECT
-        user_id,
-        document_id,
-        MAX(start_time) AS start_time,
-        MIN(start_percentage) AS start_percentage,
-        MAX(end_percentage) AS end_percentage,
-        MAX(end_percentage) - MIN(start_percentage) AS read_percentage,
-        SUM(duration) AS duration
-    FROM intermediate_ga
-    GROUP BY group_leader
-),
-current_progress AS (
-    SELECT
-        user_id,
-        document_id,
-        COALESCE((
-            SELECT percentage
-            FROM document_progress AS dp
-            WHERE
-                dp.user_id = iga.user_id
-                AND dp.document_id = iga.document_id
-            ORDER BY created_at DESC
-            LIMIT 1
-        ), end_percentage) AS percentage
-    FROM intermediate_ga AS iga
-    GROUP BY user_id, document_id
-    HAVING MAX(start_time)
-)
-SELECT
-    ga.document_id,
-    ga.user_id,
-    MAX(start_time) AS last_read,
-    SUM(duration) AS total_time_seconds,
-    SUM(read_percentage) AS read_percentage,
-    cp.percentage,
-
-    (CAST(COALESCE(d.words, 0.0) AS REAL) * SUM(read_percentage))
-    AS words_read,
-
-    (CAST(COALESCE(d.words, 0.0) AS REAL) * SUM(read_percentage))
-    / (SUM(duration) / 60.0) AS wpm
-FROM grouped_activity AS ga
-INNER JOIN
-    current_progress AS cp
-    ON ga.user_id = cp.user_id AND ga.document_id = cp.document_id
-INNER JOIN
-    documents AS d
-    ON d.id = ga.document_id
-GROUP BY ga.document_id, ga.user_id
-ORDER BY wpm DESC;
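Note on the removed query above: the group_leader subquery tags every activity row with the smallest id among rows whose [start_percentage, end_percentage] range intersects its own, and GROUP BY group_leader then collapses overlapping reads so re-read pages are not double counted. A rough standalone Go sketch of the same merging idea (a sort-and-sweep rather than the SQL's pairwise tagging; all names below are illustrative, not from the repository):

package main

import (
	"fmt"
	"sort"
)

// span mirrors an activity row's read range (fractions of the document).
type span struct{ start, end float64 }

// mergeOverlapping collapses intersecting spans, analogous to GROUP BY group_leader.
func mergeOverlapping(spans []span) []span {
	if len(spans) == 0 {
		return nil
	}
	sort.Slice(spans, func(i, j int) bool { return spans[i].start < spans[j].start })
	merged := []span{spans[0]}
	for _, s := range spans[1:] {
		last := &merged[len(merged)-1]
		if s.start <= last.end { // ranges touch or overlap: widen the current window
			if s.end > last.end {
				last.end = s.end
			}
		} else {
			merged = append(merged, s)
		}
	}
	return merged
}

func main() {
	got := mergeOverlapping([]span{{0.10, 0.20}, {0.15, 0.30}, {0.50, 0.60}})
	fmt.Println(got) // [{0.1 0.3} {0.5 0.6}]
}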
@@ -1,6 +0,0 @@
-DELETE FROM user_streaks;
-INSERT INTO user_streaks SELECT * FROM view_user_streaks;
-DELETE FROM document_user_statistics;
-INSERT INTO document_user_statistics
-SELECT *
-FROM view_document_user_statistics;
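The two removed statements above were the old refresh path: wipe both cached tables and rebuild them wholesale from the views. The new database/user_streaks.sql below instead recomputes only users flagged as outdated. For context, the test suite later in this diff drives the rebuild through DBManager.CacheTempTables; a hedged sketch of a periodic caller follows (the interval, logging, and package paths are assumptions, not part of this diff):

package main

import (
	"context"
	"log"
	"time"

	"reichard.io/antholume/config"   // path assumed from the module rename in go.mod
	"reichard.io/antholume/database" // path assumed from the module rename in go.mod
)

// refreshLoop rebuilds the cached statistics tables on a fixed schedule.
func refreshLoop(ctx context.Context, dbm *database.DBManager) {
	ticker := time.NewTicker(15 * time.Minute) // interval is an assumption
	defer ticker.Stop()
	for {
		select {
		case <-ctx.Done():
			return
		case <-ticker.C:
			if err := dbm.CacheTempTables(ctx); err != nil {
				log.Printf("cache refresh failed: %v", err)
			}
		}
	}
}

func main() {
	cfg := config.Config{DBType: "memory"}
	refreshLoop(context.Background(), database.NewMgr(&cfg))
}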
154
database/user_streaks.sql
Normal file
@@ -0,0 +1,154 @@
+WITH updated_users AS (
+    SELECT a.user_id
+    FROM activity AS a
+    LEFT JOIN users AS u ON u.id = a.user_id
+    LEFT JOIN user_streaks AS s ON a.user_id = s.user_id AND s.window = 'DAY'
+    WHERE
+        a.created_at > COALESCE(s.last_seen, '1970-01-01')
+        AND LOCAL_DATE(s.last_record, u.timezone) != LOCAL_DATE(a.start_time, u.timezone)
+    GROUP BY a.user_id
+),
+
+outdated_users AS (
+    SELECT
+        a.user_id,
+        u.timezone AS last_timezone,
+        MAX(a.created_at) AS last_seen,
+        MAX(a.start_time) AS last_record,
+        STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now') AS last_calculated
+    FROM activity AS a
+    LEFT JOIN users AS u ON u.id = a.user_id
+    LEFT JOIN user_streaks AS s ON a.user_id = s.user_id AND s.window = 'DAY'
+    GROUP BY a.user_id
+    HAVING
+        -- User Changed Timezones
+        s.last_timezone != u.timezone
+
+        -- Users Date Changed
+        OR LOCAL_DATE(COALESCE(s.last_calculated, '1970-01-01T00:00:00Z'), u.timezone) !=
+        LOCAL_DATE(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), u.timezone)
+
+        -- User Added New Data
+        OR a.user_id IN updated_users
+),
+
+document_windows AS (
+    SELECT
+        activity.user_id,
+        users.timezone,
+        DATE(
+            LOCAL_DATE(activity.start_time, users.timezone),
+            'weekday 0', '-7 day'
+        ) AS weekly_read,
+        LOCAL_DATE(activity.start_time, users.timezone) AS daily_read
+    FROM activity
+    INNER JOIN outdated_users ON outdated_users.user_id = activity.user_id
+    LEFT JOIN users ON users.id = activity.user_id
+    GROUP BY activity.user_id, weekly_read, daily_read
+),
+
+weekly_partitions AS (
+    SELECT
+        user_id,
+        timezone,
+        'WEEK' AS "window",
+        weekly_read AS read_window,
+        ROW_NUMBER() OVER (
+            PARTITION BY user_id ORDER BY weekly_read DESC
+        ) AS seqnum
+    FROM document_windows
+    GROUP BY user_id, weekly_read
+),
+
+daily_partitions AS (
+    SELECT
+        user_id,
+        timezone,
+        'DAY' AS "window",
+        daily_read AS read_window,
+        ROW_NUMBER() OVER (
+            PARTITION BY user_id ORDER BY daily_read DESC
+        ) AS seqnum
+    FROM document_windows
+    GROUP BY user_id, daily_read
+),
+
+streaks AS (
+    SELECT
+        COUNT(*) AS streak,
+        MIN(read_window) AS start_date,
+        MAX(read_window) AS end_date,
+        window,
+        user_id,
+        timezone
+    FROM daily_partitions
+    GROUP BY
+        timezone,
+        user_id,
+        DATE(read_window, '+' || seqnum || ' day')
+
+    UNION ALL
+
+    SELECT
+        COUNT(*) AS streak,
+        MIN(read_window) AS start_date,
+        MAX(read_window) AS end_date,
+        window,
+        user_id,
+        timezone
+    FROM weekly_partitions
+    GROUP BY
+        timezone,
+        user_id,
+        DATE(read_window, '+' || (seqnum * 7) || ' day')
+),
+
+max_streak AS (
+    SELECT
+        MAX(streak) AS max_streak,
+        start_date AS max_streak_start_date,
+        end_date AS max_streak_end_date,
+        window,
+        user_id
+    FROM streaks
+    GROUP BY user_id, window
+),
+
+current_streak AS (
+    SELECT
+        streak AS current_streak,
+        start_date AS current_streak_start_date,
+        end_date AS current_streak_end_date,
+        window,
+        user_id
+    FROM streaks
+    WHERE CASE
+        WHEN window = "WEEK" THEN
+            DATE(LOCAL_DATE(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone), 'weekday 0', '-14 day') = current_streak_end_date
+            OR DATE(LOCAL_DATE(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone), 'weekday 0', '-7 day') = current_streak_end_date
+        WHEN window = "DAY" THEN
+            DATE(LOCAL_DATE(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone), '-1 day') = current_streak_end_date
+            OR DATE(LOCAL_DATE(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone)) = current_streak_end_date
+    END
+    GROUP BY user_id, window
+)
+
+INSERT INTO user_streaks
+SELECT
+    max_streak.user_id,
+    max_streak.window,
+    IFNULL(max_streak, 0) AS max_streak,
+    IFNULL(max_streak_start_date, "N/A") AS max_streak_start_date,
+    IFNULL(max_streak_end_date, "N/A") AS max_streak_end_date,
+    IFNULL(current_streak.current_streak, 0) AS current_streak,
+    IFNULL(current_streak.current_streak_start_date, "N/A") AS current_streak_start_date,
+    IFNULL(current_streak.current_streak_end_date, "N/A") AS current_streak_end_date,
+    outdated_users.last_timezone AS last_timezone,
+    outdated_users.last_seen AS last_seen,
+    outdated_users.last_record AS last_record,
+    outdated_users.last_calculated AS last_calculated
+FROM max_streak
+JOIN outdated_users ON max_streak.user_id = outdated_users.user_id
+LEFT JOIN current_streak ON
+    current_streak.user_id = max_streak.user_id
+    AND current_streak.window = max_streak.window;
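The streaks CTE above is the standard gaps-and-islands construction: with each user's read dates numbered in descending order, DATE(read_window, '+' || seqnum || ' day') stays constant across a run of consecutive days, so grouping on it yields one row per streak. The same identity in a small self-contained Go program (dates and function names are illustrative only):

package main

import (
	"fmt"
	"time"
)

// longestStreak groups consecutive days the way the SQL does:
// for days sorted descending, day + seqnum lands on the same bucket within a run.
func longestStreak(days []time.Time) int {
	runs := map[string]int{} // bucket -> run length
	best := 0
	for i, d := range days { // days must be sorted descending, one per calendar day
		bucket := d.AddDate(0, 0, i+1).Format("2006-01-02")
		runs[bucket]++
		if runs[bucket] > best {
			best = runs[bucket]
		}
	}
	return best
}

func main() {
	mk := func(s string) time.Time {
		t, _ := time.Parse("2006-01-02", s)
		return t
	}
	// A 3-day run, then a gap, then a 1-day run -> longest streak is 3.
	days := []time.Time{mk("2024-05-03"), mk("2024-05-02"), mk("2024-05-01"), mk("2024-04-28")}
	fmt.Println(longestStreak(days)) // 3
}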
205
database/users_test.go
Normal file
@@ -0,0 +1,205 @@
+package database
+
+import (
+	"context"
+	"database/sql"
+	"fmt"
+	"testing"
+	"time"
+
+	"github.com/stretchr/testify/suite"
+
+	"reichard.io/antholume/config"
+	"reichard.io/antholume/utils"
+)
+
+var (
+	testUserID   string = "testUser"
+	testUserPass string = "testPass"
+)
+
+type UsersTestSuite struct {
+	suite.Suite
+	dbm *DBManager
+}
+
+func TestUsers(t *testing.T) {
+	suite.Run(t, new(UsersTestSuite))
+}
+
+func (suite *UsersTestSuite) SetupTest() {
+	cfg := config.Config{
+		DBType: "memory",
+	}
+
+	suite.dbm = NewMgr(&cfg)
+
+	// Create User
+	rawAuthHash, _ := utils.GenerateToken(64)
+	authHash := fmt.Sprintf("%x", rawAuthHash)
+	_, err := suite.dbm.Queries.CreateUser(context.Background(), CreateUserParams{
+		ID:       testUserID,
+		Pass:     &testUserPass,
+		AuthHash: &authHash,
+	})
+	suite.NoError(err)
+
+	// Create Document
+	_, err = suite.dbm.Queries.UpsertDocument(context.Background(), UpsertDocumentParams{
+		ID:     documentID,
+		Title:  &documentTitle,
+		Author: &documentAuthor,
+		Words:  &documentWords,
+	})
+	suite.NoError(err)
+
+	// Create Device
+	_, err = suite.dbm.Queries.UpsertDevice(context.Background(), UpsertDeviceParams{
+		ID:         deviceID,
+		UserID:     testUserID,
+		DeviceName: deviceName,
+	})
+	suite.NoError(err)
+}
+
+func (suite *UsersTestSuite) TestGetUser() {
+	user, err := suite.dbm.Queries.GetUser(context.Background(), testUserID)
+	suite.Nil(err, "should have nil err")
+	suite.Equal(testUserPass, *user.Pass)
+}
+
+func (suite *UsersTestSuite) TestCreateUser() {
+	testUser := "user1"
+	testPass := "pass1"
+
+	// Generate Auth Hash
+	rawAuthHash, err := utils.GenerateToken(64)
+	suite.Nil(err, "should have nil err")
+
+	authHash := fmt.Sprintf("%x", rawAuthHash)
+	changed, err := suite.dbm.Queries.CreateUser(context.Background(), CreateUserParams{
+		ID:       testUser,
+		Pass:     &testPass,
+		AuthHash: &authHash,
+	})
+
+	suite.Nil(err, "should have nil err")
+	suite.Equal(int64(1), changed)
+
+	user, err := suite.dbm.Queries.GetUser(context.Background(), testUser)
+	suite.Nil(err, "should have nil err")
+	suite.Equal(testPass, *user.Pass)
+}
+
+func (suite *UsersTestSuite) TestDeleteUser() {
+	changed, err := suite.dbm.Queries.DeleteUser(context.Background(), testUserID)
+	suite.Nil(err, "should have nil err")
+	suite.Equal(int64(1), changed, "should have one changed row")
+
+	_, err = suite.dbm.Queries.GetUser(context.Background(), testUserID)
+	suite.ErrorIs(err, sql.ErrNoRows, "should have no rows error")
+}
+
+func (suite *UsersTestSuite) TestGetUsers() {
+	users, err := suite.dbm.Queries.GetUsers(context.Background())
+	suite.Nil(err, "should have nil err")
+	suite.Len(users, 1, "should have single user")
+}
+
+func (suite *UsersTestSuite) TestUpdateUser() {
+	newPassword := "newPass123"
+	user, err := suite.dbm.Queries.UpdateUser(context.Background(), UpdateUserParams{
+		UserID:   testUserID,
+		Password: &newPassword,
+	})
+	suite.Nil(err, "should have nil err")
+	suite.Equal(newPassword, *user.Pass, "should have new password")
+}
+
+func (suite *UsersTestSuite) TestGetUserStatistics() {
+	err := suite.dbm.CacheTempTables(context.Background())
+	suite.NoError(err)
+
+	// Ensure Zero Items
+	userStats, err := suite.dbm.Queries.GetUserStatistics(context.Background())
+	suite.Nil(err, "should have nil err")
+	suite.Empty(userStats, "should be empty")
+
+	// Create Activity
+	end := time.Now()
+	start := end.AddDate(0, 0, -9)
+	var counter int64 = 0
+
+	for d := start; d.After(end) == false; d = d.AddDate(0, 0, 1) {
+		counter += 1
+
+		// Add Item
+		activity, err := suite.dbm.Queries.AddActivity(context.Background(), AddActivityParams{
+			DocumentID:      documentID,
+			DeviceID:        deviceID,
+			UserID:          testUserID,
+			StartTime:       d.UTC().Format(time.RFC3339),
+			Duration:        60,
+			StartPercentage: float64(counter) / 100.0,
+			EndPercentage:   float64(counter+1) / 100.0,
+		})
+
+		suite.Nil(err, fmt.Sprintf("[%d] should have nil err for add activity", counter))
+		suite.Equal(counter, activity.ID, fmt.Sprintf("[%d] should have correct id for add activity", counter))
+	}
+
+	err = suite.dbm.CacheTempTables(context.Background())
+	suite.NoError(err)
+
+	// Ensure One Item
+	userStats, err = suite.dbm.Queries.GetUserStatistics(context.Background())
+	suite.Nil(err, "should have nil err")
+	suite.Len(userStats, 1, "should have length of one")
+}
+
+func (suite *UsersTestSuite) TestGetUsersStreaks() {
+	err := suite.dbm.CacheTempTables(context.Background())
+	suite.NoError(err)
+
+	// Ensure Zero Items
+	userStats, err := suite.dbm.Queries.GetUserStreaks(context.Background(), testUserID)
+	suite.Nil(err, "should have nil err")
+	suite.Empty(userStats, "should be empty")
+
+	// Create Activity
+	end := time.Now()
+	start := end.AddDate(0, 0, -9)
+	var counter int64 = 0
+
+	for d := start; d.After(end) == false; d = d.AddDate(0, 0, 1) {
+		counter += 1
+
+		// Add Item
+		activity, err := suite.dbm.Queries.AddActivity(context.Background(), AddActivityParams{
+			DocumentID:      documentID,
+			DeviceID:        deviceID,
+			UserID:          testUserID,
+			StartTime:       d.UTC().Format(time.RFC3339),
+			Duration:        60,
+			StartPercentage: float64(counter) / 100.0,
+			EndPercentage:   float64(counter+1) / 100.0,
+		})
+
+		suite.Nil(err, fmt.Sprintf("[%d] should have nil err for add activity", counter))
+		suite.Equal(counter, activity.ID, fmt.Sprintf("[%d] should have correct id for add activity", counter))
+	}
+
+	err = suite.dbm.CacheTempTables(context.Background())
+	suite.NoError(err)
+
+	// Ensure Two Items
+	userStats, err = suite.dbm.Queries.GetUserStreaks(context.Background(), testUserID)
+	suite.Nil(err, "should have nil err")
+	suite.Len(userStats, 2, "should have length of two")
+
+	// Ensure Streak Stats
+	dayStats := userStats[0]
+	weekStats := userStats[1]
+	suite.Equal(int64(10), dayStats.CurrentStreak, "should be 10 days")
+	suite.Greater(weekStats.CurrentStreak, int64(1), "should be 2 or 3")
+}
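Since the suite hooks into go test through the single TestUsers entry point, it can be run on its own with the standard tooling, e.g. go test ./database -run TestUsers -v (the package path is assumed from the file's location in this diff).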
61
flake.lock
generated
Normal file
@@ -0,0 +1,61 @@
+{
+  "nodes": {
+    "flake-utils": {
+      "inputs": {
+        "systems": "systems"
+      },
+      "locked": {
+        "lastModified": 1731533236,
+        "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
+        "owner": "numtide",
+        "repo": "flake-utils",
+        "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
+        "type": "github"
+      },
+      "original": {
+        "owner": "numtide",
+        "repo": "flake-utils",
+        "type": "github"
+      }
+    },
+    "nixpkgs": {
+      "locked": {
+        "lastModified": 1764522689,
+        "narHash": "sha256-SqUuBFjhl/kpDiVaKLQBoD8TLD+/cTUzzgVFoaHrkqY=",
+        "owner": "NixOS",
+        "repo": "nixpkgs",
+        "rev": "8bb5646e0bed5dbd3ab08c7a7cc15b75ab4e1d0f",
+        "type": "github"
+      },
+      "original": {
+        "owner": "NixOS",
+        "ref": "nixos-25.11",
+        "repo": "nixpkgs",
+        "type": "github"
+      }
+    },
+    "root": {
+      "inputs": {
+        "flake-utils": "flake-utils",
+        "nixpkgs": "nixpkgs"
+      }
+    },
+    "systems": {
+      "locked": {
+        "lastModified": 1681028828,
+        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
+        "owner": "nix-systems",
+        "repo": "default",
+        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
+        "type": "github"
+      },
+      "original": {
+        "owner": "nix-systems",
+        "repo": "default",
+        "type": "github"
+      }
+    }
+  },
+  "root": "root",
+  "version": 7
+}
36
flake.nix
Normal file
@@ -0,0 +1,36 @@
+{
+  description = "Development Environment";
+
+  inputs = {
+    nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.11";
+    flake-utils.url = "github:numtide/flake-utils";
+  };
+
+  outputs =
+    { self
+    , nixpkgs
+    , flake-utils
+    ,
+    }:
+    flake-utils.lib.eachDefaultSystem (
+      system:
+      let
+        pkgs = nixpkgs.legacyPackages.${system};
+      in
+      {
+        devShells.default = pkgs.mkShell {
+          packages = with pkgs; [
+            go
+            gopls
+            golangci-lint
+            nodejs
+            tailwindcss
+            python311Packages.grip
+          ];
+          shellHook = ''
+            export PATH=$PATH:~/go/bin
+          '';
+        };
+      }
+    );
+}
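With this flake in place, running nix develop should open a shell providing go, gopls, golangci-lint, nodejs, tailwindcss and grip, with ~/go/bin appended to PATH by the shellHook.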
114
go.mod
@@ -1,70 +1,86 @@
-module reichard.io/bbank
+module reichard.io/antholume
 
-go 1.19
+go 1.24
 
 require (
-	github.com/PuerkitoBio/goquery v1.8.1
+	github.com/PuerkitoBio/goquery v1.10.3
-	github.com/alexedwards/argon2id v0.0.0-20230305115115-4b3c3280a736
+	github.com/alexedwards/argon2id v1.0.0
-	github.com/gabriel-vasile/mimetype v1.4.2
+	github.com/gabriel-vasile/mimetype v1.4.9
-	github.com/gin-contrib/multitemplate v0.0.0-20230212012517-45920c92c271
+	github.com/gin-contrib/multitemplate v1.1.1
-	github.com/gin-contrib/sessions v0.0.4
+	github.com/gin-contrib/sessions v1.0.4
-	github.com/gin-gonic/gin v1.9.1
+	github.com/gin-gonic/gin v1.10.1
-	github.com/microcosm-cc/bluemonday v1.0.25
+	github.com/itchyny/gojq v0.12.17
+	github.com/jarcoal/httpmock v1.3.1
+	github.com/microcosm-cc/bluemonday v1.0.27
+	github.com/pkg/errors v0.9.1
+	github.com/pressly/goose/v3 v3.24.3
 	github.com/sirupsen/logrus v1.9.3
-	github.com/taylorskalyo/goreader v0.0.0-20230626212555-e7f5644f8115
+	github.com/stretchr/testify v1.10.0
-	github.com/urfave/cli/v2 v2.25.7
+	github.com/taylorskalyo/goreader v1.0.1
-	golang.org/x/exp v0.0.0-20230905200255-921286631fa9
+	github.com/urfave/cli/v2 v2.27.7
-	golang.org/x/net v0.15.0
+	golang.org/x/exp v0.0.0-20250718183923-645b1fa84792
-	modernc.org/sqlite v1.26.0
+	gopkg.in/natefinch/lumberjack.v2 v2.2.1
+	modernc.org/sqlite v1.38.2
 )
 
 require (
-	github.com/andybalholm/cascadia v1.3.1 // indirect
+	github.com/andybalholm/cascadia v1.3.3 // indirect
 	github.com/aymerick/douceur v0.2.0 // indirect
-	github.com/bytedance/sonic v1.10.0 // indirect
+	github.com/bytedance/sonic v1.14.0 // indirect
+	github.com/bytedance/sonic/loader v0.3.0 // indirect
 	github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect
-	github.com/chenzhuoyu/iasm v0.9.0 // indirect
+	github.com/chenzhuoyu/iasm v0.9.1 // indirect
-	github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect
+	github.com/cloudwego/base64x v0.1.6 // indirect
+	github.com/cpuguy83/go-md2man/v2 v2.0.7 // indirect
+	github.com/davecgh/go-spew v1.1.1 // indirect
 	github.com/dustin/go-humanize v1.0.1 // indirect
-	github.com/gin-contrib/sse v0.1.0 // indirect
+	github.com/gin-contrib/sse v1.1.0 // indirect
 	github.com/go-playground/locales v0.14.1 // indirect
 	github.com/go-playground/universal-translator v0.18.1 // indirect
-	github.com/go-playground/validator/v10 v10.15.3 // indirect
+	github.com/go-playground/validator/v10 v10.27.0 // indirect
-	github.com/goccy/go-json v0.10.2 // indirect
+	github.com/goccy/go-json v0.10.5 // indirect
-	github.com/google/uuid v1.3.0 // indirect
+	github.com/google/uuid v1.6.0 // indirect
-	github.com/gorilla/context v1.1.1 // indirect
+	github.com/gorilla/context v1.1.2 // indirect
-	github.com/gorilla/css v1.0.0 // indirect
+	github.com/gorilla/css v1.0.1 // indirect
-	github.com/gorilla/securecookie v1.1.1 // indirect
+	github.com/gorilla/securecookie v1.1.2 // indirect
-	github.com/gorilla/sessions v1.2.1 // indirect
+	github.com/gorilla/sessions v1.4.0 // indirect
+	github.com/itchyny/timefmt-go v0.1.6 // indirect
 	github.com/json-iterator/go v1.1.12 // indirect
 	github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect
-	github.com/klauspost/cpuid/v2 v2.2.5 // indirect
+	github.com/klauspost/cpuid/v2 v2.3.0 // indirect
-	github.com/leodido/go-urn v1.2.4 // indirect
+	github.com/leodido/go-urn v1.4.0 // indirect
-	github.com/mattn/go-isatty v0.0.19 // indirect
+	github.com/mattn/go-isatty v0.0.20 // indirect
-	github.com/mattn/go-sqlite3 v1.14.17 // indirect
+	github.com/mfridman/interpolate v0.0.2 // indirect
 	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
 	github.com/modern-go/reflect2 v1.0.2 // indirect
-	github.com/pelletier/go-toml/v2 v2.1.0 // indirect
+	github.com/ncruces/go-strftime v0.1.9 // indirect
+	github.com/pelletier/go-toml/v2 v2.2.4 // indirect
+	github.com/pmezard/go-difflib v1.0.0 // indirect
 	github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
 	github.com/russross/blackfriday/v2 v2.1.0 // indirect
+	github.com/sethvargo/go-retry v0.3.0 // indirect
 	github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
-	github.com/ugorji/go/codec v1.2.11 // indirect
+	github.com/ugorji/go/codec v1.3.0 // indirect
-	github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 // indirect
+	github.com/xrash/smetrics v0.0.0-20250705151800-55b8f293f342 // indirect
-	golang.org/x/arch v0.4.0 // indirect
+	go.uber.org/multierr v1.11.0 // indirect
-	golang.org/x/crypto v0.13.0 // indirect
+	golang.org/x/arch v0.20.0 // indirect
-	golang.org/x/mod v0.12.0 // indirect
+	golang.org/x/crypto v0.41.0 // indirect
-	golang.org/x/sys v0.12.0 // indirect
+	golang.org/x/mod v0.27.0 // indirect
-	golang.org/x/text v0.13.0 // indirect
+	golang.org/x/net v0.43.0 // indirect
-	golang.org/x/tools v0.13.0 // indirect
+	golang.org/x/sync v0.16.0 // indirect
-	google.golang.org/protobuf v1.31.0 // indirect
+	golang.org/x/sys v0.35.0 // indirect
+	golang.org/x/text v0.28.0 // indirect
+	golang.org/x/tools v0.36.0 // indirect
+	google.golang.org/protobuf v1.36.7 // indirect
 	gopkg.in/yaml.v3 v3.0.1 // indirect
-	lukechampine.com/uint128 v1.2.0 // indirect
+	lukechampine.com/uint128 v1.3.0 // indirect
-	modernc.org/cc/v3 v3.40.0 // indirect
+	maragu.dev/gomponents v1.1.0 // indirect
-	modernc.org/ccgo/v3 v3.16.13 // indirect
+	modernc.org/cc/v3 v3.41.0 // indirect
-	modernc.org/libc v1.24.1 // indirect
+	modernc.org/ccgo/v3 v3.17.0 // indirect
-	modernc.org/mathutil v1.5.0 // indirect
+	modernc.org/libc v1.66.6 // indirect
-	modernc.org/memory v1.6.0 // indirect
+	modernc.org/mathutil v1.7.1 // indirect
-	modernc.org/opt v0.1.3 // indirect
+	modernc.org/memory v1.11.0 // indirect
-	modernc.org/strutil v1.1.3 // indirect
+	modernc.org/opt v0.1.4 // indirect
-	modernc.org/token v1.0.1 // indirect
+	modernc.org/strutil v1.2.1 // indirect
+	modernc.org/token v1.1.0 // indirect
 )
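Note the module path change from reichard.io/bbank to reichard.io/antholume: every internal import in the repository has to be rewritten to match. For illustration only (the database subpackage is shown because it appears in this diff; the rename applies to all subpackages):

import "reichard.io/bbank/database"     // before this change
import "reichard.io/antholume/database" // after this change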
478
go.sum
@@ -1,91 +1,199 @@
+github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0=
+github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
+github.com/ClickHouse/ch-go v0.58.2 h1:jSm2szHbT9MCAB1rJ3WuCJqmGLi5UTjlNu+f530UTS0=
+github.com/ClickHouse/ch-go v0.58.2/go.mod h1:Ap/0bEmiLa14gYjCiRkYGbXvbe8vwdrfTYWhsuQ99aw=
+github.com/ClickHouse/ch-go v0.65.1 h1:SLuxmLl5Mjj44/XbINsK2HFvzqup0s6rwKLFH347ZhU=
+github.com/ClickHouse/clickhouse-go/v2 v2.16.0 h1:rhMfnPewXPnY4Q4lQRGdYuTLRBRKJEIEYHtbUMrzmvI=
+github.com/ClickHouse/clickhouse-go/v2 v2.16.0/go.mod h1:J7SPfIxwR+x4mQ+o8MLSe0oY50NNntEqCIjFe/T1VPM=
+github.com/ClickHouse/clickhouse-go/v2 v2.34.0 h1:Y4rqkdrRHgExvC4o/NTbLdY5LFQ3LHS77/RNFxFX3Co=
+github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow=
+github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM=
+github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 h1:TngWCqHvy9oXAN6lEVMRuU21PR1EtLVZJmdB18Gu3Rw=
+github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5/go.mod h1:lmUJ/7eu/Q8D7ML55dXQrVaamCz2vxCfdQBasLZfHKk=
 github.com/PuerkitoBio/goquery v1.8.1 h1:uQxhNlArOIdbrH1tr0UXwdVFgDcZDrZVdcpygAcwmWM=
 github.com/PuerkitoBio/goquery v1.8.1/go.mod h1:Q8ICL1kNUJ2sXGoAhPGUdYDJvgQgHzJsnnd3H7Ho5jQ=
-github.com/alexedwards/argon2id v0.0.0-20230305115115-4b3c3280a736 h1:qZaEtLxnqY5mJ0fVKbk31NVhlgi0yrKm51Pq/I5wcz4=
+github.com/PuerkitoBio/goquery v1.10.3 h1:pFYcNSqHxBD06Fpj/KsbStFRsgRATgnf3LeXiUkhzPo=
-github.com/alexedwards/argon2id v0.0.0-20230305115115-4b3c3280a736/go.mod h1:mTeFRcTdnpzOlRjMoFYC/80HwVUreupyAiqPkCZQOXc=
+github.com/PuerkitoBio/goquery v1.10.3/go.mod h1:tMUX0zDMHXYlAQk6p35XxQMqMweEKB7iK7iLNd4RH4Y=
-github.com/andybalholm/cascadia v1.3.1 h1:nhxRkql1kdYCc8Snf7D5/D3spOX+dBgjA6u8x004T2c=
+github.com/alexedwards/argon2id v1.0.0 h1:wJzDx66hqWX7siL/SRUmgz3F8YMrd/nfX/xHHcQQP0w=
+github.com/alexedwards/argon2id v1.0.0/go.mod h1:tYKkqIjzXvZdzPvADMWOEZ+l6+BD6CtBXMj5fnJppiw=
+github.com/andybalholm/brotli v1.0.6 h1:Yf9fFpf49Zrxb9NlQaluyE92/+X7UVHlhMNJN2sxfOI=
+github.com/andybalholm/brotli v1.0.6/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
+github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA=
 github.com/andybalholm/cascadia v1.3.1/go.mod h1:R4bJ1UQfqADjvDa4P6HZHLh/3OxWWEqc0Sk8XGwHqvA=
-github.com/antonlindstrom/pgstore v0.0.0-20200229204646-b08ebf1105e0/go.mod h1:2Ti6VUHVxpC0VSmTZzEvpzysnaGAfGBOoMIz5ykPyyw=
+github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss=
+github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
+github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM=
+github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA=
 github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk=
 github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
-github.com/boj/redistore v0.0.0-20180917114910-cd5dcc76aeff/go.mod h1:+RTT1BOk5P97fT2CiHkbFQwkK3mjsFAP6zCYV2aXtjw=
-github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b/go.mod h1:H0wQNHz2YrLsuXOZozoeDmnHXkNCRmMW0gwFWDfEZDA=
-github.com/bradleypeabody/gorilla-sessions-memcache v0.0.0-20181103040241-659414f458e1/go.mod h1:dkChI7Tbtx7H1Tj7TqGSZMOeGpMP5gLHtjroHd4agiI=
 github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM=
 github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM=
-github.com/bytedance/sonic v1.10.0 h1:qtNZduETEIWJVIyDl01BeNxur2rW9OwTQ/yBqFRkKEk=
+github.com/bytedance/sonic v1.10.2 h1:GQebETVBxYB7JGWJtLBi07OVzWwt+8dWA00gEVW2ZFE=
-github.com/bytedance/sonic v1.10.0/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
+github.com/bytedance/sonic v1.10.2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
+github.com/bytedance/sonic v1.14.0 h1:/OfKt8HFw0kh2rj8N0F6C/qPGRESq0BbaNZgcNXXzQQ=
+github.com/bytedance/sonic v1.14.0/go.mod h1:WoEbx8WTcFJfzCe0hbmyTGrfjt8PzNEBdxlNUO24NhA=
+github.com/bytedance/sonic/loader v0.3.0 h1:dskwH8edlzNMctoruo8FPTJDF3vLtDT0sXZwvZJyqeA=
+github.com/bytedance/sonic/loader v0.3.0/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI=
+github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM=
+github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
 github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY=
 github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk=
 github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0=
 github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA=
-github.com/chenzhuoyu/iasm v0.9.0 h1:9fhXjVzq5hUy2gkhhgHl95zG2cEAhw9OSGs8toWWAwo=
 github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog=
-github.com/cpuguy83/go-md2man/v2 v2.0.2 h1:p1EgwI/C7NhT0JmVkwCD2ZBK8j4aeHQX2pMHHBfMQ6w=
+github.com/chenzhuoyu/iasm v0.9.1 h1:tUHQJXo3NhBqw6s33wkGn9SP3bvrWLdlVIJ3hQBL7P0=
-github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
+github.com/chenzhuoyu/iasm v0.9.1/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog=
+github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M=
+github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU=
+github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
+github.com/containerd/continuity v0.4.3 h1:6HVkalIp+2u1ZLH1J/pYX2oBVXlJZvh1X1A7bEZ9Su8=
+github.com/containerd/continuity v0.4.3/go.mod h1:F6PTNCKepoxEaXLQp3wDAjygEnImnZ/7o4JzpodfroQ=
+github.com/cpuguy83/go-md2man/v2 v2.0.3 h1:qMCsGGgs+MAzDFyp9LpAe1Lqy/fY/qCovCm0qnXZOBM=
+github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
+github.com/cpuguy83/go-md2man/v2 v2.0.7 h1:zbFlGlXEAKlwXpmvle3d8Oe3YnkKIK4xSRTd3sHPnBo=
+github.com/cpuguy83/go-md2man/v2 v2.0.7/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
 github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/docker/cli v24.0.7+incompatible h1:wa/nIwYFW7BVTGa7SWPVyyXU9lgORqUb1xfI36MSkFg=
+github.com/docker/cli v24.0.7+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
+github.com/docker/docker v24.0.7+incompatible h1:Wo6l37AuwP3JaMnZa226lzVXGA3F9Ig1seQen0cKYlM=
+github.com/docker/docker v24.0.7+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
+github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ=
+github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=
+github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
+github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
 github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
 github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
-github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU=
+github.com/elastic/go-sysinfo v1.11.2 h1:mcm4OSYVMyws6+n2HIVMGkln5HOpo5Ie1ZmbbNn0jg4=
-github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA=
+github.com/elastic/go-sysinfo v1.11.2/go.mod h1:GKqR8bbMK/1ITnez9NIsIfXQr25aLhRJa7AfT8HpBFQ=
-github.com/gin-contrib/multitemplate v0.0.0-20230212012517-45920c92c271 h1:s+boMV47gwTyff2PL+k6V33edJpp+K5y3QPzZlRhno8=
+github.com/elastic/go-sysinfo v1.15.3 h1:W+RnmhKFkqPTCRoFq2VCTmsT4p/fwpo+3gKNQsn1XU0=
-github.com/gin-contrib/multitemplate v0.0.0-20230212012517-45920c92c271/go.mod h1:XLLtIXoP9+9zGcEDc7gAGV3AksGPO+vzv4kXHMJSdU0=
+github.com/elastic/go-windows v1.0.1 h1:AlYZOldA+UJ0/2nBuqWdo90GFCgG9xuyw9SYzGUtJm0=
-github.com/gin-contrib/sessions v0.0.4 h1:gq4fNa1Zmp564iHP5G6EBuktilEos8VKhe2sza1KMgo=
+github.com/elastic/go-windows v1.0.1/go.mod h1:FoVvqWSun28vaDQPbj2Elfc0JahhPB7WQEGa3c814Ss=
-github.com/gin-contrib/sessions v0.0.4/go.mod h1:pQ3sIyviBBGcxgyR8mkeJuXbeV3h3NYmhJADQTq5+Vo=
+github.com/elastic/go-windows v1.0.2 h1:yoLLsAsV5cfg9FLhZ9EXZ2n2sQFKeDYrHenkcivY4vI=
+github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
+github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
+github.com/gabriel-vasile/mimetype v1.4.9 h1:5k+WDwEsD9eTLL8Tz3L0VnmVh9QxGjRmjBvAG7U/oYY=
+github.com/gabriel-vasile/mimetype v1.4.9/go.mod h1:WnSQhFKJuBlRyLiKohA/2DtIlPFAbguNaG7QCHcyGok=
+github.com/gin-contrib/multitemplate v0.0.0-20231230012943-32b233489a81 h1:hQ/WeoPMTbN8NHk5i96dWy3D4uF7yCU+kORyWG+P4oU=
+github.com/gin-contrib/multitemplate v0.0.0-20231230012943-32b233489a81/go.mod h1:XLLtIXoP9+9zGcEDc7gAGV3AksGPO+vzv4kXHMJSdU0=
+github.com/gin-contrib/multitemplate v1.1.1 h1:uzhT/ZWS9nBd1h6P+AaxWaVSVAJRAcKH4yafrBU8sPc=
+github.com/gin-contrib/multitemplate v1.1.1/go.mod h1:1Sa4984P8+x87U0cg5yWxK4jpbK1cXMYegUCZK6XT/M=
+github.com/gin-contrib/sessions v0.0.5 h1:CATtfHmLMQrMNpJRgzjWXD7worTh7g7ritsQfmF+0jE=
+github.com/gin-contrib/sessions v0.0.5/go.mod h1:vYAuaUPqie3WUSsft6HUlCjlwwoJQs97miaG2+7neKY=
+github.com/gin-contrib/sessions v1.0.4 h1:ha6CNdpYiTOK/hTp05miJLbpTSNfOnFg5Jm2kbcqy8U=
+github.com/gin-contrib/sessions v1.0.4/go.mod h1:ccmkrb2z6iU2osiAHZG3x3J4suJK+OU27oqzlWOqQgs=
 github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
 github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
-github.com/gin-gonic/gin v1.7.4/go.mod h1:jD2toBW3GZUr5UMcdrwQA10I7RuaFOl/SGeDjXkfUtY=
+github.com/gin-contrib/sse v1.1.0 h1:n0w2GMuUpWDVp7qSpvze6fAu9iRxJY4Hmj6AmBOU05w=
+github.com/gin-contrib/sse v1.1.0/go.mod h1:hxRZ5gVpWMT7Z0B0gSNYqqsSCNIJMjzvm6fqCz9vjwM=
 github.com/gin-gonic/gin v1.8.1/go.mod h1:ji8BvRH1azfM+SYow9zQ6SZMvR8qOMZHmsCuWR9tTTk=
 github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg=
 github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU=
-github.com/globalsign/mgo v0.0.0-20181015135952-eeefdecb41b8/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q=
+github.com/gin-gonic/gin v1.10.1 h1:T0ujvqyCSqRopADpgPgiTT63DUQVSfojyME59Ei63pQ=
+github.com/gin-gonic/gin v1.10.1/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
+github.com/go-faster/city v1.0.1 h1:4WAxSZ3V2Ws4QRDrscLEDcibJY8uf41H6AhXDrNDcGw=
+github.com/go-faster/city v1.0.1/go.mod h1:jKcUJId49qdW3L1qKHH/3wPeUstCVpVSXTM6vO3VcTw=
+github.com/go-faster/errors v0.6.1 h1:nNIPOBkprlKzkThvS/0YaX8Zs9KewLCOSFQS5BU06FI=
+github.com/go-faster/errors v0.6.1/go.mod h1:5MGV2/2T9yvlrbhe9pD9LO5Z/2zCSq2T8j+Jpi2LAyY=
+github.com/go-faster/errors v0.7.1 h1:MkJTnDoEdi9pDabt1dpWf7AA8/BaSYZqibYyhZ20AYg=
 github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
 github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
-github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8=
+github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
 github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs=
 github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
 github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
-github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA=
 github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA=
 github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
 github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
-github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4=
 github.com/go-playground/validator/v10 v10.10.0/go.mod h1:74x4gJWsvQexRdW8Pn3dXSGrTK4nAUsbPlLADvpJkos=
-github.com/go-playground/validator/v10 v10.15.3 h1:S+sSpunYjNPDuXkWbK+x+bA7iXiW296KG4dL3X7xUZo=
+github.com/go-playground/validator/v10 v10.17.0 h1:SmVVlfAOtlZncTxRuinDPomC2DkXJ4E5T9gDA0AIH74=
-github.com/go-playground/validator/v10 v10.15.3/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
+github.com/go-playground/validator/v10 v10.17.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
+github.com/go-playground/validator/v10 v10.27.0 h1:w8+XrWVMhGkxOaaowyKH35gFydVHOvC0/uWoy2Fzwn4=
+github.com/go-playground/validator/v10 v10.27.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo=
+github.com/go-sql-driver/mysql v1.7.1 h1:lUIinVbN1DY0xBg0eMOzmmtGoHwWBbvnWubQUrtU8EI=
+github.com/go-sql-driver/mysql v1.7.1/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI=
+github.com/go-sql-driver/mysql v1.9.2 h1:4cNKDYQ1I84SXslGddlsrMhc8k4LeDVj6Ad6WRjiHuU=
 github.com/goccy/go-json v0.9.7/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
 github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
 github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
-github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
+github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
+github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
+github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
+github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
+github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg=
+github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
+github.com/golang-jwt/jwt/v4 v4.5.2 h1:YtQM7lnr8iZ+j5q71MGKkNw9Mn7AjHM68uc9g5fXeUI=
+github.com/golang/protobuf v1.5.0 h1:LUVKkCeviFUMKqHa4tXIIij/lbhnMbP7Fn5wKdKkRh4=
 github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
-github.com/gomodule/redigo v2.0.0+incompatible/go.mod h1:B4C85qUVwatsJoIUNIfCRsp7qO0iAmpGFZ4EELWSbC4=
+github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg=
+github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
 github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
+github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
 github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
+github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
+github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
 github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26 h1:Xim43kblpZXfIBQsbuBVKCudVG457BR2GZFIz3uw3hQ=
-github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
+github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26/go.mod h1:dDKJzRmX4S37WGHujM7tX//fmj1uioxKzKxz3lo4HJo=
-github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs=
-github.com/gorilla/context v1.1.1 h1:AWwleXJkX/nhcU9bZSnZoi3h/qGYqQAGhq6zZe/aQW8=
+github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4=
-github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
+github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ=
-github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY=
+github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
-github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c=
+github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
-github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ=
+github.com/gorilla/context v1.1.2 h1:WRkNAv2uoa03QNIc1A6u4O7DAGMUVoopZhkiXWA2V1o=
-github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4=
+github.com/gorilla/context v1.1.2/go.mod h1:KDPwT9i/MeWHiLl90fuTgrt4/wPcv75vFAZLaOOcbxM=
-github.com/gorilla/sessions v1.1.1/go.mod h1:8KCfur6+4Mqcc6S0FEfKuN15Vl5MgXW92AE8ovaJD0w=
+github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8=
-github.com/gorilla/sessions v1.2.0/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=
+github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0=
-github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7FsgI=
+github.com/gorilla/securecookie v1.1.2 h1:YCIWL56dvtr73r6715mJs5ZvhtnY73hBvEF8kXD8ePA=
-github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=
+github.com/gorilla/securecookie v1.1.2/go.mod h1:NfCASbcHqRSY+3a8tlWJwsQap2VX5pwzwo4h3eOamfo=
-github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
+github.com/gorilla/sessions v1.2.2 h1:lqzMYz6bOfvn2WriPUjNByzeXIlVzURcPmgMczkmTjY=
+github.com/gorilla/sessions v1.2.2/go.mod h1:ePLdVu+jbEgHH+KWw8I1z2wqd0BAdAQh/8LRvBeoNcQ=
+github.com/gorilla/sessions v1.4.0 h1:kpIYOp/oi6MG/p5PgxApU8srsSw9tuFbt46Lt7auzqQ=
+github.com/gorilla/sessions v1.4.0/go.mod h1:FLWm50oby91+hl7p/wRxDth9bWSuk0qVL2emc7lT5ik=
+github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4=
+github.com/imdario/mergo v0.3.16/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY=
+github.com/itchyny/gojq v0.12.14 h1:6k8vVtsrhQSYgSGg827AD+PVVaB1NLXEdX+dda2oZCc=
+github.com/itchyny/gojq v0.12.14/go.mod h1:y1G7oO7XkcR1LPZO59KyoCRy08T3j9vDYRV0GgYSS+s=
+github.com/itchyny/gojq v0.12.17 h1:8av8eGduDb5+rvEdaOO+zQUjA04MS0m3Ps8HiD+fceg=
+github.com/itchyny/gojq v0.12.17/go.mod h1:WBrEMkgAfAGO1LUcGOckBl5O726KPp+OlkKug0I/FEY=
+github.com/itchyny/timefmt-go v0.1.5 h1:G0INE2la8S6ru/ZI5JecgyzbbJNs5lG1RcBqa7Jm6GE=
+github.com/itchyny/timefmt-go v0.1.5/go.mod h1:nEP7L+2YmAbT2kZ2HfSs1d8Xtw9LY8D2stDBckWakZ8=
+github.com/itchyny/timefmt-go v0.1.6 h1:ia3s54iciXDdzWzwaVKXZPbiXzxxnv1SPGFfM/myJ5Q=
+github.com/itchyny/timefmt-go v0.1.6/go.mod h1:RRDZYC5s9ErkjQvTvvU7keJjxUYzIISJGxm9/mAERQg=
+github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
+github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
+github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk=
+github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
+github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo=
+github.com/jackc/pgx/v5 v5.5.1 h1:5I9etrGkLrN+2XPCsi6XLlV5DITbSL/xBZdmAxFcXPI=
+github.com/jackc/pgx/v5 v5.5.1/go.mod h1:Ig06C2Vu0t5qXC60W8sqIthScaEnFvojjj9dSljmHRA=
+github.com/jackc/pgx/v5 v5.7.4 h1:9wKznZrhWa2QiHL+NjTSPP6yjl3451BX3imWDnokYlg=
+github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk=
+github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
+github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
+github.com/jarcoal/httpmock v1.3.1 h1:iUx3whfZWVf3jT01hQTO/Eo5sAYtB2/rqaUuOtpInww=
+github.com/jarcoal/httpmock v1.3.1/go.mod h1:3yb8rc4BI7TCBhFY8ng0gjuLKJNquuDNiPaZjnENuYg=
+github.com/joeshaw/multierror v0.0.0-20140124173710-69b34d4ec901 h1:rp+c0RAYOWj8l6qbCUTSiRLG/iKnW3K3/QfPPuSsBt4=
+github.com/joeshaw/multierror v0.0.0-20140124173710-69b34d4ec901/go.mod h1:Z86h9688Y0wesXCyonoVr47MasHilkuLMqGhRZ4Hpak=
+github.com/jonboulle/clockwork v0.4.0 h1:p4Cf1aMWXnXAUh8lVfewRBx1zaTSYKrKMF2g3ST4RZ4=
+github.com/jonboulle/clockwork v0.4.0/go.mod h1:xgRqUGwRcjKCO1vbZUEtSLrqKoPSsUpK7fnezOII0kc=
+github.com/jonboulle/clockwork v0.5.0 h1:Hyh9A8u51kptdkR+cqRpT1EebBwTn1oK9YfGYbdFz6I=
 github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
 github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
 github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs=
 github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8=
-github.com/kidstuff/mongostore v0.0.0-20181113001930-e650cd85ee4b/go.mod h1:g2nVr8KZVXJSS97Jo8pJ0jgq29P6H7dG0oplUA86MQw=
+github.com/klauspost/compress v1.17.2 h1:RlWWUY/Dr4fL8qk9YG7DTZ7PDgME2V4csBXA8L/ixi4=
+github.com/klauspost/compress v1.17.2/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
+github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
 github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
-github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg=
+github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc=
-github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
+github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
+github.com/klauspost/cpuid/v2 v2.3.0 h1:S4CRMLnYUhGeDFDqkGriYKdfoFlDnMtqTiI/sFzhA9Y=
+github.com/klauspost/cpuid/v2 v2.3.0/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
 github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
 github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
 github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
@@ -95,49 +203,90 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
 github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
 github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
 github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
-github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII=
 github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY=
 github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q=
 github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4=
-github.com/lib/pq v1.10.3/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
+github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
-github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
+github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
 github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
-github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA=
+github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
-github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
+github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
-github.com/mattn/go-sqlite3 v1.14.17 h1:mCRHCLDUBXgpKAqIKsaAaAsrAlbkeomtRFKXh2L6YIM=
+github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
-github.com/mattn/go-sqlite3 v1.14.17/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
+github.com/mattn/go-sqlite3 v2.0.3+incompatible h1:gXHsfypPkaMZrKbD5209QV9jbUTJKjyR5WD3HYQSd+U=
-github.com/memcachier/mc v2.0.1+incompatible/go.mod h1:7bkvFE61leUBvXz+yxsOnGBQSZpBSPIMUQSmmSHvuXc=
+github.com/mattn/go-sqlite3 v2.0.3+incompatible/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
-github.com/microcosm-cc/bluemonday v1.0.25 h1:4NEwSfiJ+Wva0VxN5B8OwMicaJvD8r9tlJWm9rtloEg=
+github.com/maxatome/go-testdeep v1.12.0 h1:Ql7Go8Tg0C1D/uMMX59LAoYK7LffeJQ6X2T04nTH68g=
-github.com/microcosm-cc/bluemonday v1.0.25/go.mod h1:ZIOjCQp1OrzBBPIJmfX4qDYFuhU02nx4bn030ixfHLE=
+github.com/maxatome/go-testdeep v1.12.0/go.mod h1:lPZc/HAcJMP92l7yI6TRz1aZN5URwUBUAfUNvrclaNM=
+github.com/mfridman/interpolate v0.0.2 h1:pnuTK7MQIxxFz1Gr+rjSIx9u7qVjf5VOoM/u6BbAxPY=
+github.com/mfridman/interpolate v0.0.2/go.mod h1:p+7uk6oE07mpE/Ik1b8EckO0O4ZXiGAfshKBWLUM9Xg=
+github.com/microcosm-cc/bluemonday v1.0.26 h1:xbqSvqzQMeEHCqMi64VAs4d8uy6Mequs3rQ0k/Khz58=
+github.com/microcosm-cc/bluemonday v1.0.26/go.mod h1:JyzOCs9gkyQyjs+6h10UEVSe02CGwkhd72Xdqh78TWs=
+github.com/microcosm-cc/bluemonday v1.0.27 h1:MpEUotklkwCSLeH+Qdx1VJgNqLlpY2KXwXFM08ygZfk=
+github.com/microcosm-cc/bluemonday v1.0.27/go.mod h1:jFi9vgW+H7c3V0lb6nR74Ib/DIB5OBs92Dimizgw2cA=
+github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
+github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
+github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0=
+github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y=
 github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
 github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
 github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
-github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
 github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
 github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
+github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
+github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
+github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
+github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
|
||||||
|
github.com/opencontainers/image-spec v1.1.0-rc5 h1:Ygwkfw9bpDvs+c9E34SdgGOj41dX/cbdlwvlWt0pnFI=
|
||||||
|
github.com/opencontainers/image-spec v1.1.0-rc5/go.mod h1:X4pATf0uXsnn3g5aiGIsVnJBR4mxhKzfwmvK/B2NTm8=
|
||||||
|
github.com/opencontainers/runc v1.1.10 h1:EaL5WeO9lv9wmS6SASjszOeQdSctvpbu0DdBQBizE40=
|
||||||
|
github.com/opencontainers/runc v1.1.10/go.mod h1:+/R6+KmDlh+hOO8NkjmgkG9Qzvypzk0yXxAPYYR65+M=
|
||||||
|
github.com/ory/dockertest/v3 v3.10.0 h1:4K3z2VMe8Woe++invjaTB7VRyQXQy5UY+loujO4aNE4=
|
||||||
|
github.com/ory/dockertest/v3 v3.10.0/go.mod h1:nr57ZbRWMqfsdGdFNLHz5jjNdDb7VVFnzAeW1n5N1Lg=
|
||||||
|
github.com/paulmach/orb v0.10.0 h1:guVYVqzxHE/CQ1KpfGO077TR0ATHSNjp4s6XGLn3W9s=
|
||||||
|
github.com/paulmach/orb v0.10.0/go.mod h1:5mULz1xQfs3bmQm63QEJA6lNGujuRafwA5S/EnuLaLU=
|
||||||
|
github.com/paulmach/orb v0.11.1 h1:3koVegMC4X/WeiXYz9iswopaTwMem53NzTJuTF20JzU=
|
||||||
github.com/pelletier/go-toml/v2 v2.0.1/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo=
|
github.com/pelletier/go-toml/v2 v2.0.1/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo=
|
||||||
github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4=
|
github.com/pelletier/go-toml/v2 v2.1.1 h1:LWAJwfNvjQZCFIDKWYQaM62NcYeYViCmWIwmOStowAI=
|
||||||
github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
|
github.com/pelletier/go-toml/v2 v2.1.1/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
|
||||||
|
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
|
||||||
|
github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
|
||||||
|
github.com/pierrec/lz4/v4 v4.1.18 h1:xaKrnTkyoqfh1YItXl56+6KJNVYWlEEPuAQW9xsplYQ=
|
||||||
|
github.com/pierrec/lz4/v4 v4.1.18/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
|
||||||
|
github.com/pierrec/lz4/v4 v4.1.22 h1:cKFw6uJDK+/gfw5BcDL0JL5aBsAFdsIT18eRtLj7VIU=
|
||||||
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
|
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
|
||||||
|
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
github.com/quasoft/memstore v0.0.0-20191010062613-2bce066d2b0b/go.mod h1:wTPjTepVu7uJBYgZ0SdWHQlIas582j6cn2jgk4DDdlg=
|
github.com/pressly/goose/v3 v3.17.0 h1:fT4CL3LRm4kfyLuPWzDFAoxjR5ZHjeJ6uQhibQtBaIs=
|
||||||
github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
github.com/pressly/goose/v3 v3.17.0/go.mod h1:22aw7NpnCPlS86oqkO/+3+o9FuCaJg4ZVWRUO3oGzHQ=
|
||||||
|
github.com/pressly/goose/v3 v3.24.3 h1:DSWWNwwggVUsYZ0X2VitiAa9sKuqtBfe+Jr9zFGwWlM=
|
||||||
|
github.com/pressly/goose/v3 v3.24.3/go.mod h1:v9zYL4xdViLHCUUJh/mhjnm6JrK7Eul8AS93IxiZM4E=
|
||||||
|
github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo=
|
||||||
|
github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo=
|
||||||
|
github.com/prometheus/procfs v0.16.1 h1:hZ15bTNuirocR6u0JZ6BAHHmwS1p8B4P6MRqxtzMyRg=
|
||||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
|
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
|
||||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
||||||
github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
|
github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
|
||||||
github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8=
|
github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8=
|
||||||
github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
|
github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
|
||||||
|
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
|
||||||
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
|
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
|
||||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||||
|
github.com/segmentio/asm v1.2.0 h1:9BQrFxC+YOHJlTlHGkTrFWf59nbL3XnCoFLTwDCI7ys=
|
||||||
|
github.com/segmentio/asm v1.2.0/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs=
|
||||||
|
github.com/sethvargo/go-retry v0.2.4 h1:T+jHEQy/zKJf5s95UkguisicE0zuF9y7+/vgz08Ocec=
|
||||||
|
github.com/sethvargo/go-retry v0.2.4/go.mod h1:1afjQuvh7s4gflMObvjLPaWgluLLyhA1wmVZ6KLpICw=
|
||||||
|
github.com/sethvargo/go-retry v0.3.0 h1:EEt31A35QhrcRZtrYFDTBg91cqZVnFL2navjDrah2SE=
|
||||||
|
github.com/sethvargo/go-retry v0.3.0/go.mod h1:mNX17F0C/HguQMyMyJxcnU471gOZGxCLyYaFyAZraas=
|
||||||
|
github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8=
|
||||||
|
github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
|
||||||
|
github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k=
|
||||||
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
|
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
|
||||||
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
||||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||||
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||||
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
|
||||||
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
@@ -146,54 +295,108 @@ github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o
|
|||||||
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||||
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
|
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
|
||||||
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||||
|
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
||||||
|
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||||
github.com/taylorskalyo/goreader v0.0.0-20230626212555-e7f5644f8115 h1:OEAIMYp5l9kJ2kT9UPL5QSUriKIIDhnLmpJTy69sltA=
|
github.com/taylorskalyo/goreader v0.0.0-20230626212555-e7f5644f8115 h1:OEAIMYp5l9kJ2kT9UPL5QSUriKIIDhnLmpJTy69sltA=
|
||||||
github.com/taylorskalyo/goreader v0.0.0-20230626212555-e7f5644f8115/go.mod h1:AIVbkIe1G7fpFHiKOdxZnU5p9tFPYNTQyH3H5IrRkGw=
|
github.com/taylorskalyo/goreader v0.0.0-20230626212555-e7f5644f8115/go.mod h1:AIVbkIe1G7fpFHiKOdxZnU5p9tFPYNTQyH3H5IrRkGw=
|
||||||
|
github.com/taylorskalyo/goreader v1.0.1 h1:eS9SYiHai2aAHhm+YMGRTqrvNt2aoRMTd7p6ftm0crY=
|
||||||
|
github.com/taylorskalyo/goreader v1.0.1/go.mod h1:JrUsWCgnk4C3P5Jsr7Pf2mFrMpsR0ls/0bjR5aorYTI=
|
||||||
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
|
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
|
||||||
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
|
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
|
||||||
github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw=
|
|
||||||
github.com/ugorji/go v1.2.7/go.mod h1:nF9osbDWLy6bDVv/Rtoh6QgnvNDpmCalQV5urGCCS6M=
|
github.com/ugorji/go v1.2.7/go.mod h1:nF9osbDWLy6bDVv/Rtoh6QgnvNDpmCalQV5urGCCS6M=
|
||||||
github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY=
|
|
||||||
github.com/ugorji/go/codec v1.2.7/go.mod h1:WGN1fab3R1fzQlVQTkfxVtIBhWDRqOviHU95kRgeqEY=
|
github.com/ugorji/go/codec v1.2.7/go.mod h1:WGN1fab3R1fzQlVQTkfxVtIBhWDRqOviHU95kRgeqEY=
|
||||||
github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU=
|
github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE=
|
||||||
github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
|
github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
|
||||||
github.com/urfave/cli/v2 v2.25.7 h1:VAzn5oq403l5pHjc4OhD54+XGO9cdKVL/7lDjF+iKUs=
|
github.com/ugorji/go/codec v1.3.0 h1:Qd2W2sQawAfG8XSvzwhBeoGq71zXOC/Q1E9y/wUcsUA=
|
||||||
github.com/urfave/cli/v2 v2.25.7/go.mod h1:8qnjx1vcq5s2/wpsqoZFndg2CE5tNFyrTvS6SinrnYQ=
|
github.com/ugorji/go/codec v1.3.0/go.mod h1:pRBVtBSKl77K30Bv8R2P+cLSGaTtex6fsA2Wjqmfxj4=
|
||||||
github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 h1:bAn7/zixMGCfxrRTfdpNzjtPYqr8smhKouy9mxVdGPU=
|
github.com/urfave/cli/v2 v2.27.1 h1:8xSQ6szndafKVRmfyeUMxkNUJQMjL1F2zmsZ+qHpfho=
|
||||||
github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673/go.mod h1:N3UwUGtsrSj3ccvlPHLoLsHnpR27oXr4ZE984MbSER8=
|
github.com/urfave/cli/v2 v2.27.1/go.mod h1:8qnjx1vcq5s2/wpsqoZFndg2CE5tNFyrTvS6SinrnYQ=
|
||||||
|
github.com/urfave/cli/v2 v2.27.7 h1:bH59vdhbjLv3LAvIu6gd0usJHgoTTPhCFib8qqOwXYU=
|
||||||
|
github.com/urfave/cli/v2 v2.27.7/go.mod h1:CyNAG/xg+iAOg0N4MPGZqVmv2rCoP267496AOXUZjA4=
|
||||||
|
github.com/vertica/vertica-sql-go v1.3.3 h1:fL+FKEAEy5ONmsvya2WH5T8bhkvY27y/Ik3ReR2T+Qw=
|
||||||
|
github.com/vertica/vertica-sql-go v1.3.3/go.mod h1:jnn2GFuv+O2Jcjktb7zyc4Utlbu9YVqpHH/lx63+1M4=
|
||||||
|
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo=
|
||||||
|
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
|
||||||
|
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0=
|
||||||
|
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
|
||||||
|
github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74=
|
||||||
|
github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y=
|
||||||
|
github.com/xrash/smetrics v0.0.0-20231213231151-1d8dd44e695e h1:+SOyEddqYF09QP7vr7CgJ1eti3pY9Fn3LHO1M1r/0sI=
|
||||||
|
github.com/xrash/smetrics v0.0.0-20231213231151-1d8dd44e695e/go.mod h1:N3UwUGtsrSj3ccvlPHLoLsHnpR27oXr4ZE984MbSER8=
|
||||||
|
github.com/xrash/smetrics v0.0.0-20250705151800-55b8f293f342 h1:FnBeRrxr7OU4VvAzt5X7s6266i6cSVkkFPS0TuXWbIg=
|
||||||
|
github.com/xrash/smetrics v0.0.0-20250705151800-55b8f293f342/go.mod h1:Ohn+xnUBiLI6FVj/9LpzZWtj1/D6lUovWYBkxHVV3aM=
|
||||||
|
github.com/ydb-platform/ydb-go-genproto v0.0.0-20231012155159-f85a672542fd h1:dzWP1Lu+A40W883dK/Mr3xyDSM/2MggS8GtHT0qgAnE=
|
||||||
|
github.com/ydb-platform/ydb-go-genproto v0.0.0-20231012155159-f85a672542fd/go.mod h1:Er+FePu1dNUieD+XTMDduGpQuCPssK5Q4BjF+IIXJ3I=
|
||||||
|
github.com/ydb-platform/ydb-go-genproto v0.0.0-20241112172322-ea1f63298f77 h1:LY6cI8cP4B9rrpTleZk95+08kl2gF4rixG7+V/dwL6Q=
|
||||||
|
github.com/ydb-platform/ydb-go-sdk/v3 v3.54.2 h1:E0yUuuX7UmPxXm92+yQCjMveLFO3zfvYFIJVuAqsVRA=
|
||||||
|
github.com/ydb-platform/ydb-go-sdk/v3 v3.54.2/go.mod h1:fjBLQ2TdQNl4bMjuWl9adoTGBypwUTPoGC+EqYqiIcU=
|
||||||
|
github.com/ydb-platform/ydb-go-sdk/v3 v3.108.1 h1:ixAiqjj2S/dNuJqrz4AxSqgw2P5OBMXp68hB5nNriUk=
|
||||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||||
|
go.opentelemetry.io/otel v1.20.0 h1:vsb/ggIY+hUjD/zCAQHpzTmndPqv/ml2ArbsbfBYTAc=
|
||||||
|
go.opentelemetry.io/otel v1.20.0/go.mod h1:oUIGj3D77RwJdM6PPZImDpSZGDvkD9fhesHny69JFrs=
|
||||||
|
go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ=
|
||||||
|
go.opentelemetry.io/otel/trace v1.20.0 h1:+yxVAPZPbQhbC3OfAkeIVTky6iTFpcr4SiY9om7mXSQ=
|
||||||
|
go.opentelemetry.io/otel/trace v1.20.0/go.mod h1:HJSK7F/hA5RlzpZ0zKDCHCDHm556LCDtKaAo6JmBFUU=
|
||||||
|
go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs=
|
||||||
|
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
|
||||||
|
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
|
||||||
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
|
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
|
||||||
golang.org/x/arch v0.4.0 h1:A8WCeEWhLwPBKNbFi5Wv5UTCBx5zzubnXDlMOFAzFMc=
|
golang.org/x/arch v0.7.0 h1:pskyeJh/3AmoQ8CPE95vxHLqp1G1GfGNXTmcl9NEKTc=
|
||||||
golang.org/x/arch v0.4.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
|
golang.org/x/arch v0.7.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
|
||||||
|
golang.org/x/arch v0.20.0 h1:dx1zTU0MAE98U+TQ8BLl7XsJbgze2WnNKF/8tGp/Q6c=
|
||||||
|
golang.org/x/arch v0.20.0/go.mod h1:bdwinDaKcfZUGpH09BB7ZmOfhalA8lQdzl62l8gGWsk=
|
||||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
|
||||||
golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||||
golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU=
|
|
||||||
golang.org/x/crypto v0.13.0 h1:mvySKfSWJ+UKUii46M40LOvyWfN0s2U+46/jDd0e6Ck=
|
|
||||||
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
|
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
|
||||||
golang.org/x/exp v0.0.0-20230905200255-921286631fa9 h1:GoHiUyI/Tp2nVkLI2mCxVkOjsbSXD66ic0XW0js0R9g=
|
golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
|
||||||
golang.org/x/exp v0.0.0-20230905200255-921286631fa9/go.mod h1:S2oDrQGGwySpoQPVqRShND87VCbxmc6bL1Yd2oYrm6k=
|
golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc=
|
||||||
|
golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg=
|
||||||
|
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
|
||||||
|
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
|
||||||
|
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
|
||||||
|
golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4=
|
||||||
|
golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc=
|
||||||
|
golang.org/x/exp v0.0.0-20240119083558-1b970713d09a h1:Q8/wZp0KX97QFTc2ywcOE0YRjZPVIx+MXInMzdvQqcA=
|
||||||
|
golang.org/x/exp v0.0.0-20240119083558-1b970713d09a/go.mod h1:idGWGoKP1toJGkd5/ig9ZLuPcZBC3ewk7SzmH0uou08=
|
||||||
|
golang.org/x/exp v0.0.0-20250718183923-645b1fa84792 h1:R9PFI6EUdfVKgwKjZef7QIwGcBKu86OEFpJ9nUEP2l4=
|
||||||
|
golang.org/x/exp v0.0.0-20250718183923-645b1fa84792/go.mod h1:A+z0yzpGtvnG90cToK5n2tu8UJVP2XUATh+r+sfOOOc=
|
||||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||||
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||||
golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc=
|
|
||||||
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
golang.org/x/mod v0.14.0 h1:dGoOF9QVLYng8IHTm7BAyWqCqSheQ5pYWGhzW00YJr0=
|
||||||
|
golang.org/x/mod v0.14.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||||
|
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||||
|
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||||
|
golang.org/x/mod v0.27.0 h1:kb+q2PyFnEADO2IEF935ehFUXlWiNjJWtRNgBLSfbxQ=
|
||||||
|
golang.org/x/mod v0.27.0/go.mod h1:rWI627Fq0DEoudcK+MBkNkCe0EetEaDSwJJkCcjpazc=
|
||||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||||
golang.org/x/net v0.0.0-20210916014120-12bc252f5db8/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
golang.org/x/net v0.0.0-20210916014120-12bc252f5db8/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||||
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||||
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||||
golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc=
|
golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
|
||||||
golang.org/x/net v0.15.0 h1:ugBLEUaxABaB5AJqW9enI0ACdci2RUd4eP51NTBvuJ8=
|
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
|
||||||
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
|
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
|
||||||
|
golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo=
|
||||||
|
golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY=
|
||||||
|
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
|
||||||
|
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
|
||||||
|
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
|
||||||
|
golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE=
|
||||||
|
golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg=
|
||||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E=
|
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
|
||||||
|
golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ=
|
||||||
|
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||||
|
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||||
|
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||||
|
golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
|
||||||
|
golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
|
||||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
|
||||||
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
|
||||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
@@ -204,68 +407,125 @@ golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBc
|
|||||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.12.0 h1:CM0HF96J0hcLAwsHPJZjfdNzs0gftsLfgKt57wWHJ0o=
|
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU=
|
||||||
|
golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||||
|
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||||
|
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||||
|
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||||
|
golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI=
|
||||||
|
golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||||
|
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
|
||||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||||
golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U=
|
golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
|
||||||
|
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
|
||||||
|
golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
|
||||||
|
golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U=
|
||||||
|
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
|
||||||
|
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
|
||||||
|
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
|
||||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
|
||||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||||
golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||||
golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k=
|
|
||||||
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||||
|
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
|
||||||
|
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||||
|
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||||
|
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
|
||||||
|
golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng=
|
||||||
|
golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU=
|
||||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||||
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||||
golang.org/x/tools v0.13.0 h1:Iey4qkscZuv0VvIt8E0neZjtPVQFSc870HQ448QgEmQ=
|
|
||||||
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
|
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
|
||||||
|
golang.org/x/tools v0.17.0 h1:FvmRgNOcs3kOa+T20R1uhfP9F6HgG2mfxDv1vrx1Htc=
|
||||||
|
golang.org/x/tools v0.17.0/go.mod h1:xsh6VxdV005rRVaS6SSAf9oiAqljS7UZUacMZ8Bnsps=
|
||||||
|
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
|
||||||
|
golang.org/x/tools v0.36.0 h1:kWS0uv/zsvHEle1LbV5LE8QujrxB3wfQyxHfhOk0Qkg=
|
||||||
|
golang.org/x/tools v0.36.0/go.mod h1:WBDiHKJK8YgLHlcQPYQzNCkUxUypCaa5ZegCVutKm+s=
|
||||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
|
google.golang.org/genproto/googleapis/rpc v0.0.0-20231106174013-bbf56f31fb17 h1:Jyp0Hsi0bmHXG6k9eATXoYtjd6e2UzZ1SCn/wIupY14=
|
||||||
|
google.golang.org/genproto/googleapis/rpc v0.0.0-20231106174013-bbf56f31fb17/go.mod h1:oQ5rr10WTTMvP4A36n8JpR1OrO1BEiV4f78CneXZxkA=
|
||||||
|
google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463 h1:e0AIkUUhxyBKh6ssZNrAMeqhA7RKUj42346d1y02i2g=
|
||||||
|
google.golang.org/grpc v1.59.0 h1:Z5Iec2pjwb+LEOqzpB2MR12/eKFhDPhuqW91O+4bwUk=
|
||||||
|
google.golang.org/grpc v1.59.0/go.mod h1:aUPDwccQo6OTjy7Hct4AfBPD1GptF4fyUjIkQ9YtF98=
|
||||||
|
google.golang.org/grpc v1.71.0 h1:kF77BGdPTQ4/JZWMlb9VpJ5pa25aqvVqogsxNHHdeBg=
|
||||||
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
||||||
google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
|
google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
|
||||||
google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8=
|
google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I=
|
||||||
google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
|
google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
|
||||||
|
google.golang.org/protobuf v1.36.7 h1:IgrO7UwFQGJdRNXH/sQux4R1Dj1WAKcLElzeeRaXV2A=
|
||||||
|
google.golang.org/protobuf v1.36.7/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
|
||||||
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
||||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
||||||
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
|
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
|
||||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/natefinch/lumberjack.v2 v2.2.1 h1:bBRl1b0OH9s/DuPhuXpNl+VtCaJXFZ5/uEFST95x9zc=
|
||||||
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/natefinch/lumberjack.v2 v2.2.1/go.mod h1:YD8tP3GAjkrDg1eZH7EGmyESg/lsYskCTPBJVb9jqSc=
|
||||||
|
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
|
||||||
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
||||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||||
lukechampine.com/uint128 v1.2.0 h1:mBi/5l91vocEN8otkC5bDLhi2KdCticRiwbdB0O+rjI=
|
howett.net/plist v1.0.0 h1:7CrbWYbPPO/PyNy38b2EB/+gYbjCe2DXBxgtOOZbSQM=
|
||||||
lukechampine.com/uint128 v1.2.0/go.mod h1:c4eWIwlEGaxC/+H1VguhU4PHXNWDCDMUlWdIWl2j1gk=
|
howett.net/plist v1.0.0/go.mod h1:lqaXoTrLY4hg8tnEzNru53gicrbv7rrk+2xJA/7hw9g=
|
||||||
modernc.org/cc/v3 v3.40.0 h1:P3g79IUS/93SYhtoeaHW+kRCIrYaxJ27MFPv+7kaTOw=
|
howett.net/plist v1.0.1 h1:37GdZ8tP09Q35o9ych3ehygcsL+HqKSwzctveSlarvM=
|
||||||
modernc.org/cc/v3 v3.40.0/go.mod h1:/bTg4dnWkSXowUO6ssQKnOV0yMVxDYNIsIrzqTFDGH0=
|
lukechampine.com/uint128 v1.3.0 h1:cDdUVfRwDUDovz610ABgFD17nXD4/uDgVHl2sC3+sbo=
|
||||||
modernc.org/ccgo/v3 v3.16.13 h1:Mkgdzl46i5F/CNR/Kj80Ri59hC8TKAhZrYSaqvkwzUw=
|
lukechampine.com/uint128 v1.3.0/go.mod h1:c4eWIwlEGaxC/+H1VguhU4PHXNWDCDMUlWdIWl2j1gk=
|
||||||
modernc.org/ccgo/v3 v3.16.13/go.mod h1:2Quk+5YgpImhPjv2Qsob1DnZ/4som1lJTodubIcoUkY=
|
maragu.dev/gomponents v1.1.0 h1:iCybZZChHr1eSlvkWp/JP3CrZGzctLudQ/JI3sBcO4U=
|
||||||
|
maragu.dev/gomponents v1.1.0/go.mod h1:oEDahza2gZoXDoDHhw8jBNgH+3UR5ni7Ur648HORydM=
|
||||||
|
modernc.org/cc/v3 v3.41.0 h1:QoR1Sn3YWlmA1T4vLaKZfawdVtSiGx8H+cEojbC7v1Q=
|
||||||
|
modernc.org/cc/v3 v3.41.0/go.mod h1:Ni4zjJYJ04CDOhG7dn640WGfwBzfE0ecX8TyMB0Fv0Y=
|
||||||
|
modernc.org/ccgo/v3 v3.16.15 h1:KbDR3ZAVU+wiLyMESPtbtE/Add4elztFyfsWoNTgxS0=
|
||||||
|
modernc.org/ccgo/v3 v3.16.15/go.mod h1:yT7B+/E2m43tmMOT51GMoM98/MtHIcQQSleGnddkUNI=
|
||||||
|
modernc.org/ccgo/v3 v3.17.0 h1:o3OmOqx4/OFnl4Vm3G8Bgmqxnvxnh0nbxeT5p/dWChA=
|
||||||
|
modernc.org/ccgo/v3 v3.17.0/go.mod h1:Sg3fwVpmLvCUTaqEUjiBDAvshIaKDB0RXaf+zgqFu8I=
|
||||||
modernc.org/ccorpus v1.11.6 h1:J16RXiiqiCgua6+ZvQot4yUuUy8zxgqbqEEUuGPlISk=
|
modernc.org/ccorpus v1.11.6 h1:J16RXiiqiCgua6+ZvQot4yUuUy8zxgqbqEEUuGPlISk=
|
||||||
|
modernc.org/ccorpus v1.11.6/go.mod h1:2gEUTrWqdpH2pXsmTM1ZkjeSrUWDpjMu2T6m29L/ErQ=
|
||||||
modernc.org/httpfs v1.0.6 h1:AAgIpFZRXuYnkjftxTAZwMIiwEqAfk8aVB2/oA6nAeM=
|
modernc.org/httpfs v1.0.6 h1:AAgIpFZRXuYnkjftxTAZwMIiwEqAfk8aVB2/oA6nAeM=
|
||||||
modernc.org/libc v1.24.1 h1:uvJSeCKL/AgzBo2yYIPPTy82v21KgGnizcGYfBHaNuM=
|
modernc.org/httpfs v1.0.6/go.mod h1:7dosgurJGp0sPaRanU53W4xZYKh14wfzX420oZADeHM=
|
||||||
modernc.org/libc v1.24.1/go.mod h1:FmfO1RLrU3MHJfyi9eYYmZBfi/R+tqZ6+hQ3yQQUkak=
|
modernc.org/libc v1.40.7 h1:oeLS0G067ZqUu+v143Dqad0btMfKmNS7SuOsnkq0Ysg=
|
||||||
modernc.org/mathutil v1.5.0 h1:rV0Ko/6SfM+8G+yKiyI830l3Wuz1zRutdslNoQ0kfiQ=
|
modernc.org/libc v1.40.7/go.mod h1:YAXkAZ8ktnkCKaN9sw/UDeUVkGYJ/YquGO4FTi5nmHE=
|
||||||
modernc.org/mathutil v1.5.0/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E=
|
modernc.org/libc v1.66.6 h1:RyQpwAhM/19nXD8y3iejM/AjmKwY2TjxZTlUWTsWw2U=
|
||||||
modernc.org/memory v1.6.0 h1:i6mzavxrE9a30whzMfwf7XWVODx2r5OYXvU46cirX7o=
|
modernc.org/libc v1.66.6/go.mod h1:j8z0EYAuumoMQ3+cWXtmw6m+LYn3qm8dcZDFtFTSq+M=
|
||||||
modernc.org/memory v1.6.0/go.mod h1:PkUhL0Mugw21sHPeskwZW4D6VscE/GQJOnIpCnW6pSU=
|
modernc.org/mathutil v1.6.0 h1:fRe9+AmYlaej+64JsEEhoWuAYBkOtQiMEU7n/XgfYi4=
|
||||||
|
modernc.org/mathutil v1.6.0/go.mod h1:Ui5Q9q1TR2gFm0AQRqQUaBWFLAhQpCwNcuhBOSedWPo=
|
||||||
|
modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU=
|
||||||
|
modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg=
|
||||||
|
modernc.org/memory v1.7.2 h1:Klh90S215mmH8c9gO98QxQFsY+W451E8AnzjoE2ee1E=
|
||||||
|
modernc.org/memory v1.7.2/go.mod h1:NO4NVCQy0N7ln+T9ngWqOQfi7ley4vpwvARR+Hjw95E=
|
||||||
|
modernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI=
|
||||||
|
modernc.org/memory v1.11.0/go.mod h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw=
|
||||||
modernc.org/opt v0.1.3 h1:3XOZf2yznlhC+ibLltsDGzABUGVx8J6pnFMS3E4dcq4=
|
modernc.org/opt v0.1.3 h1:3XOZf2yznlhC+ibLltsDGzABUGVx8J6pnFMS3E4dcq4=
|
||||||
modernc.org/opt v0.1.3/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0=
|
modernc.org/opt v0.1.3/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0=
|
||||||
modernc.org/sqlite v1.26.0 h1:SocQdLRSYlA8W99V8YH0NES75thx19d9sB/aFc4R8Lw=
|
modernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8=
|
||||||
modernc.org/sqlite v1.26.0/go.mod h1:FL3pVXie73rg3Rii6V/u5BoHlSoyeZeIgKZEgHARyCU=
|
modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns=
|
||||||
modernc.org/strutil v1.1.3 h1:fNMm+oJklMGYfU9Ylcywl0CO5O6nTfaowNsh2wpPjzY=
|
modernc.org/sqlite v1.28.0 h1:Zx+LyDDmXczNnEQdvPuEfcFVA2ZPyaD7UCZDjef3BHQ=
|
||||||
modernc.org/strutil v1.1.3/go.mod h1:MEHNA7PdEnEwLvspRMtWTNnp2nnyvMfkimT1NKNAGbw=
|
modernc.org/sqlite v1.28.0/go.mod h1:Qxpazz0zH8Z1xCFyi5GSL3FzbtZ3fvbjmywNogldEW0=
|
||||||
|
modernc.org/sqlite v1.38.2 h1:Aclu7+tgjgcQVShZqim41Bbw9Cho0y/7WzYptXqkEek=
|
||||||
|
modernc.org/sqlite v1.38.2/go.mod h1:cPTJYSlgg3Sfg046yBShXENNtPrWrDX8bsbAQBzgQ5E=
|
||||||
|
modernc.org/strutil v1.2.0 h1:agBi9dp1I+eOnxXeiZawM8F4LawKv4NzGWSaLfyeNZA=
|
||||||
|
modernc.org/strutil v1.2.0/go.mod h1:/mdcBmfOibveCTBxUl5B5l6W+TTH1FXPLHZE6bTosX0=
|
||||||
|
modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0=
|
||||||
|
modernc.org/strutil v1.2.1/go.mod h1:EHkiggD70koQxjVdSBM3JKM7k6L0FbGE5eymy9i3B9A=
|
||||||
modernc.org/tcl v1.15.2 h1:C4ybAYCGJw968e+Me18oW55kD/FexcHbqH2xak1ROSY=
|
modernc.org/tcl v1.15.2 h1:C4ybAYCGJw968e+Me18oW55kD/FexcHbqH2xak1ROSY=
|
||||||
modernc.org/token v1.0.1 h1:A3qvTqOwexpfZZeyI0FeGPDlSWX5pjZu9hF4lU+EKWg=
|
modernc.org/tcl v1.15.2/go.mod h1:3+k/ZaEbKrC8ePv8zJWPtBSW0V7Gg9g8rkmhI1Kfs3c=
|
||||||
modernc.org/token v1.0.1/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM=
|
modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=
|
||||||
|
modernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM=
|
||||||
modernc.org/z v1.7.3 h1:zDJf6iHjrnB+WRD88stbXokugjyc0/pB91ri1gO6LZY=
|
modernc.org/z v1.7.3 h1:zDJf6iHjrnB+WRD88stbXokugjyc0/pB91ri1gO6LZY=
|
||||||
|
modernc.org/z v1.7.3/go.mod h1:Ipv4tsdxZRbQyLq9Q1M6gdbkxYzdlrciF2Hi/lS7nWE=
|
||||||
nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50=
|
nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50=
|
||||||
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=
|
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=
|
||||||
|
|||||||
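For context on the hunks above: every module in go.sum carries two checksums, an "h1:" hash over the module's full file tree and a "/go.mod h1:" hash over its go.mod file alone. A minimal sketch of recomputing an "h1:" value with golang.org/x/mod/sumdb/dirhash; the cache path below is hypothetical and depends on your GOMODCACHE.

package main

import (
	"fmt"
	"log"

	"golang.org/x/mod/sumdb/dirhash"
)

func main() {
	// Hypothetical module cache path; the real location depends on GOMODCACHE.
	zipPath := "/home/user/go/pkg/mod/cache/download/github.com/kr/text/@v/v0.2.0.zip"

	// dirhash.HashZip with dirhash.Hash1 yields the "h1:..." string that the
	// go command compares against the matching go.sum line.
	sum, err := dirhash.HashZip(zipPath, dirhash.Hash1)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(sum) // expected to match the go.sum entry for this module
}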
@@ -28,7 +28,7 @@ type SVGBezierOpposedLine struct {
 
 func GetSVGGraphData(inputData []int64, svgWidth int, svgHeight int) SVGGraphData {
 	// Derive Height
-	var maxHeight int = 0
+	var maxHeight int
 	for _, item := range inputData {
 		if int(item) > maxHeight {
 			maxHeight = int(item)
@@ -39,19 +39,19 @@ func GetSVGGraphData(inputData []int64, svgWidth int, svgHeight int) SVGGraphDat
 	var sizePercentage float32 = 0.5
 
 	// Scale Ratio -> Desired Height
-	var sizeRatio float32 = float32(svgHeight) * sizePercentage / float32(maxHeight)
+	sizeRatio := float32(svgHeight) * sizePercentage / float32(maxHeight)
 
 	// Point Block Offset
-	var blockOffset int = int(math.Floor(float64(svgWidth) / float64(len(inputData))))
+	blockOffset := int(math.Floor(float64(svgWidth) / float64(len(inputData))))
 
 	// Line & Bar Points
 	linePoints := []SVGGraphPoint{}
 	barPoints := []SVGGraphPoint{}
 
 	// Bezier Fill Coordinates (Max X, Min X, Max Y)
-	var maxBX int = 0
+	var maxBX int
-	var maxBY int = 0
+	var maxBY int
-	var minBX int = 0
+	var minBX int
 	for idx, item := range inputData {
 		itemSize := int(float32(item) * sizeRatio)
 		itemY := svgHeight - itemSize
@@ -98,7 +98,7 @@ func getSVGBezierOpposedLine(pointA SVGGraphPoint, pointB SVGGraphPoint) SVGBezi
 	lengthY := float64(pointB.Y - pointA.Y)
 
 	return SVGBezierOpposedLine{
-		Length: int(math.Sqrt(math.Pow(lengthX, 2) + math.Pow(lengthY, 2))),
+		Length: int(math.Sqrt(lengthX*lengthX + lengthY*lengthY)),
 		Angle:  int(math.Atan2(lengthY, lengthX)),
 	}
 }
@@ -113,15 +113,15 @@ func getSVGBezierControlPoint(currentPoint *SVGGraphPoint, prevPoint *SVGGraphPo
 	}
 
 	// Modifiers
-	var smoothingRatio float64 = 0.2
+	smoothingRatio := 0.2
 	var directionModifier float64 = 0
-	if isReverse == true {
+	if isReverse {
 		directionModifier = math.Pi
 	}
 
 	opposingLine := getSVGBezierOpposedLine(*prevPoint, *nextPoint)
-	var lineAngle float64 = float64(opposingLine.Angle) + directionModifier
+	lineAngle := float64(opposingLine.Angle) + directionModifier
-	var lineLength float64 = float64(opposingLine.Length) * smoothingRatio
+	lineLength := float64(opposingLine.Length) * smoothingRatio
 
 	// Calculate Control Point
 	return SVGGraphPoint{
@@ -156,7 +156,7 @@ func getSVGBezierCurve(point SVGGraphPoint, index int, allPoints []SVGGraphPoint
 	}
 
 func getSVGBezierPath(allPoints []SVGGraphPoint) string {
-	var bezierSVGPath string = ""
+	var bezierSVGPath string
 
 	for index, point := range allPoints {
 		if index == 0 {
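The Length change in getSVGBezierOpposedLine above swaps two math.Pow calls for plain multiplication. Worth noting as an alternative (not what the commit uses): the standard library's math.Hypot computes the same Euclidean length while guarding the intermediate values against overflow and underflow. A small runnable sketch comparing the two:

package main

import (
	"fmt"
	"math"
)

func main() {
	lengthX, lengthY := 3.0, 4.0

	// The commit's form: plain multiplication instead of math.Pow.
	viaSqrt := math.Sqrt(lengthX*lengthX + lengthY*lengthY)

	// Stdlib alternative: Hypot(p, q) returns Sqrt(p*p + q*q) while avoiding
	// unnecessary overflow and underflow in the intermediates.
	viaHypot := math.Hypot(lengthX, lengthY)

	fmt.Println(viaSqrt, viaHypot) // 5 5
}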
49
main.go
@@ -2,34 +2,25 @@ package main
 
 import (
 	"embed"
+	"io/fs"
 	"os"
 	"os/signal"
-	"sync"
 	"syscall"
 
 	log "github.com/sirupsen/logrus"
 	"github.com/urfave/cli/v2"
-	"reichard.io/bbank/server"
+	"reichard.io/antholume/config"
+	"reichard.io/antholume/server"
 )
 
 //go:embed templates/* assets/*
-var assets embed.FS
+var embeddedAssets embed.FS
 
-type UTCFormatter struct {
-	log.Formatter
-}
 
-func (u UTCFormatter) Format(e *log.Entry) ([]byte, error) {
-	e.Time = e.Time.UTC()
-	return u.Formatter.Format(e)
-}
 
 func main() {
-	log.SetFormatter(UTCFormatter{&log.TextFormatter{FullTimestamp: true}})
 
 	app := &cli.App{
 		Name:  "AnthoLume",
 		Usage: "A self hosted e-book progress tracker.",
+		EnableBashCompletion: true,
 		Commands: []*cli.Command{
 			{
 				Name:    "serve",
@@ -46,23 +37,29 @@ func main() {
 }
 
 func cmdServer(ctx *cli.Context) error {
+	var assets fs.FS = embeddedAssets
+
+	// Load config
+	c := config.Load()
+	if c.Version == "develop" {
+		assets = os.DirFS("./")
+	}
 
 	log.Info("Starting AnthoLume Server")
 
-	// Create Channel
+	// Create notify channel
-	wg := sync.WaitGroup{}
+	signals := make(chan os.Signal, 1)
-	done := make(chan struct{})
+	signal.Notify(signals, os.Interrupt, syscall.SIGTERM)
-	interrupt := make(chan os.Signal, 1)
-	signal.Notify(interrupt, os.Interrupt, syscall.SIGTERM)
 
-	// Start Server
+	// Start server
-	server := server.NewServer(assets)
+	s := server.New(c, assets)
-	server.StartServer(&wg, done)
+	s.Start()
 
-	// Wait & Close
+	// Wait & close
-	<-interrupt
+	<-signals
-	server.StopServer(&wg, done)
+	s.Stop()
 
-	// Stop Server
+	// Stop server
 	os.Exit(0)
 
 	return nil
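The cmdServer change above narrows main's dependency from the concrete embed.FS to the fs.FS interface, which is what lets development builds serve templates and assets straight from disk. A stripped-down sketch of the pattern, assuming templates/ and assets/ directories exist; the DEVELOP environment variable is a stand-in for AnthoLume's config.Version check, which is not shown here:

package main

import (
	"embed"
	"io/fs"
	"log"
	"net/http"
	"os"
)

//go:embed templates/* assets/*
var embeddedAssets embed.FS

func main() {
	// embed.FS satisfies fs.FS, so embedded and on-disk sources share one variable.
	var assets fs.FS = embeddedAssets

	// Hypothetical dev switch; AnthoLume keys this off its loaded config instead.
	if os.Getenv("DEVELOP") != "" {
		assets = os.DirFS("./") // read live files from the working tree
	}

	// Downstream code only ever sees fs.FS, e.g. when serving over HTTP.
	http.Handle("/", http.FileServer(http.FS(assets)))
	log.Fatal(http.ListenAndServe(":8080", nil))
}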
110
metadata/_test_files/gbooks_id_response.json
Normal file
@@ -0,0 +1,110 @@
+{
+  "kind": "books#volume",
+  "id": "ZxwpakTv_MIC",
+  "etag": "mhqr3GsebaQ",
+  "selfLink": "https://www.googleapis.com/books/v1/volumes/ZxwpakTv_MIC",
+  "volumeInfo": {
+    "title": "Alice in Wonderland",
+    "authors": [
+      "Lewis Carroll"
+    ],
+    "publisher": "The Floating Press",
+    "publishedDate": "2009-01-01",
+    "description": "Alice in Wonderland (also known as Alice's Adventures in Wonderland), from 1865, is the peculiar and imaginative tale of a girl who falls down a rabbit-hole into a bizarre world of eccentric and unusual creatures. Lewis Carroll's prominent example of the genre of \"literary nonsense\" has endured in popularity with its clever way of playing with logic and a narrative structure that has influence generations of fiction writing.",
+    "industryIdentifiers": [
+      {
+        "type": "ISBN_10",
+        "identifier": "1877527815"
+      },
+      {
+        "type": "ISBN_13",
+        "identifier": "9781877527814"
+      }
+    ],
+    "readingModes": {
+      "text": true,
+      "image": false
+    },
+    "pageCount": 104,
+    "printedPageCount": 112,
+    "printType": "BOOK",
+    "categories": [
+      "Fiction / Classics",
+      "Juvenile Fiction / General"
+    ],
+    "averageRating": 5,
+    "ratingsCount": 1,
+    "maturityRating": "NOT_MATURE",
+    "allowAnonLogging": true,
+    "contentVersion": "0.2.3.0.preview.2",
+    "panelizationSummary": {
+      "containsEpubBubbles": false,
+      "containsImageBubbles": false
+    },
+    "imageLinks": {
+      "smallThumbnail": "http://books.google.com/books/content?id=ZxwpakTv_MIC&printsec=frontcover&img=1&zoom=5&edge=curl&imgtk=AFLRE71e5b-TeAKTiPSvXNUPeUi8rItzur2xSzwH8QU3qjKH0A2opmoq1o5I9RqJFt1BtcCCqILhnYRcB2aFLJmEvom11gx3Qn3PNN1iBLj2H5y2JHjM8wIwGT7iWFQmEn0Od7s6sOdk&source=gbs_api",
+      "thumbnail": "http://books.google.com/books/content?id=ZxwpakTv_MIC&printsec=frontcover&img=1&zoom=1&edge=curl&imgtk=AFLRE70QORt9J_DmKJgfyf9UEjQkdDMZ0qAu0GP315a1Q4CRS3snEjKnJJO2fYFdxjMwsSpmHoXDFPZbsy4gw-kMvF7lL8LtwxGbJGlfETHw_jbQBKBlKTrneK4XFvvV-EXNrZRgylxj&source=gbs_api",
+      "small": "http://books.google.com/books/content?id=ZxwpakTv_MIC&printsec=frontcover&img=1&zoom=2&edge=curl&imgtk=AFLRE70r1pAUt6VhuEEW8vXFhu8LvKln3yj0mdlaWPO4ZQuODLFQnH0fTebKMMX4ANR5i4PtC0oaI48XkwF-EdzlEM1WmUcR5383N4kRMXcta_i9nmb2y38dnh3hObwQW5VoAxbc9psn&source=gbs_api",
+      "medium": "http://books.google.com/books/content?id=ZxwpakTv_MIC&printsec=frontcover&img=1&zoom=3&edge=curl&imgtk=AFLRE7019EVuXvhzbhmtbz1QFh-ajB6kTKRHGhqijFf8big_GPRMMdpCdKlklFbkCfXvy8F64t5NKlThUHb3tFP-51bbDXkrVErFbCqKGzGnDSSm8cewqT8HiYDNHqn0hXYnuYvN4vYf&source=gbs_api",
+      "large": "http://books.google.com/books/content?id=ZxwpakTv_MIC&printsec=frontcover&img=1&zoom=4&edge=curl&imgtk=AFLRE72I15XZqp_8c8BAj4EskxkdC6nQz8F0Fs6VJhkykwIqfjzwuM34tUSQa3UnMGbx-UYjZjSLmCNFlePS8aR7yy-0UP9BRnYD-h5Qbesnnt_xdOb3u7Wdiobi6VbciNCBwUwbCyeH&source=gbs_api",
+      "extraLarge": "http://books.google.com/books/content?id=ZxwpakTv_MIC&printsec=frontcover&img=1&zoom=6&edge=curl&imgtk=AFLRE70rC6ktY6U0K_hqG1HxPl_9hMjpKb10p9DryVIwQgUjoJfWQOjpNA3EQ-5yk167yYDlO27gylqNAdJBYWu7ZHr3GuqkjTDpXjDvzBBppVyWaVNxKwhOz3gfJ-gzM6cC4kLHP26R&source=gbs_api"
+    },
+    "language": "en",
+    "previewLink": "http://books.google.com/books?id=ZxwpakTv_MIC&hl=&source=gbs_api",
+    "infoLink": "https://play.google.com/store/books/details?id=ZxwpakTv_MIC&source=gbs_api",
+    "canonicalVolumeLink": "https://play.google.com/store/books/details?id=ZxwpakTv_MIC"
+  },
+  "layerInfo": {
+    "layers": [
+      {
+        "layerId": "geo",
+        "volumeAnnotationsVersion": "2"
+      }
+    ]
+  },
+  "saleInfo": {
+    "country": "US",
+    "saleability": "FOR_SALE",
+    "isEbook": true,
+    "listPrice": {
+      "amount": 3.99,
+      "currencyCode": "USD"
+    },
+    "retailPrice": {
+      "amount": 3.99,
+      "currencyCode": "USD"
+    },
+    "buyLink": "https://play.google.com/store/books/details?id=ZxwpakTv_MIC&rdid=book-ZxwpakTv_MIC&rdot=1&source=gbs_api",
+    "offers": [
+      {
+        "finskyOfferType": 1,
+        "listPrice": {
+          "amountInMicros": 3990000,
+          "currencyCode": "USD"
+        },
+        "retailPrice": {
+          "amountInMicros": 3990000,
+          "currencyCode": "USD"
+        },
+        "giftable": true
+      }
+    ]
+  },
+  "accessInfo": {
+    "country": "US",
+    "viewability": "PARTIAL",
+    "embeddable": true,
+    "publicDomain": false,
+    "textToSpeechPermission": "ALLOWED",
+    "epub": {
+      "isAvailable": true,
+      "acsTokenLink": "http://books.google.com/books/download/Alice_in_Wonderland-sample-epub.acsm?id=ZxwpakTv_MIC&format=epub&output=acs4_fulfillment_token&dl_type=sample&source=gbs_api"
+    },
+    "pdf": {
+      "isAvailable": false
+    },
+    "webReaderLink": "http://play.google.com/books/reader?id=ZxwpakTv_MIC&hl=&source=gbs_api",
+    "accessViewStatus": "SAMPLE",
+    "quoteSharingAllowed": false
+  }
+}
105
metadata/_test_files/gbooks_query_response.json
Normal file
@@ -0,0 +1,105 @@
{
  "kind": "books#volumes",
  "totalItems": 1,
  "items": [
    {
      "kind": "books#volume",
      "id": "ZxwpakTv_MIC",
      "etag": "F2eR9VV6VwQ",
      "selfLink": "https://www.googleapis.com/books/v1/volumes/ZxwpakTv_MIC",
      "volumeInfo": {
        "title": "Alice in Wonderland",
        "authors": [
          "Lewis Carroll"
        ],
        "publisher": "The Floating Press",
        "publishedDate": "2009-01-01",
        "description": "Alice in Wonderland (also known as Alice's Adventures in Wonderland), from 1865, is the peculiar and imaginative tale of a girl who falls down a rabbit-hole into a bizarre world of eccentric and unusual creatures. Lewis Carroll's prominent example of the genre of \"literary nonsense\" has endured in popularity with its clever way of playing with logic and a narrative structure that has influence generations of fiction writing.",
        "industryIdentifiers": [
          {
            "type": "ISBN_13",
            "identifier": "9781877527814"
          },
          {
            "type": "ISBN_10",
            "identifier": "1877527815"
          }
        ],
        "readingModes": {
          "text": true,
          "image": false
        },
        "pageCount": 104,
        "printType": "BOOK",
        "categories": [
          "Fiction"
        ],
        "averageRating": 5,
        "ratingsCount": 1,
        "maturityRating": "NOT_MATURE",
        "allowAnonLogging": true,
        "contentVersion": "0.2.3.0.preview.2",
        "panelizationSummary": {
          "containsEpubBubbles": false,
          "containsImageBubbles": false
        },
        "imageLinks": {
          "smallThumbnail": "http://books.google.com/books/content?id=ZxwpakTv_MIC&printsec=frontcover&img=1&zoom=5&edge=curl&source=gbs_api",
          "thumbnail": "http://books.google.com/books/content?id=ZxwpakTv_MIC&printsec=frontcover&img=1&zoom=1&edge=curl&source=gbs_api"
        },
        "language": "en",
        "previewLink": "http://books.google.com/books?id=ZxwpakTv_MIC&printsec=frontcover&dq=isbn:1877527815&hl=&cd=1&source=gbs_api",
        "infoLink": "https://play.google.com/store/books/details?id=ZxwpakTv_MIC&source=gbs_api",
        "canonicalVolumeLink": "https://play.google.com/store/books/details?id=ZxwpakTv_MIC"
      },
      "saleInfo": {
        "country": "US",
        "saleability": "FOR_SALE",
        "isEbook": true,
        "listPrice": {
          "amount": 3.99,
          "currencyCode": "USD"
        },
        "retailPrice": {
          "amount": 3.99,
          "currencyCode": "USD"
        },
        "buyLink": "https://play.google.com/store/books/details?id=ZxwpakTv_MIC&rdid=book-ZxwpakTv_MIC&rdot=1&source=gbs_api",
        "offers": [
          {
            "finskyOfferType": 1,
            "listPrice": {
              "amountInMicros": 3990000,
              "currencyCode": "USD"
            },
            "retailPrice": {
              "amountInMicros": 3990000,
              "currencyCode": "USD"
            },
            "giftable": true
          }
        ]
      },
      "accessInfo": {
        "country": "US",
        "viewability": "PARTIAL",
        "embeddable": true,
        "publicDomain": false,
        "textToSpeechPermission": "ALLOWED",
        "epub": {
          "isAvailable": true,
          "acsTokenLink": "http://books.google.com/books/download/Alice_in_Wonderland-sample-epub.acsm?id=ZxwpakTv_MIC&format=epub&output=acs4_fulfillment_token&dl_type=sample&source=gbs_api"
        },
        "pdf": {
          "isAvailable": false
        },
        "webReaderLink": "http://play.google.com/books/reader?id=ZxwpakTv_MIC&hl=&source=gbs_api",
        "accessViewStatus": "SAMPLE",
        "quoteSharingAllowed": false
      },
      "searchInfo": {
        "textSnippet": "Alice in Wonderland (also known as Alice's Adventures in Wonderland), from 1865, is the peculiar and imaginative tale of a girl who falls down a rabbit-hole into a bizarre world of eccentric and unusual creatures."
      }
    }
  ]
}
@@ -1,6 +1,7 @@
 package metadata
 
 import (
+    "regexp"
     "strings"
 
     "github.com/PuerkitoBio/goquery"
@@ -14,11 +15,34 @@ func getEPUBMetadata(filepath string) (*MetadataInfo, error) {
     }
     rf := rc.Rootfiles[0]
 
-    return &MetadataInfo{
+    parsedMetadata := &MetadataInfo{
+        Type:        TYPE_EPUB,
         Title:       &rf.Title,
         Author:      &rf.Creator,
         Description: &rf.Description,
-    }, nil
+    }
+
+    // Parse Possible ISBN
+    if rf.Source != "" {
+        replaceRE := regexp.MustCompile(`[-\s]`)
+        possibleISBN := replaceRE.ReplaceAllString(rf.Source, "")
+
+        // ISBN Matches
+        isbn13RE := regexp.MustCompile(`(?P<ISBN>\d{13})`)
+        isbn10RE := regexp.MustCompile(`(?P<ISBN>\d{10})`)
+        isbn13Matches := isbn13RE.FindStringSubmatch(possibleISBN)
+        isbn10Matches := isbn10RE.FindStringSubmatch(possibleISBN)
+
+        if len(isbn13Matches) > 0 {
+            isbnIndex := isbn13RE.SubexpIndex("ISBN")
+            parsedMetadata.ISBN13 = &isbn13Matches[isbnIndex]
+        } else if len(isbn10Matches) > 0 {
+            isbnIndex := isbn10RE.SubexpIndex("ISBN")
+            parsedMetadata.ISBN10 = &isbn10Matches[isbnIndex]
+        }
+    }
+
+    return parsedMetadata, nil
 }
 
 func countEPUBWords(filepath string) (int64, error) {
@@ -29,10 +53,12 @@ func countEPUBWords(filepath string) (int64, error) {
     rf := rc.Rootfiles[0]
 
     var completeCount int64
-    for _, item := range rf.Spine.Itemrefs {
+    for _, item := range rf.Itemrefs {
        f, _ := item.Open()
        doc, _ := goquery.NewDocumentFromReader(f)
-       completeCount = completeCount + int64(len(strings.Fields(doc.Text())))
+       doc.Find("script, style, noscript, iframe").Remove()
+       words := len(strings.Fields(doc.Text()))
+       completeCount = completeCount + int64(words)
     }
 
     return completeCount, nil
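Editor's note: the new ISBN handling above strips dashes and whitespace before matching, so `urn:isbn:`-style dc:source values normalize to a plain digit run, and a 13-digit match takes precedence over a 10-digit one. A minimal standalone sketch of the same extraction (the sample source strings are invented for illustration):

```go
package main

import (
	"fmt"
	"regexp"
)

// extractISBN mirrors the logic added to getEPUBMetadata: strip separators,
// then prefer a 13-digit match over a 10-digit one.
func extractISBN(source string) (isbn13, isbn10 string) {
	possible := regexp.MustCompile(`[-\s]`).ReplaceAllString(source, "")

	if m := regexp.MustCompile(`\d{13}`).FindString(possible); m != "" {
		return m, ""
	}
	if m := regexp.MustCompile(`\d{10}`).FindString(possible); m != "" {
		return "", m
	}
	return "", ""
}

func main() {
	fmt.Println(extractISBN("urn:isbn:978-1-877527-81-4")) // -> 9781877527814 (ISBN-13)
	fmt.Println(extractISBN("1877527815"))                 // -> ISBN-10 only
}
```

The 13-before-10 ordering matters: every 13-digit run contains 10-digit substrings, so checking ISBN-10 first would misclassify ISBN-13 sources.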
@@ -41,9 +41,9 @@ const GBOOKS_GBID_COVER_URL string = "https://books.google.com/books/content/ima
 
 func getGBooksMetadata(metadataSearch MetadataInfo) ([]MetadataInfo, error) {
     var queryResults []gBooksQueryItem
-    if metadataSearch.ID != nil {
+    if metadataSearch.SourceID != nil {
         // Use GBID
-        resp, err := performGBIDRequest(*metadataSearch.ID)
+        resp, err := performGBIDRequest(*metadataSearch.SourceID)
         if err != nil {
             return nil, err
         }
@@ -83,15 +83,16 @@ func getGBooksMetadata(metadataSearch MetadataInfo) ([]MetadataInfo, error) {
 
         queryResults = resp.Items
     } else {
-        return nil, errors.New("Invalid Data")
+        return nil, errors.New("invalid data")
     }
 
     // Normalize Data
-    allMetadata := []MetadataInfo{}
+    var allMetadata []MetadataInfo
     for i := range queryResults {
         item := queryResults[i] // Range Value Pointer Issue
         itemResult := MetadataInfo{
-            ID:          &item.ID,
+            SourceID:    &item.ID,
+            Source:      SourceGoogleBooks,
             Title:       &item.Info.Title,
             Description: &item.Info.Description,
         }
@@ -121,35 +122,35 @@ func getGBooksMetadata(metadataSearch MetadataInfo) ([]MetadataInfo, error) {
 func saveGBooksCover(gbid string, coverFilePath string, overwrite bool) error {
     // Validate File Doesn't Exists
     _, err := os.Stat(coverFilePath)
-    if err == nil && overwrite == false {
-        log.Warn("[saveGBooksCover] File Alreads Exists")
+    if err == nil && !overwrite {
+        log.Warn("File Alreads Exists")
         return nil
     }
 
     // Create File
     out, err := os.Create(coverFilePath)
     if err != nil {
-        log.Error("[saveGBooksCover] File Create Error")
-        return errors.New("File Failure")
+        log.Error("File Create Error")
+        return errors.New("file failure")
     }
     defer out.Close()
 
     // Download File
-    log.Info("[saveGBooksCover] Downloading Cover")
+    log.Info("Downloading Cover")
     coverURL := fmt.Sprintf(GBOOKS_GBID_COVER_URL, gbid)
     resp, err := http.Get(coverURL)
     if err != nil {
-        log.Error("[saveGBooksCover] Cover URL API Failure")
+        log.Error("Cover URL API Failure")
         return errors.New("API Failure")
     }
     defer resp.Body.Close()
 
     // Copy File to Disk
-    log.Info("[saveGBooksCover] Saving Cover")
+    log.Info("Saving Cover")
     _, err = io.Copy(out, resp.Body)
     if err != nil {
-        log.Error("[saveGBooksCover] File Copy Error")
-        return errors.New("File Failure")
+        log.Error("File Copy Error")
+        return errors.New("file failure")
     }
 
     return nil
@@ -157,42 +158,37 @@ func saveGBooksCover(gbid string, coverFilePath string, overwrite bool) error {
 
 func performSearchRequest(searchQuery string) (*gBooksQueryResponse, error) {
     apiQuery := fmt.Sprintf(GBOOKS_QUERY_URL, searchQuery)
-    log.Info("[performSearchRequest] Acquiring Metadata: ", apiQuery)
+    log.Info("Acquiring Metadata: ", apiQuery)
     resp, err := http.Get(apiQuery)
     if err != nil {
-        log.Error("[performSearchRequest] Google Books Query URL API Failure")
+        log.Error("Google Books Query URL API Failure")
         return nil, errors.New("API Failure")
     }
 
-    parsedResp := gBooksQueryResponse{}
+    var parsedResp gBooksQueryResponse
     err = json.NewDecoder(resp.Body).Decode(&parsedResp)
     if err != nil {
-        log.Error("[performSearchRequest] Google Books Query API Decode Failure")
+        log.Error("Google Books Query API Decode Failure")
         return nil, errors.New("API Failure")
     }
 
-    if len(parsedResp.Items) == 0 {
-        log.Warn("[performSearchRequest] No Results")
-        return nil, errors.New("No Results")
-    }
-
     return &parsedResp, nil
 }
 
 func performGBIDRequest(id string) (*gBooksQueryItem, error) {
     apiQuery := fmt.Sprintf(GBOOKS_GBID_INFO_URL, id)
 
-    log.Info("[performGBIDRequest] Acquiring CoverID")
+    log.Info("Acquiring CoverID")
     resp, err := http.Get(apiQuery)
     if err != nil {
-        log.Error("[performGBIDRequest] Cover URL API Failure")
+        log.Error("Cover URL API Failure")
         return nil, errors.New("API Failure")
     }
 
     parsedResp := gBooksQueryItem{}
     err = json.NewDecoder(resp.Body).Decode(&parsedResp)
     if err != nil {
-        log.Error("[performGBIDRequest] Google Books ID API Decode Failure")
+        log.Error("Google Books ID API Decode Failure")
        return nil, errors.New("API Failure")
     }
 
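Editor's note: the `item := queryResults[i] // Range Value Pointer Issue` copy above guards against the classic range-variable aliasing bug. Before Go 1.22, the loop variable was reused across iterations, so taking its address yielded pointers that all ended up referring to the final element. A minimal demonstration:

```go
package main

import "fmt"

func main() {
	nums := []int{1, 2, 3}

	// Risky pre-Go-1.22 pattern: &v aliases a single reused loop variable.
	var bad []*int
	for _, v := range nums {
		bad = append(bad, &v)
	}

	// Safe pattern used in getGBooksMetadata: copy by index first.
	var good []*int
	for i := range nums {
		v := nums[i]
		good = append(good, &v)
	}

	fmt.Println(*bad[0], *bad[1], *bad[2])    // 3 3 3 on Go <1.22; 1 2 3 on 1.22+
	fmt.Println(*good[0], *good[1], *good[2]) // always 1 2 3
}
```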
126
metadata/gbooks_test.go
Normal file
@@ -0,0 +1,126 @@
package metadata

import (
    _ "embed"
    "encoding/json"
    "fmt"
    "net/http"
    "net/url"
    "regexp"
    "strings"
    "testing"

    "github.com/jarcoal/httpmock"
    "github.com/stretchr/testify/assert"
)

//go:embed _test_files/gbooks_id_response.json
var idResp string

//go:embed _test_files/gbooks_query_response.json
var queryResp string

type details struct {
    URLs []string
}

// Hook API Helper
func hookAPI() *details {
    // Start HTTPMock
    httpmock.Activate()

    // Create details struct
    d := &details{
        URLs: []string{},
    }

    // Create Hook
    matchRE := regexp.MustCompile(`^https://www\.googleapis\.com/books/v1/volumes.*`)
    httpmock.RegisterRegexpResponder("GET", matchRE, func(req *http.Request) (*http.Response, error) {
        // Append URL
        d.URLs = append(d.URLs, req.URL.String())

        // Get Raw Response
        var rawResp string
        if req.URL.Query().Get("q") != "" {
            rawResp = queryResp
        } else {
            rawResp = idResp
        }

        // Convert to JSON Response
        var responseData map[string]any
        _ = json.Unmarshal([]byte(rawResp), &responseData)

        // Return Response
        return httpmock.NewJsonResponse(200, responseData)
    })

    return d
}

func TestGBooksGBIDMetadata(t *testing.T) {
    hookDetails := hookAPI()
    defer httpmock.DeactivateAndReset()

    GBID := "ZxwpakTv_MIC"
    expectedURL := fmt.Sprintf(GBOOKS_GBID_INFO_URL, GBID)
    metadataResp, err := getGBooksMetadata(MetadataInfo{SourceID: &GBID})

    assert.Nil(t, err, "should not have error")
    assert.Contains(t, hookDetails.URLs, expectedURL, "should have intercepted URL")
    assert.Equal(t, 1, len(metadataResp), "should have single result")

    mResult := metadataResp[0]
    validateResult(t, &mResult)
}

func TestGBooksISBNQuery(t *testing.T) {
    hookDetails := hookAPI()
    defer httpmock.DeactivateAndReset()

    ISBN10 := "1877527815"
    expectedURL := fmt.Sprintf(GBOOKS_QUERY_URL, "isbn:"+ISBN10)
    metadataResp, err := getGBooksMetadata(MetadataInfo{
        ISBN10: &ISBN10,
    })

    assert.Nil(t, err, "should not have error")
    assert.Contains(t, hookDetails.URLs, expectedURL, "should have intercepted URL")
    assert.Equal(t, 1, len(metadataResp), "should have single result")

    mResult := metadataResp[0]
    validateResult(t, &mResult)
}

func TestGBooksTitleQuery(t *testing.T) {
    hookDetails := hookAPI()
    defer httpmock.DeactivateAndReset()

    title := "Alice in Wonderland 1877527815"
    expectedURL := fmt.Sprintf(GBOOKS_QUERY_URL, url.QueryEscape(strings.TrimSpace(title)))
    metadataResp, err := getGBooksMetadata(MetadataInfo{
        Title: &title,
    })

    assert.Nil(t, err, "should not have error")
    assert.Contains(t, hookDetails.URLs, expectedURL, "should have intercepted URL")
    assert.NotEqual(t, 0, len(metadataResp), "should not have no results")

    mResult := metadataResp[0]
    validateResult(t, &mResult)
}

func validateResult(t *testing.T, m *MetadataInfo) {
    expectedTitle := "Alice in Wonderland"
    expectedAuthor := "Lewis Carroll"
    expectedDesc := "Alice in Wonderland (also known as Alice's Adventures in Wonderland), from 1865, is the peculiar and imaginative tale of a girl who falls down a rabbit-hole into a bizarre world of eccentric and unusual creatures. Lewis Carroll's prominent example of the genre of \"literary nonsense\" has endured in popularity with its clever way of playing with logic and a narrative structure that has influence generations of fiction writing."
    expectedISBN10 := "1877527815"
    expectedISBN13 := "9781877527814"

    assert.Equal(t, expectedTitle, *m.Title, "should have title")
    assert.Equal(t, expectedAuthor, *m.Author, "should have author")
    assert.Equal(t, expectedDesc, *m.Description, "should have description")
    assert.Equal(t, expectedISBN10, *m.ISBN10, "should have ISBN10")
    assert.Equal(t, expectedISBN13, *m.ISBN13, "should have ISBN13")
}
@@ -1,76 +0,0 @@
//go:build integration

package metadata

import (
    "testing"
)

func TestGBooksGBIDMetadata(t *testing.T) {
    GBID := "ZxwpakTv_MIC"
    metadataResp, err := getGBooksMetadata(MetadataInfo{
        ID: &GBID,
    })

    if len(metadataResp) != 1 {
        t.Fatalf(`Expected: %v, Got: %v, Error: %v`, 1, len(metadataResp), err)
    }

    mResult := metadataResp[0]
    validateResult(&mResult, t)
}

func TestGBooksISBNQuery(t *testing.T) {
    ISBN10 := "1877527815"
    metadataResp, err := getGBooksMetadata(MetadataInfo{
        ISBN10: &ISBN10,
    })

    if len(metadataResp) != 1 {
        t.Fatalf(`Expected: %v, Got: %v, Error: %v`, 1, len(metadataResp), err)
    }

    mResult := metadataResp[0]
    validateResult(&mResult, t)
}

func TestGBooksTitleQuery(t *testing.T) {
    title := "Alice in Wonderland 1877527815"
    metadataResp, err := getGBooksMetadata(MetadataInfo{
        Title: &title,
    })

    if len(metadataResp) == 0 {
        t.Fatalf(`Expected: %v, Got: %v, Error: %v`, "> 0", len(metadataResp), err)
    }

    mResult := metadataResp[0]
    validateResult(&mResult, t)
}

func validateResult(m *MetadataInfo, t *testing.T) {
    expect := "Lewis Carroll"
    if *m.Author != expect {
        t.Fatalf(`Expected: %v, Got: %v`, expect, *m.Author)
    }

    expect = "Alice in Wonderland"
    if *m.Title != expect {
        t.Fatalf(`Expected: %v, Got: %v`, expect, *m.Title)
    }

    expect = "Alice in Wonderland (also known as Alice's Adventures in Wonderland), from 1865, is the peculiar and imaginative tale of a girl who falls down a rabbit-hole into a bizarre world of eccentric and unusual creatures. Lewis Carroll's prominent example of the genre of \"literary nonsense\" has endured in popularity with its clever way of playing with logic and a narrative structure that has influence generations of fiction writing."
    if *m.Description != expect {
        t.Fatalf(`Expected: %v, Got: %v`, expect, *m.Description)
    }

    expect = "1877527815"
    if *m.ISBN10 != expect {
        t.Fatalf(`Expected: %v, Got: %v`, expect, *m.ISBN10)
    }

    expect = "9781877527814"
    if *m.ISBN13 != expect {
        t.Fatalf(`Expected: %v, Got: %v`, expect, *m.ISBN13)
    }
}
@@ -3,27 +3,48 @@ package metadata
 import (
     "errors"
     "fmt"
+    "io"
     "path/filepath"
 
     "github.com/gabriel-vasile/mimetype"
+    "reichard.io/antholume/utils"
 )
 
+type MetadataHandler func(string) (*MetadataInfo, error)
+
+type DocumentType string
+
+const (
+    TYPE_EPUB DocumentType = ".epub"
+)
+
+var extensionHandlerMap = map[DocumentType]MetadataHandler{
+    TYPE_EPUB: getEPUBMetadata,
+}
+
 type Source int
 
 const (
-    GBOOK Source = iota
-    OLIB
+    SourceGoogleBooks Source = iota
+    SourceOpenLibrary
 )
 
 type MetadataInfo struct {
-    ID          *string
+    SourceID   *string
+    MD5        *string
+    PartialMD5 *string
+    WordCount  *int64
+
     Title       *string
     Author      *string
     Description *string
     ISBN10      *string
     ISBN13      *string
+    Type        DocumentType
+    Source      Source
 }
 
+// Downloads the Google Books cover file and saves it to the provided directory.
 func CacheCover(gbid string, coverDir string, documentID string, overwrite bool) (*string, error) {
     // Get Filepath
     coverFile := "." + filepath.Clean(fmt.Sprintf("/%s.jpg", documentID))
@@ -39,44 +60,128 @@ func CacheCover(gbid string, coverDir string, documentID string, overwrite bool)
     return &coverFile, nil
 }
 
+// Searches source for metadata based on the provided information.
 func SearchMetadata(s Source, metadataSearch MetadataInfo) ([]MetadataInfo, error) {
     switch s {
-    case GBOOK:
+    case SourceGoogleBooks:
         return getGBooksMetadata(metadataSearch)
-    case OLIB:
-        return nil, errors.New("Not implemented")
+    case SourceOpenLibrary:
+        return nil, errors.New("not implemented")
     default:
-        return nil, errors.New("Not implemented")
+        return nil, errors.New("not implemented")
     }
 }
 
-func GetWordCount(filepath string) (int64, error) {
+// Returns the word count of the provided filepath. An error will be returned
+// if the file is not supported.
+func GetWordCount(filepath string) (*int64, error) {
     fileMime, err := mimetype.DetectFile(filepath)
     if err != nil {
-        return 0, err
+        return nil, err
     }
 
     if fileExtension := fileMime.Extension(); fileExtension == ".epub" {
         totalWords, err := countEPUBWords(filepath)
         if err != nil {
-            return 0, err
+            return nil, err
         }
-        return totalWords, nil
+        return &totalWords, nil
     } else {
-        return 0, errors.New("Invalid Extension")
+        return nil, fmt.Errorf("invalid extension: %s", fileExtension)
     }
 }
 
+// Returns embedded metadata of the provided file. An error will be returned if
+// the file is not supported.
 func GetMetadata(filepath string) (*MetadataInfo, error) {
+    // Detect Extension Type
     fileMime, err := mimetype.DetectFile(filepath)
     if err != nil {
         return nil, err
     }
 
-    if fileExtension := fileMime.Extension(); fileExtension == ".epub" {
-        return getEPUBMetadata(filepath)
-    } else {
-        return nil, errors.New("Invalid Extension")
+    // Get Extension Type Metadata Handler
+    fileExtension := fileMime.Extension()
+    handler, ok := extensionHandlerMap[DocumentType(fileExtension)]
+    if !ok {
+        return nil, fmt.Errorf("invalid extension %s", fileExtension)
     }
+
+    // Acquire Metadata
+    metadataInfo, err := handler(filepath)
+    if err != nil {
+        return nil, fmt.Errorf("unable to acquire metadata")
+    }
+
+    // Calculate MD5 & Partial MD5
+    partialMD5, err := utils.CalculatePartialMD5(filepath)
+    if err != nil {
+        return nil, fmt.Errorf("unable to calculate partial MD5")
+    }
+
+    // Calculate Actual MD5
+    MD5, err := utils.CalculateMD5(filepath)
+    if err != nil {
+        return nil, fmt.Errorf("unable to calculate MD5")
+    }
+
+    // Calculate Word Count
+    wordCount, err := GetWordCount(filepath)
+    if err != nil {
+        return nil, fmt.Errorf("unable to calculate word count")
+    }
+
+    metadataInfo.WordCount = wordCount
+    metadataInfo.PartialMD5 = partialMD5
+    metadataInfo.MD5 = MD5
+
+    return metadataInfo, nil
 }
+
+// Returns the extension of the provided filepath (e.g. ".epub"). An error
+// will be returned if the file is not supported.
+func GetDocumentType(filepath string) (*DocumentType, error) {
+    // Detect Extension Type
+    fileMime, err := mimetype.DetectFile(filepath)
+    if err != nil {
+        return nil, err
+    }
+
+    // Detect
+    fileExtension := fileMime.Extension()
+    docType, ok := ParseDocumentType(fileExtension)
+    if !ok {
+        return nil, fmt.Errorf("filetype not supported")
+    }
+
+    return &docType, nil
+}
+
+// Returns the extension of the provided file reader (e.g. ".epub"). An error
+// will be returned if the file is not supported.
+func GetDocumentTypeReader(r io.Reader) (*DocumentType, error) {
+    // Detect Extension Type
+    fileMime, err := mimetype.DetectReader(r)
+    if err != nil {
+        return nil, err
+    }
+
+    // Detect
+    fileExtension := fileMime.Extension()
+    docType, ok := ParseDocumentType(fileExtension)
+    if !ok {
+        return nil, fmt.Errorf("filetype not supported")
+    }
+
+    return &docType, nil
+}
+
+// Given a filetype string, attempt to resolve a DocumentType
+func ParseDocumentType(input string) (DocumentType, bool) {
+    validTypes := map[string]DocumentType{
+        string(TYPE_EPUB): TYPE_EPUB,
+    }
+    found, ok := validTypes[input]
+    return found, ok
+}
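Editor's note: GetMetadata now dispatches through `extensionHandlerMap` instead of an if/else chain, so adding a format means registering one handler. A trimmed-down, standalone mirror of that pattern (the PDF entry is hypothetical, and the real code sniffs the MIME type with `mimetype.DetectFile` rather than trusting the extension as this sketch does with `filepath.Ext`):

```go
package main

import (
	"errors"
	"fmt"
	"path/filepath"
)

type DocumentType string

type Metadata struct{ Title string }

// Handler mirrors the MetadataHandler type in metadata.go.
type Handler func(path string) (*Metadata, error)

const (
	TypeEPUB DocumentType = ".epub"
	TypePDF  DocumentType = ".pdf" // hypothetical extra format
)

var handlers = map[DocumentType]Handler{
	TypeEPUB: func(path string) (*Metadata, error) { return &Metadata{Title: "from epub"}, nil },
	TypePDF:  func(path string) (*Metadata, error) { return &Metadata{Title: "from pdf"}, nil },
}

// getMetadata never changes when a new format is registered above.
func getMetadata(path string) (*Metadata, error) {
	handler, ok := handlers[DocumentType(filepath.Ext(path))]
	if !ok {
		return nil, errors.New("filetype not supported")
	}
	return handler(path)
}

func main() {
	m, err := getMetadata("alice.epub")
	fmt.Println(m, err) // &{from epub} <nil>

	_, err = getMetadata("alice.mobi")
	fmt.Println(err) // filetype not supported
}
```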
@@ -1,36 +1,46 @@
 package metadata
 
 import (
+    "os"
     "testing"
+
+    "github.com/stretchr/testify/assert"
 )
 
 func TestGetWordCount(t *testing.T) {
-    var want int64 = 30080
-    wordCount, err := countEPUBWords("../_test_files/alice.epub")
-
-    if wordCount != want {
-        t.Fatalf(`Expected: %v, Got: %v, Error: %v`, want, wordCount, err)
-    }
+    var desiredCount int64 = 30070
+    actualCount, err := countEPUBWords("../_test_files/alice.epub")
+
+    assert.Nil(t, err, "should have no error")
+    assert.Equal(t, desiredCount, actualCount, "should be correct word count")
 }
 
 func TestGetMetadata(t *testing.T) {
-    metadataInfo, err := getEPUBMetadata("../_test_files/alice.epub")
-    if err != nil {
-        t.Fatalf(`Expected: *MetadataInfo, Got: nil, Error: %v`, err)
-    }
-
-    want := "Alice's Adventures in Wonderland / Illustrated by Arthur Rackham. With a Proem by Austin Dobson"
-    if *metadataInfo.Title != want {
-        t.Fatalf(`Expected: %v, Got: %v, Error: %v`, want, *metadataInfo.Title, err)
-    }
-
-    want = "Lewis Carroll"
-    if *metadataInfo.Author != want {
-        t.Fatalf(`Expected: %v, Got: %v, Error: %v`, want, *metadataInfo.Author, err)
-    }
-
-    want = ""
-    if *metadataInfo.Description != want {
-        t.Fatalf(`Expected: %v, Got: %v, Error: %v`, want, *metadataInfo.Description, err)
-    }
-}
+    desiredTitle := "Alice's Adventures in Wonderland / Illustrated by Arthur Rackham. With a Proem by Austin Dobson"
+    desiredAuthor := "Lewis Carroll"
+    desiredDescription := ""
+
+    metadataInfo, err := GetMetadata("../_test_files/alice.epub")
+
+    assert.Nil(t, err, "should have no error")
+    assert.Equal(t, desiredTitle, *metadataInfo.Title, "should be correct title")
+    assert.Equal(t, desiredAuthor, *metadataInfo.Author, "should be correct author")
+    assert.Equal(t, desiredDescription, *metadataInfo.Description, "should be correct description")
+    assert.Equal(t, TYPE_EPUB, metadataInfo.Type, "should be correct type")
+}
+
+func TestGetExtension(t *testing.T) {
+    docType, err := GetDocumentType("../_test_files/alice.epub")
+
+    assert.Nil(t, err, "should have no error")
+    assert.Equal(t, TYPE_EPUB, *docType)
+}
+
+func TestGetExtensionReader(t *testing.T) {
+    file, _ := os.Open("../_test_files/alice.epub")
+    docType, err := GetDocumentTypeReader(file)
+
+    assert.Nil(t, err, "should have no error")
+    assert.Equal(t, TYPE_EPUB, *docType)
+}
@@ -32,24 +32,24 @@ const OLIB_ISBN_LINK_URL string = "https://openlibrary.org/isbn/%s"
 
 func GetCoverOLIDs(title *string, author *string) ([]string, error) {
     if title == nil || author == nil {
-        log.Error("[metadata] Invalid Search Query")
+        log.Error("Invalid Search Query")
         return nil, errors.New("Invalid Query")
     }
 
     searchQuery := url.QueryEscape(fmt.Sprintf("%s %s", *title, *author))
     apiQuery := fmt.Sprintf(OLIB_QUERY_URL, searchQuery)
 
-    log.Info("[metadata] Acquiring CoverID")
+    log.Info("Acquiring CoverID")
     resp, err := http.Get(apiQuery)
     if err != nil {
-        log.Error("[metadata] Cover URL API Failure")
+        log.Error("Cover URL API Failure")
         return nil, errors.New("API Failure")
     }
 
     target := oLibQueryResponse{}
     err = json.NewDecoder(resp.Body).Decode(&target)
     if err != nil {
-        log.Error("[metadata] Cover URL API Decode Failure")
+        log.Error("Cover URL API Decode Failure")
         return nil, errors.New("API Failure")
     }
 
@@ -73,24 +73,24 @@ func DownloadAndSaveCover(coverID string, dirPath string) (*string, error) {
     // Validate File Doesn't Exists
     _, err := os.Stat(safePath)
     if err == nil {
-        log.Warn("[metadata] File Alreads Exists")
+        log.Warn("File Alreads Exists")
         return &safePath, nil
     }
 
     // Create File
     out, err := os.Create(safePath)
     if err != nil {
-        log.Error("[metadata] File Create Error")
+        log.Error("File Create Error")
         return nil, errors.New("File Failure")
     }
     defer out.Close()
 
     // Download File
-    log.Info("[metadata] Downloading Cover")
+    log.Info("Downloading Cover")
     coverURL := fmt.Sprintf(OLIB_OLID_COVER_URL, coverID)
     resp, err := http.Get(coverURL)
     if err != nil {
-        log.Error("[metadata] Cover URL API Failure")
+        log.Error("Cover URL API Failure")
         return nil, errors.New("API Failure")
     }
     defer resp.Body.Close()
@@ -98,7 +98,7 @@ func DownloadAndSaveCover(coverID string, dirPath string) (*string, error) {
     // Copy File to Disk
     _, err = io.Copy(out, resp.Body)
     if err != nil {
-        log.Error("[metadata] File Copy Error")
+        log.Error("File Copy Error")
         return nil, errors.New("File Failure")
     }
 
@@ -1,13 +0,0 @@
# Notes

This folder consists of various notes / files that I want to save and may come back to at some point.

# Ideas / To Do

- Google Fonts -> SW Cache and/or Local
- Change Device Name / Assume Device
- Hide Document per User (Another Table?)
- Admin User?
- Reset Passwords
- Actually Delete Documents
- Activity Pagination
@@ -1,74 +0,0 @@
{{ $data := (GetSVGGraphData .Data.GraphData 800 150 )}}
<svg viewBox="0 0 {{ $data.Width }} {{ $data.Height }}">
  <!-- Box Graph -->
  {{ range $idx, $item := $data.BarPoints }}
  <g class="bar" transform="translate({{ $item.X }}, 0)" fill="gray">
    <rect
      y="{{ $item.Y }}"
      height="{{ $item.Size }}"
      width="{{ $data.Offset }}"
    ></rect>
  </g>
  {{ end }}

  <!-- Linear Line Graph -->
  <polyline
    fill="none"
    stroke="black"
    stroke-width="2"
    points="
      {{ range $item := $data.LinePoints }}
        {{ $item.X }},{{ $item.Y }}
      {{ end }}
    "
  />

  <!-- Bezier Line Graph -->
  <path
    fill="#316BBE"
    fill-opacity="0.5"
    stroke="none"
    d="{{ $data.BezierPath }} {{ $data.BezierFill }}"
  />

  <path fill="none" stroke="#316BBE" d="{{ $data.BezierPath }}" />

  {{ range $index, $item := $data.LinePoints }}
  <line
    class="hover-trigger"
    stroke="black"
    stroke-opacity="0.0"
    stroke-width="{{ $data.Offset }}"
    x1="{{ $item.X }}"
    x2="{{ $item.X }}"
    y1="0"
    y2="{{ $data.Height }}"
  ></line>
  <g class="hover-item">
    <line
      class="text-black dark:text-white"
      stroke-opacity="0.2"
      x1="{{ $item.X }}"
      x2="{{ $item.X }}"
      y1="30"
      y2="{{ $data.Height }}"
    ></line>
    <text
      class="text-black dark:text-white"
      alignment-baseline="middle"
      transform="translate({{ $item.X }}, 5) translate(-30, 8)"
      font-size="10"
    >
      {{ (index $.Data.GraphData $index).Date }}
    </text>
    <text
      class="text-black dark:text-white"
      alignment-baseline="middle"
      transform="translate({{ $item.X }}, 25) translate(-30, -2)"
      font-size="10"
    >
      {{ (index $.Data.GraphData $index).MinutesRead }} minutes
    </text>
  </g>
  {{ end }}
</svg>
@@ -8,8 +8,8 @@ import (
 
 // Feed root element for acquisition or navigation feed
 type Feed struct {
+    ID      string    `xml:"id,omitempty"`
     XMLName xml.Name  `xml:"feed"`
-    ID      string    `xml:"id,omitempty",`
     Title   string    `xml:"title,omitempty"`
     Updated time.Time `xml:"updated,omitempty"`
     Entries []Entry   `xml:"entry,omitempty"`
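Editor's note: the removed line had a stray comma inside the backticks (`xml:"id,omitempty",`), which makes the struct tag malformed — tags are expected to be space-separated key:"value" pairs, and `go vet`'s structtag check should flag trailing characters like this. With the corrected tag, `id` is emitted only when set; a quick sketch of the fixed behavior:

```go
package main

import (
	"encoding/xml"
	"fmt"
)

// Minimal version of the OPDS Feed struct after the fix.
type Feed struct {
	ID      string   `xml:"id,omitempty"`
	XMLName xml.Name `xml:"feed"`
	Title   string   `xml:"title,omitempty"`
}

func main() {
	// ID is empty, so omitempty drops it from the output.
	out, _ := xml.Marshal(Feed{Title: "Catalog"})
	fmt.Println(string(out)) // <feed><title>Catalog</title></feed>
}
```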
55
package-lock.json
generated
Normal file
@@ -0,0 +1,55 @@
{
  "name": "antholume",
  "version": "1.0.0",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "": {
      "name": "antholume",
      "version": "1.0.0",
      "devDependencies": {
        "prettier-plugin-go-template": "^0.0.15"
      }
    },
    "node_modules/prettier": {
      "version": "3.3.2",
      "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.2.tgz",
      "integrity": "sha512-rAVeHYMcv8ATV5d508CFdn+8/pHPpXeIid1DdrPwXnaAdH7cqjVbpJaT5eq4yRAFU/lsbwYwSF/n5iNrdJHPQA==",
      "dev": true,
      "peer": true,
      "bin": {
        "prettier": "bin/prettier.cjs"
      },
      "engines": {
        "node": ">=14"
      },
      "funding": {
        "url": "https://github.com/prettier/prettier?sponsor=1"
      }
    },
    "node_modules/prettier-plugin-go-template": {
      "version": "0.0.15",
      "resolved": "https://registry.npmjs.org/prettier-plugin-go-template/-/prettier-plugin-go-template-0.0.15.tgz",
      "integrity": "sha512-WqU92E1NokWYNZ9mLE6ijoRg6LtIGdLMePt2C7UBDjXeDH9okcRI3zRqtnWR4s5AloiqyvZ66jNBAa9tmRY5EQ==",
      "dev": true,
      "dependencies": {
        "ulid": "^2.3.0"
      },
      "engines": {
        "node": ">=14.0.0"
      },
      "peerDependencies": {
        "prettier": "^3.0.0"
      }
    },
    "node_modules/ulid": {
      "version": "2.3.0",
      "resolved": "https://registry.npmjs.org/ulid/-/ulid-2.3.0.tgz",
      "integrity": "sha512-keqHubrlpvT6G2wH0OEfSW4mquYRcbe/J8NMmveoQOjUqmo+hXtO+ORCpWhdbZ7k72UtY61BL7haGxW6enBnjw==",
      "dev": true,
      "bin": {
        "ulid": "bin/cli.js"
      }
    }
  }
}
7
package.json
Normal file
@@ -0,0 +1,7 @@
{
  "name": "antholume",
  "version": "1.0.0",
  "devDependencies": {
    "prettier-plugin-go-template": "^0.0.15"
  }
}
37
pkg/formatters/duration.go
Normal file
@@ -0,0 +1,37 @@
package formatters

import (
    "fmt"
    "strings"
    "time"
)

// FormatDuration takes a duration and returns a human-readable duration string.
// For example: 1928371 seconds -> "22d 7h 39m 31s"
func FormatDuration(d time.Duration) string {
    if d == 0 {
        return "N/A"
    }

    var parts []string

    days := int(d.Hours()) / 24
    hours := int(d.Hours()) % 24
    minutes := int(d.Minutes()) % 60
    seconds := int(d.Seconds()) % 60

    if days > 0 {
        parts = append(parts, fmt.Sprintf("%dd", days))
    }
    if hours > 0 {
        parts = append(parts, fmt.Sprintf("%dh", hours))
    }
    if minutes > 0 {
        parts = append(parts, fmt.Sprintf("%dm", minutes))
    }
    if seconds > 0 {
        parts = append(parts, fmt.Sprintf("%ds", seconds))
    }

    return strings.Join(parts, " ")
}
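Editor's note: a quick scratch check of FormatDuration against the doc comment's example. The import path is an assumption derived from the module path used elsewhere in this change (`reichard.io/antholume/...`):

```go
package main

import (
	"fmt"
	"time"

	"reichard.io/antholume/pkg/formatters" // assumed import path
)

func main() {
	fmt.Println(formatters.FormatDuration(1928371 * time.Second)) // 22d 7h 39m 31s
	fmt.Println(formatters.FormatDuration(90 * time.Second))      // 1m 30s
	fmt.Println(formatters.FormatDuration(0))                     // N/A
}
```

Two design quirks worth knowing: zero units are dropped entirely (3601s renders as "1h 1s", not "1h 0m 1s"), and a nonzero sub-second duration produces an empty string rather than "N/A", since no unit reaches 1.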
22
pkg/formatters/duration_test.go
Normal file
@@ -0,0 +1,22 @@
package formatters

import (
    "testing"
    "time"
)

func TestFormatDuration(t *testing.T) {
    tests := []struct {
        dur  time.Duration
        want string
    }{
        {0, "N/A"},
        {22*24*time.Hour + 7*time.Hour + 39*time.Minute + 31*time.Second, "22d 7h 39m 31s"},
        {5*time.Minute + 15*time.Second, "5m 15s"},
    }
    for _, tc := range tests {
        if got := FormatDuration(tc.dur); got != tc.want {
            t.Errorf("FormatDuration(%v) = %s, want %s", tc.dur, got, tc.want)
        }
    }
}
45
pkg/formatters/numbers.go
Normal file
@@ -0,0 +1,45 @@
package formatters

import (
    "fmt"
    "math"
)

// FormatNumber takes an int64 and returns a human-readable string.
// For example: 19823 -> "19.8k", 1500000 -> "1.50M"
func FormatNumber(input int64) string {
    if input == 0 {
        return "0"
    }

    // Handle Negative
    negative := input < 0
    if negative {
        input = -input
    }

    abbreviations := []string{"", "k", "M", "B", "T"}
    abbrevIndex := int(math.Log10(float64(input)) / 3)

    // Bounds Check
    if abbrevIndex >= len(abbreviations) {
        abbrevIndex = len(abbreviations) - 1
    }

    scaledNumber := float64(input) / math.Pow(10, float64(abbrevIndex*3))

    var result string
    if scaledNumber >= 100 {
        result = fmt.Sprintf("%.0f%s", scaledNumber, abbreviations[abbrevIndex])
    } else if scaledNumber >= 10 {
        result = fmt.Sprintf("%.1f%s", scaledNumber, abbreviations[abbrevIndex])
    } else {
        result = fmt.Sprintf("%.2f%s", scaledNumber, abbreviations[abbrevIndex])
    }

    if negative {
        result = "-" + result
    }

    return result
}
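Editor's note: the suffix is picked from the base-10 magnitude (⌊log10(n)/3⌋) and the three `Sprintf` branches keep roughly three significant digits. A scratch usage, with outputs matching the test file below (same assumed import path as above):

```go
package main

import (
	"fmt"

	"reichard.io/antholume/pkg/formatters" // assumed import path
)

func main() {
	fmt.Println(formatters.FormatNumber(0))       // 0
	fmt.Println(formatters.FormatNumber(999))     // 999
	fmt.Println(formatters.FormatNumber(19823))   // 19.8k
	fmt.Println(formatters.FormatNumber(1500000)) // 1.50M
	fmt.Println(formatters.FormatNumber(-12345))  // -12.3k
}
```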
22
pkg/formatters/numbers_test.go
Normal file
@@ -0,0 +1,22 @@
package formatters

import (
    "testing"
)

func TestFormatNumber(t *testing.T) {
    tests := []struct {
        input int64
        want  string
    }{
        {0, "0"},
        {19823, "19.8k"},
        {1500000, "1.50M"},
        {-12345, "-12.3k"},
    }
    for _, tc := range tests {
        if got := FormatNumber(tc.input); got != tc.want {
            t.Errorf("FormatNumber(%d) = %s, want %s", tc.input, got, tc.want)
        }
    }
}
13
pkg/ptr/ptr.go
Normal file
@@ -0,0 +1,13 @@
package ptr

func Of[T any](v T) *T {
    return &v
}

func Deref[T any](v *T) T {
    var zeroT T
    if v == nil {
        return zeroT
    }
    return *v
}
73
pkg/ptr/ptr_test.go
Normal file
@@ -0,0 +1,73 @@
package ptr

import (
    "testing"
)

func TestOf(t *testing.T) {
    // Test with different types
    intVal := 42
    intPtr := Of(intVal)
    if *intPtr != intVal {
        t.Errorf("Expected %d, got %d", intVal, *intPtr)
    }

    stringVal := "hello"
    stringPtr := Of(stringVal)
    if *stringPtr != stringVal {
        t.Errorf("Expected %s, got %s", stringVal, *stringPtr)
    }

    floatVal := 3.14
    floatPtr := Of(floatVal)
    if *floatPtr != floatVal {
        t.Errorf("Expected %f, got %f", floatVal, *floatPtr)
    }
}

func TestDeref(t *testing.T) {
    // Test with non-nil pointer
    intVal := 42
    intPtr := Of(intVal)
    result := Deref(intPtr)
    if result != intVal {
        t.Errorf("Expected %d, got %d", intVal, result)
    }

    // Test with nil pointer
    var nilPtr *int
    result = Deref(nilPtr)
    if result != 0 {
        t.Errorf("Expected 0, got %d", result)
    }

    // Test with string
    stringVal := "hello"
    stringPtr := Of(stringVal)
    resultStr := Deref(stringPtr)
    if resultStr != stringVal {
        t.Errorf("Expected %s, got %s", stringVal, resultStr)
    }

    // Test with nil string pointer
    var nilStrPtr *string
    resultStr = Deref(nilStrPtr)
    if resultStr != "" {
        t.Errorf("Expected empty string, got %s", resultStr)
    }
}

func TestDerefZeroValue(t *testing.T) {
    // Test that Deref returns zero value for nil pointers
    var nilInt *int
    result := Deref(nilInt)
    if result != 0 {
        t.Errorf("Expected zero int, got %d", result)
    }

    var nilString *string
    resultStr := Deref(nilString)
    if resultStr != "" {
        t.Errorf("Expected zero string, got %s", resultStr)
    }
}
17
pkg/sliceutils/sliceutils.go
Normal file
@@ -0,0 +1,17 @@
package sliceutils

func First[T any](s []T) (T, bool) {
    if len(s) == 0 {
        var zeroT T
        return zeroT, false
    }
    return s[0], true
}

func Map[R, I any](s []I, f func(I) R) []R {
    r := make([]R, 0, len(s))
    for _, v := range s {
        r = append(r, f(v))
    }
    return r
}
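Editor's note: Map is generic over both element types, so it can project between types rather than only transform in place; the tests below only exercise int→int, so here is a cross-type sketch (the Map body is reproduced verbatim so the snippet runs standalone):

```go
package main

import (
	"fmt"
	"strconv"
)

// Same shape as sliceutils.Map, copied so this compiles on its own.
func Map[R, I any](s []I, f func(I) R) []R {
	r := make([]R, 0, len(s))
	for _, v := range s {
		r = append(r, f(v))
	}
	return r
}

func main() {
	// int -> string projection: R and I differ.
	labels := Map([]int{1, 2, 3}, func(x int) string {
		return "#" + strconv.Itoa(x)
	})
	fmt.Println(labels) // [#1 #2 #3]
}
```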
50
pkg/sliceutils/sliceutils_test.go
Normal file
@@ -0,0 +1,50 @@
package sliceutils

import (
    "testing"
)

func TestFirst(t *testing.T) {
    // Test with empty slice
    var empty []int
    result, ok := First(empty)
    if ok != false {
        t.Errorf("Expected ok=false for empty slice, got %v", ok)
    }
    if result != 0 {
        t.Errorf("Expected zero value for empty slice, got %v", result)
    }

    // Test with non-empty slice
    testSlice := []int{1, 2, 3}
    result, ok = First(testSlice)
    if ok != true {
        t.Errorf("Expected ok=true for non-empty slice, got %v", ok)
    }
    if result != 1 {
        t.Errorf("Expected first element, got %v", result)
    }
}

func TestMap(t *testing.T) {
    // Test with empty slice
    var empty []int
    result := Map(empty, func(x int) int { return x * 2 })
    if len(result) != 0 {
        t.Errorf("Expected empty result for empty input, got %v", result)
    }

    // Test with non-empty slice
    testSlice := []int{1, 2, 3}
    result = Map(testSlice, func(x int) int { return x * 2 })
    expected := []int{2, 4, 6}
    if len(result) != len(expected) {
        t.Errorf("Expected length %d, got %d", len(expected), len(result))
    }
    for i, v := range result {
        if v != expected[i] {
            t.Errorf("Expected %d at index %d, got %d", expected[i], i, v)
        }
    }
}
18
pkg/utils/utils.go
Normal file
@@ -0,0 +1,18 @@
package utils

func Ternary[T any](cond bool, tVal, fVal T) T {
    if cond {
        return tVal
    }
    return fVal
}

func FirstNonZero[T comparable](v ...T) T {
    var zero T
    for _, val := range v {
        if val != zero {
            return val
        }
    }
    return zero
}
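Editor's note: one caveat with a function-based Ternary is that, unlike a language-level conditional, both arguments are evaluated before the call, so it cannot guard nil dereferences or defer an expensive branch. A small demonstration:

```go
package main

import "fmt"

// Same shape as utils.Ternary, copied so this runs standalone.
func Ternary[T any](cond bool, tVal, fVal T) T {
	if cond {
		return tVal
	}
	return fVal
}

func main() {
	var p *int

	// Fine: a real conditional never evaluates the dead branch.
	if p != nil {
		fmt.Println(*p)
	}

	// Would panic if uncommented: *p is evaluated as an argument
	// even though cond is false.
	// fmt.Println(Ternary(p != nil, *p, -1))

	fmt.Println(Ternary(len("abc") > 2, "long", "short")) // long
}
```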
45
pkg/utils/utils_test.go
Normal file
@@ -0,0 +1,45 @@
package utils

import (
    "testing"
)

func TestTernary(t *testing.T) {
    // Test true condition
    result := Ternary(true, 42, 13)
    if result != 42 {
        t.Errorf("Expected 42, got %d", result)
    }

    // Test false condition
    result = Ternary(false, 42, 13)
    if result != 13 {
        t.Errorf("Expected 13, got %d", result)
    }
}

func TestFirstNonZero(t *testing.T) {
    // Test with int values
    result := FirstNonZero(0, 0, 42, 13)
    if result != 42 {
        t.Errorf("Expected 42, got %d", result)
    }

    // Test with string values
    resultStr := FirstNonZero("", "", "hello")
    if resultStr != "hello" {
        t.Errorf("Expected hello, got %s", resultStr)
    }

    // Test all zero values (strings)
    zeroResultStr := FirstNonZero("")
    if zeroResultStr != "" {
        t.Errorf("Expected empty string, got %s", zeroResultStr)
    }

    // Test with float values
    floatResult := FirstNonZero(0.0, 0.0, 3.14)
    if floatResult != 3.14 {
        t.Errorf("Expected 3.14, got %f", floatResult)
    }
}
62
search/anna.go
Normal file
@@ -0,0 +1,62 @@
package search

import (
    "fmt"
    "io"
    "net/url"
    "strings"

    "github.com/PuerkitoBio/goquery"
)

func searchAnnasArchive(query string) ([]SearchItem, error) {
    searchURL := "https://annas-archive.org/search?index=&q=%s&ext=epub&sort=&lang=en"
    url := fmt.Sprintf(searchURL, url.QueryEscape(query))
    body, err := getPage(url)
    if err != nil {
        return nil, err
    }
    return parseAnnasArchive(body)
}

func parseAnnasArchive(body io.ReadCloser) ([]SearchItem, error) {
    // Parse
    defer body.Close()
    doc, err := goquery.NewDocumentFromReader(body)
    if err != nil {
        return nil, err
    }

    // Normalize Results
    var allEntries []SearchItem
    doc.Find(".js-aarecord-list-outer > div > div").Each(func(ix int, rawBook *goquery.Selection) {

        // Parse Details
        details := rawBook.Find("div:nth-child(3)").Text()
        detailsSplit := strings.Split(details, " · ")

        // Invalid Details
        if len(detailsSplit) < 3 {
            return
        }

        // Parse MD5
        titleAuthorDetails := rawBook.Find("div a")
        titleEl := titleAuthorDetails.Eq(0)
        itemHref, _ := titleEl.Attr("href")
        hrefArray := strings.Split(itemHref, "/")
        id := hrefArray[len(hrefArray)-1]

        allEntries = append(allEntries, SearchItem{
            ID:       id,
            Title:    titleEl.Text(),
            Author:   titleAuthorDetails.Eq(1).Text(),
            Language: detailsSplit[0],
            FileType: detailsSplit[1],
            FileSize: detailsSplit[2],
        })
    })

    // Return Results
    return allEntries, nil
}
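Editor's note: parseAnnasArchive leans on two fragile points worth knowing when the scrape breaks — the CSS path `.js-aarecord-list-outer > div > div` and the `" · "`-separated details string. A self-contained goquery sketch against canned HTML (the markup is invented to mirror the selectors, not copied from the live site):

```go
package main

import (
	"fmt"
	"strings"

	"github.com/PuerkitoBio/goquery"
)

// Invented markup shaped to match the selectors used in parseAnnasArchive.
const page = `<div class="js-aarecord-list-outer"><div><div>
  <div><a href="/md5/abc123">Alice in Wonderland</a><a href="#">Lewis Carroll</a></div>
  <div></div>
  <div>English [en] · epub · 0.5MB</div>
</div></div></div>`

func main() {
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(page))
	if err != nil {
		panic(err)
	}

	doc.Find(".js-aarecord-list-outer > div > div").Each(func(_ int, sel *goquery.Selection) {
		// Third child div carries "language · filetype · size".
		details := strings.Split(sel.Find("div:nth-child(3)").Text(), " · ")
		links := sel.Find("div a")
		href, _ := links.Eq(0).Attr("href")
		parts := strings.Split(href, "/")

		fmt.Println("id:", parts[len(parts)-1])   // abc123
		fmt.Println("title:", links.Eq(0).Text()) // Alice in Wonderland
		fmt.Println("details:", details)          // [English [en] epub 0.5MB]
	})
}
```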
69
search/downloaders.go
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
package search
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/PuerkitoBio/goquery"
|
||||||
|
)
|
||||||
|
|
||||||
|
func getLibGenDownloadURL(md5 string, _ Source) ([]string, error) {
|
||||||
|
// Get Page
|
||||||
|
body, err := getPage("http://libgen.li/ads.php?md5=" + md5)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer body.Close()
|
||||||
|
|
||||||
|
// Parse
|
||||||
|
doc, err := goquery.NewDocumentFromReader(body)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return Download URL
|
||||||
|
downloadPath, exists := doc.Find("body > table > tbody > tr > td > a").Attr("href")
|
||||||
|
if !exists {
|
||||||
|
return nil, fmt.Errorf("download URL not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Possible Funky URL
|
||||||
|
downloadPath = strings.ReplaceAll(downloadPath, "\\", "/")
|
||||||
|
return []string{fmt.Sprintf("http://libgen.li/%s", downloadPath)}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func getLibraryDownloadURL(md5 string, source Source) ([]string, error) {
|
||||||
|
// Derive Info URL
|
||||||
|
var infoURL string
|
||||||
|
switch source {
|
||||||
|
case SourceLibGen, SourceAnnasArchive:
|
||||||
|
infoURL = "http://library.lol/fiction/" + md5
|
||||||
|
// case SOURCE_LIBGEN_NON_FICTION:
|
||||||
|
// infoURL = "http://library.lol/main/" + md5
|
||||||
|
default:
|
||||||
|
return nil, errors.New("invalid source")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get Page
|
||||||
|
body, err := getPage(infoURL)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer body.Close()
|
||||||
|
|
||||||
|
// Parse
|
||||||
|
doc, err := goquery.NewDocumentFromReader(body)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return Download URL
|
||||||
|
// downloadURL, _ := doc.Find("#download [href*=cloudflare]").Attr("href")
|
||||||
|
downloadURL, exists := doc.Find("#download h2 a").Attr("href")
|
||||||
|
if !exists {
|
||||||
|
return nil, errors.New("download URL not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
return []string{downloadURL}, nil
|
||||||
|
}
|
||||||
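A hypothetical fallback sketch (not in the diff) combining the two resolvers: try the library.lol info page first and fall back to the libgen.li ads page when it fails. The function name is invented for illustration:

func resolveDownloadURLs(md5 string, source Source) ([]string, error) {
    urls, err := getLibraryDownloadURL(md5, source)
    if err == nil {
        return urls, nil
    }
    // library.lol failed; fall back to the libgen.li ads page
    return getLibGenDownloadURL(md5, source)
}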
42
search/goodreads.go
Normal file
@@ -0,0 +1,42 @@
package search

import (
    "io"

    "github.com/PuerkitoBio/goquery"
)

func GoodReadsMostRead(c Cadence) ([]SearchItem, error) {
    body, err := getPage("https://www.goodreads.com/book/most_read?category=all&country=US&duration=" + string(c))
    if err != nil {
        return nil, err
    }
    return parseGoodReads(body)
}

func parseGoodReads(body io.ReadCloser) ([]SearchItem, error) {
    // Parse
    defer body.Close()
    doc, err := goquery.NewDocumentFromReader(body)
    if err != nil {
        return nil, err
    }

    // Normalize Results
    var allEntries []SearchItem

    doc.Find("[itemtype=\"http://schema.org/Book\"]").Each(func(ix int, rawBook *goquery.Selection) {
        title := rawBook.Find(".bookTitle span").Text()
        author := rawBook.Find(".authorName span").Text()

        item := SearchItem{
            Title:  title,
            Author: author,
        }

        allEntries = append(allEntries, item)
    })

    // Return Results
    return allEntries, nil
}
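A hypothetical call site: the concrete Cadence constants are defined elsewhere in the search package and are not shown in this diff, so the "m" (monthly) value below is an assumption about Goodreads' duration parameter:

// Cadence("m") is assumed here; use the package's real constants.
items, err := GoodReadsMostRead(Cadence("m"))
if err != nil {
    log.Fatal(err)
}
for _, item := range items {
    fmt.Printf("%s by %s\n", item.Title, item.Author)
}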
74
search/libgen.go
Normal file
@@ -0,0 +1,74 @@
package search

import (
    "errors"
    "fmt"
    "io"
    "net/url"
    "strings"

    "github.com/PuerkitoBio/goquery"
)

const LIBGEN_SEARCH_URL = "https://%s/index.php?req=ext:epub+%s&gmode=on"

var libgenDomains []string = []string{
    "libgen.vg",
    "libgen.is",
}

func searchLibGen(query string) ([]SearchItem, error) {
    var allErrors []error

    for _, domain := range libgenDomains {
        url := fmt.Sprintf(LIBGEN_SEARCH_URL, domain, url.QueryEscape(query))
        body, err := getPage(url)
        if err != nil {
            allErrors = append(allErrors, err)
            continue
        }
        results, err := parseLibGen(body)
        if err != nil {
            allErrors = append(allErrors, err)
            continue
        }
        return results, nil
    }

    return nil, fmt.Errorf("could not query libgen: %w", errors.Join(allErrors...))
}

func parseLibGen(body io.ReadCloser) ([]SearchItem, error) {
    // Parse
    defer body.Close()
    doc, err := goquery.NewDocumentFromReader(body)
    if err != nil {
        return nil, err
    }

    // Normalize Results
    var allEntries []SearchItem
    doc.Find("#tablelibgen tbody > tr").Each(func(ix int, rawBook *goquery.Selection) {
        // Parse MD5, skipping rows whose href has no md5 query param
        linksRaw := rawBook.Find("td:nth-child(9) a")
        linksHref, _ := linksRaw.Attr("href")
        hrefArray := strings.Split(linksHref, "?md5=")
        if len(hrefArray) < 2 {
            return
        }
        id := hrefArray[1]

        allEntries = append(allEntries, SearchItem{
            ID:       id,
            Title:    rawBook.Find("td:nth-child(1) > a").First().Text(),
            Author:   rawBook.Find("td:nth-child(2)").Text(),
            Series:   rawBook.Find("td:nth-child(1) > b").Text(),
            Language: rawBook.Find("td:nth-child(5)").Text(),
            FileType: strings.ToLower(strings.TrimSpace(rawBook.Find("td:nth-child(8)").Text())),
            FileSize: strings.ToLower(strings.TrimSpace(rawBook.Find("td:nth-child(7)").Text())),
        })
    })

    // Return Results
    return allEntries, nil
}
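searchLibGen aggregates per-mirror failures with errors.Join (Go 1.20+) so a single returned error carries every mirror's failure. A standalone illustration of how the joined error reads; the mirror errors below are invented, not taken from the diff:

package main

import (
    "errors"
    "fmt"
)

func main() {
    // Hypothetical per-mirror failures
    err := fmt.Errorf("could not query libgen: %w", errors.Join(
        errors.New("libgen.vg: connection timed out"),
        errors.New("libgen.is: HTTP 503"),
    ))
    fmt.Println(err)
    // could not query libgen: libgen.vg: connection timed out
    // libgen.is: HTTP 503
}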
22
search/progress.go
Normal file
@@ -0,0 +1,22 @@
package search

// writeCounter implements io.Writer and reports download progress as a
// percentage through ProgressFunction.
type writeCounter struct {
    Total            int64
    Current          int64
    ProgressFunction func(float32)
}

// Write counts the bytes passing through and flushes a progress update.
func (wc *writeCounter) Write(p []byte) (int, error) {
    n := len(p)
    wc.Current += int64(n)
    wc.flushProgress()
    return n, nil
}

// flushProgress invokes the callback, skipping downloads that are small
// (< 100 KB) or have no callback registered.
func (wc *writeCounter) flushProgress() {
    if wc.ProgressFunction == nil || wc.Total < 100000 {
        return
    }
    percentage := float32(wc.Current) * 100 / float32(wc.Total)
    wc.ProgressFunction(percentage)
}
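A hypothetical download loop (not in the diff) wiring writeCounter in via io.TeeReader, so every chunk read from the response body updates the progress callback; resp, downloadURL, and destFile are assumed to exist in the caller:

resp, err := http.Get(downloadURL) // downloadURL is hypothetical
if err != nil {
    return err
}
defer resp.Body.Close()

counter := &writeCounter{
    Total:            resp.ContentLength,
    ProgressFunction: func(p float32) { fmt.Printf("\r%.1f%%", p) },
}
_, err = io.Copy(destFile, io.TeeReader(resp.Body, counter))
return err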
Some files were not shown because too many files have changed in this diff.