Compare commits

128 Commits

Author SHA1 Message Date
f9f23f2d3f fix: word count calculation
Some checks failed
continuous-integration/drone/push Build is failing
2025-11-12 19:13:04 -05:00
3cff965393 fix: annas archive parsing
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-17 17:04:46 -04:00
7937890acd fix: docker build
All checks were successful
continuous-integration/drone/push Build is passing
2025-08-10 13:18:37 -04:00
938dd69e5e chore(db): use context & add db helper 2025-08-10 13:17:51 -04:00
7c92c346fa feat(utils): add pkg utils 2025-08-10 13:17:44 -04:00
456b6e457c chore: update go & flake
Some checks failed
continuous-integration/drone/push Build is failing
2025-08-07 17:42:41 -04:00
d304421798 hm
All checks were successful
continuous-integration/drone/push Build is passing
2025-07-05 18:17:47 -04:00
0fe52bc541 fix: search parsing
Some checks failed
continuous-integration/drone/push Build is failing
2025-07-05 16:46:06 -04:00
49f3d53170 chore: nix flake
Some checks failed
continuous-integration/drone/push Build is failing
2025-07-05 15:21:44 -04:00
57f81e5dd7 fix(api): ko json content type
All checks were successful
continuous-integration/drone/push Build is passing
2025-05-13 12:37:45 -04:00
162adfbe16 feat: basic toc
All checks were successful
continuous-integration/drone/push Build is passing
2025-04-26 10:19:00 -04:00
e2cfdb3a0c update cicd
All checks were successful
continuous-integration/drone/push Build is passing
2025-03-14 08:36:01 -04:00
acf4119d9a fix(sql): document user stats
Some checks failed
continuous-integration/drone/push Build is passing
continuous-integration/drone Build was killed
2025-01-25 15:03:07 -05:00
f6dd8cee50 fix(streaks): incorrect calculation logic
All checks were successful
continuous-integration/drone/push Build is passing
2024-12-02 19:27:50 -05:00
a981d98ba5 feat(admin): basic log filter
All checks were successful
continuous-integration/drone/push Build is passing
2024-12-01 19:48:51 -05:00
a193f97d29 perf(db): incremental user streaks cache
All checks were successful
continuous-integration/drone/push Build is passing
2024-12-01 18:58:46 -05:00
841b29c425 improve(search): progress & retries
All checks were successful
continuous-integration/drone/push Build is passing
2024-12-01 17:04:41 -05:00
3d61d0f5ef perf(db): incremental document stats cache
All checks were successful
continuous-integration/drone/push Build is passing
2024-12-01 12:48:25 -05:00
5e388730a5 formatting: lua plugin 2024-12-01 11:28:33 -05:00
0a1dfeab65 fix(search): set user agent for dl
All checks were successful
continuous-integration/drone/push Build is passing
2024-08-13 22:32:16 -04:00
d4c8e4d2da fix(search): broken parser & download source
All checks were successful
continuous-integration/drone/push Build is passing
2024-08-11 11:02:46 -04:00
bbd3a00102 tests(db): additional document tests 2024-08-10 09:26:30 -04:00
3a633235ea tests(db): add additional tests & comments
All checks were successful
continuous-integration/drone/push Build is passing
2024-06-16 20:00:41 -04:00
9809a09d2e chore(prettier): format templates
All checks were successful
continuous-integration/drone/push Build is passing
2024-06-16 18:04:43 -04:00
f37bff365f chore(templates): prettier plugin & tables 2024-06-16 17:08:10 -04:00
77527bfb05 chore(templates): add better template loading
All checks were successful
continuous-integration/drone/push Build is passing
2024-05-27 20:20:47 -04:00
8de6fed5df fix(ui): document add styling 2024-05-27 14:01:10 -04:00
f9277d3b32 feat(admin): handle user deletion
All checks were successful
continuous-integration/drone/push Build is passing
2024-05-27 13:32:40 -04:00
db9629a618 chore(lint): address linter
All checks were successful
continuous-integration/drone/push Build is passing
2024-05-26 19:56:59 -04:00
546600db93 feat(admin): handle user demotion & promotion
All checks were successful
continuous-integration/drone/push Build is passing
2024-05-25 21:12:07 -04:00
7c6acad689 chore(templates): component-ize things
All checks were successful
continuous-integration/drone/push Build is passing
2024-05-25 20:04:26 -04:00
5482899075 feat(admin): adding user & importing 2024-05-25 20:02:57 -04:00
5a64ff7029 fix(tz): incorrect local_time function use
All checks were successful
continuous-integration/drone/push Build is passing
2024-04-06 20:56:30 -04:00
a7ecb1a6f8 fix(tz): add tzdata to docker image
All checks were successful
continuous-integration/drone/push Build is passing
2024-04-06 09:39:04 -04:00
2d206826d6 add(admin): add user
All checks were successful
continuous-integration/drone/push Build is passing
2024-03-11 22:20:41 -07:00
f1414e3e4e fix(timezones): move from utc offsets to timezones
This fixed various issues related to calculating streaks, etc. Now we
appropriately handle time as it was, vs as it is relative to an offset.
2024-03-11 22:20:21 -07:00
8e81acd381 fix(users): update user stomped on admin
All checks were successful
continuous-integration/drone/push Build is passing
2024-03-10 21:48:43 -04:00
6c6a6dd329 feat(api): first user is admin
All checks were successful
continuous-integration/drone/push Build is passing
2024-02-25 19:40:36 -05:00
c4602c8c3b chore(db): update sqlc 2024-02-25 19:01:34 -05:00
fe81b57a34 tests(db): migrate to testify
All checks were successful
continuous-integration/drone/push Build is passing
2024-02-25 15:13:53 -05:00
a69b7452ce chore(dev): dynamically load templates during dev
All checks were successful
continuous-integration/drone/push Build is passing
2024-02-25 14:54:50 -05:00
75ed394f8d tests(all): improve tests, refactor(api): saving books
All checks were successful
continuous-integration/drone/push Build is passing
2024-02-24 20:45:26 -05:00
803c187a00 fix(logs): ios pretty logs & overflow scroll 2024-02-24 17:07:12 -05:00
da1baeb4cd feat(reader): upgrade epubjs & add restrictive iframe CSP 2024-02-19 16:45:35 -05:00
5865fe3c13 feat(db): button up migrations
All checks were successful
continuous-integration/drone/push Build is passing
2024-02-01 20:05:35 -05:00
4a5464853b fix(graph): fix stretchy text on graph
All checks were successful
continuous-integration/drone/push Build is passing
2024-02-01 19:07:24 -05:00
622dcd5702 fix(settings): auth hash accidentally overridden
All checks were successful
continuous-integration/drone/push Build is passing
2024-01-28 22:23:37 -05:00
a86e2520ef feat(logs): jq filtering, feat(import): directory picker, refactor(admin): move routes to seperate file
All checks were successful
continuous-integration/drone/push Build is passing
2024-01-28 22:11:36 -05:00
b1cfd16627 feat(restore): rotate auth hash on restore
All checks were successful
continuous-integration/drone/push Build is passing
2024-01-28 11:38:44 -05:00
015ca30ac5 feat(auth): add auth hash (allows purging sessions & more)
All checks were successful
continuous-integration/drone/push Build is passing
2024-01-28 11:21:06 -05:00
9792a6ff19 refactor(managers): privatize manager struct fields
All checks were successful
continuous-integration/drone/push Build is passing
2024-01-27 14:56:01 -05:00
8c4c1022c3 refactor(errors): handle api / app errors better
All checks were successful
continuous-integration/drone/push Build is passing
2024-01-26 22:07:30 -05:00
fd8b6bcdc1 feat(logging): improve logging & migrate to json logger
All checks were successful
continuous-integration/drone/push Build is passing
2024-01-26 20:45:07 -05:00
0bbd5986cb add: db migrations & update
All checks were successful
continuous-integration/drone/push Build is passing
2024-01-25 19:22:57 -05:00
45cef2f4af chore(formatting): djlint templates
All checks were successful
continuous-integration/drone/push Build is passing
2024-01-24 21:40:14 -05:00
e33a64db96 fix: potential null query
All checks were successful
continuous-integration/drone/push Build is passing
2024-01-24 18:43:33 -05:00
35ca021649 add: more statistics
All checks were successful
continuous-integration/drone/push Build is passing
2024-01-23 23:00:51 -05:00
760b9ca0a0 fix: downloads, fix: logging space
All checks were successful
continuous-integration/drone/push Build is passing
2024-01-22 18:03:01 -05:00
c9edcd8f5a [add] progress performance debugging
All checks were successful
continuous-integration/drone/push Build is passing
2024-01-21 12:50:25 -05:00
2d63a7d109 [perf] dont immediately update view cache
All checks were successful
continuous-integration/drone/push Build is passing
2024-01-21 11:29:26 -05:00
9bd6bf7727 [fix] docker cicd build
All checks were successful
continuous-integration/drone/push Build is passing
2024-01-20 15:32:04 -05:00
f0a2d2cf69 [add] better log page, [add] admin users page, [add] admin nav
All checks were successful
continuous-integration/drone/push Build is passing
2024-01-20 15:03:32 -05:00
a65750ae21 [chore] rename package, [chore] rename vars
All checks were successful
continuous-integration/drone/push Build is passing
2024-01-10 20:23:36 -05:00
14b930781e [add] username in http access logs
All checks were successful
continuous-integration/drone/push Build is passing
2024-01-09 21:36:36 -05:00
8a8f12c07a [fix] export directories
All checks were successful
continuous-integration/drone/push Build is passing
2024-01-09 21:18:12 -05:00
c5b181dda4 [add] admin panel, [add] better logging
All checks were successful
continuous-integration/drone/push Build is passing
2024-01-09 21:08:40 -05:00
d3d89b36f6 [refactor] app routes, [add] progress table
All checks were successful
continuous-integration/drone/push Build is passing
2023-12-31 23:13:39 -05:00
a69f20d5a9 [fix] daily stats bug
All checks were successful
continuous-integration/drone/push Build is passing
2023-12-30 10:30:12 -05:00
c66a6c8499 [add] parse local isbn metadata
All checks were successful
continuous-integration/drone/push Build is passing
2023-12-30 10:18:43 -05:00
3057b86002 [add] progress streaming
All checks were successful
continuous-integration/drone/push Build is passing
2023-12-01 07:35:51 -05:00
2c240f2f5c [add] cache fonts
Some checks failed
continuous-integration/drone/push Build is failing
2023-11-29 06:15:44 -05:00
39fd7ab1f1 [fix] login error 2023-11-28 23:11:12 -05:00
e9f2e3a5a0 [fix] assets regression
All checks were successful
continuous-integration/drone/push Build is passing
2023-11-28 22:26:29 -05:00
a34906c266 [chore] embed filesystem
All checks were successful
continuous-integration/drone/push Build is passing
2023-11-28 22:01:49 -05:00
756db7a493 [refactor] template handling
All checks were successful
continuous-integration/drone/push Build is passing
2023-11-28 20:05:50 -05:00
bb837dd30e [fix] service worker route regex bug, [add] device selector / creator
All checks were successful
continuous-integration/drone/push Build is passing
2023-11-26 21:41:17 -05:00
e823a794cf [fix] SyncNinja status message 2023-11-26 15:51:47 -05:00
3c6f3ae237 [add] favicon
All checks were successful
continuous-integration/drone/push Build is passing
2023-11-25 19:21:18 -05:00
ca1cce1ff1 [add] opds search, [fix] opds urls, [add] log level env var
All checks were successful
continuous-integration/drone/push Build is passing
2023-11-25 18:38:18 -05:00
c3410b7833 [fix] version
All checks were successful
continuous-integration/drone/tag Build is passing
continuous-integration/drone/push Build is passing
2023-11-18 10:14:57 -05:00
1403bae036 [add] pagination
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
2023-11-17 23:10:59 -05:00
af41946a65 [add] git link
All checks were successful
continuous-integration/drone/push Build is passing
2023-11-17 21:40:59 -05:00
243ae2a001 [add] document search
All checks were successful
continuous-integration/drone/push Build is passing
2023-11-17 21:24:19 -05:00
d94e79f39c [fix] syncninja koreader nil error
All checks were successful
continuous-integration/drone/push Build is passing
2023-11-09 22:53:41 -05:00
856bc7e2e6 [fix] xpath & cfi resolution
All checks were successful
continuous-integration/drone/push Build is passing
2023-11-07 19:19:06 -05:00
5cc1e2d71c [fix] wonky xpath issues
All checks were successful
continuous-integration/drone/push Build is passing
2023-11-06 07:12:24 -05:00
ffc5462326 [fix] opds no redirect - KOReader OPDS compatibility
All checks were successful
continuous-integration/drone/push Build is passing
2023-11-05 21:38:10 -05:00
3cbe4b1c0d [fix] null pointer deref
All checks were successful
continuous-integration/drone/push Build is passing
2023-11-05 21:20:15 -05:00
c213b3b09f [fix] wakelock detection
All checks were successful
continuous-integration/drone/push Build is passing
2023-11-05 19:27:43 -05:00
7d45bb0253 [add] logo & banner, [fix] mobile alignment issue
All checks were successful
continuous-integration/drone/push Build is passing
2023-11-05 13:20:19 -05:00
a8bcd0f588 [add] rename to AnthoLume
All checks were successful
continuous-integration/drone/push Build is passing
2023-11-04 19:55:38 -04:00
bc3e9cbaf0 [add] update assets & slight rearrangements
All checks were successful
continuous-integration/drone/push Build is passing
2023-11-04 13:12:05 -04:00
e6ad51ed70 [add] cleanup and minify
All checks were successful
continuous-integration/drone/push Build is passing
2023-11-04 12:27:35 -04:00
cce0ef2de1 [fix] book stat dom
All checks were successful
continuous-integration/drone/push Build is passing
2023-11-04 00:04:31 -04:00
71898c39e7 [improve] web reader activity & progress tracking
All checks were successful
continuous-integration/drone/push Build is passing
2023-11-03 23:43:08 -04:00
985b6e0851 [fix] percentage regression, [add] individual doc & user update (performance) 2023-11-03 21:37:26 -04:00
425f469097 Merge pull request 'Migrate Pages -> Percentages' (#2) from remove_pages into master
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: evan/BookManager#2
2023-11-03 23:50:40 +00:00
761163d666 [add] migrate to percentages vs pages
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/pr Build is passing
[add] task runner
[fix] calculate word count on upload
[remove] unused queries
2023-11-03 19:38:35 -04:00
67dedaa886 [add] demo readme link
All checks were successful
continuous-integration/drone/push Build is passing
2023-10-31 17:45:33 -04:00
5f1de4ec67 [fix] 0 page issue, [add] wpm logging
All checks were successful
continuous-integration/drone/push Build is passing
2023-10-31 17:30:42 -04:00
d27b9061bb [add] demo mode 2023-10-31 06:28:22 -04:00
0f271ac2fb [fix] copy error
All checks were successful
continuous-integration/drone/push Build is passing
2023-10-30 23:35:37 -04:00
20560ed246 [fix] negative current word values
All checks were successful
continuous-integration/drone/push Build is passing
2023-10-30 19:36:04 -04:00
aacf5a7195 [fix] login PWA styling, [add] login local link, [add] home local link
All checks were successful
continuous-integration/drone/push Build is passing
2023-10-30 19:23:38 -04:00
5880d3beb6 [fix] handle sw unsupported, [fix] sw install / upgrade, [add] local file upload / delete
All checks were successful
continuous-integration/drone/push Build is passing
2023-10-30 18:25:43 -04:00
0917172d1c Merge pull request 'Service Worker & Offline Reader' (#1) from add_service_worker into master
All checks were successful
continuous-integration/drone/push Build is passing
Reviewed-on: evan/BookManager#1
2023-10-30 01:27:18 +00:00
f74c81dc9b [add] service worker & offline reader
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/pr Build is passing
2023-10-29 21:17:47 -04:00
1b8b5060f1 [fix] server word count, [add] client word count
All checks were successful
continuous-integration/drone/push Build is passing
2023-10-26 21:50:05 -04:00
b9b9ad2098 [add] better search error handling 2023-10-26 06:20:56 -04:00
37b6ac10ac [fix] CFI from XPath issue
All checks were successful
continuous-integration/drone/push Build is passing
2023-10-25 22:44:18 -04:00
39c78997ac [fix] regression css style
All checks were successful
continuous-integration/drone/push Build is passing
2023-10-25 20:08:09 -04:00
cdec621043 [add] better error handling, [add] font selector, [add] tailwind generation
All checks were successful
continuous-integration/drone/push Build is passing
2023-10-25 19:52:01 -04:00
3577dd89a0 [add] configurable cookie attribute settings
All checks were successful
continuous-integration/drone/push Build is passing
2023-10-24 22:14:19 -04:00
912b054502 [fix] reader page
All checks were successful
continuous-integration/drone/push Build is passing
2023-10-24 18:30:01 -04:00
1f59ef7813 [add] docker publish
All checks were successful
continuous-integration/drone/push Build is passing
2023-10-24 08:25:27 -04:00
b8aef52913 [add] drone config
All checks were successful
continuous-integration/drone/push Build is passing
2023-10-23 21:00:44 -04:00
b5d5e4bd64 [add] tests, [add] refactor epub feat 2023-10-23 20:18:16 -04:00
bf6ac96376 [fix] non-default font size causes invalid page on resume 2023-10-19 19:03:15 -04:00
d8ee1f0747 [add] highlight position node on start & on resize (easier to find your place) 2023-10-15 23:23:58 -04:00
5d9c0804bd [add] better xpath generation, [add] wake lock, [add] device sorting,
[fix] better theme management
2023-10-13 21:29:22 -04:00
4da3f19c1a [fix] various ios pwa styling issues 2023-10-12 21:20:51 -04:00
a972b5a8aa [add] progress & activity sync with web reader 2023-10-12 19:14:29 -04:00
8ecd6ad57d [add] basic epub reader, [fix] empty device synced bug 2023-10-10 19:43:07 -04:00
edca763396 [add] search 2023-10-06 21:25:56 -04:00
70c7f4b991 [fix] store times as rfc3339 format 2023-10-05 21:04:57 -04:00
eb7d711022 [add] opds catalog, [add] migrate to non-cgo sqlite driver 2023-10-05 19:56:19 -04:00
4e1ee0022a [add] heavy query caching, [add] wpm leaderboard 2023-10-04 20:21:23 -04:00
5cd4e165b0 [new] refactor & rename, [fix] rescaled activity view performance 2023-10-03 07:37:14 -04:00
170 changed files with 14664 additions and 5266 deletions

14
.djlintrc Normal file
View File

@@ -0,0 +1,14 @@
{
"profile": "golang",
"indent": 2,
"close_void_tags": true,
"format_attribute_template_tags": true,
"format_js": true,
"js": {
"indent_size": 2
},
"format_css": true,
"css": {
"indent_size": 2
}
}

35
.drone.yml Normal file
View File

@@ -0,0 +1,35 @@
kind: pipeline
type: docker
name: default
trigger:
branch:
- master
steps:
# Unit Tests
- name: tests
image: golang
commands:
- make tests
# Fetch tags
- name: fetch tags
image: alpine/git
commands:
- git fetch --tags
# Publish docker image
- name: publish docker
image: plugins/docker
settings:
repo: gitea.va.reichard.io/evan/antholume
registry: gitea.va.reichard.io
tags:
- dev
custom_dns:
- 8.8.8.8
username:
from_secret: docker_username
password:
from_secret: docker_password

2
.envrc
View File

@@ -1 +1 @@
use nix use flake

3
.gitignore vendored
View File

@@ -1,4 +1,7 @@
TODO.md
.DS_Store .DS_Store
data/ data/
build/ build/
.direnv/ .direnv/
cover.html
node_modules

3
.prettierrc Normal file
View File

@@ -0,0 +1,3 @@
{
"plugins": ["prettier-plugin-go-template"]
}

View File

@@ -1,36 +1,27 @@
# Certificate Store # Certificates & Timezones
FROM alpine as certs FROM alpine AS alpine
RUN apk update && apk add ca-certificates RUN apk update && apk add --no-cache ca-certificates tzdata
# Build Image # Build Image
FROM --platform=$BUILDPLATFORM golang:1.20 AS build FROM golang:1.24 AS build
# Install Dependencies
RUN apt-get update -y
RUN apt install -y gcc-x86-64-linux-gnu
# Create Package Directory # Create Package Directory
WORKDIR /src RUN mkdir -p /opt/antholume
RUN mkdir -p /opt/bookmanager
# Cache Dependencies & Compile # Copy Source
ARG TARGETOS WORKDIR /src
ARG TARGETARCH COPY . .
RUN --mount=target=. \
--mount=type=cache,target=/root/.cache/go-build \ # Compile
--mount=type=cache,target=/go/pkg \ RUN go build \
if [ "$TARGETARCH" = "amd64" ]; then \ -ldflags "-X reichard.io/antholume/config.version=`git describe --tags`" \
GOOS=$TARGETOS GOARCH=$TARGETARCH CGO_ENABLED=1 CGO_CFLAGS="-D_LARGEFILE64_SOURCE" CC=x86_64-linux-gnu-gcc go build -o /opt/bookmanager/server; \ -o /opt/antholume/server
else \
GOOS=$TARGETOS GOARCH=$TARGETARCH CGO_ENABLED=1 CGO_CFLAGS="-D_LARGEFILE64_SOURCE" go build -o /opt/bookmanager/server; \
fi; \
cp -a ./templates /opt/bookmanager/templates; \
cp -a ./assets /opt/bookmanager/assets;
# Create Image # Create Image
FROM busybox:1.36 FROM busybox:1.36
COPY --from=certs /etc/ssl/certs /etc/ssl/certs COPY --from=alpine /etc/ssl/certs /etc/ssl/certs
COPY --from=build /opt/bookmanager /opt/bookmanager COPY --from=alpine /usr/share/zoneinfo /usr/share/zoneinfo
WORKDIR /opt/bookmanager COPY --from=build /opt/antholume /opt/antholume
WORKDIR /opt/antholume
EXPOSE 8585 EXPOSE 8585
ENTRYPOINT ["/opt/bookmanager/server", "serve"] ENTRYPOINT ["/opt/antholume/server", "serve"]

29
Dockerfile-BuildKit Normal file
View File

@@ -0,0 +1,29 @@
# Certificates & Timezones
FROM alpine AS alpine
RUN apk update && apk add --no-cache ca-certificates tzdata
# Build Image
FROM --platform=$BUILDPLATFORM golang:1.21 AS build
# Create Package Directory
WORKDIR /src
RUN mkdir -p /opt/antholume
# Cache Dependencies & Compile
ARG TARGETOS
ARG TARGETARCH
RUN --mount=target=. \
--mount=type=cache,target=/root/.cache/go-build \
--mount=type=cache,target=/go/pkg \
GOOS=$TARGETOS GOARCH=$TARGETARCH go build \
-ldflags "-X reichard.io/antholume/config.version=`git describe --tags`" \
-o /opt/antholume/server
# Create Image
FROM busybox:1.36
COPY --from=alpine /etc/ssl/certs /etc/ssl/certs
COPY --from=alpine /usr/share/zoneinfo /usr/share/zoneinfo
COPY --from=build /opt/antholume /opt/antholume
WORKDIR /opt/antholume
EXPOSE 8585
ENTRYPOINT ["/opt/antholume/server", "serve"]

View File

@@ -1,25 +1,48 @@
build_local: build_local: build_tailwind
go mod download go mod download
rm -r ./build rm -r ./build || true
mkdir -p ./build mkdir -p ./build
cp -a ./templates ./build/templates
cp -a ./assets ./build/assets
env GOOS=linux GOARCH=amd64 CGO_ENABLED=1 CC="zig cc -target x86_64-linux" CXX="zig c++ -target x86_64-linux" go build -o ./build/server_linux_x86_64 env GOOS=linux GOARCH=amd64 go build -ldflags "-X reichard.io/antholume/config.version=`git describe --tags`" -o ./build/server_linux_amd64
env GOOS=darwin GOARCH=arm64 CGO_ENABLED=1 CGO_CFLAGS="-D_LARGEFILE64_SOURCE" go build -o ./build/server_darwin_arm64 env GOOS=linux GOARCH=arm64 go build -ldflags "-X reichard.io/antholume/config.version=`git describe --tags`" -o ./build/server_linux_arm64
env GOOS=darwin GOARCH=arm64 go build -ldflags "-X reichard.io/antholume/config.version=`git describe --tags`" -o ./build/server_darwin_arm64
env GOOS=darwin GOARCH=amd64 go build -ldflags "-X reichard.io/antholume/config.version=`git describe --tags`" -o ./build/server_darwin_amd64
docker_build_local: docker_build_local: build_tailwind
docker build -t bookmanager:latest . docker build -t antholume:latest .
docker_build_release_dev: docker_build_release_dev: build_tailwind
docker buildx build \ docker buildx build \
--platform linux/amd64,linux/arm64 \ --platform linux/amd64,linux/arm64 \
-t gitea.va.reichard.io/evan/bookmanager:dev \ -t gitea.va.reichard.io/evan/antholume:dev \
-f Dockerfile-BuildKit \
--push . --push .
docker_build_release_latest: docker_build_release_latest: build_tailwind
docker buildx build \ docker buildx build \
--platform linux/amd64,linux/arm64 \ --platform linux/amd64,linux/arm64 \
-t gitea.va.reichard.io/evan/bookmanager:latest \ -t gitea.va.reichard.io/evan/antholume:latest \
-t gitea.va.reichard.io/evan/bookmanager:`git describe --tags` \ -t gitea.va.reichard.io/evan/antholume:`git describe --tags` \
-f Dockerfile-BuildKit \
--push . --push .
build_tailwind:
tailwindcss build -o ./assets/style.css --minify
dev: build_tailwind
GIN_MODE=release \
CONFIG_PATH=./data \
DATA_PATH=./data \
SEARCH_ENABLED=true \
REGISTRATION_ENABLED=true \
COOKIE_SECURE=false \
COOKIE_AUTH_KEY=1234 \
LOG_LEVEL=debug go run main.go serve
clean:
rm -rf ./build
tests:
SET_TEST=set_val go test -coverpkg=./... ./... -coverprofile=./cover.out
go tool cover -html=./cover.out -o ./cover.html
rm ./cover.out

146
README.md
View File

@@ -1,124 +1,166 @@
# Book Manager <p><img align="center" src="https://gitea.va.reichard.io/evan/AnthoLume/raw/branch/master/banner.png"></p>
<p align="center"> <p align="center">
<a href="https://gitea.va.reichard.io/evan/BookManager/raw/branch/master/screenshots/pwa/login.png"> <a href="https://gitea.va.reichard.io/evan/AnthoLume/raw/branch/master/screenshots/pwa/login.png">
<img src="https://gitea.va.reichard.io/evan/BookManager/raw/branch/master/screenshots/pwa/login.png" width="19%"> <img src="https://gitea.va.reichard.io/evan/AnthoLume/raw/branch/master/screenshots/pwa/login.png" width="19%">
</a> </a>
<a href="https://gitea.va.reichard.io/evan/BookManager/raw/branch/master/screenshots/pwa/home.png"> <a href="https://gitea.va.reichard.io/evan/AnthoLume/raw/branch/master/screenshots/pwa/home.png">
<img src="https://gitea.va.reichard.io/evan/BookManager/raw/branch/master/screenshots/pwa/home.png" width="19%"> <img src="https://gitea.va.reichard.io/evan/AnthoLume/raw/branch/master/screenshots/pwa/home.png" width="19%">
</a> </a>
<a href="https://gitea.va.reichard.io/evan/BookManager/raw/branch/master/screenshots/pwa/documents.png"> <a href="https://gitea.va.reichard.io/evan/AnthoLume/raw/branch/master/screenshots/pwa/documents.png">
<img src="https://gitea.va.reichard.io/evan/BookManager/raw/branch/master/screenshots/pwa/documents.png" width="19%"> <img src="https://gitea.va.reichard.io/evan/AnthoLume/raw/branch/master/screenshots/pwa/documents.png" width="19%">
</a> </a>
<a href="https://gitea.va.reichard.io/evan/BookManager/raw/branch/master/screenshots/pwa/document.png"> <a href="https://gitea.va.reichard.io/evan/AnthoLume/raw/branch/master/screenshots/pwa/document.png">
<img src="https://gitea.va.reichard.io/evan/BookManager/raw/branch/master/screenshots/pwa/document.png" width="19%"> <img src="https://gitea.va.reichard.io/evan/AnthoLume/raw/branch/master/screenshots/pwa/document.png" width="19%">
</a> </a>
<a href="https://gitea.va.reichard.io/evan/BookManager/raw/branch/master/screenshots/pwa/metadata.png"> <a href="https://gitea.va.reichard.io/evan/AnthoLume/raw/branch/master/screenshots/pwa/metadata.png">
<img src="https://gitea.va.reichard.io/evan/BookManager/raw/branch/master/screenshots/pwa/metadata.png" width="19%"> <img src="https://gitea.va.reichard.io/evan/AnthoLume/raw/branch/master/screenshots/pwa/metadata.png" width="19%">
</a> </a>
</p> </p>
<p align="center"> <p align="center">
<a href="https://gitea.va.reichard.io/evan/BookManager/src/branch/master/screenshots/web/README.md"> <strong><a href="https://gitea.va.reichard.io/evan/AnthoLume/src/branch/master/screenshots">Screenshots</a></strong> •
--- WEB --- <strong><a href="https://antholume-demo.cloud.reichard.io/">Demo Server</a></strong>
</a> </p>
<a href="https://gitea.va.reichard.io/evan/BookManager/src/branch/master/screenshots/pwa/README.md"> <p align="center"><strong>user:</strong> demo • <strong>pass:</strong> demo</p>
--- PWA ---
<p align="center">
<a href="https://drone.va.reichard.io/evan/AnthoLume" target="_blank">
<img src="https://drone.va.reichard.io/api/badges/evan/AnthoLume/status.svg">
</a> </a>
</p> </p>
--- ---
This is BookManager! Will probably be renamed at some point. This repository contains: AnthoLume is a Progressive Web App (PWA) that manages your EPUB documents, provides an EPUB reader, and tracks your reading activity! It also has a [KOReader KOSync](https://github.com/koreader/koreader-sync-server) compatible API, and a [KOReader](https://github.com/koreader/koreader) Plugin used to sync activity from your Kindle. Some additional features include:
- [KOReader KOSync](https://github.com/koreader/koreader-sync-server) Compatible API - OPDS API Endpoint
- KOReader Plugin (See `client` subfolder) - Local / Offline Reader (via ServiceWorker)
- WebApp - Metadata Scraping (Thanks [OpenLibrary](https://openlibrary.org/) & [Google Books API](https://developers.google.com/books/docs/v1/getting_started))
- Words / Minute (WPM) Tracking & Leaderboard (Amongst Server Users)
In additional to the compatible KOSync API's, we add: While some features require JavaScript (Service Worker & EPUB Reader), we make an effort to limit JavaScript usage. Outside of the two aforementioned features, no JavaScript is used.
- Additional APIs to automatically upload reading statistics ## Server
- Automatically upload documents to the server (can download in the "Documents" view)
- Book metadata scraping (Thanks [OpenLibrary](https://openlibrary.org/) & [Google Books API](https://developers.google.com/books/docs/v1/getting_started))
- No JavaScript! All information is generated server side.
# Server Docker Image: `docker pull gitea.va.reichard.io/evan/antholume:latest`
Docker Image: `docker pull gitea.va.reichard.io/evan/bookmanager:latest` ### Local / Offline Reader
## KOSync API The Local / Offline reader allows you to use any AnthoLume server as a standalone offline accessible reading app! Some features:
- Add local EPUB documents
- Read both local and any cached server documents
- Maintains progress for all types of documents (server / local)
- Uploads any progress or activity for cached server documents once the internet is accessible
### KOSync API
The KOSync compatible API endpoint is located at: `http(s)://<SERVER>/api/ko` The KOSync compatible API endpoint is located at: `http(s)://<SERVER>/api/ko`
## Quick Start ### OPDS API
The OPDS API endpoint is located at: `http(s)://<SERVER>/api/opds`
### Quick Start
**NOTE**: If you're accessing your instance over HTTP (not HTTPS), you must set `COOKIE_SECURE=false`, otherwise you will not be able to login.
```bash ```bash
# Make Data Directory # Make Data Directory
mkdir -p bookmanager_data mkdir -p antholume_data
# Run Server # Run Server
docker run \ docker run \
-p 8585:8585 \ -p 8585:8585 \
-e COOKIE_SECURE=false \
-e REGISTRATION_ENABLED=true \ -e REGISTRATION_ENABLED=true \
-v ./bookmanager_data:/config \ -v ./antholume_data:/config \
-v ./bookmanager_data:/data \ -v ./antholume_data:/data \
gitea.va.reichard.io/evan/bookmanager:latest gitea.va.reichard.io/evan/antholume:latest
``` ```
The service is now accessible at: `http://localhost:8585`. I recommend registering an account and then disabling registration unless you expect more users. The service is now accessible at: `http://localhost:8585`. I recommend registering an account and then disabling registration unless you expect more users.
## Configuration ### Configuration
| Environment Variable | Default Value | Description | | Environment Variable | Default Value | Description |
| -------------------- | ------------- | -------------------------------------------------------------------- | | -------------------- | ------------- | -------------------------------------------------------------------------- |
| DATABASE_TYPE | SQLite | Currently only "SQLite" is supported | | DATABASE_TYPE | SQLite | Currently only "SQLite" is supported |
| DATABASE_NAME | bbank | The database name, or in SQLite's case, the filename | | DATABASE_NAME | antholume | The database name, or in SQLite's case, the filename |
| DATABASE_PASSWORD | <EMPTY> | Currently not used. Placeholder for potential alternative DB support |
| CONFIG_PATH | /config | Directory where to store SQLite's DB | | CONFIG_PATH | /config | Directory where to store SQLite's DB |
| DATA_PATH | /data | Directory where to store the documents and cover metadata | | DATA_PATH | /data | Directory where to store the documents and cover metadata |
| LISTEN_PORT | 8585 | Port the server listens at | | LISTEN_PORT | 8585 | Port the server listens at |
| LOG_LEVEL | info | Set server log level |
| REGISTRATION_ENABLED | false | Whether to allow registration (applies to both WebApp & KOSync API) | | REGISTRATION_ENABLED | false | Whether to allow registration (applies to both WebApp & KOSync API) |
| COOKIE_SESSION_KEY | <EMPTY> | Optional secret cookie session key (auto generated if not provided) | | COOKIE_AUTH_KEY | <EMPTY> | Optional secret cookie authentication key (auto generated if not provided) |
| COOKIE_ENC_KEY | <EMPTY> | Optional secret cookie encryption key (16 or 32 bytes) |
| COOKIE_SECURE | true | Set Cookie `Secure` attribute (i.e. only works over HTTPS) |
| COOKIE_HTTP_ONLY | true | Set Cookie `HttpOnly` attribute (i.e. inacessible via JavaScript) |
# Client (KOReader Plugin) ## Security
See documentation in the `client` subfolder: [SyncNinja](https://gitea.va.reichard.io/evan/BookManager/src/branch/master/client/) ### Authentication
# Development - _Web App / PWA_ - Session based token (7 day expiry, refresh after 6 days)
- _KOSync & SyncNinja API_ - Header based - `X-Auth-User` & `X-Auth-Key` (KOSync compatibility)
- _OPDS API_ - Basic authentication (KOReader OPDS compatibility)
SQLC Generation (v1.21.0): ### Notes
- Credentials are the same amongst all endpoints
- The native KOSync plugin sends an MD5 hash of the password. Due to that:
- We store an Argon2 hash _and_ per-password salt of the MD5 hashed original password
## Client (KOReader Plugin)
See documentation in the `client` subfolder: [SyncNinja](https://gitea.va.reichard.io/evan/AnthoLume/src/branch/master/client/)
## Development
SQLC Generation (v1.26.0):
```bash ```bash
go install github.com/sqlc-dev/sqlc/cmd/sqlc@latest go install github.com/sqlc-dev/sqlc/cmd/sqlc@latest
~/go/bin/sqlc generate ~/go/bin/sqlc generate
``` ```
Goose Migrations:
```bash
go install github.com/pressly/goose/v3/cmd/goose@latest
```
Run Development: Run Development:
```bash ```bash
CONFIG_PATH=./data DATA_PATH=./data go run main.go serve CONFIG_PATH=./data DATA_PATH=./data REGISTRATION_ENABLED=true go run main.go serve
``` ```
# Building ## Building
The `Dockerfile` and `Makefile` contain the build information: The `Dockerfile` and `Makefile` contain the build information:
```bash ```bash
# Build Local (Linux & Darwin - arm64 & amd64)
make build_local
# Build Local Docker Image # Build Local Docker Image
make docker_build_local make docker_build_local
# Push Latest # Build Docker & Push Latest or Dev (Linux - arm64 & amd64)
make docker_build_release_latest make docker_build_release_latest
``` make docker_build_release_dev
If manually building, you must enable CGO: # Generate Tailwind CSS
make build_tailwind
```bash # Clean Local Build
# Download Dependencies make clean
go mod download
# Compile (Binary `./bookmanager`) # Tests (Unit & Integration - Google Books API)
CGO_ENABLED=1 CGO_CFLAGS="-D_LARGEFILE64_SOURCE" go build -o /bookmanager make tests_unit
make tests_integration
``` ```
## Notes ## Notes

BIN
_test_files/alice.epub Normal file

Binary file not shown.

View File

@@ -1,140 +1,369 @@
package api package api
import ( import (
"crypto/rand" "context"
"fmt"
"html/template" "html/template"
"io/fs"
"net/http" "net/http"
"path"
"path/filepath"
"strings"
"time"
"github.com/gin-contrib/multitemplate" "github.com/gin-contrib/multitemplate"
"github.com/gin-contrib/sessions" "github.com/gin-contrib/sessions"
"github.com/gin-contrib/sessions/cookie" "github.com/gin-contrib/sessions/cookie"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
"github.com/microcosm-cc/bluemonday" "github.com/microcosm-cc/bluemonday"
"github.com/pkg/errors"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
"reichard.io/bbank/config" "reichard.io/antholume/config"
"reichard.io/bbank/database" "reichard.io/antholume/database"
"reichard.io/bbank/graph" "reichard.io/antholume/utils"
"reichard.io/bbank/utils"
) )
type API struct { type API struct {
Router *gin.Engine db *database.DBManager
Config *config.Config cfg *config.Config
DB *database.DBManager assets fs.FS
HTMLPolicy *bluemonday.Policy httpServer *http.Server
templates map[string]*template.Template
userAuthCache map[string]string
} }
func NewApi(db *database.DBManager, c *config.Config) *API { var htmlPolicy = bluemonday.StrictPolicy()
func NewApi(db *database.DBManager, c *config.Config, assets fs.FS) *API {
api := &API{ api := &API{
HTMLPolicy: bluemonday.StrictPolicy(), db: db,
Router: gin.Default(), cfg: c,
Config: c, assets: assets,
DB: db, templates: make(map[string]*template.Template),
userAuthCache: make(map[string]string),
} }
// Assets & Web App Templates // Create router
api.Router.Static("/assets", "./assets") router := gin.New()
// Generate Secure Token // Add server
api.httpServer = &http.Server{
Handler: router,
Addr: (":" + c.ListenPort),
}
// Add global logging middleware
router.Use(loggingMiddleware)
// Add global template loader middleware (develop)
if c.Version == "develop" {
log.Info("utilizing debug template loader")
router.Use(api.templateMiddleware(router))
}
// Assets & web app templates
assetsDir, _ := fs.Sub(assets, "assets")
router.StaticFS("/assets", http.FS(assetsDir))
// Generate auth token
var newToken []byte var newToken []byte
var err error var err error
if c.CookieAuthKey != "" {
if c.CookieSessionKey != "" { log.Info("utilizing environment cookie auth key")
log.Info("[NewApi] Utilizing Environment Cookie Session Key") newToken = []byte(c.CookieAuthKey)
newToken = []byte(c.CookieSessionKey)
} else { } else {
log.Info("[NewApi] Generating Cookie Session Key") log.Info("generating cookie auth key")
newToken, err = generateToken(64) newToken, err = utils.GenerateToken(64)
if err != nil { if err != nil {
panic("Unable to generate secure token") log.Panic("unable to generate cookie auth key")
} }
} }
// Configure Cookie Session Store // Set enc token
store := cookie.NewStore(newToken) store := cookie.NewStore(newToken)
if c.CookieEncKey != "" {
if len(c.CookieEncKey) == 16 || len(c.CookieEncKey) == 32 {
log.Info("utilizing environment cookie encryption key")
store = cookie.NewStore(newToken, []byte(c.CookieEncKey))
} else {
log.Panic("invalid cookie encryption key (must be 16 or 32 bytes)")
}
}
// Configure cookie session store
store.Options(sessions.Options{ store.Options(sessions.Options{
MaxAge: 60 * 60 * 24, MaxAge: 60 * 60 * 24 * 7,
Secure: true, Secure: c.CookieSecure,
HttpOnly: true, HttpOnly: c.CookieHTTPOnly,
SameSite: http.SameSiteStrictMode, SameSite: http.SameSiteStrictMode,
}) })
api.Router.Use(sessions.Sessions("token", store)) router.Use(sessions.Sessions("token", store))
// Register Web App Route // Register web app route
api.registerWebAppRoutes() api.registerWebAppRoutes(router)
// Register API Routes // Register API routes
apiGroup := api.Router.Group("/api") apiGroup := router.Group("/api")
api.registerKOAPIRoutes(apiGroup) api.registerKOAPIRoutes(apiGroup)
api.registerOPDSRoutes(apiGroup)
return api return api
} }
func (api *API) registerWebAppRoutes() { func (api *API) Start() error {
// Define Templates & Helper Functions return api.httpServer.ListenAndServe()
render := multitemplate.NewRenderer() }
helperFuncs := template.FuncMap{
"GetSVGGraphData": graph.GetSVGGraphData, func (api *API) Stop() error {
"GetUTCOffsets": utils.GetUTCOffsets, // Stop server
"NiceSeconds": utils.NiceSeconds, ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
err := api.httpServer.Shutdown(ctx)
if err != nil {
return err
} }
render.AddFromFilesFuncs("login", helperFuncs, "templates/login.html") // Close DB
render.AddFromFilesFuncs("home", helperFuncs, "templates/base.html", "templates/home.html") return api.db.DB.Close()
render.AddFromFilesFuncs("graphs", helperFuncs, "templates/base.html", "templates/graphs.html") }
render.AddFromFilesFuncs("settings", helperFuncs, "templates/base.html", "templates/settings.html")
render.AddFromFilesFuncs("activity", helperFuncs, "templates/base.html", "templates/activity.html")
render.AddFromFilesFuncs("documents", helperFuncs, "templates/base.html", "templates/documents.html")
render.AddFromFilesFuncs("document", helperFuncs, "templates/base.html", "templates/document.html")
api.Router.HTMLRender = render func (api *API) registerWebAppRoutes(router *gin.Engine) {
// Generate templates
router.HTMLRender = *api.generateTemplates()
api.Router.GET("/manifest.json", api.webManifest) // Static assets (required @ root)
api.Router.GET("/login", api.createAppResourcesRoute("login")) router.GET("/manifest.json", api.appWebManifest)
api.Router.GET("/register", api.createAppResourcesRoute("login", gin.H{"Register": true})) router.GET("/favicon.ico", api.appFaviconIcon)
api.Router.GET("/logout", api.authWebAppMiddleware, api.authLogout) router.GET("/sw.js", api.appServiceWorker)
api.Router.POST("/login", api.authFormLogin)
api.Router.POST("/register", api.authFormRegister)
api.Router.GET("/", api.authWebAppMiddleware, api.createAppResourcesRoute("home")) // Local / offline static pages (no template, no auth)
api.Router.GET("/settings", api.authWebAppMiddleware, api.createAppResourcesRoute("settings")) router.GET("/local", api.appLocalDocuments)
api.Router.POST("/settings", api.authWebAppMiddleware, api.editSettings)
api.Router.GET("/activity", api.authWebAppMiddleware, api.createAppResourcesRoute("activity"))
api.Router.GET("/documents", api.authWebAppMiddleware, api.createAppResourcesRoute("documents"))
api.Router.GET("/documents/:document", api.authWebAppMiddleware, api.createAppResourcesRoute("document"))
api.Router.GET("/documents/:document/file", api.authWebAppMiddleware, api.downloadDocumentFile)
api.Router.GET("/documents/:document/cover", api.authWebAppMiddleware, api.getDocumentCover)
api.Router.POST("/documents/:document/edit", api.authWebAppMiddleware, api.editDocument)
api.Router.POST("/documents/:document/identify", api.authWebAppMiddleware, api.identifyDocument)
api.Router.POST("/documents/:document/delete", api.authWebAppMiddleware, api.deleteDocument)
// TODO // Reader (reader page, document progress, devices)
api.Router.GET("/graphs", api.authWebAppMiddleware, baseResourceRoute("graphs")) router.GET("/reader", api.appDocumentReader)
router.GET("/reader/devices", api.authWebAppMiddleware, api.appGetDevices)
router.GET("/reader/progress/:document", api.authWebAppMiddleware, api.appGetDocumentProgress)
// Web app
router.GET("/", api.authWebAppMiddleware, api.appGetHome)
router.GET("/activity", api.authWebAppMiddleware, api.appGetActivity)
router.GET("/progress", api.authWebAppMiddleware, api.appGetProgress)
router.GET("/documents", api.authWebAppMiddleware, api.appGetDocuments)
router.GET("/documents/:document", api.authWebAppMiddleware, api.appGetDocument)
router.GET("/documents/:document/cover", api.authWebAppMiddleware, api.createGetCoverHandler(appErrorPage))
router.GET("/documents/:document/file", api.authWebAppMiddleware, api.createDownloadDocumentHandler(appErrorPage))
router.GET("/login", api.appGetLogin)
router.GET("/logout", api.authWebAppMiddleware, api.appAuthLogout)
router.GET("/register", api.appGetRegister)
router.GET("/settings", api.authWebAppMiddleware, api.appGetSettings)
router.GET("/admin/logs", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appGetAdminLogs)
router.GET("/admin/import", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appGetAdminImport)
router.POST("/admin/import", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appPerformAdminImport)
router.GET("/admin/users", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appGetAdminUsers)
router.POST("/admin/users", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appUpdateAdminUsers)
router.GET("/admin", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appGetAdmin)
router.POST("/admin", api.authWebAppMiddleware, api.authAdminWebAppMiddleware, api.appPerformAdminAction)
router.POST("/login", api.appAuthLogin)
router.POST("/register", api.appAuthRegister)
// Demo mode enabled configuration
if api.cfg.DemoMode {
router.POST("/documents", api.authWebAppMiddleware, api.appDemoModeError)
router.POST("/documents/:document/delete", api.authWebAppMiddleware, api.appDemoModeError)
router.POST("/documents/:document/edit", api.authWebAppMiddleware, api.appDemoModeError)
router.POST("/documents/:document/identify", api.authWebAppMiddleware, api.appDemoModeError)
router.POST("/settings", api.authWebAppMiddleware, api.appDemoModeError)
} else {
router.POST("/documents", api.authWebAppMiddleware, api.appUploadNewDocument)
router.POST("/documents/:document/delete", api.authWebAppMiddleware, api.appDeleteDocument)
router.POST("/documents/:document/edit", api.authWebAppMiddleware, api.appEditDocument)
router.POST("/documents/:document/identify", api.authWebAppMiddleware, api.appIdentifyDocument)
router.POST("/settings", api.authWebAppMiddleware, api.appEditSettings)
}
// Search enabled configuration
if api.cfg.SearchEnabled {
router.GET("/search", api.authWebAppMiddleware, api.appGetSearch)
router.POST("/search", api.authWebAppMiddleware, api.appSaveNewDocument)
}
} }
func (api *API) registerKOAPIRoutes(apiGroup *gin.RouterGroup) { func (api *API) registerKOAPIRoutes(apiGroup *gin.RouterGroup) {
koGroup := apiGroup.Group("/ko") koGroup := apiGroup.Group("/ko")
koGroup.POST("/users/create", api.createUser) // KO sync routes (webapp uses - progress & activity)
koGroup.GET("/users/auth", api.authAPIMiddleware, api.authorizeUser) koGroup.GET("/documents/:document/file", api.authKOMiddleware, api.createDownloadDocumentHandler(apiErrorPage))
koGroup.GET("/syncs/progress/:document", api.authKOMiddleware, api.koGetProgress)
koGroup.GET("/users/auth", api.authKOMiddleware, api.koAuthorizeUser)
koGroup.POST("/activity", api.authKOMiddleware, api.koAddActivities)
koGroup.POST("/syncs/activity", api.authKOMiddleware, api.koCheckActivitySync)
koGroup.POST("/users/create", api.koAuthRegister)
koGroup.PUT("/syncs/progress", api.authKOMiddleware, api.koSetProgress)
koGroup.PUT("/syncs/progress", api.authAPIMiddleware, api.setProgress) // Demo mode enabled configuration
koGroup.GET("/syncs/progress/:document", api.authAPIMiddleware, api.getProgress) if api.cfg.DemoMode {
koGroup.POST("/documents", api.authKOMiddleware, api.koDemoModeJSONError)
koGroup.POST("/documents", api.authAPIMiddleware, api.addDocuments) koGroup.POST("/syncs/documents", api.authKOMiddleware, api.koDemoModeJSONError)
koGroup.POST("/syncs/documents", api.authAPIMiddleware, api.checkDocumentsSync) koGroup.PUT("/documents/:document/file", api.authKOMiddleware, api.koDemoModeJSONError)
koGroup.PUT("/documents/:document/file", api.authAPIMiddleware, api.uploadDocumentFile) } else {
koGroup.GET("/documents/:document/file", api.authAPIMiddleware, api.downloadDocumentFile) koGroup.POST("/documents", api.authKOMiddleware, api.koAddDocuments)
koGroup.POST("/syncs/documents", api.authKOMiddleware, api.koCheckDocumentsSync)
koGroup.POST("/activity", api.authAPIMiddleware, api.addActivities) koGroup.PUT("/documents/:document/file", api.authKOMiddleware, api.koUploadExistingDocument)
koGroup.POST("/syncs/activity", api.authAPIMiddleware, api.checkActivitySync)
}
func generateToken(n int) ([]byte, error) {
b := make([]byte, n)
_, err := rand.Read(b)
if err != nil {
return nil, err
} }
return b, nil }
// registerOPDSRoutes registers the OPDS catalog endpoints under /api/opds.
// Cover & file downloads reuse the generic handlers shared with the web app,
// parameterized with the API error renderer instead of the HTML error page.
func (api *API) registerOPDSRoutes(apiGroup *gin.RouterGroup) {
	opdsGroup := apiGroup.Group("/opds")

	// OPDS routes - every endpoint is guarded by the OPDS auth middleware.
	opdsGroup.GET("", api.authOPDSMiddleware, api.opdsEntry)
	opdsGroup.GET("/", api.authOPDSMiddleware, api.opdsEntry)
	opdsGroup.GET("/search.xml", api.authOPDSMiddleware, api.opdsSearchDescription)
	opdsGroup.GET("/documents", api.authOPDSMiddleware, api.opdsDocuments)
	opdsGroup.GET("/documents/:document/cover", api.authOPDSMiddleware, api.createGetCoverHandler(apiErrorPage))
	opdsGroup.GET("/documents/:document/file", api.authOPDSMiddleware, api.createDownloadDocumentHandler(apiErrorPage))
}
// generateTemplates builds the multitemplate renderer from the embedded
// assets FS: a shared base template with helper funcs, then SVGs and
// components (accumulated into the base), then pages (each parsed into a
// clone of the base so pages don't overwrite each other). The parsed set is
// cached on api.templates. On any error a partially-populated renderer is
// returned after logging - the server still starts.
func (api *API) generateTemplates() *multitemplate.Renderer {
	// Define templates & helper functions
	render := multitemplate.NewRenderer()
	templates := make(map[string]*template.Template)
	helperFuncs := template.FuncMap{
		"dict":            dict,
		"slice":           slice,
		"fields":          fields,
		"getSVGGraphData": getSVGGraphData,
		"getTimeZones":    getTimeZones,
		"hasPrefix":       strings.HasPrefix,
		"niceNumbers":     niceNumbers,
		"niceSeconds":     niceSeconds,
	}

	// Load Base
	b, err := fs.ReadFile(api.assets, "templates/base.tmpl")
	if err != nil {
		log.Errorf("error reading base template: %v", err)
		return &render
	}

	// Parse Base
	baseTemplate, err := template.New("base").Funcs(helperFuncs).Parse(string(b))
	if err != nil {
		log.Errorf("error parsing base template: %v", err)
		return &render
	}

	// Load SVGs (accumulate into the shared base)
	err = api.loadTemplates("svg", baseTemplate, templates, false)
	if err != nil {
		log.Errorf("error loading svg templates: %v", err)
		return &render
	}

	// Load Components (accumulate into the shared base)
	err = api.loadTemplates("component", baseTemplate, templates, false)
	if err != nil {
		log.Errorf("error loading component templates: %v", err)
		return &render
	}

	// Load Pages (cloned per page - see loadTemplates cloneBase)
	err = api.loadTemplates("page", baseTemplate, templates, true)
	if err != nil {
		log.Errorf("error loading page templates: %v", err)
		return &render
	}

	// Populate Renderer & cache the parsed templates on the API
	api.templates = templates
	for templateName, templateValue := range templates {
		render.Add(templateName, templateValue)
	}

	return &render
}
// loadTemplates parses every file under templates/<basePath>s (e.g. "svg" ->
// templates/svgs) relative to the assets FS, registering each template in
// allTemplates under the name "<basePath>/<filename-without-ext>".
//
// When cloneBase is true (pages), the base template is cloned before each
// parse so one page's definitions don't stomp another's. When false (svgs &
// components), definitions accumulate into the shared base so later
// templates can reference earlier ones.
func (api *API) loadTemplates(
	basePath string,
	baseTemplate *template.Template,
	allTemplates map[string]*template.Template,
	cloneBase bool,
) error {
	// Load Templates (Pluralize)
	templateDirectory := fmt.Sprintf("templates/%ss", basePath)
	allFiles, err := fs.ReadDir(api.assets, templateDirectory)
	if err != nil {
		return errors.Wrap(err, fmt.Sprintf("unable to read template dir: %s", templateDirectory))
	}

	// Generate Templates
	for _, item := range allFiles {
		templateFile := item.Name()
		templatePath := path.Join(templateDirectory, templateFile)
		templateName := fmt.Sprintf("%s/%s", basePath, strings.TrimSuffix(templateFile, filepath.Ext(templateFile)))

		// Read Template
		b, err := fs.ReadFile(api.assets, templatePath)
		if err != nil {
			return errors.Wrap(err, fmt.Sprintf("unable to read template: %s", templateName))
		}

		// Clone? (Pages - Don't Stomp)
		if cloneBase {
			baseTemplate = template.Must(baseTemplate.Clone())
		}

		// Parse Template
		// NOTE(review): baseTemplate is reassigned every iteration, so with
		// cloneBase each page clones the previous page's template as well.
		// Harmless for lookup-by-name rendering, but confirm it's intended.
		baseTemplate, err = baseTemplate.New(templateName).Parse(string(b))
		if err != nil {
			return errors.Wrap(err, fmt.Sprintf("unable to parse template: %s", templateName))
		}

		allTemplates[templateName] = baseTemplate
	}

	return nil
}
// templateMiddleware rebuilds all templates before every request so template
// edits are picked up without a restart. Installed only when the configured
// version is "develop" (see the NewApi constructor in this file).
func (api *API) templateMiddleware(router *gin.Engine) gin.HandlerFunc {
	return func(c *gin.Context) {
		router.HTMLRender = *api.generateTemplates()
		c.Next()
	}
}
func loggingMiddleware(c *gin.Context) {
// Start timer
startTime := time.Now()
// Process request
c.Next()
// End timer
endTime := time.Now()
latency := endTime.Sub(startTime).Round(time.Microsecond)
// Log data
logData := log.Fields{
"type": "access",
"ip": c.ClientIP(),
"latency": latency.String(),
"status": c.Writer.Status(),
"method": c.Request.Method,
"path": c.Request.URL.Path,
}
// Get username
var auth authData
if data, _ := c.Get("Authorization"); data != nil {
auth = data.(authData)
}
// Log user
if auth.UserName != "" {
logData["user"] = auth.UserName
}
// Log result
log.WithFields(logData).Info(fmt.Sprintf("%s %s", c.Request.Method, c.Request.URL.Path))
} }

950
api/app-admin-routes.go Normal file
View File

@@ -0,0 +1,950 @@
package api
import (
	"archive/zip"
	"bufio"
	"context"
	"crypto/md5"
	"database/sql"
	"encoding/json"
	"fmt"
	"io"
	"io/fs"
	"mime/multipart"
	"net/http"
	"os"
	"path/filepath"
	"slices"
	"sort"
	"strings"
	"time"

	argon2 "github.com/alexedwards/argon2id"
	"github.com/gabriel-vasile/mimetype"
	"github.com/gin-gonic/gin"
	"github.com/itchyny/gojq"
	"github.com/pkg/errors"
	log "github.com/sirupsen/logrus"
	"reichard.io/antholume/database"
	"reichard.io/antholume/metadata"
	"reichard.io/antholume/utils"
)
// adminAction is the maintenance operation requested via the admin form.
type adminAction string

const (
	adminBackup        adminAction = "BACKUP"
	adminRestore       adminAction = "RESTORE"
	adminMetadataMatch adminAction = "METADATA_MATCH"
	adminCacheTables   adminAction = "CACHE_TABLES"
)

// requestAdminAction is the form payload for POST /admin.
type requestAdminAction struct {
	Action adminAction `form:"action"`

	// Backup Action
	BackupTypes []backupType `form:"backup_types"`

	// Restore Action
	RestoreFile *multipart.FileHeader `form:"restore_file"`
}

// importType selects whether imported files are referenced in place (DIRECT)
// or copied into the data directory (COPY).
type importType string

const (
	importDirect importType = "DIRECT"
	importCopy   importType = "COPY"
)

// requestAdminImport is the query/form payload for the admin import pages.
type requestAdminImport struct {
	Directory string     `form:"directory"`
	Select    string     `form:"select"`
	Type      importType `form:"type"`
}

// operationType is the user-management operation requested from the form.
type operationType string

const (
	opUpdate operationType = "UPDATE"
	opCreate operationType = "CREATE"
	opDelete operationType = "DELETE"
)

// requestAdminUpdateUser is the form payload for POST /admin/users.
type requestAdminUpdateUser struct {
	User      string        `form:"user"`
	Password  *string       `form:"password"`
	IsAdmin   *bool         `form:"is_admin"`
	Operation operationType `form:"operation"`
}

// requestAdminLogs is the query payload for the admin log viewer filter.
type requestAdminLogs struct {
	Filter string `form:"filter"`
}

// importStatus is the per-file outcome of an import run.
type importStatus string

const (
	importFailed  importStatus = "FAILED"
	importSuccess importStatus = "SUCCESS"
	importExists  importStatus = "EXISTS"
)

// importResult records the outcome of importing a single file.
type importResult struct {
	ID     string // partial MD5 identifying the document
	Name   string // "<author> - <title>" display name
	Path   string // path relative to the import (or data) directory
	Status importStatus
	Error  error
}
// appPerformAdminAction dispatches the admin page form POST to the selected
// maintenance action: metadata match (TODO), background cache-table refresh,
// restore from an uploaded backup ZIP, or streaming a backup ZIP download.
func (api *API) appPerformAdminAction(c *gin.Context) {
	templateVars, _ := api.getBaseTemplateVars("admin", c)

	var rAdminAction requestAdminAction
	if err := c.ShouldBind(&rAdminAction); err != nil {
		log.Error("Invalid Form Bind: ", err)
		appErrorPage(c, http.StatusBadRequest, "Invalid or missing form values")
		return
	}

	switch rAdminAction.Action {
	case adminMetadataMatch:
		// TODO
		// 1. Documents xref most recent metadata table?
		// 2. Select all / deselect?
	case adminCacheTables:
		// Refresh cached temp tables in the background; errors only logged.
		// NOTE(review): the goroutine reuses the request context `c`, which
		// ends when the request does - confirm CacheTempTables tolerates that.
		go func() {
			err := api.db.CacheTempTables(c)
			if err != nil {
				log.Error("Unable to cache temp tables: ", err)
			}
		}()
	case adminRestore:
		api.processRestoreFile(rAdminAction, c)
		return
	case adminBackup:
		// Vacuum before snapshotting the database
		_, err := api.db.DB.ExecContext(c, "VACUUM;")
		if err != nil {
			log.Error("Unable to vacuum DB: ", err)
			appErrorPage(c, http.StatusInternalServerError, "Unable to vacuum database")
			return
		}

		// Set Headers (timestamped attachment download)
		c.Header("Content-type", "application/octet-stream")
		c.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"AnthoLumeBackup_%s.zip\"", time.Now().Format("20060102150405")))

		// Stream Backup ZIP Archive
		c.Stream(func(w io.Writer) bool {
			// Map selected backup types onto the directories to include.
			var directories []string
			for _, item := range rAdminAction.BackupTypes {
				if item == backupCovers {
					directories = append(directories, "covers")
				} else if item == backupDocuments {
					directories = append(directories, "documents")
				}
			}

			err := api.createBackup(c, w, directories)
			if err != nil {
				log.Error("Backup Error: ", err)
			}

			// Returning false ends the stream after one write pass.
			return false
		})
		return
	}

	c.HTML(http.StatusOK, "page/admin", templateVars)
}
// appGetAdmin renders the admin landing page.
func (api *API) appGetAdmin(c *gin.Context) {
	vars, _ := api.getBaseTemplateVars("admin", c)
	c.HTML(http.StatusOK, "page/admin", vars)
}
// appGetAdminLogs renders the admin log viewer. The optional "filter" query
// parameter is either a quoted string (plain substring match against the
// pretty-printed line) or a JQ expression applied to each JSON log entry.
// Non-JSON lines are always passed through untouched; JSON lines are
// pretty-printed.
func (api *API) appGetAdminLogs(c *gin.Context) {
	templateVars, _ := api.getBaseTemplateVars("admin-logs", c)

	var rAdminLogs requestAdminLogs
	if err := c.ShouldBindQuery(&rAdminLogs); err != nil {
		log.Error("Invalid URI Bind")
		appErrorPage(c, http.StatusNotFound, "Invalid URI parameters")
		return
	}
	rAdminLogs.Filter = strings.TrimSpace(rAdminLogs.Filter)

	// Quoted filter -> substring match; anything else non-empty -> JQ.
	var jqFilter *gojq.Code
	var basicFilter string
	if strings.HasPrefix(rAdminLogs.Filter, "\"") && strings.HasSuffix(rAdminLogs.Filter, "\"") {
		basicFilter = rAdminLogs.Filter[1 : len(rAdminLogs.Filter)-1]
	} else if rAdminLogs.Filter != "" {
		parsed, err := gojq.Parse(rAdminLogs.Filter)
		if err != nil {
			log.Error("Unable to parse JQ filter")
			appErrorPage(c, http.StatusNotFound, "Unable to parse JQ filter")
			return
		}
		jqFilter, err = gojq.Compile(parsed)
		if err != nil {
			log.Error("Unable to compile JQ filter")
			appErrorPage(c, http.StatusNotFound, "Unable to compile JQ filter")
			return
		}
	}

	// Open Log File
	logPath := filepath.Join(api.cfg.ConfigPath, "logs/antholume.log")
	logFile, err := os.Open(logPath)
	if err != nil {
		appErrorPage(c, http.StatusBadRequest, "Missing AnthoLume log file")
		return
	}
	defer logFile.Close()

	// Collect matching log lines
	var logLines []string
	scanner := bufio.NewScanner(logFile)
	for scanner.Scan() {
		rawLog := scanner.Text()

		// Non-JSON lines pass through as-is.
		var jsonMap map[string]any
		err := json.Unmarshal([]byte(rawLog), &jsonMap)
		if err != nil {
			logLines = append(logLines, rawLog)
			continue
		}

		// Pretty-print; fall back to the raw line on failure.
		rawData, err := json.MarshalIndent(jsonMap, "", " ")
		if err != nil {
			logLines = append(logLines, rawLog)
			continue
		}

		// Basic substring filter - include only matches.
		if basicFilter != "" {
			if strings.Contains(string(rawData), basicFilter) {
				logLines = append(logLines, string(rawData))
			}
			continue
		}

		// No filter at all - include every line.
		// FIX: previously JSON lines were unconditionally dropped when no
		// filter was set, leaving the log page empty.
		if jqFilter == nil {
			logLines = append(logLines, string(rawData))
			continue
		}

		// JQ filter: include the line when the filter errors or yields a
		// non-nil result; render the filtered result when it marshals cleanly.
		result, _ := jqFilter.Run(jsonMap).Next()
		if _, ok := result.(error); ok {
			logLines = append(logLines, string(rawData))
			continue
		} else if result == nil {
			continue
		}

		filteredData, err := json.MarshalIndent(result, "", " ")
		if err == nil {
			rawData = filteredData
		}
		logLines = append(logLines, string(rawData))
	}
	// Surface (but don't fail on) scanner errors, e.g. over-long lines.
	if err := scanner.Err(); err != nil {
		log.Error("Unable to read log file: ", err)
	}

	templateVars["Data"] = logLines
	templateVars["Filter"] = rAdminLogs.Filter
	c.HTML(http.StatusOK, "page/admin-logs", templateVars)
}
// appGetAdminUsers renders the admin user-management page with all users.
func (api *API) appGetAdminUsers(c *gin.Context) {
	vars, _ := api.getBaseTemplateVars("admin-users", c)

	allUsers, err := api.db.Queries.GetUsers(c)
	if err != nil {
		log.Error("GetUsers DB Error: ", err)
		appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("GetUsers DB Error: %v", err))
		return
	}

	vars["Data"] = allUsers
	c.HTML(http.StatusOK, "page/admin-users", vars)
}
// appUpdateAdminUsers handles create / update / delete operations submitted
// from the admin user-management form, then re-renders the user list.
func (api *API) appUpdateAdminUsers(c *gin.Context) {
	vars, _ := api.getBaseTemplateVars("admin-users", c)

	var req requestAdminUpdateUser
	if err := c.ShouldBind(&req); err != nil {
		log.Error("Invalid URI Bind")
		appErrorPage(c, http.StatusNotFound, "Invalid user parameters")
		return
	}

	// Ensure Username
	if req.User == "" {
		appErrorPage(c, http.StatusInternalServerError, "User cannot be empty")
		return
	}

	// Dispatch on the requested operation.
	var opErr error
	switch req.Operation {
	case opCreate:
		opErr = api.createUser(c, req.User, req.Password, req.IsAdmin)
	case opUpdate:
		opErr = api.updateUser(c, req.User, req.Password, req.IsAdmin)
	case opDelete:
		opErr = api.deleteUser(c, req.User)
	default:
		appErrorPage(c, http.StatusNotFound, "Unknown user operation")
		return
	}
	if opErr != nil {
		appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("Unable to create or update user: %v", opErr))
		return
	}

	// Re-fetch and render the updated user list.
	allUsers, err := api.db.Queries.GetUsers(c)
	if err != nil {
		log.Error("GetUsers DB Error: ", err)
		appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("GetUsers DB Error: %v", err))
		return
	}

	vars["Data"] = allUsers
	c.HTML(http.StatusOK, "page/admin-users", vars)
}
// appGetAdminImport renders the directory browser used to pick an import
// source. With ?select= set it confirms the chosen directory; otherwise it
// lists the subdirectories of ?directory= (defaulting to the data path).
func (api *API) appGetAdminImport(c *gin.Context) {
	vars, _ := api.getBaseTemplateVars("admin-import", c)

	var req requestAdminImport
	if err := c.ShouldBindQuery(&req); err != nil {
		log.Error("Invalid URI Bind")
		appErrorPage(c, http.StatusNotFound, "Invalid directory")
		return
	}

	// Directory confirmed - show the selection view.
	if req.Select != "" {
		vars["SelectedDirectory"] = req.Select
		c.HTML(http.StatusOK, "page/admin-import", vars)
		return
	}

	// Default to the absolute data path when no directory was given.
	if req.Directory == "" {
		dPath, err := filepath.Abs(api.cfg.DataPath)
		if err != nil {
			log.Error("Absolute filepath error: ", req.Directory)
			appErrorPage(c, http.StatusNotFound, "Unable to get data directory absolute path")
			return
		}
		req.Directory = dPath
	}

	entries, err := os.ReadDir(req.Directory)
	if err != nil {
		log.Error("Invalid directory: ", req.Directory)
		appErrorPage(c, http.StatusNotFound, "Invalid directory")
		return
	}

	// Only subdirectories are browsable.
	subDirs := []string{}
	for _, entry := range entries {
		if entry.IsDir() {
			subDirs = append(subDirs, entry.Name())
		}
	}

	vars["CurrentPath"] = filepath.Clean(req.Directory)
	vars["Data"] = subDirs
	c.HTML(http.StatusOK, "page/admin-import", vars)
}
// appPerformAdminImport walks the selected directory and imports each file's
// metadata into the library. With type=COPY the file is first copied into
// the data directory; with type=DIRECT it is referenced in place. All
// upserts run inside one transaction; per-file results (success / exists /
// failed) are rendered on the results page.
func (api *API) appPerformAdminImport(c *gin.Context) {
	templateVars, _ := api.getBaseTemplateVars("admin-import", c)

	var rAdminImport requestAdminImport
	if err := c.ShouldBind(&rAdminImport); err != nil {
		log.Error("Invalid URI Bind")
		appErrorPage(c, http.StatusNotFound, "Invalid directory")
		return
	}

	// Get import directory
	importDirectory := filepath.Clean(rAdminImport.Directory)

	// Get data directory
	absoluteDataPath, _ := filepath.Abs(filepath.Join(api.cfg.DataPath, "documents"))

	// Validate different path
	if absoluteDataPath == importDirectory {
		appErrorPage(c, http.StatusBadRequest, "Directory is the same as data path")
		return
	}

	// Do Transaction
	tx, err := api.db.DB.Begin()
	if err != nil {
		log.Error("Transaction Begin DB Error:", err)
		// FIX: was apiErrorPage - this is a web app route; render the HTML error page.
		appErrorPage(c, http.StatusBadRequest, "Unknown error")
		return
	}

	// Rollback on early return. After a successful Commit, Rollback returns
	// sql.ErrTxDone, which is expected - don't log it as an error. (FIX)
	defer func() {
		if err := tx.Rollback(); err != nil && err != sql.ErrTxDone {
			log.Error("DB Rollback Error:", err)
		}
	}()
	qtx := api.db.Queries.WithTx(tx)

	// Track imports
	importResults := make([]importResult, 0)

	// Walk Directory & Import
	err = filepath.WalkDir(importDirectory, func(importPath string, f fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		if f.IsDir() {
			return nil
		}

		// Get relative path
		basePath := importDirectory
		relFilePath, err := filepath.Rel(importDirectory, importPath)
		if err != nil {
			log.Warnf("path error: %v", err)
			return nil
		}

		// Record this file's result no matter how the closure exits.
		iResult := importResult{
			Path:   relFilePath,
			Status: importFailed,
		}
		defer func() {
			importResults = append(importResults, iResult)
		}()

		// Get metadata
		fileMeta, err := metadata.GetMetadata(importPath)
		if err != nil {
			log.Errorf("metadata error: %v", err)
			iResult.Error = err
			return nil
		}
		iResult.ID = *fileMeta.PartialMD5
		iResult.Name = fmt.Sprintf("%s - %s", *fileMeta.Author, *fileMeta.Title)

		// Check already exists
		_, err = qtx.GetDocument(c, *fileMeta.PartialMD5)
		if err == nil {
			log.Warnf("document already exists: %s", *fileMeta.PartialMD5)
			iResult.Status = importExists
			return nil
		}

		// Import Copy - place the file into the data directory first.
		if rAdminImport.Type == importCopy {
			// Derive & Sanitize File Name
			relFilePath = deriveBaseFileName(fileMeta)
			safePath := filepath.Join(api.cfg.DataPath, "documents", relFilePath)

			// Open Source File
			srcFile, err := os.Open(importPath)
			if err != nil {
				log.Errorf("unable to open current file: %v", err)
				iResult.Error = err
				return nil
			}
			defer srcFile.Close()

			// Open Destination File
			destFile, err := os.Create(safePath)
			if err != nil {
				log.Errorf("unable to open destination file: %v", err)
				iResult.Error = err
				return nil
			}
			defer destFile.Close()

			// Copy File
			if _, err = io.Copy(destFile, srcFile); err != nil {
				log.Errorf("unable to save file: %v", err)
				iResult.Error = err
				return nil
			}

			// Update Base & Path to point at the copied location.
			basePath = filepath.Join(api.cfg.DataPath, "documents")
			iResult.Path = relFilePath
		}

		// Upsert document
		if _, err = qtx.UpsertDocument(c, database.UpsertDocumentParams{
			ID:          *fileMeta.PartialMD5,
			Title:       fileMeta.Title,
			Author:      fileMeta.Author,
			Description: fileMeta.Description,
			Md5:         fileMeta.MD5,
			Words:       fileMeta.WordCount,
			Filepath:    &relFilePath,
			Basepath:    &basePath,
		}); err != nil {
			log.Errorf("UpsertDocument DB Error: %v", err)
			iResult.Error = err
			return nil
		}

		iResult.Status = importSuccess
		return nil
	})
	if err != nil {
		appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("Import Failed: %v", err))
		return
	}

	// Commit transaction
	if err := tx.Commit(); err != nil {
		log.Error("Transaction Commit DB Error: ", err)
		appErrorPage(c, http.StatusInternalServerError, fmt.Sprintf("Import DB Error: %v", err))
		return
	}

	// Sort import results by status priority.
	sort.Slice(importResults, func(i int, j int) bool {
		return importStatusPriority(importResults[i].Status) <
			importStatusPriority(importResults[j].Status)
	})

	templateVars["Data"] = importResults
	c.HTML(http.StatusOK, "page/admin-import-results", templateVars)
}
// processRestoreFile validates an uploaded backup ZIP, snapshots the current
// data as a new backup, wipes the data directory, restores the archive
// contents, reloads the DB, and rotates all auth hashes before redirecting
// to the login page. Failures after data removal use log.Panic* because the
// instance is no longer in a consistent state.
func (api *API) processRestoreFile(rAdminAction requestAdminAction, c *gin.Context) {
	// Validate Type & Derive Extension on MIME
	uploadedFile, err := rAdminAction.RestoreFile.Open()
	if err != nil {
		log.Error("File Error: ", err)
		appErrorPage(c, http.StatusInternalServerError, "Unable to open file")
		return
	}
	fileMime, err := mimetype.DetectReader(uploadedFile)
	if err != nil {
		log.Error("MIME Error")
		appErrorPage(c, http.StatusInternalServerError, "Unable to detect filetype")
		return
	}
	fileExtension := fileMime.Extension()

	// Validate Extension
	if !slices.Contains([]string{".zip"}, fileExtension) {
		log.Error("Invalid FileType: ", fileExtension)
		appErrorPage(c, http.StatusBadRequest, "Invalid filetype")
		return
	}

	// Create Temp File
	// NOTE(review): Remove is deferred before Close, so at return the file is
	// removed first, then closed - fine on POSIX; confirm if Windows matters.
	tempFile, err := os.CreateTemp("", "restore")
	if err != nil {
		log.Warn("Temp File Create Error: ", err)
		appErrorPage(c, http.StatusInternalServerError, "Unable to create temp file")
		return
	}
	defer os.Remove(tempFile.Name())
	defer tempFile.Close()

	// Save Temp
	err = c.SaveUploadedFile(rAdminAction.RestoreFile, tempFile.Name())
	if err != nil {
		log.Error("File Error: ", err)
		appErrorPage(c, http.StatusInternalServerError, "Unable to save file")
		return
	}

	// ZIP Info (size needed for the zip reader)
	fileInfo, err := tempFile.Stat()
	if err != nil {
		log.Error("File Error: ", err)
		appErrorPage(c, http.StatusInternalServerError, "Unable to read file")
		return
	}

	// Create ZIP Reader
	zipReader, err := zip.NewReader(tempFile, fileInfo.Size())
	if err != nil {
		log.Error("ZIP Error: ", err)
		appErrorPage(c, http.StatusInternalServerError, "Unable to read zip")
		return
	}

	// Validate ZIP Contents
	// NOTE(review): the loop stops at the first antholume.db entry, so entries
	// after it are never checked against the covers/ & documents/ allowlist,
	// and a prefix check alone does not reject "../" traversal - verify the
	// extraction path is guarded in restoreData.
	hasDBFile := false
	hasUnknownFile := false
	for _, file := range zipReader.File {
		fileName := strings.TrimPrefix(file.Name, "/")
		if fileName == "antholume.db" {
			hasDBFile = true
			break
		} else if !strings.HasPrefix(fileName, "covers/") && !strings.HasPrefix(fileName, "documents/") {
			hasUnknownFile = true
			break
		}
	}

	// Invalid ZIP
	if !hasDBFile {
		log.Error("Invalid ZIP File - Missing DB")
		appErrorPage(c, http.StatusInternalServerError, "Invalid Restore ZIP - Missing DB")
		return
	} else if hasUnknownFile {
		log.Error("Invalid ZIP File - Invalid File(s)")
		appErrorPage(c, http.StatusInternalServerError, "Invalid Restore ZIP - Invalid File(s)")
		return
	}

	// Create Backup File (snapshot current state before destroying anything)
	backupFilePath := filepath.Join(api.cfg.ConfigPath, fmt.Sprintf("backups/AnthoLumeBackup_%s.zip", time.Now().Format("20060102150405")))
	backupFile, err := os.Create(backupFilePath)
	if err != nil {
		log.Error("Unable to create backup file: ", err)
		appErrorPage(c, http.StatusInternalServerError, "Unable to create backup file")
		return
	}
	defer backupFile.Close()

	// Save Backup File
	// NOTE(review): the bufio.Writer is never flushed, so the tail of the
	// safety backup may be lost - confirm and add w.Flush() if so.
	w := bufio.NewWriter(backupFile)
	err = api.createBackup(c, w, []string{"covers", "documents"})
	if err != nil {
		log.Error("Unable to save backup file: ", err)
		appErrorPage(c, http.StatusInternalServerError, "Unable to save backup file")
		return
	}

	// Remove Data
	err = api.removeData()
	if err != nil {
		log.Error("Unable to delete data: ", err)
		appErrorPage(c, http.StatusInternalServerError, "Unable to delete data")
		return
	}

	// Restore Data - beyond this point any failure is fatal (data removed).
	err = api.restoreData(zipReader)
	if err != nil {
		appErrorPage(c, http.StatusInternalServerError, "Unable to restore data")
		log.Panic("Unable to restore data: ", err)
	}

	// Reinit DB
	if err := api.db.Reload(c); err != nil {
		appErrorPage(c, http.StatusInternalServerError, "Unable to reload DB")
		log.Panicf("Unable to reload DB: %v", err)
	}

	// Rotate Auth Hashes
	if err := api.rotateAllAuthHashes(c); err != nil {
		appErrorPage(c, http.StatusInternalServerError, "Unable to rotate hashes")
		log.Panicf("Unable to rotate auth hashes: %v", err)
	}

	// Redirect to login page
	c.Redirect(http.StatusFound, "/login")
}
// restoreData extracts every entry of the uploaded restore ZIP into the
// configured data directory, after recreating the expected directory layout.
// Each entry is processed by a helper so file handles are closed per entry
// rather than accumulating via defer until the whole loop finishes (the
// original deferred both the reader and the destination file inside the
// loop). Entry names are also validated to stay inside DataPath.
func (api *API) restoreData(zipReader *zip.Reader) error {
	// Ensure Directories
	api.cfg.EnsureDirectories()

	// Restore Data
	for _, file := range zipReader.File {
		// Skip directory entries; EnsureDirectories created the layout.
		if file.FileInfo().IsDir() {
			continue
		}
		if err := api.restoreZipEntry(file); err != nil {
			return err
		}
	}
	return nil
}

// restoreZipEntry copies a single ZIP entry to its destination on disk.
// Open handles are closed when this helper returns, i.e. once per entry.
func (api *API) restoreZipEntry(file *zip.File) error {
	// Guard against zip-slip: the resolved path must remain inside DataPath.
	destPath := filepath.Join(api.cfg.DataPath, file.Name)
	if !strings.HasPrefix(destPath, filepath.Clean(api.cfg.DataPath)+string(os.PathSeparator)) {
		return fmt.Errorf("illegal file path in archive: %s", file.Name)
	}

	rc, err := file.Open()
	if err != nil {
		return err
	}
	defer rc.Close()

	destFile, err := os.Create(destPath)
	if err != nil {
		log.Errorf("error creating destination file: %v", err)
		return err
	}
	defer destFile.Close()

	// Copy the contents from the zip file to the destination file.
	if _, err := io.Copy(destFile, rc); err != nil {
		log.Errorf("Error copying file contents: %v", err)
		return err
	}
	return nil
}
// removeData deletes all managed state from the data directory: the cover
// and document folders plus the SQLite database file and its WAL/SHM
// sidecars. It stops and returns on the first deletion error.
func (api *API) removeData() error {
	for _, name := range []string{
		"covers",
		"documents",
		"antholume.db",
		"antholume.db-wal",
		"antholume.db-shm",
	} {
		target := filepath.Join(api.cfg.DataPath, name)
		if err := os.RemoveAll(target); err != nil {
			log.Errorf("Unable to delete %s: %v", name, err)
			return err
		}
	}
	return nil
}
// createBackup writes a ZIP backup to w containing the (vacuumed) database
// file plus the contents of the given data sub-directories (e.g. "covers",
// "documents"). The caller owns w and is responsible for flushing/closing
// any underlying buffered writer.
func (api *API) createBackup(ctx context.Context, w io.Writer, directories []string) error {
	// Vacuum DB so the on-disk database file is compact and self-contained.
	_, err := api.db.DB.ExecContext(ctx, "VACUUM;")
	if err != nil {
		return errors.Wrap(err, "Unable to vacuum database")
	}

	ar := zip.NewWriter(w)

	// exportWalker adds each regular file to the archive under
	// "<parentFolder>/<fileName>". The deferred Close fires when the
	// walker returns, i.e. once per visited file.
	exportWalker := func(currentPath string, f fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		if f.IsDir() {
			return nil
		}

		// Open File on Disk
		file, err := os.Open(currentPath)
		if err != nil {
			return err
		}
		defer file.Close()

		// Derive Export Structure
		fileName := filepath.Base(currentPath)
		folderName := filepath.Base(filepath.Dir(currentPath))

		// Create File in Export
		newF, err := ar.Create(filepath.Join(folderName, fileName))
		if err != nil {
			return err
		}

		// Copy File in Export
		_, err = io.Copy(newF, file)
		return err
	}

	// Get DB Path
	fileName := fmt.Sprintf("%s.db", api.cfg.DBName)
	dbLocation := filepath.Join(api.cfg.ConfigPath, fileName)

	// Copy Database File
	dbFile, err := os.Open(dbLocation)
	if err != nil {
		return err
	}
	defer dbFile.Close()
	newDbFile, err := ar.Create(fileName)
	if err != nil {
		return err
	}
	if _, err := io.Copy(newDbFile, dbFile); err != nil {
		return err
	}

	// Backup Covers & Documents
	for _, dir := range directories {
		if err := filepath.WalkDir(filepath.Join(api.cfg.DataPath, dir), exportWalker); err != nil {
			return err
		}
	}

	// BUG FIX: the Close error was previously ignored. Close writes the
	// ZIP central directory; a failure here means a corrupt archive.
	return ar.Close()
}
// isLastAdmin reports whether userID is the only remaining admin, i.e.
// whether no OTHER user currently has the admin flag set. Used to block
// demotion or deletion of the final administrator.
func (api *API) isLastAdmin(ctx context.Context, userID string) (bool, error) {
	allUsers, err := api.db.Queries.GetUsers(ctx)
	if err != nil {
		// BUG FIX: the previous Wrap embedded err in the message AND
		// wrapped it, printing the error twice. Wrap already appends err.
		return false, errors.Wrap(err, "GetUsers DB Error")
	}
	for _, user := range allUsers {
		if user.Admin && user.ID != userID {
			// Another admin exists, so userID is not the last one.
			return false, nil
		}
	}
	return true, nil
}
// createUser creates a new user with the given ID, password, and optional
// admin flag. The raw password is MD5-hashed first (KOSync client
// compatibility) and then Argon2-hashed for storage; a random auth hash is
// generated for session validation. Returns an error when the password is
// empty, any hashing/token step fails, or the user already exists.
func (api *API) createUser(ctx context.Context, user string, rawPassword *string, isAdmin *bool) error {
	// A non-empty password is mandatory.
	if rawPassword == nil || *rawPassword == "" {
		return fmt.Errorf("password can't be empty")
	}

	// Admin defaults to false when not explicitly provided.
	admin := false
	if isAdmin != nil {
		admin = *isAdmin
	}

	// MD5 pre-hash (KOSync compatibility), then Argon2 for storage.
	md5Password := fmt.Sprintf("%x", md5.Sum([]byte(*rawPassword)))
	hashedPassword, err := argon2.CreateHash(md5Password, argon2.DefaultParams)
	if err != nil {
		return fmt.Errorf("unable to create hashed password")
	}

	// Random per-user auth hash used for session validation.
	rawAuthHash, err := utils.GenerateToken(64)
	if err != nil {
		return fmt.Errorf("unable to create token for user")
	}
	authHash := fmt.Sprintf("%x", rawAuthHash)

	createParams := database.CreateUserParams{
		ID:       user,
		Admin:    admin,
		Pass:     &hashedPassword,
		AuthHash: &authHash,
	}

	// Create user in DB; zero affected rows means the ID is already taken.
	rows, err := api.db.Queries.CreateUser(ctx, createParams)
	if err != nil {
		log.Error("CreateUser DB Error:", err)
		return fmt.Errorf("unable to create user")
	}
	if rows == 0 {
		log.Warn("User Already Exists:", createParams.ID)
		return fmt.Errorf("user already exists")
	}
	return nil
}
// updateUser updates a user's password and/or admin flag. At least one of
// rawPassword / isAdmin must be provided. Demoting the last remaining
// admin is refused. Changing the password also rotates the user's auth
// hash, invalidating existing sessions.
func (api *API) updateUser(ctx context.Context, user string, rawPassword *string, isAdmin *bool) error {
	// Validate Necessary Parameters
	if rawPassword == nil && isAdmin == nil {
		return fmt.Errorf("nothing to update")
	}

	// Base Params
	updateParams := database.UpdateUserParams{
		UserID: user,
	}

	// Handle Admin (Update or Existing)
	if isAdmin != nil {
		updateParams.Admin = *isAdmin
	} else {
		// Preserve the current admin flag when not explicitly changed.
		// (Renamed from "user", which shadowed the string parameter.)
		dbUser, err := api.db.Queries.GetUser(ctx, user)
		if err != nil {
			// BUG FIX: Wrap already appends err; the old message
			// embedded it a second time via Sprintf.
			return errors.Wrap(err, "GetUser DB Error")
		}
		updateParams.Admin = dbUser.Admin
	}

	// Check Admins - disallow demotion of the last remaining admin.
	if isLast, err := api.isLastAdmin(ctx, user); err != nil {
		return err
	} else if isLast && !updateParams.Admin {
		return fmt.Errorf("unable to demote %s - last admin", user)
	}

	// Handle Password
	if rawPassword != nil {
		if *rawPassword == "" {
			return fmt.Errorf("password can't be empty")
		}

		// MD5 pre-hash (KOSync compatibility), then Argon2 for storage.
		password := fmt.Sprintf("%x", md5.Sum([]byte(*rawPassword)))
		hashedPassword, err := argon2.CreateHash(password, argon2.DefaultParams)
		if err != nil {
			return fmt.Errorf("unable to create hashed password")
		}
		updateParams.Password = &hashedPassword

		// Rotate the auth hash so existing sessions are invalidated.
		rawAuthHash, err := utils.GenerateToken(64)
		if err != nil {
			return fmt.Errorf("unable to create token for user")
		}
		authHash := fmt.Sprintf("%x", rawAuthHash)
		updateParams.AuthHash = &authHash
	}

	// Update User
	if _, err := api.db.Queries.UpdateUser(ctx, updateParams); err != nil {
		return errors.Wrap(err, "UpdateUser DB Error")
	}
	return nil
}
// deleteUser removes a user after first taking a DB-only backup into the
// config path's backups folder. Deleting the last remaining admin is
// refused.
func (api *API) deleteUser(ctx context.Context, user string) error {
	// Check Admins
	if isLast, err := api.isLastAdmin(ctx, user); err != nil {
		return err
	} else if isLast {
		return fmt.Errorf("unable to delete %s - last admin", user)
	}

	// Create Backup File
	backupFilePath := filepath.Join(api.cfg.ConfigPath, fmt.Sprintf("backups/AnthoLumeBackup_%s.zip", time.Now().Format("20060102150405")))
	backupFile, err := os.Create(backupFilePath)
	if err != nil {
		return err
	}
	defer backupFile.Close()

	// Save Backup File (DB Only)
	w := bufio.NewWriter(backupFile)
	if err := api.createBackup(ctx, w, []string{}); err != nil {
		return err
	}
	// BUG FIX: the buffered writer was never flushed, which could leave
	// the tail of the backup ZIP (including its central directory) only
	// in memory and the on-disk file truncated.
	if err := w.Flush(); err != nil {
		return err
	}

	// Delete User
	if _, err := api.db.Queries.DeleteUser(ctx, user); err != nil {
		// Wrap already appends err; do not embed it in the message too.
		return errors.Wrap(err, "DeleteUser DB Error")
	}
	return nil
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,48 +1,68 @@
package api package api
import ( import (
"context"
"crypto/md5" "crypto/md5"
"fmt" "fmt"
"net/http" "net/http"
"strings" "strings"
"time"
argon2 "github.com/alexedwards/argon2id" argon2 "github.com/alexedwards/argon2id"
"github.com/gin-contrib/sessions" "github.com/gin-contrib/sessions"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
"reichard.io/bbank/database" log "github.com/sirupsen/logrus"
"reichard.io/antholume/database"
"reichard.io/antholume/utils"
) )
// Authorization Data
type authData struct {
UserName string
IsAdmin bool
AuthHash string
}
// KOSync API Auth Headers // KOSync API Auth Headers
type authHeader struct { type authKOHeader struct {
AuthUser string `header:"x-auth-user"` AuthUser string `header:"x-auth-user"`
AuthKey string `header:"x-auth-key"` AuthKey string `header:"x-auth-key"`
} }
func (api *API) authorizeCredentials(username string, password string) (authorized bool) { func (api *API) authorizeCredentials(ctx context.Context, username string, password string) (auth *authData) {
user, err := api.DB.Queries.GetUser(api.DB.Ctx, username) user, err := api.db.Queries.GetUser(ctx, username)
if err != nil { if err != nil {
return false return
} }
if match, err := argon2.ComparePasswordAndHash(password, *user.Pass); err != nil || match != true { if match, err := argon2.ComparePasswordAndHash(password, *user.Pass); err != nil || !match {
return false return
} }
return true // Update auth cache
api.userAuthCache[user.ID] = *user.AuthHash
return &authData{
UserName: user.ID,
IsAdmin: user.Admin,
AuthHash: *user.AuthHash,
}
} }
func (api *API) authAPIMiddleware(c *gin.Context) { func (api *API) authKOMiddleware(c *gin.Context) {
session := sessions.Default(c) session := sessions.Default(c)
// Utilize Session Token // Check Session First
if authorizedUser := session.Get("authorizedUser"); authorizedUser != nil { if auth, ok := api.getSession(c, session); ok {
c.Set("AuthorizedUser", authorizedUser) c.Set("Authorization", auth)
c.Header("Cache-Control", "private") c.Header("Cache-Control", "private")
c.Next() c.Next()
return return
} }
var rHeader authHeader // Session Failed -> Check Headers (Allowed on API for KOSync Compatibility)
var rHeader authKOHeader
if err := c.ShouldBindHeader(&rHeader); err != nil { if err := c.ShouldBindHeader(&rHeader); err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Incorrect Headers"}) c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Incorrect Headers"})
return return
@@ -52,25 +72,52 @@ func (api *API) authAPIMiddleware(c *gin.Context) {
return return
} }
if authorized := api.authorizeCredentials(rHeader.AuthUser, rHeader.AuthKey); authorized != true { authData := api.authorizeCredentials(c, rHeader.AuthUser, rHeader.AuthKey)
if authData == nil {
c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"}) c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"})
return return
} }
// Set Session Cookie if err := api.setSession(session, *authData); err != nil {
session.Set("authorizedUser", rHeader.AuthUser) c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"})
session.Save() return
}
c.Set("AuthorizedUser", rHeader.AuthUser) c.Set("Authorization", *authData)
c.Header("Cache-Control", "private")
c.Next()
}
func (api *API) authOPDSMiddleware(c *gin.Context) {
c.Header("WWW-Authenticate", `Basic realm="restricted", charset="UTF-8"`)
user, rawPassword, hasAuth := c.Request.BasicAuth()
// Validate Auth Fields
if !hasAuth || user == "" || rawPassword == "" {
c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Invalid Authorization Headers"})
return
}
// Validate Auth
password := fmt.Sprintf("%x", md5.Sum([]byte(rawPassword)))
authData := api.authorizeCredentials(c, user, password)
if authData == nil {
c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"})
return
}
c.Set("Authorization", *authData)
c.Header("Cache-Control", "private")
c.Next() c.Next()
} }
func (api *API) authWebAppMiddleware(c *gin.Context) { func (api *API) authWebAppMiddleware(c *gin.Context) {
session := sessions.Default(c) session := sessions.Default(c)
// Utilize Session Token // Check Session
if authorizedUser := session.Get("authorizedUser"); authorizedUser != nil { if auth, ok := api.getSession(c, session); ok {
c.Set("AuthorizedUser", authorizedUser) c.Set("Authorization", auth)
c.Header("Cache-Control", "private") c.Header("Cache-Control", "private")
c.Next() c.Next()
return return
@@ -80,98 +127,346 @@ func (api *API) authWebAppMiddleware(c *gin.Context) {
c.Abort() c.Abort()
} }
func (api *API) authFormLogin(c *gin.Context) { func (api *API) authAdminWebAppMiddleware(c *gin.Context) {
if data, _ := c.Get("Authorization"); data != nil {
auth := data.(authData)
if auth.IsAdmin {
c.Next()
return
}
}
appErrorPage(c, http.StatusUnauthorized, "Admin Permissions Required")
c.Abort()
}
func (api *API) appAuthLogin(c *gin.Context) {
templateVars, _ := api.getBaseTemplateVars("login", c)
username := strings.TrimSpace(c.PostForm("username")) username := strings.TrimSpace(c.PostForm("username"))
rawPassword := strings.TrimSpace(c.PostForm("password")) rawPassword := strings.TrimSpace(c.PostForm("password"))
if username == "" || rawPassword == "" { if username == "" || rawPassword == "" {
c.HTML(http.StatusUnauthorized, "login", gin.H{ templateVars["Error"] = "Invalid Credentials"
"RegistrationEnabled": api.Config.RegistrationEnabled, c.HTML(http.StatusUnauthorized, "page/login", templateVars)
"Error": "Invalid Credentials",
})
return return
} }
// MD5 - KOSync Compatiblity // MD5 - KOSync Compatiblity
password := fmt.Sprintf("%x", md5.Sum([]byte(rawPassword))) password := fmt.Sprintf("%x", md5.Sum([]byte(rawPassword)))
if authorized := api.authorizeCredentials(username, password); authorized != true { authData := api.authorizeCredentials(c, username, password)
c.HTML(http.StatusUnauthorized, "login", gin.H{ if authData == nil {
"RegistrationEnabled": api.Config.RegistrationEnabled, templateVars["Error"] = "Invalid Credentials"
"Error": "Invalid Credentials", c.HTML(http.StatusUnauthorized, "page/login", templateVars)
})
return return
} }
// Set Session
session := sessions.Default(c) session := sessions.Default(c)
if err := api.setSession(session, *authData); err != nil {
templateVars["Error"] = "Invalid Credentials"
c.HTML(http.StatusUnauthorized, "page/login", templateVars)
return
}
// Set Session Cookie c.Header("Cache-Control", "private")
session.Set("authorizedUser", username)
session.Save()
c.Redirect(http.StatusFound, "/") c.Redirect(http.StatusFound, "/")
} }
func (api *API) authFormRegister(c *gin.Context) { func (api *API) appAuthRegister(c *gin.Context) {
if !api.Config.RegistrationEnabled { if !api.cfg.RegistrationEnabled {
c.AbortWithStatus(http.StatusConflict) appErrorPage(c, http.StatusUnauthorized, "Nice try. Registration is disabled.")
return
} }
templateVars, _ := api.getBaseTemplateVars("login", c)
templateVars["Register"] = true
username := strings.TrimSpace(c.PostForm("username")) username := strings.TrimSpace(c.PostForm("username"))
rawPassword := strings.TrimSpace(c.PostForm("password")) rawPassword := strings.TrimSpace(c.PostForm("password"))
if username == "" || rawPassword == "" { if username == "" || rawPassword == "" {
c.HTML(http.StatusBadRequest, "login", gin.H{ templateVars["Error"] = "Invalid User or Password"
"Register": true, c.HTML(http.StatusBadRequest, "page/login", templateVars)
"Error": "Registration Disabled or User Already Exists",
})
return return
} }
password := fmt.Sprintf("%x", md5.Sum([]byte(rawPassword))) password := fmt.Sprintf("%x", md5.Sum([]byte(rawPassword)))
hashedPassword, err := argon2.CreateHash(password, argon2.DefaultParams) hashedPassword, err := argon2.CreateHash(password, argon2.DefaultParams)
if err != nil { if err != nil {
c.HTML(http.StatusBadRequest, "login", gin.H{ templateVars["Error"] = "Registration Disabled or User Already Exists"
"Register": true, c.HTML(http.StatusBadRequest, "page/login", templateVars)
"Error": "Registration Disabled or User Already Exists",
})
return return
} }
rows, err := api.DB.Queries.CreateUser(api.DB.Ctx, database.CreateUserParams{ // Generate auth hash
rawAuthHash, err := utils.GenerateToken(64)
if err != nil {
log.Error("Failed to generate user token: ", err)
templateVars["Error"] = "Failed to Create User"
c.HTML(http.StatusBadRequest, "page/login", templateVars)
return
}
// Get current users
currentUsers, err := api.db.Queries.GetUsers(c)
if err != nil {
log.Error("Failed to check all users: ", err)
templateVars["Error"] = "Failed to Create User"
c.HTML(http.StatusBadRequest, "page/login", templateVars)
return
}
// Determine if we should be admin
isAdmin := false
if len(currentUsers) == 0 {
isAdmin = true
}
// Create user in DB
authHash := fmt.Sprintf("%x", rawAuthHash)
if rows, err := api.db.Queries.CreateUser(c, database.CreateUserParams{
ID: username, ID: username,
Pass: &hashedPassword, Pass: &hashedPassword,
}) AuthHash: &authHash,
Admin: isAdmin,
}); err != nil {
log.Error("CreateUser DB Error:", err)
templateVars["Error"] = "Registration Disabled or User Already Exists"
c.HTML(http.StatusBadRequest, "page/login", templateVars)
return
} else if rows == 0 {
log.Warn("User Already Exists:", username)
templateVars["Error"] = "Registration Disabled or User Already Exists"
c.HTML(http.StatusBadRequest, "page/login", templateVars)
return
}
// SQL Error // Get user
user, err := api.db.Queries.GetUser(c, username)
if err != nil { if err != nil {
c.HTML(http.StatusBadRequest, "login", gin.H{ log.Error("GetUser DB Error:", err)
"Register": true, templateVars["Error"] = "Registration Disabled or User Already Exists"
"Error": "Registration Disabled or User Already Exists", c.HTML(http.StatusBadRequest, "page/login", templateVars)
})
return return
} }
// User Already Exists // Set session
if rows == 0 { auth := authData{
c.HTML(http.StatusBadRequest, "login", gin.H{ UserName: user.ID,
"Register": true, IsAdmin: user.Admin,
"Error": "Registration Disabled or User Already Exists", AuthHash: *user.AuthHash,
})
return
} }
session := sessions.Default(c) session := sessions.Default(c)
if err := api.setSession(session, auth); err != nil {
appErrorPage(c, http.StatusUnauthorized, "Unauthorized.")
return
}
// Set Session Cookie c.Header("Cache-Control", "private")
session.Set("authorizedUser", username)
session.Save()
c.Redirect(http.StatusFound, "/") c.Redirect(http.StatusFound, "/")
} }
func (api *API) authLogout(c *gin.Context) { func (api *API) appAuthLogout(c *gin.Context) {
session := sessions.Default(c) session := sessions.Default(c)
session.Clear() session.Clear()
session.Save() if err := session.Save(); err != nil {
log.Error("unable to save session")
}
c.Redirect(http.StatusFound, "/login") c.Redirect(http.StatusFound, "/login")
} }
func (api *API) koAuthRegister(c *gin.Context) {
if !api.cfg.RegistrationEnabled {
c.AbortWithStatus(http.StatusConflict)
return
}
var rUser requestUser
if err := c.ShouldBindJSON(&rUser); err != nil {
log.Error("Invalid JSON Bind")
apiErrorPage(c, http.StatusBadRequest, "Invalid User Data")
return
}
if rUser.Username == "" || rUser.Password == "" {
log.Error("Invalid User - Empty Username or Password")
apiErrorPage(c, http.StatusBadRequest, "Invalid User Data")
return
}
// Generate password hash
hashedPassword, err := argon2.CreateHash(rUser.Password, argon2.DefaultParams)
if err != nil {
log.Error("Argon2 Hash Failure:", err)
apiErrorPage(c, http.StatusBadRequest, "Unknown Error")
return
}
// Generate auth hash
rawAuthHash, err := utils.GenerateToken(64)
if err != nil {
log.Error("Failed to generate user token: ", err)
apiErrorPage(c, http.StatusBadRequest, "Unknown Error")
return
}
// Get current users
currentUsers, err := api.db.Queries.GetUsers(c)
if err != nil {
log.Error("Failed to check all users: ", err)
apiErrorPage(c, http.StatusBadRequest, "Failed to Create User")
return
}
// Determine if we should be admin
isAdmin := false
if len(currentUsers) == 0 {
isAdmin = true
}
// Create user
authHash := fmt.Sprintf("%x", rawAuthHash)
if rows, err := api.db.Queries.CreateUser(c, database.CreateUserParams{
ID: rUser.Username,
Pass: &hashedPassword,
AuthHash: &authHash,
Admin: isAdmin,
}); err != nil {
log.Error("CreateUser DB Error:", err)
apiErrorPage(c, http.StatusBadRequest, "Invalid User Data")
return
} else if rows == 0 {
log.Error("User Already Exists:", rUser.Username)
apiErrorPage(c, http.StatusBadRequest, "User Already Exists")
return
}
c.JSON(http.StatusCreated, gin.H{
"username": rUser.Username,
})
}
func (api *API) getSession(ctx context.Context, session sessions.Session) (auth authData, ok bool) {
// Get Session
authorizedUser := session.Get("authorizedUser")
isAdmin := session.Get("isAdmin")
expiresAt := session.Get("expiresAt")
authHash := session.Get("authHash")
if authorizedUser == nil || isAdmin == nil || expiresAt == nil || authHash == nil {
return
}
// Create Auth Object
auth = authData{
UserName: authorizedUser.(string),
IsAdmin: isAdmin.(bool),
AuthHash: authHash.(string),
}
// Validate Auth Hash
correctAuthHash, err := api.getUserAuthHash(ctx, auth.UserName)
if err != nil || correctAuthHash != auth.AuthHash {
return
}
// Refresh
if expiresAt.(int64)-time.Now().Unix() < 60*60*24 {
log.Info("Refreshing Session")
if err := api.setSession(session, auth); err != nil {
log.Error("unable to get session")
return
}
}
// Authorized
return auth, true
}
func (api *API) setSession(session sessions.Session, auth authData) error {
// Set Session Cookie
session.Set("authorizedUser", auth.UserName)
session.Set("isAdmin", auth.IsAdmin)
session.Set("expiresAt", time.Now().Unix()+(60*60*24*7))
session.Set("authHash", auth.AuthHash)
return session.Save()
}
func (api *API) getUserAuthHash(ctx context.Context, username string) (string, error) {
// Return Cache
if api.userAuthCache[username] != "" {
return api.userAuthCache[username], nil
}
// Get DB
user, err := api.db.Queries.GetUser(ctx, username)
if err != nil {
log.Error("GetUser DB Error:", err)
return "", err
}
// Update Cache
api.userAuthCache[username] = *user.AuthHash
return api.userAuthCache[username], nil
}
func (api *API) rotateAllAuthHashes(ctx context.Context) error {
// Do Transaction
tx, err := api.db.DB.Begin()
if err != nil {
log.Error("Transaction Begin DB Error: ", err)
return err
}
// Defer & Start Transaction
defer func() {
if err := tx.Rollback(); err != nil {
log.Error("DB Rollback Error:", err)
}
}()
qtx := api.db.Queries.WithTx(tx)
users, err := qtx.GetUsers(ctx)
if err != nil {
return err
}
// Update Users
newAuthHashCache := make(map[string]string, 0)
for _, user := range users {
// Generate Auth Hash
rawAuthHash, err := utils.GenerateToken(64)
if err != nil {
return err
}
// Update User
authHash := fmt.Sprintf("%x", rawAuthHash)
if _, err = qtx.UpdateUser(ctx, database.UpdateUserParams{
UserID: user.ID,
AuthHash: &authHash,
Admin: user.Admin,
}); err != nil {
return err
}
// Save New Hash Cache
newAuthHashCache[user.ID] = fmt.Sprintf("%x", rawAuthHash)
}
// Commit Transaction
if err := tx.Commit(); err != nil {
log.Error("Transaction Commit DB Error: ", err)
return err
}
// Transaction Succeeded -> Update Cache
for user, hash := range newAuthHashCache {
api.userAuthCache[user] = hash
}
return nil
}

151
api/common.go Normal file
View File

@@ -0,0 +1,151 @@
package api
import (
"fmt"
"net/http"
"os"
"path/filepath"
"github.com/gin-gonic/gin"
log "github.com/sirupsen/logrus"
"reichard.io/antholume/database"
"reichard.io/antholume/metadata"
)
func (api *API) createDownloadDocumentHandler(errorFunc func(*gin.Context, int, string)) func(*gin.Context) {
return func(c *gin.Context) {
var rDoc requestDocumentID
if err := c.ShouldBindUri(&rDoc); err != nil {
log.Error("Invalid URI Bind")
errorFunc(c, http.StatusBadRequest, "Invalid Request")
return
}
// Get Document
document, err := api.db.Queries.GetDocument(c, rDoc.DocumentID)
if err != nil {
log.Error("GetDocument DB Error:", err)
errorFunc(c, http.StatusBadRequest, "Unknown Document")
return
}
if document.Filepath == nil {
log.Error("Document Doesn't Have File:", rDoc.DocumentID)
errorFunc(c, http.StatusBadRequest, "Document Doesn't Exist")
return
}
// Derive Basepath
basepath := filepath.Join(api.cfg.DataPath, "documents")
if document.Basepath != nil && *document.Basepath != "" {
basepath = *document.Basepath
}
// Derive Storage Location
filePath := filepath.Join(basepath, *document.Filepath)
// Validate File Exists
_, err = os.Stat(filePath)
if os.IsNotExist(err) {
log.Error("File should but doesn't exist: ", err)
errorFunc(c, http.StatusBadRequest, "Document Doesn't Exist")
return
}
// Force Download
c.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", filepath.Base(*document.Filepath)))
c.File(filePath)
}
}
func (api *API) createGetCoverHandler(errorFunc func(*gin.Context, int, string)) func(*gin.Context) {
return func(c *gin.Context) {
var rDoc requestDocumentID
if err := c.ShouldBindUri(&rDoc); err != nil {
log.Error("Invalid URI Bind")
errorFunc(c, http.StatusNotFound, "Invalid cover.")
return
}
// Validate Document Exists in DB
document, err := api.db.Queries.GetDocument(c, rDoc.DocumentID)
if err != nil {
log.Error("GetDocument DB Error:", err)
errorFunc(c, http.StatusInternalServerError, fmt.Sprintf("GetDocument DB Error: %v", err))
return
}
// Handle Identified Document
if document.Coverfile != nil {
if *document.Coverfile == "UNKNOWN" {
c.FileFromFS("assets/images/no-cover.jpg", http.FS(api.assets))
return
}
// Derive Path
safePath := filepath.Join(api.cfg.DataPath, "covers", *document.Coverfile)
// Validate File Exists
_, err = os.Stat(safePath)
if err != nil {
log.Error("File should but doesn't exist: ", err)
c.FileFromFS("assets/images/no-cover.jpg", http.FS(api.assets))
return
}
c.File(safePath)
return
}
// Attempt Metadata
var coverDir string = filepath.Join(api.cfg.DataPath, "covers")
var coverFile string = "UNKNOWN"
// Identify Documents & Save Covers
metadataResults, err := metadata.SearchMetadata(metadata.SOURCE_GBOOK, metadata.MetadataInfo{
Title: document.Title,
Author: document.Author,
})
if err == nil && len(metadataResults) > 0 && metadataResults[0].ID != nil {
firstResult := metadataResults[0]
// Save Cover
fileName, err := metadata.CacheCover(*firstResult.ID, coverDir, document.ID, false)
if err == nil {
coverFile = *fileName
}
// Store First Metadata Result
if _, err = api.db.Queries.AddMetadata(c, database.AddMetadataParams{
DocumentID: document.ID,
Title: firstResult.Title,
Author: firstResult.Author,
Description: firstResult.Description,
Gbid: firstResult.ID,
Olid: nil,
Isbn10: firstResult.ISBN10,
Isbn13: firstResult.ISBN13,
}); err != nil {
log.Error("AddMetadata DB Error:", err)
}
}
// Upsert Document
if _, err = api.db.Queries.UpsertDocument(c, database.UpsertDocumentParams{
ID: document.ID,
Coverfile: &coverFile,
}); err != nil {
log.Warn("UpsertDocument DB Error:", err)
}
// Return Unknown Cover
if coverFile == "UNKNOWN" {
c.FileFromFS("assets/images/no-cover.jpg", http.FS(api.assets))
return
}
coverFilePath := filepath.Join(coverDir, coverFile)
c.File(coverFilePath)
}
}

View File

@@ -10,15 +10,12 @@ import (
"net/http" "net/http"
"os" "os"
"path/filepath" "path/filepath"
"strings"
"time" "time"
argon2 "github.com/alexedwards/argon2id"
"github.com/gabriel-vasile/mimetype"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
"golang.org/x/exp/slices" "reichard.io/antholume/database"
"reichard.io/bbank/database" "reichard.io/antholume/metadata"
) )
type activityItem struct { type activityItem struct {
@@ -37,6 +34,7 @@ type requestActivity struct {
type requestCheckActivitySync struct { type requestCheckActivitySync struct {
DeviceID string `json:"device_id"` DeviceID string `json:"device_id"`
Device string `json:"device"`
} }
type requestDocument struct { type requestDocument struct {
@@ -73,127 +71,91 @@ type requestDocumentID struct {
DocumentID string `uri:"document" binding:"required"` DocumentID string `uri:"document" binding:"required"`
} }
func (api *API) authorizeUser(c *gin.Context) { func (api *API) koAuthorizeUser(c *gin.Context) {
c.JSON(200, gin.H{ koJSON(c, 200, gin.H{
"authorized": "OK", "authorized": "OK",
}) })
} }
func (api *API) createUser(c *gin.Context) { func (api *API) koSetProgress(c *gin.Context) {
if !api.Config.RegistrationEnabled { var auth authData
c.AbortWithStatus(http.StatusConflict) if data, _ := c.Get("Authorization"); data != nil {
return auth = data.(authData)
} }
var rUser requestUser
if err := c.ShouldBindJSON(&rUser); err != nil {
log.Error("[createUser] Invalid JSON Bind")
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid User Data"})
return
}
if rUser.Username == "" || rUser.Password == "" {
log.Error("[createUser] Invalid User - Empty Username or Password")
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid User Data"})
return
}
hashedPassword, err := argon2.CreateHash(rUser.Password, argon2.DefaultParams)
if err != nil {
log.Error("[createUser] Argon2 Hash Failure:", err)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Error"})
return
}
rows, err := api.DB.Queries.CreateUser(api.DB.Ctx, database.CreateUserParams{
ID: rUser.Username,
Pass: &hashedPassword,
})
if err != nil {
log.Error("[createUser] CreateUser DB Error:", err)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid User Data"})
return
}
// User Exists
if rows == 0 {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "User Already Exists"})
return
}
c.JSON(http.StatusCreated, gin.H{
"username": rUser.Username,
})
}
func (api *API) setProgress(c *gin.Context) {
rUser, _ := c.Get("AuthorizedUser")
var rPosition requestPosition var rPosition requestPosition
if err := c.ShouldBindJSON(&rPosition); err != nil { if err := c.ShouldBindJSON(&rPosition); err != nil {
log.Error("[setProgress] Invalid JSON Bind") log.Error("Invalid JSON Bind")
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Progress Data"}) apiErrorPage(c, http.StatusBadRequest, "Invalid Progress Data")
return return
} }
// Upsert Device // Upsert Device
if _, err := api.DB.Queries.UpsertDevice(api.DB.Ctx, database.UpsertDeviceParams{ if _, err := api.db.Queries.UpsertDevice(c, database.UpsertDeviceParams{
ID: rPosition.DeviceID, ID: rPosition.DeviceID,
UserID: rUser.(string), UserID: auth.UserName,
DeviceName: rPosition.Device, DeviceName: rPosition.Device,
LastSynced: time.Now().UTC().Format(time.RFC3339),
}); err != nil { }); err != nil {
log.Error("[setProgress] UpsertDevice DB Error:", err) log.Error("UpsertDevice DB Error:", err)
} }
// Upsert Document // Upsert Document
if _, err := api.DB.Queries.UpsertDocument(api.DB.Ctx, database.UpsertDocumentParams{ if _, err := api.db.Queries.UpsertDocument(c, database.UpsertDocumentParams{
ID: rPosition.DocumentID, ID: rPosition.DocumentID,
}); err != nil { }); err != nil {
log.Error("[setProgress] UpsertDocument DB Error:", err) log.Error("UpsertDocument DB Error:", err)
} }
// Create or Replace Progress // Create or Replace Progress
progress, err := api.DB.Queries.UpdateProgress(api.DB.Ctx, database.UpdateProgressParams{ progress, err := api.db.Queries.UpdateProgress(c, database.UpdateProgressParams{
Percentage: rPosition.Percentage, Percentage: rPosition.Percentage,
DocumentID: rPosition.DocumentID, DocumentID: rPosition.DocumentID,
DeviceID: rPosition.DeviceID, DeviceID: rPosition.DeviceID,
UserID: rUser.(string), UserID: auth.UserName,
Progress: rPosition.Progress, Progress: rPosition.Progress,
}) })
if err != nil { if err != nil {
log.Error("[setProgress] UpdateProgress DB Error:", err) log.Error("UpdateProgress DB Error:", err)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"}) apiErrorPage(c, http.StatusBadRequest, "Invalid Request")
return return
} }
c.JSON(http.StatusOK, gin.H{ koJSON(c, http.StatusOK, gin.H{
"document": progress.DocumentID, "document": progress.DocumentID,
"timestamp": progress.CreatedAt, "timestamp": progress.CreatedAt,
}) })
} }
func (api *API) getProgress(c *gin.Context) { func (api *API) koGetProgress(c *gin.Context) {
rUser, _ := c.Get("AuthorizedUser") var auth authData
if data, _ := c.Get("Authorization"); data != nil {
auth = data.(authData)
}
var rDocID requestDocumentID var rDocID requestDocumentID
if err := c.ShouldBindUri(&rDocID); err != nil { if err := c.ShouldBindUri(&rDocID); err != nil {
log.Error("[getProgress] Invalid URI Bind") log.Error("Invalid URI Bind")
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"}) apiErrorPage(c, http.StatusBadRequest, "Invalid Request")
return return
} }
progress, err := api.DB.Queries.GetProgress(api.DB.Ctx, database.GetProgressParams{ progress, err := api.db.Queries.GetDocumentProgress(c, database.GetDocumentProgressParams{
DocumentID: rDocID.DocumentID, DocumentID: rDocID.DocumentID,
UserID: rUser.(string), UserID: auth.UserName,
}) })
if err != nil { if err == sql.ErrNoRows {
log.Error("[getProgress] GetProgress DB Error:", err) // Not Found
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Document"}) koJSON(c, http.StatusOK, gin.H{})
return
} else if err != nil {
log.Error("GetDocumentProgress DB Error:", err)
apiErrorPage(c, http.StatusBadRequest, "Invalid Document")
return return
} }
c.JSON(http.StatusOK, gin.H{ koJSON(c, http.StatusOK, gin.H{
"document": progress.DocumentID, "document": progress.DocumentID,
"percentage": progress.Percentage, "percentage": progress.Percentage,
"progress": progress.Progress, "progress": progress.Progress,
@@ -202,21 +164,24 @@ func (api *API) getProgress(c *gin.Context) {
}) })
} }
func (api *API) addActivities(c *gin.Context) { func (api *API) koAddActivities(c *gin.Context) {
rUser, _ := c.Get("AuthorizedUser") var auth authData
if data, _ := c.Get("Authorization"); data != nil {
auth = data.(authData)
}
var rActivity requestActivity var rActivity requestActivity
if err := c.ShouldBindJSON(&rActivity); err != nil { if err := c.ShouldBindJSON(&rActivity); err != nil {
log.Error("[addActivity] Invalid JSON Bind") log.Error("Invalid JSON Bind")
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Activity"}) apiErrorPage(c, http.StatusBadRequest, "Invalid Activity")
return return
} }
// Do Transaction // Do Transaction
tx, err := api.DB.DB.Begin() tx, err := api.db.DB.Begin()
if err != nil { if err != nil {
log.Error("[addActivities] Transaction Begin DB Error:", err) log.Error("Transaction Begin DB Error:", err)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Error"}) apiErrorPage(c, http.StatusBadRequest, "Unknown Error")
return return
} }
@@ -228,111 +193,143 @@ func (api *API) addActivities(c *gin.Context) {
allDocuments := getKeys(allDocumentsMap) allDocuments := getKeys(allDocumentsMap)
// Defer & Start Transaction // Defer & Start Transaction
defer tx.Rollback() defer func() {
qtx := api.DB.Queries.WithTx(tx) if err := tx.Rollback(); err != nil {
log.Error("DB Rollback Error:", err)
}
}()
qtx := api.db.Queries.WithTx(tx)
// Upsert Documents // Upsert Documents
for _, doc := range allDocuments { for _, doc := range allDocuments {
if _, err := qtx.UpsertDocument(api.DB.Ctx, database.UpsertDocumentParams{ if _, err := qtx.UpsertDocument(c, database.UpsertDocumentParams{
ID: doc, ID: doc,
}); err != nil { }); err != nil {
log.Error("[addActivities] UpsertDocument DB Error:", err) log.Error("UpsertDocument DB Error:", err)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Document"}) apiErrorPage(c, http.StatusBadRequest, "Invalid Document")
return return
} }
} }
// Upsert Device // Upsert Device
if _, err = qtx.UpsertDevice(api.DB.Ctx, database.UpsertDeviceParams{ if _, err = qtx.UpsertDevice(c, database.UpsertDeviceParams{
ID: rActivity.DeviceID, ID: rActivity.DeviceID,
UserID: rUser.(string), UserID: auth.UserName,
DeviceName: rActivity.Device, DeviceName: rActivity.Device,
LastSynced: time.Now().UTC().Format(time.RFC3339),
}); err != nil { }); err != nil {
log.Error("[addActivities] UpsertDevice DB Error:", err) log.Error("UpsertDevice DB Error:", err)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Device"}) apiErrorPage(c, http.StatusBadRequest, "Invalid Device")
return return
} }
// Add All Activity // Add All Activity
for _, item := range rActivity.Activity { for _, item := range rActivity.Activity {
if _, err := qtx.AddActivity(api.DB.Ctx, database.AddActivityParams{ if _, err := qtx.AddActivity(c, database.AddActivityParams{
UserID: rUser.(string), UserID: auth.UserName,
DocumentID: item.DocumentID, DocumentID: item.DocumentID,
DeviceID: rActivity.DeviceID, DeviceID: rActivity.DeviceID,
StartTime: time.Unix(int64(item.StartTime), 0).UTC(), StartTime: time.Unix(int64(item.StartTime), 0).UTC().Format(time.RFC3339),
Duration: int64(item.Duration), Duration: int64(item.Duration),
Page: int64(item.Page), StartPercentage: float64(item.Page) / float64(item.Pages),
Pages: int64(item.Pages), EndPercentage: float64(item.Page+1) / float64(item.Pages),
}); err != nil { }); err != nil {
log.Error("[addActivities] AddActivity DB Error:", err) log.Error("AddActivity DB Error:", err)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Activity"}) apiErrorPage(c, http.StatusBadRequest, "Invalid Activity")
return return
} }
} }
// Commit Transaction // Commit Transaction
if err := tx.Commit(); err != nil { if err := tx.Commit(); err != nil {
log.Error("[addActivities] Transaction Commit DB Error:", err) log.Error("Transaction Commit DB Error:", err)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Error"}) apiErrorPage(c, http.StatusBadRequest, "Unknown Error")
return return
} }
c.JSON(http.StatusOK, gin.H{ koJSON(c, http.StatusOK, gin.H{
"added": len(rActivity.Activity), "added": len(rActivity.Activity),
}) })
} }
func (api *API) checkActivitySync(c *gin.Context) { func (api *API) koCheckActivitySync(c *gin.Context) {
rUser, _ := c.Get("AuthorizedUser") var auth authData
if data, _ := c.Get("Authorization"); data != nil {
auth = data.(authData)
}
var rCheckActivity requestCheckActivitySync var rCheckActivity requestCheckActivitySync
if err := c.ShouldBindJSON(&rCheckActivity); err != nil { if err := c.ShouldBindJSON(&rCheckActivity); err != nil {
log.Error("[checkActivitySync] Invalid JSON Bind") log.Error("Invalid JSON Bind")
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"}) apiErrorPage(c, http.StatusBadRequest, "Invalid Request")
return
}
// Upsert Device
if _, err := api.db.Queries.UpsertDevice(c, database.UpsertDeviceParams{
ID: rCheckActivity.DeviceID,
UserID: auth.UserName,
DeviceName: rCheckActivity.Device,
LastSynced: time.Now().UTC().Format(time.RFC3339),
}); err != nil {
log.Error("UpsertDevice DB Error", err)
apiErrorPage(c, http.StatusBadRequest, "Invalid Device")
return return
} }
// Get Last Device Activity // Get Last Device Activity
lastActivity, err := api.DB.Queries.GetLastActivity(api.DB.Ctx, database.GetLastActivityParams{ lastActivity, err := api.db.Queries.GetLastActivity(c, database.GetLastActivityParams{
UserID: rUser.(string), UserID: auth.UserName,
DeviceID: rCheckActivity.DeviceID, DeviceID: rCheckActivity.DeviceID,
}) })
if err == sql.ErrNoRows { if err == sql.ErrNoRows {
lastActivity = time.UnixMilli(0) lastActivity = time.UnixMilli(0).Format(time.RFC3339)
} else if err != nil { } else if err != nil {
log.Error("[checkActivitySync] GetLastActivity DB Error:", err) log.Error("GetLastActivity DB Error:", err)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Error"}) apiErrorPage(c, http.StatusBadRequest, "Unknown Error")
return return
} }
c.JSON(http.StatusOK, gin.H{ // Parse Time
"last_sync": lastActivity.Unix(), parsedTime, err := time.Parse(time.RFC3339, lastActivity)
if err != nil {
log.Error("Time Parse Error:", err)
apiErrorPage(c, http.StatusBadRequest, "Unknown Error")
return
}
koJSON(c, http.StatusOK, gin.H{
"last_sync": parsedTime.Unix(),
}) })
} }
func (api *API) addDocuments(c *gin.Context) { func (api *API) koAddDocuments(c *gin.Context) {
var rNewDocs requestDocument var rNewDocs requestDocument
if err := c.ShouldBindJSON(&rNewDocs); err != nil { if err := c.ShouldBindJSON(&rNewDocs); err != nil {
log.Error("[addDocuments] Invalid JSON Bind") log.Error("Invalid JSON Bind")
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Document(s)"}) apiErrorPage(c, http.StatusBadRequest, "Invalid Document(s)")
return return
} }
// Do Transaction // Do Transaction
tx, err := api.DB.DB.Begin() tx, err := api.db.DB.Begin()
if err != nil { if err != nil {
log.Error("[addDocuments] Transaction Begin DB Error:", err) log.Error("Transaction Begin DB Error:", err)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Error"}) apiErrorPage(c, http.StatusBadRequest, "Unknown Error")
return return
} }
// Defer & Start Transaction // Defer & Start Transaction
defer tx.Rollback() defer func() {
qtx := api.DB.Queries.WithTx(tx) if err := tx.Rollback(); err != nil {
log.Error("DB Rollback Error:", err)
}
}()
qtx := api.db.Queries.WithTx(tx)
// Upsert Documents // Upsert Documents
for _, doc := range rNewDocs.Documents { for _, doc := range rNewDocs.Documents {
doc, err := qtx.UpsertDocument(api.DB.Ctx, database.UpsertDocumentParams{ _, err := qtx.UpsertDocument(c, database.UpsertDocumentParams{
ID: doc.ID, ID: doc.ID,
Title: api.sanitizeInput(doc.Title), Title: api.sanitizeInput(doc.Title),
Author: api.sanitizeInput(doc.Author), Author: api.sanitizeInput(doc.Author),
@@ -342,89 +339,78 @@ func (api *API) addDocuments(c *gin.Context) {
Description: api.sanitizeInput(doc.Description), Description: api.sanitizeInput(doc.Description),
}) })
if err != nil { if err != nil {
log.Error("[addDocuments] UpsertDocument DB Error:", err) log.Error("UpsertDocument DB Error:", err)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Document"}) apiErrorPage(c, http.StatusBadRequest, "Invalid Document")
return return
} }
if _, err = qtx.UpdateDocumentSync(api.DB.Ctx, database.UpdateDocumentSyncParams{
ID: doc.ID,
Synced: true,
}); err != nil {
log.Error("[addDocuments] UpdateDocumentSync DB Error:", err)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Document"})
return
}
} }
// Commit Transaction // Commit Transaction
if err := tx.Commit(); err != nil { if err := tx.Commit(); err != nil {
log.Error("[addDocuments] Transaction Commit DB Error:", err) log.Error("Transaction Commit DB Error:", err)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Error"}) apiErrorPage(c, http.StatusBadRequest, "Unknown Error")
return return
} }
c.JSON(http.StatusOK, gin.H{ koJSON(c, http.StatusOK, gin.H{
"changed": len(rNewDocs.Documents), "changed": len(rNewDocs.Documents),
}) })
} }
func (api *API) checkDocumentsSync(c *gin.Context) { func (api *API) koCheckDocumentsSync(c *gin.Context) {
rUser, _ := c.Get("AuthorizedUser") var auth authData
if data, _ := c.Get("Authorization"); data != nil {
auth = data.(authData)
}
var rCheckDocs requestCheckDocumentSync var rCheckDocs requestCheckDocumentSync
if err := c.ShouldBindJSON(&rCheckDocs); err != nil { if err := c.ShouldBindJSON(&rCheckDocs); err != nil {
log.Error("[checkDocumentsSync] Invalid JSON Bind") log.Error("Invalid JSON Bind")
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"}) apiErrorPage(c, http.StatusBadRequest, "Invalid Request")
return return
} }
// Upsert Device // Upsert Device
device, err := api.DB.Queries.UpsertDevice(api.DB.Ctx, database.UpsertDeviceParams{ _, err := api.db.Queries.UpsertDevice(c, database.UpsertDeviceParams{
ID: rCheckDocs.DeviceID, ID: rCheckDocs.DeviceID,
UserID: rUser.(string), UserID: auth.UserName,
DeviceName: rCheckDocs.Device, DeviceName: rCheckDocs.Device,
LastSynced: time.Now().UTC().Format(time.RFC3339),
}) })
if err != nil { if err != nil {
log.Error("[checkDocumentsSync] UpsertDevice DB Error", err) log.Error("UpsertDevice DB Error", err)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Device"}) apiErrorPage(c, http.StatusBadRequest, "Invalid Device")
return return
} }
missingDocs := []database.Document{}
deletedDocIDs := []string{}
if device.Sync == true {
// Get Missing Documents // Get Missing Documents
missingDocs, err = api.DB.Queries.GetMissingDocuments(api.DB.Ctx, rCheckDocs.Have) missingDocs, err := api.db.Queries.GetMissingDocuments(c, rCheckDocs.Have)
if err != nil { if err != nil {
log.Error("[checkDocumentsSync] GetMissingDocuments DB Error", err) log.Error("GetMissingDocuments DB Error", err)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"}) apiErrorPage(c, http.StatusBadRequest, "Invalid Request")
return return
} }
// Get Deleted Documents // Get Deleted Documents
deletedDocIDs, err = api.DB.Queries.GetDeletedDocuments(api.DB.Ctx, rCheckDocs.Have) deletedDocIDs, err := api.db.Queries.GetDeletedDocuments(c, rCheckDocs.Have)
if err != nil { if err != nil {
log.Error("[checkDocumentsSync] GetDeletedDocuments DB Error", err) log.Error("GetDeletedDocuments DB Error", err)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"}) apiErrorPage(c, http.StatusBadRequest, "Invalid Request")
return return
} }
}
// Get Wanted Documents // Get Wanted Documents
jsonHaves, err := json.Marshal(rCheckDocs.Have) jsonHaves, err := json.Marshal(rCheckDocs.Have)
if err != nil { if err != nil {
log.Error("[checkDocumentsSync] JSON Marshal Error", err) log.Error("JSON Marshal Error", err)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"}) apiErrorPage(c, http.StatusBadRequest, "Invalid Request")
return return
} }
wantedDocs, err := api.DB.Queries.GetWantedDocuments(api.DB.Ctx, string(jsonHaves)) wantedDocs, err := api.db.Queries.GetWantedDocuments(c, string(jsonHaves))
if err != nil { if err != nil {
log.Error("[checkDocumentsSync] GetWantedDocuments DB Error", err) log.Error("GetWantedDocuments DB Error", err)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"}) apiErrorPage(c, http.StatusBadRequest, "Invalid Request")
return return
} }
@@ -461,159 +447,116 @@ func (api *API) checkDocumentsSync(c *gin.Context) {
rCheckDocSync.Delete = deletedDocIDs rCheckDocSync.Delete = deletedDocIDs
} }
c.JSON(http.StatusOK, rCheckDocSync) koJSON(c, http.StatusOK, rCheckDocSync)
} }
func (api *API) uploadDocumentFile(c *gin.Context) { func (api *API) koUploadExistingDocument(c *gin.Context) {
var rDoc requestDocumentID var rDoc requestDocumentID
if err := c.ShouldBindUri(&rDoc); err != nil { if err := c.ShouldBindUri(&rDoc); err != nil {
log.Error("[uploadDocumentFile] Invalid URI Bind") log.Error("Invalid URI Bind")
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"}) apiErrorPage(c, http.StatusBadRequest, "Invalid Request")
return return
} }
// Open Form File
fileData, err := c.FormFile("file") fileData, err := c.FormFile("file")
if err != nil { if err != nil {
log.Error("[uploadDocumentFile] File Error:", err) log.Error("File Error:", err)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "File Error"}) apiErrorPage(c, http.StatusBadRequest, "File error")
return
}
// Validate Type & Derive Extension on MIME
uploadedFile, err := fileData.Open()
fileMime, err := mimetype.DetectReader(uploadedFile)
fileExtension := fileMime.Extension()
if !slices.Contains([]string{".epub", ".html"}, fileExtension) {
log.Error("[uploadDocumentFile] Invalid FileType:", fileExtension)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Filetype"})
return return
} }
// Validate Document Exists in DB // Validate Document Exists in DB
document, err := api.DB.Queries.GetDocument(api.DB.Ctx, rDoc.DocumentID) document, err := api.db.Queries.GetDocument(c, rDoc.DocumentID)
if err != nil { if err != nil {
log.Error("[uploadDocumentFile] GetDocument DB Error:", err) log.Error("GetDocument DB Error:", err)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Document"}) apiErrorPage(c, http.StatusBadRequest, "Unknown Document")
return
}
// Open File
uploadedFile, err := fileData.Open()
if err != nil {
log.Error("Unable to open file")
apiErrorPage(c, http.StatusBadRequest, "Unable to open file")
return
}
// Check Support
docType, err := metadata.GetDocumentTypeReader(uploadedFile)
if err != nil {
log.Error("Unsupported file")
apiErrorPage(c, http.StatusBadRequest, "Unsupported file")
return return
} }
// Derive Filename // Derive Filename
var fileName string fileName := deriveBaseFileName(&metadata.MetadataInfo{
if document.Author != nil { Type: *docType,
fileName = fileName + *document.Author PartialMD5: &document.ID,
} else { Title: document.Title,
fileName = fileName + "Unknown" Author: document.Author,
} })
if document.Title != nil {
fileName = fileName + " - " + *document.Title
} else {
fileName = fileName + " - Unknown"
}
// Remove Slashes
fileName = strings.ReplaceAll(fileName, "/", "")
// Derive & Sanitize File Name
fileName = "." + filepath.Clean(fmt.Sprintf("/%s [%s]%s", fileName, document.ID, fileExtension))
// Generate Storage Path // Generate Storage Path
safePath := filepath.Join(api.Config.DataPath, "documents", fileName) basePath := filepath.Join(api.cfg.DataPath, "documents")
safePath := filepath.Join(basePath, fileName)
// Save & Prevent Overwrites // Save & Prevent Overwrites
_, err = os.Stat(safePath) _, err = os.Stat(safePath)
if os.IsNotExist(err) { if os.IsNotExist(err) {
err = c.SaveUploadedFile(fileData, safePath) err = c.SaveUploadedFile(fileData, safePath)
if err != nil { if err != nil {
log.Error("[uploadDocumentFile] Save Failure:", err) log.Error("Save Failure:", err)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "File Error"}) apiErrorPage(c, http.StatusBadRequest, "File Error")
return return
} }
} }
// Get MD5 Hash // Acquire Metadata
fileHash, err := getFileMD5(safePath) metadataInfo, err := metadata.GetMetadata(safePath)
if err != nil { if err != nil {
log.Error("[uploadDocumentFile] Hash Failure:", err) log.Errorf("Unable to acquire metadata: %v", err)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "File Error"}) apiErrorPage(c, http.StatusBadRequest, "Unable to acquire metadata")
return return
} }
// Upsert Document // Upsert Document
if _, err = api.DB.Queries.UpsertDocument(api.DB.Ctx, database.UpsertDocumentParams{ if _, err = api.db.Queries.UpsertDocument(c, database.UpsertDocumentParams{
ID: document.ID, ID: document.ID,
Md5: fileHash, Md5: metadataInfo.MD5,
Words: metadataInfo.WordCount,
Filepath: &fileName, Filepath: &fileName,
Basepath: &basePath,
}); err != nil { }); err != nil {
log.Error("[uploadDocumentFile] UpsertDocument DB Error:", err) log.Error("UpsertDocument DB Error:", err)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Document Error"}) apiErrorPage(c, http.StatusBadRequest, "Document Error")
return return
} }
// Update Document Sync Attribute koJSON(c, http.StatusOK, gin.H{
if _, err = api.DB.Queries.UpdateDocumentSync(api.DB.Ctx, database.UpdateDocumentSyncParams{
ID: document.ID,
Synced: true,
}); err != nil {
log.Error("[uploadDocumentFile] UpdateDocumentSync DB Error:", err)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Document"})
return
}
c.JSON(http.StatusOK, gin.H{
"status": "ok", "status": "ok",
}) })
} }
func (api *API) downloadDocumentFile(c *gin.Context) { func (api *API) koDemoModeJSONError(c *gin.Context) {
var rDoc requestDocumentID apiErrorPage(c, http.StatusUnauthorized, "Not Allowed in Demo Mode")
if err := c.ShouldBindUri(&rDoc); err != nil { }
log.Error("[downloadDocumentFile] Invalid URI Bind")
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
return
}
// Get Document func apiErrorPage(c *gin.Context, errorCode int, errorMessage string) {
document, err := api.DB.Queries.GetDocument(api.DB.Ctx, rDoc.DocumentID) c.AbortWithStatusJSON(errorCode, gin.H{"error": errorMessage})
if err != nil {
log.Error("[uploadDocumentFile] GetDocument DB Error:", err)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Unknown Document"})
return
}
if document.Filepath == nil {
log.Error("[uploadDocumentFile] Document Doesn't Have File:", rDoc.DocumentID)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Document Doesn't Exist"})
return
}
// Derive Storage Location
filePath := filepath.Join(api.Config.DataPath, "documents", *document.Filepath)
// Validate File Exists
_, err = os.Stat(filePath)
if os.IsNotExist(err) {
log.Error("[uploadDocumentFile] File Doesn't Exist:", rDoc.DocumentID)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Document Doesn't Exists"})
return
}
// Force Download (Security)
c.Header("Content-Disposition", fmt.Sprintf("attachment; filename=%s", filepath.Base(*document.Filepath)))
c.File(filePath)
} }
func (api *API) sanitizeInput(val any) *string { func (api *API) sanitizeInput(val any) *string {
switch v := val.(type) { switch v := val.(type) {
case *string: case *string:
if v != nil { if v != nil {
newString := html.UnescapeString(api.HTMLPolicy.Sanitize(string(*v))) newString := html.UnescapeString(htmlPolicy.Sanitize(string(*v)))
return &newString return &newString
} }
case string: case string:
if v != "" { if v != "" {
newString := html.UnescapeString(api.HTMLPolicy.Sanitize(string(v))) newString := html.UnescapeString(htmlPolicy.Sanitize(string(v)))
return &newString return &newString
} }
} }
@@ -646,3 +589,10 @@ func getFileMD5(filePath string) (*string, error) {
return &fileHash, nil return &fileHash, nil
} }
// koJSON writes a JSON response while forcing the Content-Type header to be
// exactly `application/json` (no charset suffix). This is addressing
// the following issue: https://github.com/koreader/koreader/issues/13629
func koJSON(c *gin.Context, code int, obj any) {
	c.Header("Content-Type", "application/json")
	c.JSON(code, obj)
}

168
api/opds-routes.go Normal file
View File

@@ -0,0 +1,168 @@
package api
import (
"fmt"
"net/http"
"strings"
"time"
"github.com/gin-gonic/gin"
log "github.com/sirupsen/logrus"
"reichard.io/antholume/database"
"reichard.io/antholume/opds"
"reichard.io/antholume/pkg/ptr"
)
// mimeMapping maps a lowercase file extension (without the leading dot) to
// the MIME type advertised in OPDS acquisition links. Extensions missing
// from this table yield an empty TypeLink.
var mimeMapping map[string]string = map[string]string{
	"epub": "application/epub+zip",
	"azw":  "application/vnd.amazon.mobi8-ebook",
	"mobi": "application/x-mobipocket-ebook",
	"pdf":  "application/pdf",
	"zip":  "application/zip",
	"txt":  "text/plain",
	"rtf":  "application/rtf",
	"htm":  "text/html",
	"html": "text/html",
	"doc":  "application/msword",
	"lit":  "application/x-ms-reader",
}
// opdsEntry serves the root OPDS catalog feed, exposing the OpenSearch
// description link and a single entry pointing at the full document listing.
func (api *API) opdsEntry(c *gin.Context) {
	// Link to the OpenSearch description used by OPDS clients.
	searchLink := opds.Link{
		Title:    "Search AnthoLume",
		Rel:      "search",
		TypeLink: "application/opensearchdescription+xml",
		Href:     "/api/opds/search.xml",
	}

	// Single catalog entry for the complete document collection.
	allDocumentsEntry := opds.Entry{
		Title: "AnthoLume - All Documents",
		Content: &opds.Content{
			Content:     "AnthoLume - All Documents",
			ContentType: "text",
		},
		Links: []opds.Link{
			{
				Href:     "/api/opds/documents",
				TypeLink: "application/atom+xml;type=feed;profile=opds-catalog",
			},
		},
	}

	// Build & Return XML
	feed := &opds.Feed{
		Title:   "AnthoLume OPDS Server",
		Updated: time.Now().UTC(),
		Links:   []opds.Link{searchLink},
		Entries: []opds.Entry{allDocumentsEntry},
	}
	c.XML(http.StatusOK, feed)
}
// opdsDocuments serves the paginated OPDS feed of documents, optionally
// filtered by a LIKE search query, emitting one acquisition entry per
// document that has a stored file.
func (api *API) opdsDocuments(c *gin.Context) {
	var auth authData
	if data, _ := c.Get("Authorization"); data != nil {
		auth = data.(authData)
	}

	// Potential URL Parameters (Default Pagination - 100)
	qParams := bindQueryParams(c, 100)

	// Possible Query
	var query *string
	if qParams.Search != nil && *qParams.Search != "" {
		search := "%" + *qParams.Search + "%"
		query = &search
	}

	// Get Documents
	documents, err := api.db.Queries.GetDocumentsWithStats(c, database.GetDocumentsWithStatsParams{
		UserID:  auth.UserName,
		Query:   query,
		Deleted: ptr.Of(false),
		Offset:  (*qParams.Page - 1) * *qParams.Limit,
		Limit:   *qParams.Limit,
	})
	if err != nil {
		log.Error("GetDocumentsWithStats DB Error:", err)
		c.AbortWithStatus(http.StatusBadRequest)
		return
	}

	// Build OPDS Entries
	var entries []opds.Entry
	for _, doc := range documents {
		// Require File - entries without a stored file are not served.
		if doc.Filepath == nil {
			continue
		}

		// Extension (last dot-separated segment) selects the MIME type.
		pieces := strings.Split(*doc.Filepath, ".")
		extension := pieces[len(pieces)-1]

		// Fall back to "N/A" for missing metadata fields.
		title, author, description := "N/A", "N/A", "N/A"
		if doc.Title != nil {
			title = *doc.Title
		}
		if doc.Author != nil {
			author = *doc.Author
		}
		if doc.Description != nil {
			description = *doc.Description
		}

		entries = append(entries, opds.Entry{
			Title:  title,
			Author: []opds.Author{{Name: author}},
			Content: &opds.Content{
				Content:     description,
				ContentType: "text",
			},
			Links: []opds.Link{
				{
					Rel:      "http://opds-spec.org/acquisition",
					Href:     fmt.Sprintf("/api/opds/documents/%s/file", doc.ID),
					TypeLink: mimeMapping[extension],
				},
				{
					Rel:      "http://opds-spec.org/image",
					Href:     fmt.Sprintf("/api/opds/documents/%s/cover", doc.ID),
					TypeLink: "image/jpeg",
				},
			},
		})
	}

	feedTitle := "All Documents"
	if query != nil {
		feedTitle = "Search Results"
	}

	// Build & Return XML
	c.XML(http.StatusOK, &opds.Feed{
		Title:   feedTitle,
		Updated: time.Now().UTC(),
		Entries: entries,
	})
}
// opdsSearchDescription serves the static OpenSearch description document
// that tells OPDS clients how to construct a search URL against
// /api/opds/documents.
func (api *API) opdsSearchDescription(c *gin.Context) {
	rawXML := `<OpenSearchDescription xmlns="http://a9.com/-/spec/opensearch/1.1/">
<ShortName>Search AnthoLume</ShortName>
<Description>Search AnthoLume</Description>
<Url type="application/atom+xml;profile=opds-catalog;kind=acquisition" template="/api/opds/documents?search={searchTerms}"/>
</OpenSearchDescription>`
	c.Data(http.StatusOK, "application/xml", []byte(rawXML))
}

76
api/streamer.go Normal file
View File

@@ -0,0 +1,76 @@
package api
import (
"bytes"
"html/template"
"net/http"
"sync"
"time"
"github.com/gin-gonic/gin"
)
// streamer incrementally writes chunked HTML to an open response, rendering
// named templates as fragments. A background keep-alive goroutine (started
// by newStreamer) pings the connection until close() is called.
type streamer struct {
	templates  map[string]*template.Template // parsed templates, keyed by name
	writer     gin.ResponseWriter            // underlying chunked response writer
	mutex      sync.Mutex                    // serializes writes between caller and keep-alive goroutine
	completeCh chan struct{}                 // closed by close() to stop the keep-alive goroutine
}
// newStreamer starts a chunked HTML response on c, writes the opening
// markup in data, and spawns a keep-alive goroutine that emits an HTML
// comment every two seconds until the stream completes or the client
// disconnects. Callers must eventually call close() to stop the goroutine.
func (api *API) newStreamer(c *gin.Context, data string) *streamer {
	stream := &streamer{
		writer:     c.Writer,
		templates:  api.templates,
		completeCh: make(chan struct{}),
	}

	// Set Headers
	header := stream.writer.Header()
	header.Set("Transfer-Encoding", "chunked")
	header.Set("Content-Type", "text/html; charset=utf-8")
	header.Set("X-Content-Type-Options", "nosniff")
	stream.writer.WriteHeader(http.StatusOK)

	// Send Open Element Tags
	stream.write(data)

	// Keep Alive. A ticker (instead of the previous default-branch
	// time.Sleep loop) means completion/disconnect is observed immediately
	// rather than after up to 2s, and no ping can be emitted once the
	// stream has already written its closing markup.
	go func() {
		closeCh := stream.writer.CloseNotify()
		ticker := time.NewTicker(2 * time.Second)
		defer ticker.Stop()
		for {
			select {
			case <-stream.completeCh:
				return
			case <-closeCh:
				return
			case <-ticker.C:
				stream.write("<!-- ping -->")
			}
		}
	}()

	return stream
}
// write appends str to the response under the mutex and flushes so the
// client receives the chunk immediately. A failed write is ignored on
// purpose: it means the client went away, which the keep-alive goroutine
// detects independently via CloseNotify.
func (stream *streamer) write(str string) {
	stream.mutex.Lock()
	// defer so a panicking writer cannot leave the mutex locked forever.
	defer stream.mutex.Unlock()

	if _, err := stream.writer.WriteString(str); err != nil {
		return
	}
	// Comma-ok guard instead of a bare assertion: a writer that cannot
	// flush should not panic mid-stream.
	if flusher, ok := stream.writer.(http.Flusher); ok {
		flusher.Flush()
	}
}
// send renders templateName with templateVars and streams the rendered
// HTML fragment to the client. Unknown template names and render errors
// are skipped rather than panicking or emitting a partially rendered
// fragment into an in-progress stream.
func (stream *streamer) send(templateName string, templateVars gin.H) {
	t, ok := stream.templates[templateName]
	if !ok || t == nil {
		// Guard: ExecuteTemplate on a nil template would panic mid-stream.
		return
	}
	buf := &bytes.Buffer{}
	if err := t.ExecuteTemplate(buf, templateName, templateVars); err != nil {
		// Drop the fragment; a partial render would corrupt the page.
		return
	}
	stream.write(buf.String())
}
// close writes the closing markup in data and signals the keep-alive
// goroutine to terminate. It must be called exactly once per streamer.
func (stream *streamer) close(data string) {
	// Send Close Element Tags
	stream.write(data)

	// Signal completion to the keep-alive goroutine.
	close(stream.completeCh)
}

188
api/utils.go Normal file
View File

@@ -0,0 +1,188 @@
package api
import (
"errors"
"fmt"
"math"
"path/filepath"
"reflect"
"strings"
"reichard.io/antholume/database"
"reichard.io/antholume/graph"
"reichard.io/antholume/metadata"
)
// getTimeZones returns the fixed list of IANA timezone names offered in
// the UI, grouped by region and alphabetized within each region.
func getTimeZones() []string {
	timezones := []string{
		"Africa/Cairo",
		"Africa/Johannesburg",
		"Africa/Lagos",
		"Africa/Nairobi",
		"America/Adak",
		"America/Anchorage",
		"America/Buenos_Aires",
		"America/Chicago",
		"America/Denver",
		"America/Los_Angeles",
		"America/Mexico_City",
		"America/New_York",
		"America/Nuuk",
		"America/Phoenix",
		"America/Puerto_Rico",
		"America/Sao_Paulo",
		"America/St_Johns",
		"America/Toronto",
		"Asia/Dubai",
		"Asia/Hong_Kong",
		"Asia/Kolkata",
		"Asia/Seoul",
		"Asia/Shanghai",
		"Asia/Singapore",
		"Asia/Tokyo",
		"Atlantic/Azores",
		"Australia/Melbourne",
		"Australia/Sydney",
		"Europe/Berlin",
		"Europe/London",
		"Europe/Moscow",
		"Europe/Paris",
		"Pacific/Auckland",
		"Pacific/Honolulu",
	}
	return timezones
}
// niceSeconds takes in an int (in seconds) and returns a string readable
// representation. For example 1928371 -> "22d 7h 39m 31s". Zero input
// returns "N/A". Components that are zero are omitted, and (unlike the
// previous implementation) the result never carries a trailing space
// when the seconds component is zero (e.g. 120 -> "2m", not "2m ").
// Deprecated: Use formatters.FormatDuration
func niceSeconds(input int64) string {
	if input == 0 {
		return "N/A"
	}

	// Integer arithmetic: for non-negative input this matches the old
	// math.Floor-based float computation exactly.
	days := input / 86400
	hours := (input % 86400) / 3600
	minutes := (input % 3600) / 60
	seconds := input % 60

	var parts []string
	if days > 0 {
		parts = append(parts, fmt.Sprintf("%dd", days))
	}
	if hours > 0 {
		parts = append(parts, fmt.Sprintf("%dh", hours))
	}
	if minutes > 0 {
		parts = append(parts, fmt.Sprintf("%dm", minutes))
	}
	if seconds > 0 {
		parts = append(parts, fmt.Sprintf("%ds", seconds))
	}
	return strings.Join(parts, " ")
}
// niceNumbers takes in an int and returns a string representation. For
// example 19823 -> "19.8k". Negative values are formatted by magnitude
// with the sign restored, and very large values are clamped to the last
// abbreviation ("T") instead of indexing past the table.
// Deprecated: Use formatters.FormatNumber
func niceNumbers(input int64) string {
	if input == 0 {
		return "0"
	}

	// Guard: math.Log10 of a negative value is NaN, which previously
	// produced a garbage abbreviation index.
	if input < 0 {
		return "-" + niceNumbers(-input)
	}

	abbreviations := []string{"", "k", "M", "B", "T"}
	abbrevIndex := int(math.Log10(float64(input)) / 3)
	// Clamp: inputs >= 10^15 would otherwise index out of range.
	if abbrevIndex >= len(abbreviations) {
		abbrevIndex = len(abbreviations) - 1
	}
	scaledNumber := float64(input) / math.Pow(10, float64(abbrevIndex*3))

	switch {
	case scaledNumber >= 100:
		return fmt.Sprintf("%.0f%s", scaledNumber, abbreviations[abbrevIndex])
	case scaledNumber >= 10:
		return fmt.Sprintf("%.1f%s", scaledNumber, abbreviations[abbrevIndex])
	default:
		return fmt.Sprintf("%.2f%s", scaledNumber, abbreviations[abbrevIndex])
	}
}
// getSVGGraphData builds SVGGraphData from the provided stats, width and height.
// It is used exclusively in templates to generate the daily read stats graph.
func getSVGGraphData(inputData []database.GetDailyReadStatsRow, svgWidth int, svgHeight int) graph.SVGGraphData {
	// Pre-size the slice: the output length is known, so avoid append
	// growth copies.
	intData := make([]int64, 0, len(inputData))
	for _, item := range inputData {
		intData = append(intData, item.MinutesRead)
	}
	return graph.GetSVGGraphData(intData, svgWidth, svgHeight)
}
// dict builds a map[string]any from alternating key/value arguments
// ("key1", v1, "key2", v2, ...). Keys must be strings; an odd argument
// count or non-string key is an error. It's primarily utilized in
// templates.
func dict(values ...any) (map[string]any, error) {
	if len(values)%2 != 0 {
		return nil, errors.New("invalid dict call")
	}
	result := make(map[string]any, len(values)/2)
	for i := 0; i+1 < len(values); i += 2 {
		key, ok := values[i].(string)
		if !ok {
			return nil, errors.New("dict keys must be strings")
		}
		result[key] = values[i+1]
	}
	return result, nil
}
// fields returns a map[string]any of the provided struct's fields keyed
// by field name, dereferencing a pointer argument first. Non-struct
// values are an error. It's primarily utilized in templates.
func fields(value any) (map[string]any, error) {
	v := reflect.Indirect(reflect.ValueOf(value))
	if v.Kind() != reflect.Struct {
		return nil, fmt.Errorf("%T is not a struct", value)
	}
	structType := v.Type()
	result := make(map[string]any, structType.NumField())
	for i := 0; i < structType.NumField(); i++ {
		result[structType.Field(i).Name] = v.Field(i).Interface()
	}
	return result, nil
}
// slice collects its arguments into a []any. It's primarily utilized in
// templates, where there is no slice-literal syntax.
func slice(items ...any) []any {
	return items
}
// deriveBaseFileName builds the base filename for a given MetadataInfo
// object in the form "./<Author> - <Title> [<PartialMD5>]<ext>",
// substituting "Unknown" for a missing author or title and stripping
// path separators. Nil Author/Title/PartialMD5 pointers are tolerated
// (the previous implementation dereferenced them unconditionally and
// panicked; callers such as the KOReader upload handler pass nullable
// document fields straight through).
func deriveBaseFileName(metadataInfo *metadata.MetadataInfo) string {
	// Derive Author - nil or empty both fall back to "Unknown".
	author := "Unknown"
	if metadataInfo.Author != nil && *metadataInfo.Author != "" {
		author = *metadataInfo.Author
	}

	// Derive Title - same fallback rule as author.
	title := "Unknown"
	if metadataInfo.Title != nil && *metadataInfo.Title != "" {
		title = *metadataInfo.Title
	}

	partialMD5 := ""
	if metadataInfo.PartialMD5 != nil {
		partialMD5 = *metadataInfo.PartialMD5
	}

	// Remove Slashes so metadata cannot introduce path components.
	fileName := strings.ReplaceAll(author+" - "+title, "/", "")

	// filepath.Clean on a rooted path plus the "." prefix yields a safe
	// relative filename.
	return "." + filepath.Clean(fmt.Sprintf("/%s [%s]%s", fileName, partialMD5, metadataInfo.Type))
}
// importStatusPriority returns the sort priority for an import status in
// the UI: failures first (1), already-existing documents next (2), and
// everything else last (3).
func importStatusPriority(status importStatus) int {
	if status == importFailed {
		return 1
	}
	if status == importExists {
		return 2
	}
	return 3
}

35
api/utils_test.go Normal file
View File

@@ -0,0 +1,35 @@
package api
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestNiceSeconds verifies the human-readable duration formatting,
// including the "N/A" sentinel for zero input.
func TestNiceSeconds(t *testing.T) {
	wantOne := "22d 7h 39m 31s"
	wantNA := "N/A"
	niceOne := niceSeconds(1928371)
	niceNA := niceSeconds(0)
	assert.Equal(t, wantOne, niceOne, "should be nice seconds")
	assert.Equal(t, wantNA, niceNA, "should be nice NA")
}
// TestNiceNumbers verifies abbreviated number formatting across the
// precision tiers (3, 2 and 1 significant decimal) and the zero case.
func TestNiceNumbers(t *testing.T) {
	wantMillions := "198M"
	wantThousands := "19.8k"
	wantThousandsTwo := "1.98k"
	wantZero := "0"
	niceMillions := niceNumbers(198236461)
	niceThousands := niceNumbers(19823)
	niceThousandsTwo := niceNumbers(1984)
	niceZero := niceNumbers(0)
	assert.Equal(t, wantMillions, niceMillions, "should be nice millions")
	assert.Equal(t, wantThousands, niceThousands, "should be nice thousands")
	assert.Equal(t, wantThousandsTwo, niceThousandsTwo, "should be nice thousands")
	assert.Equal(t, wantZero, niceZero, "should be nice zero")
}

122
assets/common.js Normal file
View File

@@ -0,0 +1,122 @@
/**
 * Custom Service Worker Convenience Functions Wrapper.
 * Exposes `install()` (register /sw.js and wait for activation) and
 * `send(data)` (request/response messaging with the active worker).
 **/
const SW = (function () {
  // Helper Function - builds a random 32-char uppercase hex ID used to
  // correlate a postMessage request with its reply.
  function randomID() {
    return "00000000000000000000000000000000".replace(/[018]/g, (c) =>
      (c ^ (crypto.getRandomValues(new Uint8Array(1))[0] & (15 >> (c / 4))))
        .toString(16)
        .toUpperCase()
    );
  }

  // Variables
  let swInstance = null; // ServiceWorkerRegistration, set by install()
  let outstandingMessages = {}; // message id -> resolve callback from send()

  // Resolve the pending send() promise matching each incoming message's
  // id; unmatched ids are logged and dropped.
  navigator.serviceWorker?.addEventListener("message", ({ data }) => {
    let { id } = data;
    data = data.data;
    console.log("[SW] Received Message:", { id, data });
    if (!outstandingMessages[id])
      return console.warn("[SW] Invalid Outstanding Message:", { id, data });
    outstandingMessages[id](data);
    delete outstandingMessages[id];
  });

  // Registers /sw.js and resolves once the worker reaches the
  // "installed" or "activated" state. Throws when service workers are
  // unsupported in this browser.
  async function install() {
    if (!navigator.serviceWorker)
      throw new Error("Service Worker Not Supported");

    // Register Service Worker
    swInstance = await navigator.serviceWorker.register("/sw.js");
    swInstance.onupdatefound = (data) =>
      console.log("[SW.install] Update Found:", data);

    // Wait for Registration / Update - whichever worker slot is populated.
    let serviceWorker =
      swInstance.installing || swInstance.waiting || swInstance.active;

    // Await Installation - also checks the current state immediately in
    // case the worker was already installed before the listener attached.
    await new Promise((resolve) => {
      serviceWorker.onstatechange = (data) => {
        console.log("[SW.install] State Change:", serviceWorker.state);
        if (["installed", "activated"].includes(serviceWorker.state)) resolve();
      };
      console.log("[SW.install] Current State:", serviceWorker.state);
      if (["installed", "activated"].includes(serviceWorker.state)) resolve();
    });
  }

  // Posts data to the active worker and returns a promise resolved with
  // the worker's reply (matched via a random message id). Rejects when no
  // worker is active. NOTE(review): the promise never settles if the
  // worker doesn't reply - presumably acceptable for this app; confirm.
  function send(data) {
    if (!swInstance?.active) return Promise.reject("Inactive Service Worker");
    let id = randomID();
    let msgPromise = new Promise((resolve) => {
      outstandingMessages[id] = resolve;
    });
    swInstance.active.postMessage({ id, data });
    return msgPromise;
  }

  return { install, send };
})();
/**
 * Custom IndexedDB Convenience Functions Wrapper
 *
 * Thin promise-based facade over the idb-keyval library. Becomes
 * `undefined` when idb-keyval was not loaded first.
 **/
const IDB = (function () {
  // Bail out (IDB ends up undefined) when the idb-keyval library is missing.
  if (!idbKeyval)
    return console.error(
      "[IDB] idbKeyval not found - Did you load idb-keyval?"
    );
  let { get, del, entries, update, keys } = idbKeyval;
  return {
    /**
     * Store newValue under key. Resolves with a change object holding
     * newValue and, when a previous non-null value existed, oldValue.
     **/
    async set(key, newValue) {
      let changeObj = {};
      await update(key, (oldValue) => {
        if (oldValue != null) changeObj.oldValue = oldValue;
        changeObj.newValue = newValue;
        return newValue;
      });
      return changeObj;
    },
    /**
     * Read key; when the stored value is null/undefined and defaultValue
     * is truthy, resolve with defaultValue instead.
     * NOTE(review): a falsy default (0, "", false) is never applied here —
     * confirm callers only pass truthy defaults.
     **/
    get(key, defaultValue) {
      return get(key).then((resp) => {
        return defaultValue && resp == null ? defaultValue : resp;
      });
    },
    // Delete key from the store.
    del(key) {
      return del(key);
    },
    /**
     * Find keys matching keyRegExp. Resolves with an array of matching
     * keys, or — when includeValues is true — an object mapping each
     * matching key to its stored value. Throws on a non-RegExp argument.
     **/
    find(keyRegExp, includeValues = false) {
      if (!(keyRegExp instanceof RegExp)) throw new Error("Invalid RegExp");
      if (!includeValues)
        return keys().then((allKeys) =>
          allKeys.filter((key) => keyRegExp.test(key))
        );
      return entries().then((allItems) => {
        const matchingKeys = allItems.filter((keyVal) =>
          keyRegExp.test(keyVal[0])
        );
        return matchingKeys.reduce((obj, keyVal) => {
          const [key, val] = keyVal;
          obj[key] = val;
          return obj;
        }, {});
      });
    },
  };
})();

BIN
assets/icons/favicon.ico Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 15 KiB

BIN
assets/icons/icon512.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 108 KiB

View File

Before

Width:  |  Height:  |  Size: 511 KiB

After

Width:  |  Height:  |  Size: 511 KiB

View File

Before

Width:  |  Height:  |  Size: 699 KiB

After

Width:  |  Height:  |  Size: 699 KiB

View File

Before

Width:  |  Height:  |  Size: 462 KiB

After

Width:  |  Height:  |  Size: 462 KiB

View File

Before

Width:  |  Height:  |  Size: 457 KiB

After

Width:  |  Height:  |  Size: 457 KiB

View File

Before

Width:  |  Height:  |  Size: 213 KiB

After

Width:  |  Height:  |  Size: 213 KiB

78
assets/index.js Normal file
View File

@@ -0,0 +1,78 @@
// Install Service Worker
// Best-effort: a failed installation is logged, never thrown, so the rest
// of the page keeps working without offline support.
async function installServiceWorker() {
  try {
    await SW.install();
    console.log("[installServiceWorker] Service Worker Installed");
  } catch (e) {
    console.log("[installServiceWorker] Service Worker Install Error:", e);
  }
}
// Flush Cached Progress & Activity
/**
 * Push any offline-cached progress and activity events to the server.
 * Cached entries are deleted from IndexedDB only after a successful
 * flush, so a failure leaves the data intact for the next attempt.
 **/
async function flushCachedData() {
  let allProgress = await IDB.find(/^PROGRESS-/, true);
  let allActivity = await IDB.get("ACTIVITY");
  console.log("[flushCachedData] Flushing Data:", { allProgress, allActivity });
  // Flush each cached progress event independently (fire-and-forget).
  Object.entries(allProgress).forEach(([id, progressEvent]) => {
    flushProgress(progressEvent)
      .then(() => {
        console.log("[flushCachedData] Progress Flush Success:", id);
        return IDB.del(id);
      })
      .catch((e) => {
        console.log("[flushCachedData] Progress Flush Failure:", id, e);
      });
  });
  // Nothing cached under ACTIVITY -> done.
  if (!allActivity) return;
  flushActivity(allActivity)
    .then(() => {
      console.log("[flushCachedData] Activity Flush Success");
      return IDB.del("ACTIVITY");
    })
    .catch((e) => {
      console.log("[flushCachedData] Activity Flush Failure", e);
    });
}
/**
 * POST a cached activity payload to the server API. Returns a promise
 * that resolves after the server reply has been logged, and rejects on
 * network or parse failure.
 **/
async function flushActivity(activityEvent) {
  console.log("[flushActivity] Flushing Activity...");
  // Flush Activity
  const response = await fetch("/api/ko/activity", {
    method: "POST",
    body: JSON.stringify(activityEvent),
  });
  console.log("[flushActivity] Flushed Activity:", {
    response: response,
    json: await response.json(),
    data: activityEvent,
  });
}
/**
 * PUT a cached progress payload to the sync endpoint. Returns a promise
 * that resolves after the server reply has been logged, and rejects on
 * network or parse failure.
 **/
async function flushProgress(progressEvent) {
  console.log("[flushProgress] Flushing Progress...");
  // Flush Progress
  const response = await fetch("/api/ko/syncs/progress", {
    method: "PUT",
    body: JSON.stringify(progressEvent),
  });
  console.log("[flushProgress] Flushed Progress:", {
    response: response,
    json: await response.json(),
    data: progressEvent,
  });
}
// Event Listeners
// Re-attempt flushing cached data as soon as connectivity returns.
window.addEventListener("online", flushCachedData);
// Initial Load
// Flush anything cached while offline, then (re)install the service worker.
flushCachedData();
installServiceWorker();

1
assets/lib/epub.min.js vendored Normal file

File diff suppressed because one or more lines are too long

1
assets/lib/idb-keyval.min.js vendored Normal file
View File

@@ -0,0 +1 @@
function _slicedToArray(t,n){return _arrayWithHoles(t)||_iterableToArrayLimit(t,n)||_unsupportedIterableToArray(t,n)||_nonIterableRest()}function _nonIterableRest(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}function _unsupportedIterableToArray(t,n){if(t){if("string"==typeof t)return _arrayLikeToArray(t,n);var r=Object.prototype.toString.call(t).slice(8,-1);return"Object"===r&&t.constructor&&(r=t.constructor.name),"Map"===r||"Set"===r?Array.from(t):"Arguments"===r||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(r)?_arrayLikeToArray(t,n):void 0}}function _arrayLikeToArray(t,n){(null==n||n>t.length)&&(n=t.length);for(var r=0,e=new Array(n);r<n;r++)e[r]=t[r];return e}function _iterableToArrayLimit(t,n){var r=null==t?null:"undefined"!=typeof Symbol&&t[Symbol.iterator]||t["@@iterator"];if(null!=r){var e,o,u=[],i=!0,a=!1;try{for(r=r.call(t);!(i=(e=r.next()).done)&&(u.push(e.value),!n||u.length!==n);i=!0);}catch(t){a=!0,o=t}finally{try{i||null==r.return||r.return()}finally{if(a)throw o}}return u}}function _arrayWithHoles(t){if(Array.isArray(t))return t}function _typeof(t){return _typeof="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t},_typeof(t)}!function(t,n){"object"===("undefined"==typeof exports?"undefined":_typeof(exports))&&"undefined"!=typeof module?n(exports):"function"==typeof define&&define.amd?define(["exports"],n):n((t="undefined"!=typeof globalThis?globalThis:t||self).idbKeyval={})}(this,(function(t){"use strict";function n(t){return new Promise((function(n,r){t.oncomplete=t.onsuccess=function(){return n(t.result)},t.onabort=t.onerror=function(){return r(t.error)}}))}function r(t,r){var e=indexedDB.open(t);e.onupgradeneeded=function(){return e.result.createObjectStore(r)};var o=n(e);return 
function(t,n){return o.then((function(e){return n(e.transaction(r,t).objectStore(r))}))}}var e;function o(){return e||(e=r("keyval-store","keyval")),e}function u(t,r){return t.openCursor().onsuccess=function(){this.result&&(r(this.result),this.result.continue())},n(t.transaction)}t.clear=function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:o();return t("readwrite",(function(t){return t.clear(),n(t.transaction)}))},t.createStore=r,t.del=function(t){var r=arguments.length>1&&void 0!==arguments[1]?arguments[1]:o();return r("readwrite",(function(r){return r.delete(t),n(r.transaction)}))},t.delMany=function(t){var r=arguments.length>1&&void 0!==arguments[1]?arguments[1]:o();return r("readwrite",(function(r){return t.forEach((function(t){return r.delete(t)})),n(r.transaction)}))},t.entries=function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:o();return t("readonly",(function(r){if(r.getAll&&r.getAllKeys)return Promise.all([n(r.getAllKeys()),n(r.getAll())]).then((function(t){var n=_slicedToArray(t,2),r=n[0],e=n[1];return r.map((function(t,n){return[t,e[n]]}))}));var e=[];return t("readonly",(function(t){return u(t,(function(t){return e.push([t.key,t.value])})).then((function(){return e}))}))}))},t.get=function(t){var r=arguments.length>1&&void 0!==arguments[1]?arguments[1]:o();return r("readonly",(function(r){return n(r.get(t))}))},t.getMany=function(t){var r=arguments.length>1&&void 0!==arguments[1]?arguments[1]:o();return r("readonly",(function(r){return Promise.all(t.map((function(t){return n(r.get(t))})))}))},t.keys=function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:o();return t("readonly",(function(t){if(t.getAllKeys)return n(t.getAllKeys());var r=[];return u(t,(function(t){return r.push(t.key)})).then((function(){return r}))}))},t.promisifyRequest=n,t.set=function(t,r){var e=arguments.length>2&&void 0!==arguments[2]?arguments[2]:o();return e("readwrite",(function(e){return 
e.put(r,t),n(e.transaction)}))},t.setMany=function(t){var r=arguments.length>1&&void 0!==arguments[1]?arguments[1]:o();return r("readwrite",(function(r){return t.forEach((function(t){return r.put(t[1],t[0])})),n(r.transaction)}))},t.update=function(t,r){var e=arguments.length>2&&void 0!==arguments[2]?arguments[2]:o();return e("readwrite",(function(e){return new Promise((function(o,u){e.get(t).onsuccess=function(){try{e.put(r(this.result),t),o(n(e.transaction))}catch(t){u(t)}}}))}))},t.values=function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:o();return t("readonly",(function(t){if(t.getAll)return n(t.getAll());var r=[];return u(t,(function(t){return r.push(t.value)})).then((function(){return r}))}))},Object.defineProperty(t,"__esModule",{value:!0})}));

15
assets/lib/jszip.min.js vendored Normal file

File diff suppressed because one or more lines are too long

2
assets/lib/no-sleep.min.js vendored Normal file

File diff suppressed because one or more lines are too long

281
assets/local/index.htm Normal file
View File

@@ -0,0 +1,281 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta
name="viewport"
content="width=device-width, initial-scale=0.90, user-scalable=no, viewport-fit=cover"
/>
<meta name="apple-mobile-web-app-capable" content="yes" />
<meta
name="apple-mobile-web-app-status-bar-style"
content="black-translucent"
/>
<meta
name="theme-color"
content="#F3F4F6"
media="(prefers-color-scheme: light)"
/>
<meta
name="theme-color"
content="#1F2937"
media="(prefers-color-scheme: dark)"
/>
<title>AnthoLume - Local</title>
<link rel="manifest" href="/manifest.json" />
<link rel="stylesheet" href="/assets/style.css" />
<!-- Libraries -->
<script src="/assets/lib/jszip.min.js"></script>
<script src="/assets/lib/epub.min.js"></script>
<script src="/assets/lib/idb-keyval.min.js"></script>
<!-- Local -->
<script src="/assets/common.js"></script>
<script src="/assets/index.js"></script>
<script src="/assets/local/index.js"></script>
<style>
/* ----------------------------- */
/* -------- PWA Styling -------- */
/* ----------------------------- */
html,
body {
overscroll-behavior-y: none;
margin: 0px;
}
html {
height: calc(100% + env(safe-area-inset-bottom));
padding: env(safe-area-inset-top) env(safe-area-inset-right) 0
env(safe-area-inset-left);
}
main {
height: calc(100dvh - 4rem - env(safe-area-inset-top));
}
#container {
padding-bottom: calc(5em + env(safe-area-inset-bottom) * 2);
}
/* No Scrollbar - IE, Edge, Firefox */
* {
-ms-overflow-style: none;
scrollbar-width: none;
}
/* No Scrollbar - WebKit */
*::-webkit-scrollbar {
display: none;
}
.css-button:checked + div {
display: block;
opacity: 1;
}
.css-button + div {
display: none;
opacity: 0;
}
.css-button:checked + div + label {
display: none;
}
</style>
</head>
<body class="bg-gray-100 dark:bg-gray-800">
<div class="flex items-center justify-between w-full h-16">
<h1 class="text-xl font-bold dark:text-white px-6 lg:ml-48">
Local Documents
</h1>
</div>
<main class="relative overflow-hidden">
<div
id="container"
class="h-[100dvh] px-4 overflow-auto md:px-6 lg:mx-48"
>
<div
id="online"
class="rounded text-black dark:text-white bg-white dark:bg-gray-700 text-center p-3 mb-4"
>
You're Online:
<a
href="/"
class="p-2 text-white bg-blue-700 hover:bg-blue-800 focus:ring-4 focus:ring-blue-300 font-medium rounded text-sm text-center py-1 dark:bg-blue-600 dark:hover:bg-blue-700 focus:outline-none dark:focus:ring-blue-800"
>Go Home</a
>
</div>
<div
id="message"
class="rounded text-white bg-white dark:bg-gray-700 text-center p-3 mb-4"
>
Loading...
</div>
<div
id="items"
class="grid grid-cols-1 gap-4 md:grid-cols-2 lg:grid-cols-3"
></div>
<div
class="fixed bottom-6 right-6 rounded-full flex items-center justify-center"
>
<input
type="checkbox"
id="add-file-button"
class="hidden css-button"
/>
<div
class="rounded p-4 bg-gray-800 dark:bg-gray-200 text-white dark:text-black w-72 text-sm flex flex-col gap-2"
>
<div class="flex flex-col gap-2">
<input
type="file"
accept=".epub"
id="document_file"
name="document_file"
/>
<button
class="font-medium px-2 py-1 text-gray-800 bg-gray-500 dark:text-white hover:bg-gray-100 dark:hover:bg-gray-800"
>
Add File
</button>
</div>
<label for="add-file-button">
<div
class="w-full text-center cursor-pointer font-medium mt-2 px-2 py-1 text-gray-800 bg-gray-500 dark:text-white hover:bg-gray-100 dark:hover:bg-gray-800"
>
Cancel
</div>
</label>
</div>
<label
class="w-16 h-16 bg-gray-800 dark:bg-gray-200 rounded-full flex items-center justify-center opacity-30 hover:opacity-100 transition-all duration-200 cursor-pointer"
for="add-file-button"
>
<svg
width="34"
height="34"
class="text-gray-200 dark:text-gray-600"
viewBox="0 0 24 24"
fill="currentColor"
xmlns="http://www.w3.org/2000/svg"
>
<path
fill-rule="evenodd"
clip-rule="evenodd"
d="M12 15.75C12.4142 15.75 12.75 15.4142 12.75 15V4.02744L14.4306 5.98809C14.7001 6.30259 15.1736 6.33901 15.4881 6.06944C15.8026 5.79988 15.839 5.3264 15.5694 5.01191L12.5694 1.51191C12.427 1.34567 12.2189 1.25 12 1.25C11.7811 1.25 11.573 1.34567 11.4306 1.51191L8.43056 5.01191C8.16099 5.3264 8.19741 5.79988 8.51191 6.06944C8.8264 6.33901 9.29988 6.30259 9.56944 5.98809L11.25 4.02744L11.25 15C11.25 15.4142 11.5858 15.75 12 15.75Z"
/>
<path
d="M16 9C15.2978 9 14.9467 9 14.6945 9.16851C14.5853 9.24148 14.4915 9.33525 14.4186 9.44446C14.25 9.69667 14.25 10.0478 14.25 10.75L14.25 15C14.25 16.2426 13.2427 17.25 12 17.25C10.7574 17.25 9.75004 16.2426 9.75004 15L9.75004 10.75C9.75004 10.0478 9.75004 9.69664 9.58149 9.4444C9.50854 9.33523 9.41481 9.2415 9.30564 9.16855C9.05341 9 8.70227 9 8 9C5.17157 9 3.75736 9 2.87868 9.87868C2 10.7574 2 12.1714 2 14.9998V15.9998C2 18.8282 2 20.2424 2.87868 21.1211C3.75736 21.9998 5.17157 21.9998 8 21.9998H16C18.8284 21.9998 20.2426 21.9998 21.1213 21.1211C22 20.2424 22 18.8282 22 15.9998V14.9998C22 12.1714 22 10.7574 21.1213 9.87868C20.2426 9 18.8284 9 16 9Z"
/>
</svg>
</label>
</div>
</div>
</main>
<!-- Template HTML Elements -->
<div class="hidden">
<svg id="local-svg-template" viewBox="0 0 24 24" fill="currentColor" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M14 22H10C6.22876 22 4.34315 22 3.17157 20.8284C2 19.6569 2 17.7712 2 14V10C2 6.22876 2 4.34315 3.17157 3.17157C4.34315 2 6.23869 2 10.0298 2C10.6358 2 11.1214 2 11.53 2.01666C11.5166 2.09659 11.5095 2.17813 11.5092 2.26057L11.5 5.09497C11.4999 6.19207 11.4998 7.16164 11.6049 7.94316C11.7188 8.79028 11.9803 9.63726 12.6716 10.3285C13.3628 11.0198 14.2098 11.2813 15.0569 11.3952C15.8385 11.5003 16.808 11.5002 17.9051 11.5001L18 11.5001H21.9574C22 12.0344 22 12.6901 22 13.5629V14C22 17.7712 22 19.6569 20.8284 20.8284C19.6569 22 17.7712 22 14 22Z" />
<path d="M19.3517 7.61665L15.3929 4.05375C14.2651 3.03868 13.7012 2.53114 13.0092 2.26562L13 5.00011C13 7.35713 13 8.53564 13.7322 9.26787C14.4645 10.0001 15.643 10.0001 18 10.0001H21.5801C21.2175 9.29588 20.5684 8.71164 19.3517 7.61665Z" />
</svg>
<svg id="remote-svg-template" viewBox="0 0 24 24" fill="currentColor" xmlns="http://www.w3.org/2000/svg">
<path d="M19.3517 7.61665L15.3929 4.05375C14.2651 3.03868 13.7012 2.53114 13.0092 2.26562L13 5.00011C13 7.35713 13 8.53564 13.7322 9.26787C14.4645 10.0001 15.643 10.0001 18 10.0001H21.5801C21.2175 9.29588 20.5684 8.71164 19.3517 7.61665Z"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M10 22H14C17.7712 22 19.6569 22 20.8284 20.8284C22 19.6569 22 17.7712 22 14V13.5629C22 12.6901 22 12.0344 21.9574 11.5001H18L17.9051 11.5001C16.808 11.5002 15.8385 11.5003 15.0569 11.3952C14.2098 11.2813 13.3628 11.0198 12.6716 10.3285C11.9803 9.63726 11.7188 8.79028 11.6049 7.94316C11.4998 7.16164 11.4999 6.19207 11.5 5.09497L11.5092 2.26057C11.5095 2.17813 11.5166 2.09659 11.53 2.01666C11.1214 2 10.6358 2 10.0298 2C6.23869 2 4.34315 2 3.17157 3.17157C2 4.34315 2 6.22876 2 10V14C2 17.7712 2 19.6569 3.17157 20.8284C4.34315 22 6.22876 22 10 22ZM11 18C12.1046 18 13 17.2099 13 16.2353C13 15.4629 12.4375 14.8063 11.6543 14.5672C11.543 13.6855 10.6956 13 9.66667 13C8.5621 13 7.66667 13.7901 7.66667 14.7647C7.66667 14.9803 7.71047 15.1868 7.79066 15.3778C7.69662 15.3615 7.59944 15.3529 7.5 15.3529C6.67157 15.3529 6 15.9455 6 16.6765C6 17.4074 6.67157 18 7.5 18H11Z"/>
</svg>
<div id="item-template" class="w-full relative">
<div class="flex gap-4 w-full h-full p-4 bg-white shadow-lg dark:bg-gray-700 rounded">
<div class="min-w-fit my-auto h-48 relative">
<a href="#">
<img class="rounded object-cover h-full" src="/assets/images/no-cover.jpg"></img>
</a>
</div>
<div class="flex flex-col justify-around dark:text-white w-full text-sm">
<div class="inline-flex shrink-0 items-center">
<div>
<p class="text-gray-400">Title</p>
<p class="font-medium">
N/A
</p>
</div>
</div>
<div class="inline-flex shrink-0 items-center">
<div>
<p class="text-gray-400">Author</p>
<p class="font-medium">
N/A
</p>
</div>
</div>
<div class="inline-flex shrink-0 items-center">
<div>
<p class="text-gray-400">Progress</p>
<p class="font-medium">
0%
</p>
</div>
</div>
</div>
<div class="absolute flex flex-col gap-2 right-4 bottom-4 text-gray-500 dark:text-gray-400">
<div class="relative">
<label for="delete-button">
<svg
class="cursor-pointer hover:text-gray-800 dark:hover:text-gray-100"
viewBox="0 0 24 24"
fill="currentColor"
xmlns="http://www.w3.org/2000/svg"
>
<path
d="M3 6.52381C3 6.12932 3.32671 5.80952 3.72973 5.80952H8.51787C8.52437 4.9683 8.61554 3.81504 9.45037 3.01668C10.1074 2.38839 11.0081 2 12 2C12.9919 2 13.8926 2.38839 14.5496 3.01668C15.3844 3.81504 15.4756 4.9683 15.4821 5.80952H20.2703C20.6733 5.80952 21 6.12932 21 6.52381C21 6.9183 20.6733 7.2381 20.2703 7.2381H3.72973C3.32671 7.2381 3 6.9183 3 6.52381Z"
/>
<path
d="M11.6066 22H12.3935C15.101 22 16.4547 22 17.3349 21.1368C18.2151 20.2736 18.3052 18.8576 18.4853 16.0257L18.7448 11.9452C18.8425 10.4086 18.8913 9.64037 18.4498 9.15352C18.0082 8.66667 17.2625 8.66667 15.7712 8.66667H8.22884C6.7375 8.66667 5.99183 8.66667 5.55026 9.15352C5.1087 9.64037 5.15756 10.4086 5.25528 11.9452L5.51479 16.0257C5.69489 18.8576 5.78494 20.2736 6.66513 21.1368C7.54532 22 8.89906 22 11.6066 22Z"
/>
</svg>
</label>
<input type="checkbox" id="delete-button" class="hidden css-button"/>
<div class="absolute z-30 bottom-7 right-0 p-3 transition-all duration-200 bg-gray-200 rounded shadow-lg shadow-gray-500 dark:shadow-gray-900 dark:bg-gray-600">
<span
class="block cursor-pointer font-medium text-sm text-center w-32 px-2 py-1 text-white bg-gray-500 dark:text-gray-800 hover:bg-gray-800 dark:hover:bg-gray-100"
>Delete</span>
</div>
</div>
<a href="#">
<svg
width="24"
height="24"
class="cursor-pointer hover:text-gray-800 dark:hover:text-gray-100"
viewBox="0 0 24 24"
fill="currentColor"
xmlns="http://www.w3.org/2000/svg"
>
<path
fill-rule="evenodd"
clip-rule="evenodd"
d="M2 12C2 7.28595 2 4.92893 3.46447 3.46447C4.92893 2 7.28595 2 12 2C16.714 2 19.0711 2 20.5355 3.46447C22 4.92893 22 7.28595 22 12C22 16.714 22 19.0711 20.5355 20.5355C19.0711 22 16.714 22 12 22C7.28595 22 4.92893 22 3.46447 20.5355C2 19.0711 2 16.714 2 12ZM12 6.25C12.4142 6.25 12.75 6.58579 12.75 7V12.1893L14.4697 10.4697C14.7626 10.1768 15.2374 10.1768 15.5303 10.4697C15.8232 10.7626 15.8232 11.2374 15.5303 11.5303L12.5303 14.5303C12.3897 14.671 12.1989 14.75 12 14.75C11.8011 14.75 11.6103 14.671 11.4697 14.5303L8.46967 11.5303C8.17678 11.2374 8.17678 10.7626 8.46967 10.4697C8.76256 10.1768 9.23744 10.1768 9.53033 10.4697L11.25 12.1893V7C11.25 6.58579 11.5858 6.25 12 6.25ZM8 16.25C7.58579 16.25 7.25 16.5858 7.25 17C7.25 17.4142 7.58579 17.75 8 17.75H16C16.4142 17.75 16.75 17.4142 16.75 17C16.75 16.5858 16.4142 16.25 16 16.25H8Z"
/>
</svg>
</a>
</div>
</div>
<div class="absolute top-0 right-0">
<strong class="bg-blue-100 text-blue-700 inline-flex items-center gap-1 rounded-tr rounded-bl p-1">
<div class="w-4 h-4"></div>
<span class="text-xs font-medium">REMOTE</span>
</strong>
</div>
</div>
</div>
</body>
</html>

319
assets/local/index.js Normal file
View File

@@ -0,0 +1,319 @@
// Message types sent to the service worker via SW.send().
const GET_SW_CACHE = "GET_SW_CACHE"; // request the list of cached documents
const DEL_SW_CACHE = "DEL_SW_CACHE"; // request eviction of one cached document
// ----------------------------------------------------------------------- //
// --------------------------- Event Listeners --------------------------- //
// ----------------------------------------------------------------------- //
/**
 * Initial load handler. Gets called on DOMContentLoaded.
 *
 * Normalizes the URL, wires the upload button, gathers documents cached by
 * the service worker (REMOTE) plus documents stored locally in IndexedDB
 * (LOCAL), and renders them all.
 **/
async function handleLoad() {
  handleOnlineChange();
  // If SW Redirected - normalize the address bar back to /local
  if (document.location.pathname !== "/local")
    window.history.replaceState(null, null, "/local");
  // Create Upload Listener
  let uploadButton = document.querySelector("button");
  uploadButton.addEventListener("click", handleFileAdd);
  // Ensure Installed -> Get Cached Items
  let swCache = await SW.install()
    // Get Service Worker Cache Books
    .then(async () => {
      let swResponse = await SW.send({ type: GET_SW_CACHE });
      return Promise.all(
        // Normalize Cached Results
        swResponse.map(async (item) => {
          // Overlay any locally cached (offline) progress on the item;
          // percentage is converted to a 2-decimal percent value.
          let localCache = await IDB.get("PROGRESS-" + item.id);
          if (localCache) {
            item.progress = localCache.progress;
            item.percentage = Math.round(localCache.percentage * 10000) / 100;
          }
          // Additional Values
          item.fileURL = "/documents/" + item.id + "/file";
          item.coverURL = "/documents/" + item.id + "/cover";
          item.type = "REMOTE";
          return item;
        })
      );
    })
    // Fail Nicely -> Allows Local Feature
    .catch((e) => {
      console.log("[loadContent] Service Worker Cache Error:", e);
      return [];
    });
  // Get & Normalize Local Books (keys look like FILE-<32 char id>)
  let localResponse = await IDB.find(/^FILE-.{32}$/, false);
  let localCache = await Promise.all(localResponse.map(getLocalProgress));
  // Populate DOM with Cache & Local Books
  populateDOMBooks([...swCache, ...localCache]);
}
/**
 * Update DOM to indicate online status. If no argument is passed, we attempt
 * to determine online status via `navigator.onLine`.
 **/
function handleOnlineChange(isOnline) {
  const bannerEl = document.querySelector("#online");
  const online = isOnline ?? navigator.onLine;
  bannerEl.hidden = !online;
}
/**
 * Allow deleting local or remote cached files. Deleting remotely cached files
 * does not remove progress. Progress will still be flushed once online.
 **/
async function handleFileDelete(event, item) {
  // Walk up from the clicked "Delete" span to the item's root card element.
  // NOTE(review): depends on the exact nesting of #item-template — keep in
  // sync with the markup.
  let mainEl =
    event.target.parentElement.parentElement.parentElement.parentElement
      .parentElement;
  if (item.type == "LOCAL") {
    // Local files: remove both the file blob and its cached metadata.
    await IDB.del("FILE-" + item.id);
    await IDB.del("FILE-METADATA-" + item.id);
  } else if (item.type == "REMOTE") {
    // Remote files: ask the service worker to evict its cached copy.
    let swResp = await SW.send({ type: DEL_SW_CACHE, id: item.id });
    if (swResp != "SUCCESS")
      throw new Error("[handleFileDelete] Service Worker Error");
  }
  console.log("[handleFileDelete] Item Deleted");
  // Remove the card from the DOM and refresh the "No Results" message.
  mainEl.remove();
  updateMessage();
}
/**
 * Allow adding file to offline reader. Add to IndexedDB,
 * and later upload? Add style indicating external file?
 *
 * Reads the selected file, validates it looks like an EPUB, stores the
 * blob under a fresh random ID, then renders the new item.
 **/
async function handleFileAdd() {
  const fileInput = document.getElementById("document_file");
  const file = fileInput.files[0];
  if (!file) return console.log("[handleFileAdd] No File");
  // Read the selected File into an ArrayBuffer via FileReader.
  function readFile(file) {
    return new Promise((resolve, reject) => {
      const reader = new FileReader();
      reader.onload = (event) => resolve(event.target.result);
      reader.onerror = (error) => reject(error);
      reader.readAsArrayBuffer(file);
    });
  }
  // Generate a random 32-character hex ID.
  // NOTE(review): near-duplicate of SW's randomID (this one is lowercase) —
  // consider sharing a single helper.
  function randomID() {
    return "00000000000000000000000000000000".replace(/[018]/g, (c) =>
      (
        c ^
        (crypto.getRandomValues(new Uint8Array(1))[0] & (15 >> (c / 4)))
      ).toString(16)
    );
  }
  let newID = randomID();
  readFile(file)
    // Store Blob in IDB - reject non-EPUB content before persisting.
    .then((fileData) => {
      if (!isEpubFile(fileData)) throw new Error("Invalid File Type");
      return IDB.set(
        "FILE-" + newID,
        new Blob([fileData], { type: "application/octet-binary" })
      );
    })
    // Process File - builds & caches the initial metadata/progress record.
    .then(() => getLocalProgress("FILE-" + newID))
    // Populate in DOM
    .then((item) => populateDOMBooks([item]))
    // Hide Add File Button (uncheck the CSS-toggle checkbox)
    .then(() => {
      let addButtonEl = document.querySelector("#add-file-button");
      addButtonEl.checked = false;
    })
    // Logging
    .then(() => console.log("[handleFileAdd] File Add Successfully"))
    .catch((e) => console.log("[handleFileAdd] File Add Failed:", e));
}
// Add Event Listeners
window.addEventListener("DOMContentLoaded", handleLoad); // initial page setup
window.addEventListener("online", () => handleOnlineChange(true)); // show online banner
window.addEventListener("offline", () => handleOnlineChange(false)); // hide online banner
// ----------------------------------------------------------------------- //
// ------------------------------- Helpers ------------------------------- //
// ----------------------------------------------------------------------- //
/**
 * Update the message element. Called after initial load, on item add or on
 * item delete. Shows "No Results" when the item list is empty, otherwise
 * hides the message entirely.
 **/
function updateMessage() {
  const itemsEl = document.querySelector("#items");
  const messageEl = document.querySelector("#message");
  const isEmpty = itemsEl.children.length == 0;
  if (isEmpty) messageEl.innerText = "No Results";
  messageEl.hidden = !isEmpty;
}
/**
 * Populate DOM with cached documents.
 *
 * Clones the hidden #item-template for each item, fills in metadata,
 * wires the download / reader / delete controls, and appends the card
 * to the #items grid.
 **/
function populateDOMBooks(data) {
  let allDocuments = document.querySelector("#items");
  // Create Document Items
  data.forEach((item) => {
    // Create Main Element - clone the hidden template card.
    let baseEl = document.querySelector("#item-template").cloneNode(true);
    baseEl.removeAttribute("id");
    // Get Elements
    // NOTE(review): these selector lookups depend on the exact structure of
    // #item-template — keep in sync with the markup.
    let [titleEl, authorEl, percentageEl] = baseEl.querySelectorAll("p + p");
    let [svgDivEl, textEl] = baseEl.querySelector("strong").children;
    let coverEl = baseEl.querySelector("a img");
    let downloadEl = baseEl.querySelector("svg").parentElement;
    let deleteInputEl = baseEl.querySelector("#delete-button");
    let deleteLabelEl = deleteInputEl.previousElementSibling;
    let deleteTextEl = baseEl.querySelector("input + div span");
    // Set Download Attributes
    downloadEl.setAttribute("href", item.fileURL);
    downloadEl.setAttribute(
      "download",
      item.title + " - " + item.author + ".epub"
    );
    // Set Cover Attributes - cover links into the reader for this document.
    coverEl.setAttribute("src", item.coverURL);
    coverEl.parentElement.setAttribute(
      "href",
      "/reader#id=" + item.id + "&type=" + item.type
    );
    // Set Additional Metadata Attributes
    titleEl.textContent = item.title;
    authorEl.textContent = item.author;
    percentageEl.textContent = item.percentage + "%";
    // Set Remote / Local Indicator (badge icon + label)
    let newSvgEl =
      item.type == "LOCAL"
        ? document.querySelector("#local-svg-template").cloneNode(true)
        : document.querySelector("#remote-svg-template").cloneNode(true);
    svgDivEl.append(newSvgEl);
    textEl.textContent = item.type;
    // Delete Item - give the checkbox/label pair a unique id per document.
    deleteInputEl.setAttribute("id", "delete-button-" + item.id);
    deleteLabelEl.setAttribute("for", "delete-button-" + item.id);
    deleteTextEl.addEventListener("click", (e) => handleFileDelete(e, item));
    deleteTextEl.textContent =
      item.type == "LOCAL" ? "Delete Local" : "Delete Cache";
    allDocuments.append(baseEl);
  });
  updateMessage();
}
/**
 * Given an item id, generate expected item format from IDB data store.
 *
 * Resolves with { id, title, author, type, percentage, progress, words,
 * fileURL, coverURL }. On first sight of a file a zeroed progress record
 * is created and cached under FILE-METADATA-<id>.
 **/
async function getLocalProgress(id) {
  // Get Metadata (Cover Always Needed)
  let fileBlob = await IDB.get(id);
  let fileURL = URL.createObjectURL(fileBlob);
  let metadata = await getMetadata(fileURL);
  // Attempt Cache - strip the "FILE-" prefix to recover the document id.
  let documentID = id.replace("FILE-", "");
  let documentData = await IDB.get("FILE-METADATA-" + documentID);
  if (documentData)
    return { ...documentData, fileURL, coverURL: metadata.coverURL };
  // Create Starting Progress
  let newProgress = {
    id: documentID,
    title: metadata.title,
    author: metadata.author,
    type: "LOCAL",
    percentage: 0,
    progress: "",
    words: 0,
  };
  // Update Cache
  await IDB.set("FILE-METADATA-" + documentID, newProgress);
  // Return Cache + coverURL
  return { ...newProgress, fileURL, coverURL: metadata.coverURL };
}
/**
 * Retrieve the Title, Author, and CoverURL (blob) for a given file.
 * Missing or empty title/creator fields fall back to "Unknown"; a missing
 * cover falls back to the bundled placeholder image.
 **/
async function getMetadata(fileURL) {
  const book = ePub(fileURL, { openAs: "epub" });
  console.log({ book });
  const coverURL = (await book.coverUrl()) || "/assets/images/no-cover.jpg";
  const meta = await book.loaded.metadata;
  const title = meta.title && meta.title != "" ? meta.title : "Unknown";
  const author =
    meta.creator && meta.creator != "" ? meta.creator : "Unknown";
  book.destroy();
  return { title, author, coverURL };
}
/**
 * Validate filetype. We check the headers and validate that they are ZIP.
 * After which we validate contents. This isn't 100% effective, but unless
 * someone is trying to trick it, it should be fine.
 **/
function isEpubFile(arrayBuffer) {
  const view = new DataView(arrayBuffer);
  // Too Small
  if (view.byteLength < 4) return false;
  // Check for the ZIP file signature (PK) in either byte order
  const hasZipSignature =
    view.getUint16(0, true) === 0x504b || view.getUint16(0, false) === 0x504b;
  if (!hasZipSignature) return false;
  // Additional Checks (No FP on ZIP) - require the EPUB marker entries
  const textDecoder = new TextDecoder();
  const zipContent = textDecoder.decode(new Uint8Array(arrayBuffer));
  return (
    zipContent.includes("mimetype") &&
    zipContent.includes("META-INF/container.xml")
  );
}

View File

@@ -1,6 +1,17 @@
{ {
"short_name": "Book Manager", "name": "AnthoLume",
"name": "Book Manager", "short_name": "AnthoLume",
"lang": "en-US",
"theme_color": "#1F2937", "theme_color": "#1F2937",
"display": "standalone" "display": "standalone",
"scope": "/",
"start_url": "/",
"icons": [
{
"purpose": "any",
"sizes": "512x512",
"src": "/assets/icons/icon512.png",
"type": "image/png"
}
]
} }

119
assets/reader/fonts.css Normal file
View File

@@ -0,0 +1,119 @@
/**
* Lato
* - Charsets: [latin,latin-ext]
* - Styles: [100,700,100italic,regular,italic,700italic]
**/
/* lato-100 - latin_latin-ext */
@font-face {
font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
font-family: "Lato";
font-style: normal;
font-weight: 100;
src: url("./fonts/lato-v24-latin_latin-ext-100.woff2") format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
}
/* lato-100italic - latin_latin-ext */
@font-face {
font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
font-family: "Lato";
font-style: italic;
font-weight: 100;
src: url("./fonts/lato-v24-latin_latin-ext-100italic.woff2") format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
}
/* lato-regular - latin_latin-ext */
@font-face {
font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
font-family: "Lato";
font-style: normal;
font-weight: 400;
src: url("./fonts/lato-v24-latin_latin-ext-regular.woff2") format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
}
/* lato-italic - latin_latin-ext */
@font-face {
font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
font-family: "Lato";
font-style: italic;
font-weight: 400;
src: url("./fonts/lato-v24-latin_latin-ext-italic.woff2") format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
}
/* lato-700 - latin_latin-ext */
@font-face {
font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
font-family: "Lato";
font-style: normal;
font-weight: 700;
src: url("./fonts/lato-v24-latin_latin-ext-700.woff2") format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
}
/* lato-700italic - latin_latin-ext */
@font-face {
font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
font-family: "Lato";
font-style: italic;
font-weight: 700;
src: url("./fonts/lato-v24-latin_latin-ext-700italic.woff2") format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
}
/**
* Open Sans
* - Charsets: [latin,latin-ext]
* - Styles: [700,regular,italic,700italic]
**/
/* open-sans-regular - latin_latin-ext */
@font-face {
font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
font-family: "Open Sans";
font-style: normal;
font-weight: 400;
src: url("./fonts/open-sans-v36-latin_latin-ext-regular.woff2")
format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
}
/* open-sans-italic - latin_latin-ext */
@font-face {
font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
font-family: "Open Sans";
font-style: italic;
font-weight: 400;
src: url("./fonts/open-sans-v36-latin_latin-ext-italic.woff2") format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
}
/* open-sans-700 - latin_latin-ext */
@font-face {
font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
font-family: "Open Sans";
font-style: normal;
font-weight: 700;
src: url("./fonts/open-sans-v36-latin_latin-ext-700.woff2") format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
}
/* open-sans-700italic - latin_latin-ext */
@font-face {
font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
font-family: "Open Sans";
font-style: italic;
font-weight: 700;
src: url("./fonts/open-sans-v36-latin_latin-ext-700italic.woff2")
format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
}
/**
* Arbutus Slab
* - Charsets: [latin,latin-ext]
* - Styles: [regular]
**/
/* arbutus-slab-regular - latin_latin-ext */
@font-face {
font-display: swap; /* Check https://developer.mozilla.org/en-US/docs/Web/CSS/@font-face/font-display for other options. */
font-family: "Arbutus Slab";
font-style: normal;
font-weight: 400;
src: url("./fonts/arbutus-slab-v16-latin_latin-ext-regular.woff2")
format("woff2"); /* Chrome 36+, Opera 23+, Firefox 39+, Safari 12+, iOS 10+ */
}

Binary file not shown.

Binary file not shown.

385
assets/reader/index.htm Normal file
View File

@@ -0,0 +1,385 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta
id="viewport"
name="viewport"
content="width=device-width, minimum-scale=1.0, maximum-scale=1.0, user-scalable=no, viewport-fit=cover"
/>
<meta name="apple-mobile-web-app-capable" content="yes" />
<meta
name="apple-mobile-web-app-status-bar-style"
content="black-translucent"
/>
<meta name="theme-color" content="#D2B48C" />
<title>AnthoLume - Reader</title>
<link rel="manifest" href="/manifest.json" />
<link rel="stylesheet" href="/assets/style.css" />
<!-- Libraries -->
<script src="/assets/lib/jszip.min.js"></script>
<script src="/assets/lib/epub.min.js"></script>
<script src="/assets/lib/no-sleep.min.js"></script>
<script src="/assets/lib/idb-keyval.min.js"></script>
<!-- Reader -->
<script src="/assets/common.js"></script>
<script src="/assets/index.js"></script>
<script src="/assets/reader/index.js"></script>
<style>
/* ----------------------------- */
/* -------- PWA Styling -------- */
/* ----------------------------- */
html,
body {
overscroll-behavior-y: none;
margin: 0px;
}
html {
min-height: calc(100% + env(safe-area-inset-top));
}
#viewer {
padding-top: env(safe-area-inset-top);
}
/* For Webkit-based browsers (Chrome, Safari and Opera) */
.no-scrollbar::-webkit-scrollbar {
display: none;
}
/* For IE, Edge and Firefox */
.no-scrollbar {
-ms-overflow-style: none; /* IE and Edge */
scrollbar-width: none; /* Firefox */
}
#bottom-bar {
padding-bottom: env(safe-area-inset-bottom);
}
#top-bar {
padding-top: env(safe-area-inset-top);
}
#top-bar:not(.top-0) {
top: calc((8em + env(safe-area-inset-top)) * -1);
}
select:invalid {
color: gray;
}
</style>
</head>
<body class="bg-gray-100 dark:bg-gray-800">
<main class="relative overflow-hidden h-[100dvh]">
<div
id="top-bar"
class="transition-all duration-200 absolute z-10 bg-gray-100 dark:bg-gray-800 w-full px-2"
>
<div class="max-h-[75vh] w-full flex flex-col items-center justify-around relative dark:text-white">
<div class="h-32">
<div class="text-gray-500 absolute top-6 left-4 flex flex-col gap-4">
<a href="#">
<svg
width="32"
height="32"
class="cursor-pointer hover:text-gray-800 dark:hover:text-gray-100"
viewBox="0 0 24 24"
fill="currentColor"
xmlns="http://www.w3.org/2000/svg"
>
<path
fill-rule="evenodd"
clip-rule="evenodd"
d="M20.5355 3.46447C19.0711 2 16.714 2 12 2C7.28595 2 4.92893 2 3.46447 3.46447C2 4.92893 2 7.28595 2 12C2 16.714 2 19.0711 3.46447 20.5355C4.92893 22 7.28595 22 12 22C16.714 22 19.0711 22 20.5355 20.5355C22 19.0711 22 16.714 22 12C22 7.28595 22 4.92893 20.5355 3.46447ZM14.0303 8.46967C14.3232 8.76256 14.3232 9.23744 14.0303 9.53033L11.5607 12L14.0303 14.4697C14.3232 14.7626 14.3232 15.2374 14.0303 15.5303C13.7374 15.8232 13.2626 15.8232 12.9697 15.5303L9.96967 12.5303C9.82902 12.3897 9.75 12.1989 9.75 12C9.75 11.8011 9.82902 11.6103 9.96967 11.4697L12.9697 8.46967C13.2626 8.17678 13.7374 8.17678 14.0303 8.46967Z"
/>
</svg>
</a>
<svg
width="32"
height="32"
class="cursor-pointer hover:text-gray-800 dark:hover:text-gray-100 close-top-bar"
viewBox="0 0 24 24"
fill="currentColor"
xmlns="http://www.w3.org/2000/svg"
>
<path
fill-rule="evenodd"
clip-rule="evenodd"
d="M12 22C7.28595 22 4.92893 22 3.46447 20.5355C2 19.0711 2 16.714 2 12C2 7.28595 2 4.92893 3.46447 3.46447C4.92893 2 7.28595 2 12 2C16.714 2 19.0711 2 20.5355 3.46447C22 4.92893 22 7.28595 22 12C22 16.714 22 19.0711 20.5355 20.5355C19.0711 22 16.714 22 12 22ZM8.96965 8.96967C9.26254 8.67678 9.73742 8.67678 10.0303 8.96967L12 10.9394L13.9696 8.96969C14.2625 8.6768 14.7374 8.6768 15.0303 8.96969C15.3232 9.26258 15.3232 9.73746 15.0303 10.0303L13.0606 12L15.0303 13.9697C15.3232 14.2625 15.3232 14.7374 15.0303 15.0303C14.7374 15.3232 14.2625 15.3232 13.9696 15.0303L12 13.0607L10.0303 15.0303C9.73744 15.3232 9.26256 15.3232 8.96967 15.0303C8.67678 14.7374 8.67678 14.2626 8.96967 13.9697L10.9393 12L8.96965 10.0303C8.67676 9.73744 8.67676 9.26256 8.96965 8.96967Z"
/>
</svg>
</div>
<div class="flex gap-10 h-full p-4 pl-14 rounded">
<div class="h-full my-auto relative">
<a href="#">
<img
class="rounded object-cover h-full"
src="/assets/images/no-cover.jpg"
/>
</a>
</div>
<div class="flex gap-7 justify-around dark:text-white text-sm">
<div class="flex flex-col gap-4">
<div class="inline-flex shrink-0 items-center">
<div>
<p class="text-gray-400">Title</p>
<p
class="font-medium whitespace-nowrap text-ellipsis overflow-hidden max-w-[50dvw]"
>
"N/A"
</p>
</div>
</div>
<div class="inline-flex shrink-0 items-center">
<div>
<p class="text-gray-400">Author</p>
<p
class="font-medium whitespace-nowrap text-ellipsis overflow-hidden max-w-[50dvw]"
>
"N/A"
</p>
</div>
</div>
</div>
</div>
</div>
</div>
<div id="toc" class="w-full text-center max-h-[50%] overflow-scroll no-scrollbar"></div>
</div>
</div>
<div
id="bottom-bar"
class="-bottom-28 transition-all duration-200 absolute z-10 bg-gray-100 dark:bg-gray-800 items-center flex w-full overflow-y-scroll snap-x snap-mandatory no-scrollbar"
>
<div
class="items-center flex flex-col w-screen h-full flex-none snap-center p-2"
>
<div
class="flex flex-wrap gap-2 justify-around w-full dark:text-white pb-2"
>
<div class="flex justify-center gap-2 w-full md:w-fit">
<p class="text-gray-400 text-xs">Chapter:</p>
<p id="chapter-name-status" class="text-xs">N/A</p>
</div>
<div class="inline-flex gap-2">
<p class="text-gray-400 text-xs">Chapter Pages:</p>
<p id="chapter-status" class="text-xs">N/A</p>
</div>
<div class="inline-flex gap-2">
<p class="text-gray-400 text-xs">Progress:</p>
<p id="progress-status" class="text-xs">N/A</p>
</div>
</div>
<div class="w-[90%] h-2 rounded border border-gray-500">
<div
id="progress-bar-status"
class="w-0 bg-green-200 h-full rounded-l"
></div>
</div>
</div>
<div
class="items-center flex flex-col w-screen h-full flex-none snap-center p-2"
>
<p class="text-gray-400">Theme</p>
<div class="flex justify-around w-full gap-4 p-2 text-sm">
<div
class="color-scheme cursor-pointer rounded border border-white bg-[#fff] text-[#000] grow text-center"
>
light
</div>
<div
class="color-scheme cursor-pointer rounded border border-white bg-[#d2b48c] text-[#333] grow text-center"
>
tan
</div>
<div
class="color-scheme cursor-pointer rounded border border-white bg-[#1f2937] text-[#fff] grow text-center"
>
blue
</div>
<div
class="color-scheme cursor-pointer rounded border border-white bg-[#232323] text-[#fff] grow text-center"
>
gray
</div>
<div
class="color-scheme cursor-pointer rounded border border-white bg-[#000] text-[#ccc] grow text-center"
>
black
</div>
</div>
</div>
<div
class="items-center flex flex-col w-screen h-full flex-none snap-center p-2"
>
<p class="text-gray-400">Font</p>
<div class="flex justify-around w-full gap-4 p-2 text-sm">
<div
class="font-family cursor-pointer rounded border border-white grow text-center dark:text-white"
>
Serif
</div>
<div
class="font-family cursor-pointer rounded border border-white grow text-center dark:text-white"
>
Open Sans
</div>
<div
class="font-family cursor-pointer rounded border border-white grow text-center dark:text-white"
>
Arbutus Slab
</div>
<div
class="font-family cursor-pointer rounded border border-white grow text-center dark:text-white"
>
Lato
</div>
</div>
</div>
<div
class="items-center flex flex-col w-screen h-full flex-none snap-center p-2"
>
<p class="text-gray-400">Font Size</p>
<div class="flex justify-around w-full gap-4 p-2 text-sm">
<div
class="font-size cursor-pointer rounded border border-white grow text-center dark:text-white"
>
-
</div>
<div
class="font-size cursor-pointer rounded border border-white grow text-center dark:text-white"
>
+
</div>
</div>
</div>
</div>
<div id="viewer" class="w-full h-full"></div>
</main>
<!-- Device Selector -->
<div
id="device-selector"
class="hidden absolute top-0 left-0 w-full h-full z-50"
>
<div
class="fixed top-0 left-0 bg-black opacity-50 w-screen h-screen"
></div>
<div
class="relative flex flex-col gap-4 p-4 max-h-[95%] w-5/6 md:w-1/2 bg-white dark:bg-gray-800 overflow-scroll -translate-x-2/4 -translate-y-2/4 top-1/2 left-1/2 overflow-hidden shadow rounded"
>
<div class="text-center flex flex-col gap-2">
<h3 class="text-lg font-bold leading-6 dark:text-gray-300">
Select Device
</h3>
<p class="text-xs text-gray-500 text-center">
This device appears to be new! Please either assume an existing
device, or create a new one.
</p>
</div>
<div
class="flex flex-col gap-2 grow p-4 rounded shadow-lg bg-white dark:bg-gray-700 text-gray-500 dark:text-white"
>
<div class="flex gap-4 flex-col">
<div class="flex relative min-w-[12em]">
<span
class="inline-flex items-center px-3 border-t bg-white border-l border-b border-gray-300 text-gray-500 shadow-sm text-sm"
>
<svg
width="15"
height="15"
viewBox="0 0 24 24"
fill="currentColor"
xmlns="http://www.w3.org/2000/svg"
>
<path
d="M5.65517 2.22732C5.2225 2.34037 4.9438 2.50021 4.72718 2.71244C4.42179 3.01165 4.22268 3.43172 4.11382 4.225C4.00176 5.04159 4 6.12387 4 7.67568V16.2442C4.38867 15.9781 4.82674 15.7756 5.29899 15.6517C5.41296 15.6217 5.53103 15.5983 5.65517 15.5799V2.22732Z"
/>
<path
d="M7.31034 15.5135C7.32206 15.5135 7.33382 15.5135 7.34563 15.5135L20 15.5135V7.67568C20 6.12387 19.9982 5.04159 19.8862 4.22499C19.7773 3.43172 19.5782 3.01165 19.2728 2.71244C18.9674 2.41324 18.5387 2.21816 17.729 2.11151C16.8955 2.00172 15.7908 2 14.2069 2H9.7931C8.79138 2 7.98133 2.00069 7.31034 2.02897V15.5135Z"
/>
<path
d="M7.47341 17.1351C6.39395 17.1351 6.01657 17.1421 5.72738 17.218C4.93365 17.4264 4.30088 18.0044 4.02952 18.7558C4.0463 19.1382 4.07259 19.4746 4.11382 19.775C4.22268 20.5683 4.42179 20.9884 4.72718 21.2876C5.03258 21.5868 5.46135 21.7818 6.27103 21.8885C7.10452 21.9983 8.2092 22 9.7931 22H14.2069C15.7908 22 16.8955 21.9983 17.729 21.8885C18.5387 21.7818 18.9674 21.5868 19.2728 21.2876C19.5782 20.9884 19.7773 20.5683 19.8862 19.775C19.9776 19.1088 19.9956 18.2657 19.9991 17.1351H7.47341Z"
/>
</svg>
</span>
<select
class="flex-1 appearance-none rounded-none border border-gray-300 w-full py-2 px-4 bg-white text-gray-700 placeholder-gray-400 shadow-sm text-base focus:outline-none focus:ring-2 focus:ring-purple-600 focus:border-transparent"
id="source"
name="source"
required
>
<option value="" disabled selected hidden>
Select Existing Device
</option>
</select>
</div>
<button
class="px-10 py-2 text-base font-semibold text-center text-white transition duration-200 ease-in bg-black shadow-md hover:text-black hover:bg-white focus:outline-none focus:ring-2"
>
<span class="w-full">Assume Device</span>
</button>
</div>
</div>
<div
class="flex flex-col gap-2 grow p-4 rounded shadow-lg bg-white dark:bg-gray-700 text-gray-500 dark:text-white"
>
<div class="flex gap-4 flex-col">
<div class="flex flex-col w-full grow">
<div class="flex relative">
<span
class="inline-flex items-center px-3 border-t bg-white border-l border-b border-gray-300 text-gray-500 shadow-sm text-sm"
>
<svg
width="15"
height="15"
viewBox="0 0 24 24"
fill="currentColor"
xmlns="http://www.w3.org/2000/svg"
>
<path
d="M5.65517 2.22732C5.2225 2.34037 4.9438 2.50021 4.72718 2.71244C4.42179 3.01165 4.22268 3.43172 4.11382 4.225C4.00176 5.04159 4 6.12387 4 7.67568V16.2442C4.38867 15.9781 4.82674 15.7756 5.29899 15.6517C5.41296 15.6217 5.53103 15.5983 5.65517 15.5799V2.22732Z"
/>
<path
d="M7.31034 15.5135C7.32206 15.5135 7.33382 15.5135 7.34563 15.5135L20 15.5135V7.67568C20 6.12387 19.9982 5.04159 19.8862 4.22499C19.7773 3.43172 19.5782 3.01165 19.2728 2.71244C18.9674 2.41324 18.5387 2.21816 17.729 2.11151C16.8955 2.00172 15.7908 2 14.2069 2H9.7931C8.79138 2 7.98133 2.00069 7.31034 2.02897V15.5135Z"
/>
<path
d="M7.47341 17.1351C6.39395 17.1351 6.01657 17.1421 5.72738 17.218C4.93365 17.4264 4.30088 18.0044 4.02952 18.7558C4.0463 19.1382 4.07259 19.4746 4.11382 19.775C4.22268 20.5683 4.42179 20.9884 4.72718 21.2876C5.03258 21.5868 5.46135 21.7818 6.27103 21.8885C7.10452 21.9983 8.2092 22 9.7931 22H14.2069C15.7908 22 16.8955 21.9983 17.729 21.8885C18.5387 21.7818 18.9674 21.5868 19.2728 21.2876C19.5782 20.9884 19.7773 20.5683 19.8862 19.775C19.9776 19.1088 19.9956 18.2657 19.9991 17.1351H7.47341Z"
/>
</svg>
</span>
<input
type="text"
id="name"
name="name"
class="flex-1 appearance-none rounded-none border border-gray-300 w-full py-2 px-4 bg-white text-gray-700 placeholder-gray-400 shadow-sm text-base focus:outline-none focus:ring-2 focus:ring-purple-600 focus:border-transparent"
placeholder="New Device Name"
/>
</div>
</div>
<button
class="px-10 py-2 text-base font-semibold text-center text-white transition duration-200 ease-in bg-black shadow-md hover:text-black hover:bg-white focus:outline-none focus:ring-2"
>
<span class="w-full">Create Device</span>
</button>
</div>
</div>
</div>
</div>
</body>
</html>

1347
assets/reader/index.js Normal file

File diff suppressed because it is too large Load Diff

24
assets/reader/themes.css Normal file
View File

@@ -0,0 +1,24 @@
/* Reader color themes. One class is applied to the reader root to set the
   page background and text color. All rules use the `background` shorthand
   consistently (the original `.light` rule used `background-color`, which
   behaves identically here but broke the file's convention). */
.light {
  background: #fff;
  color: #000;
}
.tan {
  background: #d2b48c;
  color: #333;
}
.blue {
  background: #1f2937;
  color: #fff;
}
.gray {
  background: #232323;
  color: #fff;
}
.black {
  background: #000;
  color: #ccc;
}

1
assets/style.css Normal file

File diff suppressed because one or more lines are too long

265
assets/sw.js Normal file
View File

@@ -0,0 +1,265 @@
// Misc Consts
const SW_VERSION = 1; // Reported back via GET_SW_VERSION messages
const SW_CACHE_NAME = "OFFLINE_V1"; // Single Cache bucket used for all cached entries
// Message Types (values of `data.type` handled by handleMessage)
const PURGE_SW_CACHE = "PURGE_SW_CACHE";
const DEL_SW_CACHE = "DEL_SW_CACHE";
const GET_SW_CACHE = "GET_SW_CACHE";
const GET_SW_VERSION = "GET_SW_VERSION";
// Cache Types (caching strategies referenced by ROUTES / handleFetch)
const CACHE_ONLY = "CACHE_ONLY";
const CACHE_NEVER = "CACHE_NEVER";
const CACHE_UPDATE_SYNC = "CACHE_UPDATE_SYNC";
const CACHE_UPDATE_ASYNC = "CACHE_UPDATE_ASYNC";
/**
* Define routes and their directives. Takes `routes`, `type`, and `fallback`.
*
* Routes (Required):
* Either a string of the exact request, or a RegExp. Order precedence.
*
* Fallback (Optional):
* A fallback function. If the request fails, this function is executed and
* its return value is returned as the result.
*
* Types (Required):
* - CACHE_ONLY
* Cache once & never refresh.
* - CACHE_NEVER
* Never cache & always perform a request.
* - CACHE_UPDATE_SYNC
* Update cache & return result.
* - CACHE_UPDATE_ASYNC
* Return cache if exists & update cache in background.
**/
const ROUTES = [
{ route: "/local", type: CACHE_UPDATE_ASYNC },
{ route: "/reader", type: CACHE_UPDATE_ASYNC },
{ route: "/manifest.json", type: CACHE_UPDATE_ASYNC },
// Fonts never change once cached.
{ route: /^\/assets\/reader\/fonts\//, type: CACHE_ONLY },
{ route: /^\/assets\//, type: CACHE_UPDATE_ASYNC },
{
route: /^\/documents\/[a-zA-Z0-9]{32}\/(cover|file)$/,
type: CACHE_UPDATE_ASYNC,
},
{
route: /^\/reader\/progress\/[a-zA-Z0-9]{32}$/,
type: CACHE_UPDATE_SYNC,
},
// Catch-all must stay last: handleFetch uses the FIRST matching entry.
{
route: /.*/,
type: CACHE_NEVER,
fallback: (event) => caches.match("/local"),
},
];
/**
 * These are assets that are cached on initial service worker installation
 * (see handleInstall). They make the /local and /reader pages fully usable
 * offline.
 **/
const PRECACHE_ASSETS = [
// Offline & Reader Assets
"/local",
"/reader",
"/assets/local/index.js",
"/assets/reader/index.js",
"/assets/reader/fonts.css",
"/assets/reader/themes.css",
"/assets/icons/icon512.png",
"/assets/images/no-cover.jpg",
// Main App Assets
"/manifest.json",
"/assets/index.js",
"/assets/style.css",
"/assets/common.js",
// Library Assets
"/assets/lib/jszip.min.js",
"/assets/lib/epub.min.js",
"/assets/lib/no-sleep.min.js",
"/assets/lib/idb-keyval.min.js",
// Fonts
"/assets/reader/fonts/arbutus-slab-v16-latin_latin-ext-regular.woff2",
"/assets/reader/fonts/lato-v24-latin_latin-ext-100.woff2",
"/assets/reader/fonts/lato-v24-latin_latin-ext-100italic.woff2",
"/assets/reader/fonts/lato-v24-latin_latin-ext-700.woff2",
"/assets/reader/fonts/lato-v24-latin_latin-ext-700italic.woff2",
"/assets/reader/fonts/lato-v24-latin_latin-ext-italic.woff2",
"/assets/reader/fonts/lato-v24-latin_latin-ext-regular.woff2",
"/assets/reader/fonts/open-sans-v36-latin_latin-ext-700.woff2",
"/assets/reader/fonts/open-sans-v36-latin_latin-ext-700italic.woff2",
"/assets/reader/fonts/open-sans-v36-latin_latin-ext-italic.woff2",
"/assets/reader/fonts/open-sans-v36-latin_latin-ext-regular.woff2",
];
// ------------------------------------------------------- //
// ----------------------- Helpers ----------------------- //
// ------------------------------------------------------- //
/**
 * Delete every cache under this origin. Resolves only once ALL deletions
 * have completed. The original implementation fired `caches.delete` inside
 * a loop without awaiting, so callers (e.g. the PURGE_SW_CACHE handler)
 * could report SUCCESS before the purge actually finished — and a deletion
 * failure could never surface as a rejection.
 **/
async function purgeCache() {
  console.log("[purgeCache] Purging Cache");
  const names = await caches.keys();
  return Promise.all(names.map((name) => caches.delete(name)));
}
/**
 * Fetch `request` and, when the response status is below 400, store a clone
 * of it in the service worker cache. Resolves with the network response;
 * rejects (after logging) when the fetch itself fails. `request` may be a
 * Request object or a plain URL string.
 **/
async function updateCache(request) {
  const url = request.url ? new URL(request.url).pathname : request;
  console.log("[updateCache] Updating Cache:", url);
  const cache = await caches.open(SW_CACHE_NAME);
  try {
    const response = await fetch(request);
    // Only cache successful (non-4xx/5xx) responses.
    if (response.status < 400) cache.put(request, response.clone());
    return response;
  } catch (e) {
    console.log("[updateCache] Updating Cache Failed:", url);
    throw e;
  }
}
// ------------------------------------------------------- //
// ------------------- Event Listeners ------------------- //
// ------------------------------------------------------- //
/**
 * Router for intercepted GET requests. Finds the first ROUTES directive
 * matching the request path and applies its caching strategy. May resolve
 * with a cached Response, a network Response, the directive's fallback
 * result, or undefined when no fallback exists.
 **/
async function handleFetch(event) {
// Get Path
let url = new URL(event.request.url).pathname;
// Find Directive (first match wins; default to never caching)
const directive = ROUTES.find(
(item) =>
(item.route instanceof RegExp && url.match(item.route)) ||
url == item.route,
) || { type: CACHE_NEVER };
// Get Fallback (returns undefined when the directive has no fallback)
const fallbackFunc = (event) => {
console.log("[handleFetch] Fallback:", { url, directive });
if (directive.fallback) return directive.fallback(event);
};
console.log("[handleFetch] Processing:", { url, directive });
// Get Current Cache
let currentCache = await caches.match(event.request);
// Perform Caching Method
switch (directive.type) {
// Always hit the network; fallback on failure.
case CACHE_NEVER:
return fetch(event.request).catch((e) => fallbackFunc(event));
// Serve cache if present; otherwise fetch-and-cache once.
case CACHE_ONLY:
return (
currentCache ||
updateCache(event.request).catch((e) => fallbackFunc(event))
);
// Prefer fresh network result; fall back to cache, then fallback.
case CACHE_UPDATE_SYNC:
return updateCache(event.request).catch(
(e) => currentCache || fallbackFunc(event),
);
// Serve cache immediately; refresh the cache in the background.
case CACHE_UPDATE_ASYNC:
let newResponse = updateCache(event.request).catch((e) =>
fallbackFunc(event),
);
return currentCache || newResponse;
}
}
/**
 * Service worker message handler implementing a small request/response
 * protocol: each message carries `{ id, data: { type, ... } }` and a reply
 * with the same `id` is posted back to the sender. Unknown types receive
 * `{ pong: 1 }`.
 **/
function handleMessage(event) {
console.log("[handleMessage] Received Message:", event.data);
let { id, data } = event.data;
if (data.type === GET_SW_VERSION) {
// Report this worker's version constant.
event.source.postMessage({ id, data: SW_VERSION });
} else if (data.type === PURGE_SW_CACHE) {
// Drop all caches; reply SUCCESS/FAILURE.
purgeCache()
.then(() => event.source.postMessage({ id, data: "SUCCESS" }))
.catch(() => event.source.postMessage({ id, data: "FAILURE" }));
} else if (data.type === GET_SW_CACHE) {
// Enumerate cached documents and reply with their latest progress JSON.
caches.open(SW_CACHE_NAME).then(async (cache) => {
let allKeys = await cache.keys();
// Get Cached Resources
let docResources = allKeys
.map((item) => new URL(item.url).pathname)
.filter(
(item) =>
item.startsWith("/documents/") ||
item.startsWith("/reader/progress/"),
);
// Derive Unique IDs (path shape: /documents/<id>/...)
let documentIDs = Array.from(
new Set(
docResources
.filter((item) => item.startsWith("/documents/"))
.map((item) => item.split("/")[2]),
),
);
/**
 * Filter for cached items only. Attempt to fetch updated result. If
 * failure, return cached version. This ensures we return the most up to
 * date version possible.
 **/
let cachedDocuments = await Promise.all(
documentIDs
.filter(
(id) =>
docResources.includes("/documents/" + id + "/file") &&
docResources.includes("/reader/progress/" + id),
)
.map(async (id) => {
let url = "/reader/progress/" + id;
let currentCache = await caches.match(url);
let resp = await updateCache(url).catch((e) => currentCache);
return resp.json();
}),
);
event.source.postMessage({ id, data: cachedDocuments });
});
} else if (data.type === DEL_SW_CACHE) {
// Evict one document's file and progress entries from the cache.
caches
.open(SW_CACHE_NAME)
.then((cache) =>
Promise.all([
cache.delete("/documents/" + data.id + "/file"),
cache.delete("/reader/progress/" + data.id),
]),
)
.then(() => event.source.postMessage({ id, data: "SUCCESS" }))
.catch(() => event.source.postMessage({ id, data: "FAILURE" }));
} else {
// Unknown message type — treat as a ping.
event.source.postMessage({ id, data: { pong: 1 } });
}
}
/**
 * Populate the service worker cache with every precache asset. Resolves
 * when all PRECACHE_ASSETS have been fetched and stored.
 **/
async function handleInstall(event) {
  return caches
    .open(SW_CACHE_NAME)
    .then((cache) => cache.addAll(PRECACHE_ASSETS));
}
// Wire up the service worker lifecycle & messaging handlers.
self.addEventListener("message", handleMessage);
self.addEventListener("install", function (event) {
// Keep the worker in "installing" until all precache assets are stored.
event.waitUntil(handleInstall(event));
});
self.addEventListener("fetch", (event) => {
/**
 * Weird things happen when a service worker attempts to handle a request
 * when the server responds with chunked transfer encoding. Right now we only
 * use chunked encoding on POSTs. So this is to avoid processing those.
 **/
if (event.request.method != "GET") return;
return event.respondWith(handleFetch(event));
});

BIN
banner.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 56 KiB

BIN
banner.xcf Normal file

Binary file not shown.

View File

View File

@@ -1,6 +1,6 @@
# Book Manager - SyncNinja KOReader Plugin # AnthoLume - SyncNinja KOReader Plugin
This is BookManagers KOReader Plugin called `syncninja.koplugin`. Features include: This is AnthoLume's KOReader Plugin called `syncninja.koplugin`. Features include:
- Syncing read activity - Syncing read activity
- Uploading documents - Uploading documents
@@ -12,10 +12,10 @@ Copy the `syncninja.koplugin` directory to the `plugins` directory for your KORe
## Configuration ## Configuration
You must configure the BookManager server and credentials in SyncNinja. Afterwhich you'll have the ability to configure the sync cadence as well as whether you'd like the plugin to sync your activity, document metadata, and/or documents themselves. You must configure the AnthoLume server and credentials in SyncNinja. Afterwhich you'll have the ability to configure the sync cadence as well as whether you'd like the plugin to sync your activity, document metadata, and/or documents themselves.
## KOSync Compatibility ## KOSync Compatibility
BookManager implements API's compatible with the KOSync plugin. This means that you can utilize this server for KOSync (and it's recommended!). SyncNinja provides an easy way to merge configurations between both KOSync and itself in the menu. AnthoLume implements API's compatible with the KOSync plugin. This means that you can utilize this server for KOSync (and it's recommended!). SyncNinja provides an easy way to merge configurations between both KOSync and itself in the menu.
The KOSync compatible API endpoint is located at: `http(s)://<SERVER>/api/ko`. You can either use the previous mentioned merge feature to automatically configure KOSync once SyncNinja is configured, or you can manually set KOSync's server to the above. The KOSync compatible API endpoint is located at: `http(s)://<SERVER>/api/ko`. You can either use the previous mentioned merge feature to automatically configure KOSync once SyncNinja is configured, or you can manually set KOSync's server to the above.

View File

@@ -72,7 +72,8 @@ end
-------------- New Functions ------------- -------------- New Functions -------------
------------------------------------------ ------------------------------------------
function SyncNinjaClient:check_activity(username, password, device_id, callback) function SyncNinjaClient:check_activity(username, password, device_id, device,
callback)
self.client:reset_middlewares() self.client:reset_middlewares()
self.client:enable("Format.JSON") self.client:enable("Format.JSON")
self.client:enable("GinClient") self.client:enable("GinClient")
@@ -82,7 +83,10 @@ function SyncNinjaClient:check_activity(username, password, device_id, callback)
socketutil:set_timeout(SYNC_TIMEOUTS[1], SYNC_TIMEOUTS[2]) socketutil:set_timeout(SYNC_TIMEOUTS[1], SYNC_TIMEOUTS[2])
local co = coroutine.create(function() local co = coroutine.create(function()
local ok, res = pcall(function() local ok, res = pcall(function()
return self.client:check_activity({device_id = device_id}) return self.client:check_activity({
device_id = device_id,
device = device
})
end) end)
if ok then if ok then
callback(res.status == 200, res.body) callback(res.status == 200, res.body)

View File

@@ -26,8 +26,8 @@
"check_activity": { "check_activity": {
"path": "/api/ko/syncs/activity", "path": "/api/ko/syncs/activity",
"method": "POST", "method": "POST",
"required_params": ["device_id"], "required_params": ["device_id", "device"],
"payload": ["device_id"], "payload": ["device_id", "device"],
"expected_status": [200, 401] "expected_status": [200, 401]
}, },
"download_document": { "download_document": {

File diff suppressed because it is too large Load Diff

View File

@@ -1,8 +1,14 @@
package config package config
import ( import (
"fmt"
"os" "os"
"path"
"path/filepath"
"runtime"
"strings" "strings"
log "github.com/sirupsen/logrus"
) )
type Config struct { type Config struct {
@@ -13,7 +19,6 @@ type Config struct {
// DB Configuration // DB Configuration
DBType string DBType string
DBName string DBName string
DBPassword string
// Data Paths // Data Paths
ConfigPath string ConfigPath string
@@ -21,21 +26,102 @@ type Config struct {
// Miscellaneous Settings // Miscellaneous Settings
RegistrationEnabled bool RegistrationEnabled bool
CookieSessionKey string SearchEnabled bool
DemoMode bool
LogLevel string
// Cookie Settings
CookieAuthKey string
CookieEncKey string
CookieSecure bool
CookieHTTPOnly bool
} }
type customFormatter struct {
log.Formatter
}
// Force UTC & Set type (app)
func (cf customFormatter) Format(e *log.Entry) ([]byte, error) {
if e.Data["type"] == nil {
e.Data["type"] = "app"
}
e.Time = e.Time.UTC()
return cf.Formatter.Format(e)
}
// Set at runtime
var version string = "develop"
func Load() *Config { func Load() *Config {
return &Config{ c := &Config{
Version: "0.0.2", Version: version,
DBType: trimLowerString(getEnv("DATABASE_TYPE", "SQLite")),
DBName: trimLowerString(getEnv("DATABASE_NAME", "book_manager")),
DBPassword: getEnv("DATABASE_PASSWORD", ""),
ConfigPath: getEnv("CONFIG_PATH", "/config"), ConfigPath: getEnv("CONFIG_PATH", "/config"),
DataPath: getEnv("DATA_PATH", "/data"), DataPath: getEnv("DATA_PATH", "/data"),
ListenPort: getEnv("LISTEN_PORT", "8585"), ListenPort: getEnv("LISTEN_PORT", "8585"),
CookieSessionKey: trimLowerString(getEnv("COOKIE_SESSION_KEY", "")), DBType: trimLowerString(getEnv("DATABASE_TYPE", "SQLite")),
DBName: trimLowerString(getEnv("DATABASE_NAME", "antholume")),
RegistrationEnabled: trimLowerString(getEnv("REGISTRATION_ENABLED", "false")) == "true", RegistrationEnabled: trimLowerString(getEnv("REGISTRATION_ENABLED", "false")) == "true",
DemoMode: trimLowerString(getEnv("DEMO_MODE", "false")) == "true",
SearchEnabled: trimLowerString(getEnv("SEARCH_ENABLED", "false")) == "true",
CookieAuthKey: trimLowerString(getEnv("COOKIE_AUTH_KEY", "")),
CookieEncKey: trimLowerString(getEnv("COOKIE_ENC_KEY", "")),
LogLevel: trimLowerString(getEnv("LOG_LEVEL", "info")),
CookieSecure: trimLowerString(getEnv("COOKIE_SECURE", "true")) == "true",
CookieHTTPOnly: trimLowerString(getEnv("COOKIE_HTTP_ONLY", "true")) == "true",
} }
// Parse log level
logLevel, err := log.ParseLevel(c.LogLevel)
if err != nil {
logLevel = log.InfoLevel
}
// Create custom formatter
logFormatter := &customFormatter{&log.JSONFormatter{
CallerPrettyfier: prettyCaller,
}}
// Create log rotator
rotateFileHook, err := NewRotateFileHook(RotateFileConfig{
Filename: path.Join(c.ConfigPath, "logs/antholume.log"),
MaxSize: 50,
MaxBackups: 3,
MaxAge: 30,
Level: logLevel,
Formatter: logFormatter,
})
if err != nil {
log.Fatal("Unable to initialize file rotate hook")
}
// Rotate now
rotateFileHook.Rotate()
// Set logger settings
log.SetLevel(logLevel)
log.SetFormatter(logFormatter)
log.SetReportCaller(true)
log.AddHook(rotateFileHook)
// Ensure directories exist
c.EnsureDirectories()
return c
}
// Ensures needed directories exist
func (c *Config) EnsureDirectories() {
os.Mkdir(c.ConfigPath, 0755)
os.Mkdir(c.DataPath, 0755)
docDir := filepath.Join(c.DataPath, "documents")
coversDir := filepath.Join(c.DataPath, "covers")
backupDir := filepath.Join(c.DataPath, "backups")
os.Mkdir(docDir, 0755)
os.Mkdir(coversDir, 0755)
os.Mkdir(backupDir, 0755)
} }
func getEnv(key, fallback string) string { func getEnv(key, fallback string) string {
@@ -48,3 +134,24 @@ func getEnv(key, fallback string) string {
func trimLowerString(val string) string { func trimLowerString(val string) string {
return strings.ToLower(strings.TrimSpace(val)) return strings.ToLower(strings.TrimSpace(val))
} }
func prettyCaller(f *runtime.Frame) (function string, file string) {
purgePrefix := "reichard.io/antholume/"
pathName := strings.Replace(f.Func.Name(), purgePrefix, "", 1)
parts := strings.Split(pathName, ".")
filepath, line := f.Func.FileLine(f.PC)
splitFilePath := strings.Split(filepath, "/")
fileName := fmt.Sprintf("%s/%s@%d", parts[0], splitFilePath[len(splitFilePath)-1], line)
functionName := strings.Replace(pathName, parts[0]+".", "", 1)
// Exclude GIN Logger
if functionName == "NewApi.apiLogger.func1" {
fileName = ""
functionName = ""
}
return functionName, fileName
}

37
config/config_test.go Normal file
View File

@@ -0,0 +1,37 @@
package config
import (
	"fmt"
	"runtime"
	"testing"

	"github.com/stretchr/testify/assert"
)
// TestLoadConfig verifies that a freshly loaded config defaults to SQLite.
func TestLoadConfig(t *testing.T) {
	loadedConfig := Load()
	assert.Equal(t, "sqlite", loadedConfig.DBType)
}
// TestGetEnvDefault verifies the fallback is returned when the environment
// variable is unset.
func TestGetEnvDefault(t *testing.T) {
	fallback := "def_val"
	got := getEnv("DEFAULT_TEST", fallback)
	assert.Equal(t, fallback, got)
}
// TestTrimLowerString verifies whitespace trimming and lowercasing.
func TestTrimLowerString(t *testing.T) {
	got := trimLowerString(" trimTest ")
	assert.Equal(t, "trimtest", got)
}
// TestPrettyCaller verifies prettyCaller's formatting of the current frame.
//
// NOTE(review): the "@30" assertion is coupled to the exact line number of
// the runtime.Caller(0) call below — adding or removing lines above it in
// this file will break the test.
func TestPrettyCaller(t *testing.T) {
	// Capture the current PC and resolve it into a runtime.Frame.
	p, _, _, _ := runtime.Caller(0)
	result := runtime.CallersFrames([]uintptr{p})
	f, _ := result.Next()

	functionName, fileName := prettyCaller(&f)
	assert.Equal(t, "TestPrettyCaller", functionName, "should have current function name")
	assert.Equal(t, "config/config_test.go@30", fileName, "should have current file path and line number")
}

54
config/logger.go Normal file
View File

@@ -0,0 +1,54 @@
package config
import (
"github.com/sirupsen/logrus"
"gopkg.in/natefinch/lumberjack.v2"
)
// Modified "snowzach/rotatefilehook" to support manual rotation

// RotateFileConfig configures a RotateFileHook. Filename through Compress
// are passed straight through to the underlying lumberjack writer.
type RotateFileConfig struct {
	Filename   string           // log file path (lumberjack)
	MaxSize    int              // size threshold before rotation (lumberjack)
	MaxBackups int              // rotated files to retain (lumberjack)
	MaxAge     int              // retention age for rotated files (lumberjack)
	Compress   bool             // compress rotated files (lumberjack)
	Level      logrus.Level     // minimum logrus level this hook handles
	Formatter  logrus.Formatter // formats entries before they are written
}
// RotateFileHook is a logrus hook that writes formatted entries to a
// rotated log file and supports manual rotation via Rotate.
type RotateFileHook struct {
	Config    RotateFileConfig
	logWriter *lumberjack.Logger // underlying rotating writer
}
// NewRotateFileHook builds a RotateFileHook backed by a lumberjack writer
// configured from the supplied config.
func NewRotateFileHook(config RotateFileConfig) (*RotateFileHook, error) {
	writer := &lumberjack.Logger{
		Filename:   config.Filename,
		MaxSize:    config.MaxSize,
		MaxBackups: config.MaxBackups,
		MaxAge:     config.MaxAge,
		Compress:   config.Compress,
	}
	return &RotateFileHook{Config: config, logWriter: writer}, nil
}
// Rotate forces an immediate rotation of the underlying log file.
func (hook *RotateFileHook) Rotate() error {
	return hook.logWriter.Rotate()
}
// Levels reports which levels this hook fires for: every level at least as
// severe as Config.Level. logrus.AllLevels is ordered most-severe-first, so
// the slice is truncated just past the configured level.
func (hook *RotateFileHook) Levels() []logrus.Level {
	return logrus.AllLevels[:hook.Config.Level+1]
}
// Fire formats the entry and writes it to the rotating log file. Part of
// the logrus.Hook interface.
func (hook *RotateFileHook) Fire(entry *logrus.Entry) (err error) {
	b, err := hook.Config.Formatter.Format(entry)
	if err != nil {
		return err
	}
	// Propagate write failures instead of silently dropping the entry.
	if _, err := hook.logWriter.Write(b); err != nil {
		return err
	}
	return nil
}

View File

@@ -1,6 +1,6 @@
// Code generated by sqlc. DO NOT EDIT. // Code generated by sqlc. DO NOT EDIT.
// versions: // versions:
// sqlc v1.21.0 // sqlc v1.29.0
package database package database

View File

@@ -0,0 +1,151 @@
-- Rebuilds per-user/per-document reading statistics from raw activity.
WITH grouped_activity AS (
    SELECT
        ga.user_id,
        ga.document_id,
        MAX(ga.created_at) AS created_at,
        MAX(ga.start_time) AS start_time,
        MIN(ga.start_percentage) AS start_percentage,
        MAX(ga.end_percentage) AS end_percentage,

        -- Total Duration & Percentage
        SUM(ga.duration) AS total_time_seconds,
        SUM(ga.end_percentage - ga.start_percentage) AS total_read_percentage,

        -- Yearly Duration
        SUM(
            CASE
                WHEN
                    ga.start_time >= DATE('now', '-1 year')
                    THEN ga.duration
                ELSE 0
            END
        )
            AS yearly_time_seconds,

        -- Yearly Percentage
        SUM(
            CASE
                WHEN
                    ga.start_time >= DATE('now', '-1 year')
                    THEN ga.end_percentage - ga.start_percentage
                ELSE 0
            END
        )
            AS yearly_read_percentage,

        -- Monthly Duration
        SUM(
            CASE
                WHEN
                    ga.start_time >= DATE('now', '-1 month')
                    THEN ga.duration
                ELSE 0
            END
        )
            AS monthly_time_seconds,

        -- Monthly Percentage
        SUM(
            CASE
                WHEN
                    ga.start_time >= DATE('now', '-1 month')
                    THEN ga.end_percentage - ga.start_percentage
                ELSE 0
            END
        )
            AS monthly_read_percentage,

        -- Weekly Duration
        SUM(
            CASE
                WHEN
                    ga.start_time >= DATE('now', '-7 days')
                    THEN ga.duration
                ELSE 0
            END
        )
            AS weekly_time_seconds,

        -- Weekly Percentage
        SUM(
            CASE
                WHEN
                    ga.start_time >= DATE('now', '-7 days')
                    THEN ga.end_percentage - ga.start_percentage
                ELSE 0
            END
        )
            AS weekly_read_percentage
    FROM activity AS ga
    GROUP BY ga.user_id, ga.document_id
),

current_progress AS (
    SELECT
        user_id,
        document_id,
        COALESCE((
            SELECT dp.percentage
            FROM document_progress AS dp
            WHERE
                dp.user_id = iga.user_id
                AND dp.document_id = iga.document_id
            ORDER BY dp.created_at DESC
            LIMIT 1
        ), end_percentage) AS percentage
    FROM grouped_activity AS iga
)

INSERT INTO document_user_statistics
SELECT
    ga.document_id,
    ga.user_id,
    cp.percentage,
    MAX(ga.start_time) AS last_read,
    MAX(ga.created_at) AS last_seen,
    SUM(ga.total_read_percentage) AS read_percentage,

    -- All Time WPM
    -- COALESCE guards the division-by-zero NULL (as the period WPMs do).
    SUM(ga.total_time_seconds) AS total_time_seconds,
    (CAST(COALESCE(d.words, 0.0) AS REAL) * SUM(ga.total_read_percentage))
        AS total_words_read,
    COALESCE(
        (CAST(COALESCE(d.words, 0.0) AS REAL) * SUM(ga.total_read_percentage))
        / (SUM(ga.total_time_seconds) / 60.0), 0.0)
        AS total_wpm,

    -- Yearly WPM (divide by 60.0, not 60, to avoid integer truncation)
    ga.yearly_time_seconds,
    CAST(COALESCE(d.words, 0.0) AS REAL) * ga.yearly_read_percentage
        AS yearly_words_read,
    COALESCE(
        (CAST(COALESCE(d.words, 0.0) AS REAL) * ga.yearly_read_percentage)
        / (ga.yearly_time_seconds / 60.0), 0.0)
        AS yearly_wpm,

    -- Monthly WPM (divide by 60.0, not 60, to avoid integer truncation)
    ga.monthly_time_seconds,
    CAST(COALESCE(d.words, 0.0) AS REAL) * ga.monthly_read_percentage
        AS monthly_words_read,
    COALESCE(
        (CAST(COALESCE(d.words, 0.0) AS REAL) * ga.monthly_read_percentage)
        / (ga.monthly_time_seconds / 60.0), 0.0)
        AS monthly_wpm,

    -- Weekly WPM (divide by 60.0, not 60, to avoid integer truncation)
    ga.weekly_time_seconds,
    CAST(COALESCE(d.words, 0.0) AS REAL) * ga.weekly_read_percentage
        AS weekly_words_read,
    COALESCE(
        (CAST(COALESCE(d.words, 0.0) AS REAL) * ga.weekly_read_percentage)
        / (ga.weekly_time_seconds / 60.0), 0.0)
        AS weekly_wpm
FROM grouped_activity AS ga
INNER JOIN
    current_progress AS cp
    ON ga.user_id = cp.user_id AND ga.document_id = cp.document_id
INNER JOIN
    documents AS d
    ON ga.document_id = d.id
GROUP BY ga.document_id, ga.user_id
ORDER BY total_wpm DESC;

27
database/documents.go Normal file
View File

@@ -0,0 +1,27 @@
package database
import (
"context"
"fmt"
"reichard.io/antholume/pkg/ptr"
"reichard.io/antholume/pkg/sliceutils"
)
// GetDocument returns a single document (with stats) visible to the given
// user, or an error when no matching document exists.
func (d *DBManager) GetDocument(ctx context.Context, docID, userID string) (*GetDocumentsWithStatsRow, error) {
	rows, err := d.Queries.GetDocumentsWithStats(ctx, GetDocumentsWithStatsParams{
		ID:     ptr.Of(docID),
		UserID: userID,
		Limit:  1,
	})
	if err != nil {
		return nil, err
	}

	doc, ok := sliceutils.First(rows)
	if !ok {
		return nil, fmt.Errorf("document not found: %s", docID)
	}
	return &doc, nil
}

115
database/documents_test.go Normal file
View File

@@ -0,0 +1,115 @@
package database
import (
"context"
"fmt"
"testing"
"github.com/stretchr/testify/suite"
"reichard.io/antholume/config"
)
// DocumentsTestSuite exercises document-related queries against an
// in-memory database.
type DocumentsTestSuite struct {
	suite.Suite
	dbm *DBManager // fresh manager per test (see SetupTest)
}
// TestDocuments runs the document query test suite.
func TestDocuments(t *testing.T) {
	suite.Run(t, new(DocumentsTestSuite))
}
// SetupTest creates a fresh in-memory database and seeds it with a single
// document before every test.
func (suite *DocumentsTestSuite) SetupTest() {
	cfg := config.Config{
		DBType: "memory",
	}
	suite.dbm = NewMgr(&cfg)

	// Create Document
	_, err := suite.dbm.Queries.UpsertDocument(context.Background(), UpsertDocumentParams{
		ID:     documentID,
		Title:  &documentTitle,
		Author: &documentAuthor,
		Words:  &documentWords,
	})
	suite.NoError(err)
}
// DOCUMENT - TODO:
// - 󰊕 (q *Queries) GetDocumentProgress
// - 󰊕 (q *Queries) GetDocumentWithStats
// - 󰊕 (q *Queries) GetDocumentsSize
// - 󰊕 (q *Queries) GetDocumentsWithStats
// - 󰊕 (q *Queries) GetMissingDocuments
// TestGetDocument verifies the seeded document can be fetched by ID.
func (suite *DocumentsTestSuite) TestGetDocument() {
	doc, err := suite.dbm.Queries.GetDocument(context.Background(), documentID)
	suite.Nil(err, "should have nil err")
	// Fixed copy-pasted assertion message: this checks the fetched ID, it
	// does not change anything.
	suite.Equal(documentID, doc.ID, "should have document id")
}
// TestUpsertDocument verifies that inserting a new document persists its
// ID, title and author.
func (suite *DocumentsTestSuite) TestUpsertDocument() {
	newDocID := "docid1"
	doc, err := suite.dbm.Queries.UpsertDocument(context.Background(), UpsertDocumentParams{
		ID:     newDocID,
		Title:  &documentTitle,
		Author: &documentAuthor,
	})
	suite.Nil(err, "should have nil err")
	suite.Equal(newDocID, doc.ID, "should have document id")
	suite.Equal(documentTitle, *doc.Title, "should have document title")
	suite.Equal(documentAuthor, *doc.Author, "should have document author")
}
// TestDeleteDocument verifies deletion soft-deletes the document: the row
// remains fetchable with Deleted set.
func (suite *DocumentsTestSuite) TestDeleteDocument() {
	changed, err := suite.dbm.Queries.DeleteDocument(context.Background(), documentID)
	suite.Nil(err, "should have nil err")
	suite.Equal(int64(1), changed, "should have changed the document")

	doc, err := suite.dbm.Queries.GetDocument(context.Background(), documentID)
	suite.Nil(err, "should have nil err")
	suite.True(doc.Deleted, "should have deleted the document")
}
// TestGetDeletedDocuments verifies that a deleted document is reported by
// GetDeletedDocuments.
func (suite *DocumentsTestSuite) TestGetDeletedDocuments() {
	rowsChanged, err := suite.dbm.Queries.DeleteDocument(context.Background(), documentID)
	suite.Nil(err, "should have nil err")
	suite.Equal(int64(1), rowsChanged, "should have changed the document")

	deletedDocs, err := suite.dbm.Queries.GetDeletedDocuments(context.Background(), []string{documentID})
	suite.Nil(err, "should have nil err")
	suite.Len(deletedDocs, 1, "should have one deleted document")
}
// TODO - Convert GetWantedDocuments -> (sqlc.slice('document_ids'));
// TestGetWantedDocuments verifies wanted-document lookup. Note the query
// takes a JSON-encoded array of IDs as a plain string.
func (suite *DocumentsTestSuite) TestGetWantedDocuments() {
	wantedDocs, err := suite.dbm.Queries.GetWantedDocuments(context.Background(), fmt.Sprintf("[\"%s\"]", documentID))
	suite.Nil(err, "should have nil err")
	suite.Len(wantedDocs, 1, "should have one wanted document")
}
// TestGetMissingDocuments verifies that a document with a filepath is
// reported as missing only when its ID is absent from the provided list.
func (suite *DocumentsTestSuite) TestGetMissingDocuments() {
	// Create Document (add a filepath so it qualifies for the query)
	_, err := suite.dbm.Queries.UpsertDocument(context.Background(), UpsertDocumentParams{
		ID:       documentID,
		Filepath: &documentFilepath,
	})
	suite.NoError(err)

	missingDocs, err := suite.dbm.Queries.GetMissingDocuments(context.Background(), []string{documentID})
	suite.Nil(err, "should have nil err")
	suite.Len(missingDocs, 0, "should have no wanted document")

	missingDocs, err = suite.dbm.Queries.GetMissingDocuments(context.Background(), []string{"other"})
	suite.Nil(err, "should have nil err")
	suite.Len(missingDocs, 1, "should have one missing document")
	suite.Equal(documentID, missingDocs[0].ID, "should have missing doc")

	// TODO - https://github.com/sqlc-dev/sqlc/issues/3451
	// missingDocs, err = suite.dbm.Queries.GetMissingDocuments(context.Background(), []string{})
	// suite.Nil(err, "should have nil err")
	// suite.Len(missingDocs, 1, "should have one missing document")
	// suite.Equal(documentID, missingDocs[0].ID, "should have missing doc")
}

View File

@@ -3,63 +3,256 @@ package database
import ( import (
"context" "context"
"database/sql" "database/sql"
_ "embed" "database/sql/driver"
"embed"
"errors"
"fmt" "fmt"
"path" "path/filepath"
"time"
sqlite "github.com/mattn/go-sqlite3" "github.com/pressly/goose/v3"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
"reichard.io/bbank/config" sqlite "modernc.org/sqlite"
"reichard.io/antholume/config"
_ "reichard.io/antholume/database/migrations"
) )
type DBManager struct { type DBManager struct {
DB *sql.DB DB *sql.DB
Ctx context.Context
Queries *Queries Queries *Queries
cfg *config.Config
} }
//go:embed schema.sql //go:embed schema.sql
var ddl string var ddl string
//go:embed user_streaks.sql
var user_streaks string
//go:embed document_user_statistics.sql
var document_user_statistics string
//go:embed migrations/*
var migrations embed.FS
// Register scalar sqlite function on init
func init() {
sqlite.MustRegisterFunction("LOCAL_TIME", &sqlite.FunctionImpl{
NArgs: 2,
Deterministic: true,
Scalar: localTime,
})
sqlite.MustRegisterFunction("LOCAL_DATE", &sqlite.FunctionImpl{
NArgs: 2,
Deterministic: true,
Scalar: localDate,
})
}
// NewMgr Returns an initialized manager
func NewMgr(c *config.Config) *DBManager { func NewMgr(c *config.Config) *DBManager {
// Create Manager // Create Manager
dbm := &DBManager{ dbm := &DBManager{cfg: c}
Ctx: context.Background(),
if err := dbm.init(context.Background()); err != nil {
log.Panic("Unable to init DB")
} }
// Create Database
if c.DBType == "sqlite" {
sql.Register("sqlite3_custom", &sqlite.SQLiteDriver{
ConnectHook: connectHookSQLite,
})
dbLocation := path.Join(c.ConfigPath, fmt.Sprintf("%s.db", c.DBName))
var err error
dbm.DB, err = sql.Open("sqlite3_custom", dbLocation)
if err != nil {
log.Fatal(err)
}
} else {
log.Fatal("Unsupported Database")
}
// Create Tables
if _, err := dbm.DB.ExecContext(dbm.Ctx, ddl); err != nil {
log.Fatal(err)
}
dbm.Queries = New(dbm.DB)
return dbm return dbm
} }
func connectHookSQLite(conn *sqlite.SQLiteConn) error { // init loads the DB manager
if err := conn.RegisterFunc("test_func", func() string { func (dbm *DBManager) init(ctx context.Context) error {
return "FOOBAR" // Build DB Location
}, false); err != nil { var dbLocation string
log.Info("Error Registering Function") switch dbm.cfg.DBType {
case "sqlite":
dbLocation = filepath.Join(dbm.cfg.ConfigPath, fmt.Sprintf("%s.db", dbm.cfg.DBName))
case "memory":
dbLocation = ":memory:"
default:
return fmt.Errorf("unsupported database")
}
var err error
dbm.DB, err = sql.Open("sqlite", dbLocation)
if err != nil {
log.Panicf("Unable to open DB: %v", err)
return err return err
} }
// Single open connection
dbm.DB.SetMaxOpenConns(1)
// Check if DB is new
isNew, err := isEmpty(dbm.DB)
if err != nil {
log.Panicf("Unable to determine db info: %v", err)
return err
}
// Init SQLc
dbm.Queries = New(dbm.DB)
// Execute schema
if _, err := dbm.DB.Exec(ddl, nil); err != nil {
log.Panicf("Error executing schema: %v", err)
return err
}
// Perform migrations
err = dbm.performMigrations(isNew)
if err != nil && err != goose.ErrNoMigrationFiles {
log.Panicf("Error running DB migrations: %v", err)
return err
}
// Update settings
err = dbm.updateSettings(ctx)
if err != nil {
log.Panicf("Error running DB settings update: %v", err)
return err
}
// Cache tables
if err := dbm.CacheTempTables(ctx); err != nil {
log.Warn("Refreshing temp table cache failed: ", err)
}
return nil return nil
} }
// Reload closes the underlying database handle and re-initializes the
// manager from scratch.
func (dbm *DBManager) Reload(ctx context.Context) error {
	// Release the current handle first
	if err := dbm.DB.Close(); err != nil {
		return err
	}
	// Rebuild the connection, schema, migrations and caches
	return dbm.init(ctx)
}
// CacheTempTables clears existing statistics and recalculates them by
// executing the embedded user_streaks and document_user_statistics SQL.
func (dbm *DBManager) CacheTempTables(ctx context.Context) error {
	start := time.Now()
	if _, err := dbm.DB.ExecContext(ctx, user_streaks); err != nil {
		return err
	}
	log.Debug("Cached 'user_streaks' in: ", time.Since(start))

	start = time.Now()
	if _, err := dbm.DB.ExecContext(ctx, document_user_statistics); err != nil {
		return err
	}
	log.Debug("Cached 'document_user_statistics' in: ", time.Since(start))

	return nil
}
// updateSettings applies runtime SQLite PRAGMAs (foreign-key enforcement
// and WAL journal mode) and records the running application version in the
// settings table.
func (dbm *DBManager) updateSettings(ctx context.Context) error {
	// Set SQLite PRAGMA Settings
	pragmaQuery := `
	PRAGMA foreign_keys = ON;
	PRAGMA journal_mode = WAL;
	`
	if _, err := dbm.DB.Exec(pragmaQuery, nil); err != nil {
		log.Errorf("Error executing pragma: %v", err)
		return err
	}

	// Update Antholume Version in DB
	if _, err := dbm.Queries.UpdateSettings(ctx, UpdateSettingsParams{
		Name:  "version",
		Value: dbm.cfg.Version,
	}); err != nil {
		log.Errorf("Error updating DB settings: %v", err)
		return err
	}

	return nil
}
// performMigrations runs all goose migrations. isNew is threaded through
// the context so individual migrations can no-op on a freshly created
// database (whose schema is already current).
func (dbm *DBManager) performMigrations(isNew bool) error {
	// Create context — the plain string key is kept for compatibility with
	// the existing migrations that read ctx.Value("isNew").
	ctx := context.WithValue(context.Background(), "isNew", isNew) // nolint

	// Set DB migration
	goose.SetBaseFS(migrations)

	// Run migrations
	goose.SetLogger(log.StandardLogger())
	if err := goose.SetDialect("sqlite"); err != nil {
		return err
	}
	return goose.UpContext(ctx, dbm.DB, "migrations")
}
// isEmpty reports whether the database currently contains no tables.
func isEmpty(db *sql.DB) (bool, error) {
	var count int
	row := db.QueryRow("SELECT COUNT(*) FROM sqlite_master WHERE type='table';")
	if err := row.Scan(&count); err != nil {
		return false, err
	}
	return count == 0, nil
}
// localTime is a custom SQL function registered as LOCAL_TIME in init. It
// converts an RFC3339 UTC timestamp (args[0]) into the IANA timezone named
// by args[1] and returns it formatted as RFC3339.
func localTime(ctx *sqlite.FunctionContext, args []driver.Value) (driver.Value, error) {
	timeStr, ok := args[0].(string)
	if !ok {
		// Fixed: message previously referenced "TZTime", a name that does
		// not exist — this function is registered as LOCAL_TIME.
		return nil, errors.New("both arguments to LOCAL_TIME must be strings")
	}
	timeZoneStr, ok := args[1].(string)
	if !ok {
		return nil, errors.New("both arguments to LOCAL_TIME must be strings")
	}
	timeZone, err := time.LoadLocation(timeZoneStr)
	if err != nil {
		return nil, errors.New("unable to parse timezone")
	}
	parsedTime, err := time.ParseInLocation(time.RFC3339, timeStr, time.UTC)
	if err != nil {
		return nil, errors.New("unable to parse time")
	}
	return parsedTime.In(timeZone).Format(time.RFC3339), nil
}
// localDate is a custom SQL function registered as LOCAL_DATE in init. It
// converts an RFC3339 UTC timestamp (args[0]) into the IANA timezone named
// by args[1] and returns only the date portion (YYYY-MM-DD).
func localDate(ctx *sqlite.FunctionContext, args []driver.Value) (driver.Value, error) {
	timeStr, ok := args[0].(string)
	if !ok {
		// Fixed: message previously referenced "TZTime", a name that does
		// not exist — this function is registered as LOCAL_DATE.
		return nil, errors.New("both arguments to LOCAL_DATE must be strings")
	}
	timeZoneStr, ok := args[1].(string)
	if !ok {
		return nil, errors.New("both arguments to LOCAL_DATE must be strings")
	}
	timeZone, err := time.LoadLocation(timeZoneStr)
	if err != nil {
		return nil, errors.New("unable to parse timezone")
	}
	parsedTime, err := time.ParseInLocation(time.RFC3339, timeStr, time.UTC)
	if err != nil {
		return nil, errors.New("unable to parse time")
	}
	return parsedTime.In(timeZone).Format("2006-01-02"), nil
}

171
database/manager_test.go Normal file
View File

@@ -0,0 +1,171 @@
package database
import (
"context"
"fmt"
"testing"
"time"
"github.com/stretchr/testify/suite"
"reichard.io/antholume/config"
"reichard.io/antholume/utils"
)
// Shared fixtures used by the database test suites.
var (
	userID           string = "testUser"
	userPass         string = "testPass"
	deviceID         string = "testDevice"
	deviceName       string = "testDeviceName"
	documentID       string = "testDocument"
	documentTitle    string = "testTitle"
	documentAuthor   string = "testAuthor"
	documentFilepath string = "./testPath.epub"
	documentWords    int64  = 5000
)
// DatabaseTestSuite exercises core database queries against a seeded
// in-memory database.
type DatabaseTestSuite struct {
	suite.Suite
	dbm *DBManager // fresh manager per test (see SetupTest)
}
// TestDatabase runs the core database test suite.
func TestDatabase(t *testing.T) {
	suite.Run(t, new(DatabaseTestSuite))
}
// PROGRESS - TODO:
// - 󰊕 (q *Queries) GetProgress
// - 󰊕 (q *Queries) UpdateProgress
// SetupTest creates a fresh in-memory database seeded with one user, one
// document, one device, and ten days of activity (one 60-second session per
// day), then populates the cached statistics tables.
func (suite *DatabaseTestSuite) SetupTest() {
	cfg := config.Config{
		DBType: "memory",
	}
	suite.dbm = NewMgr(&cfg)

	// Create User
	rawAuthHash, _ := utils.GenerateToken(64)
	authHash := fmt.Sprintf("%x", rawAuthHash)
	_, err := suite.dbm.Queries.CreateUser(context.Background(), CreateUserParams{
		ID:       userID,
		Pass:     &userPass,
		AuthHash: &authHash,
	})
	suite.NoError(err)

	// Create Document
	_, err = suite.dbm.Queries.UpsertDocument(context.Background(), UpsertDocumentParams{
		ID:       documentID,
		Title:    &documentTitle,
		Author:   &documentAuthor,
		Filepath: &documentFilepath,
		Words:    &documentWords,
	})
	suite.NoError(err)

	// Create Device
	_, err = suite.dbm.Queries.UpsertDevice(context.Background(), UpsertDeviceParams{
		ID:         deviceID,
		UserID:     userID,
		DeviceName: deviceName,
	})
	suite.NoError(err)

	// Create Activity — one entry per day, today and the 9 days before it.
	// Idiom fix: `!d.After(end)` replaces `d.After(end) == false` (S1002).
	end := time.Now()
	start := end.AddDate(0, 0, -9)
	var counter int64 = 0
	for d := start; !d.After(end); d = d.AddDate(0, 0, 1) {
		counter++

		// Add Item
		activity, err := suite.dbm.Queries.AddActivity(context.Background(), AddActivityParams{
			DocumentID:      documentID,
			DeviceID:        deviceID,
			UserID:          userID,
			StartTime:       d.UTC().Format(time.RFC3339),
			Duration:        60,
			StartPercentage: float64(counter) / 100.0,
			EndPercentage:   float64(counter+1) / 100.0,
		})
		suite.Nil(err, fmt.Sprintf("[%d] should have nil err for add activity", counter))
		suite.Equal(counter, activity.ID, fmt.Sprintf("[%d] should have correct id for add activity", counter))
	}

	// Initiate Cache
	err = suite.dbm.CacheTempTables(context.Background())
	suite.NoError(err)
}
// DEVICES - TODO:
// - 󰊕 (q *Queries) GetDevice
// - 󰊕 (q *Queries) GetDevices
// - 󰊕 (q *Queries) UpsertDevice
// TestDevice verifies that a device upsert round-trips ID, user and name.
func (suite *DatabaseTestSuite) TestDevice() {
	newDeviceID := "dev123"
	device, err := suite.dbm.Queries.UpsertDevice(context.Background(), UpsertDeviceParams{
		ID:         newDeviceID,
		UserID:     userID,
		DeviceName: deviceName,
	})
	suite.Nil(err, "should have nil err")
	suite.Equal(newDeviceID, device.ID, "should have device id")
	suite.Equal(userID, device.UserID, "should have user id")
	suite.Equal(deviceName, device.DeviceName, "should have device name")
}
// ACTIVITY - TODO:
// - 󰊕 (q *Queries) AddActivity
// - 󰊕 (q *Queries) GetActivity
// - 󰊕 (q *Queries) GetLastActivity
// TestActivity verifies paginated activity retrieval: all 10 seeded rows
// are returned for the user, and enabling the document filter with an
// unknown document ID returns none.
func (suite *DatabaseTestSuite) TestActivity() {
	// Validate Exists
	existsRows, err := suite.dbm.Queries.GetActivity(context.Background(), GetActivityParams{
		UserID: userID,
		Offset: 0,
		Limit:  50,
	})
	suite.Nil(err, "should have nil err for get activity")
	suite.Len(existsRows, 10, "should have correct number of rows get activity")

	// Validate Doesn't Exist
	doesntExistsRows, err := suite.dbm.Queries.GetActivity(context.Background(), GetActivityParams{
		UserID:     userID,
		DocumentID: "unknownDoc",
		DocFilter:  true,
		Offset:     0,
		Limit:      50,
	})
	suite.Nil(err, "should have nil err for get activity")
	suite.Len(doesntExistsRows, 0, "should have no rows")
}
// MISC - TODO:
// - 󰊕 (q *Queries) AddMetadata
// - 󰊕 (q *Queries) GetDailyReadStats
// - 󰊕 (q *Queries) GetDatabaseInfo
// - 󰊕 (q *Queries) UpdateSettings
// TestGetDailyReadStats verifies the 30-day daily stats: one minute read on
// each of the 10 seeded days, zero on the remaining 20.
func (suite *DatabaseTestSuite) TestGetDailyReadStats() {
	readStats, err := suite.dbm.Queries.GetDailyReadStats(context.Background(), userID)
	suite.Nil(err, "should have nil err")
	suite.Len(readStats, 30, "should have length of 30")

	for dayIdx, stat := range readStats {
		if dayIdx < 10 {
			// Seeded days: 60 seconds each -> one minute
			suite.Equal(int64(1), stat.MinutesRead, "should have one minute read")
		} else {
			// No activity on older days
			suite.Equal(int64(0), stat.MinutesRead, "should have zero minutes read")
		}
	}
}

View File

@@ -0,0 +1,89 @@
package migrations
import (
"context"
"database/sql"
"fmt"
"github.com/pressly/goose/v3"
"reichard.io/antholume/utils"
)
// init registers this migration's up/down functions with goose.
func init() {
	goose.AddMigrationContext(upUserAuthHash, downUserAuthHash)
}
// upUserAuthHash adds a NOT NULL auth_hash column to users, generating a
// random auth hash for every existing user. It no-ops on a brand-new
// database, whose base schema already includes the column.
func upUserAuthHash(ctx context.Context, tx *sql.Tx) error {
	// Determine if we have a new DB or not (set by performMigrations)
	isNew := ctx.Value("isNew").(bool)
	if isNew {
		return nil
	}

	// Copy table & create column
	_, err := tx.Exec(`
	    -- Create Copy Table
	    CREATE TABLE temp_users AS SELECT * FROM users;
	    ALTER TABLE temp_users ADD COLUMN auth_hash TEXT;

	    -- Update Schema
	    DELETE FROM users;
	    ALTER TABLE users ADD COLUMN auth_hash TEXT NOT NULL;
	`)
	if err != nil {
		return err
	}

	// Get current users
	rows, err := tx.Query("SELECT id FROM temp_users")
	if err != nil {
		return err
	}
	// Fix: rows were previously never closed
	defer rows.Close()

	// Query existing users
	var users []string
	for rows.Next() {
		var user string
		if err := rows.Scan(&user); err != nil {
			return err
		}
		users = append(users, user)
	}
	// Fix: surface iteration errors that were previously ignored
	if err := rows.Err(); err != nil {
		return err
	}

	// Create auth hash per user
	for _, user := range users {
		rawAuthHash, err := utils.GenerateToken(64)
		if err != nil {
			return err
		}
		authHash := fmt.Sprintf("%x", rawAuthHash)

		_, err = tx.Exec("UPDATE temp_users SET auth_hash = ? WHERE id = ?", authHash, user)
		if err != nil {
			return err
		}
	}

	// Copy from temp to true table
	_, err = tx.Exec(`
	    -- Copy Into New
	    INSERT INTO users SELECT * FROM temp_users;

	    -- Drop Temp Table
	    DROP TABLE temp_users;
	`)
	if err != nil {
		return err
	}

	return nil
}
// downUserAuthHash removes the auth_hash column from users.
func downUserAuthHash(ctx context.Context, tx *sql.Tx) error {
	// Drop column. Fix: the statement was missing the TABLE keyword
	// ("ALTER users ...") and would always fail with a syntax error.
	_, err := tx.Exec("ALTER TABLE users DROP COLUMN auth_hash")
	if err != nil {
		return err
	}
	return nil
}

View File

@@ -0,0 +1,58 @@
package migrations
import (
"context"
"database/sql"
"github.com/pressly/goose/v3"
)
// init registers this migration's up/down functions with goose.
func init() {
	goose.AddMigrationContext(upUserTimezone, downUserTimezone)
}
// upUserTimezone replaces the users.time_offset column with a timezone
// column defaulted to 'Europe/London', preserving existing rows via a temp
// table. It no-ops on a brand-new database, whose schema is already current.
func upUserTimezone(ctx context.Context, tx *sql.Tx) error {
	// Determine if we have a new DB or not (set by performMigrations)
	isNew := ctx.Value("isNew").(bool)
	if isNew {
		return nil
	}

	// Copy table & create column
	_, err := tx.Exec(`
	    -- Copy Table
	    CREATE TABLE temp_users AS SELECT * FROM users;
	    ALTER TABLE temp_users DROP COLUMN time_offset;
	    ALTER TABLE temp_users ADD COLUMN timezone TEXT;
	    UPDATE temp_users SET timezone = 'Europe/London';

	    -- Clean Table
	    DELETE FROM users;
	    ALTER TABLE users DROP COLUMN time_offset;
	    ALTER TABLE users ADD COLUMN timezone TEXT NOT NULL DEFAULT 'Europe/London';

	    -- Copy Temp Table -> Clean Table
	    INSERT INTO users SELECT * FROM temp_users;

	    -- Drop Temp Table
	    DROP TABLE temp_users;
	`)
	if err != nil {
		return err
	}

	return nil
}
// downUserTimezone renames users.timezone back to time_offset and resets
// every row to the legacy '0 hours' value.
func downUserTimezone(ctx context.Context, tx *sql.Tx) error {
	// Update column name & value
	_, err := tx.Exec(`
	    ALTER TABLE users RENAME COLUMN timezone TO time_offset;
	    UPDATE users SET time_offset = '0 hours';
	`)
	if err != nil {
		return err
	}
	return nil
}

View File

@@ -0,0 +1,38 @@
package migrations
import (
"context"
"database/sql"
"github.com/pressly/goose/v3"
)
// init registers this migration's up/down functions with goose.
func init() {
	goose.AddMigrationContext(upImportBasepath, downImportBasepath)
}
// upImportBasepath adds the nullable basepath column to documents. It
// no-ops on a brand-new database, whose schema already includes the column.
func upImportBasepath(ctx context.Context, tx *sql.Tx) error {
	// Determine if we have a new DB or not (set by performMigrations)
	isNew := ctx.Value("isNew").(bool)
	if isNew {
		return nil
	}

	// Add basepath column
	_, err := tx.Exec(`ALTER TABLE documents ADD COLUMN basepath TEXT;`)
	if err != nil {
		return err
	}

	// This code is executed when the migration is applied.
	return nil
}
// downImportBasepath drops the basepath column from documents.
func downImportBasepath(ctx context.Context, tx *sql.Tx) error {
	// Drop basepath column. Fix: the statement was missing the TABLE
	// keyword ("ALTER documents ...") and would always fail with a syntax
	// error.
	_, err := tx.Exec("ALTER TABLE documents DROP COLUMN basepath;")
	if err != nil {
		return err
	}
	return nil
}

View File

@@ -0,0 +1,9 @@
# DB Migrations
```bash
goose create migration_name
```
## Note
Since we update both `schema.sql` and the migration files, a newly created DB is inherently up-to-date, so the migrations must not run against it. Each migration therefore checks whether the DB is new (via a value passed in through the context) and, if so, returns immediately.

View File

@@ -1,29 +1,26 @@
// Code generated by sqlc. DO NOT EDIT. // Code generated by sqlc. DO NOT EDIT.
// versions: // versions:
// sqlc v1.21.0 // sqlc v1.29.0
package database package database
import (
"time"
)
type Activity struct { type Activity struct {
ID int64 `json:"id"` ID int64 `json:"id"`
UserID string `json:"user_id"` UserID string `json:"user_id"`
DocumentID string `json:"document_id"` DocumentID string `json:"document_id"`
DeviceID string `json:"device_id"` DeviceID string `json:"device_id"`
StartTime time.Time `json:"start_time"` StartTime string `json:"start_time"`
StartPercentage float64 `json:"start_percentage"`
EndPercentage float64 `json:"end_percentage"`
Duration int64 `json:"duration"` Duration int64 `json:"duration"`
Page int64 `json:"page"` CreatedAt string `json:"created_at"`
Pages int64 `json:"pages"`
CreatedAt time.Time `json:"created_at"`
} }
type Device struct { type Device struct {
ID string `json:"id"` ID string `json:"id"`
UserID string `json:"user_id"` UserID string `json:"user_id"`
DeviceName string `json:"device_name"` DeviceName string `json:"device_name"`
LastSynced string `json:"last_synced"`
CreatedAt string `json:"created_at"` CreatedAt string `json:"created_at"`
Sync bool `json:"sync"` Sync bool `json:"sync"`
} }
@@ -31,6 +28,7 @@ type Device struct {
type Document struct { type Document struct {
ID string `json:"id"` ID string `json:"id"`
Md5 *string `json:"md5"` Md5 *string `json:"md5"`
Basepath *string `json:"basepath"`
Filepath *string `json:"filepath"` Filepath *string `json:"filepath"`
Coverfile *string `json:"coverfile"` Coverfile *string `json:"coverfile"`
Title *string `json:"title"` Title *string `json:"title"`
@@ -46,16 +44,8 @@ type Document struct {
Isbn13 *string `json:"isbn13"` Isbn13 *string `json:"isbn13"`
Synced bool `json:"-"` Synced bool `json:"-"`
Deleted bool `json:"-"` Deleted bool `json:"-"`
UpdatedAt time.Time `json:"updated_at"` UpdatedAt string `json:"updated_at"`
CreatedAt time.Time `json:"created_at"` CreatedAt string `json:"created_at"`
}
type DocumentDeviceSync struct {
UserID string `json:"user_id"`
DocumentID string `json:"document_id"`
DeviceID string `json:"device_id"`
LastSynced time.Time `json:"last_synced"`
Sync bool `json:"sync"`
} }
type DocumentProgress struct { type DocumentProgress struct {
@@ -64,10 +54,31 @@ type DocumentProgress struct {
DeviceID string `json:"device_id"` DeviceID string `json:"device_id"`
Percentage float64 `json:"percentage"` Percentage float64 `json:"percentage"`
Progress string `json:"progress"` Progress string `json:"progress"`
CreatedAt time.Time `json:"created_at"` CreatedAt string `json:"created_at"`
} }
type Metadatum struct { type DocumentUserStatistic struct {
DocumentID string `json:"document_id"`
UserID string `json:"user_id"`
Percentage float64 `json:"percentage"`
LastRead string `json:"last_read"`
LastSeen string `json:"last_seen"`
ReadPercentage float64 `json:"read_percentage"`
TotalTimeSeconds int64 `json:"total_time_seconds"`
TotalWordsRead int64 `json:"total_words_read"`
TotalWpm float64 `json:"total_wpm"`
YearlyTimeSeconds int64 `json:"yearly_time_seconds"`
YearlyWordsRead int64 `json:"yearly_words_read"`
YearlyWpm float64 `json:"yearly_wpm"`
MonthlyTimeSeconds int64 `json:"monthly_time_seconds"`
MonthlyWordsRead int64 `json:"monthly_words_read"`
MonthlyWpm float64 `json:"monthly_wpm"`
WeeklyTimeSeconds int64 `json:"weekly_time_seconds"`
WeeklyWordsRead int64 `json:"weekly_words_read"`
WeeklyWpm float64 `json:"weekly_wpm"`
}
type Metadata struct {
ID int64 `json:"id"` ID int64 `json:"id"`
DocumentID string `json:"document_id"` DocumentID string `json:"document_id"`
Title *string `json:"title"` Title *string `json:"title"`
@@ -77,23 +88,36 @@ type Metadatum struct {
Olid *string `json:"olid"` Olid *string `json:"olid"`
Isbn10 *string `json:"isbn10"` Isbn10 *string `json:"isbn10"`
Isbn13 *string `json:"isbn13"` Isbn13 *string `json:"isbn13"`
CreatedAt time.Time `json:"created_at"` CreatedAt string `json:"created_at"`
} }
type RescaledActivity struct { type Setting struct {
DocumentID string `json:"document_id"` ID int64 `json:"id"`
DeviceID string `json:"device_id"` Name string `json:"name"`
UserID string `json:"user_id"` Value string `json:"value"`
StartTime time.Time `json:"start_time"` CreatedAt string `json:"created_at"`
Pages int64 `json:"pages"`
Page int64 `json:"page"`
Duration int64 `json:"duration"`
} }
type User struct { type User struct {
ID string `json:"id"` ID string `json:"id"`
Pass *string `json:"-"` Pass *string `json:"-"`
AuthHash *string `json:"auth_hash"`
Admin bool `json:"-"` Admin bool `json:"-"`
TimeOffset *string `json:"time_offset"` Timezone *string `json:"timezone"`
CreatedAt time.Time `json:"created_at"` CreatedAt string `json:"created_at"`
}
type UserStreak struct {
UserID string `json:"user_id"`
Window string `json:"window"`
MaxStreak int64 `json:"max_streak"`
MaxStreakStartDate string `json:"max_streak_start_date"`
MaxStreakEndDate string `json:"max_streak_end_date"`
CurrentStreak int64 `json:"current_streak"`
CurrentStreakStartDate string `json:"current_streak_start_date"`
CurrentStreakEndDate string `json:"current_streak_end_date"`
LastTimezone string `json:"last_timezone"`
LastSeen string `json:"last_seen"`
LastRecord string `json:"last_record"`
LastCalculated string `json:"last_calculated"`
} }

View File

@@ -1,3 +1,16 @@
-- name: AddActivity :one
-- Insert one raw reading-activity record and return the created row.
INSERT INTO activity (
    user_id,
    document_id,
    device_id,
    start_time,
    duration,
    start_percentage,
    end_percentage
)
VALUES (?, ?, ?, ?, ?, ?, ?)
RETURNING *;
-- name: AddMetadata :one -- name: AddMetadata :one
INSERT INTO metadata ( INSERT INTO metadata (
document_id, document_id,
@@ -13,26 +26,343 @@ VALUES (?, ?, ?, ?, ?, ?, ?, ?)
RETURNING *; RETURNING *;
-- name: CreateUser :execrows -- name: CreateUser :execrows
INSERT INTO users (id, pass) INSERT INTO users (id, pass, auth_hash, admin)
VALUES (?, ?) VALUES (?, ?, ?, ?)
ON CONFLICT DO NOTHING; ON CONFLICT DO NOTHING;
-- name: DeleteUser :execrows
DELETE FROM users WHERE id = $id;
-- name: DeleteDocument :execrows
UPDATE documents
SET
deleted = 1
WHERE id = $id;
-- name: GetActivity :many
WITH filtered_activity AS (
SELECT
document_id,
device_id,
user_id,
start_time,
duration,
ROUND(CAST(start_percentage AS REAL) * 100, 2) AS start_percentage,
ROUND(CAST(end_percentage AS REAL) * 100, 2) AS end_percentage,
ROUND(CAST(end_percentage - start_percentage AS REAL) * 100, 2) AS read_percentage
FROM activity
WHERE
activity.user_id = $user_id
AND (
(
CAST($doc_filter AS BOOLEAN) = TRUE
AND document_id = $document_id
) OR $doc_filter = FALSE
)
ORDER BY start_time DESC
LIMIT $limit
OFFSET $offset
)
SELECT
document_id,
device_id,
LOCAL_TIME(activity.start_time, users.timezone) AS start_time,
title,
author,
duration,
start_percentage,
end_percentage,
read_percentage
FROM filtered_activity AS activity
LEFT JOIN documents ON documents.id = activity.document_id
LEFT JOIN users ON users.id = activity.user_id;
-- name: GetDailyReadStats :many
WITH RECURSIVE last_30_days AS (
SELECT LOCAL_DATE(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone) AS date
FROM users WHERE users.id = $user_id
UNION ALL
SELECT DATE(date, '-1 days')
FROM last_30_days
LIMIT 30
),
filtered_activity AS (
SELECT
user_id,
start_time,
duration
FROM activity
WHERE start_time > DATE('now', '-31 days')
AND activity.user_id = $user_id
),
activity_days AS (
SELECT
SUM(duration) AS seconds_read,
LOCAL_DATE(start_time, timezone) AS day
FROM filtered_activity AS activity
LEFT JOIN users ON users.id = activity.user_id
GROUP BY day
)
SELECT
CAST(date AS TEXT),
CAST(CASE
WHEN seconds_read IS NULL THEN 0
ELSE seconds_read / 60
END AS INTEGER) AS minutes_read
FROM last_30_days
LEFT JOIN activity_days ON activity_days.day == last_30_days.date
ORDER BY date DESC
LIMIT 30;
-- name: GetDatabaseInfo :one
SELECT
(SELECT COUNT(rowid) FROM activity WHERE activity.user_id = $user_id) AS activity_size,
(SELECT COUNT(rowid) FROM documents) AS documents_size,
(SELECT COUNT(rowid) FROM document_progress WHERE document_progress.user_id = $user_id) AS progress_size,
(SELECT COUNT(rowid) FROM devices WHERE devices.user_id = $user_id) AS devices_size
LIMIT 1;
-- name: GetDeletedDocuments :many
SELECT documents.id
FROM documents
WHERE
documents.deleted = true
AND documents.id IN (sqlc.slice('document_ids'));
-- name: GetDevice :one
SELECT * FROM devices
WHERE id = $device_id LIMIT 1;
-- name: GetDevices :many
SELECT
devices.id,
devices.device_name,
LOCAL_TIME(devices.created_at, users.timezone) AS created_at,
LOCAL_TIME(devices.last_synced, users.timezone) AS last_synced
FROM devices
JOIN users ON users.id = devices.user_id
WHERE users.id = $user_id
ORDER BY devices.last_synced DESC;
-- name: GetDocument :one
SELECT * FROM documents
WHERE id = $document_id LIMIT 1;
-- name: GetDocumentProgress :one
SELECT
document_progress.*,
devices.device_name
FROM document_progress
JOIN devices ON document_progress.device_id = devices.id
WHERE
document_progress.user_id = $user_id
AND document_progress.document_id = $document_id
ORDER BY
document_progress.created_at
DESC
LIMIT 1;
-- name: GetDocuments :many
SELECT * FROM documents
ORDER BY created_at DESC
LIMIT $limit
OFFSET $offset;
-- name: GetDocumentsSize :one
SELECT
COUNT(rowid) AS length
FROM documents AS docs
WHERE $query IS NULL OR (
docs.title LIKE $query OR
docs.author LIKE $query
)
LIMIT 1;
-- name: GetDocumentsWithStats :many
SELECT
docs.id,
docs.title,
docs.author,
docs.description,
docs.isbn10,
docs.isbn13,
docs.filepath,
docs.words,
CAST(COALESCE(dus.total_wpm, 0.0) AS INTEGER) AS wpm,
COALESCE(dus.read_percentage, 0) AS read_percentage,
COALESCE(dus.total_time_seconds, 0) AS total_time_seconds,
STRFTIME('%Y-%m-%d %H:%M:%S', LOCAL_TIME(COALESCE(dus.last_read, STRFTIME('%Y-%m-%dT%H:%M:%SZ', 0, 'unixepoch')), users.timezone))
AS last_read,
ROUND(CAST(CASE
WHEN dus.percentage IS NULL THEN 0.0
WHEN (dus.percentage * 100.0) > 97.0 THEN 100.0
ELSE dus.percentage * 100.0
END AS REAL), 2) AS percentage,
CAST(CASE
WHEN dus.total_time_seconds IS NULL THEN 0.0
ELSE
CAST(dus.total_time_seconds AS REAL)
/ (dus.read_percentage * 100.0)
END AS INTEGER) AS seconds_per_percent
FROM documents AS docs
LEFT JOIN users ON users.id = $user_id
LEFT JOIN
document_user_statistics AS dus
ON dus.document_id = docs.id AND dus.user_id = $user_id
WHERE
(docs.id = sqlc.narg('id') OR $id IS NULL)
AND (docs.deleted = sqlc.narg(deleted) OR $deleted IS NULL)
AND (
(
docs.title LIKE sqlc.narg('query') OR
docs.author LIKE $query
) OR $query IS NULL
)
ORDER BY dus.last_read DESC, docs.created_at DESC
LIMIT $limit
OFFSET $offset;
-- name: GetLastActivity :one
SELECT start_time
FROM activity
WHERE device_id = $device_id
AND user_id = $user_id
ORDER BY start_time DESC LIMIT 1;
-- name: GetMissingDocuments :many
SELECT documents.* FROM documents
WHERE
documents.filepath IS NOT NULL
AND documents.deleted = false
AND documents.id NOT IN (sqlc.slice('document_ids'));
-- name: GetProgress :many
SELECT
documents.title,
documents.author,
devices.device_name,
ROUND(CAST(progress.percentage AS REAL) * 100, 2) AS percentage,
progress.document_id,
progress.user_id,
LOCAL_TIME(progress.created_at, users.timezone) AS created_at
FROM document_progress AS progress
LEFT JOIN users ON progress.user_id = users.id
LEFT JOIN devices ON progress.device_id = devices.id
LEFT JOIN documents ON progress.document_id = documents.id
WHERE
progress.user_id = $user_id
AND (
(
CAST($doc_filter AS BOOLEAN) = TRUE
AND document_id = $document_id
) OR $doc_filter = FALSE
)
ORDER BY created_at DESC
LIMIT $limit
OFFSET $offset;
-- name: GetUser :one -- name: GetUser :one
SELECT * FROM users SELECT * FROM users
WHERE id = $user_id LIMIT 1; WHERE id = $user_id LIMIT 1;
-- name: GetUserStreaks :many
SELECT * FROM user_streaks
WHERE user_id = $user_id;
-- name: GetUsers :many
SELECT * FROM users;
-- name: GetUserStatistics :many
SELECT
user_id,
CAST(SUM(total_words_read) AS INTEGER) AS total_words_read,
CAST(SUM(total_time_seconds) AS INTEGER) AS total_seconds,
ROUND(COALESCE(CAST(SUM(total_words_read) AS REAL) / (SUM(total_time_seconds) / 60.0), 0.0), 2)
AS total_wpm,
CAST(SUM(yearly_words_read) AS INTEGER) AS yearly_words_read,
CAST(SUM(yearly_time_seconds) AS INTEGER) AS yearly_seconds,
ROUND(COALESCE(CAST(SUM(yearly_words_read) AS REAL) / (SUM(yearly_time_seconds) / 60.0), 0.0), 2)
AS yearly_wpm,
CAST(SUM(monthly_words_read) AS INTEGER) AS monthly_words_read,
CAST(SUM(monthly_time_seconds) AS INTEGER) AS monthly_seconds,
ROUND(COALESCE(CAST(SUM(monthly_words_read) AS REAL) / (SUM(monthly_time_seconds) / 60.0), 0.0), 2)
AS monthly_wpm,
CAST(SUM(weekly_words_read) AS INTEGER) AS weekly_words_read,
CAST(SUM(weekly_time_seconds) AS INTEGER) AS weekly_seconds,
ROUND(COALESCE(CAST(SUM(weekly_words_read) AS REAL) / (SUM(weekly_time_seconds) / 60.0), 0.0), 2)
AS weekly_wpm
FROM document_user_statistics
WHERE total_words_read > 0
GROUP BY user_id
ORDER BY total_wpm DESC;
-- name: GetWantedDocuments :many
SELECT
CAST(value AS TEXT) AS id,
CAST((documents.filepath IS NULL) AS BOOLEAN) AS want_file,
CAST((documents.id IS NULL) AS BOOLEAN) AS want_metadata
FROM json_each(?1)
LEFT JOIN documents
ON value = documents.id
WHERE (
documents.id IS NOT NULL
AND documents.deleted = false
AND documents.filepath IS NULL
)
OR (documents.id IS NULL)
OR CAST($document_ids AS TEXT) != CAST($document_ids AS TEXT);
-- name: UpdateProgress :one
INSERT OR REPLACE INTO document_progress (
user_id,
document_id,
device_id,
percentage,
progress
)
VALUES (?, ?, ?, ?, ?)
RETURNING *;
-- name: UpdateUser :one -- name: UpdateUser :one
UPDATE users UPDATE users
SET SET
pass = COALESCE($password, pass), pass = COALESCE($password, pass),
time_offset = COALESCE($time_offset, time_offset) auth_hash = COALESCE($auth_hash, auth_hash),
timezone = COALESCE($timezone, timezone),
admin = COALESCE($admin, admin)
WHERE id = $user_id WHERE id = $user_id
RETURNING *; RETURNING *;
-- name: UpdateSettings :one
INSERT INTO settings (name, value)
VALUES (?, ?)
ON CONFLICT DO UPDATE
SET
name = COALESCE(excluded.name, name),
value = COALESCE(excluded.value, value)
RETURNING *;
-- name: UpsertDevice :one
INSERT INTO devices (id, user_id, last_synced, device_name)
VALUES (?, ?, ?, ?)
ON CONFLICT DO UPDATE
SET
device_name = COALESCE(excluded.device_name, device_name),
last_synced = COALESCE(excluded.last_synced, last_synced)
RETURNING *;
-- name: UpsertDocument :one -- name: UpsertDocument :one
INSERT INTO documents ( INSERT INTO documents (
id, id,
md5, md5,
basepath,
filepath, filepath,
coverfile, coverfile,
title, title,
@@ -47,10 +377,11 @@ INSERT INTO documents (
isbn10, isbn10,
isbn13 isbn13
) )
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT DO UPDATE ON CONFLICT DO UPDATE
SET SET
md5 = COALESCE(excluded.md5, md5), md5 = COALESCE(excluded.md5, md5),
basepath = COALESCE(excluded.basepath, basepath),
filepath = COALESCE(excluded.filepath, filepath), filepath = COALESCE(excluded.filepath, filepath),
coverfile = COALESCE(excluded.coverfile, coverfile), coverfile = COALESCE(excluded.coverfile, coverfile),
title = COALESCE(excluded.title, title), title = COALESCE(excluded.title, title),
@@ -65,406 +396,3 @@ SET
isbn10 = COALESCE(excluded.isbn10, isbn10), isbn10 = COALESCE(excluded.isbn10, isbn10),
isbn13 = COALESCE(excluded.isbn13, isbn13) isbn13 = COALESCE(excluded.isbn13, isbn13)
RETURNING *; RETURNING *;
-- name: DeleteDocument :execrows
UPDATE documents
SET
deleted = 1
WHERE id = $id;
-- name: UpdateDocumentSync :one
UPDATE documents
SET
synced = $synced
WHERE id = $id
RETURNING *;
-- name: UpdateDocumentDeleted :one
UPDATE documents
SET
deleted = $deleted
WHERE id = $id
RETURNING *;
-- name: GetDocument :one
SELECT * FROM documents
WHERE id = $document_id LIMIT 1;
-- name: UpsertDevice :one
INSERT INTO devices (id, user_id, device_name)
VALUES (?, ?, ?)
ON CONFLICT DO UPDATE
SET
device_name = COALESCE(excluded.device_name, device_name)
RETURNING *;
-- name: GetDevice :one
SELECT * FROM devices
WHERE id = $device_id LIMIT 1;
-- name: UpdateProgress :one
INSERT OR REPLACE INTO document_progress (
user_id,
document_id,
device_id,
percentage,
progress
)
VALUES (?, ?, ?, ?, ?)
RETURNING *;
-- name: GetProgress :one
SELECT
document_progress.*,
devices.device_name
FROM document_progress
JOIN devices ON document_progress.device_id = devices.id
WHERE
document_progress.user_id = $user_id
AND document_progress.document_id = $document_id
ORDER BY
document_progress.created_at
DESC
LIMIT 1;
-- name: GetLastActivity :one
SELECT start_time
FROM activity
WHERE device_id = $device_id
AND user_id = $user_id
ORDER BY start_time DESC LIMIT 1;
-- name: AddActivity :one
INSERT INTO activity (
user_id,
document_id,
device_id,
start_time,
duration,
page,
pages
)
VALUES (?, ?, ?, ?, ?, ?, ?)
RETURNING *;
-- name: GetMissingDocuments :many
SELECT documents.* FROM documents
WHERE
documents.filepath IS NOT NULL
AND documents.deleted = false
AND documents.id NOT IN (sqlc.slice('document_ids'));
-- name: GetWantedDocuments :many
SELECT
CAST(value AS TEXT) AS id,
CAST((documents.filepath IS NULL) AS BOOLEAN) AS want_file,
CAST((IFNULL(documents.synced, false) != true) AS BOOLEAN) AS want_metadata
FROM json_each(?1)
LEFT JOIN documents
ON value = documents.id
WHERE (
documents.id IS NOT NULL
AND documents.deleted = false
AND (
documents.synced = false
OR documents.filepath IS NULL
)
)
OR (documents.id IS NULL)
OR CAST($document_ids AS TEXT) != CAST($document_ids AS TEXT);
-- name: GetDeletedDocuments :many
SELECT documents.id
FROM documents
WHERE
documents.deleted = true
AND documents.id IN (sqlc.slice('document_ids'));
-- name: GetDocuments :many
SELECT * FROM documents
ORDER BY created_at DESC
LIMIT $limit
OFFSET $offset;
-- name: GetDocumentWithStats :one
WITH true_progress AS (
SELECT
start_time AS last_read,
SUM(duration) AS total_time_seconds,
document_id,
page,
pages,
-- Determine Read Pages
COUNT(DISTINCT page) AS read_pages,
-- Derive Percentage of Book
ROUND(CAST(page AS REAL) / CAST(pages AS REAL) * 100, 2) AS percentage
FROM rescaled_activity
WHERE user_id = $user_id
AND document_id = $document_id
GROUP BY document_id
HAVING MAX(start_time)
LIMIT 1
)
SELECT
documents.*,
CAST(IFNULL(page, 0) AS INTEGER) AS page,
CAST(IFNULL(pages, 0) AS INTEGER) AS pages,
CAST(IFNULL(total_time_seconds, 0) AS INTEGER) AS total_time_seconds,
CAST(DATETIME(IFNULL(last_read, "1970-01-01"), time_offset) AS TEXT) AS last_read,
CAST(IFNULL(read_pages, 0) AS INTEGER) AS read_pages,
-- Calculate Seconds / Page
-- 1. Calculate Total Time in Seconds (Sum Duration in Activity)
-- 2. Divide by Read Pages (Distinct Pages in Activity)
CAST(CASE
WHEN total_time_seconds IS NULL THEN 0.0
ELSE ROUND(CAST(total_time_seconds AS REAL) / CAST(read_pages AS REAL))
END AS INTEGER) AS seconds_per_page,
-- Arbitrarily >97% is Complete
CAST(CASE
WHEN percentage > 97.0 THEN 100.0
WHEN percentage IS NULL THEN 0.0
ELSE percentage
END AS REAL) AS percentage
FROM documents
LEFT JOIN true_progress ON true_progress.document_id = documents.id
LEFT JOIN users ON users.id = $user_id
WHERE documents.id = $document_id
ORDER BY true_progress.last_read DESC, documents.created_at DESC
LIMIT 1;
-- name: GetDocumentsWithStats :many
WITH true_progress AS (
SELECT
start_time AS last_read,
SUM(duration) AS total_time_seconds,
document_id,
page,
pages,
ROUND(CAST(page AS REAL) / CAST(pages AS REAL) * 100, 2) AS percentage
FROM activity
WHERE user_id = $user_id
GROUP BY document_id
HAVING MAX(start_time)
)
SELECT
documents.*,
CAST(IFNULL(page, 0) AS INTEGER) AS page,
CAST(IFNULL(pages, 0) AS INTEGER) AS pages,
CAST(IFNULL(total_time_seconds, 0) AS INTEGER) AS total_time_seconds,
CAST(DATETIME(IFNULL(last_read, "1970-01-01"), time_offset) AS TEXT) AS last_read,
CAST(CASE
WHEN percentage > 97.0 THEN 100.0
WHEN percentage IS NULL THEN 0.0
ELSE percentage
END AS REAL) AS percentage
FROM documents
LEFT JOIN true_progress ON true_progress.document_id = documents.id
LEFT JOIN users ON users.id = $user_id
WHERE documents.deleted == false
ORDER BY true_progress.last_read DESC, documents.created_at DESC
LIMIT $limit
OFFSET $offset;
-- name: GetUsers :many
SELECT * FROM users
WHERE
users.id = $user
OR ?1 IN (
SELECT id
FROM users
WHERE id = $user
AND admin = 1
)
ORDER BY created_at DESC
LIMIT $limit
OFFSET $offset;
-- name: GetActivity :many
SELECT
document_id,
CAST(DATETIME(activity.start_time, time_offset) AS TEXT) AS start_time,
title,
author,
duration,
page,
pages
FROM activity
LEFT JOIN documents ON documents.id = activity.document_id
LEFT JOIN users ON users.id = activity.user_id
WHERE
activity.user_id = $user_id
AND (
CAST($doc_filter AS BOOLEAN) = TRUE
AND document_id = $document_id
)
OR $doc_filter = FALSE
ORDER BY start_time DESC
LIMIT $limit
OFFSET $offset;
-- name: GetDevices :many
SELECT
devices.device_name,
CAST(DATETIME(devices.created_at, users.time_offset) AS TEXT) AS created_at,
CAST(DATETIME(MAX(activity.created_at), users.time_offset) AS TEXT) AS last_sync
FROM activity
JOIN devices ON devices.id = activity.device_id
JOIN users ON users.id = $user_id
WHERE devices.user_id = $user_id
GROUP BY activity.device_id;
-- name: GetDocumentReadStats :one
SELECT
COUNT(DISTINCT page) AS pages_read,
SUM(duration) AS total_time
FROM rescaled_activity
WHERE document_id = $document_id
AND user_id = $user_id
AND start_time >= $start_time;
-- name: GetDocumentReadStatsCapped :one
WITH capped_stats AS (
SELECT MIN(SUM(duration), CAST($page_duration_cap AS INTEGER)) AS durations
FROM rescaled_activity
WHERE document_id = $document_id
AND user_id = $user_id
AND start_time >= $start_time
GROUP BY page
)
SELECT
CAST(COUNT(*) AS INTEGER) AS pages_read,
CAST(SUM(durations) AS INTEGER) AS total_time
FROM capped_stats;
-- name: GetDocumentDaysRead :one
WITH document_days AS (
SELECT DATE(start_time, time_offset) AS dates
FROM activity
JOIN users ON users.id = activity.user_id
WHERE document_id = $document_id
AND user_id = $user_id
GROUP BY dates
)
SELECT CAST(COUNT(*) AS INTEGER) AS days_read
FROM document_days;
-- name: GetUserWindowStreaks :one
WITH document_windows AS (
SELECT
CASE
WHEN ?2 = "WEEK" THEN DATE(start_time, time_offset, 'weekday 0', '-7 day')
WHEN ?2 = "DAY" THEN DATE(start_time, time_offset)
END AS read_window,
time_offset
FROM activity
JOIN users ON users.id = activity.user_id
WHERE user_id = $user_id
AND CAST($window AS TEXT) = CAST($window AS TEXT)
GROUP BY read_window
),
partitions AS (
SELECT
document_windows.*,
row_number() OVER (
PARTITION BY 1 ORDER BY read_window DESC
) AS seqnum
FROM document_windows
),
streaks AS (
SELECT
COUNT(*) AS streak,
MIN(read_window) AS start_date,
MAX(read_window) AS end_date,
time_offset
FROM partitions
GROUP BY
CASE
WHEN ?2 = "DAY" THEN DATE(read_window, '+' || seqnum || ' day')
WHEN ?2 = "WEEK" THEN DATE(read_window, '+' || (seqnum * 7) || ' day')
END,
time_offset
ORDER BY end_date DESC
),
max_streak AS (
SELECT
MAX(streak) AS max_streak,
start_date AS max_streak_start_date,
end_date AS max_streak_end_date
FROM streaks
LIMIT 1
),
current_streak AS (
SELECT
streak AS current_streak,
start_date AS current_streak_start_date,
end_date AS current_streak_end_date
FROM streaks
WHERE CASE
WHEN ?2 = "WEEK" THEN
DATE('now', time_offset, 'weekday 0', '-14 day') = current_streak_end_date
OR DATE('now', time_offset, 'weekday 0', '-7 day') = current_streak_end_date
WHEN ?2 = "DAY" THEN
DATE('now', time_offset, '-1 day') = current_streak_end_date
OR DATE('now', time_offset) = current_streak_end_date
END
LIMIT 1
)
SELECT
CAST(IFNULL(max_streak, 0) AS INTEGER) AS max_streak,
CAST(IFNULL(max_streak_start_date, "N/A") AS TEXT) AS max_streak_start_date,
CAST(IFNULL(max_streak_end_date, "N/A") AS TEXT) AS max_streak_end_date,
IFNULL(current_streak, 0) AS current_streak,
CAST(IFNULL(current_streak_start_date, "N/A") AS TEXT) AS current_streak_start_date,
CAST(IFNULL(current_streak_end_date, "N/A") AS TEXT) AS current_streak_end_date
FROM max_streak
LEFT JOIN current_streak ON 1 = 1
LIMIT 1;
-- name: GetDatabaseInfo :one
SELECT
(SELECT COUNT(rowid) FROM activity WHERE activity.user_id = $user_id) AS activity_size,
(SELECT COUNT(rowid) FROM documents) AS documents_size,
(SELECT COUNT(rowid) FROM document_progress WHERE document_progress.user_id = $user_id) AS progress_size,
(SELECT COUNT(rowid) FROM devices WHERE devices.user_id = $user_id) AS devices_size
LIMIT 1;
-- name: GetDailyReadStats :many
WITH RECURSIVE last_30_days AS (
SELECT DATE('now', time_offset) AS date
FROM users WHERE users.id = $user_id
UNION ALL
SELECT DATE(date, '-1 days')
FROM last_30_days
LIMIT 30
),
activity_records AS (
SELECT
SUM(duration) AS seconds_read,
DATE(start_time, time_offset) AS day
FROM activity
LEFT JOIN users ON users.id = activity.user_id
WHERE user_id = $user_id
AND start_time > DATE('now', '-31 days')
GROUP BY day
ORDER BY day DESC
LIMIT 30
)
SELECT
CAST(date AS TEXT),
CAST(CASE
WHEN seconds_read IS NULL THEN 0
ELSE seconds_read / 60
END AS INTEGER) AS minutes_read
FROM last_30_days
LEFT JOIN activity_records ON activity_records.day == last_30_days.date
ORDER BY date DESC
LIMIT 30;

File diff suppressed because it is too large Load Diff

View File

@@ -1,15 +1,17 @@
PRAGMA foreign_keys = ON; ---------------------------------------------------------------
PRAGMA journal_mode = WAL; ------------------------ Normal Tables ------------------------
---------------------------------------------------------------
-- Authentication -- Authentication
CREATE TABLE IF NOT EXISTS users ( CREATE TABLE IF NOT EXISTS users (
id TEXT NOT NULL PRIMARY KEY, id TEXT NOT NULL PRIMARY KEY,
pass TEXT NOT NULL, pass TEXT NOT NULL,
auth_hash TEXT NOT NULL,
admin BOOLEAN NOT NULL DEFAULT 0 CHECK (admin IN (0, 1)), admin BOOLEAN NOT NULL DEFAULT 0 CHECK (admin IN (0, 1)),
time_offset TEXT NOT NULL DEFAULT '0 hours', timezone TEXT NOT NULL DEFAULT 'Europe/London',
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP created_at DATETIME NOT NULL DEFAULT (STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'))
); );
-- Books / Documents -- Books / Documents
@@ -17,6 +19,7 @@ CREATE TABLE IF NOT EXISTS documents (
id TEXT NOT NULL PRIMARY KEY, id TEXT NOT NULL PRIMARY KEY,
md5 TEXT, md5 TEXT,
basepath TEXT,
filepath TEXT, filepath TEXT,
coverfile TEXT, coverfile TEXT,
title TEXT, title TEXT,
@@ -35,14 +38,13 @@ CREATE TABLE IF NOT EXISTS documents (
synced BOOLEAN NOT NULL DEFAULT 0 CHECK (synced IN (0, 1)), synced BOOLEAN NOT NULL DEFAULT 0 CHECK (synced IN (0, 1)),
deleted BOOLEAN NOT NULL DEFAULT 0 CHECK (deleted IN (0, 1)), deleted BOOLEAN NOT NULL DEFAULT 0 CHECK (deleted IN (0, 1)),
updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, updated_at DATETIME NOT NULL DEFAULT (STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now')),
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP created_at DATETIME NOT NULL DEFAULT (STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'))
); );
-- Metadata -- Metadata
CREATE TABLE IF NOT EXISTS metadata ( CREATE TABLE IF NOT EXISTS metadata (
id INTEGER PRIMARY KEY AUTOINCREMENT, id INTEGER PRIMARY KEY AUTOINCREMENT,
document_id TEXT NOT NULL, document_id TEXT NOT NULL,
title TEXT, title TEXT,
@@ -53,7 +55,7 @@ CREATE TABLE IF NOT EXISTS metadata (
isbn10 TEXT, isbn10 TEXT,
isbn13 TEXT, isbn13 TEXT,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, created_at DATETIME NOT NULL DEFAULT (STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now')),
FOREIGN KEY (document_id) REFERENCES documents (id) FOREIGN KEY (document_id) REFERENCES documents (id)
); );
@@ -64,27 +66,13 @@ CREATE TABLE IF NOT EXISTS devices (
user_id TEXT NOT NULL, user_id TEXT NOT NULL,
device_name TEXT NOT NULL, device_name TEXT NOT NULL,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, last_synced DATETIME NOT NULL DEFAULT (STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now')),
created_at DATETIME NOT NULL DEFAULT (STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now')),
sync BOOLEAN NOT NULL DEFAULT 1 CHECK (sync IN (0, 1)), sync BOOLEAN NOT NULL DEFAULT 1 CHECK (sync IN (0, 1)),
FOREIGN KEY (user_id) REFERENCES users (id) FOREIGN KEY (user_id) REFERENCES users (id)
); );
-- Document Device Sync
CREATE TABLE IF NOT EXISTS document_device_sync (
user_id TEXT NOT NULL,
document_id TEXT NOT NULL,
device_id TEXT NOT NULL,
last_synced DATETIME NOT NULL,
sync BOOLEAN NOT NULL DEFAULT 1 CHECK (sync IN (0, 1)),
FOREIGN KEY (user_id) REFERENCES users (id),
FOREIGN KEY (document_id) REFERENCES documents (id),
FOREIGN KEY (device_id) REFERENCES devices (id),
PRIMARY KEY (user_id, document_id, device_id)
);
-- User Document Progress -- User Document Progress
CREATE TABLE IF NOT EXISTS document_progress ( CREATE TABLE IF NOT EXISTS document_progress (
user_id TEXT NOT NULL, user_id TEXT NOT NULL,
@@ -93,7 +81,7 @@ CREATE TABLE IF NOT EXISTS document_progress (
percentage REAL NOT NULL, percentage REAL NOT NULL,
progress TEXT NOT NULL, progress TEXT NOT NULL,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, created_at DATETIME NOT NULL DEFAULT (STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now')),
FOREIGN KEY (user_id) REFERENCES users (id), FOREIGN KEY (user_id) REFERENCES users (id),
FOREIGN KEY (document_id) REFERENCES documents (id), FOREIGN KEY (document_id) REFERENCES documents (id),
@@ -109,116 +97,104 @@ CREATE TABLE IF NOT EXISTS activity (
device_id TEXT NOT NULL, device_id TEXT NOT NULL,
start_time DATETIME NOT NULL, start_time DATETIME NOT NULL,
start_percentage REAL NOT NULL,
end_percentage REAL NOT NULL,
duration INTEGER NOT NULL, duration INTEGER NOT NULL,
page INTEGER NOT NULL, created_at DATETIME NOT NULL DEFAULT (STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now')),
pages INTEGER NOT NULL,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users (id), FOREIGN KEY (user_id) REFERENCES users (id),
FOREIGN KEY (document_id) REFERENCES documents (id), FOREIGN KEY (document_id) REFERENCES documents (id),
FOREIGN KEY (device_id) REFERENCES devices (id) FOREIGN KEY (device_id) REFERENCES devices (id)
); );
-- Indexes -- Settings
CREATE TABLE IF NOT EXISTS settings (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL,
value TEXT NOT NULL,
created_at DATETIME NOT NULL DEFAULT (STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'))
);
-- Document User Statistics Table
CREATE TABLE IF NOT EXISTS document_user_statistics (
document_id TEXT NOT NULL,
user_id TEXT NOT NULL,
percentage REAL NOT NULL,
last_read DATETIME NOT NULL,
last_seen DATETIME NOT NULL,
read_percentage REAL NOT NULL,
total_time_seconds INTEGER NOT NULL,
total_words_read INTEGER NOT NULL,
total_wpm REAL NOT NULL,
yearly_time_seconds INTEGER NOT NULL,
yearly_words_read INTEGER NOT NULL,
yearly_wpm REAL NOT NULL,
monthly_time_seconds INTEGER NOT NULL,
monthly_words_read INTEGER NOT NULL,
monthly_wpm REAL NOT NULL,
weekly_time_seconds INTEGER NOT NULL,
weekly_words_read INTEGER NOT NULL,
weekly_wpm REAL NOT NULL,
UNIQUE(document_id, user_id) ON CONFLICT REPLACE
);
-- User Streaks Table
CREATE TABLE IF NOT EXISTS user_streaks (
user_id TEXT NOT NULL,
window TEXT NOT NULL,
max_streak INTEGER NOT NULL,
max_streak_start_date TEXT NOT NULL,
max_streak_end_date TEXT NOT NULL,
current_streak INTEGER NOT NULL,
current_streak_start_date TEXT NOT NULL,
current_streak_end_date TEXT NOT NULL,
last_timezone TEXT NOT NULL,
last_seen TEXT NOT NULL,
last_record TEXT NOT NULL,
last_calculated TEXT NOT NULL,
UNIQUE(user_id, window) ON CONFLICT REPLACE
);
---------------------------------------------------------------
--------------------------- Indexes ---------------------------
---------------------------------------------------------------
CREATE INDEX IF NOT EXISTS activity_start_time ON activity (start_time); CREATE INDEX IF NOT EXISTS activity_start_time ON activity (start_time);
CREATE INDEX IF NOT EXISTS activity_created_at ON activity (created_at);
CREATE INDEX IF NOT EXISTS activity_user_id ON activity (user_id);
CREATE INDEX IF NOT EXISTS activity_user_id_document_id ON activity ( CREATE INDEX IF NOT EXISTS activity_user_id_document_id ON activity (
user_id, user_id,
document_id document_id
); );
---------------------------------------------------------------
--------------------------- Triggers --------------------------
---------------------------------------------------------------
-- Update Trigger -- Update Trigger
CREATE TRIGGER IF NOT EXISTS update_documents_updated_at CREATE TRIGGER IF NOT EXISTS update_documents_updated_at
BEFORE UPDATE ON documents BEGIN BEFORE UPDATE ON documents BEGIN
UPDATE documents UPDATE documents
SET updated_at = CURRENT_TIMESTAMP SET updated_at = STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now')
WHERE id = old.id; WHERE id = old.id;
END; END;
-- Rescaled Activity View (Adapted from KOReader) -- Delete User
CREATE VIEW IF NOT EXISTS rescaled_activity AS CREATE TRIGGER IF NOT EXISTS user_deleted
BEFORE DELETE ON users BEGIN
WITH RECURSIVE nums (idx) AS ( DELETE FROM activity WHERE activity.user_id=OLD.id;
SELECT 1 AS idx DELETE FROM devices WHERE devices.user_id=OLD.id;
UNION ALL DELETE FROM document_progress WHERE document_progress.user_id=OLD.id;
SELECT idx + 1 END;
FROM nums
LIMIT 1000
),
current_pages AS (
SELECT
document_id,
user_id,
pages
FROM activity
GROUP BY document_id, user_id
HAVING MAX(start_time)
ORDER BY start_time DESC
),
intermediate AS (
SELECT
activity.document_id,
activity.device_id,
activity.user_id,
activity.start_time,
activity.duration,
activity.page,
current_pages.pages,
-- Derive first page
((activity.page - 1) * current_pages.pages) / activity.pages
+ 1 AS first_page,
-- Derive last page
MAX(
((activity.page - 1) * current_pages.pages)
/ activity.pages
+ 1,
(activity.page * current_pages.pages) / activity.pages
) AS last_page
FROM activity
INNER JOIN current_pages ON
current_pages.document_id = activity.document_id
AND current_pages.user_id = activity.user_id
),
-- Improves performance
num_limit AS (
SELECT * FROM nums
LIMIT (SELECT MAX(last_page - first_page + 1) FROM intermediate)
),
rescaled_raw AS (
SELECT
document_id,
device_id,
user_id,
start_time,
last_page,
pages,
first_page + num_limit.idx - 1 AS page,
duration / (
last_page - first_page + 1.0
) AS duration
FROM intermediate
JOIN num_limit ON
num_limit.idx <= (last_page - first_page + 1)
)
SELECT
document_id,
device_id,
user_id,
start_time,
pages,
page,
-- Round up if last page (maintains total duration)
CAST(CASE
WHEN page = last_page AND duration != CAST(duration AS INTEGER)
THEN duration + 1
ELSE duration
END AS INTEGER) AS duration
FROM rescaled_raw;

154
database/user_streaks.sql Normal file
View File

@@ -0,0 +1,154 @@
WITH updated_users AS (
SELECT a.user_id
FROM activity AS a
LEFT JOIN users AS u ON u.id = a.user_id
LEFT JOIN user_streaks AS s ON a.user_id = s.user_id AND s.window = 'DAY'
WHERE
a.created_at > COALESCE(s.last_seen, '1970-01-01')
AND LOCAL_DATE(s.last_record, u.timezone) != LOCAL_DATE(a.start_time, u.timezone)
GROUP BY a.user_id
),
outdated_users AS (
SELECT
a.user_id,
u.timezone AS last_timezone,
MAX(a.created_at) AS last_seen,
MAX(a.start_time) AS last_record,
STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now') AS last_calculated
FROM activity AS a
LEFT JOIN users AS u ON u.id = a.user_id
LEFT JOIN user_streaks AS s ON a.user_id = s.user_id AND s.window = 'DAY'
GROUP BY a.user_id
HAVING
-- User Changed Timezones
s.last_timezone != u.timezone
-- Users Date Changed
OR LOCAL_DATE(COALESCE(s.last_calculated, '1970-01-01T00:00:00Z'), u.timezone) !=
LOCAL_DATE(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), u.timezone)
-- User Added New Data
OR a.user_id IN updated_users
),
document_windows AS (
SELECT
activity.user_id,
users.timezone,
DATE(
LOCAL_DATE(activity.start_time, users.timezone),
'weekday 0', '-7 day'
) AS weekly_read,
LOCAL_DATE(activity.start_time, users.timezone) AS daily_read
FROM activity
INNER JOIN outdated_users ON outdated_users.user_id = activity.user_id
LEFT JOIN users ON users.id = activity.user_id
GROUP BY activity.user_id, weekly_read, daily_read
),
weekly_partitions AS (
SELECT
user_id,
timezone,
'WEEK' AS "window",
weekly_read AS read_window,
ROW_NUMBER() OVER (
PARTITION BY user_id ORDER BY weekly_read DESC
) AS seqnum
FROM document_windows
GROUP BY user_id, weekly_read
),
daily_partitions AS (
SELECT
user_id,
timezone,
'DAY' AS "window",
daily_read AS read_window,
ROW_NUMBER() OVER (
PARTITION BY user_id ORDER BY daily_read DESC
) AS seqnum
FROM document_windows
GROUP BY user_id, daily_read
),
streaks AS (
SELECT
COUNT(*) AS streak,
MIN(read_window) AS start_date,
MAX(read_window) AS end_date,
window,
user_id,
timezone
FROM daily_partitions
GROUP BY
timezone,
user_id,
DATE(read_window, '+' || seqnum || ' day')
UNION ALL
SELECT
COUNT(*) AS streak,
MIN(read_window) AS start_date,
MAX(read_window) AS end_date,
window,
user_id,
timezone
FROM weekly_partitions
GROUP BY
timezone,
user_id,
DATE(read_window, '+' || (seqnum * 7) || ' day')
),
max_streak AS (
SELECT
MAX(streak) AS max_streak,
start_date AS max_streak_start_date,
end_date AS max_streak_end_date,
window,
user_id
FROM streaks
GROUP BY user_id, window
),
current_streak AS (
SELECT
streak AS current_streak,
start_date AS current_streak_start_date,
end_date AS current_streak_end_date,
window,
user_id
FROM streaks
WHERE CASE
WHEN window = "WEEK" THEN
DATE(LOCAL_DATE(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone), 'weekday 0', '-14 day') = current_streak_end_date
OR DATE(LOCAL_DATE(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone), 'weekday 0', '-7 day') = current_streak_end_date
WHEN window = "DAY" THEN
DATE(LOCAL_DATE(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone), '-1 day') = current_streak_end_date
OR DATE(LOCAL_DATE(STRFTIME('%Y-%m-%dT%H:%M:%SZ', 'now'), timezone)) = current_streak_end_date
END
GROUP BY user_id, window
)
INSERT INTO user_streaks
SELECT
max_streak.user_id,
max_streak.window,
IFNULL(max_streak, 0) AS max_streak,
IFNULL(max_streak_start_date, "N/A") AS max_streak_start_date,
IFNULL(max_streak_end_date, "N/A") AS max_streak_end_date,
IFNULL(current_streak.current_streak, 0) AS current_streak,
IFNULL(current_streak.current_streak_start_date, "N/A") AS current_streak_start_date,
IFNULL(current_streak.current_streak_end_date, "N/A") AS current_streak_end_date,
outdated_users.last_timezone AS last_timezone,
outdated_users.last_seen AS last_seen,
outdated_users.last_record AS last_record,
outdated_users.last_calculated AS last_calculated
FROM max_streak
JOIN outdated_users ON max_streak.user_id = outdated_users.user_id
LEFT JOIN current_streak ON
current_streak.user_id = max_streak.user_id
AND current_streak.window = max_streak.window;

205
database/users_test.go Normal file
View File

@@ -0,0 +1,205 @@
package database
import (
"context"
"database/sql"
"fmt"
"testing"
"time"
"github.com/stretchr/testify/suite"
"reichard.io/antholume/config"
"reichard.io/antholume/utils"
)
// Shared fixtures for the users test suite: the user created in
// SetupTest and exercised by each test. Declared as addressable
// package-level vars because the query params take *string.
var (
	testUserID   = "testUser" // explicit string type was redundant (gofmt -s / ST1023)
	testUserPass = "testPass"
)
// UsersTestSuite exercises the user-related database queries against an
// in-memory database that SetupTest recreates before every test.
type UsersTestSuite struct {
	suite.Suite
	// dbm owns the in-memory DB and the generated query layer under test.
	dbm *DBManager
}
// TestUsers runs every test in UsersTestSuite via testify's suite runner.
func TestUsers(t *testing.T) {
	suite.Run(t, &UsersTestSuite{})
}
// SetupTest provisions a fresh in-memory database before each test and
// seeds it with one user, one document, and one device.
func (suite *UsersTestSuite) SetupTest() {
	conf := config.Config{
		DBType: "memory",
	}
	suite.dbm = NewMgr(&conf)

	// Seed the test user with a hex-encoded random auth hash.
	tokenBytes, _ := utils.GenerateToken(64)
	hashHex := fmt.Sprintf("%x", tokenBytes)
	_, err := suite.dbm.Queries.CreateUser(context.Background(), CreateUserParams{
		ID:       testUserID,
		Pass:     &testUserPass,
		AuthHash: &hashHex,
	})
	suite.NoError(err)

	// Seed a document for activity records to reference.
	_, err = suite.dbm.Queries.UpsertDocument(context.Background(), UpsertDocumentParams{
		ID:     documentID,
		Title:  &documentTitle,
		Author: &documentAuthor,
		Words:  &documentWords,
	})
	suite.NoError(err)

	// Seed a device owned by the test user.
	_, err = suite.dbm.Queries.UpsertDevice(context.Background(), UpsertDeviceParams{
		ID:         deviceID,
		UserID:     testUserID,
		DeviceName: deviceName,
	})
	suite.NoError(err)
}
// TestGetUser verifies the seeded user can be fetched and that the
// stored password round-trips unchanged.
func (suite *UsersTestSuite) TestGetUser() {
	fetched, err := suite.dbm.Queries.GetUser(context.Background(), testUserID)
	suite.Nil(err, "should have nil err")
	suite.Equal(testUserPass, *fetched.Pass)
}
// TestCreateUser verifies a brand-new user can be inserted (exactly one
// row changed) and then read back with the same password.
func (suite *UsersTestSuite) TestCreateUser() {
	newID := "user1"
	newPass := "pass1"

	// Derive a hex-encoded auth hash from a fresh random token.
	tokenBytes, err := utils.GenerateToken(64)
	suite.Nil(err, "should have nil err")
	tokenHex := fmt.Sprintf("%x", tokenBytes)

	changed, err := suite.dbm.Queries.CreateUser(context.Background(), CreateUserParams{
		ID:       newID,
		Pass:     &newPass,
		AuthHash: &tokenHex,
	})
	suite.Nil(err, "should have nil err")
	suite.Equal(int64(1), changed)

	created, err := suite.dbm.Queries.GetUser(context.Background(), newID)
	suite.Nil(err, "should have nil err")
	suite.Equal(newPass, *created.Pass)
}
// TestDeleteUser verifies deleting the seeded user removes exactly one
// row and that a subsequent lookup reports sql.ErrNoRows.
func (suite *UsersTestSuite) TestDeleteUser() {
	rows, err := suite.dbm.Queries.DeleteUser(context.Background(), testUserID)
	suite.Nil(err, "should have nil err")
	suite.Equal(int64(1), rows, "should have one changed row")

	_, err = suite.dbm.Queries.GetUser(context.Background(), testUserID)
	suite.ErrorIs(err, sql.ErrNoRows, "should have no rows error")
}
// TestGetUsers verifies that listing users returns only the single
// seeded user.
func (suite *UsersTestSuite) TestGetUsers() {
	allUsers, err := suite.dbm.Queries.GetUsers(context.Background())
	suite.Nil(err, "should have nil err")
	suite.Len(allUsers, 1, "should have single user")
}
// TestUpdateUser verifies that changing the seeded user's password
// persists and is reflected in the returned row.
func (suite *UsersTestSuite) TestUpdateUser() {
	updatedPass := "newPass123"
	updated, err := suite.dbm.Queries.UpdateUser(context.Background(), UpdateUserParams{
		UserID:   testUserID,
		Password: &updatedPass,
	})
	suite.Nil(err, "should have nil err")
	suite.Equal(updatedPass, *updated.Pass, "should have new password")
}
// TestGetUserStatistics verifies that user statistics are empty before
// any activity exists, then contain exactly one row after ten days of
// activity are inserted and the temp tables are recached.
func (suite *UsersTestSuite) TestGetUserStatistics() {
	err := suite.dbm.CacheTempTables(context.Background())
	suite.NoError(err)

	// Ensure Zero Items
	userStats, err := suite.dbm.Queries.GetUserStatistics(context.Background())
	suite.Nil(err, "should have nil err")
	suite.Empty(userStats, "should be empty")

	// Create Activity — one record per day over a 10-day window
	// (start is 9 days ago, inclusive of today).
	end := time.Now()
	start := end.AddDate(0, 0, -9)
	var counter int64 = 0
	// `!d.After(end)` replaces `d.After(end) == false` (staticcheck S1002).
	for d := start; !d.After(end); d = d.AddDate(0, 0, 1) {
		counter++

		// Add Item
		activity, err := suite.dbm.Queries.AddActivity(context.Background(), AddActivityParams{
			DocumentID:      documentID,
			DeviceID:        deviceID,
			UserID:          testUserID,
			StartTime:       d.UTC().Format(time.RFC3339),
			Duration:        60,
			StartPercentage: float64(counter) / 100.0,
			EndPercentage:   float64(counter+1) / 100.0,
		})
		suite.Nil(err, fmt.Sprintf("[%d] should have nil err for add activity", counter))
		suite.Equal(counter, activity.ID, fmt.Sprintf("[%d] should have correct id for add activity", counter))
	}

	// Statistics are materialized into temp tables, so recache after inserting.
	err = suite.dbm.CacheTempTables(context.Background())
	suite.NoError(err)

	// Ensure One Item
	userStats, err = suite.dbm.Queries.GetUserStatistics(context.Background())
	suite.Nil(err, "should have nil err")
	suite.Len(userStats, 1, "should have length of one")
}
// TestGetUsersStreaks verifies that streaks are empty before any
// activity exists, and that ten consecutive days of activity yield two
// streak rows (DAY and WEEK windows): a 10-day current streak and a
// week streak spanning at least two calendar weeks.
func (suite *UsersTestSuite) TestGetUsersStreaks() {
	err := suite.dbm.CacheTempTables(context.Background())
	suite.NoError(err)

	// Ensure Zero Items
	userStats, err := suite.dbm.Queries.GetUserStreaks(context.Background(), testUserID)
	suite.Nil(err, "should have nil err")
	suite.Empty(userStats, "should be empty")

	// Create Activity — one record per day over a 10-day window
	// (start is 9 days ago, inclusive of today).
	end := time.Now()
	start := end.AddDate(0, 0, -9)
	var counter int64 = 0
	// `!d.After(end)` replaces `d.After(end) == false` (staticcheck S1002).
	for d := start; !d.After(end); d = d.AddDate(0, 0, 1) {
		counter++

		// Add Item
		activity, err := suite.dbm.Queries.AddActivity(context.Background(), AddActivityParams{
			DocumentID:      documentID,
			DeviceID:        deviceID,
			UserID:          testUserID,
			StartTime:       d.UTC().Format(time.RFC3339),
			Duration:        60,
			StartPercentage: float64(counter) / 100.0,
			EndPercentage:   float64(counter+1) / 100.0,
		})
		suite.Nil(err, fmt.Sprintf("[%d] should have nil err for add activity", counter))
		suite.Equal(counter, activity.ID, fmt.Sprintf("[%d] should have correct id for add activity", counter))
	}

	// Streaks are materialized into temp tables, so recache after inserting.
	err = suite.dbm.CacheTempTables(context.Background())
	suite.NoError(err)

	// Ensure Two Items (one per window: DAY and WEEK)
	userStats, err = suite.dbm.Queries.GetUserStreaks(context.Background(), testUserID)
	suite.Nil(err, "should have nil err")
	suite.Len(userStats, 2, "should have length of two")

	// Ensure Streak Stats
	dayStats := userStats[0]
	weekStats := userStats[1]
	suite.Equal(int64(10), dayStats.CurrentStreak, "should be 10 days")
	// 10 consecutive days span 2 or 3 calendar weeks depending on the weekday.
	suite.Greater(weekStats.CurrentStreak, int64(1), "should be 2 or 3")
}

View File

@@ -1,6 +1,6 @@
--- ---
services: services:
bookmanager: antholume:
environment: environment:
- CONFIG_PATH=/data - CONFIG_PATH=/data
- DATA_PATH=/data - DATA_PATH=/data

61
flake.lock generated Normal file
View File

@@ -0,0 +1,61 @@
{
"nodes": {
"flake-utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1731533236,
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1754292888,
"narHash": "sha256-1ziydHSiDuSnaiPzCQh1mRFBsM2d2yRX9I+5OPGEmIE=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "ce01daebf8489ba97bd1609d185ea276efdeb121",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-25.05",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

29
flake.nix Normal file
View File

@@ -0,0 +1,29 @@
{
  # Nix flake providing a reproducible development shell for this
  # project: Go toolchain plus linting and frontend tooling.
  description = "Development Environment";

  inputs = {
    # Pinned stable nixpkgs channel; flake.lock records the exact revision.
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05";
    flake-utils.url = "github:numtide/flake-utils";
  };

  outputs = { self, nixpkgs, flake-utils }:
    # Generate the same devShell output for every default system
    # (x86_64-linux, aarch64-darwin, etc.).
    flake-utils.lib.eachDefaultSystem (system:
      let
        pkgs = nixpkgs.legacyPackages.${system};
      in
      {
        devShells.default = pkgs.mkShell {
          packages = with pkgs; [
            go
            golangci-lint
            nodejs
            tailwindcss
            # Markdown preview tool (README rendering).
            python311Packages.grip
          ];
          # Expose locally-installed Go binaries (e.g. `go install` output).
          shellHook = ''
            export PATH=$PATH:~/go/bin
          '';
        };
      }
    );
}

100
go.mod
View File

@@ -1,51 +1,85 @@
module reichard.io/bbank module reichard.io/antholume
go 1.19 go 1.24
require ( require (
github.com/alexedwards/argon2id v0.0.0-20230305115115-4b3c3280a736 github.com/PuerkitoBio/goquery v1.10.3
github.com/gabriel-vasile/mimetype v1.4.2 github.com/alexedwards/argon2id v1.0.0
github.com/gin-contrib/multitemplate v0.0.0-20230212012517-45920c92c271 github.com/gabriel-vasile/mimetype v1.4.9
github.com/gin-contrib/sessions v0.0.4 github.com/gin-contrib/multitemplate v1.1.1
github.com/gin-gonic/gin v1.9.1 github.com/gin-contrib/sessions v1.0.4
github.com/mattn/go-sqlite3 v1.14.17 github.com/gin-gonic/gin v1.10.1
github.com/microcosm-cc/bluemonday v1.0.25 github.com/itchyny/gojq v0.12.17
github.com/jarcoal/httpmock v1.3.1
github.com/microcosm-cc/bluemonday v1.0.27
github.com/pkg/errors v0.9.1
github.com/pressly/goose/v3 v3.24.3
github.com/sirupsen/logrus v1.9.3 github.com/sirupsen/logrus v1.9.3
github.com/urfave/cli/v2 v2.25.7 github.com/stretchr/testify v1.10.0
golang.org/x/exp v0.0.0-20230905200255-921286631fa9 github.com/taylorskalyo/goreader v1.0.1
github.com/urfave/cli/v2 v2.27.7
golang.org/x/exp v0.0.0-20250718183923-645b1fa84792
gopkg.in/natefinch/lumberjack.v2 v2.2.1
modernc.org/sqlite v1.38.2
) )
require ( require (
github.com/andybalholm/cascadia v1.3.3 // indirect
github.com/aymerick/douceur v0.2.0 // indirect github.com/aymerick/douceur v0.2.0 // indirect
github.com/bytedance/sonic v1.10.0 // indirect github.com/bytedance/sonic v1.14.0 // indirect
github.com/bytedance/sonic/loader v0.3.0 // indirect
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect
github.com/chenzhuoyu/iasm v0.9.0 // indirect github.com/chenzhuoyu/iasm v0.9.1 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect github.com/cloudwego/base64x v0.1.6 // indirect
github.com/gin-contrib/sse v0.1.0 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.7 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/gin-contrib/sse v1.1.0 // indirect
github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-playground/validator/v10 v10.15.3 // indirect github.com/go-playground/validator/v10 v10.27.0 // indirect
github.com/goccy/go-json v0.10.2 // indirect github.com/goccy/go-json v0.10.5 // indirect
github.com/gorilla/context v1.1.1 // indirect github.com/google/uuid v1.6.0 // indirect
github.com/gorilla/css v1.0.0 // indirect github.com/gorilla/context v1.1.2 // indirect
github.com/gorilla/securecookie v1.1.1 // indirect github.com/gorilla/css v1.0.1 // indirect
github.com/gorilla/sessions v1.2.1 // indirect github.com/gorilla/securecookie v1.1.2 // indirect
github.com/gorilla/sessions v1.4.0 // indirect
github.com/itchyny/timefmt-go v0.1.6 // indirect
github.com/json-iterator/go v1.1.12 // indirect github.com/json-iterator/go v1.1.12 // indirect
github.com/klauspost/cpuid/v2 v2.2.5 // indirect github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect
github.com/leodido/go-urn v1.2.4 // indirect github.com/klauspost/cpuid/v2 v2.3.0 // indirect
github.com/mattn/go-isatty v0.0.19 // indirect github.com/leodido/go-urn v1.4.0 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mfridman/interpolate v0.0.2 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/pelletier/go-toml/v2 v2.1.0 // indirect github.com/ncruces/go-strftime v0.1.9 // indirect
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/sethvargo/go-retry v0.3.0 // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/ugorji/go/codec v1.2.11 // indirect github.com/ugorji/go/codec v1.3.0 // indirect
github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 // indirect github.com/xrash/smetrics v0.0.0-20250705151800-55b8f293f342 // indirect
golang.org/x/arch v0.4.0 // indirect go.uber.org/multierr v1.11.0 // indirect
golang.org/x/crypto v0.12.0 // indirect golang.org/x/arch v0.20.0 // indirect
golang.org/x/net v0.14.0 // indirect golang.org/x/crypto v0.41.0 // indirect
golang.org/x/sys v0.12.0 // indirect golang.org/x/mod v0.27.0 // indirect
golang.org/x/text v0.12.0 // indirect golang.org/x/net v0.43.0 // indirect
google.golang.org/protobuf v1.31.0 // indirect golang.org/x/sync v0.16.0 // indirect
golang.org/x/sys v0.35.0 // indirect
golang.org/x/text v0.28.0 // indirect
golang.org/x/tools v0.36.0 // indirect
google.golang.org/protobuf v1.36.7 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect
lukechampine.com/uint128 v1.3.0 // indirect
modernc.org/cc/v3 v3.41.0 // indirect
modernc.org/ccgo/v3 v3.17.0 // indirect
modernc.org/libc v1.66.6 // indirect
modernc.org/mathutil v1.7.1 // indirect
modernc.org/memory v1.11.0 // indirect
modernc.org/opt v0.1.4 // indirect
modernc.org/strutil v1.2.1 // indirect
modernc.org/token v1.1.0 // indirect
) )

482
go.sum
View File

@@ -1,80 +1,199 @@
github.com/alexedwards/argon2id v0.0.0-20230305115115-4b3c3280a736 h1:qZaEtLxnqY5mJ0fVKbk31NVhlgi0yrKm51Pq/I5wcz4= github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0=
github.com/alexedwards/argon2id v0.0.0-20230305115115-4b3c3280a736/go.mod h1:mTeFRcTdnpzOlRjMoFYC/80HwVUreupyAiqPkCZQOXc= github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
github.com/antonlindstrom/pgstore v0.0.0-20200229204646-b08ebf1105e0/go.mod h1:2Ti6VUHVxpC0VSmTZzEvpzysnaGAfGBOoMIz5ykPyyw= github.com/ClickHouse/ch-go v0.58.2 h1:jSm2szHbT9MCAB1rJ3WuCJqmGLi5UTjlNu+f530UTS0=
github.com/ClickHouse/ch-go v0.58.2/go.mod h1:Ap/0bEmiLa14gYjCiRkYGbXvbe8vwdrfTYWhsuQ99aw=
github.com/ClickHouse/ch-go v0.65.1 h1:SLuxmLl5Mjj44/XbINsK2HFvzqup0s6rwKLFH347ZhU=
github.com/ClickHouse/clickhouse-go/v2 v2.16.0 h1:rhMfnPewXPnY4Q4lQRGdYuTLRBRKJEIEYHtbUMrzmvI=
github.com/ClickHouse/clickhouse-go/v2 v2.16.0/go.mod h1:J7SPfIxwR+x4mQ+o8MLSe0oY50NNntEqCIjFe/T1VPM=
github.com/ClickHouse/clickhouse-go/v2 v2.34.0 h1:Y4rqkdrRHgExvC4o/NTbLdY5LFQ3LHS77/RNFxFX3Co=
github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow=
github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM=
github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 h1:TngWCqHvy9oXAN6lEVMRuU21PR1EtLVZJmdB18Gu3Rw=
github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5/go.mod h1:lmUJ/7eu/Q8D7ML55dXQrVaamCz2vxCfdQBasLZfHKk=
github.com/PuerkitoBio/goquery v1.8.1 h1:uQxhNlArOIdbrH1tr0UXwdVFgDcZDrZVdcpygAcwmWM=
github.com/PuerkitoBio/goquery v1.8.1/go.mod h1:Q8ICL1kNUJ2sXGoAhPGUdYDJvgQgHzJsnnd3H7Ho5jQ=
github.com/PuerkitoBio/goquery v1.10.3 h1:pFYcNSqHxBD06Fpj/KsbStFRsgRATgnf3LeXiUkhzPo=
github.com/PuerkitoBio/goquery v1.10.3/go.mod h1:tMUX0zDMHXYlAQk6p35XxQMqMweEKB7iK7iLNd4RH4Y=
github.com/alexedwards/argon2id v1.0.0 h1:wJzDx66hqWX7siL/SRUmgz3F8YMrd/nfX/xHHcQQP0w=
github.com/alexedwards/argon2id v1.0.0/go.mod h1:tYKkqIjzXvZdzPvADMWOEZ+l6+BD6CtBXMj5fnJppiw=
github.com/andybalholm/brotli v1.0.6 h1:Yf9fFpf49Zrxb9NlQaluyE92/+X7UVHlhMNJN2sxfOI=
github.com/andybalholm/brotli v1.0.6/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA=
github.com/andybalholm/cascadia v1.3.1/go.mod h1:R4bJ1UQfqADjvDa4P6HZHLh/3OxWWEqc0Sk8XGwHqvA=
github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss=
github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM=
github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA=
github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk= github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk=
github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
github.com/boj/redistore v0.0.0-20180917114910-cd5dcc76aeff/go.mod h1:+RTT1BOk5P97fT2CiHkbFQwkK3mjsFAP6zCYV2aXtjw=
github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b/go.mod h1:H0wQNHz2YrLsuXOZozoeDmnHXkNCRmMW0gwFWDfEZDA=
github.com/bradleypeabody/gorilla-sessions-memcache v0.0.0-20181103040241-659414f458e1/go.mod h1:dkChI7Tbtx7H1Tj7TqGSZMOeGpMP5gLHtjroHd4agiI=
github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM= github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM=
github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM= github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM=
github.com/bytedance/sonic v1.10.0 h1:qtNZduETEIWJVIyDl01BeNxur2rW9OwTQ/yBqFRkKEk= github.com/bytedance/sonic v1.10.2 h1:GQebETVBxYB7JGWJtLBi07OVzWwt+8dWA00gEVW2ZFE=
github.com/bytedance/sonic v1.10.0/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4= github.com/bytedance/sonic v1.10.2/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
github.com/bytedance/sonic v1.14.0 h1:/OfKt8HFw0kh2rj8N0F6C/qPGRESq0BbaNZgcNXXzQQ=
github.com/bytedance/sonic v1.14.0/go.mod h1:WoEbx8WTcFJfzCe0hbmyTGrfjt8PzNEBdxlNUO24NhA=
github.com/bytedance/sonic/loader v0.3.0 h1:dskwH8edlzNMctoruo8FPTJDF3vLtDT0sXZwvZJyqeA=
github.com/bytedance/sonic/loader v0.3.0/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI=
github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM=
github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY= github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY=
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk= github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk=
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0= github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0=
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA= github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA=
github.com/chenzhuoyu/iasm v0.9.0 h1:9fhXjVzq5hUy2gkhhgHl95zG2cEAhw9OSGs8toWWAwo=
github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog= github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog=
github.com/cpuguy83/go-md2man/v2 v2.0.2 h1:p1EgwI/C7NhT0JmVkwCD2ZBK8j4aeHQX2pMHHBfMQ6w= github.com/chenzhuoyu/iasm v0.9.1 h1:tUHQJXo3NhBqw6s33wkGn9SP3bvrWLdlVIJ3hQBL7P0=
github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/chenzhuoyu/iasm v0.9.1/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog=
github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M=
github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU=
github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
github.com/containerd/continuity v0.4.3 h1:6HVkalIp+2u1ZLH1J/pYX2oBVXlJZvh1X1A7bEZ9Su8=
github.com/containerd/continuity v0.4.3/go.mod h1:F6PTNCKepoxEaXLQp3wDAjygEnImnZ/7o4JzpodfroQ=
github.com/cpuguy83/go-md2man/v2 v2.0.3 h1:qMCsGGgs+MAzDFyp9LpAe1Lqy/fY/qCovCm0qnXZOBM=
github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/cpuguy83/go-md2man/v2 v2.0.7 h1:zbFlGlXEAKlwXpmvle3d8Oe3YnkKIK4xSRTd3sHPnBo=
github.com/cpuguy83/go-md2man/v2 v2.0.7/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU= github.com/docker/cli v24.0.7+incompatible h1:wa/nIwYFW7BVTGa7SWPVyyXU9lgORqUb1xfI36MSkFg=
github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA= github.com/docker/cli v24.0.7+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
github.com/gin-contrib/multitemplate v0.0.0-20230212012517-45920c92c271 h1:s+boMV47gwTyff2PL+k6V33edJpp+K5y3QPzZlRhno8= github.com/docker/docker v24.0.7+incompatible h1:Wo6l37AuwP3JaMnZa226lzVXGA3F9Ig1seQen0cKYlM=
github.com/gin-contrib/multitemplate v0.0.0-20230212012517-45920c92c271/go.mod h1:XLLtIXoP9+9zGcEDc7gAGV3AksGPO+vzv4kXHMJSdU0= github.com/docker/docker v24.0.7+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/gin-contrib/sessions v0.0.4 h1:gq4fNa1Zmp564iHP5G6EBuktilEos8VKhe2sza1KMgo= github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ=
github.com/gin-contrib/sessions v0.0.4/go.mod h1:pQ3sIyviBBGcxgyR8mkeJuXbeV3h3NYmhJADQTq5+Vo= github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=
github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/elastic/go-sysinfo v1.11.2 h1:mcm4OSYVMyws6+n2HIVMGkln5HOpo5Ie1ZmbbNn0jg4=
github.com/elastic/go-sysinfo v1.11.2/go.mod h1:GKqR8bbMK/1ITnez9NIsIfXQr25aLhRJa7AfT8HpBFQ=
github.com/elastic/go-sysinfo v1.15.3 h1:W+RnmhKFkqPTCRoFq2VCTmsT4p/fwpo+3gKNQsn1XU0=
github.com/elastic/go-windows v1.0.1 h1:AlYZOldA+UJ0/2nBuqWdo90GFCgG9xuyw9SYzGUtJm0=
github.com/elastic/go-windows v1.0.1/go.mod h1:FoVvqWSun28vaDQPbj2Elfc0JahhPB7WQEGa3c814Ss=
github.com/elastic/go-windows v1.0.2 h1:yoLLsAsV5cfg9FLhZ9EXZ2n2sQFKeDYrHenkcivY4vI=
github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
github.com/gabriel-vasile/mimetype v1.4.9 h1:5k+WDwEsD9eTLL8Tz3L0VnmVh9QxGjRmjBvAG7U/oYY=
github.com/gabriel-vasile/mimetype v1.4.9/go.mod h1:WnSQhFKJuBlRyLiKohA/2DtIlPFAbguNaG7QCHcyGok=
github.com/gin-contrib/multitemplate v0.0.0-20231230012943-32b233489a81 h1:hQ/WeoPMTbN8NHk5i96dWy3D4uF7yCU+kORyWG+P4oU=
github.com/gin-contrib/multitemplate v0.0.0-20231230012943-32b233489a81/go.mod h1:XLLtIXoP9+9zGcEDc7gAGV3AksGPO+vzv4kXHMJSdU0=
github.com/gin-contrib/multitemplate v1.1.1 h1:uzhT/ZWS9nBd1h6P+AaxWaVSVAJRAcKH4yafrBU8sPc=
github.com/gin-contrib/multitemplate v1.1.1/go.mod h1:1Sa4984P8+x87U0cg5yWxK4jpbK1cXMYegUCZK6XT/M=
github.com/gin-contrib/sessions v0.0.5 h1:CATtfHmLMQrMNpJRgzjWXD7worTh7g7ritsQfmF+0jE=
github.com/gin-contrib/sessions v0.0.5/go.mod h1:vYAuaUPqie3WUSsft6HUlCjlwwoJQs97miaG2+7neKY=
github.com/gin-contrib/sessions v1.0.4 h1:ha6CNdpYiTOK/hTp05miJLbpTSNfOnFg5Jm2kbcqy8U=
github.com/gin-contrib/sessions v1.0.4/go.mod h1:ccmkrb2z6iU2osiAHZG3x3J4suJK+OU27oqzlWOqQgs=
github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
github.com/gin-gonic/gin v1.7.4/go.mod h1:jD2toBW3GZUr5UMcdrwQA10I7RuaFOl/SGeDjXkfUtY= github.com/gin-contrib/sse v1.1.0 h1:n0w2GMuUpWDVp7qSpvze6fAu9iRxJY4Hmj6AmBOU05w=
github.com/gin-contrib/sse v1.1.0/go.mod h1:hxRZ5gVpWMT7Z0B0gSNYqqsSCNIJMjzvm6fqCz9vjwM=
github.com/gin-gonic/gin v1.8.1/go.mod h1:ji8BvRH1azfM+SYow9zQ6SZMvR8qOMZHmsCuWR9tTTk= github.com/gin-gonic/gin v1.8.1/go.mod h1:ji8BvRH1azfM+SYow9zQ6SZMvR8qOMZHmsCuWR9tTTk=
github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg= github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg=
github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU= github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU=
github.com/globalsign/mgo v0.0.0-20181015135952-eeefdecb41b8/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q= github.com/gin-gonic/gin v1.10.1 h1:T0ujvqyCSqRopADpgPgiTT63DUQVSfojyME59Ei63pQ=
github.com/gin-gonic/gin v1.10.1/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
github.com/go-faster/city v1.0.1 h1:4WAxSZ3V2Ws4QRDrscLEDcibJY8uf41H6AhXDrNDcGw=
github.com/go-faster/city v1.0.1/go.mod h1:jKcUJId49qdW3L1qKHH/3wPeUstCVpVSXTM6vO3VcTw=
github.com/go-faster/errors v0.6.1 h1:nNIPOBkprlKzkThvS/0YaX8Zs9KewLCOSFQS5BU06FI=
github.com/go-faster/errors v0.6.1/go.mod h1:5MGV2/2T9yvlrbhe9pD9LO5Z/2zCSq2T8j+Jpi2LAyY=
github.com/go-faster/errors v0.7.1 h1:MkJTnDoEdi9pDabt1dpWf7AA8/BaSYZqibYyhZ20AYg=
github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs= github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs=
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA=
github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA= github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA=
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4=
github.com/go-playground/validator/v10 v10.10.0/go.mod h1:74x4gJWsvQexRdW8Pn3dXSGrTK4nAUsbPlLADvpJkos= github.com/go-playground/validator/v10 v10.10.0/go.mod h1:74x4gJWsvQexRdW8Pn3dXSGrTK4nAUsbPlLADvpJkos=
github.com/go-playground/validator/v10 v10.15.3 h1:S+sSpunYjNPDuXkWbK+x+bA7iXiW296KG4dL3X7xUZo= github.com/go-playground/validator/v10 v10.17.0 h1:SmVVlfAOtlZncTxRuinDPomC2DkXJ4E5T9gDA0AIH74=
github.com/go-playground/validator/v10 v10.15.3/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= github.com/go-playground/validator/v10 v10.17.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
github.com/go-playground/validator/v10 v10.27.0 h1:w8+XrWVMhGkxOaaowyKH35gFydVHOvC0/uWoy2Fzwn4=
github.com/go-playground/validator/v10 v10.27.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo=
github.com/go-sql-driver/mysql v1.7.1 h1:lUIinVbN1DY0xBg0eMOzmmtGoHwWBbvnWubQUrtU8EI=
github.com/go-sql-driver/mysql v1.7.1/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI=
github.com/go-sql-driver/mysql v1.9.2 h1:4cNKDYQ1I84SXslGddlsrMhc8k4LeDVj6Ad6WRjiHuU=
github.com/goccy/go-json v0.9.7/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/goccy/go-json v0.9.7/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg=
github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
github.com/golang-jwt/jwt/v4 v4.5.2 h1:YtQM7lnr8iZ+j5q71MGKkNw9Mn7AjHM68uc9g5fXeUI=
github.com/golang/protobuf v1.5.0 h1:LUVKkCeviFUMKqHa4tXIIij/lbhnMbP7Fn5wKdKkRh4=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/gomodule/redigo v2.0.0+incompatible/go.mod h1:B4C85qUVwatsJoIUNIfCRsp7qO0iAmpGFZ4EELWSbC4= github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg=
github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg= github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/gorilla/context v1.1.1 h1:AWwleXJkX/nhcU9bZSnZoi3h/qGYqQAGhq6zZe/aQW8= github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY= github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26 h1:Xim43kblpZXfIBQsbuBVKCudVG457BR2GZFIz3uw3hQ=
github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c= github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26/go.mod h1:dDKJzRmX4S37WGHujM7tX//fmj1uioxKzKxz3lo4HJo=
github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ= github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs=
github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4=
github.com/gorilla/sessions v1.1.1/go.mod h1:8KCfur6+4Mqcc6S0FEfKuN15Vl5MgXW92AE8ovaJD0w= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ=
github.com/gorilla/sessions v1.2.0/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7FsgI= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= github.com/gorilla/context v1.1.2 h1:WRkNAv2uoa03QNIc1A6u4O7DAGMUVoopZhkiXWA2V1o=
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/gorilla/context v1.1.2/go.mod h1:KDPwT9i/MeWHiLl90fuTgrt4/wPcv75vFAZLaOOcbxM=
github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8=
github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0=
github.com/gorilla/securecookie v1.1.2 h1:YCIWL56dvtr73r6715mJs5ZvhtnY73hBvEF8kXD8ePA=
github.com/gorilla/securecookie v1.1.2/go.mod h1:NfCASbcHqRSY+3a8tlWJwsQap2VX5pwzwo4h3eOamfo=
github.com/gorilla/sessions v1.2.2 h1:lqzMYz6bOfvn2WriPUjNByzeXIlVzURcPmgMczkmTjY=
github.com/gorilla/sessions v1.2.2/go.mod h1:ePLdVu+jbEgHH+KWw8I1z2wqd0BAdAQh/8LRvBeoNcQ=
github.com/gorilla/sessions v1.4.0 h1:kpIYOp/oi6MG/p5PgxApU8srsSw9tuFbt46Lt7auzqQ=
github.com/gorilla/sessions v1.4.0/go.mod h1:FLWm50oby91+hl7p/wRxDth9bWSuk0qVL2emc7lT5ik=
github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4=
github.com/imdario/mergo v0.3.16/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY=
github.com/itchyny/gojq v0.12.14 h1:6k8vVtsrhQSYgSGg827AD+PVVaB1NLXEdX+dda2oZCc=
github.com/itchyny/gojq v0.12.14/go.mod h1:y1G7oO7XkcR1LPZO59KyoCRy08T3j9vDYRV0GgYSS+s=
github.com/itchyny/gojq v0.12.17 h1:8av8eGduDb5+rvEdaOO+zQUjA04MS0m3Ps8HiD+fceg=
github.com/itchyny/gojq v0.12.17/go.mod h1:WBrEMkgAfAGO1LUcGOckBl5O726KPp+OlkKug0I/FEY=
github.com/itchyny/timefmt-go v0.1.5 h1:G0INE2la8S6ru/ZI5JecgyzbbJNs5lG1RcBqa7Jm6GE=
github.com/itchyny/timefmt-go v0.1.5/go.mod h1:nEP7L+2YmAbT2kZ2HfSs1d8Xtw9LY8D2stDBckWakZ8=
github.com/itchyny/timefmt-go v0.1.6 h1:ia3s54iciXDdzWzwaVKXZPbiXzxxnv1SPGFfM/myJ5Q=
github.com/itchyny/timefmt-go v0.1.6/go.mod h1:RRDZYC5s9ErkjQvTvvU7keJjxUYzIISJGxm9/mAERQg=
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk=
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo=
github.com/jackc/pgx/v5 v5.5.1 h1:5I9etrGkLrN+2XPCsi6XLlV5DITbSL/xBZdmAxFcXPI=
github.com/jackc/pgx/v5 v5.5.1/go.mod h1:Ig06C2Vu0t5qXC60W8sqIthScaEnFvojjj9dSljmHRA=
github.com/jackc/pgx/v5 v5.7.4 h1:9wKznZrhWa2QiHL+NjTSPP6yjl3451BX3imWDnokYlg=
github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk=
github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
github.com/jarcoal/httpmock v1.3.1 h1:iUx3whfZWVf3jT01hQTO/Eo5sAYtB2/rqaUuOtpInww=
github.com/jarcoal/httpmock v1.3.1/go.mod h1:3yb8rc4BI7TCBhFY8ng0gjuLKJNquuDNiPaZjnENuYg=
github.com/joeshaw/multierror v0.0.0-20140124173710-69b34d4ec901 h1:rp+c0RAYOWj8l6qbCUTSiRLG/iKnW3K3/QfPPuSsBt4=
github.com/joeshaw/multierror v0.0.0-20140124173710-69b34d4ec901/go.mod h1:Z86h9688Y0wesXCyonoVr47MasHilkuLMqGhRZ4Hpak=
github.com/jonboulle/clockwork v0.4.0 h1:p4Cf1aMWXnXAUh8lVfewRBx1zaTSYKrKMF2g3ST4RZ4=
github.com/jonboulle/clockwork v0.4.0/go.mod h1:xgRqUGwRcjKCO1vbZUEtSLrqKoPSsUpK7fnezOII0kc=
github.com/jonboulle/clockwork v0.5.0 h1:Hyh9A8u51kptdkR+cqRpT1EebBwTn1oK9YfGYbdFz6I=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/kidstuff/mongostore v0.0.0-20181113001930-e650cd85ee4b/go.mod h1:g2nVr8KZVXJSS97Jo8pJ0jgq29P6H7dG0oplUA86MQw= github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs=
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8=
github.com/klauspost/compress v1.17.2 h1:RlWWUY/Dr4fL8qk9YG7DTZ7PDgME2V4csBXA8L/ixi4=
github.com/klauspost/compress v1.17.2/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg= github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc=
github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
github.com/klauspost/cpuid/v2 v2.3.0 h1:S4CRMLnYUhGeDFDqkGriYKdfoFlDnMtqTiI/sFzhA9Y=
github.com/klauspost/cpuid/v2 v2.3.0/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
@@ -84,46 +203,90 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII=
github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY=
github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q=
github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4=
github.com/lib/pq v1.10.3/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-sqlite3 v1.14.17 h1:mCRHCLDUBXgpKAqIKsaAaAsrAlbkeomtRFKXh2L6YIM= github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
github.com/mattn/go-sqlite3 v1.14.17/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= github.com/mattn/go-sqlite3 v2.0.3+incompatible h1:gXHsfypPkaMZrKbD5209QV9jbUTJKjyR5WD3HYQSd+U=
github.com/memcachier/mc v2.0.1+incompatible/go.mod h1:7bkvFE61leUBvXz+yxsOnGBQSZpBSPIMUQSmmSHvuXc= github.com/mattn/go-sqlite3 v2.0.3+incompatible/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/microcosm-cc/bluemonday v1.0.25 h1:4NEwSfiJ+Wva0VxN5B8OwMicaJvD8r9tlJWm9rtloEg= github.com/maxatome/go-testdeep v1.12.0 h1:Ql7Go8Tg0C1D/uMMX59LAoYK7LffeJQ6X2T04nTH68g=
github.com/microcosm-cc/bluemonday v1.0.25/go.mod h1:ZIOjCQp1OrzBBPIJmfX4qDYFuhU02nx4bn030ixfHLE= github.com/maxatome/go-testdeep v1.12.0/go.mod h1:lPZc/HAcJMP92l7yI6TRz1aZN5URwUBUAfUNvrclaNM=
github.com/mfridman/interpolate v0.0.2 h1:pnuTK7MQIxxFz1Gr+rjSIx9u7qVjf5VOoM/u6BbAxPY=
github.com/mfridman/interpolate v0.0.2/go.mod h1:p+7uk6oE07mpE/Ik1b8EckO0O4ZXiGAfshKBWLUM9Xg=
github.com/microcosm-cc/bluemonday v1.0.26 h1:xbqSvqzQMeEHCqMi64VAs4d8uy6Mequs3rQ0k/Khz58=
github.com/microcosm-cc/bluemonday v1.0.26/go.mod h1:JyzOCs9gkyQyjs+6h10UEVSe02CGwkhd72Xdqh78TWs=
github.com/microcosm-cc/bluemonday v1.0.27 h1:MpEUotklkwCSLeH+Qdx1VJgNqLlpY2KXwXFM08ygZfk=
github.com/microcosm-cc/bluemonday v1.0.27/go.mod h1:jFi9vgW+H7c3V0lb6nR74Ib/DIB5OBs92Dimizgw2cA=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0=
github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
github.com/opencontainers/image-spec v1.1.0-rc5 h1:Ygwkfw9bpDvs+c9E34SdgGOj41dX/cbdlwvlWt0pnFI=
github.com/opencontainers/image-spec v1.1.0-rc5/go.mod h1:X4pATf0uXsnn3g5aiGIsVnJBR4mxhKzfwmvK/B2NTm8=
github.com/opencontainers/runc v1.1.10 h1:EaL5WeO9lv9wmS6SASjszOeQdSctvpbu0DdBQBizE40=
github.com/opencontainers/runc v1.1.10/go.mod h1:+/R6+KmDlh+hOO8NkjmgkG9Qzvypzk0yXxAPYYR65+M=
github.com/ory/dockertest/v3 v3.10.0 h1:4K3z2VMe8Woe++invjaTB7VRyQXQy5UY+loujO4aNE4=
github.com/ory/dockertest/v3 v3.10.0/go.mod h1:nr57ZbRWMqfsdGdFNLHz5jjNdDb7VVFnzAeW1n5N1Lg=
github.com/paulmach/orb v0.10.0 h1:guVYVqzxHE/CQ1KpfGO077TR0ATHSNjp4s6XGLn3W9s=
github.com/paulmach/orb v0.10.0/go.mod h1:5mULz1xQfs3bmQm63QEJA6lNGujuRafwA5S/EnuLaLU=
github.com/paulmach/orb v0.11.1 h1:3koVegMC4X/WeiXYz9iswopaTwMem53NzTJuTF20JzU=
github.com/pelletier/go-toml/v2 v2.0.1/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo= github.com/pelletier/go-toml/v2 v2.0.1/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo=
github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4= github.com/pelletier/go-toml/v2 v2.1.1 h1:LWAJwfNvjQZCFIDKWYQaM62NcYeYViCmWIwmOStowAI=
github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= github.com/pelletier/go-toml/v2 v2.1.1/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
github.com/pierrec/lz4/v4 v4.1.18 h1:xaKrnTkyoqfh1YItXl56+6KJNVYWlEEPuAQW9xsplYQ=
github.com/pierrec/lz4/v4 v4.1.18/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/pierrec/lz4/v4 v4.1.22 h1:cKFw6uJDK+/gfw5BcDL0JL5aBsAFdsIT18eRtLj7VIU=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/quasoft/memstore v0.0.0-20191010062613-2bce066d2b0b/go.mod h1:wTPjTepVu7uJBYgZ0SdWHQlIas582j6cn2jgk4DDdlg= github.com/pressly/goose/v3 v3.17.0 h1:fT4CL3LRm4kfyLuPWzDFAoxjR5ZHjeJ6uQhibQtBaIs=
github.com/pressly/goose/v3 v3.17.0/go.mod h1:22aw7NpnCPlS86oqkO/+3+o9FuCaJg4ZVWRUO3oGzHQ=
github.com/pressly/goose/v3 v3.24.3 h1:DSWWNwwggVUsYZ0X2VitiAa9sKuqtBfe+Jr9zFGwWlM=
github.com/pressly/goose/v3 v3.24.3/go.mod h1:v9zYL4xdViLHCUUJh/mhjnm6JrK7Eul8AS93IxiZM4E=
github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo=
github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo=
github.com/prometheus/procfs v0.16.1 h1:hZ15bTNuirocR6u0JZ6BAHHmwS1p8B4P6MRqxtzMyRg=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8= github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8=
github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/segmentio/asm v1.2.0 h1:9BQrFxC+YOHJlTlHGkTrFWf59nbL3XnCoFLTwDCI7ys=
github.com/segmentio/asm v1.2.0/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs=
github.com/sethvargo/go-retry v0.2.4 h1:T+jHEQy/zKJf5s95UkguisicE0zuF9y7+/vgz08Ocec=
github.com/sethvargo/go-retry v0.2.4/go.mod h1:1afjQuvh7s4gflMObvjLPaWgluLLyhA1wmVZ6KLpICw=
github.com/sethvargo/go-retry v0.3.0 h1:EEt31A35QhrcRZtrYFDTBg91cqZVnFL2navjDrah2SE=
github.com/sethvargo/go-retry v0.3.0/go.mod h1:mNX17F0C/HguQMyMyJxcnU471gOZGxCLyYaFyAZraas=
github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8=
github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k=
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
@@ -132,48 +295,110 @@ github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/taylorskalyo/goreader v0.0.0-20230626212555-e7f5644f8115 h1:OEAIMYp5l9kJ2kT9UPL5QSUriKIIDhnLmpJTy69sltA=
github.com/taylorskalyo/goreader v0.0.0-20230626212555-e7f5644f8115/go.mod h1:AIVbkIe1G7fpFHiKOdxZnU5p9tFPYNTQyH3H5IrRkGw=
github.com/taylorskalyo/goreader v1.0.1 h1:eS9SYiHai2aAHhm+YMGRTqrvNt2aoRMTd7p6ftm0crY=
github.com/taylorskalyo/goreader v1.0.1/go.mod h1:JrUsWCgnk4C3P5Jsr7Pf2mFrMpsR0ls/0bjR5aorYTI=
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw=
github.com/ugorji/go v1.2.7/go.mod h1:nF9osbDWLy6bDVv/Rtoh6QgnvNDpmCalQV5urGCCS6M= github.com/ugorji/go v1.2.7/go.mod h1:nF9osbDWLy6bDVv/Rtoh6QgnvNDpmCalQV5urGCCS6M=
github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY=
github.com/ugorji/go/codec v1.2.7/go.mod h1:WGN1fab3R1fzQlVQTkfxVtIBhWDRqOviHU95kRgeqEY= github.com/ugorji/go/codec v1.2.7/go.mod h1:WGN1fab3R1fzQlVQTkfxVtIBhWDRqOviHU95kRgeqEY=
github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU= github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE=
github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
github.com/urfave/cli/v2 v2.25.7 h1:VAzn5oq403l5pHjc4OhD54+XGO9cdKVL/7lDjF+iKUs= github.com/ugorji/go/codec v1.3.0 h1:Qd2W2sQawAfG8XSvzwhBeoGq71zXOC/Q1E9y/wUcsUA=
github.com/urfave/cli/v2 v2.25.7/go.mod h1:8qnjx1vcq5s2/wpsqoZFndg2CE5tNFyrTvS6SinrnYQ= github.com/ugorji/go/codec v1.3.0/go.mod h1:pRBVtBSKl77K30Bv8R2P+cLSGaTtex6fsA2Wjqmfxj4=
github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 h1:bAn7/zixMGCfxrRTfdpNzjtPYqr8smhKouy9mxVdGPU= github.com/urfave/cli/v2 v2.27.1 h1:8xSQ6szndafKVRmfyeUMxkNUJQMjL1F2zmsZ+qHpfho=
github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673/go.mod h1:N3UwUGtsrSj3ccvlPHLoLsHnpR27oXr4ZE984MbSER8= github.com/urfave/cli/v2 v2.27.1/go.mod h1:8qnjx1vcq5s2/wpsqoZFndg2CE5tNFyrTvS6SinrnYQ=
github.com/urfave/cli/v2 v2.27.7 h1:bH59vdhbjLv3LAvIu6gd0usJHgoTTPhCFib8qqOwXYU=
github.com/urfave/cli/v2 v2.27.7/go.mod h1:CyNAG/xg+iAOg0N4MPGZqVmv2rCoP267496AOXUZjA4=
github.com/vertica/vertica-sql-go v1.3.3 h1:fL+FKEAEy5ONmsvya2WH5T8bhkvY27y/Ik3ReR2T+Qw=
github.com/vertica/vertica-sql-go v1.3.3/go.mod h1:jnn2GFuv+O2Jcjktb7zyc4Utlbu9YVqpHH/lx63+1M4=
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo=
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0=
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74=
github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y=
github.com/xrash/smetrics v0.0.0-20231213231151-1d8dd44e695e h1:+SOyEddqYF09QP7vr7CgJ1eti3pY9Fn3LHO1M1r/0sI=
github.com/xrash/smetrics v0.0.0-20231213231151-1d8dd44e695e/go.mod h1:N3UwUGtsrSj3ccvlPHLoLsHnpR27oXr4ZE984MbSER8=
github.com/xrash/smetrics v0.0.0-20250705151800-55b8f293f342 h1:FnBeRrxr7OU4VvAzt5X7s6266i6cSVkkFPS0TuXWbIg=
github.com/xrash/smetrics v0.0.0-20250705151800-55b8f293f342/go.mod h1:Ohn+xnUBiLI6FVj/9LpzZWtj1/D6lUovWYBkxHVV3aM=
github.com/ydb-platform/ydb-go-genproto v0.0.0-20231012155159-f85a672542fd h1:dzWP1Lu+A40W883dK/Mr3xyDSM/2MggS8GtHT0qgAnE=
github.com/ydb-platform/ydb-go-genproto v0.0.0-20231012155159-f85a672542fd/go.mod h1:Er+FePu1dNUieD+XTMDduGpQuCPssK5Q4BjF+IIXJ3I=
github.com/ydb-platform/ydb-go-genproto v0.0.0-20241112172322-ea1f63298f77 h1:LY6cI8cP4B9rrpTleZk95+08kl2gF4rixG7+V/dwL6Q=
github.com/ydb-platform/ydb-go-sdk/v3 v3.54.2 h1:E0yUuuX7UmPxXm92+yQCjMveLFO3zfvYFIJVuAqsVRA=
github.com/ydb-platform/ydb-go-sdk/v3 v3.54.2/go.mod h1:fjBLQ2TdQNl4bMjuWl9adoTGBypwUTPoGC+EqYqiIcU=
github.com/ydb-platform/ydb-go-sdk/v3 v3.108.1 h1:ixAiqjj2S/dNuJqrz4AxSqgw2P5OBMXp68hB5nNriUk=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
go.opentelemetry.io/otel v1.20.0 h1:vsb/ggIY+hUjD/zCAQHpzTmndPqv/ml2ArbsbfBYTAc=
go.opentelemetry.io/otel v1.20.0/go.mod h1:oUIGj3D77RwJdM6PPZImDpSZGDvkD9fhesHny69JFrs=
go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ=
go.opentelemetry.io/otel/trace v1.20.0 h1:+yxVAPZPbQhbC3OfAkeIVTky6iTFpcr4SiY9om7mXSQ=
go.opentelemetry.io/otel/trace v1.20.0/go.mod h1:HJSK7F/hA5RlzpZ0zKDCHCDHm556LCDtKaAo6JmBFUU=
go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs=
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
golang.org/x/arch v0.4.0 h1:A8WCeEWhLwPBKNbFi5Wv5UTCBx5zzubnXDlMOFAzFMc= golang.org/x/arch v0.7.0 h1:pskyeJh/3AmoQ8CPE95vxHLqp1G1GfGNXTmcl9NEKTc=
golang.org/x/arch v0.4.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= golang.org/x/arch v0.7.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
golang.org/x/arch v0.20.0 h1:dx1zTU0MAE98U+TQ8BLl7XsJbgze2WnNKF/8tGp/Q6c=
golang.org/x/arch v0.20.0/go.mod h1:bdwinDaKcfZUGpH09BB7ZmOfhalA8lQdzl62l8gGWsk=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU= golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
golang.org/x/crypto v0.12.0 h1:tFM/ta59kqch6LlvYnPa0yx5a83cL2nHflFhYKvv9Yk= golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw= golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc=
golang.org/x/exp v0.0.0-20230905200255-921286631fa9 h1:GoHiUyI/Tp2nVkLI2mCxVkOjsbSXD66ic0XW0js0R9g= golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg=
golang.org/x/exp v0.0.0-20230905200255-921286631fa9/go.mod h1:S2oDrQGGwySpoQPVqRShND87VCbxmc6bL1Yd2oYrm6k= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4=
golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc=
golang.org/x/exp v0.0.0-20240119083558-1b970713d09a h1:Q8/wZp0KX97QFTc2ywcOE0YRjZPVIx+MXInMzdvQqcA=
golang.org/x/exp v0.0.0-20240119083558-1b970713d09a/go.mod h1:idGWGoKP1toJGkd5/ig9ZLuPcZBC3ewk7SzmH0uou08=
golang.org/x/exp v0.0.0-20250718183923-645b1fa84792 h1:R9PFI6EUdfVKgwKjZef7QIwGcBKu86OEFpJ9nUEP2l4=
golang.org/x/exp v0.0.0-20250718183923-645b1fa84792/go.mod h1:A+z0yzpGtvnG90cToK5n2tu8UJVP2XUATh+r+sfOOOc=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.14.0 h1:dGoOF9QVLYng8IHTm7BAyWqCqSheQ5pYWGhzW00YJr0=
golang.org/x/mod v0.14.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.27.0 h1:kb+q2PyFnEADO2IEF935ehFUXlWiNjJWtRNgBLSfbxQ=
golang.org/x/mod v0.27.0/go.mod h1:rWI627Fq0DEoudcK+MBkNkCe0EetEaDSwJJkCcjpazc=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210916014120-12bc252f5db8/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.14.0 h1:BONx9s002vGdD9umnlX1Po8vOZmrgH34qlHcD1MfK14= golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo=
golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE=
golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@@ -182,42 +407,123 @@ golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0 h1:CM0HF96J0hcLAwsHPJZjfdNzs0gftsLfgKt57wWHJ0o= golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU=
golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI=
golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.12.0 h1:k+n5B8goJNdU7hSvEtMUz3d1Q6D/XW4COJSJR6fN0mc= golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng=
golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/tools v0.17.0 h1:FvmRgNOcs3kOa+T20R1uhfP9F6HgG2mfxDv1vrx1Htc=
golang.org/x/tools v0.17.0/go.mod h1:xsh6VxdV005rRVaS6SSAf9oiAqljS7UZUacMZ8Bnsps=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
golang.org/x/tools v0.36.0 h1:kWS0uv/zsvHEle1LbV5LE8QujrxB3wfQyxHfhOk0Qkg=
golang.org/x/tools v0.36.0/go.mod h1:WBDiHKJK8YgLHlcQPYQzNCkUxUypCaa5ZegCVutKm+s=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/genproto/googleapis/rpc v0.0.0-20231106174013-bbf56f31fb17 h1:Jyp0Hsi0bmHXG6k9eATXoYtjd6e2UzZ1SCn/wIupY14=
google.golang.org/genproto/googleapis/rpc v0.0.0-20231106174013-bbf56f31fb17/go.mod h1:oQ5rr10WTTMvP4A36n8JpR1OrO1BEiV4f78CneXZxkA=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250324211829-b45e905df463 h1:e0AIkUUhxyBKh6ssZNrAMeqhA7RKUj42346d1y02i2g=
google.golang.org/grpc v1.59.0 h1:Z5Iec2pjwb+LEOqzpB2MR12/eKFhDPhuqW91O+4bwUk=
google.golang.org/grpc v1.59.0/go.mod h1:aUPDwccQo6OTjy7Hct4AfBPD1GptF4fyUjIkQ9YtF98=
google.golang.org/grpc v1.71.0 h1:kF77BGdPTQ4/JZWMlb9VpJ5pa25aqvVqogsxNHHdeBg=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8= google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I=
google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
google.golang.org/protobuf v1.36.7 h1:IgrO7UwFQGJdRNXH/sQux4R1Dj1WAKcLElzeeRaXV2A=
google.golang.org/protobuf v1.36.7/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/natefinch/lumberjack.v2 v2.2.1 h1:bBRl1b0OH9s/DuPhuXpNl+VtCaJXFZ5/uEFST95x9zc=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/natefinch/lumberjack.v2 v2.2.1/go.mod h1:YD8tP3GAjkrDg1eZH7EGmyESg/lsYskCTPBJVb9jqSc=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
howett.net/plist v1.0.0 h1:7CrbWYbPPO/PyNy38b2EB/+gYbjCe2DXBxgtOOZbSQM=
howett.net/plist v1.0.0/go.mod h1:lqaXoTrLY4hg8tnEzNru53gicrbv7rrk+2xJA/7hw9g=
howett.net/plist v1.0.1 h1:37GdZ8tP09Q35o9ych3ehygcsL+HqKSwzctveSlarvM=
lukechampine.com/uint128 v1.3.0 h1:cDdUVfRwDUDovz610ABgFD17nXD4/uDgVHl2sC3+sbo=
lukechampine.com/uint128 v1.3.0/go.mod h1:c4eWIwlEGaxC/+H1VguhU4PHXNWDCDMUlWdIWl2j1gk=
modernc.org/cc/v3 v3.41.0 h1:QoR1Sn3YWlmA1T4vLaKZfawdVtSiGx8H+cEojbC7v1Q=
modernc.org/cc/v3 v3.41.0/go.mod h1:Ni4zjJYJ04CDOhG7dn640WGfwBzfE0ecX8TyMB0Fv0Y=
modernc.org/ccgo/v3 v3.16.15 h1:KbDR3ZAVU+wiLyMESPtbtE/Add4elztFyfsWoNTgxS0=
modernc.org/ccgo/v3 v3.16.15/go.mod h1:yT7B+/E2m43tmMOT51GMoM98/MtHIcQQSleGnddkUNI=
modernc.org/ccgo/v3 v3.17.0 h1:o3OmOqx4/OFnl4Vm3G8Bgmqxnvxnh0nbxeT5p/dWChA=
modernc.org/ccgo/v3 v3.17.0/go.mod h1:Sg3fwVpmLvCUTaqEUjiBDAvshIaKDB0RXaf+zgqFu8I=
modernc.org/ccorpus v1.11.6 h1:J16RXiiqiCgua6+ZvQot4yUuUy8zxgqbqEEUuGPlISk=
modernc.org/ccorpus v1.11.6/go.mod h1:2gEUTrWqdpH2pXsmTM1ZkjeSrUWDpjMu2T6m29L/ErQ=
modernc.org/httpfs v1.0.6 h1:AAgIpFZRXuYnkjftxTAZwMIiwEqAfk8aVB2/oA6nAeM=
modernc.org/httpfs v1.0.6/go.mod h1:7dosgurJGp0sPaRanU53W4xZYKh14wfzX420oZADeHM=
modernc.org/libc v1.40.7 h1:oeLS0G067ZqUu+v143Dqad0btMfKmNS7SuOsnkq0Ysg=
modernc.org/libc v1.40.7/go.mod h1:YAXkAZ8ktnkCKaN9sw/UDeUVkGYJ/YquGO4FTi5nmHE=
modernc.org/libc v1.66.6 h1:RyQpwAhM/19nXD8y3iejM/AjmKwY2TjxZTlUWTsWw2U=
modernc.org/libc v1.66.6/go.mod h1:j8z0EYAuumoMQ3+cWXtmw6m+LYn3qm8dcZDFtFTSq+M=
modernc.org/mathutil v1.6.0 h1:fRe9+AmYlaej+64JsEEhoWuAYBkOtQiMEU7n/XgfYi4=
modernc.org/mathutil v1.6.0/go.mod h1:Ui5Q9q1TR2gFm0AQRqQUaBWFLAhQpCwNcuhBOSedWPo=
modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU=
modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg=
modernc.org/memory v1.7.2 h1:Klh90S215mmH8c9gO98QxQFsY+W451E8AnzjoE2ee1E=
modernc.org/memory v1.7.2/go.mod h1:NO4NVCQy0N7ln+T9ngWqOQfi7ley4vpwvARR+Hjw95E=
modernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI=
modernc.org/memory v1.11.0/go.mod h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw=
modernc.org/opt v0.1.3 h1:3XOZf2yznlhC+ibLltsDGzABUGVx8J6pnFMS3E4dcq4=
modernc.org/opt v0.1.3/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0=
modernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8=
modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns=
modernc.org/sqlite v1.28.0 h1:Zx+LyDDmXczNnEQdvPuEfcFVA2ZPyaD7UCZDjef3BHQ=
modernc.org/sqlite v1.28.0/go.mod h1:Qxpazz0zH8Z1xCFyi5GSL3FzbtZ3fvbjmywNogldEW0=
modernc.org/sqlite v1.38.2 h1:Aclu7+tgjgcQVShZqim41Bbw9Cho0y/7WzYptXqkEek=
modernc.org/sqlite v1.38.2/go.mod h1:cPTJYSlgg3Sfg046yBShXENNtPrWrDX8bsbAQBzgQ5E=
modernc.org/strutil v1.2.0 h1:agBi9dp1I+eOnxXeiZawM8F4LawKv4NzGWSaLfyeNZA=
modernc.org/strutil v1.2.0/go.mod h1:/mdcBmfOibveCTBxUl5B5l6W+TTH1FXPLHZE6bTosX0=
modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0=
modernc.org/strutil v1.2.1/go.mod h1:EHkiggD70koQxjVdSBM3JKM7k6L0FbGE5eymy9i3B9A=
modernc.org/tcl v1.15.2 h1:C4ybAYCGJw968e+Me18oW55kD/FexcHbqH2xak1ROSY=
modernc.org/tcl v1.15.2/go.mod h1:3+k/ZaEbKrC8ePv8zJWPtBSW0V7Gg9g8rkmhI1Kfs3c=
modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=
modernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM=
modernc.org/z v1.7.3 h1:zDJf6iHjrnB+WRD88stbXokugjyc0/pB91ri1gO6LZY=
modernc.org/z v1.7.3/go.mod h1:Ipv4tsdxZRbQyLq9Q1M6gdbkxYzdlrciF2Hi/lS7nWE=
nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50= nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50=
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=

View File

@@ -3,8 +3,6 @@ package graph
import ( import (
"fmt" "fmt"
"math" "math"
"reichard.io/bbank/database"
) )
type SVGGraphPoint struct { type SVGGraphPoint struct {
@@ -28,12 +26,12 @@ type SVGBezierOpposedLine struct {
Angle int Angle int
} }
func GetSVGGraphData(inputData []database.GetDailyReadStatsRow, svgWidth int, svgHeight int) SVGGraphData { func GetSVGGraphData(inputData []int64, svgWidth int, svgHeight int) SVGGraphData {
// Derive Height // Derive Height
var maxHeight int = 0 var maxHeight int = 0
for _, item := range inputData { for _, item := range inputData {
if int(item.MinutesRead) > maxHeight { if int(item) > maxHeight {
maxHeight = int(item.MinutesRead) maxHeight = int(item)
} }
} }
@@ -55,7 +53,7 @@ func GetSVGGraphData(inputData []database.GetDailyReadStatsRow, svgWidth int, sv
var maxBY int = 0 var maxBY int = 0
var minBX int = 0 var minBX int = 0
for idx, item := range inputData { for idx, item := range inputData {
itemSize := int(float32(item.MinutesRead) * sizeRatio) itemSize := int(float32(item) * sizeRatio)
itemY := svgHeight - itemSize itemY := svgHeight - itemSize
lineX := (idx + 1) * blockOffset lineX := (idx + 1) * blockOffset
barPoints = append(barPoints, SVGGraphPoint{ barPoints = append(barPoints, SVGGraphPoint{
@@ -103,9 +101,6 @@ func getSVGBezierOpposedLine(pointA SVGGraphPoint, pointB SVGGraphPoint) SVGBezi
Length: int(math.Sqrt(math.Pow(lengthX, 2) + math.Pow(lengthY, 2))), Length: int(math.Sqrt(math.Pow(lengthX, 2) + math.Pow(lengthY, 2))),
Angle: int(math.Atan2(lengthY, lengthX)), Angle: int(math.Atan2(lengthY, lengthX)),
} }
// length = Math.sqrt(Math.pow(lengthX, 2) + Math.pow(lengthY, 2)),
// angle = Math.atan2(lengthY, lengthX)
} }
func getSVGBezierControlPoint(currentPoint *SVGGraphPoint, prevPoint *SVGGraphPoint, nextPoint *SVGGraphPoint, isReverse bool) SVGGraphPoint { func getSVGBezierControlPoint(currentPoint *SVGGraphPoint, prevPoint *SVGGraphPoint, nextPoint *SVGGraphPoint, isReverse bool) SVGGraphPoint {
@@ -120,7 +115,7 @@ func getSVGBezierControlPoint(currentPoint *SVGGraphPoint, prevPoint *SVGGraphPo
// Modifiers // Modifiers
var smoothingRatio float64 = 0.2 var smoothingRatio float64 = 0.2
var directionModifier float64 = 0 var directionModifier float64 = 0
if isReverse == true { if isReverse {
directionModifier = math.Pi directionModifier = math.Pi
} }

33
graph/graph_test.go Normal file
View File

@@ -0,0 +1,33 @@
package graph
import (
"testing"
)
func TestGetSVGGraphData(t *testing.T) {
inputPoints := []int64{10, 90, 50, 5, 10, 5, 70, 60, 50, 90}
svgData := GetSVGGraphData(inputPoints, 500, 100)
expect := "M 50,95 C63,95 80,50 100,50 C120,50 128,73 150,73 C172,73 180,98 200,98 C220,98 230,95 250,95 C270,95 279,98 300,98 C321,98 330,62 350,62 C370,62 380,67 400,67 C420,67 430,73 450,73 C470,73 489,50 500,50"
if svgData.BezierPath != expect {
t.Fatalf(`Expected: %v, Got: %v`, expect, svgData.BezierPath)
}
expect = "L 500,98 L 50,98 Z"
if svgData.BezierFill != expect {
t.Fatalf(`Expected: %v, Got: %v`, expect, svgData.BezierFill)
}
if svgData.Width != 500 {
t.Fatalf(`Expected: %v, Got: %v`, 500, svgData.Width)
}
if svgData.Height != 100 {
t.Fatalf(`Expected: %v, Got: %v`, 100, svgData.Height)
}
if svgData.Offset != 50 {
t.Fatalf(`Expected: %v, Got: %v`, 50, svgData.Offset)
}
}

53
main.go
View File

@@ -1,34 +1,31 @@
package main package main
import ( import (
"embed"
"io/fs"
"os" "os"
"os/signal" "os/signal"
"syscall"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
"github.com/urfave/cli/v2" "github.com/urfave/cli/v2"
"reichard.io/bbank/server" "reichard.io/antholume/config"
"reichard.io/antholume/server"
) )
type UTCFormatter struct { //go:embed templates/* assets/*
log.Formatter var embeddedAssets embed.FS
}
func (u UTCFormatter) Format(e *log.Entry) ([]byte, error) {
e.Time = e.Time.UTC()
return u.Formatter.Format(e)
}
func main() { func main() {
log.SetFormatter(UTCFormatter{&log.TextFormatter{FullTimestamp: true}})
app := &cli.App{ app := &cli.App{
Name: "Book Bank", Name: "AnthoLume",
Usage: "A self hosted e-book progress tracker.", Usage: "A self hosted e-book progress tracker.",
EnableBashCompletion: true,
Commands: []*cli.Command{ Commands: []*cli.Command{
{ {
Name: "serve", Name: "serve",
Aliases: []string{"s"}, Aliases: []string{"s"},
Usage: "Start Book Bank web server.", Usage: "Start AnthoLume web server.",
Action: cmdServer, Action: cmdServer,
}, },
}, },
@@ -40,17 +37,29 @@ func main() {
} }
func cmdServer(ctx *cli.Context) error { func cmdServer(ctx *cli.Context) error {
log.Info("Starting Book Bank Server") var assets fs.FS = embeddedAssets
server := server.NewServer()
server.StartServer()
c := make(chan os.Signal, 1) // Load config
signal.Notify(c, os.Interrupt) c := config.Load()
<-c if c.Version == "develop" {
assets = os.DirFS("./")
}
log.Info("Stopping Server") log.Info("Starting AnthoLume Server")
server.StopServer()
log.Info("Server Stopped") // Create notify channel
signals := make(chan os.Signal, 1)
signal.Notify(signals, os.Interrupt, syscall.SIGTERM)
// Start server
s := server.New(c, assets)
s.Start()
// Wait & close
<-signals
s.Stop()
// Stop server
os.Exit(0) os.Exit(0)
return nil return nil

View File

@@ -0,0 +1,110 @@
{
"kind": "books#volume",
"id": "ZxwpakTv_MIC",
"etag": "mhqr3GsebaQ",
"selfLink": "https://www.googleapis.com/books/v1/volumes/ZxwpakTv_MIC",
"volumeInfo": {
"title": "Alice in Wonderland",
"authors": [
"Lewis Carroll"
],
"publisher": "The Floating Press",
"publishedDate": "2009-01-01",
"description": "Alice in Wonderland (also known as Alice's Adventures in Wonderland), from 1865, is the peculiar and imaginative tale of a girl who falls down a rabbit-hole into a bizarre world of eccentric and unusual creatures. Lewis Carroll's prominent example of the genre of \"literary nonsense\" has endured in popularity with its clever way of playing with logic and a narrative structure that has influence generations of fiction writing.",
"industryIdentifiers": [
{
"type": "ISBN_10",
"identifier": "1877527815"
},
{
"type": "ISBN_13",
"identifier": "9781877527814"
}
],
"readingModes": {
"text": true,
"image": false
},
"pageCount": 104,
"printedPageCount": 112,
"printType": "BOOK",
"categories": [
"Fiction / Classics",
"Juvenile Fiction / General"
],
"averageRating": 5,
"ratingsCount": 1,
"maturityRating": "NOT_MATURE",
"allowAnonLogging": true,
"contentVersion": "0.2.3.0.preview.2",
"panelizationSummary": {
"containsEpubBubbles": false,
"containsImageBubbles": false
},
"imageLinks": {
"smallThumbnail": "http://books.google.com/books/content?id=ZxwpakTv_MIC&printsec=frontcover&img=1&zoom=5&edge=curl&imgtk=AFLRE71e5b-TeAKTiPSvXNUPeUi8rItzur2xSzwH8QU3qjKH0A2opmoq1o5I9RqJFt1BtcCCqILhnYRcB2aFLJmEvom11gx3Qn3PNN1iBLj2H5y2JHjM8wIwGT7iWFQmEn0Od7s6sOdk&source=gbs_api",
"thumbnail": "http://books.google.com/books/content?id=ZxwpakTv_MIC&printsec=frontcover&img=1&zoom=1&edge=curl&imgtk=AFLRE70QORt9J_DmKJgfyf9UEjQkdDMZ0qAu0GP315a1Q4CRS3snEjKnJJO2fYFdxjMwsSpmHoXDFPZbsy4gw-kMvF7lL8LtwxGbJGlfETHw_jbQBKBlKTrneK4XFvvV-EXNrZRgylxj&source=gbs_api",
"small": "http://books.google.com/books/content?id=ZxwpakTv_MIC&printsec=frontcover&img=1&zoom=2&edge=curl&imgtk=AFLRE70r1pAUt6VhuEEW8vXFhu8LvKln3yj0mdlaWPO4ZQuODLFQnH0fTebKMMX4ANR5i4PtC0oaI48XkwF-EdzlEM1WmUcR5383N4kRMXcta_i9nmb2y38dnh3hObwQW5VoAxbc9psn&source=gbs_api",
"medium": "http://books.google.com/books/content?id=ZxwpakTv_MIC&printsec=frontcover&img=1&zoom=3&edge=curl&imgtk=AFLRE7019EVuXvhzbhmtbz1QFh-ajB6kTKRHGhqijFf8big_GPRMMdpCdKlklFbkCfXvy8F64t5NKlThUHb3tFP-51bbDXkrVErFbCqKGzGnDSSm8cewqT8HiYDNHqn0hXYnuYvN4vYf&source=gbs_api",
"large": "http://books.google.com/books/content?id=ZxwpakTv_MIC&printsec=frontcover&img=1&zoom=4&edge=curl&imgtk=AFLRE72I15XZqp_8c8BAj4EskxkdC6nQz8F0Fs6VJhkykwIqfjzwuM34tUSQa3UnMGbx-UYjZjSLmCNFlePS8aR7yy-0UP9BRnYD-h5Qbesnnt_xdOb3u7Wdiobi6VbciNCBwUwbCyeH&source=gbs_api",
"extraLarge": "http://books.google.com/books/content?id=ZxwpakTv_MIC&printsec=frontcover&img=1&zoom=6&edge=curl&imgtk=AFLRE70rC6ktY6U0K_hqG1HxPl_9hMjpKb10p9DryVIwQgUjoJfWQOjpNA3EQ-5yk167yYDlO27gylqNAdJBYWu7ZHr3GuqkjTDpXjDvzBBppVyWaVNxKwhOz3gfJ-gzM6cC4kLHP26R&source=gbs_api"
},
"language": "en",
"previewLink": "http://books.google.com/books?id=ZxwpakTv_MIC&hl=&source=gbs_api",
"infoLink": "https://play.google.com/store/books/details?id=ZxwpakTv_MIC&source=gbs_api",
"canonicalVolumeLink": "https://play.google.com/store/books/details?id=ZxwpakTv_MIC"
},
"layerInfo": {
"layers": [
{
"layerId": "geo",
"volumeAnnotationsVersion": "2"
}
]
},
"saleInfo": {
"country": "US",
"saleability": "FOR_SALE",
"isEbook": true,
"listPrice": {
"amount": 3.99,
"currencyCode": "USD"
},
"retailPrice": {
"amount": 3.99,
"currencyCode": "USD"
},
"buyLink": "https://play.google.com/store/books/details?id=ZxwpakTv_MIC&rdid=book-ZxwpakTv_MIC&rdot=1&source=gbs_api",
"offers": [
{
"finskyOfferType": 1,
"listPrice": {
"amountInMicros": 3990000,
"currencyCode": "USD"
},
"retailPrice": {
"amountInMicros": 3990000,
"currencyCode": "USD"
},
"giftable": true
}
]
},
"accessInfo": {
"country": "US",
"viewability": "PARTIAL",
"embeddable": true,
"publicDomain": false,
"textToSpeechPermission": "ALLOWED",
"epub": {
"isAvailable": true,
"acsTokenLink": "http://books.google.com/books/download/Alice_in_Wonderland-sample-epub.acsm?id=ZxwpakTv_MIC&format=epub&output=acs4_fulfillment_token&dl_type=sample&source=gbs_api"
},
"pdf": {
"isAvailable": false
},
"webReaderLink": "http://play.google.com/books/reader?id=ZxwpakTv_MIC&hl=&source=gbs_api",
"accessViewStatus": "SAMPLE",
"quoteSharingAllowed": false
}
}

View File

@@ -0,0 +1,105 @@
{
"kind": "books#volumes",
"totalItems": 1,
"items": [
{
"kind": "books#volume",
"id": "ZxwpakTv_MIC",
"etag": "F2eR9VV6VwQ",
"selfLink": "https://www.googleapis.com/books/v1/volumes/ZxwpakTv_MIC",
"volumeInfo": {
"title": "Alice in Wonderland",
"authors": [
"Lewis Carroll"
],
"publisher": "The Floating Press",
"publishedDate": "2009-01-01",
"description": "Alice in Wonderland (also known as Alice's Adventures in Wonderland), from 1865, is the peculiar and imaginative tale of a girl who falls down a rabbit-hole into a bizarre world of eccentric and unusual creatures. Lewis Carroll's prominent example of the genre of \"literary nonsense\" has endured in popularity with its clever way of playing with logic and a narrative structure that has influence generations of fiction writing.",
"industryIdentifiers": [
{
"type": "ISBN_13",
"identifier": "9781877527814"
},
{
"type": "ISBN_10",
"identifier": "1877527815"
}
],
"readingModes": {
"text": true,
"image": false
},
"pageCount": 104,
"printType": "BOOK",
"categories": [
"Fiction"
],
"averageRating": 5,
"ratingsCount": 1,
"maturityRating": "NOT_MATURE",
"allowAnonLogging": true,
"contentVersion": "0.2.3.0.preview.2",
"panelizationSummary": {
"containsEpubBubbles": false,
"containsImageBubbles": false
},
"imageLinks": {
"smallThumbnail": "http://books.google.com/books/content?id=ZxwpakTv_MIC&printsec=frontcover&img=1&zoom=5&edge=curl&source=gbs_api",
"thumbnail": "http://books.google.com/books/content?id=ZxwpakTv_MIC&printsec=frontcover&img=1&zoom=1&edge=curl&source=gbs_api"
},
"language": "en",
"previewLink": "http://books.google.com/books?id=ZxwpakTv_MIC&printsec=frontcover&dq=isbn:1877527815&hl=&cd=1&source=gbs_api",
"infoLink": "https://play.google.com/store/books/details?id=ZxwpakTv_MIC&source=gbs_api",
"canonicalVolumeLink": "https://play.google.com/store/books/details?id=ZxwpakTv_MIC"
},
"saleInfo": {
"country": "US",
"saleability": "FOR_SALE",
"isEbook": true,
"listPrice": {
"amount": 3.99,
"currencyCode": "USD"
},
"retailPrice": {
"amount": 3.99,
"currencyCode": "USD"
},
"buyLink": "https://play.google.com/store/books/details?id=ZxwpakTv_MIC&rdid=book-ZxwpakTv_MIC&rdot=1&source=gbs_api",
"offers": [
{
"finskyOfferType": 1,
"listPrice": {
"amountInMicros": 3990000,
"currencyCode": "USD"
},
"retailPrice": {
"amountInMicros": 3990000,
"currencyCode": "USD"
},
"giftable": true
}
]
},
"accessInfo": {
"country": "US",
"viewability": "PARTIAL",
"embeddable": true,
"publicDomain": false,
"textToSpeechPermission": "ALLOWED",
"epub": {
"isAvailable": true,
"acsTokenLink": "http://books.google.com/books/download/Alice_in_Wonderland-sample-epub.acsm?id=ZxwpakTv_MIC&format=epub&output=acs4_fulfillment_token&dl_type=sample&source=gbs_api"
},
"pdf": {
"isAvailable": false
},
"webReaderLink": "http://play.google.com/books/reader?id=ZxwpakTv_MIC&hl=&source=gbs_api",
"accessViewStatus": "SAMPLE",
"quoteSharingAllowed": false
},
"searchInfo": {
"textSnippet": "Alice in Wonderland (also known as Alice&#39;s Adventures in Wonderland), from 1865, is the peculiar and imaginative tale of a girl who falls down a rabbit-hole into a bizarre world of eccentric and unusual creatures."
}
}
]
}

View File

@@ -1,330 +1,65 @@
/*
Package epub provides basic support for reading EPUB archives.
Adapted from: https://github.com/taylorskalyo/goreader
*/
package metadata package metadata
import ( import (
"archive/zip" "regexp"
"bytes"
"encoding/xml"
"errors"
"io"
"os"
"path"
"strings" "strings"
"golang.org/x/net/html" "github.com/PuerkitoBio/goquery"
"github.com/taylorskalyo/goreader/epub"
) )
const containerPath = "META-INF/container.xml" func getEPUBMetadata(filepath string) (*MetadataInfo, error) {
rc, err := epub.OpenReader(filepath)
var (
// ErrNoRootfile occurs when there are no rootfile entries found in
// container.xml.
ErrNoRootfile = errors.New("epub: no rootfile found in container")
// ErrBadRootfile occurs when container.xml references a rootfile that does
// not exist in the zip.
ErrBadRootfile = errors.New("epub: container references non-existent rootfile")
// ErrNoItemref occurrs when a content.opf contains a spine without any
// itemref entries.
ErrNoItemref = errors.New("epub: no itemrefs found in spine")
// ErrBadItemref occurs when an itemref entry in content.opf references an
// item that does not exist in the manifest.
ErrBadItemref = errors.New("epub: itemref references non-existent item")
// ErrBadManifest occurs when a manifest in content.opf references an item
// that does not exist in the zip.
ErrBadManifest = errors.New("epub: manifest references non-existent item")
)
// Reader represents a readable epub file.
type Reader struct {
Container
files map[string]*zip.File
}
// ReadCloser represents a readable epub file that can be closed.
type ReadCloser struct {
Reader
f *os.File
}
// Rootfile contains the location of a content.opf package file.
type Rootfile struct {
FullPath string `xml:"full-path,attr"`
Package
}
// Container serves as a directory of Rootfiles.
type Container struct {
Rootfiles []*Rootfile `xml:"rootfiles>rootfile"`
}
// Package represents an epub content.opf file.
type Package struct {
Metadata
Manifest
Spine
}
// Metadata contains publishing information about the epub.
type Metadata struct {
Title string `xml:"metadata>title"`
Language string `xml:"metadata>language"`
Identifier string `xml:"metadata>idenifier"`
Creator string `xml:"metadata>creator"`
Contributor string `xml:"metadata>contributor"`
Publisher string `xml:"metadata>publisher"`
Subject string `xml:"metadata>subject"`
Description string `xml:"metadata>description"`
Event []struct {
Name string `xml:"event,attr"`
Date string `xml:",innerxml"`
} `xml:"metadata>date"`
Type string `xml:"metadata>type"`
Format string `xml:"metadata>format"`
Source string `xml:"metadata>source"`
Relation string `xml:"metadata>relation"`
Coverage string `xml:"metadata>coverage"`
Rights string `xml:"metadata>rights"`
}
// Manifest lists every file that is part of the epub.
type Manifest struct {
Items []Item `xml:"manifest>item"`
}
// Item represents a file stored in the epub.
type Item struct {
ID string `xml:"id,attr"`
HREF string `xml:"href,attr"`
MediaType string `xml:"media-type,attr"`
f *zip.File
}
// Spine defines the reading order of the epub documents.
type Spine struct {
Itemrefs []Itemref `xml:"spine>itemref"`
}
// Itemref points to an Item.
type Itemref struct {
IDREF string `xml:"idref,attr"`
*Item
}
// OpenEPUBReader will open the epub file specified by name and return a
// ReadCloser.
func OpenEPUBReader(name string) (*ReadCloser, error) {
f, err := os.Open(name)
if err != nil { if err != nil {
return nil, err return nil, err
} }
rc := new(ReadCloser)
rc.f = f
fi, err := f.Stat()
if err != nil {
f.Close()
return nil, err
}
z, err := zip.NewReader(f, fi.Size())
if err != nil {
return nil, err
}
if err = rc.init(z); err != nil {
return nil, err
}
return rc, nil
}
// NewReader returns a new Reader reading from ra, which is assumed to have the
// given size in bytes.
func NewReader(ra io.ReaderAt, size int64) (*Reader, error) {
z, err := zip.NewReader(ra, size)
if err != nil {
return nil, err
}
r := new(Reader)
if err = r.init(z); err != nil {
return nil, err
}
return r, nil
}
func (r *Reader) init(z *zip.Reader) error {
// Create a file lookup table
r.files = make(map[string]*zip.File)
for _, f := range z.File {
r.files[f.Name] = f
}
err := r.setContainer()
if err != nil {
return err
}
err = r.setPackages()
if err != nil {
return err
}
err = r.setItems()
if err != nil {
return err
}
return nil
}
// setContainer unmarshals the epub's container.xml file.
func (r *Reader) setContainer() error {
f, err := r.files[containerPath].Open()
if err != nil {
return err
}
var b bytes.Buffer
_, err = io.Copy(&b, f)
if err != nil {
return err
}
err = xml.Unmarshal(b.Bytes(), &r.Container)
if err != nil {
return err
}
if len(r.Container.Rootfiles) < 1 {
return ErrNoRootfile
}
return nil
}
// setPackages unmarshals each of the epub's content.opf package documents.
func (r *Reader) setPackages() error {
	for _, rf := range r.Container.Rootfiles {
		if r.files[rf.FullPath] == nil {
			return ErrBadRootfile
		}
		if err := r.setPackage(rf); err != nil {
			return err
		}
	}
	return nil
}

// setPackage reads and unmarshals a single rootfile's package document.
// Extracted as a helper so the opened zip entry is closed via defer on every
// iteration (previously each opened entry leaked).
func (r *Reader) setPackage(rf *Rootfile) error {
	f, err := r.files[rf.FullPath].Open()
	if err != nil {
		return err
	}
	defer f.Close()
	var b bytes.Buffer
	if _, err := io.Copy(&b, f); err != nil {
		return err
	}
	return xml.Unmarshal(b.Bytes(), &rf.Package)
}
// setItems associates Itemrefs with their respective Item and Items with
// their zip.File.
func (r *Reader) setItems() error {
	totalRefs := 0
	for _, rf := range r.Container.Rootfiles {
		// Index manifest items by ID and resolve each to its zip entry,
		// using the rootfile's directory as the base for relative HREFs.
		lookup := make(map[string]*Item, len(rf.Manifest.Items))
		base := path.Dir(rf.FullPath)
		for i := range rf.Manifest.Items {
			it := &rf.Manifest.Items[i]
			lookup[it.ID] = it
			it.f = r.files[path.Join(base, it.HREF)]
		}
		// Wire each spine itemref to its manifest item.
		for i := range rf.Spine.Itemrefs {
			ref := &rf.Spine.Itemrefs[i]
			ref.Item = lookup[ref.IDREF]
			if ref.Item == nil {
				return ErrBadItemref
			}
		}
		totalRefs += len(rf.Spine.Itemrefs)
	}
	if totalRefs < 1 {
		return ErrNoItemref
	}
	return nil
}
// Open returns a ReadCloser that provides access to the Items's contents.
// Multiple items may be read concurrently.
func (item *Item) Open() (r io.ReadCloser, err error) {
// item.f is nil when the manifest HREF did not resolve to a zip entry.
if item.f == nil {
return nil, ErrBadManifest
}
return item.f.Open()
}
// Close closes the epub file, rendering it unusable for I/O.
func (rc *ReadCloser) Close() {
// NOTE(review): the underlying Close error is discarded, so callers cannot
// observe failures releasing the file handle — confirm this is acceptable.
rc.f.Close()
}
// CountWords returns the total number of words across every document in the
// rootfile's spine. Items that fail to open are skipped rather than counted.
func (rf *Rootfile) CountWords() int64 {
	var completeCount int64
	for _, item := range rf.Spine.Itemrefs {
		f, err := item.Open()
		if err != nil {
			// Previously the error was discarded and the nil reader caused
			// a panic inside the tokenizer; skip unreadable items instead.
			continue
		}
		tokenizer := html.NewTokenizer(f)
		completeCount += countWords(*tokenizer)
		// Close each spine document once counted (previously leaked).
		f.Close()
	}
	return completeCount
}
// countWords tokenizes an (X)HTML document and returns the number of
// whitespace-separated words found in its text nodes. It returns 0 when the
// tokenizer reports any error other than io.EOF.
func countWords(tokenizer html.Tokenizer) int64 {
	var totalWords int64
	for {
		switch tokenizer.Next() {
		case html.TextToken:
			// Text is only valid immediately after a TextToken; using it
			// avoids building a full Token struct for every tag token.
			totalWords += int64(len(strings.Fields(string(tokenizer.Text()))))
		case html.ErrorToken:
			if tokenizer.Err() == io.EOF {
				return totalWords
			}
			return 0
		}
	}
}
/*
func main() {
rc, err := OpenEPUBReader("test.epub")
if err != nil {
log.Fatal(err)
}
rf := rc.Rootfiles[0] rf := rc.Rootfiles[0]
totalWords := rf.CountWords() parsedMetadata := &MetadataInfo{
log.Info("WOAH WORDS:", totalWords) Type: TYPE_EPUB,
Title: &rf.Title,
Author: &rf.Creator,
Description: &rf.Description,
}
// Parse Possible ISBN
if rf.Source != "" {
replaceRE := regexp.MustCompile(`[-\s]`)
possibleISBN := replaceRE.ReplaceAllString(rf.Source, "")
// ISBN Matches
isbn13RE := regexp.MustCompile(`(?P<ISBN>\d{13})`)
isbn10RE := regexp.MustCompile(`(?P<ISBN>\d{10})`)
isbn13Matches := isbn13RE.FindStringSubmatch(possibleISBN)
isbn10Matches := isbn10RE.FindStringSubmatch(possibleISBN)
if len(isbn13Matches) > 0 {
isbnIndex := isbn13RE.SubexpIndex("ISBN")
parsedMetadata.ISBN13 = &isbn13Matches[isbnIndex]
} else if len(isbn10Matches) > 0 {
isbnIndex := isbn10RE.SubexpIndex("ISBN")
parsedMetadata.ISBN10 = &isbn10Matches[isbnIndex]
}
}
return parsedMetadata, nil
}
func countEPUBWords(filepath string) (int64, error) {
rc, err := epub.OpenReader(filepath)
if err != nil {
return 0, err
}
rf := rc.Rootfiles[0]
var completeCount int64
for _, item := range rf.Itemrefs {
f, _ := item.Open()
doc, _ := goquery.NewDocumentFromReader(f)
doc.Find("script, style, noscript, iframe").Remove()
words := len(strings.Fields(doc.Text()))
completeCount = completeCount + int64(words)
}
return completeCount, nil
} }
*/

View File

@@ -121,34 +121,34 @@ func getGBooksMetadata(metadataSearch MetadataInfo) ([]MetadataInfo, error) {
func saveGBooksCover(gbid string, coverFilePath string, overwrite bool) error { func saveGBooksCover(gbid string, coverFilePath string, overwrite bool) error {
// Validate File Doesn't Exists // Validate File Doesn't Exists
_, err := os.Stat(coverFilePath) _, err := os.Stat(coverFilePath)
if err == nil && overwrite == false { if err == nil && !overwrite {
log.Warn("[saveGBooksCover] File Alreads Exists") log.Warn("File Alreads Exists")
return nil return nil
} }
// Create File // Create File
out, err := os.Create(coverFilePath) out, err := os.Create(coverFilePath)
if err != nil { if err != nil {
log.Error("[saveGBooksCover] File Create Error") log.Error("File Create Error")
return errors.New("File Failure") return errors.New("File Failure")
} }
defer out.Close() defer out.Close()
// Download File // Download File
log.Info("[saveGBooksCover] Downloading Cover") log.Info("Downloading Cover")
coverURL := fmt.Sprintf(GBOOKS_GBID_COVER_URL, gbid) coverURL := fmt.Sprintf(GBOOKS_GBID_COVER_URL, gbid)
resp, err := http.Get(coverURL) resp, err := http.Get(coverURL)
if err != nil { if err != nil {
log.Error("[saveGBooksCover] Cover URL API Failure") log.Error("Cover URL API Failure")
return errors.New("API Failure") return errors.New("API Failure")
} }
defer resp.Body.Close() defer resp.Body.Close()
// Copy File to Disk // Copy File to Disk
log.Info("[saveGBooksCover] Saving Cover") log.Info("Saving Cover")
_, err = io.Copy(out, resp.Body) _, err = io.Copy(out, resp.Body)
if err != nil { if err != nil {
log.Error("[saveGBooksCover] File Copy Error") log.Error("File Copy Error")
return errors.New("File Failure") return errors.New("File Failure")
} }
@@ -157,22 +157,22 @@ func saveGBooksCover(gbid string, coverFilePath string, overwrite bool) error {
func performSearchRequest(searchQuery string) (*gBooksQueryResponse, error) { func performSearchRequest(searchQuery string) (*gBooksQueryResponse, error) {
apiQuery := fmt.Sprintf(GBOOKS_QUERY_URL, searchQuery) apiQuery := fmt.Sprintf(GBOOKS_QUERY_URL, searchQuery)
log.Info("[performSearchRequest] Acquiring Metadata: ", apiQuery) log.Info("Acquiring Metadata: ", apiQuery)
resp, err := http.Get(apiQuery) resp, err := http.Get(apiQuery)
if err != nil { if err != nil {
log.Error("[performSearchRequest] Google Books Query URL API Failure") log.Error("Google Books Query URL API Failure")
return nil, errors.New("API Failure") return nil, errors.New("API Failure")
} }
parsedResp := gBooksQueryResponse{} parsedResp := gBooksQueryResponse{}
err = json.NewDecoder(resp.Body).Decode(&parsedResp) err = json.NewDecoder(resp.Body).Decode(&parsedResp)
if err != nil { if err != nil {
log.Error("[performSearchRequest] Google Books Query API Decode Failure") log.Error("Google Books Query API Decode Failure")
return nil, errors.New("API Failure") return nil, errors.New("API Failure")
} }
if len(parsedResp.Items) == 0 { if len(parsedResp.Items) == 0 {
log.Warn("[performSearchRequest] No Results") log.Warn("No Results")
return nil, errors.New("No Results") return nil, errors.New("No Results")
} }
@@ -182,17 +182,17 @@ func performSearchRequest(searchQuery string) (*gBooksQueryResponse, error) {
func performGBIDRequest(id string) (*gBooksQueryItem, error) { func performGBIDRequest(id string) (*gBooksQueryItem, error) {
apiQuery := fmt.Sprintf(GBOOKS_GBID_INFO_URL, id) apiQuery := fmt.Sprintf(GBOOKS_GBID_INFO_URL, id)
log.Info("[performGBIDRequest] Acquiring CoverID") log.Info("Acquiring CoverID")
resp, err := http.Get(apiQuery) resp, err := http.Get(apiQuery)
if err != nil { if err != nil {
log.Error("[performGBIDRequest] Cover URL API Failure") log.Error("Cover URL API Failure")
return nil, errors.New("API Failure") return nil, errors.New("API Failure")
} }
parsedResp := gBooksQueryItem{} parsedResp := gBooksQueryItem{}
err = json.NewDecoder(resp.Body).Decode(&parsedResp) err = json.NewDecoder(resp.Body).Decode(&parsedResp)
if err != nil { if err != nil {
log.Error("[performGBIDRequest] Google Books ID API Decode Failure") log.Error("Google Books ID API Decode Failure")
return nil, errors.New("API Failure") return nil, errors.New("API Failure")
} }

126
metadata/gbooks_test.go Normal file
View File

@@ -0,0 +1,126 @@
package metadata
import (
_ "embed"
"encoding/json"
"fmt"
"net/http"
"net/url"
"regexp"
"strings"
"testing"
"github.com/jarcoal/httpmock"
"github.com/stretchr/testify/assert"
)
//go:embed _test_files/gbooks_id_response.json
var idResp string
//go:embed _test_files/gbooks_query_response.json
var queryResp string
type details struct {
URLs []string
}
// hookAPI activates httpmock and registers a responder for the Google Books
// volumes API, returning a details struct that records every intercepted
// request URL. Requests carrying a "q" query parameter receive the embedded
// query fixture; all others receive the embedded ID fixture. Callers must
// defer httpmock.DeactivateAndReset().
func hookAPI() *details {
// Start HTTPMock
httpmock.Activate()
// Create details struct
d := &details{
URLs: []string{},
}
// Create Hook
matchRE := regexp.MustCompile(`^https://www\.googleapis\.com/books/v1/volumes.*`)
httpmock.RegisterRegexpResponder("GET", matchRE, func(req *http.Request) (*http.Response, error) {
// Append URL
d.URLs = append(d.URLs, req.URL.String())
// Get Raw Response
var rawResp string
if req.URL.Query().Get("q") != "" {
rawResp = queryResp
} else {
rawResp = idResp
}
// Convert to JSON Response
var responseData map[string]any
_ = json.Unmarshal([]byte(rawResp), &responseData)
// Return Response
return httpmock.NewJsonResponse(200, responseData)
})
return d
}
// TestGBooksGBIDMetadata verifies that metadata lookup by Google Books ID
// hits the GBID info endpoint and parses the mocked response.
func TestGBooksGBIDMetadata(t *testing.T) {
hookDetails := hookAPI()
defer httpmock.DeactivateAndReset()
GBID := "ZxwpakTv_MIC"
expectedURL := fmt.Sprintf(GBOOKS_GBID_INFO_URL, GBID)
metadataResp, err := getGBooksMetadata(MetadataInfo{ID: &GBID})
assert.Nil(t, err, "should not have error")
assert.Contains(t, hookDetails.URLs, expectedURL, "should have intercepted URL")
assert.Equal(t, 1, len(metadataResp), "should have single result")
mResult := metadataResp[0]
validateResult(t, &mResult)
}
// TestGBooksISBNQuery verifies that an ISBN10 search issues an "isbn:" query
// against the volumes endpoint and parses the mocked response.
func TestGBooksISBNQuery(t *testing.T) {
hookDetails := hookAPI()
defer httpmock.DeactivateAndReset()
ISBN10 := "1877527815"
expectedURL := fmt.Sprintf(GBOOKS_QUERY_URL, "isbn:"+ISBN10)
metadataResp, err := getGBooksMetadata(MetadataInfo{
ISBN10: &ISBN10,
})
assert.Nil(t, err, "should not have error")
assert.Contains(t, hookDetails.URLs, expectedURL, "should have intercepted URL")
assert.Equal(t, 1, len(metadataResp), "should have single result")
mResult := metadataResp[0]
validateResult(t, &mResult)
}
// TestGBooksTitleQuery verifies that a title search issues a URL-escaped
// free-text query against the volumes endpoint and returns results.
func TestGBooksTitleQuery(t *testing.T) {
hookDetails := hookAPI()
defer httpmock.DeactivateAndReset()
title := "Alice in Wonderland 1877527815"
expectedURL := fmt.Sprintf(GBOOKS_QUERY_URL, url.QueryEscape(strings.TrimSpace(title)))
metadataResp, err := getGBooksMetadata(MetadataInfo{
Title: &title,
})
assert.Nil(t, err, "should not have error")
assert.Contains(t, hookDetails.URLs, expectedURL, "should have intercepted URL")
assert.NotEqual(t, 0, len(metadataResp), "should not have no results")
mResult := metadataResp[0]
validateResult(t, &mResult)
}
// validateResult asserts that the given metadata matches the canonical
// "Alice in Wonderland" fixture shared by the Google Books tests.
func validateResult(t *testing.T, m *MetadataInfo) {
	expectedTitle := "Alice in Wonderland"
	expectedAuthor := "Lewis Carroll"
	expectedDesc := "Alice in Wonderland (also known as Alice's Adventures in Wonderland), from 1865, is the peculiar and imaginative tale of a girl who falls down a rabbit-hole into a bizarre world of eccentric and unusual creatures. Lewis Carroll's prominent example of the genre of \"literary nonsense\" has endured in popularity with its clever way of playing with logic and a narrative structure that has influence generations of fiction writing."
	expectedISBN10 := "1877527815"
	expectedISBN13 := "9781877527814"
	assert.Equal(t, expectedTitle, *m.Title, "should have title")
	assert.Equal(t, expectedAuthor, *m.Author, "should have author")
	assert.Equal(t, expectedDesc, *m.Description, "should have description")
	assert.Equal(t, expectedISBN10, *m.ISBN10, "should have ISBN10")
	// Fixed copy-paste defect: this message previously said "ISBN10".
	assert.Equal(t, expectedISBN13, *m.ISBN13, "should have ISBN13")
}

View File

@@ -3,27 +3,47 @@ package metadata
import ( import (
"errors" "errors"
"fmt" "fmt"
"io"
"path/filepath" "path/filepath"
"github.com/gabriel-vasile/mimetype" "github.com/gabriel-vasile/mimetype"
"reichard.io/antholume/utils"
) )
type MetadataHandler func(string) (*MetadataInfo, error)
type DocumentType string
const (
TYPE_EPUB DocumentType = ".epub"
)
var extensionHandlerMap = map[DocumentType]MetadataHandler{
TYPE_EPUB: getEPUBMetadata,
}
type Source int type Source int
const ( const (
GBOOK Source = iota SOURCE_GBOOK Source = iota
OLIB SOURCE_OLIB
) )
type MetadataInfo struct { type MetadataInfo struct {
ID *string ID *string
MD5 *string
PartialMD5 *string
WordCount *int64
Title *string Title *string
Author *string Author *string
Description *string Description *string
ISBN10 *string ISBN10 *string
ISBN13 *string ISBN13 *string
Type DocumentType
} }
// Downloads the Google Books cover file and saves it to the provided directory.
func CacheCover(gbid string, coverDir string, documentID string, overwrite bool) (*string, error) { func CacheCover(gbid string, coverDir string, documentID string, overwrite bool) (*string, error) {
// Get Filepath // Get Filepath
coverFile := "." + filepath.Clean(fmt.Sprintf("/%s.jpg", documentID)) coverFile := "." + filepath.Clean(fmt.Sprintf("/%s.jpg", documentID))
@@ -39,34 +59,128 @@ func CacheCover(gbid string, coverDir string, documentID string, overwrite bool)
return &coverFile, nil return &coverFile, nil
} }
// Searches source for metadata based on the provided information.
func SearchMetadata(s Source, metadataSearch MetadataInfo) ([]MetadataInfo, error) { func SearchMetadata(s Source, metadataSearch MetadataInfo) ([]MetadataInfo, error) {
switch s { switch s {
case GBOOK: case SOURCE_GBOOK:
return getGBooksMetadata(metadataSearch) return getGBooksMetadata(metadataSearch)
case OLIB: case SOURCE_OLIB:
return nil, errors.New("Not implemented") return nil, errors.New("not implemented")
default: default:
return nil, errors.New("Not implemented") return nil, errors.New("not implemented")
} }
} }
func GetWordCount(filepath string) (int64, error) { // Returns the word count of the provided filepath. An error will be returned
// if the file is not supported.
func GetWordCount(filepath string) (*int64, error) {
fileMime, err := mimetype.DetectFile(filepath) fileMime, err := mimetype.DetectFile(filepath)
if err != nil { if err != nil {
return 0, err return nil, err
} }
if fileExtension := fileMime.Extension(); fileExtension == ".epub" { if fileExtension := fileMime.Extension(); fileExtension == ".epub" {
rc, err := OpenEPUBReader(filepath) totalWords, err := countEPUBWords(filepath)
if err != nil { if err != nil {
return 0, err return nil, err
} }
return &totalWords, nil
rf := rc.Rootfiles[0]
totalWords := rf.CountWords()
return totalWords, nil
} else { } else {
return 0, errors.New("Invalid Extension") return nil, fmt.Errorf("invalid extension: %s", fileExtension)
} }
} }
// GetMetadata returns the embedded metadata of the provided file together
// with its MD5, partial MD5, and word count. An error will be returned if
// the file is not supported or any derivation step fails.
func GetMetadata(filepath string) (*MetadataInfo, error) {
	// Detect Extension Type
	fileMime, err := mimetype.DetectFile(filepath)
	if err != nil {
		return nil, err
	}
	// Get Extension Type Metadata Handler
	fileExtension := fileMime.Extension()
	handler, ok := extensionHandlerMap[DocumentType(fileExtension)]
	if !ok {
		return nil, fmt.Errorf("invalid extension %s", fileExtension)
	}
	// Acquire Metadata — wrap with %w so callers can inspect the cause
	// (previously the underlying errors were silently dropped).
	metadataInfo, err := handler(filepath)
	if err != nil {
		return nil, fmt.Errorf("unable to acquire metadata: %w", err)
	}
	// Calculate Partial MD5
	partialMD5, err := utils.CalculatePartialMD5(filepath)
	if err != nil {
		return nil, fmt.Errorf("unable to calculate partial MD5: %w", err)
	}
	// Calculate Actual MD5
	MD5, err := utils.CalculateMD5(filepath)
	if err != nil {
		return nil, fmt.Errorf("unable to calculate MD5: %w", err)
	}
	// Calculate Word Count
	wordCount, err := GetWordCount(filepath)
	if err != nil {
		return nil, fmt.Errorf("unable to calculate word count: %w", err)
	}
	metadataInfo.WordCount = wordCount
	metadataInfo.PartialMD5 = partialMD5
	metadataInfo.MD5 = MD5
	return metadataInfo, nil
}
// GetDocumentType returns the document type of the provided filepath
// (e.g. ".epub"). An error will be returned if the file is not supported.
func GetDocumentType(filepath string) (*DocumentType, error) {
	// Sniff the mime type from the file contents
	fileMime, err := mimetype.DetectFile(filepath)
	if err != nil {
		return nil, err
	}
	// Map the sniffed extension onto a supported DocumentType
	docType, ok := ParseDocumentType(fileMime.Extension())
	if !ok {
		return nil, fmt.Errorf("filetype not supported")
	}
	return &docType, nil
}
// GetDocumentTypeReader returns the document type of the provided file
// reader (e.g. ".epub"). An error will be returned if the file is not
// supported.
func GetDocumentTypeReader(r io.Reader) (*DocumentType, error) {
	// Sniff the mime type from the reader contents
	fileMime, err := mimetype.DetectReader(r)
	if err != nil {
		return nil, err
	}
	// Map the sniffed extension onto a supported DocumentType
	docType, ok := ParseDocumentType(fileMime.Extension())
	if !ok {
		return nil, fmt.Errorf("filetype not supported")
	}
	return &docType, nil
}
// ParseDocumentType attempts to resolve a DocumentType from a filetype
// string. The boolean reports whether the input was recognized.
func ParseDocumentType(input string) (DocumentType, bool) {
	switch DocumentType(input) {
	case TYPE_EPUB:
		return TYPE_EPUB, true
	default:
		// Unknown filetype: zero value, not found.
		return "", false
	}
}

46
metadata/metadata_test.go Normal file
View File

@@ -0,0 +1,46 @@
package metadata
import (
"os"
"testing"
"github.com/stretchr/testify/assert"
)
// TestGetWordCount verifies the EPUB word counter against a known fixture.
func TestGetWordCount(t *testing.T) {
var desiredCount int64 = 30080
actualCount, err := countEPUBWords("../_test_files/alice.epub")
assert.Nil(t, err, "should have no error")
assert.Equal(t, desiredCount, actualCount, "should be correct word count")
}
// TestGetMetadata verifies embedded EPUB metadata extraction against the
// Alice in Wonderland test fixture.
func TestGetMetadata(t *testing.T) {
	desiredTitle := "Alice's Adventures in Wonderland / Illustrated by Arthur Rackham. With a Proem by Austin Dobson"
	desiredAuthor := "Lewis Carroll"
	desiredDescription := ""
	metadataInfo, err := GetMetadata("../_test_files/alice.epub")
	assert.Nil(t, err, "should have no error")
	assert.Equal(t, desiredTitle, *metadataInfo.Title, "should be correct title")
	assert.Equal(t, desiredAuthor, *metadataInfo.Author, "should be correct author")
	// Fixed copy-paste defect: this message previously said "author".
	assert.Equal(t, desiredDescription, *metadataInfo.Description, "should be correct description")
	assert.Equal(t, TYPE_EPUB, metadataInfo.Type, "should be correct type")
}
// TestGetExtension verifies document type detection from a file path.
func TestGetExtension(t *testing.T) {
docType, err := GetDocumentType("../_test_files/alice.epub")
assert.Nil(t, err, "should have no error")
assert.Equal(t, TYPE_EPUB, *docType)
}
func TestGetExtensionReader(t *testing.T) {
file, _ := os.Open("../_test_files/alice.epub")
docType, err := GetDocumentTypeReader(file)
assert.Nil(t, err, "should have no error")
assert.Equal(t, TYPE_EPUB, *docType)
}

View File

@@ -32,24 +32,24 @@ const OLIB_ISBN_LINK_URL string = "https://openlibrary.org/isbn/%s"
func GetCoverOLIDs(title *string, author *string) ([]string, error) { func GetCoverOLIDs(title *string, author *string) ([]string, error) {
if title == nil || author == nil { if title == nil || author == nil {
log.Error("[metadata] Invalid Search Query") log.Error("Invalid Search Query")
return nil, errors.New("Invalid Query") return nil, errors.New("Invalid Query")
} }
searchQuery := url.QueryEscape(fmt.Sprintf("%s %s", *title, *author)) searchQuery := url.QueryEscape(fmt.Sprintf("%s %s", *title, *author))
apiQuery := fmt.Sprintf(OLIB_QUERY_URL, searchQuery) apiQuery := fmt.Sprintf(OLIB_QUERY_URL, searchQuery)
log.Info("[metadata] Acquiring CoverID") log.Info("Acquiring CoverID")
resp, err := http.Get(apiQuery) resp, err := http.Get(apiQuery)
if err != nil { if err != nil {
log.Error("[metadata] Cover URL API Failure") log.Error("Cover URL API Failure")
return nil, errors.New("API Failure") return nil, errors.New("API Failure")
} }
target := oLibQueryResponse{} target := oLibQueryResponse{}
err = json.NewDecoder(resp.Body).Decode(&target) err = json.NewDecoder(resp.Body).Decode(&target)
if err != nil { if err != nil {
log.Error("[metadata] Cover URL API Decode Failure") log.Error("Cover URL API Decode Failure")
return nil, errors.New("API Failure") return nil, errors.New("API Failure")
} }
@@ -73,24 +73,24 @@ func DownloadAndSaveCover(coverID string, dirPath string) (*string, error) {
// Validate File Doesn't Exists // Validate File Doesn't Exists
_, err := os.Stat(safePath) _, err := os.Stat(safePath)
if err == nil { if err == nil {
log.Warn("[metadata] File Alreads Exists") log.Warn("File Alreads Exists")
return &safePath, nil return &safePath, nil
} }
// Create File // Create File
out, err := os.Create(safePath) out, err := os.Create(safePath)
if err != nil { if err != nil {
log.Error("[metadata] File Create Error") log.Error("File Create Error")
return nil, errors.New("File Failure") return nil, errors.New("File Failure")
} }
defer out.Close() defer out.Close()
// Download File // Download File
log.Info("[metadata] Downloading Cover") log.Info("Downloading Cover")
coverURL := fmt.Sprintf(OLIB_OLID_COVER_URL, coverID) coverURL := fmt.Sprintf(OLIB_OLID_COVER_URL, coverID)
resp, err := http.Get(coverURL) resp, err := http.Get(coverURL)
if err != nil { if err != nil {
log.Error("[metadata] Cover URL API Failure") log.Error("Cover URL API Failure")
return nil, errors.New("API Failure") return nil, errors.New("API Failure")
} }
defer resp.Body.Close() defer resp.Body.Close()
@@ -98,7 +98,7 @@ func DownloadAndSaveCover(coverID string, dirPath string) (*string, error) {
// Copy File to Disk // Copy File to Disk
_, err = io.Copy(out, resp.Body) _, err = io.Copy(out, resp.Body)
if err != nil { if err != nil {
log.Error("[metadata] File Copy Error") log.Error("File Copy Error")
return nil, errors.New("File Failure") return nil, errors.New("File Failure")
} }

90
opds/opds.go Normal file
View File

@@ -0,0 +1,90 @@
// https://github.com/opds-community/libopds2-go/blob/master/opds1/opds1.go
package opds
import (
"encoding/xml"
"time"
)
// Feed is the root element for an acquisition or navigation feed.
type Feed struct {
ID string `xml:"id,omitempty"`
XMLName xml.Name `xml:"feed"`
Title string `xml:"title,omitempty"`
Updated time.Time `xml:"updated,omitempty"`
Entries []Entry `xml:"entry,omitempty"`
Links []Link `xml:"link,omitempty"`
TotalResults int `xml:"totalResults,omitempty"`
ItemsPerPage int `xml:"itemsPerPage,omitempty"`
}
// Link is a link to a different resource.
type Link struct {
Rel string `xml:"rel,attr"`
Href string `xml:"href,attr,omitempty"`
TypeLink string `xml:"type,attr"`
Title string `xml:"title,attr,omitempty"`
FacetGroup string `xml:"facetGroup,attr,omitempty"`
Count int `xml:"count,attr,omitempty"`
Price *Price `xml:"price,omitempty"`
IndirectAcquisition []IndirectAcquisition `xml:"indirectAcquisition"`
}
// Author represents the feed author or the entry author.
type Author struct {
Name string `xml:"name"`
URI string `xml:"uri,omitempty"`
}
// Entry is an atom entry in the feed.
type Entry struct {
Title string `xml:"title,omitempty"`
ID string `xml:"id,omitempty"`
Identifier string `xml:"identifier,omitempty"`
Updated *time.Time `xml:"updated,omitempty"`
Rights string `xml:"rights,omitempty"`
Publisher string `xml:"publisher,omitempty"`
Author []Author `xml:"author,omitempty"`
Language string `xml:"language,omitempty"`
Issued string `xml:"issued,omitempty"`
Published *time.Time `xml:"published,omitempty"`
Category []Category `xml:"category,omitempty"`
Links []Link `xml:"link,omitempty"`
Summary *Content `xml:"summary,omitempty"`
Content *Content `xml:"content,omitempty"`
Series []Serie `xml:"series,omitempty"`
}
// Content is the content tag in an entry; the type will be html or text.
type Content struct {
Content string `xml:",cdata"`
ContentType string `xml:"type,attr"`
}
// Category represents the book category, with scheme and term for machine
// handling.
type Category struct {
Scheme string `xml:"scheme,attr"`
Term string `xml:"term,attr"`
Label string `xml:"label,attr"`
}
// Price represents the book price.
type Price struct {
CurrencyCode string `xml:"currencycode,attr,omitempty"`
Value float64 `xml:",cdata"`
}
// IndirectAcquisition represents the link, mostly for buying or borrowing
// a book.
type IndirectAcquisition struct {
TypeAcquisition string `xml:"type,attr"`
IndirectAcquisition []IndirectAcquisition `xml:"indirectAcquisition"`
}
// Serie stores series information from schema.org.
type Serie struct {
Name string `xml:"name,attr,omitempty"`
URL string `xml:"url,attr,omitempty"`
Position float32 `xml:"position,attr,omitempty"`
}

55
package-lock.json generated Normal file
View File

@@ -0,0 +1,55 @@
{
"name": "antholume",
"version": "1.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "antholume",
"version": "1.0.0",
"devDependencies": {
"prettier-plugin-go-template": "^0.0.15"
}
},
"node_modules/prettier": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.2.tgz",
"integrity": "sha512-rAVeHYMcv8ATV5d508CFdn+8/pHPpXeIid1DdrPwXnaAdH7cqjVbpJaT5eq4yRAFU/lsbwYwSF/n5iNrdJHPQA==",
"dev": true,
"peer": true,
"bin": {
"prettier": "bin/prettier.cjs"
},
"engines": {
"node": ">=14"
},
"funding": {
"url": "https://github.com/prettier/prettier?sponsor=1"
}
},
"node_modules/prettier-plugin-go-template": {
"version": "0.0.15",
"resolved": "https://registry.npmjs.org/prettier-plugin-go-template/-/prettier-plugin-go-template-0.0.15.tgz",
"integrity": "sha512-WqU92E1NokWYNZ9mLE6ijoRg6LtIGdLMePt2C7UBDjXeDH9okcRI3zRqtnWR4s5AloiqyvZ66jNBAa9tmRY5EQ==",
"dev": true,
"dependencies": {
"ulid": "^2.3.0"
},
"engines": {
"node": ">=14.0.0"
},
"peerDependencies": {
"prettier": "^3.0.0"
}
},
"node_modules/ulid": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/ulid/-/ulid-2.3.0.tgz",
"integrity": "sha512-keqHubrlpvT6G2wH0OEfSW4mquYRcbe/J8NMmveoQOjUqmo+hXtO+ORCpWhdbZ7k72UtY61BL7haGxW6enBnjw==",
"dev": true,
"bin": {
"ulid": "bin/cli.js"
}
}
}
}

7
package.json Normal file
View File

@@ -0,0 +1,7 @@
{
"name": "antholume",
"version": "1.0.0",
"devDependencies": {
"prettier-plugin-go-template": "^0.0.15"
}
}

View File

@@ -0,0 +1,37 @@
package formatters
import (
"fmt"
"strings"
"time"
)
// FormatDuration takes a duration and returns a human-readable duration string.
// For example: 1928371 seconds -> "22d 7h 39m 31s"
func FormatDuration(d time.Duration) string {
if d == 0 {
return "N/A"
}
var parts []string
days := int(d.Hours()) / 24
hours := int(d.Hours()) % 24
minutes := int(d.Minutes()) % 60
seconds := int(d.Seconds()) % 60
if days > 0 {
parts = append(parts, fmt.Sprintf("%dd", days))
}
if hours > 0 {
parts = append(parts, fmt.Sprintf("%dh", hours))
}
if minutes > 0 {
parts = append(parts, fmt.Sprintf("%dm", minutes))
}
if seconds > 0 {
parts = append(parts, fmt.Sprintf("%ds", seconds))
}
return strings.Join(parts, " ")
}

45
pkg/formatters/numbers.go Normal file
View File

@@ -0,0 +1,45 @@
package formatters
import (
"fmt"
"math"
)
// FormatNumber takes an int64 and returns a human-readable string.
// For example: 19823 -> "19.8k", 1500000 -> "1.5M"
func FormatNumber(input int64) string {
if input == 0 {
return "0"
}
// Handle Negative
negative := input < 0
if negative {
input = -input
}
abbreviations := []string{"", "k", "M", "B", "T"}
abbrevIndex := int(math.Log10(float64(input)) / 3)
// Bounds Check
if abbrevIndex >= len(abbreviations) {
abbrevIndex = len(abbreviations) - 1
}
scaledNumber := float64(input) / math.Pow(10, float64(abbrevIndex*3))
var result string
if scaledNumber >= 100 {
result = fmt.Sprintf("%.0f%s", scaledNumber, abbreviations[abbrevIndex])
} else if scaledNumber >= 10 {
result = fmt.Sprintf("%.1f%s", scaledNumber, abbreviations[abbrevIndex])
} else {
result = fmt.Sprintf("%.2f%s", scaledNumber, abbreviations[abbrevIndex])
}
if negative {
result = "-" + result
}
return result
}

Some files were not shown because too many files have changed in this diff Show More