From d02f8c324fcddaa82860a2514dff2c9eaadf6608 Mon Sep 17 00:00:00 2001
From: Evan Reichard
Date: Tue, 19 Sep 2023 19:29:55 -0400
Subject: [PATCH] [add] split wanted files vs wanted metadata for ko apis,
[add] documentation
---
.gitignore | 1 +
Dockerfile | 18 +-
Makefile | 12 +-
README.md | 55 ++++-
api/app-routes.go | 19 +-
api/auth.go | 4 +
api/ko-routes.go | 44 +++-
client/README.md | 15 ++
client/syncninja.koplugin/main.lua | 114 +++++++---
config/config.go | 32 ++-
database/manager.go | 29 ++-
database/query.sql | 141 ++++--------
database/query.sql.go | 215 +++++-------------
docker-compose.yml | 3 +-
graph/graph.go | 31 +--
cmd/main.go => main.go | 0
.../{documents.png => web_documents.png} | Bin
screenshots/{home.png => web_home.png} | Bin
screenshots/{login.png => web_login.png} | Bin
templates/graph.svg | 65 ++++--
templates/home.html | 3 +-
templates/login.html | 6 +-
22 files changed, 422 insertions(+), 385 deletions(-)
create mode 100644 client/README.md
rename cmd/main.go => main.go (100%)
rename screenshots/{documents.png => web_documents.png} (100%)
rename screenshots/{home.png => web_home.png} (100%)
rename screenshots/{login.png => web_login.png} (100%)
diff --git a/.gitignore b/.gitignore
index 9c45321..67e0ef5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
.DS_Store
data/
+build/
.direnv/
diff --git a/Dockerfile b/Dockerfile
index 24ea25b..ec308ad 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,12 +1,20 @@
-# FROM golang:1.20-alpine AS build
FROM alpine:edge AS build
RUN apk add --no-cache --update go gcc g++
WORKDIR /app
COPY . /app
-RUN go mod download
-RUN CGO_ENABLED=1 CGO_CFLAGS="-D_LARGEFILE64_SOURCE" go build -o /sync-ninja cmd/main.go
+# Copy Resources
+RUN mkdir -p /opt/bookmanager
+RUN cp -a ./templates /opt/bookmanager/templates
+RUN cp -a ./assets /opt/bookmanager/assets
+
+# Download Dependencies & Compile
+RUN go mod download
+RUN CGO_ENABLED=1 CGO_CFLAGS="-D_LARGEFILE64_SOURCE" go build -o /opt/bookmanager/server
+
+# Create Image
FROM alpine:3.18
-COPY --from=build /sync-ninja /sync-ninja
+COPY --from=build /opt/bookmanager /opt/bookmanager
+WORKDIR /opt/bookmanager
EXPOSE 8585
-ENTRYPOINT ["/sync-ninja", "serve"]
+ENTRYPOINT ["/opt/bookmanager/server", "serve"]
diff --git a/Makefile b/Makefile
index aceb130..46ac743 100644
--- a/Makefile
+++ b/Makefile
@@ -1,12 +1,18 @@
+build_local:
+ mkdir -p ./build
+ cp -a ./templates ./build/templates
+ cp -a ./assets ./build/assets
+ CGO_ENABLED=1 CGO_CFLAGS="-D_LARGEFILE64_SOURCE" go build -o ./build/server
+
docker_build_local:
- docker build -t sync-ninja:latest .
+ docker build -t bookmanager:latest .
docker_build_release_beta:
docker buildx build \
--platform linux/amd64,linux/arm64 \
- -t gitea.va.reichard.io/reichard/sync-ninja:beta --push .
+ -t gitea.va.reichard.io/reichard/bookmanager:beta --push .
docker_build_release_latest:
docker buildx build \
--platform linux/amd64,linux/arm64 \
- -t gitea.va.reichard.io/reichard/sync-ninja:latest --push .
+ -t gitea.va.reichard.io/reichard/bookmanager:latest --push .
diff --git a/README.md b/README.md
index 9b33469..5a7e99e 100644
--- a/README.md
+++ b/README.md
@@ -1,14 +1,14 @@
# Book Manager
-
-
+
+
-
-
+
+
-
-
+
+
@@ -25,10 +25,29 @@ In additional to the compatible KOSync API's, we add:
- Additional APIs to automatically upload reading statistics
- Automatically upload documents to the server (can download in the "Documents" view)
- Automatic book cover metadata scraping (Thanks [OpenLibrary](https://openlibrary.org/))
+- No JavaScript! All information is rendered server side.
+
+# Server
+
+## Configuration
+
+| Environment Variable | Default Value | Description |
+| -------------------- | ------------- | -------------------------------------------------------------------- |
+| DATABASE_TYPE | SQLite | Currently only "SQLite" is supported |
+| DATABASE_NAME | book_manager | The database name, or in SQLite's case, the filename |
+| DATABASE_PASSWORD | | Currently not used. Placeholder for potential alternative DB support |
+| CONFIG_PATH | /config | Directory where to store SQLite's DB |
+| DATA_PATH | /data | Directory where to store the documents and cover metadata |
+| LISTEN_PORT | 8585 | Port the server listens at |
+| REGISTRATION_ENABLED | false | Whether to allow registration (applies to both WebApp & KOSync API) |
+
+# Client (KOReader Plugin)
+
+See documentation in the `client` subfolder: [SyncNinja](https://gitea.va.reichard.io/evan/BookManager/src/branch/master/client/)
# Development
-SQLC Generation:
+SQLC Generation (v1.21.0):
```
go install github.com/sqlc-dev/sqlc/cmd/sqlc@latest
@@ -41,6 +60,28 @@ Run Development:
CONFIG_PATH=./data DATA_PATH=./data go run cmd/main.go serve
```
+# Building
+
+The `Dockerfile` and `Makefile` contain the build information:
+
+```
+# Build Local Docker Image
+make docker_build_local
+
+# Push Latest
+make docker_build_release_latest
+```
+
+If manually building, you must enable CGO:
+
+```
+# Download Dependencies
+go mod download
+
+# Compile (Binary `./bookmanager`)
+CGO_ENABLED=1 CGO_CFLAGS="-D_LARGEFILE64_SOURCE" go build -o ./bookmanager
+```
+
## Notes
- Icons: https://www.svgrepo.com/collection/solar-bold-icons
diff --git a/api/app-routes.go b/api/app-routes.go
index 5aedab8..1cb49fe 100644
--- a/api/app-routes.go
+++ b/api/app-routes.go
@@ -74,6 +74,8 @@ func (api *API) createAppResourcesRoute(routeName string, args ...map[string]any
"DatabaseInfo": database_info,
"GraphData": read_graph_data,
}
+ } else if routeName == "login" {
+ templateVars["RegistrationEnabled"] = api.Config.RegistrationEnabled
}
c.HTML(http.StatusOK, routeName, templateVars)
@@ -150,20 +152,3 @@ func (api *API) getDocumentCover(c *gin.Context) {
c.File(*coverFilePath)
}
-
-/*
-METADATA:
- - Metadata Match
- - Update Metadata
-*/
-
-/*
-GRAPHS:
- - Streaks (Daily, Weekly, Monthly)
- - Last Week Activity (Daily - Pages & Time)
-
-
- - Pages Read (Daily, Weekly, Monthly)
- - Reading Progress
- - Average Reading Time (Daily, Weekly, Monthly)
-*/
diff --git a/api/auth.go b/api/auth.go
index 86107e2..836803c 100644
--- a/api/auth.go
+++ b/api/auth.go
@@ -113,6 +113,10 @@ func (api *API) authLogout(c *gin.Context) {
}
func (api *API) authFormRegister(c *gin.Context) {
+ if !api.Config.RegistrationEnabled {
+ c.AbortWithStatus(http.StatusConflict)
+ }
+
username := strings.TrimSpace(c.PostForm("username"))
rawPassword := strings.TrimSpace(c.PostForm("password"))
diff --git a/api/ko-routes.go b/api/ko-routes.go
index 0d5f3be..d49867e 100644
--- a/api/ko-routes.go
+++ b/api/ko-routes.go
@@ -9,6 +9,7 @@ import (
"net/http"
"os"
"path/filepath"
+ "strings"
"time"
argon2 "github.com/alexedwards/argon2id"
@@ -61,9 +62,10 @@ type requestCheckDocumentSync struct {
}
type responseCheckDocumentSync struct {
- Want []string `json:"want"`
- Give []database.Document `json:"give"`
- Delete []string `json:"deleted"`
+ WantFiles []string `json:"want_files"`
+ WantMetadata []string `json:"want_metadata"`
+ Give []database.Document `json:"give"`
+ Delete []string `json:"deleted"`
}
type requestDocumentID struct {
@@ -79,6 +81,10 @@ func (api *API) authorizeUser(c *gin.Context) {
}
func (api *API) createUser(c *gin.Context) {
+ if !api.Config.RegistrationEnabled {
+ c.AbortWithStatus(http.StatusConflict)
+ }
+
var rUser requestUser
if err := c.ShouldBindJSON(&rUser); err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid User Data"})
@@ -96,7 +102,6 @@ func (api *API) createUser(c *gin.Context) {
return
}
- // TODO - Initial User is Admin & Enable / Disable Registration
rows, err := api.DB.Queries.CreateUser(api.DB.Ctx, database.CreateUserParams{
ID: rUser.Username,
Pass: hashedPassword,
@@ -411,22 +416,38 @@ func (api *API) checkDocumentsSync(c *gin.Context) {
return
}
- wantedDocIDs, err := api.DB.Queries.GetWantedDocuments(api.DB.Ctx, string(jsonHaves))
+ wantedDocs, err := api.DB.Queries.GetWantedDocuments(api.DB.Ctx, string(jsonHaves))
if err != nil {
log.Error("GetWantedDocuments Error:", err)
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
return
}
+ // Split Metadata & File Wants
+ var wantedMetadataDocIDs []string
+ var wantedFilesDocIDs []string
+ for _, v := range wantedDocs {
+ if v.WantMetadata {
+ wantedMetadataDocIDs = append(wantedMetadataDocIDs, v.ID)
+ }
+ if v.WantFile {
+ wantedFilesDocIDs = append(wantedFilesDocIDs, v.ID)
+ }
+ }
+
rCheckDocSync := responseCheckDocumentSync{
- Delete: []string{},
- Want: []string{},
- Give: []database.Document{},
+ Delete: []string{},
+ WantFiles: []string{},
+ WantMetadata: []string{},
+ Give: []database.Document{},
}
// Ensure Empty Array
- if wantedDocIDs != nil {
- rCheckDocSync.Want = wantedDocIDs
+ if wantedMetadataDocIDs != nil {
+ rCheckDocSync.WantMetadata = wantedMetadataDocIDs
+ }
+ if wantedFilesDocIDs != nil {
+ rCheckDocSync.WantFiles = wantedFilesDocIDs
}
if missingDocs != nil {
rCheckDocSync.Give = missingDocs
@@ -482,6 +503,9 @@ func (api *API) uploadDocumentFile(c *gin.Context) {
fileName = fileName + " - Unknown"
}
+ // Remove Slashes
+ fileName = strings.ReplaceAll(fileName, "/", "")
+
// Derive & Sanitize File Name
fileName = "." + filepath.Clean(fmt.Sprintf("/%s [%s]%s", fileName, document.ID, fileExtension))
diff --git a/client/README.md b/client/README.md
new file mode 100644
index 0000000..be5a5db
--- /dev/null
+++ b/client/README.md
@@ -0,0 +1,15 @@
+# Book Manager - SyncNinja KOReader Plugin
+
+This is BookManager's KOReader plugin, `syncninja.koplugin`.
+
+# Installation
+
+Copy the `syncninja.koplugin` directory to the `plugins` directory for your KOReader installation. Restart KOReader and SyncNinja will be accessible via the Tools menu.
+
+# Configuration
+
+You must configure the BookManager server and credentials in SyncNinja. Afterward, you'll have the ability to configure the sync cadence, as well as whether you'd like the plugin to sync your activity, document metadata, and/or the documents themselves.
+
+# KOSync Compatibility
+
+BookManager implements APIs compatible with the KOSync plugin. This means that you can utilize this server for KOSync (and it's recommended!). SyncNinja provides an easy way to merge configurations between both KOSync and itself in the menu.
diff --git a/client/syncninja.koplugin/main.lua b/client/syncninja.koplugin/main.lua
index dadfa1d..a8f94d0 100644
--- a/client/syncninja.koplugin/main.lua
+++ b/client/syncninja.koplugin/main.lua
@@ -1,3 +1,4 @@
+local ConfirmBox = require("ui/widget/confirmbox")
local DataStorage = require("datastorage")
local Device = require("device")
local Dispatcher = require("dispatcher")
@@ -593,11 +594,12 @@ function SyncNinja:checkActivity(interactive)
-- API Callback Function
local callback_func = function(ok, body)
if not ok then
- -- TODO: if interactive
- UIManager:show(InfoMessage:new{
- text = _("SyncNinja: checkActivity Error"),
- timeout = 3
- })
+ if interactive == true then
+ UIManager:show(InfoMessage:new{
+ text = _("SyncNinja: checkActivity Error"),
+ timeout = 3
+ })
+ end
return logger.dbg("SyncNinja: checkActivity Error:", dump(body))
end
@@ -626,11 +628,12 @@ function SyncNinja:uploadActivity(activity_data, interactive)
-- API Callback Function
local callback_func = function(ok, body)
if not ok then
- -- TODO: if interactive
- UIManager:show(InfoMessage:new{
- text = _("SyncNinja: uploadActivity Error"),
- timeout = 3
- })
+ if interactive == true then
+ UIManager:show(InfoMessage:new{
+ text = _("SyncNinja: uploadActivity Error"),
+ timeout = 3
+ })
+ end
return logger.dbg("SyncNinja: uploadActivity Error:", dump(body))
end
@@ -660,27 +663,47 @@ function SyncNinja:checkDocuments(interactive)
-- API Callback Function
local callback_func = function(ok, body)
if not ok then
- -- TODO: if interactive
- UIManager:show(InfoMessage:new{
- text = _("SyncNinja: checkDocuments Error"),
- timeout = 3
- })
+ if interactive == true then
+ UIManager:show(InfoMessage:new{
+ text = _("SyncNinja: checkDocuments Error"),
+ timeout = 3
+ })
+ end
return logger.dbg("SyncNinja: checkDocuments Error:", dump(body))
end
- -- Documents Wanted
- if not (next(body.want) == nil) then
- local hash_want = {}
- for _, v in pairs(body.want) do hash_want[v] = true end
+ -- Document Metadata Wanted
+ if not (next(body.want_metadata) == nil) then
+ local hash_want_metadata = {}
+ for _, v in pairs(body.want_metadata) do
+ hash_want_metadata[v] = true
+ end
local upload_doc_metadata = {}
for _, v in pairs(doc_metadata) do
- if hash_want[v.id] == true then
+ if hash_want_metadata[v.id] == true then
table.insert(upload_doc_metadata, v)
end
end
- self:uploadDocuments(upload_doc_metadata, interactive)
+ self:uploadDocumentMetadata(upload_doc_metadata, interactive)
+ end
+
+ -- Document Files Wanted
+ if not (next(body.want_files) == nil) then
+ local hash_want_files = {}
+ for _, v in pairs(body.want_files) do
+ hash_want_files[v] = true
+ end
+
+ local upload_doc_files = {}
+ for _, v in pairs(doc_metadata) do
+ if hash_want_files[v.id] == true then
+ table.insert(upload_doc_files, v)
+ end
+ end
+
+ self:uploadDocumentFiles(upload_doc_files, interactive)
end
-- Documents Provided
@@ -706,8 +729,8 @@ function SyncNinja:downloadDocuments(doc_metadata, interactive)
-- TODO
end
-function SyncNinja:uploadDocuments(doc_metadata, interactive)
- logger.dbg("SyncNinja: uploadDocuments")
+function SyncNinja:uploadDocumentMetadata(doc_metadata, interactive)
+ logger.dbg("SyncNinja: uploadDocumentMetadata")
-- Ensure Document Sync Enabled
if self.settings.sync_documents ~= true then return end
@@ -715,12 +738,14 @@ function SyncNinja:uploadDocuments(doc_metadata, interactive)
-- API Callback Function
local callback_func = function(ok, body)
if not ok then
- -- TODO: if interactive
- UIManager:show(InfoMessage:new{
- text = _("SyncNinja: uploadDocuments Error"),
- timeout = 3
- })
- return logger.dbg("SyncNinja: uploadDocuments Error:", dump(body))
+ if interactive == true then
+ UIManager:show(InfoMessage:new{
+ text = _("SyncNinja: uploadDocumentMetadata Error"),
+ timeout = 3
+ })
+ end
+ return logger.dbg("SyncNinja: uploadDocumentMetadata Error:",
+ dump(body))
end
end
@@ -735,20 +760,51 @@ function SyncNinja:uploadDocuments(doc_metadata, interactive)
local ok, err = pcall(client.add_documents, client, self.settings.username,
self.settings.password, doc_metadata, callback_func)
+end
+
+function SyncNinja:uploadDocumentFiles(doc_metadata, interactive)
+ logger.dbg("SyncNinja: uploadDocumentFiles")
+
-- Ensure Document File Sync Enabled
if self.settings.sync_document_files ~= true then return end
if interactive ~= true then return end
+ -- API Callback Function
+ local callback_func = function(ok, body)
+ if not ok then
+ UIManager:show(InfoMessage:new{
+ text = _("SyncNinja: uploadDocumentFiles Error"),
+ timeout = 3
+ })
+ return logger.dbg("SyncNinja: uploadDocumentFiles Error:",
+ dump(body))
+ end
+ end
+
-- API File Upload
local confirm_upload_callback = function()
+ UIManager:show(InfoMessage:new{
+ text = _("Uploading Documents - Please Wait...")
+ })
+
+ -- API Client
+ local SyncNinjaClient = require("SyncNinjaClient")
+ local client = SyncNinjaClient:new{
+ custom_url = self.settings.server,
+ service_spec = self.path .. "/api.json"
+ }
+
for _, v in pairs(doc_metadata) do
if v.filepath ~= nil then
+ -- TODO: Partial File Uploads (Resolve: OOM Issue)
local ok, err = pcall(client.upload_document, client,
self.settings.username,
self.settings.password, v.id, v.filepath,
callback_func)
end
end
+
+ UIManager:show(InfoMessage:new{text = _("Uploading Documents Complete")})
end
UIManager:show(ConfirmBox:new{
diff --git a/config/config.go b/config/config.go
index 9766dbc..2a51e93 100644
--- a/config/config.go
+++ b/config/config.go
@@ -2,27 +2,37 @@ package config
import (
"os"
+ "strings"
)
type Config struct {
+ // Server Config
+ Version string
+ ListenPort string
+
+ // DB Configuration
DBType string
DBName string
DBPassword string
+
+ // Data Paths
ConfigPath string
DataPath string
- ListenPort string
- Version string
+
+ // Miscellaneous Settings
+ RegistrationEnabled bool
}
func Load() *Config {
return &Config{
- DBType: getEnv("DATABASE_TYPE", "SQLite"),
- DBName: getEnv("DATABASE_NAME", "bbank"),
- DBPassword: getEnv("DATABASE_PASSWORD", ""),
- ConfigPath: getEnv("CONFIG_PATH", "/config"),
- DataPath: getEnv("DATA_PATH", "/data"),
- ListenPort: getEnv("LISTEN_PORT", "8585"),
- Version: "0.0.1",
+ Version: "0.0.1",
+ DBType: trimLowerString(getEnv("DATABASE_TYPE", "SQLite")),
+ DBName: trimLowerString(getEnv("DATABASE_NAME", "book_manager")),
+ DBPassword: getEnv("DATABASE_PASSWORD", ""),
+ ConfigPath: getEnv("CONFIG_PATH", "/config"),
+ DataPath: getEnv("DATA_PATH", "/data"),
+ ListenPort: getEnv("LISTEN_PORT", "8585"),
+ RegistrationEnabled: trimLowerString(getEnv("REGISTRATION_ENABLED", "false")) == "true",
}
}
@@ -32,3 +42,7 @@ func getEnv(key, fallback string) string {
}
return fallback
}
+
+func trimLowerString(val string) string {
+ return strings.ToLower(strings.TrimSpace(val))
+}
diff --git a/database/manager.go b/database/manager.go
index 39bd6ef..bceff4e 100644
--- a/database/manager.go
+++ b/database/manager.go
@@ -4,6 +4,7 @@ import (
"context"
"database/sql"
_ "embed"
+ "fmt"
"path"
sqlite "github.com/mattn/go-sqlite3"
@@ -20,11 +21,6 @@ type DBManager struct {
//go:embed schema.sql
var ddl string
-func foobar() string {
- log.Info("WTF")
- return ""
-}
-
func NewMgr(c *config.Config) *DBManager {
// Create Manager
dbm := &DBManager{
@@ -32,19 +28,12 @@ func NewMgr(c *config.Config) *DBManager {
}
// Create Database
- if c.DBType == "SQLite" {
-
+ if c.DBType == "sqlite" {
sql.Register("sqlite3_custom", &sqlite.SQLiteDriver{
- ConnectHook: func(conn *sqlite.SQLiteConn) error {
- if err := conn.RegisterFunc("test_func", foobar, false); err != nil {
- log.Info("Error Registering")
- return err
- }
- return nil
- },
+ ConnectHook: connectHookSQLite,
})
- dbLocation := path.Join(c.ConfigPath, "bbank.db")
+ dbLocation := path.Join(c.ConfigPath, fmt.Sprintf("%s.db", c.DBName))
var err error
dbm.DB, err = sql.Open("sqlite3_custom", dbLocation)
@@ -64,3 +53,13 @@ func NewMgr(c *config.Config) *DBManager {
return dbm
}
+
+func connectHookSQLite(conn *sqlite.SQLiteConn) error {
+ if err := conn.RegisterFunc("test_func", func() string {
+ return "FOOBAR"
+ }, false); err != nil {
+ log.Info("Error Registering Function")
+ return err
+ }
+ return nil
+}
diff --git a/database/query.sql b/database/query.sql
index 99fa14c..611fd43 100644
--- a/database/query.sql
+++ b/database/query.sql
@@ -123,13 +123,20 @@ WHERE
AND documents.id NOT IN (sqlc.slice('document_ids'));
-- name: GetWantedDocuments :many
-SELECT CAST(value AS TEXT) AS id
+SELECT
+ CAST(value AS TEXT) AS id,
+ CAST((documents.filepath IS NULL) AS BOOLEAN) AS want_file,
+ CAST((documents.synced != true) AS BOOLEAN) AS want_metadata
FROM json_each(?1)
LEFT JOIN documents
ON value = documents.id
WHERE (
documents.id IS NOT NULL
- AND documents.synced = false
+ AND documents.deleted = false
+ AND (
+ documents.synced = false
+ OR documents.filepath IS NULL
+ )
)
OR (documents.id IS NULL)
OR CAST($document_ids AS TEXT) != CAST($document_ids AS TEXT);
@@ -242,7 +249,7 @@ FROM capped_stats;
-- name: GetDocumentDaysRead :one
WITH document_days AS (
- SELECT date(start_time, 'localtime') AS dates
+ SELECT DATE(start_time, 'localtime') AS dates
FROM rescaled_activity
WHERE document_id = $document_id
AND user_id = $user_id
@@ -251,93 +258,13 @@ WITH document_days AS (
SELECT CAST(count(*) AS INTEGER) AS days_read
FROM document_days;
--- name: GetUserDayStreaks :one
-WITH document_days AS (
- SELECT date(start_time, 'localtime') AS read_day
- FROM activity
- WHERE user_id = $user_id
- GROUP BY read_day
- ORDER BY read_day DESC
-),
-partitions AS (
- SELECT
- document_days.*,
- row_number() OVER (
- PARTITION BY 1 ORDER BY read_day DESC
- ) AS seqnum
- FROM document_days
-),
-streaks AS (
- SELECT
- count(*) AS streak,
- MIN(read_day) AS start_date,
- MAX(read_day) AS end_date
- FROM partitions
- GROUP BY date(read_day, '+' || seqnum || ' day')
- ORDER BY end_date DESC
-),
-max_streak AS (
- SELECT
- MAX(streak) AS max_streak,
- start_date AS max_streak_start_date,
- end_date AS max_streak_end_date
- FROM streaks
-)
-SELECT
- CAST(max_streak AS INTEGER),
- CAST(max_streak_start_date AS TEXT),
- CAST(max_streak_end_date AS TEXT),
- streak AS current_streak,
- CAST(start_date AS TEXT) AS current_streak_start_date,
- CAST(end_date AS TEXT) AS current_streak_end_date
-FROM max_streak, streaks LIMIT 1;
-
--- name: GetUserWeekStreaks :one
-WITH document_weeks AS (
- SELECT STRFTIME('%Y-%m-%d', start_time, 'localtime', 'weekday 0', '-7 day') AS read_week
- FROM activity
- WHERE user_id = $user_id
- GROUP BY read_week
- ORDER BY read_week DESC
-),
-partitions AS (
- SELECT
- document_weeks.*,
- row_number() OVER (
- PARTITION BY 1 ORDER BY read_week DESC
- ) AS seqnum
- FROM document_weeks
-),
-streaks AS (
- SELECT
- count(*) AS streak,
- MIN(read_week) AS start_date,
- MAX(read_week) AS end_date
- FROM partitions
- GROUP BY date(read_week, '+' || (seqnum * 7) || ' day')
- ORDER BY end_date DESC
-),
-max_streak AS (
- SELECT
- MAX(streak) AS max_streak,
- start_date AS max_streak_start_date,
- end_date AS max_streak_end_date
- FROM streaks
-)
-SELECT
- CAST(max_streak AS INTEGER),
- CAST(max_streak_start_date AS TEXT),
- CAST(max_streak_end_date AS TEXT),
- streak AS current_streak,
- CAST(start_date AS TEXT) AS current_streak_start_date,
- CAST(end_date AS TEXT) AS current_streak_end_date
-FROM max_streak, streaks LIMIT 1;
-
-- name: GetUserWindowStreaks :one
WITH document_windows AS (
SELECT CASE
- WHEN ?2 = "WEEK" THEN STRFTIME('%Y-%m-%d', start_time, 'localtime', 'weekday 0', '-7 day')
- WHEN ?2 = "DAY" THEN date(start_time, 'localtime')
+ -- TODO: Timezones! E.g. DATE(start_time, '-5 hours')
+ -- TODO: Timezones! E.g. DATE(start_time, '-5 hours', '-7 days')
+ WHEN ?2 = "WEEK" THEN STRFTIME('%Y-%m-%d', start_time, 'weekday 0', '-7 day')
+ WHEN ?2 = "DAY" THEN DATE(start_time)
END AS read_window
FROM activity
WHERE user_id = $user_id
@@ -360,8 +287,8 @@ streaks AS (
MAX(read_window) AS end_date
FROM partitions
GROUP BY CASE
- WHEN ?2 = "DAY" THEN date(read_window, '+' || seqnum || ' day')
- WHEN ?2 = "WEEK" THEN date(read_window, '+' || (seqnum * 7) || ' day')
+ WHEN ?2 = "DAY" THEN DATE(read_window, '+' || seqnum || ' day')
+ WHEN ?2 = "WEEK" THEN DATE(read_window, '+' || (seqnum * 7) || ' day')
END
ORDER BY end_date DESC
),
@@ -371,15 +298,29 @@ max_streak AS (
start_date AS max_streak_start_date,
end_date AS max_streak_end_date
FROM streaks
+),
+current_streak AS (
+ SELECT
+ streak AS current_streak,
+ start_date AS current_streak_start_date,
+ end_date AS current_streak_end_date
+ FROM streaks
+ WHERE CASE
+ WHEN ?2 = "WEEK" THEN STRFTIME('%Y-%m-%d', 'now', 'weekday 0', '-7 day') = current_streak_end_date
+ WHEN ?2 = "DAY" THEN DATE('now', '-1 day') = current_streak_end_date OR DATE('now') = current_streak_end_date
+ END
+ LIMIT 1
)
SELECT
- CAST(max_streak AS INTEGER),
- CAST(max_streak_start_date AS TEXT),
- CAST(max_streak_end_date AS TEXT),
- streak AS current_streak,
- CAST(start_date AS TEXT) AS current_streak_start_date,
- CAST(end_date AS TEXT) AS current_streak_end_date
-FROM max_streak, streaks LIMIT 1;
+ CAST(IFNULL(max_streak, 0) AS INTEGER) AS max_streak,
+ CAST(IFNULL(max_streak_start_date, "N/A") AS TEXT) AS max_streak_start_date,
+ CAST(IFNULL(max_streak_end_date, "N/A") AS TEXT) AS max_streak_end_date,
+ IFNULL(current_streak, 0) AS current_streak,
+ CAST(IFNULL(current_streak_start_date, "N/A") AS TEXT) AS current_streak_start_date,
+ CAST(IFNULL(current_streak_end_date, "N/A") AS TEXT) AS current_streak_end_date
+FROM max_streak
+LEFT JOIN current_streak ON 1 = 1
+LIMIT 1;
-- name: GetDatabaseInfo :one
SELECT
@@ -391,16 +332,16 @@ LIMIT 1;
-- name: GetDailyReadStats :many
WITH RECURSIVE last_30_days (date) AS (
- SELECT date('now') AS date
+ SELECT DATE('now') AS date
UNION ALL
- SELECT date(date, '-1 days')
+ SELECT DATE(date, '-1 days')
FROM last_30_days
LIMIT 30
),
activity_records AS (
SELECT
sum(duration) AS seconds_read,
- date(start_time, 'localtime') AS day
+ DATE(start_time, 'localtime') AS day
FROM activity
WHERE user_id = $user_id
GROUP BY day
@@ -420,7 +361,7 @@ LIMIT 30;
-- SELECT
-- sum(duration) / 60 AS minutes_read,
--- date(start_time, 'localtime') AS day
+-- DATE(start_time, 'localtime') AS day
-- FROM activity
-- GROUP BY day
-- ORDER BY day DESC
diff --git a/database/query.sql.go b/database/query.sql.go
index ac38066..ebef836 100644
--- a/database/query.sql.go
+++ b/database/query.sql.go
@@ -157,16 +157,16 @@ func (q *Queries) GetActivity(ctx context.Context, arg GetActivityParams) ([]Act
const getDailyReadStats = `-- name: GetDailyReadStats :many
WITH RECURSIVE last_30_days (date) AS (
- SELECT date('now') AS date
+ SELECT DATE('now') AS date
UNION ALL
- SELECT date(date, '-1 days')
+ SELECT DATE(date, '-1 days')
FROM last_30_days
LIMIT 30
),
activity_records AS (
SELECT
sum(duration) AS seconds_read,
- date(start_time, 'localtime') AS day
+ DATE(start_time, 'localtime') AS day
FROM activity
WHERE user_id = ?1
GROUP BY day
@@ -372,7 +372,7 @@ func (q *Queries) GetDocument(ctx context.Context, documentID string) (Document,
const getDocumentDaysRead = `-- name: GetDocumentDaysRead :one
WITH document_days AS (
- SELECT date(start_time, 'localtime') AS dates
+ SELECT DATE(start_time, 'localtime') AS dates
FROM rescaled_activity
WHERE document_id = ?1
AND user_id = ?2
@@ -758,141 +758,13 @@ func (q *Queries) GetUser(ctx context.Context, userID string) (User, error) {
return i, err
}
-const getUserDayStreaks = `-- name: GetUserDayStreaks :one
-WITH document_days AS (
- SELECT date(start_time, 'localtime') AS read_day
- FROM activity
- WHERE user_id = ?1
- GROUP BY read_day
- ORDER BY read_day DESC
-),
-partitions AS (
- SELECT
- document_days.read_day,
- row_number() OVER (
- PARTITION BY 1 ORDER BY read_day DESC
- ) AS seqnum
- FROM document_days
-),
-streaks AS (
- SELECT
- count(*) AS streak,
- MIN(read_day) AS start_date,
- MAX(read_day) AS end_date
- FROM partitions
- GROUP BY date(read_day, '+' || seqnum || ' day')
- ORDER BY end_date DESC
-),
-max_streak AS (
- SELECT
- MAX(streak) AS max_streak,
- start_date AS max_streak_start_date,
- end_date AS max_streak_end_date
- FROM streaks
-)
-SELECT
- CAST(max_streak AS INTEGER),
- CAST(max_streak_start_date AS TEXT),
- CAST(max_streak_end_date AS TEXT),
- streak AS current_streak,
- CAST(start_date AS TEXT) AS current_streak_start_date,
- CAST(end_date AS TEXT) AS current_streak_end_date
-FROM max_streak, streaks LIMIT 1
-`
-
-type GetUserDayStreaksRow struct {
- MaxStreak int64 `json:"max_streak"`
- MaxStreakStartDate string `json:"max_streak_start_date"`
- MaxStreakEndDate string `json:"max_streak_end_date"`
- CurrentStreak int64 `json:"current_streak"`
- CurrentStreakStartDate string `json:"current_streak_start_date"`
- CurrentStreakEndDate string `json:"current_streak_end_date"`
-}
-
-func (q *Queries) GetUserDayStreaks(ctx context.Context, userID string) (GetUserDayStreaksRow, error) {
- row := q.db.QueryRowContext(ctx, getUserDayStreaks, userID)
- var i GetUserDayStreaksRow
- err := row.Scan(
- &i.MaxStreak,
- &i.MaxStreakStartDate,
- &i.MaxStreakEndDate,
- &i.CurrentStreak,
- &i.CurrentStreakStartDate,
- &i.CurrentStreakEndDate,
- )
- return i, err
-}
-
-const getUserWeekStreaks = `-- name: GetUserWeekStreaks :one
-WITH document_weeks AS (
- SELECT STRFTIME('%Y-%m-%d', start_time, 'localtime', 'weekday 0', '-7 day') AS read_week
- FROM activity
- WHERE user_id = ?1
- GROUP BY read_week
- ORDER BY read_week DESC
-),
-partitions AS (
- SELECT
- document_weeks.read_week,
- row_number() OVER (
- PARTITION BY 1 ORDER BY read_week DESC
- ) AS seqnum
- FROM document_weeks
-),
-streaks AS (
- SELECT
- count(*) AS streak,
- MIN(read_week) AS start_date,
- MAX(read_week) AS end_date
- FROM partitions
- GROUP BY date(read_week, '+' || (seqnum * 7) || ' day')
- ORDER BY end_date DESC
-),
-max_streak AS (
- SELECT
- MAX(streak) AS max_streak,
- start_date AS max_streak_start_date,
- end_date AS max_streak_end_date
- FROM streaks
-)
-SELECT
- CAST(max_streak AS INTEGER),
- CAST(max_streak_start_date AS TEXT),
- CAST(max_streak_end_date AS TEXT),
- streak AS current_streak,
- CAST(start_date AS TEXT) AS current_streak_start_date,
- CAST(end_date AS TEXT) AS current_streak_end_date
-FROM max_streak, streaks LIMIT 1
-`
-
-type GetUserWeekStreaksRow struct {
- MaxStreak int64 `json:"max_streak"`
- MaxStreakStartDate string `json:"max_streak_start_date"`
- MaxStreakEndDate string `json:"max_streak_end_date"`
- CurrentStreak int64 `json:"current_streak"`
- CurrentStreakStartDate string `json:"current_streak_start_date"`
- CurrentStreakEndDate string `json:"current_streak_end_date"`
-}
-
-func (q *Queries) GetUserWeekStreaks(ctx context.Context, userID string) (GetUserWeekStreaksRow, error) {
- row := q.db.QueryRowContext(ctx, getUserWeekStreaks, userID)
- var i GetUserWeekStreaksRow
- err := row.Scan(
- &i.MaxStreak,
- &i.MaxStreakStartDate,
- &i.MaxStreakEndDate,
- &i.CurrentStreak,
- &i.CurrentStreakStartDate,
- &i.CurrentStreakEndDate,
- )
- return i, err
-}
-
const getUserWindowStreaks = `-- name: GetUserWindowStreaks :one
WITH document_windows AS (
SELECT CASE
- WHEN ?2 = "WEEK" THEN STRFTIME('%Y-%m-%d', start_time, 'localtime', 'weekday 0', '-7 day')
- WHEN ?2 = "DAY" THEN date(start_time, 'localtime')
+ -- TODO: Timezones! E.g. DATE(start_time, '-5 hours')
+ -- TODO: Timezones! E.g. DATE(start_time, '-5 hours', '-7 days')
+ WHEN ?2 = "WEEK" THEN STRFTIME('%Y-%m-%d', start_time, 'weekday 0', '-7 day')
+ WHEN ?2 = "DAY" THEN DATE(start_time)
END AS read_window
FROM activity
WHERE user_id = ?1
@@ -915,8 +787,8 @@ streaks AS (
MAX(read_window) AS end_date
FROM partitions
GROUP BY CASE
- WHEN ?2 = "DAY" THEN date(read_window, '+' || seqnum || ' day')
- WHEN ?2 = "WEEK" THEN date(read_window, '+' || (seqnum * 7) || ' day')
+ WHEN ?2 = "DAY" THEN DATE(read_window, '+' || seqnum || ' day')
+ WHEN ?2 = "WEEK" THEN DATE(read_window, '+' || (seqnum * 7) || ' day')
END
ORDER BY end_date DESC
),
@@ -926,15 +798,29 @@ max_streak AS (
start_date AS max_streak_start_date,
end_date AS max_streak_end_date
FROM streaks
+),
+current_streak AS (
+ SELECT
+ streak AS current_streak,
+ start_date AS current_streak_start_date,
+ end_date AS current_streak_end_date
+ FROM streaks
+ WHERE CASE
+ WHEN ?2 = "WEEK" THEN STRFTIME('%Y-%m-%d', 'now', 'weekday 0', '-7 day') = current_streak_end_date
+ WHEN ?2 = "DAY" THEN DATE('now', '-1 day') = current_streak_end_date OR DATE('now') = current_streak_end_date
+ END
+ LIMIT 1
)
SELECT
- CAST(max_streak AS INTEGER),
- CAST(max_streak_start_date AS TEXT),
- CAST(max_streak_end_date AS TEXT),
- streak AS current_streak,
- CAST(start_date AS TEXT) AS current_streak_start_date,
- CAST(end_date AS TEXT) AS current_streak_end_date
-FROM max_streak, streaks LIMIT 1
+ CAST(IFNULL(max_streak, 0) AS INTEGER) AS max_streak,
+ CAST(IFNULL(max_streak_start_date, "N/A") AS TEXT) AS max_streak_start_date,
+ CAST(IFNULL(max_streak_end_date, "N/A") AS TEXT) AS max_streak_end_date,
+ IFNULL(current_streak, 0) AS current_streak,
+ CAST(IFNULL(current_streak_start_date, "N/A") AS TEXT) AS current_streak_start_date,
+ CAST(IFNULL(current_streak_end_date, "N/A") AS TEXT) AS current_streak_end_date
+FROM max_streak
+LEFT JOIN current_streak ON 1 = 1
+LIMIT 1
`
type GetUserWindowStreaksParams struct {
@@ -943,12 +829,12 @@ type GetUserWindowStreaksParams struct {
}
type GetUserWindowStreaksRow struct {
- MaxStreak int64 `json:"max_streak"`
- MaxStreakStartDate string `json:"max_streak_start_date"`
- MaxStreakEndDate string `json:"max_streak_end_date"`
- CurrentStreak int64 `json:"current_streak"`
- CurrentStreakStartDate string `json:"current_streak_start_date"`
- CurrentStreakEndDate string `json:"current_streak_end_date"`
+ MaxStreak int64 `json:"max_streak"`
+ MaxStreakStartDate string `json:"max_streak_start_date"`
+ MaxStreakEndDate string `json:"max_streak_end_date"`
+ CurrentStreak interface{} `json:"current_streak"`
+ CurrentStreakStartDate string `json:"current_streak_start_date"`
+ CurrentStreakEndDate string `json:"current_streak_end_date"`
}
func (q *Queries) GetUserWindowStreaks(ctx context.Context, arg GetUserWindowStreaksParams) (GetUserWindowStreaksRow, error) {
@@ -1015,31 +901,44 @@ func (q *Queries) GetUsers(ctx context.Context, arg GetUsersParams) ([]User, err
}
const getWantedDocuments = `-- name: GetWantedDocuments :many
-SELECT CAST(value AS TEXT) AS id
+SELECT
+ CAST(value AS TEXT) AS id,
+ CAST((documents.filepath IS NULL) AS BOOLEAN) AS want_file,
+ CAST((documents.synced != true) AS BOOLEAN) AS want_metadata
FROM json_each(?1)
LEFT JOIN documents
ON value = documents.id
WHERE (
documents.id IS NOT NULL
- AND documents.synced = false
+ AND documents.deleted = false
+ AND (
+ documents.synced = false
+ OR documents.filepath IS NULL
+ )
)
OR (documents.id IS NULL)
OR CAST(?1 AS TEXT) != CAST(?1 AS TEXT)
`
-func (q *Queries) GetWantedDocuments(ctx context.Context, documentIds string) ([]string, error) {
+type GetWantedDocumentsRow struct {
+ ID string `json:"id"`
+ WantFile bool `json:"want_file"`
+ WantMetadata bool `json:"want_metadata"`
+}
+
+func (q *Queries) GetWantedDocuments(ctx context.Context, documentIds string) ([]GetWantedDocumentsRow, error) {
rows, err := q.db.QueryContext(ctx, getWantedDocuments, documentIds)
if err != nil {
return nil, err
}
defer rows.Close()
- var items []string
+ var items []GetWantedDocumentsRow
for rows.Next() {
- var id string
- if err := rows.Scan(&id); err != nil {
+ var i GetWantedDocumentsRow
+ if err := rows.Scan(&i.ID, &i.WantFile, &i.WantMetadata); err != nil {
return nil, err
}
- items = append(items, id)
+ items = append(items, i)
}
if err := rows.Close(); err != nil {
return nil, err
diff --git a/docker-compose.yml b/docker-compose.yml
index cc27106..ee36dc8 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,7 +1,6 @@
---
services:
- sync-ninja:
- # working_dir: /app
+ bookmanager:
environment:
- CONFIG_PATH=/data
- DATA_PATH=/data
diff --git a/graph/graph.go b/graph/graph.go
index df13b18..a22dd46 100644
--- a/graph/graph.go
+++ b/graph/graph.go
@@ -28,10 +28,7 @@ type SVGBezierOpposedLine struct {
Angle int
}
-func GetSVGGraphData(inputData []database.GetDailyReadStatsRow, svgWidth int) SVGGraphData {
- // Static Padding
- var padding int = 5
-
+func GetSVGGraphData(inputData []database.GetDailyReadStatsRow, svgWidth int, svgHeight int) SVGGraphData {
// Derive Height
var maxHeight int = 0
for _, item := range inputData {
@@ -40,7 +37,13 @@ func GetSVGGraphData(inputData []database.GetDailyReadStatsRow, svgWidth int) SV
}
}
- // Derive Block Offsets & Transformed Coordinates (Line & Bar)
+ // Vertical Graph Real Estate
+ var sizePercentage float32 = 0.5
+
+ // Scale Ratio -> Desired Height
+ var sizeRatio float32 = float32(svgHeight) * sizePercentage / float32(maxHeight)
+
+ // Point Block Offset
var blockOffset int = int(math.Floor(float64(svgWidth) / float64(len(inputData))))
// Line & Bar Points
@@ -52,19 +55,19 @@ func GetSVGGraphData(inputData []database.GetDailyReadStatsRow, svgWidth int) SV
var maxBY int = 0
var minBX int = 0
for idx, item := range inputData {
- itemSize := int(item.MinutesRead)
- itemY := (maxHeight + padding) - itemSize
+ itemSize := int(float32(item.MinutesRead) * sizeRatio)
+ itemY := svgHeight - itemSize
+ lineX := (idx + 1) * blockOffset
barPoints = append(barPoints, SVGGraphPoint{
- X: (idx * blockOffset) + (blockOffset / 2),
+ X: lineX - (blockOffset / 2),
Y: itemY,
- Size: itemSize + padding,
+ Size: itemSize,
})
- lineX := (idx + 1) * blockOffset
linePoints = append(linePoints, SVGGraphPoint{
X: lineX,
Y: itemY,
- Size: itemSize + padding,
+ Size: itemSize,
})
if lineX > maxBX {
@@ -82,13 +85,13 @@ func GetSVGGraphData(inputData []database.GetDailyReadStatsRow, svgWidth int) SV
// Return Data
return SVGGraphData{
- Width: svgWidth + padding*2,
- Height: maxHeight + padding*2,
+ Width: svgWidth,
+ Height: svgHeight,
Offset: blockOffset,
LinePoints: linePoints,
BarPoints: barPoints,
BezierPath: getSVGBezierPath(linePoints),
- BezierFill: fmt.Sprintf("L %d,%d L %d,%d Z", maxBX, maxBY+padding, minBX, maxBY+padding),
+ BezierFill: fmt.Sprintf("L %d,%d L %d,%d Z", maxBX, maxBY, minBX+blockOffset, maxBY),
}
}
diff --git a/cmd/main.go b/main.go
similarity index 100%
rename from cmd/main.go
rename to main.go
diff --git a/screenshots/documents.png b/screenshots/web_documents.png
similarity index 100%
rename from screenshots/documents.png
rename to screenshots/web_documents.png
diff --git a/screenshots/home.png b/screenshots/web_home.png
similarity index 100%
rename from screenshots/home.png
rename to screenshots/web_home.png
diff --git a/screenshots/login.png b/screenshots/web_login.png
similarity index 100%
rename from screenshots/login.png
rename to screenshots/web_login.png
diff --git a/templates/graph.svg b/templates/graph.svg
index a587c26..d8fa64c 100644
--- a/templates/graph.svg
+++ b/templates/graph.svg
@@ -1,8 +1,13 @@
-
- {{ $data := (GetSVGGraphData .Data.GraphData 800)}}
+ {{ $data := (GetSVGGraphData .Data.GraphData 800 70 )}}
+
@@ -67,7 +67,7 @@
type="password"
id="password"
name="password"
- class="flex-1 appearance-none border border-gray-300 w-full py-2 px-4 bg-white text-gray-700 placeholder-gray-400 shadow-sm text-base focus:outline-none focus:ring-2 focus:ring-purple-600 focus:border-transparent"
+ class="flex-1 appearance-none rounded-none border border-gray-300 w-full py-2 px-4 bg-white text-gray-700 placeholder-gray-400 shadow-sm text-base focus:outline-none focus:ring-2 focus:ring-purple-600 focus:border-transparent"
placeholder="Password"
/>
+ {{ if .RegistrationEnabled }}
{{ if .Register }}
@@ -103,6 +104,7 @@
{{end}}
+ {{ end }}