[add] split wanted files vs wanted metadata for ko apis, [add] documentation
.gitignore (vendored, 1 line changed)
@@ -1,3 +1,4 @@
 .DS_Store
 data/
+build/
 .direnv/
Dockerfile (18 lines changed)
@@ -1,12 +1,20 @@
-# FROM golang:1.20-alpine AS build
 FROM alpine:edge AS build
 RUN apk add --no-cache --update go gcc g++
 WORKDIR /app
 COPY . /app
-RUN go mod download
-RUN CGO_ENABLED=1 CGO_CFLAGS="-D_LARGEFILE64_SOURCE" go build -o /sync-ninja cmd/main.go
 
+# Copy Resources
+RUN mkdir -p /opt/bookmanager
+RUN cp -a ./templates /opt/bookmanager/templates
+RUN cp -a ./assets /opt/bookmanager/assets
+
+# Download Dependencies & Compile
+RUN go mod download
+RUN CGO_ENABLED=1 CGO_CFLAGS="-D_LARGEFILE64_SOURCE" go build -o /opt/bookmanager/server
+
+# Create Image
 FROM alpine:3.18
-COPY --from=build /sync-ninja /sync-ninja
+COPY --from=build /opt/bookmanager /opt/bookmanager
+WORKDIR /opt/bookmanager
 EXPOSE 8585
-ENTRYPOINT ["/sync-ninja", "serve"]
+ENTRYPOINT ["/opt/bookmanager/server", "serve"]
Makefile (12 lines changed)
@@ -1,12 +1,18 @@
+build_local:
+	mkdir -p ./build
+	cp -a ./templates ./build/templates
+	cp -a ./assets ./build/assets
+	CGO_ENABLED=1 CGO_CFLAGS="-D_LARGEFILE64_SOURCE" go build -o ./build/server
+
 docker_build_local:
-	docker build -t sync-ninja:latest .
+	docker build -t bookmanager:latest .
 
 docker_build_release_beta:
 	docker buildx build \
 		--platform linux/amd64,linux/arm64 \
-		-t gitea.va.reichard.io/reichard/sync-ninja:beta --push .
+		-t gitea.va.reichard.io/reichard/bookmanager:beta --push .
 
 docker_build_release_latest:
 	docker buildx build \
 		--platform linux/amd64,linux/arm64 \
-		-t gitea.va.reichard.io/reichard/sync-ninja:latest --push .
+		-t gitea.va.reichard.io/reichard/bookmanager:latest --push .
README.md (55 lines changed)
@@ -1,14 +1,14 @@
 # Book Manager
 
 <p align="center">
-    <a href="https://gitea.va.reichard.io/evan/BookManager/raw/branch/master/screenshots/login.png">
-        <img src="https://gitea.va.reichard.io/evan/BookManager/raw/branch/master/screenshots/login.png" width="30%">
+    <a href="https://gitea.va.reichard.io/evan/BookManager/raw/branch/master/screenshots/web_login.png">
+        <img src="https://gitea.va.reichard.io/evan/BookManager/raw/branch/master/screenshots/web_login.png" width="30%">
     </a>
-    <a href="https://gitea.va.reichard.io/evan/BookManager/raw/branch/master/screenshots/home.png">
-        <img src="https://gitea.va.reichard.io/evan/BookManager/raw/branch/master/screenshots/home.png" width="30%">
+    <a href="https://gitea.va.reichard.io/evan/BookManager/raw/branch/master/screenshots/web_home.png">
+        <img src="https://gitea.va.reichard.io/evan/BookManager/raw/branch/master/screenshots/web_home.png" width="30%">
     </a>
-    <a href="https://gitea.va.reichard.io/evan/BookManager/raw/branch/master/screenshots/documents.png">
-        <img src="https://gitea.va.reichard.io/evan/BookManager/raw/branch/master/screenshots/documents.png" width="30%">
+    <a href="https://gitea.va.reichard.io/evan/BookManager/raw/branch/master/screenshots/web_documents.png">
+        <img src="https://gitea.va.reichard.io/evan/BookManager/raw/branch/master/screenshots/web_documents.png" width="30%">
     </a>
 </p>
 

@@ -25,10 +25,29 @@ In additional to the compatible KOSync API's, we add:
 - Additional APIs to automatically upload reading statistics
 - Automatically upload documents to the server (can download in the "Documents" view)
 - Automatic book cover metadata scraping (Thanks [OpenLibrary](https://openlibrary.org/))
+- No JavaScript! All information is rendered server side.
+
+# Server
+
+## Configuration
+
+| Environment Variable | Default Value | Description                                                           |
+| -------------------- | ------------- | --------------------------------------------------------------------- |
+| DATABASE_TYPE        | SQLite        | Currently only "SQLite" is supported                                   |
+| DATABASE_NAME        | bbank         | The database name, or in SQLite's case, the filename                   |
+| DATABASE_PASSWORD    | <EMPTY>       | Currently not used. Placeholder for potential alternative DB support   |
+| CONFIG_PATH          | /config       | Directory in which to store SQLite's DB                                |
+| DATA_PATH            | /data         | Directory in which to store the documents and cover metadata           |
+| LISTEN_PORT          | 8585          | Port the server listens on                                             |
+| REGISTRATION_ENABLED | false         | Whether to allow registration (applies to both WebApp & KOSync API)    |
+
+# Client (KOReader Plugin)
+
+See documentation in the `client` subfolder: [SyncNinja](https://gitea.va.reichard.io/evan/BookManager/src/branch/master/client/)
+
 # Development
 
-SQLC Generation:
+SQLC Generation (v1.21.0):
 
 ```
 go install github.com/sqlc-dev/sqlc/cmd/sqlc@latest

@@ -41,6 +60,28 @@ Run Development:
 CONFIG_PATH=./data DATA_PATH=./data go run cmd/main.go serve
 ```
+
+# Building
+
+The `Dockerfile` and `Makefile` contain the build information:
+
+```
+# Build Local Docker Image
+make docker_build_local
+
+# Push Latest
+make docker_build_release_latest
+```
+
+When building manually, you must enable CGO:
+
+```
+# Download Dependencies
+go mod download
+
+# Compile (Binary `./bookmanager`)
+CGO_ENABLED=1 CGO_CFLAGS="-D_LARGEFILE64_SOURCE" go build -o /bookmanager cmd/main.go
+```
+
 ## Notes
 
 - Icons: https://www.svgrepo.com/collection/solar-bold-icons
@@ -74,6 +74,8 @@ func (api *API) createAppResourcesRoute(routeName string, args ...map[string]any
 			"DatabaseInfo": database_info,
 			"GraphData":    read_graph_data,
 		}
+	} else if routeName == "login" {
+		templateVars["RegistrationEnabled"] = api.Config.RegistrationEnabled
 	}
 
 	c.HTML(http.StatusOK, routeName, templateVars)

@@ -150,20 +152,3 @@ func (api *API) getDocumentCover(c *gin.Context) {
 
 	c.File(*coverFilePath)
 }
-
-/*
-  METADATA:
-    - Metadata Match
-    - Update Metadata
-*/
-
-/*
-  GRAPHS:
-    - Streaks (Daily, Weekly, Monthly)
-    - Last Week Activity (Daily - Pages & Time)
-
-
-    - Pages Read (Daily, Weekly, Monthly)
-    - Reading Progress
-    - Average Reading Time (Daily, Weekly, Monthly)
-*/
@@ -113,6 +113,10 @@ func (api *API) authLogout(c *gin.Context) {
 }
 
 func (api *API) authFormRegister(c *gin.Context) {
+	if !api.Config.RegistrationEnabled {
+		c.AbortWithStatus(http.StatusConflict)
+	}
+
 	username := strings.TrimSpace(c.PostForm("username"))
 	rawPassword := strings.TrimSpace(c.PostForm("password"))
 
@@ -9,6 +9,7 @@ import (
 	"net/http"
 	"os"
 	"path/filepath"
+	"strings"
 	"time"
 
 	argon2 "github.com/alexedwards/argon2id"

@@ -61,9 +62,10 @@ type requestCheckDocumentSync struct {
 }
 
 type responseCheckDocumentSync struct {
-	Want   []string            `json:"want"`
-	Give   []database.Document `json:"give"`
-	Delete []string            `json:"deleted"`
+	WantFiles    []string            `json:"want_files"`
+	WantMetadata []string            `json:"want_metadata"`
+	Give         []database.Document `json:"give"`
+	Delete       []string            `json:"deleted"`
 }
 
 type requestDocumentID struct {
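For clarity on the wire change above: the old single `want` array becomes two arrays, so the KOReader client can decide independently whether a document needs its metadata, its file, or both uploaded. A minimal sketch of the new response shape, assuming the struct above (`Give` simplified to strings for illustration; it is really `[]database.Document`, and the values are made up):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Mirror of responseCheckDocumentSync above, simplified for illustration.
type response struct {
	WantFiles    []string `json:"want_files"`
	WantMetadata []string `json:"want_metadata"`
	Give         []string `json:"give"`
	Delete       []string `json:"deleted"`
}

func main() {
	out, _ := json.Marshal(response{
		WantFiles:    []string{"doc-a"},          // server is missing the file
		WantMetadata: []string{"doc-a", "doc-b"}, // server is missing metadata
		Give:         []string{},
		Delete:       []string{},
	})
	fmt.Println(string(out))
	// {"want_files":["doc-a"],"want_metadata":["doc-a","doc-b"],"give":[],"deleted":[]}
}
```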
@ -79,6 +81,10 @@ func (api *API) authorizeUser(c *gin.Context) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (api *API) createUser(c *gin.Context) {
|
func (api *API) createUser(c *gin.Context) {
|
||||||
|
if !api.Config.RegistrationEnabled {
|
||||||
|
c.AbortWithStatus(http.StatusConflict)
|
||||||
|
}
|
||||||
|
|
||||||
var rUser requestUser
|
var rUser requestUser
|
||||||
if err := c.ShouldBindJSON(&rUser); err != nil {
|
if err := c.ShouldBindJSON(&rUser); err != nil {
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid User Data"})
|
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid User Data"})
|
||||||
@ -96,7 +102,6 @@ func (api *API) createUser(c *gin.Context) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO - Initial User is Admin & Enable / Disable Registration
|
|
||||||
rows, err := api.DB.Queries.CreateUser(api.DB.Ctx, database.CreateUserParams{
|
rows, err := api.DB.Queries.CreateUser(api.DB.Ctx, database.CreateUserParams{
|
||||||
ID: rUser.Username,
|
ID: rUser.Username,
|
||||||
Pass: hashedPassword,
|
Pass: hashedPassword,
|
||||||
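One caveat worth noting on the two registration guards added above: in Gin, `c.AbortWithStatus` only prevents subsequent middleware and handlers from running; it does not return from the current function, so the code after the `if` block still executes. A guard usually pairs the abort with an explicit `return`. A sketch of that pattern (not what this diff does):

```go
func (api *API) createUser(c *gin.Context) {
	if !api.Config.RegistrationEnabled {
		// AbortWithStatus stops later handlers; return stops this one.
		c.AbortWithStatus(http.StatusConflict)
		return
	}
	// ... binding and user creation then run only when registration is enabled
}
```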
@ -411,22 +416,38 @@ func (api *API) checkDocumentsSync(c *gin.Context) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
wantedDocIDs, err := api.DB.Queries.GetWantedDocuments(api.DB.Ctx, string(jsonHaves))
|
wantedDocs, err := api.DB.Queries.GetWantedDocuments(api.DB.Ctx, string(jsonHaves))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error("GetWantedDocuments Error:", err)
|
log.Error("GetWantedDocuments Error:", err)
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
|
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Request"})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Split Metadata & File Wants
|
||||||
|
var wantedMetadataDocIDs []string
|
||||||
|
var wantedFilesDocIDs []string
|
||||||
|
for _, v := range wantedDocs {
|
||||||
|
if v.WantMetadata {
|
||||||
|
wantedMetadataDocIDs = append(wantedMetadataDocIDs, v.ID)
|
||||||
|
}
|
||||||
|
if v.WantFile {
|
||||||
|
wantedFilesDocIDs = append(wantedFilesDocIDs, v.ID)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
rCheckDocSync := responseCheckDocumentSync{
|
rCheckDocSync := responseCheckDocumentSync{
|
||||||
Delete: []string{},
|
Delete: []string{},
|
||||||
Want: []string{},
|
WantFiles: []string{},
|
||||||
Give: []database.Document{},
|
WantMetadata: []string{},
|
||||||
|
Give: []database.Document{},
|
||||||
}
|
}
|
||||||
|
|
||||||
// Ensure Empty Array
|
// Ensure Empty Array
|
||||||
if wantedDocIDs != nil {
|
if wantedMetadataDocIDs != nil {
|
||||||
rCheckDocSync.Want = wantedDocIDs
|
rCheckDocSync.WantMetadata = wantedMetadataDocIDs
|
||||||
|
}
|
||||||
|
if wantedFilesDocIDs != nil {
|
||||||
|
rCheckDocSync.WantFiles = wantedFilesDocIDs
|
||||||
}
|
}
|
||||||
if missingDocs != nil {
|
if missingDocs != nil {
|
||||||
rCheckDocSync.Give = missingDocs
|
rCheckDocSync.Give = missingDocs
|
||||||
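The fan-out above turns one query result into the two ID lists of the response. To make the behavior concrete, a self-contained sketch using the generated `GetWantedDocumentsRow` type (defined later in this diff), with illustrative rows:

```go
rows := []database.GetWantedDocumentsRow{
	{ID: "doc-a", WantFile: true, WantMetadata: true},  // unknown to the server
	{ID: "doc-b", WantFile: false, WantMetadata: true}, // file on disk, metadata stale
}

var wantedMetadataDocIDs, wantedFilesDocIDs []string
for _, v := range rows {
	if v.WantMetadata {
		wantedMetadataDocIDs = append(wantedMetadataDocIDs, v.ID)
	}
	if v.WantFile {
		wantedFilesDocIDs = append(wantedFilesDocIDs, v.ID)
	}
}
// wantedMetadataDocIDs == ["doc-a", "doc-b"], wantedFilesDocIDs == ["doc-a"]
```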
@ -482,6 +503,9 @@ func (api *API) uploadDocumentFile(c *gin.Context) {
|
|||||||
fileName = fileName + " - Unknown"
|
fileName = fileName + " - Unknown"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Remove Slashes
|
||||||
|
fileName = strings.ReplaceAll(fileName, "/", "")
|
||||||
|
|
||||||
// Derive & Sanitize File Name
|
// Derive & Sanitize File Name
|
||||||
fileName = "." + filepath.Clean(fmt.Sprintf("/%s [%s]%s", fileName, document.ID, fileExtension))
|
fileName = "." + filepath.Clean(fmt.Sprintf("/%s [%s]%s", fileName, document.ID, fileExtension))
|
||||||
|
|
||||||
|
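Tracing the sanitization with a hypothetical hostile name shows why both steps matter: stripping slashes first means `filepath.Clean` never sees a path separator, so traversal sequences end up inert inside the final name. A worked sketch (made-up ID and extension):

```go
fileName := "evil/../../name"
fileName = strings.ReplaceAll(fileName, "/", "") // "evil....name"
fileName = "." + filepath.Clean(fmt.Sprintf("/%s [%s]%s", fileName, "abc123", ".epub"))
// "./evil....name [abc123].epub", which stays inside the data directory
```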
client/README.md (new file, 15 lines)
@@ -0,0 +1,15 @@
+# Book Manager - SyncNinja KOReader Plugin
+
+This is BookManager's KOReader plugin, `syncninja.koplugin`.
+
+# Installation
+
+Copy the `syncninja.koplugin` directory to the `plugins` directory of your KOReader installation. Restart KOReader and SyncNinja will be accessible via the Tools menu.
+
+# Configuration
+
+You must configure the BookManager server and credentials in SyncNinja. After that, you can configure the sync cadence, as well as whether you'd like the plugin to sync your activity, document metadata, and/or the documents themselves.
+
+# KOSync Compatibility
+
+BookManager implements APIs compatible with the KOSync plugin, meaning you can use this server for KOSync as well (and it's recommended!). SyncNinja provides an easy way to merge configurations between KOSync and itself in the menu.
@@ -1,3 +1,4 @@
+local ConfirmBox = require("ui/widget/confirmbox")
 local DataStorage = require("datastorage")
 local Device = require("device")
 local Dispatcher = require("dispatcher")

@@ -593,11 +594,12 @@ function SyncNinja:checkActivity(interactive)
     -- API Callback Function
     local callback_func = function(ok, body)
         if not ok then
-            -- TODO: if interactive
+            if interactive == true then
             UIManager:show(InfoMessage:new{
                 text = _("SyncNinja: checkActivity Error"),
                 timeout = 3
             })
+            end
             return logger.dbg("SyncNinja: checkActivity Error:", dump(body))
         end

@@ -626,11 +628,12 @@ function SyncNinja:uploadActivity(activity_data, interactive)
     -- API Callback Function
     local callback_func = function(ok, body)
         if not ok then
-            -- TODO: if interactive
+            if interactive == true then
             UIManager:show(InfoMessage:new{
                 text = _("SyncNinja: uploadActivity Error"),
                 timeout = 3
             })
+            end
             return logger.dbg("SyncNinja: uploadActivity Error:", dump(body))
         end

@@ -660,27 +663,47 @@ function SyncNinja:checkDocuments(interactive)
     -- API Callback Function
     local callback_func = function(ok, body)
         if not ok then
-            -- TODO: if interactive
+            if interactive == true then
             UIManager:show(InfoMessage:new{
                 text = _("SyncNinja: checkDocuments Error"),
                 timeout = 3
             })
+            end
             return logger.dbg("SyncNinja: checkDocuments Error:", dump(body))
         end
 
-        -- Documents Wanted
-        if not (next(body.want) == nil) then
-            local hash_want = {}
-            for _, v in pairs(body.want) do hash_want[v] = true end
+        -- Document Metadata Wanted
+        if not (next(body.want_metadata) == nil) then
+            local hash_want_metadata = {}
+            for _, v in pairs(body.want_metadata) do
+                hash_want_metadata[v] = true
+            end
 
             local upload_doc_metadata = {}
             for _, v in pairs(doc_metadata) do
-                if hash_want[v.id] == true then
+                if hash_want_metadata[v.id] == true then
                     table.insert(upload_doc_metadata, v)
                 end
             end
 
-            self:uploadDocuments(upload_doc_metadata, interactive)
+            self:uploadDocumentMetadata(upload_doc_metadata, interactive)
+        end
+
+        -- Document Files Wanted
+        if not (next(body.want_files) == nil) then
+            local hash_want_files = {}
+            for _, v in pairs(body.want_files) do
+                hash_want_files[v] = true
+            end
+
+            local upload_doc_files = {}
+            for _, v in pairs(doc_metadata) do
+                if hash_want_files[v.id] == true then
+                    table.insert(upload_doc_files, v)
+                end
+            end
+
+            self:uploadDocumentFiles(upload_doc_files, interactive)
         end
 
         -- Documents Provided

@@ -706,8 +729,8 @@ function SyncNinja:downloadDocuments(doc_metadata, interactive)
     -- TODO
 end
 
-function SyncNinja:uploadDocuments(doc_metadata, interactive)
-    logger.dbg("SyncNinja: uploadDocuments")
+function SyncNinja:uploadDocumentMetadata(doc_metadata, interactive)
+    logger.dbg("SyncNinja: uploadDocumentMetadata")
 
     -- Ensure Document Sync Enabled
     if self.settings.sync_documents ~= true then return end

@@ -715,12 +738,14 @@ function SyncNinja:uploadDocuments(doc_metadata, interactive)
     -- API Callback Function
     local callback_func = function(ok, body)
         if not ok then
-            -- TODO: if interactive
-            UIManager:show(InfoMessage:new{
-                text = _("SyncNinja: uploadDocuments Error"),
-                timeout = 3
-            })
-            return logger.dbg("SyncNinja: uploadDocuments Error:", dump(body))
+            if interactive == true then
+                UIManager:show(InfoMessage:new{
+                    text = _("SyncNinja: uploadDocumentMetadata Error"),
+                    timeout = 3
+                })
+            end
+            return logger.dbg("SyncNinja: uploadDocumentMetadata Error:",
+                              dump(body))
         end
     end

@@ -735,20 +760,51 @@ function SyncNinja:uploadDocuments(doc_metadata, interactive)
     local ok, err = pcall(client.add_documents, client, self.settings.username,
                           self.settings.password, doc_metadata, callback_func)
+end
+
+function SyncNinja:uploadDocumentFiles(doc_metadata, interactive)
+    logger.dbg("SyncNinja: uploadDocumentFiles")
 
     -- Ensure Document File Sync Enabled
     if self.settings.sync_document_files ~= true then return end
     if interactive ~= true then return end
 
+    -- API Callback Function
+    local callback_func = function(ok, body)
+        if not ok then
+            UIManager:show(InfoMessage:new{
+                text = _("SyncNinja: uploadDocumentFiles Error"),
+                timeout = 3
+            })
+            return logger.dbg("SyncNinja: uploadDocumentFiles Error:",
+                              dump(body))
+        end
+    end
+
     -- API File Upload
     local confirm_upload_callback = function()
+        UIManager:show(InfoMessage:new{
+            text = _("Uploading Documents - Please Wait...")
+        })
+
+        -- API Client
+        local SyncNinjaClient = require("SyncNinjaClient")
+        local client = SyncNinjaClient:new{
+            custom_url = self.settings.server,
+            service_spec = self.path .. "/api.json"
+        }
+
         for _, v in pairs(doc_metadata) do
             if v.filepath ~= nil then
+                -- TODO: Partial File Uploads (Resolve: OOM Issue)
                 local ok, err = pcall(client.upload_document, client,
                                       self.settings.username,
                                       self.settings.password, v.id, v.filepath,
                                       callback_func)
             end
         end
+
+        UIManager:show(InfoMessage:new{text = _("Uploading Documents Complete")})
     end
 
     UIManager:show(ConfirmBox:new{
@@ -2,27 +2,37 @@ package config
 
 import (
 	"os"
+	"strings"
 )
 
 type Config struct {
+	// Server Config
+	Version    string
+	ListenPort string
+
+	// DB Configuration
 	DBType     string
 	DBName     string
 	DBPassword string
+
+	// Data Paths
 	ConfigPath string
 	DataPath   string
-	ListenPort string
-	Version    string
+
+	// Miscellaneous Settings
+	RegistrationEnabled bool
 }
 
 func Load() *Config {
 	return &Config{
-		DBType:     getEnv("DATABASE_TYPE", "SQLite"),
-		DBName:     getEnv("DATABASE_NAME", "bbank"),
-		DBPassword: getEnv("DATABASE_PASSWORD", ""),
-		ConfigPath: getEnv("CONFIG_PATH", "/config"),
-		DataPath:   getEnv("DATA_PATH", "/data"),
-		ListenPort: getEnv("LISTEN_PORT", "8585"),
-		Version:    "0.0.1",
+		Version:             "0.0.1",
+		DBType:              trimLowerString(getEnv("DATABASE_TYPE", "SQLite")),
+		DBName:              trimLowerString(getEnv("DATABASE_NAME", "book_manager")),
+		DBPassword:          getEnv("DATABASE_PASSWORD", ""),
+		ConfigPath:          getEnv("CONFIG_PATH", "/config"),
+		DataPath:            getEnv("DATA_PATH", "/data"),
+		ListenPort:          getEnv("LISTEN_PORT", "8585"),
+		RegistrationEnabled: trimLowerString(getEnv("REGISTRATION_ENABLED", "false")) == "true",
 	}
 }

@@ -32,3 +42,7 @@ func getEnv(key, fallback string) string {
 	}
 	return fallback
 }
+
+func trimLowerString(val string) string {
+	return strings.ToLower(strings.TrimSpace(val))
+}
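The net effect of the new normalization, as a sketch: `REGISTRATION_ENABLED` now tolerates case and stray whitespace, and `DBType` is compared in lowercase, which is why the check in the database manager below changes from `"SQLite"` to `"sqlite"`:

```go
os.Setenv("REGISTRATION_ENABLED", "  TRUE ")
enabled := trimLowerString(getEnv("REGISTRATION_ENABLED", "false")) == "true"
// trimLowerString("  TRUE ") == "true", so enabled == true

os.Setenv("DATABASE_TYPE", "SQLite")
dbType := trimLowerString(getEnv("DATABASE_TYPE", "SQLite")) // "sqlite"
```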
@@ -4,6 +4,7 @@ import (
 	"context"
 	"database/sql"
 	_ "embed"
+	"fmt"
 	"path"
 
 	sqlite "github.com/mattn/go-sqlite3"

@@ -20,11 +21,6 @@ type DBManager struct {
 //go:embed schema.sql
 var ddl string
 
-func foobar() string {
-	log.Info("WTF")
-	return ""
-}
-
 func NewMgr(c *config.Config) *DBManager {
 	// Create Manager
 	dbm := &DBManager{

@@ -32,19 +28,12 @@ func NewMgr(c *config.Config) *DBManager {
 	}
 
 	// Create Database
-	if c.DBType == "SQLite" {
+	if c.DBType == "sqlite" {
 		sql.Register("sqlite3_custom", &sqlite.SQLiteDriver{
-			ConnectHook: func(conn *sqlite.SQLiteConn) error {
-				if err := conn.RegisterFunc("test_func", foobar, false); err != nil {
-					log.Info("Error Registering")
-					return err
-				}
-				return nil
-			},
+			ConnectHook: connectHookSQLite,
 		})
 
-		dbLocation := path.Join(c.ConfigPath, "bbank.db")
+		dbLocation := path.Join(c.ConfigPath, fmt.Sprintf("%s.db", c.DBName))
 
 		var err error
 		dbm.DB, err = sql.Open("sqlite3_custom", dbLocation)

@@ -64,3 +53,13 @@ func NewMgr(c *config.Config) *DBManager {
 
 	return dbm
 }
+
+func connectHookSQLite(conn *sqlite.SQLiteConn) error {
+	if err := conn.RegisterFunc("test_func", func() string {
+		return "FOOBAR"
+	}, false); err != nil {
+		log.Info("Error Registering Function")
+		return err
+	}
+	return nil
+}
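Since the hook registers `test_func` on every connection opened through the `sqlite3_custom` driver, it becomes callable from plain SQL on that handle. A quick sketch:

```go
var out string
if err := dbm.DB.QueryRow("SELECT test_func()").Scan(&out); err != nil {
	log.Error("test_func query failed: ", err)
}
// out == "FOOBAR"
```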
@@ -123,13 +123,20 @@ WHERE
     AND documents.id NOT IN (sqlc.slice('document_ids'));
 
 -- name: GetWantedDocuments :many
-SELECT CAST(value AS TEXT) AS id
+SELECT
+    CAST(value AS TEXT) AS id,
+    CAST((documents.filepath IS NULL) AS BOOLEAN) AS want_file,
+    CAST((documents.synced != true) AS BOOLEAN) AS want_metadata
 FROM json_each(?1)
 LEFT JOIN documents
 ON value = documents.id
 WHERE (
     documents.id IS NOT NULL
-    AND documents.synced = false
+    AND documents.deleted = false
+    AND (
+        documents.synced = false
+        OR documents.filepath IS NULL
+    )
 )
 OR (documents.id IS NULL)
 OR CAST($document_ids AS TEXT) != CAST($document_ids AS TEXT);

@@ -242,7 +249,7 @@ FROM capped_stats;
 
 -- name: GetDocumentDaysRead :one
 WITH document_days AS (
-    SELECT date(start_time, 'localtime') AS dates
+    SELECT DATE(start_time, 'localtime') AS dates
     FROM rescaled_activity
     WHERE document_id = $document_id
     AND user_id = $user_id

@@ -251,93 +258,13 @@ WITH document_days AS (
 SELECT CAST(count(*) AS INTEGER) AS days_read
 FROM document_days;
 
--- name: GetUserDayStreaks :one
-WITH document_days AS (
-    SELECT date(start_time, 'localtime') AS read_day
-    FROM activity
-    WHERE user_id = $user_id
-    GROUP BY read_day
-    ORDER BY read_day DESC
-),
-partitions AS (
-    SELECT
-        document_days.*,
-        row_number() OVER (
-            PARTITION BY 1 ORDER BY read_day DESC
-        ) AS seqnum
-    FROM document_days
-),
-streaks AS (
-    SELECT
-        count(*) AS streak,
-        MIN(read_day) AS start_date,
-        MAX(read_day) AS end_date
-    FROM partitions
-    GROUP BY date(read_day, '+' || seqnum || ' day')
-    ORDER BY end_date DESC
-),
-max_streak AS (
-    SELECT
-        MAX(streak) AS max_streak,
-        start_date AS max_streak_start_date,
-        end_date AS max_streak_end_date
-    FROM streaks
-)
-SELECT
-    CAST(max_streak AS INTEGER),
-    CAST(max_streak_start_date AS TEXT),
-    CAST(max_streak_end_date AS TEXT),
-    streak AS current_streak,
-    CAST(start_date AS TEXT) AS current_streak_start_date,
-    CAST(end_date AS TEXT) AS current_streak_end_date
-FROM max_streak, streaks LIMIT 1;
-
--- name: GetUserWeekStreaks :one
-WITH document_weeks AS (
-    SELECT STRFTIME('%Y-%m-%d', start_time, 'localtime', 'weekday 0', '-7 day') AS read_week
-    FROM activity
-    WHERE user_id = $user_id
-    GROUP BY read_week
-    ORDER BY read_week DESC
-),
-partitions AS (
-    SELECT
-        document_weeks.*,
-        row_number() OVER (
-            PARTITION BY 1 ORDER BY read_week DESC
-        ) AS seqnum
-    FROM document_weeks
-),
-streaks AS (
-    SELECT
-        count(*) AS streak,
-        MIN(read_week) AS start_date,
-        MAX(read_week) AS end_date
-    FROM partitions
-    GROUP BY date(read_week, '+' || (seqnum * 7) || ' day')
-    ORDER BY end_date DESC
-),
-max_streak AS (
-    SELECT
-        MAX(streak) AS max_streak,
-        start_date AS max_streak_start_date,
-        end_date AS max_streak_end_date
-    FROM streaks
-)
-SELECT
-    CAST(max_streak AS INTEGER),
-    CAST(max_streak_start_date AS TEXT),
-    CAST(max_streak_end_date AS TEXT),
-    streak AS current_streak,
-    CAST(start_date AS TEXT) AS current_streak_start_date,
-    CAST(end_date AS TEXT) AS current_streak_end_date
-FROM max_streak, streaks LIMIT 1;
-
 -- name: GetUserWindowStreaks :one
 WITH document_windows AS (
     SELECT CASE
-        WHEN ?2 = "WEEK" THEN STRFTIME('%Y-%m-%d', start_time, 'localtime', 'weekday 0', '-7 day')
-        WHEN ?2 = "DAY" THEN date(start_time, 'localtime')
+        -- TODO: Timezones! E.g. DATE(start_time, '-5 hours')
+        -- TODO: Timezones! E.g. DATE(start_time, '-5 hours', '-7 days')
+        WHEN ?2 = "WEEK" THEN STRFTIME('%Y-%m-%d', start_time, 'weekday 0', '-7 day')
+        WHEN ?2 = "DAY" THEN DATE(start_time)
     END AS read_window
     FROM activity
    WHERE user_id = $user_id

@@ -360,8 +287,8 @@ streaks AS (
         MAX(read_window) AS end_date
     FROM partitions
     GROUP BY CASE
-        WHEN ?2 = "DAY" THEN date(read_window, '+' || seqnum || ' day')
-        WHEN ?2 = "WEEK" THEN date(read_window, '+' || (seqnum * 7) || ' day')
+        WHEN ?2 = "DAY" THEN DATE(read_window, '+' || seqnum || ' day')
+        WHEN ?2 = "WEEK" THEN DATE(read_window, '+' || (seqnum * 7) || ' day')
     END
     ORDER BY end_date DESC
 ),

@@ -371,15 +298,29 @@ max_streak AS (
         start_date AS max_streak_start_date,
         end_date AS max_streak_end_date
     FROM streaks
+),
+current_streak AS (
+    SELECT
+        streak AS current_streak,
+        start_date AS current_streak_start_date,
+        end_date AS current_streak_end_date
+    FROM streaks
+    WHERE CASE
+        WHEN ?2 = "WEEK" THEN STRFTIME('%Y-%m-%d', 'now', 'weekday 0', '-7 day') = current_streak_end_date
+        WHEN ?2 = "DAY" THEN DATE('now', '-1 day') = current_streak_end_date OR DATE('now') = current_streak_end_date
+    END
+    LIMIT 1
 )
 SELECT
-    CAST(max_streak AS INTEGER),
-    CAST(max_streak_start_date AS TEXT),
-    CAST(max_streak_end_date AS TEXT),
-    streak AS current_streak,
-    CAST(start_date AS TEXT) AS current_streak_start_date,
-    CAST(end_date AS TEXT) AS current_streak_end_date
-FROM max_streak, streaks LIMIT 1;
+    CAST(IFNULL(max_streak, 0) AS INTEGER) AS max_streak,
+    CAST(IFNULL(max_streak_start_date, "N/A") AS TEXT) AS max_streak_start_date,
+    CAST(IFNULL(max_streak_end_date, "N/A") AS TEXT) AS max_streak_end_date,
+    IFNULL(current_streak, 0) AS current_streak,
+    CAST(IFNULL(current_streak_start_date, "N/A") AS TEXT) AS current_streak_start_date,
+    CAST(IFNULL(current_streak_end_date, "N/A") AS TEXT) AS current_streak_end_date
+FROM max_streak
+LEFT JOIN current_streak ON 1 = 1
+LIMIT 1;
 
 -- name: GetDatabaseInfo :one
 SELECT

@@ -391,16 +332,16 @@ LIMIT 1;
 
 -- name: GetDailyReadStats :many
 WITH RECURSIVE last_30_days (date) AS (
-    SELECT date('now') AS date
+    SELECT DATE('now') AS date
     UNION ALL
-    SELECT date(date, '-1 days')
+    SELECT DATE(date, '-1 days')
     FROM last_30_days
     LIMIT 30
 ),
 activity_records AS (
     SELECT
         sum(duration) AS seconds_read,
-        date(start_time, 'localtime') AS day
+        DATE(start_time, 'localtime') AS day
     FROM activity
     WHERE user_id = $user_id
     GROUP BY day

@@ -420,7 +361,7 @@ LIMIT 30;
 
 -- SELECT
 --     sum(duration) / 60 AS minutes_read,
---     date(start_time, 'localtime') AS day
+--     DATE(start_time, 'localtime') AS day
 -- FROM activity
 -- GROUP BY day
 -- ORDER BY day DESC
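Because `query.sql` is the sqlc input, the generated bindings must be refreshed after these edits. The regenerated `query_sql.go` below is the result of running sqlc's standard generate step:

```
sqlc generate
```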
@@ -157,16 +157,16 @@ func (q *Queries) GetActivity(ctx context.Context, arg GetActivityParams) ([]Act
 const getDailyReadStats = `-- name: GetDailyReadStats :many
 WITH RECURSIVE last_30_days (date) AS (
-    SELECT date('now') AS date
+    SELECT DATE('now') AS date
     UNION ALL
-    SELECT date(date, '-1 days')
+    SELECT DATE(date, '-1 days')
     FROM last_30_days
     LIMIT 30
 ),
 activity_records AS (
     SELECT
         sum(duration) AS seconds_read,
-        date(start_time, 'localtime') AS day
+        DATE(start_time, 'localtime') AS day
     FROM activity
     WHERE user_id = ?1
     GROUP BY day

@@ -372,7 +372,7 @@ func (q *Queries) GetDocument(ctx context.Context, documentID string) (Document,
 const getDocumentDaysRead = `-- name: GetDocumentDaysRead :one
 WITH document_days AS (
-    SELECT date(start_time, 'localtime') AS dates
+    SELECT DATE(start_time, 'localtime') AS dates
     FROM rescaled_activity
     WHERE document_id = ?1
     AND user_id = ?2

@@ -758,141 +758,13 @@ func (q *Queries) GetUser(ctx context.Context, userID string) (User, error) {
 	return i, err
 }
 
-const getUserDayStreaks = `-- name: GetUserDayStreaks :one
-WITH document_days AS (
-    SELECT date(start_time, 'localtime') AS read_day
-    FROM activity
-    WHERE user_id = ?1
-    GROUP BY read_day
-    ORDER BY read_day DESC
-),
-partitions AS (
-    SELECT
-        document_days.read_day,
-        row_number() OVER (
-            PARTITION BY 1 ORDER BY read_day DESC
-        ) AS seqnum
-    FROM document_days
-),
-streaks AS (
-    SELECT
-        count(*) AS streak,
-        MIN(read_day) AS start_date,
-        MAX(read_day) AS end_date
-    FROM partitions
-    GROUP BY date(read_day, '+' || seqnum || ' day')
-    ORDER BY end_date DESC
-),
-max_streak AS (
-    SELECT
-        MAX(streak) AS max_streak,
-        start_date AS max_streak_start_date,
-        end_date AS max_streak_end_date
-    FROM streaks
-)
-SELECT
-    CAST(max_streak AS INTEGER),
-    CAST(max_streak_start_date AS TEXT),
-    CAST(max_streak_end_date AS TEXT),
-    streak AS current_streak,
-    CAST(start_date AS TEXT) AS current_streak_start_date,
-    CAST(end_date AS TEXT) AS current_streak_end_date
-FROM max_streak, streaks LIMIT 1
-`
-
-type GetUserDayStreaksRow struct {
-	MaxStreak              int64  `json:"max_streak"`
-	MaxStreakStartDate     string `json:"max_streak_start_date"`
-	MaxStreakEndDate       string `json:"max_streak_end_date"`
-	CurrentStreak          int64  `json:"current_streak"`
-	CurrentStreakStartDate string `json:"current_streak_start_date"`
-	CurrentStreakEndDate   string `json:"current_streak_end_date"`
-}
-
-func (q *Queries) GetUserDayStreaks(ctx context.Context, userID string) (GetUserDayStreaksRow, error) {
-	row := q.db.QueryRowContext(ctx, getUserDayStreaks, userID)
-	var i GetUserDayStreaksRow
-	err := row.Scan(
-		&i.MaxStreak,
-		&i.MaxStreakStartDate,
-		&i.MaxStreakEndDate,
-		&i.CurrentStreak,
-		&i.CurrentStreakStartDate,
-		&i.CurrentStreakEndDate,
-	)
-	return i, err
-}
-
-const getUserWeekStreaks = `-- name: GetUserWeekStreaks :one
-WITH document_weeks AS (
-    SELECT STRFTIME('%Y-%m-%d', start_time, 'localtime', 'weekday 0', '-7 day') AS read_week
-    FROM activity
-    WHERE user_id = ?1
-    GROUP BY read_week
-    ORDER BY read_week DESC
-),
-partitions AS (
-    SELECT
-        document_weeks.read_week,
-        row_number() OVER (
-            PARTITION BY 1 ORDER BY read_week DESC
-        ) AS seqnum
-    FROM document_weeks
-),
-streaks AS (
-    SELECT
-        count(*) AS streak,
-        MIN(read_week) AS start_date,
-        MAX(read_week) AS end_date
-    FROM partitions
-    GROUP BY date(read_week, '+' || (seqnum * 7) || ' day')
-    ORDER BY end_date DESC
-),
-max_streak AS (
-    SELECT
-        MAX(streak) AS max_streak,
-        start_date AS max_streak_start_date,
-        end_date AS max_streak_end_date
-    FROM streaks
-)
-SELECT
-    CAST(max_streak AS INTEGER),
-    CAST(max_streak_start_date AS TEXT),
-    CAST(max_streak_end_date AS TEXT),
-    streak AS current_streak,
-    CAST(start_date AS TEXT) AS current_streak_start_date,
-    CAST(end_date AS TEXT) AS current_streak_end_date
-FROM max_streak, streaks LIMIT 1
-`
-
-type GetUserWeekStreaksRow struct {
-	MaxStreak              int64  `json:"max_streak"`
-	MaxStreakStartDate     string `json:"max_streak_start_date"`
-	MaxStreakEndDate       string `json:"max_streak_end_date"`
-	CurrentStreak          int64  `json:"current_streak"`
-	CurrentStreakStartDate string `json:"current_streak_start_date"`
-	CurrentStreakEndDate   string `json:"current_streak_end_date"`
-}
-
-func (q *Queries) GetUserWeekStreaks(ctx context.Context, userID string) (GetUserWeekStreaksRow, error) {
-	row := q.db.QueryRowContext(ctx, getUserWeekStreaks, userID)
-	var i GetUserWeekStreaksRow
-	err := row.Scan(
-		&i.MaxStreak,
-		&i.MaxStreakStartDate,
-		&i.MaxStreakEndDate,
-		&i.CurrentStreak,
-		&i.CurrentStreakStartDate,
-		&i.CurrentStreakEndDate,
-	)
-	return i, err
-}
-
 const getUserWindowStreaks = `-- name: GetUserWindowStreaks :one
 WITH document_windows AS (
     SELECT CASE
-        WHEN ?2 = "WEEK" THEN STRFTIME('%Y-%m-%d', start_time, 'localtime', 'weekday 0', '-7 day')
-        WHEN ?2 = "DAY" THEN date(start_time, 'localtime')
+        -- TODO: Timezones! E.g. DATE(start_time, '-5 hours')
+        -- TODO: Timezones! E.g. DATE(start_time, '-5 hours', '-7 days')
+        WHEN ?2 = "WEEK" THEN STRFTIME('%Y-%m-%d', start_time, 'weekday 0', '-7 day')
+        WHEN ?2 = "DAY" THEN DATE(start_time)
     END AS read_window
     FROM activity
     WHERE user_id = ?1

@@ -915,8 +787,8 @@ streaks AS (
         MAX(read_window) AS end_date
     FROM partitions
     GROUP BY CASE
-        WHEN ?2 = "DAY" THEN date(read_window, '+' || seqnum || ' day')
-        WHEN ?2 = "WEEK" THEN date(read_window, '+' || (seqnum * 7) || ' day')
+        WHEN ?2 = "DAY" THEN DATE(read_window, '+' || seqnum || ' day')
+        WHEN ?2 = "WEEK" THEN DATE(read_window, '+' || (seqnum * 7) || ' day')
     END
     ORDER BY end_date DESC
 ),

@@ -926,15 +798,29 @@ max_streak AS (
         start_date AS max_streak_start_date,
         end_date AS max_streak_end_date
     FROM streaks
+),
+current_streak AS (
+    SELECT
+        streak AS current_streak,
+        start_date AS current_streak_start_date,
+        end_date AS current_streak_end_date
+    FROM streaks
+    WHERE CASE
+        WHEN ?2 = "WEEK" THEN STRFTIME('%Y-%m-%d', 'now', 'weekday 0', '-7 day') = current_streak_end_date
+        WHEN ?2 = "DAY" THEN DATE('now', '-1 day') = current_streak_end_date OR DATE('now') = current_streak_end_date
+    END
+    LIMIT 1
 )
 SELECT
-    CAST(max_streak AS INTEGER),
-    CAST(max_streak_start_date AS TEXT),
-    CAST(max_streak_end_date AS TEXT),
-    streak AS current_streak,
-    CAST(start_date AS TEXT) AS current_streak_start_date,
-    CAST(end_date AS TEXT) AS current_streak_end_date
-FROM max_streak, streaks LIMIT 1
+    CAST(IFNULL(max_streak, 0) AS INTEGER) AS max_streak,
+    CAST(IFNULL(max_streak_start_date, "N/A") AS TEXT) AS max_streak_start_date,
+    CAST(IFNULL(max_streak_end_date, "N/A") AS TEXT) AS max_streak_end_date,
+    IFNULL(current_streak, 0) AS current_streak,
+    CAST(IFNULL(current_streak_start_date, "N/A") AS TEXT) AS current_streak_start_date,
+    CAST(IFNULL(current_streak_end_date, "N/A") AS TEXT) AS current_streak_end_date
+FROM max_streak
+LEFT JOIN current_streak ON 1 = 1
+LIMIT 1
 `
 
 type GetUserWindowStreaksParams struct {

@@ -943,12 +829,12 @@ type GetUserWindowStreaksParams struct {
 }
 
 type GetUserWindowStreaksRow struct {
-	MaxStreak              int64       `json:"max_streak"`
-	MaxStreakStartDate     string      `json:"max_streak_start_date"`
-	MaxStreakEndDate       string      `json:"max_streak_end_date"`
-	CurrentStreak          int64       `json:"current_streak"`
-	CurrentStreakStartDate string      `json:"current_streak_start_date"`
-	CurrentStreakEndDate   string      `json:"current_streak_end_date"`
+	MaxStreak              int64       `json:"max_streak"`
+	MaxStreakStartDate     string      `json:"max_streak_start_date"`
+	MaxStreakEndDate       string      `json:"max_streak_end_date"`
+	CurrentStreak          interface{} `json:"current_streak"`
+	CurrentStreakStartDate string      `json:"current_streak_start_date"`
+	CurrentStreakEndDate   string      `json:"current_streak_end_date"`
 }
 
 func (q *Queries) GetUserWindowStreaks(ctx context.Context, arg GetUserWindowStreaksParams) (GetUserWindowStreaksRow, error) {

@@ -1015,31 +901,44 @@ func (q *Queries) GetUsers(ctx context.Context, arg GetUsersParams) ([]User, err
 }
 
 const getWantedDocuments = `-- name: GetWantedDocuments :many
-SELECT CAST(value AS TEXT) AS id
+SELECT
+    CAST(value AS TEXT) AS id,
+    CAST((documents.filepath IS NULL) AS BOOLEAN) AS want_file,
+    CAST((documents.synced != true) AS BOOLEAN) AS want_metadata
 FROM json_each(?1)
 LEFT JOIN documents
 ON value = documents.id
 WHERE (
     documents.id IS NOT NULL
-    AND documents.synced = false
+    AND documents.deleted = false
+    AND (
+        documents.synced = false
+        OR documents.filepath IS NULL
+    )
 )
 OR (documents.id IS NULL)
 OR CAST(?1 AS TEXT) != CAST(?1 AS TEXT)
 `
 
-func (q *Queries) GetWantedDocuments(ctx context.Context, documentIds string) ([]string, error) {
+type GetWantedDocumentsRow struct {
+	ID           string `json:"id"`
+	WantFile     bool   `json:"want_file"`
+	WantMetadata bool   `json:"want_metadata"`
+}
+
+func (q *Queries) GetWantedDocuments(ctx context.Context, documentIds string) ([]GetWantedDocumentsRow, error) {
 	rows, err := q.db.QueryContext(ctx, getWantedDocuments, documentIds)
 	if err != nil {
 		return nil, err
 	}
 	defer rows.Close()
-	var items []string
+	var items []GetWantedDocumentsRow
 	for rows.Next() {
-		var id string
-		if err := rows.Scan(&id); err != nil {
+		var i GetWantedDocumentsRow
+		if err := rows.Scan(&i.ID, &i.WantFile, &i.WantMetadata); err != nil {
 			return nil, err
 		}
-		items = append(items, id)
+		items = append(items, i)
 	}
 	if err := rows.Close(); err != nil {
 		return nil, err
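Callers pass the client's document IDs as a JSON array string (it feeds the query's `json_each`) and now get structured rows back instead of bare IDs. A usage sketch, with `queries` standing in for the application's `*database.Queries` handle:

```go
rows, err := queries.GetWantedDocuments(ctx, `["doc-a","doc-b"]`)
if err != nil {
	log.Error("GetWantedDocuments Error:", err)
	return
}
for _, r := range rows {
	fmt.Println(r.ID, r.WantFile, r.WantMetadata)
}
```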
@@ -1,7 +1,6 @@
 ---
 services:
-  sync-ninja:
-    # working_dir: /app
+  bookmanager:
     environment:
       - CONFIG_PATH=/data
       - DATA_PATH=/data
@@ -28,10 +28,7 @@ type SVGBezierOpposedLine struct {
 	Angle int
 }
 
-func GetSVGGraphData(inputData []database.GetDailyReadStatsRow, svgWidth int) SVGGraphData {
-	// Static Padding
-	var padding int = 5
-
+func GetSVGGraphData(inputData []database.GetDailyReadStatsRow, svgWidth int, svgHeight int) SVGGraphData {
 	// Derive Height
 	var maxHeight int = 0
 	for _, item := range inputData {

@@ -40,7 +37,13 @@ func GetSVGGraphData(inputData []database.GetDailyReadStatsRow, svgWidth int) SV
 	}
 
-	// Derive Block Offsets & Transformed Coordinates (Line & Bar)
+	// Vertical Graph Real Estate
+	var sizePercentage float32 = 0.5
+
+	// Scale Ratio -> Desired Height
+	var sizeRatio float32 = float32(svgHeight) * sizePercentage / float32(maxHeight)
+
+	// Point Block Offset
 	var blockOffset int = int(math.Floor(float64(svgWidth) / float64(len(inputData))))
 
 	// Line & Bar Points

@@ -52,19 +55,19 @@ func GetSVGGraphData(inputData []database.GetDailyReadStatsRow, svgWidth int) SV
 	var maxBY int = 0
 	var minBX int = 0
 	for idx, item := range inputData {
-		itemSize := int(item.MinutesRead)
-		itemY := (maxHeight + padding) - itemSize
+		itemSize := int(float32(item.MinutesRead) * sizeRatio)
+		itemY := svgHeight - itemSize
+		lineX := (idx + 1) * blockOffset
 		barPoints = append(barPoints, SVGGraphPoint{
-			X:    (idx * blockOffset) + (blockOffset / 2),
+			X:    lineX - (blockOffset / 2),
 			Y:    itemY,
-			Size: itemSize + padding,
+			Size: itemSize,
 		})
 
-		lineX := (idx + 1) * blockOffset
 		linePoints = append(linePoints, SVGGraphPoint{
 			X:    lineX,
 			Y:    itemY,
-			Size: itemSize + padding,
+			Size: itemSize,
 		})
 
 		if lineX > maxBX {

@@ -82,13 +85,13 @@ func GetSVGGraphData(inputData []database.GetDailyReadStatsRow, svgWidth int) SV
 
 	// Return Data
 	return SVGGraphData{
-		Width:      svgWidth + padding*2,
-		Height:     maxHeight + padding*2,
+		Width:      svgWidth,
+		Height:     svgHeight,
 		Offset:     blockOffset,
 		LinePoints: linePoints,
 		BarPoints:  barPoints,
 		BezierPath: getSVGBezierPath(linePoints),
-		BezierFill: fmt.Sprintf("L %d,%d L %d,%d Z", maxBX, maxBY+padding, minBX, maxBY+padding),
+		BezierFill: fmt.Sprintf("L %d,%d L %d,%d Z", maxBX, maxBY, minBX+blockOffset, maxBY),
 	}
 }
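To make the new scaling concrete: bars now consume at most half the viewbox height. With the home template's `svgHeight = 70` (see below) and a busiest day of, say, 120 minutes, `sizeRatio = 70 * 0.5 / 120 ≈ 0.29`, so that day's bar is 35 units tall and a 60-minute day about 17. A worked sketch with illustrative numbers:

```go
svgHeight, maxHeight := 70, 120 // busiest day: 120 minutes read
sizeRatio := float32(svgHeight) * 0.5 / float32(maxHeight) // ≈ 0.292

itemSize := int(float32(60) * sizeRatio) // a 60-minute day yields 17
itemY := svgHeight - itemSize            // bar top at y = 53, drawn down to the baseline
```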
Binary screenshot diffs omitted (three images: 1.8 MiB, 362 KiB, and 2.8 MiB; sizes unchanged by the web_ rename).
@@ -1,8 +1,13 @@
-<svg viewBox="0 0 {{ $data.Width }} {{ $data.Height }}" class="chart">
+{{ $data := (GetSVGGraphData .Data.GraphData 800 150 )}}
+<svg viewBox="0 0 {{ $data.Width }} {{ $data.Height }}">
   <!-- Box Graph -->
   {{ range $idx, $item := $data.BarPoints }}
   <g class="bar" transform="translate({{ $item.X }}, 0)" fill="gray">
-    <rect y="{{ $item.Y }}" height="{{ $item.Size }}" width="33"></rect>
+    <rect
+      y="{{ $item.Y }}"
+      height="{{ $item.Size }}"
+      width="{{ $data.Offset }}"
+    ></rect>
   </g>
   {{ end }}
 

@@ -12,13 +17,13 @@
     stroke="black"
     stroke-width="2"
     points="
      {{ range $item := $data.LinePoints }}
      {{ $item.X }},{{ $item.Y }}
      {{ end }}
     "
   />
 
-  <!-- Bezier Curve Line Graph -->
+  <!-- Bezier Line Graph -->
   <path
     fill="#316BBE"
     fill-opacity="0.5"

@@ -26,10 +31,44 @@
     d="{{ $data.BezierPath }} {{ $data.BezierFill }}"
   />
 
-  <path
-    fill="none"
-    fill-opacity="0.1"
-    stroke="none"
-    d="{{ $data.BezierPath }}"
-  />
+  <path fill="none" stroke="#316BBE" d="{{ $data.BezierPath }}" />
+
+  {{ range $index, $item := $data.LinePoints }}
+  <line
+    class="hover-trigger"
+    stroke="black"
+    stroke-opacity="0.0"
+    stroke-width="{{ $data.Offset }}"
+    x1="{{ $item.X }}"
+    x2="{{ $item.X }}"
+    y1="0"
+    y2="{{ $data.Height }}"
+  ></line>
+  <g class="hover-item">
+    <line
+      class="text-black dark:text-white"
+      stroke-opacity="0.2"
+      x1="{{ $item.X }}"
+      x2="{{ $item.X }}"
+      y1="30"
+      y2="{{ $data.Height }}"
+    ></line>
+    <text
+      class="text-black dark:text-white"
+      alignment-baseline="middle"
+      transform="translate({{ $item.X }}, 5) translate(-30, 8)"
+      font-size="10"
+    >
+      {{ (index $.Data.GraphData $index).Date }}
+    </text>
+    <text
+      class="text-black dark:text-white"
+      alignment-baseline="middle"
+      transform="translate({{ $item.X }}, 25) translate(-30, -2)"
+      font-size="10"
+    >
+      {{ (index $.Data.GraphData $index).MinutesRead }} minutes
+    </text>
+  </g>
+  {{ end }}
 </svg>
(The rendered SVG above grew from 789 B to 1.8 KiB.)
@@ -7,9 +7,10 @@
   >
     Daily Read Totals
   </p>
-  {{ $data := (GetSVGGraphData .Data.GraphData 800)}}
 
+  {{ $data := (GetSVGGraphData .Data.GraphData 800 70 )}}
   <svg viewBox="0 0 {{ $data.Width }} {{ $data.Height }}">
+    <!-- Bezier Line Graph -->
     <path
       fill="#316BBE"
       fill-opacity="0.5"
@@ -41,7 +41,7 @@
             type="text"
             id="username"
             name="username"
-            class="flex-1 appearance-none border border-gray-300 w-full py-2 px-4 bg-white text-gray-700 placeholder-gray-400 shadow-sm text-base focus:outline-none focus:ring-2 focus:ring-purple-600 focus:border-transparent"
+            class="flex-1 appearance-none rounded-none border border-gray-300 w-full py-2 px-4 bg-white text-gray-700 placeholder-gray-400 shadow-sm text-base focus:outline-none focus:ring-2 focus:ring-purple-600 focus:border-transparent"
             placeholder="Username"
           />
         </div>

@@ -67,7 +67,7 @@
             type="password"
             id="password"
             name="password"
-            class="flex-1 appearance-none border border-gray-300 w-full py-2 px-4 bg-white text-gray-700 placeholder-gray-400 shadow-sm text-base focus:outline-none focus:ring-2 focus:ring-purple-600 focus:border-transparent"
+            class="flex-1 appearance-none rounded-none border border-gray-300 w-full py-2 px-4 bg-white text-gray-700 placeholder-gray-400 shadow-sm text-base focus:outline-none focus:ring-2 focus:ring-purple-600 focus:border-transparent"
             placeholder="Password"
           />
           <span class="absolute -bottom-5 text-red-400 text-xs"

@@ -86,6 +86,7 @@
         {{end}}
       </button>
     </form>
+    {{ if .RegistrationEnabled }}
     <div class="pt-12 pb-12 text-center">
       {{ if .Register }}
       <p>

@@ -103,6 +104,7 @@
       </p>
       {{end}}
     </div>
+    {{ end }}
   </div>
 </div>
 <div