[add] heavy query caching, [add] wpm leaderboard
This commit is contained in:
parent
5cd4e165b0
commit
4e1ee0022a
@ -53,7 +53,7 @@ func NewApi(db *database.DBManager, c *config.Config) *API {
|
|||||||
// Configure Cookie Session Store
|
// Configure Cookie Session Store
|
||||||
store := cookie.NewStore(newToken)
|
store := cookie.NewStore(newToken)
|
||||||
store.Options(sessions.Options{
|
store.Options(sessions.Options{
|
||||||
MaxAge: 60 * 60 * 24,
|
MaxAge: 60 * 60 * 24 * 7,
|
||||||
Secure: true,
|
Secure: true,
|
||||||
HttpOnly: true,
|
HttpOnly: true,
|
||||||
SameSite: http.SameSiteStrictMode,
|
SameSite: http.SameSiteStrictMode,
|
||||||
@ -81,7 +81,6 @@ func (api *API) registerWebAppRoutes() {
|
|||||||
|
|
||||||
render.AddFromFilesFuncs("login", helperFuncs, "templates/login.html")
|
render.AddFromFilesFuncs("login", helperFuncs, "templates/login.html")
|
||||||
render.AddFromFilesFuncs("home", helperFuncs, "templates/base.html", "templates/home.html")
|
render.AddFromFilesFuncs("home", helperFuncs, "templates/base.html", "templates/home.html")
|
||||||
render.AddFromFilesFuncs("graphs", helperFuncs, "templates/base.html", "templates/graphs.html")
|
|
||||||
render.AddFromFilesFuncs("settings", helperFuncs, "templates/base.html", "templates/settings.html")
|
render.AddFromFilesFuncs("settings", helperFuncs, "templates/base.html", "templates/settings.html")
|
||||||
render.AddFromFilesFuncs("activity", helperFuncs, "templates/base.html", "templates/activity.html")
|
render.AddFromFilesFuncs("activity", helperFuncs, "templates/base.html", "templates/activity.html")
|
||||||
render.AddFromFilesFuncs("documents", helperFuncs, "templates/base.html", "templates/documents.html")
|
render.AddFromFilesFuncs("documents", helperFuncs, "templates/base.html", "templates/documents.html")
|
||||||
@ -107,9 +106,6 @@ func (api *API) registerWebAppRoutes() {
|
|||||||
api.Router.POST("/documents/:document/edit", api.authWebAppMiddleware, api.editDocument)
|
api.Router.POST("/documents/:document/edit", api.authWebAppMiddleware, api.editDocument)
|
||||||
api.Router.POST("/documents/:document/identify", api.authWebAppMiddleware, api.identifyDocument)
|
api.Router.POST("/documents/:document/identify", api.authWebAppMiddleware, api.identifyDocument)
|
||||||
api.Router.POST("/documents/:document/delete", api.authWebAppMiddleware, api.deleteDocument)
|
api.Router.POST("/documents/:document/delete", api.authWebAppMiddleware, api.deleteDocument)
|
||||||
|
|
||||||
// TODO
|
|
||||||
api.Router.GET("/graphs", api.authWebAppMiddleware, baseResourceRoute("graphs"))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (api *API) registerKOAPIRoutes(apiGroup *gin.RouterGroup) {
|
func (api *API) registerKOAPIRoutes(apiGroup *gin.RouterGroup) {
|
||||||
|
@ -48,19 +48,6 @@ type requestSettingsEdit struct {
|
|||||||
TimeOffset *string `form:"time_offset"`
|
TimeOffset *string `form:"time_offset"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func baseResourceRoute(template string, args ...map[string]any) func(c *gin.Context) {
|
|
||||||
variables := gin.H{"RouteName": template}
|
|
||||||
if len(args) > 0 {
|
|
||||||
variables = args[0]
|
|
||||||
}
|
|
||||||
|
|
||||||
return func(c *gin.Context) {
|
|
||||||
rUser, _ := c.Get("AuthorizedUser")
|
|
||||||
variables["User"] = rUser
|
|
||||||
c.HTML(http.StatusOK, template, variables)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (api *API) webManifest(c *gin.Context) {
|
func (api *API) webManifest(c *gin.Context) {
|
||||||
c.Header("Content-Type", "application/manifest+json")
|
c.Header("Content-Type", "application/manifest+json")
|
||||||
c.File("./assets/manifest.json")
|
c.File("./assets/manifest.json")
|
||||||
@ -125,18 +112,9 @@ func (api *API) createAppResourcesRoute(routeName string, args ...map[string]any
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
statistics := gin.H{
|
|
||||||
"TotalTimeLeftSeconds": (document.Pages - document.Page) * document.SecondsPerPage,
|
|
||||||
"WordsPerMinute": "N/A",
|
|
||||||
}
|
|
||||||
|
|
||||||
if document.Words != nil && *document.Words != 0 {
|
|
||||||
statistics["WordsPerMinute"] = (*document.Words / document.Pages * document.ReadPages) / (document.TotalTimeSeconds / 60.0)
|
|
||||||
}
|
|
||||||
|
|
||||||
templateVars["RelBase"] = "../"
|
templateVars["RelBase"] = "../"
|
||||||
templateVars["Data"] = document
|
templateVars["Data"] = document
|
||||||
templateVars["Statistics"] = statistics
|
templateVars["TotalTimeLeftSeconds"] = (document.Pages - document.Page) * document.SecondsPerPage
|
||||||
} else if routeName == "activity" {
|
} else if routeName == "activity" {
|
||||||
activityFilter := database.GetActivityParams{
|
activityFilter := database.GetActivityParams{
|
||||||
UserID: userID,
|
UserID: userID,
|
||||||
@ -158,39 +136,22 @@ func (api *API) createAppResourcesRoute(routeName string, args ...map[string]any
|
|||||||
|
|
||||||
templateVars["Data"] = activity
|
templateVars["Data"] = activity
|
||||||
} else if routeName == "home" {
|
} else if routeName == "home" {
|
||||||
start_time := time.Now()
|
start := time.Now()
|
||||||
weekly_streak, err := api.DB.Queries.GetUserWindowStreaks(api.DB.Ctx, database.GetUserWindowStreaksParams{
|
|
||||||
UserID: userID,
|
|
||||||
Window: "WEEK",
|
|
||||||
})
|
|
||||||
if err != nil {
|
|
||||||
log.Warn("[createAppResourcesRoute] GetUserWindowStreaks DB Error:", err)
|
|
||||||
}
|
|
||||||
log.Debug("GetUserWindowStreaks - WEEK - ", time.Since(start_time))
|
|
||||||
start_time = time.Now()
|
|
||||||
|
|
||||||
daily_streak, err := api.DB.Queries.GetUserWindowStreaks(api.DB.Ctx, database.GetUserWindowStreaksParams{
|
|
||||||
UserID: userID,
|
|
||||||
Window: "DAY",
|
|
||||||
})
|
|
||||||
if err != nil {
|
|
||||||
log.Warn("[createAppResourcesRoute] GetUserWindowStreaks DB Error:", err)
|
|
||||||
}
|
|
||||||
log.Debug("GetUserWindowStreaks - DAY - ", time.Since(start_time))
|
|
||||||
|
|
||||||
start_time = time.Now()
|
|
||||||
database_info, _ := api.DB.Queries.GetDatabaseInfo(api.DB.Ctx, userID)
|
|
||||||
log.Debug("GetDatabaseInfo - ", time.Since(start_time))
|
|
||||||
|
|
||||||
start_time = time.Now()
|
|
||||||
read_graph_data, _ := api.DB.Queries.GetDailyReadStats(api.DB.Ctx, userID)
|
read_graph_data, _ := api.DB.Queries.GetDailyReadStats(api.DB.Ctx, userID)
|
||||||
log.Debug("GetDailyReadStats - ", time.Since(start_time))
|
log.Info("GetDailyReadStats Performance: ", time.Since(start))
|
||||||
|
|
||||||
|
start = time.Now()
|
||||||
|
database_info, _ := api.DB.Queries.GetDatabaseInfo(api.DB.Ctx, userID)
|
||||||
|
log.Info("GetDatabaseInfo Performance: ", time.Since(start))
|
||||||
|
|
||||||
|
streaks, _ := api.DB.Queries.GetUserStreaks(api.DB.Ctx, userID)
|
||||||
|
wpn_leaderboard, _ := api.DB.Queries.GetWPMLeaderboard(api.DB.Ctx)
|
||||||
|
|
||||||
templateVars["Data"] = gin.H{
|
templateVars["Data"] = gin.H{
|
||||||
"DailyStreak": daily_streak,
|
"Streaks": streaks,
|
||||||
"WeeklyStreak": weekly_streak,
|
|
||||||
"DatabaseInfo": database_info,
|
|
||||||
"GraphData": read_graph_data,
|
"GraphData": read_graph_data,
|
||||||
|
"DatabaseInfo": database_info,
|
||||||
|
"WPMLeaderboard": wpn_leaderboard,
|
||||||
}
|
}
|
||||||
} else if routeName == "settings" {
|
} else if routeName == "settings" {
|
||||||
user, err := api.DB.Queries.GetUser(api.DB.Ctx, userID)
|
user, err := api.DB.Queries.GetUser(api.DB.Ctx, userID)
|
||||||
@ -512,17 +473,8 @@ func (api *API) identifyDocument(c *gin.Context) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
statistics := gin.H{
|
|
||||||
"TotalTimeLeftSeconds": (document.Pages - document.Page) * document.SecondsPerPage,
|
|
||||||
"WordsPerMinute": "N/A",
|
|
||||||
}
|
|
||||||
|
|
||||||
if document.Words != nil && *document.Words != 0 {
|
|
||||||
statistics["WordsPerMinute"] = (*document.Words / document.Pages * document.ReadPages) / (document.TotalTimeSeconds / 60.0)
|
|
||||||
}
|
|
||||||
|
|
||||||
templateVars["Data"] = document
|
templateVars["Data"] = document
|
||||||
templateVars["Statistics"] = statistics
|
templateVars["TotalTimeLeftSeconds"] = (document.Pages - document.Page) * document.SecondsPerPage
|
||||||
|
|
||||||
c.HTML(http.StatusOK, "document", templateVars)
|
c.HTML(http.StatusOK, "document", templateVars)
|
||||||
}
|
}
|
||||||
|
71
api/auth.go
71
api/auth.go
@ -5,10 +5,12 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"net/http"
|
"net/http"
|
||||||
"strings"
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
argon2 "github.com/alexedwards/argon2id"
|
argon2 "github.com/alexedwards/argon2id"
|
||||||
"github.com/gin-contrib/sessions"
|
"github.com/gin-contrib/sessions"
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
|
log "github.com/sirupsen/logrus"
|
||||||
"reichard.io/bbank/database"
|
"reichard.io/bbank/database"
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -34,14 +36,16 @@ func (api *API) authorizeCredentials(username string, password string) (authoriz
|
|||||||
func (api *API) authAPIMiddleware(c *gin.Context) {
|
func (api *API) authAPIMiddleware(c *gin.Context) {
|
||||||
session := sessions.Default(c)
|
session := sessions.Default(c)
|
||||||
|
|
||||||
// Utilize Session Token
|
// Check Session First
|
||||||
if authorizedUser := session.Get("authorizedUser"); authorizedUser != nil {
|
if user, ok := getSession(session); ok == true {
|
||||||
c.Set("AuthorizedUser", authorizedUser)
|
c.Set("AuthorizedUser", user)
|
||||||
c.Header("Cache-Control", "private")
|
c.Header("Cache-Control", "private")
|
||||||
c.Next()
|
c.Next()
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Session Failed -> Check Headers (Allowed on API for KOSync Compatibility)
|
||||||
|
|
||||||
var rHeader authHeader
|
var rHeader authHeader
|
||||||
if err := c.ShouldBindHeader(&rHeader); err != nil {
|
if err := c.ShouldBindHeader(&rHeader); err != nil {
|
||||||
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Incorrect Headers"})
|
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Incorrect Headers"})
|
||||||
@ -57,20 +61,22 @@ func (api *API) authAPIMiddleware(c *gin.Context) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Set Session Cookie
|
if err := setSession(session, rHeader.AuthUser); err != nil {
|
||||||
session.Set("authorizedUser", rHeader.AuthUser)
|
c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"})
|
||||||
session.Save()
|
return
|
||||||
|
}
|
||||||
|
|
||||||
c.Set("AuthorizedUser", rHeader.AuthUser)
|
c.Set("AuthorizedUser", rHeader.AuthUser)
|
||||||
|
c.Header("Cache-Control", "private")
|
||||||
c.Next()
|
c.Next()
|
||||||
}
|
}
|
||||||
|
|
||||||
func (api *API) authWebAppMiddleware(c *gin.Context) {
|
func (api *API) authWebAppMiddleware(c *gin.Context) {
|
||||||
session := sessions.Default(c)
|
session := sessions.Default(c)
|
||||||
|
|
||||||
// Utilize Session Token
|
// Check Session
|
||||||
if authorizedUser := session.Get("authorizedUser"); authorizedUser != nil {
|
if user, ok := getSession(session); ok == true {
|
||||||
c.Set("AuthorizedUser", authorizedUser)
|
c.Set("AuthorizedUser", user)
|
||||||
c.Header("Cache-Control", "private")
|
c.Header("Cache-Control", "private")
|
||||||
c.Next()
|
c.Next()
|
||||||
return
|
return
|
||||||
@ -102,12 +108,17 @@ func (api *API) authFormLogin(c *gin.Context) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Set Session
|
||||||
session := sessions.Default(c)
|
session := sessions.Default(c)
|
||||||
|
if err := setSession(session, username); err != nil {
|
||||||
|
c.HTML(http.StatusUnauthorized, "login", gin.H{
|
||||||
|
"RegistrationEnabled": api.Config.RegistrationEnabled,
|
||||||
|
"Error": "Unknown Error",
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
// Set Session Cookie
|
c.Header("Cache-Control", "private")
|
||||||
session.Set("authorizedUser", username)
|
|
||||||
session.Save()
|
|
||||||
|
|
||||||
c.Redirect(http.StatusFound, "/")
|
c.Redirect(http.StatusFound, "/")
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -160,12 +171,14 @@ func (api *API) authFormRegister(c *gin.Context) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Set Session
|
||||||
session := sessions.Default(c)
|
session := sessions.Default(c)
|
||||||
|
if err := setSession(session, username); err != nil {
|
||||||
|
c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
// Set Session Cookie
|
c.Header("Cache-Control", "private")
|
||||||
session.Set("authorizedUser", username)
|
|
||||||
session.Save()
|
|
||||||
|
|
||||||
c.Redirect(http.StatusFound, "/")
|
c.Redirect(http.StatusFound, "/")
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -175,3 +188,27 @@ func (api *API) authLogout(c *gin.Context) {
|
|||||||
session.Save()
|
session.Save()
|
||||||
c.Redirect(http.StatusFound, "/login")
|
c.Redirect(http.StatusFound, "/login")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func getSession(session sessions.Session) (user string, ok bool) {
|
||||||
|
// Check Session
|
||||||
|
authorizedUser := session.Get("authorizedUser")
|
||||||
|
if authorizedUser == nil {
|
||||||
|
return "", false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Refresh
|
||||||
|
expiresAt := session.Get("expiresAt")
|
||||||
|
if expiresAt != nil && expiresAt.(int64)-time.Now().Unix() < 60*60*24 {
|
||||||
|
log.Info("[getSession] Refreshing Session")
|
||||||
|
setSession(session, authorizedUser.(string))
|
||||||
|
}
|
||||||
|
|
||||||
|
return authorizedUser.(string), true
|
||||||
|
}
|
||||||
|
|
||||||
|
func setSession(session sessions.Session, user string) error {
|
||||||
|
// Set Session Cookie
|
||||||
|
session.Set("authorizedUser", user)
|
||||||
|
session.Set("expiresAt", time.Now().Unix()+(60*60*24*7))
|
||||||
|
return session.Save()
|
||||||
|
}
|
||||||
|
@ -37,6 +37,7 @@ type requestActivity struct {
|
|||||||
|
|
||||||
type requestCheckActivitySync struct {
|
type requestCheckActivitySync struct {
|
||||||
DeviceID string `json:"device_id"`
|
DeviceID string `json:"device_id"`
|
||||||
|
Device string `json:"device"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type requestDocument struct {
|
type requestDocument struct {
|
||||||
@ -277,6 +278,14 @@ func (api *API) addActivities(c *gin.Context) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Update Temp Tables
|
||||||
|
go func() {
|
||||||
|
log.Info("[addActivities] Caching Temp Tables")
|
||||||
|
if err := api.DB.CacheTempTables(); err != nil {
|
||||||
|
log.Warn("[addActivities] CacheTempTables Failure: ", err)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
c.JSON(http.StatusOK, gin.H{
|
c.JSON(http.StatusOK, gin.H{
|
||||||
"added": len(rActivity.Activity),
|
"added": len(rActivity.Activity),
|
||||||
})
|
})
|
||||||
@ -292,6 +301,18 @@ func (api *API) checkActivitySync(c *gin.Context) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Upsert Device
|
||||||
|
if _, err := api.DB.Queries.UpsertDevice(api.DB.Ctx, database.UpsertDeviceParams{
|
||||||
|
ID: rCheckActivity.DeviceID,
|
||||||
|
UserID: rUser.(string),
|
||||||
|
DeviceName: rCheckActivity.Device,
|
||||||
|
LastSynced: time.Now().UTC(),
|
||||||
|
}); err != nil {
|
||||||
|
log.Error("[checkActivitySync] UpsertDevice DB Error", err)
|
||||||
|
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Device"})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
// Get Last Device Activity
|
// Get Last Device Activity
|
||||||
lastActivity, err := api.DB.Queries.GetLastActivity(api.DB.Ctx, database.GetLastActivityParams{
|
lastActivity, err := api.DB.Queries.GetLastActivity(api.DB.Ctx, database.GetLastActivityParams{
|
||||||
UserID: rUser.(string),
|
UserID: rUser.(string),
|
||||||
@ -385,6 +406,7 @@ func (api *API) checkDocumentsSync(c *gin.Context) {
|
|||||||
ID: rCheckDocs.DeviceID,
|
ID: rCheckDocs.DeviceID,
|
||||||
UserID: rUser.(string),
|
UserID: rUser.(string),
|
||||||
DeviceName: rCheckDocs.Device,
|
DeviceName: rCheckDocs.Device,
|
||||||
|
LastSynced: time.Now().UTC(),
|
||||||
})
|
})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Error("[checkDocumentsSync] UpsertDevice DB Error", err)
|
log.Error("[checkDocumentsSync] UpsertDevice DB Error", err)
|
||||||
|
@ -72,7 +72,8 @@ end
|
|||||||
-------------- New Functions -------------
|
-------------- New Functions -------------
|
||||||
------------------------------------------
|
------------------------------------------
|
||||||
|
|
||||||
function SyncNinjaClient:check_activity(username, password, device_id, callback)
|
function SyncNinjaClient:check_activity(username, password, device_id, device,
|
||||||
|
callback)
|
||||||
self.client:reset_middlewares()
|
self.client:reset_middlewares()
|
||||||
self.client:enable("Format.JSON")
|
self.client:enable("Format.JSON")
|
||||||
self.client:enable("GinClient")
|
self.client:enable("GinClient")
|
||||||
@ -82,7 +83,10 @@ function SyncNinjaClient:check_activity(username, password, device_id, callback)
|
|||||||
socketutil:set_timeout(SYNC_TIMEOUTS[1], SYNC_TIMEOUTS[2])
|
socketutil:set_timeout(SYNC_TIMEOUTS[1], SYNC_TIMEOUTS[2])
|
||||||
local co = coroutine.create(function()
|
local co = coroutine.create(function()
|
||||||
local ok, res = pcall(function()
|
local ok, res = pcall(function()
|
||||||
return self.client:check_activity({device_id = device_id})
|
return self.client:check_activity({
|
||||||
|
device_id = device_id,
|
||||||
|
device = device
|
||||||
|
})
|
||||||
end)
|
end)
|
||||||
if ok then
|
if ok then
|
||||||
callback(res.status == 200, res.body)
|
callback(res.status == 200, res.body)
|
||||||
|
@ -26,8 +26,8 @@
|
|||||||
"check_activity": {
|
"check_activity": {
|
||||||
"path": "/api/ko/syncs/activity",
|
"path": "/api/ko/syncs/activity",
|
||||||
"method": "POST",
|
"method": "POST",
|
||||||
"required_params": ["device_id"],
|
"required_params": ["device_id", "device"],
|
||||||
"payload": ["device_id"],
|
"payload": ["device_id", "device"],
|
||||||
"expected_status": [200, 401]
|
"expected_status": [200, 401]
|
||||||
},
|
},
|
||||||
"download_document": {
|
"download_document": {
|
||||||
|
@ -75,7 +75,7 @@ local STATISTICS_ACTIVITY_SINCE_QUERY = [[
|
|||||||
JOIN book AS b
|
JOIN book AS b
|
||||||
ON b.id = psd.id_book
|
ON b.id = psd.id_book
|
||||||
WHERE start_time > %d
|
WHERE start_time > %d
|
||||||
ORDER BY start_time ASC LIMIT 1000;
|
ORDER BY start_time ASC LIMIT 5000;
|
||||||
]]
|
]]
|
||||||
|
|
||||||
local STATISTICS_BOOK_QUERY = [[
|
local STATISTICS_BOOK_QUERY = [[
|
||||||
@ -615,7 +615,8 @@ function SyncNinja:checkActivity(interactive)
|
|||||||
service_spec = self.path .. "/api.json"
|
service_spec = self.path .. "/api.json"
|
||||||
}
|
}
|
||||||
local ok, err = pcall(client.check_activity, client, self.settings.username,
|
local ok, err = pcall(client.check_activity, client, self.settings.username,
|
||||||
self.settings.password, self.device_id, callback_func)
|
self.settings.password, self.device_id, Device.model,
|
||||||
|
callback_func)
|
||||||
end
|
end
|
||||||
|
|
||||||
function SyncNinja:uploadActivity(activity_data, interactive)
|
function SyncNinja:uploadActivity(activity_data, interactive)
|
||||||
@ -907,7 +908,7 @@ function SyncNinja:getStatisticsActivity(timestamp)
|
|||||||
local conn = SQ3.open(statistics_db)
|
local conn = SQ3.open(statistics_db)
|
||||||
local stmt = conn:prepare(string.format(STATISTICS_ACTIVITY_SINCE_QUERY,
|
local stmt = conn:prepare(string.format(STATISTICS_ACTIVITY_SINCE_QUERY,
|
||||||
timestamp))
|
timestamp))
|
||||||
local rows = stmt:resultset("i", 1000)
|
local rows = stmt:resultset("i", 5000)
|
||||||
conn:close()
|
conn:close()
|
||||||
|
|
||||||
-- No Results
|
-- No Results
|
||||||
|
@ -21,6 +21,9 @@ type DBManager struct {
|
|||||||
//go:embed schema.sql
|
//go:embed schema.sql
|
||||||
var ddl string
|
var ddl string
|
||||||
|
|
||||||
|
//go:embed update_temp_tables.sql
|
||||||
|
var tsql string
|
||||||
|
|
||||||
func NewMgr(c *config.Config) *DBManager {
|
func NewMgr(c *config.Config) *DBManager {
|
||||||
// Create Manager
|
// Create Manager
|
||||||
dbm := &DBManager{
|
dbm := &DBManager{
|
||||||
@ -44,22 +47,23 @@ func NewMgr(c *config.Config) *DBManager {
|
|||||||
log.Fatal("Unsupported Database")
|
log.Fatal("Unsupported Database")
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create Tables
|
|
||||||
if _, err := dbm.DB.ExecContext(dbm.Ctx, ddl); err != nil {
|
|
||||||
log.Fatal(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
dbm.Queries = New(dbm.DB)
|
dbm.Queries = New(dbm.DB)
|
||||||
|
|
||||||
return dbm
|
return dbm
|
||||||
}
|
}
|
||||||
|
|
||||||
func connectHookSQLite(conn *sqlite.SQLiteConn) error {
|
func (dbm *DBManager) CacheTempTables() error {
|
||||||
if err := conn.RegisterFunc("test_func", func() string {
|
if _, err := dbm.DB.ExecContext(dbm.Ctx, tsql); err != nil {
|
||||||
return "FOOBAR"
|
|
||||||
}, false); err != nil {
|
|
||||||
log.Info("Error Registering Function")
|
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func connectHookSQLite(conn *sqlite.SQLiteConn) error {
|
||||||
|
// Create Tables
|
||||||
|
log.Debug("Creating Schema")
|
||||||
|
if _, err := conn.Exec(ddl, nil); err != nil {
|
||||||
|
log.Warn("Create Schema Failure: ", err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
@ -5,25 +5,26 @@
|
|||||||
package database
|
package database
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"database/sql"
|
||||||
"time"
|
"time"
|
||||||
)
|
)
|
||||||
|
|
||||||
type Activity struct {
|
type Activity struct {
|
||||||
ID int64 `json:"id"`
|
|
||||||
UserID string `json:"user_id"`
|
UserID string `json:"user_id"`
|
||||||
DocumentID string `json:"document_id"`
|
DocumentID string `json:"document_id"`
|
||||||
DeviceID string `json:"device_id"`
|
DeviceID string `json:"device_id"`
|
||||||
|
CreatedAt time.Time `json:"created_at"`
|
||||||
StartTime time.Time `json:"start_time"`
|
StartTime time.Time `json:"start_time"`
|
||||||
Duration int64 `json:"duration"`
|
|
||||||
Page int64 `json:"page"`
|
Page int64 `json:"page"`
|
||||||
Pages int64 `json:"pages"`
|
Pages int64 `json:"pages"`
|
||||||
CreatedAt time.Time `json:"created_at"`
|
Duration int64 `json:"duration"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type Device struct {
|
type Device struct {
|
||||||
ID string `json:"id"`
|
ID string `json:"id"`
|
||||||
UserID string `json:"user_id"`
|
UserID string `json:"user_id"`
|
||||||
DeviceName string `json:"device_name"`
|
DeviceName string `json:"device_name"`
|
||||||
|
LastSynced time.Time `json:"last_synced"`
|
||||||
CreatedAt string `json:"created_at"`
|
CreatedAt string `json:"created_at"`
|
||||||
Sync bool `json:"sync"`
|
Sync bool `json:"sync"`
|
||||||
}
|
}
|
||||||
@ -50,14 +51,6 @@ type Document struct {
|
|||||||
CreatedAt time.Time `json:"created_at"`
|
CreatedAt time.Time `json:"created_at"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type DocumentDeviceSync struct {
|
|
||||||
UserID string `json:"user_id"`
|
|
||||||
DocumentID string `json:"document_id"`
|
|
||||||
DeviceID string `json:"device_id"`
|
|
||||||
LastSynced time.Time `json:"last_synced"`
|
|
||||||
Sync bool `json:"sync"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type DocumentProgress struct {
|
type DocumentProgress struct {
|
||||||
UserID string `json:"user_id"`
|
UserID string `json:"user_id"`
|
||||||
DocumentID string `json:"document_id"`
|
DocumentID string `json:"document_id"`
|
||||||
@ -67,6 +60,19 @@ type DocumentProgress struct {
|
|||||||
CreatedAt time.Time `json:"created_at"`
|
CreatedAt time.Time `json:"created_at"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type DocumentUserStatistic struct {
|
||||||
|
DocumentID string `json:"document_id"`
|
||||||
|
UserID string `json:"user_id"`
|
||||||
|
LastRead string `json:"last_read"`
|
||||||
|
Page int64 `json:"page"`
|
||||||
|
Pages int64 `json:"pages"`
|
||||||
|
TotalTimeSeconds int64 `json:"total_time_seconds"`
|
||||||
|
ReadPages int64 `json:"read_pages"`
|
||||||
|
Percentage float64 `json:"percentage"`
|
||||||
|
WordsRead int64 `json:"words_read"`
|
||||||
|
Wpm float64 `json:"wpm"`
|
||||||
|
}
|
||||||
|
|
||||||
type Metadatum struct {
|
type Metadatum struct {
|
||||||
ID int64 `json:"id"`
|
ID int64 `json:"id"`
|
||||||
DocumentID string `json:"document_id"`
|
DocumentID string `json:"document_id"`
|
||||||
@ -80,14 +86,16 @@ type Metadatum struct {
|
|||||||
CreatedAt time.Time `json:"created_at"`
|
CreatedAt time.Time `json:"created_at"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type RescaledActivity struct {
|
type RawActivity struct {
|
||||||
|
ID int64 `json:"id"`
|
||||||
|
UserID string `json:"user_id"`
|
||||||
DocumentID string `json:"document_id"`
|
DocumentID string `json:"document_id"`
|
||||||
DeviceID string `json:"device_id"`
|
DeviceID string `json:"device_id"`
|
||||||
UserID string `json:"user_id"`
|
|
||||||
StartTime time.Time `json:"start_time"`
|
StartTime time.Time `json:"start_time"`
|
||||||
Pages int64 `json:"pages"`
|
|
||||||
Page int64 `json:"page"`
|
Page int64 `json:"page"`
|
||||||
|
Pages int64 `json:"pages"`
|
||||||
Duration int64 `json:"duration"`
|
Duration int64 `json:"duration"`
|
||||||
|
CreatedAt time.Time `json:"created_at"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type User struct {
|
type User struct {
|
||||||
@ -97,3 +105,49 @@ type User struct {
|
|||||||
TimeOffset *string `json:"time_offset"`
|
TimeOffset *string `json:"time_offset"`
|
||||||
CreatedAt time.Time `json:"created_at"`
|
CreatedAt time.Time `json:"created_at"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type UserStreak struct {
|
||||||
|
UserID string `json:"user_id"`
|
||||||
|
Window string `json:"window"`
|
||||||
|
MaxStreak int64 `json:"max_streak"`
|
||||||
|
MaxStreakStartDate string `json:"max_streak_start_date"`
|
||||||
|
MaxStreakEndDate string `json:"max_streak_end_date"`
|
||||||
|
CurrentStreak int64 `json:"current_streak"`
|
||||||
|
CurrentStreakStartDate string `json:"current_streak_start_date"`
|
||||||
|
CurrentStreakEndDate string `json:"current_streak_end_date"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type ViewDocumentUserStatistic struct {
|
||||||
|
DocumentID string `json:"document_id"`
|
||||||
|
UserID string `json:"user_id"`
|
||||||
|
LastRead time.Time `json:"last_read"`
|
||||||
|
Page int64 `json:"page"`
|
||||||
|
Pages int64 `json:"pages"`
|
||||||
|
TotalTimeSeconds sql.NullFloat64 `json:"total_time_seconds"`
|
||||||
|
ReadPages int64 `json:"read_pages"`
|
||||||
|
Percentage float64 `json:"percentage"`
|
||||||
|
WordsRead interface{} `json:"words_read"`
|
||||||
|
Wpm int64 `json:"wpm"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type ViewRescaledActivity struct {
|
||||||
|
UserID string `json:"user_id"`
|
||||||
|
DocumentID string `json:"document_id"`
|
||||||
|
DeviceID string `json:"device_id"`
|
||||||
|
CreatedAt time.Time `json:"created_at"`
|
||||||
|
StartTime time.Time `json:"start_time"`
|
||||||
|
Page int64 `json:"page"`
|
||||||
|
Pages int64 `json:"pages"`
|
||||||
|
Duration int64 `json:"duration"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type ViewUserStreak struct {
|
||||||
|
UserID string `json:"user_id"`
|
||||||
|
Window string `json:"window"`
|
||||||
|
MaxStreak interface{} `json:"max_streak"`
|
||||||
|
MaxStreakStartDate interface{} `json:"max_streak_start_date"`
|
||||||
|
MaxStreakEndDate interface{} `json:"max_streak_end_date"`
|
||||||
|
CurrentStreak interface{} `json:"current_streak"`
|
||||||
|
CurrentStreakStartDate interface{} `json:"current_streak_start_date"`
|
||||||
|
CurrentStreakEndDate interface{} `json:"current_streak_end_date"`
|
||||||
|
}
|
||||||
|
@ -1,3 +1,16 @@
|
|||||||
|
-- name: AddActivity :one
|
||||||
|
INSERT INTO raw_activity (
|
||||||
|
user_id,
|
||||||
|
document_id,
|
||||||
|
device_id,
|
||||||
|
start_time,
|
||||||
|
duration,
|
||||||
|
page,
|
||||||
|
pages
|
||||||
|
)
|
||||||
|
VALUES (?, ?, ?, ?, ?, ?, ?)
|
||||||
|
RETURNING *;
|
||||||
|
|
||||||
-- name: AddMetadata :one
|
-- name: AddMetadata :one
|
||||||
INSERT INTO metadata (
|
INSERT INTO metadata (
|
||||||
document_id,
|
document_id,
|
||||||
@ -17,10 +30,343 @@ INSERT INTO users (id, pass)
|
|||||||
VALUES (?, ?)
|
VALUES (?, ?)
|
||||||
ON CONFLICT DO NOTHING;
|
ON CONFLICT DO NOTHING;
|
||||||
|
|
||||||
|
-- name: DeleteDocument :execrows
|
||||||
|
UPDATE documents
|
||||||
|
SET
|
||||||
|
deleted = 1
|
||||||
|
WHERE id = $id;
|
||||||
|
|
||||||
|
-- name: GetActivity :many
|
||||||
|
WITH filtered_activity AS (
|
||||||
|
SELECT
|
||||||
|
document_id,
|
||||||
|
user_id,
|
||||||
|
start_time,
|
||||||
|
duration,
|
||||||
|
page,
|
||||||
|
pages
|
||||||
|
FROM activity
|
||||||
|
WHERE
|
||||||
|
activity.user_id = $user_id
|
||||||
|
AND (
|
||||||
|
(
|
||||||
|
CAST($doc_filter AS BOOLEAN) = TRUE
|
||||||
|
AND document_id = $document_id
|
||||||
|
) OR $doc_filter = FALSE
|
||||||
|
)
|
||||||
|
ORDER BY start_time DESC
|
||||||
|
LIMIT $limit
|
||||||
|
OFFSET $offset
|
||||||
|
)
|
||||||
|
|
||||||
|
SELECT
|
||||||
|
document_id,
|
||||||
|
CAST(DATETIME(activity.start_time, users.time_offset) AS TEXT) AS start_time,
|
||||||
|
title,
|
||||||
|
author,
|
||||||
|
duration,
|
||||||
|
page,
|
||||||
|
pages
|
||||||
|
FROM filtered_activity AS activity
|
||||||
|
LEFT JOIN documents ON documents.id = activity.document_id
|
||||||
|
LEFT JOIN users ON users.id = activity.user_id;
|
||||||
|
|
||||||
|
-- name: GetDailyReadStats :many
|
||||||
|
WITH RECURSIVE last_30_days AS (
|
||||||
|
SELECT DATE('now', time_offset) AS date
|
||||||
|
FROM users WHERE users.id = $user_id
|
||||||
|
UNION ALL
|
||||||
|
SELECT DATE(date, '-1 days')
|
||||||
|
FROM last_30_days
|
||||||
|
LIMIT 30
|
||||||
|
),
|
||||||
|
filtered_activity AS (
|
||||||
|
SELECT
|
||||||
|
user_id,
|
||||||
|
start_time,
|
||||||
|
duration
|
||||||
|
FROM activity
|
||||||
|
WHERE start_time > DATE('now', '-31 days')
|
||||||
|
AND activity.user_id = $user_id
|
||||||
|
),
|
||||||
|
activity_days AS (
|
||||||
|
SELECT
|
||||||
|
SUM(duration) AS seconds_read,
|
||||||
|
DATE(start_time, time_offset) AS day
|
||||||
|
FROM filtered_activity AS activity
|
||||||
|
LEFT JOIN users ON users.id = activity.user_id
|
||||||
|
GROUP BY day
|
||||||
|
LIMIT 30
|
||||||
|
)
|
||||||
|
SELECT
|
||||||
|
CAST(date AS TEXT),
|
||||||
|
CAST(CASE
|
||||||
|
WHEN seconds_read IS NULL THEN 0
|
||||||
|
ELSE seconds_read / 60
|
||||||
|
END AS INTEGER) AS minutes_read
|
||||||
|
FROM last_30_days
|
||||||
|
LEFT JOIN activity_days ON activity_days.day == last_30_days.date
|
||||||
|
ORDER BY date DESC
|
||||||
|
LIMIT 30;
|
||||||
|
|
||||||
|
-- name: GetDatabaseInfo :one
|
||||||
|
SELECT
|
||||||
|
(SELECT COUNT(rowid) FROM activity WHERE activity.user_id = $user_id) AS activity_size,
|
||||||
|
(SELECT COUNT(rowid) FROM documents) AS documents_size,
|
||||||
|
(SELECT COUNT(rowid) FROM document_progress WHERE document_progress.user_id = $user_id) AS progress_size,
|
||||||
|
(SELECT COUNT(rowid) FROM devices WHERE devices.user_id = $user_id) AS devices_size
|
||||||
|
LIMIT 1;
|
||||||
|
|
||||||
|
-- name: GetDeletedDocuments :many
|
||||||
|
SELECT documents.id
|
||||||
|
FROM documents
|
||||||
|
WHERE
|
||||||
|
documents.deleted = true
|
||||||
|
AND documents.id IN (sqlc.slice('document_ids'));
|
||||||
|
|
||||||
|
-- name: GetDevice :one
|
||||||
|
SELECT * FROM devices
|
||||||
|
WHERE id = $device_id LIMIT 1;
|
||||||
|
|
||||||
|
-- name: GetDevices :many
|
||||||
|
SELECT
|
||||||
|
devices.device_name,
|
||||||
|
CAST(DATETIME(devices.created_at, users.time_offset) AS TEXT) AS created_at,
|
||||||
|
CAST(DATETIME(devices.last_synced, users.time_offset) AS TEXT) AS last_synced
|
||||||
|
FROM devices
|
||||||
|
JOIN users ON users.id = devices.user_id
|
||||||
|
WHERE users.id = $user_id;
|
||||||
|
|
||||||
|
-- name: GetDocument :one
|
||||||
|
SELECT * FROM documents
|
||||||
|
WHERE id = $document_id LIMIT 1;
|
||||||
|
|
||||||
|
-- name: GetDocumentDaysRead :one
|
||||||
|
WITH document_days AS (
|
||||||
|
SELECT DATE(start_time, time_offset) AS dates
|
||||||
|
FROM activity
|
||||||
|
JOIN users ON users.id = activity.user_id
|
||||||
|
WHERE document_id = $document_id
|
||||||
|
AND user_id = $user_id
|
||||||
|
GROUP BY dates
|
||||||
|
)
|
||||||
|
SELECT CAST(COUNT(*) AS INTEGER) AS days_read
|
||||||
|
FROM document_days;
|
||||||
|
|
||||||
|
-- name: GetDocumentReadStats :one
|
||||||
|
SELECT
|
||||||
|
COUNT(DISTINCT page) AS pages_read,
|
||||||
|
SUM(duration) AS total_time
|
||||||
|
FROM activity
|
||||||
|
WHERE document_id = $document_id
|
||||||
|
AND user_id = $user_id
|
||||||
|
AND start_time >= $start_time;
|
||||||
|
|
||||||
|
-- name: GetDocumentReadStatsCapped :one
|
||||||
|
WITH capped_stats AS (
|
||||||
|
SELECT MIN(SUM(duration), CAST($page_duration_cap AS INTEGER)) AS durations
|
||||||
|
FROM activity
|
||||||
|
WHERE document_id = $document_id
|
||||||
|
AND user_id = $user_id
|
||||||
|
AND start_time >= $start_time
|
||||||
|
GROUP BY page
|
||||||
|
)
|
||||||
|
SELECT
|
||||||
|
CAST(COUNT(*) AS INTEGER) AS pages_read,
|
||||||
|
CAST(SUM(durations) AS INTEGER) AS total_time
|
||||||
|
FROM capped_stats;
|
||||||
|
|
||||||
|
-- name: GetDocumentWithStats :one
|
||||||
|
SELECT
|
||||||
|
docs.id,
|
||||||
|
docs.title,
|
||||||
|
docs.author,
|
||||||
|
docs.description,
|
||||||
|
docs.isbn10,
|
||||||
|
docs.isbn13,
|
||||||
|
docs.filepath,
|
||||||
|
docs.words,
|
||||||
|
|
||||||
|
CAST(COALESCE(dus.wpm, 0.0) AS INTEGER) AS wpm,
|
||||||
|
COALESCE(dus.page, 0) AS page,
|
||||||
|
COALESCE(dus.pages, 0) AS pages,
|
||||||
|
COALESCE(dus.read_pages, 0) AS read_pages,
|
||||||
|
COALESCE(dus.total_time_seconds, 0) AS total_time_seconds,
|
||||||
|
DATETIME(COALESCE(dus.last_read, "1970-01-01"), users.time_offset)
|
||||||
|
AS last_read,
|
||||||
|
CASE
|
||||||
|
WHEN dus.percentage > 97.0 THEN 100.0
|
||||||
|
WHEN dus.percentage IS NULL THEN 0.0
|
||||||
|
ELSE dus.percentage
|
||||||
|
END AS percentage,
|
||||||
|
CAST(CASE
|
||||||
|
WHEN dus.total_time_seconds IS NULL THEN 0.0
|
||||||
|
ELSE
|
||||||
|
CAST(dus.total_time_seconds AS REAL)
|
||||||
|
/ CAST(dus.read_pages AS REAL)
|
||||||
|
END AS INTEGER) AS seconds_per_page
|
||||||
|
FROM documents AS docs
|
||||||
|
LEFT JOIN users ON users.id = $user_id
|
||||||
|
LEFT JOIN
|
||||||
|
document_user_statistics AS dus
|
||||||
|
ON dus.document_id = docs.id AND dus.user_id = $user_id
|
||||||
|
WHERE users.id = $user_id
|
||||||
|
AND docs.id = $document_id
|
||||||
|
LIMIT 1;
|
||||||
|
|
||||||
|
-- name: GetDocuments :many
|
||||||
|
SELECT * FROM documents
|
||||||
|
ORDER BY created_at DESC
|
||||||
|
LIMIT $limit
|
||||||
|
OFFSET $offset;
|
||||||
|
|
||||||
|
-- name: GetDocumentsWithStats :many
|
||||||
|
SELECT
|
||||||
|
docs.id,
|
||||||
|
docs.title,
|
||||||
|
docs.author,
|
||||||
|
docs.description,
|
||||||
|
docs.isbn10,
|
||||||
|
docs.isbn13,
|
||||||
|
docs.filepath,
|
||||||
|
docs.words,
|
||||||
|
|
||||||
|
CAST(COALESCE(dus.wpm, 0.0) AS INTEGER) AS wpm,
|
||||||
|
COALESCE(dus.page, 0) AS page,
|
||||||
|
COALESCE(dus.pages, 0) AS pages,
|
||||||
|
COALESCE(dus.read_pages, 0) AS read_pages,
|
||||||
|
COALESCE(dus.total_time_seconds, 0) AS total_time_seconds,
|
||||||
|
DATETIME(COALESCE(dus.last_read, "1970-01-01"), users.time_offset)
|
||||||
|
AS last_read,
|
||||||
|
CASE
|
||||||
|
WHEN dus.percentage > 97.0 THEN 100.0
|
||||||
|
WHEN dus.percentage IS NULL THEN 0.0
|
||||||
|
ELSE dus.percentage
|
||||||
|
END AS percentage,
|
||||||
|
CASE
|
||||||
|
WHEN dus.total_time_seconds IS NULL THEN 0.0
|
||||||
|
ELSE
|
||||||
|
ROUND(
|
||||||
|
CAST(dus.total_time_seconds AS REAL)
|
||||||
|
/ CAST(dus.read_pages AS REAL)
|
||||||
|
)
|
||||||
|
END AS seconds_per_page
|
||||||
|
FROM documents AS docs
|
||||||
|
LEFT JOIN users ON users.id = $user_id
|
||||||
|
LEFT JOIN
|
||||||
|
document_user_statistics AS dus
|
||||||
|
ON dus.document_id = docs.id AND dus.user_id = $user_id
|
||||||
|
WHERE docs.deleted = false
|
||||||
|
ORDER BY dus.last_read DESC, docs.created_at DESC
|
||||||
|
LIMIT $limit
|
||||||
|
OFFSET $offset;
|
||||||
|
|
||||||
|
-- name: GetLastActivity :one
|
||||||
|
SELECT start_time
|
||||||
|
FROM activity
|
||||||
|
WHERE device_id = $device_id
|
||||||
|
AND user_id = $user_id
|
||||||
|
ORDER BY start_time DESC LIMIT 1;
|
||||||
|
|
||||||
|
-- name: GetMissingDocuments :many
|
||||||
|
SELECT documents.* FROM documents
|
||||||
|
WHERE
|
||||||
|
documents.filepath IS NOT NULL
|
||||||
|
AND documents.deleted = false
|
||||||
|
AND documents.id NOT IN (sqlc.slice('document_ids'));
|
||||||
|
|
||||||
|
-- name: GetProgress :one
|
||||||
|
SELECT
|
||||||
|
document_progress.*,
|
||||||
|
devices.device_name
|
||||||
|
FROM document_progress
|
||||||
|
JOIN devices ON document_progress.device_id = devices.id
|
||||||
|
WHERE
|
||||||
|
document_progress.user_id = $user_id
|
||||||
|
AND document_progress.document_id = $document_id
|
||||||
|
ORDER BY
|
||||||
|
document_progress.created_at
|
||||||
|
DESC
|
||||||
|
LIMIT 1;
|
||||||
|
|
||||||
-- name: GetUser :one
|
-- name: GetUser :one
|
||||||
SELECT * FROM users
|
SELECT * FROM users
|
||||||
WHERE id = $user_id LIMIT 1;
|
WHERE id = $user_id LIMIT 1;
|
||||||
|
|
||||||
|
-- name: GetUserStreaks :many
|
||||||
|
SELECT * FROM user_streaks
|
||||||
|
WHERE user_id = $user_id;
|
||||||
|
|
||||||
|
-- name: GetUsers :many
|
||||||
|
SELECT * FROM users
|
||||||
|
WHERE
|
||||||
|
users.id = $user
|
||||||
|
OR ?1 IN (
|
||||||
|
SELECT id
|
||||||
|
FROM users
|
||||||
|
WHERE id = $user
|
||||||
|
AND admin = 1
|
||||||
|
)
|
||||||
|
ORDER BY created_at DESC
|
||||||
|
LIMIT $limit
|
||||||
|
OFFSET $offset;
|
||||||
|
|
||||||
|
-- name: GetWPMLeaderboard :many
|
||||||
|
SELECT
|
||||||
|
user_id,
|
||||||
|
CAST(SUM(words_read) AS INTEGER) AS total_words_read,
|
||||||
|
CAST(SUM(total_time_seconds) AS INTEGER) AS total_seconds,
|
||||||
|
ROUND(CAST(SUM(words_read) AS REAL) / (SUM(total_time_seconds) / 60.0), 2)
|
||||||
|
AS wpm
|
||||||
|
FROM document_user_statistics
|
||||||
|
WHERE words_read > 0
|
||||||
|
GROUP BY user_id
|
||||||
|
ORDER BY wpm DESC;
|
||||||
|
|
||||||
|
-- name: GetWantedDocuments :many
|
||||||
|
SELECT
|
||||||
|
CAST(value AS TEXT) AS id,
|
||||||
|
CAST((documents.filepath IS NULL) AS BOOLEAN) AS want_file,
|
||||||
|
CAST((IFNULL(documents.synced, false) != true) AS BOOLEAN) AS want_metadata
|
||||||
|
FROM json_each(?1)
|
||||||
|
LEFT JOIN documents
|
||||||
|
ON value = documents.id
|
||||||
|
WHERE (
|
||||||
|
documents.id IS NOT NULL
|
||||||
|
AND documents.deleted = false
|
||||||
|
AND (
|
||||||
|
documents.synced = false
|
||||||
|
OR documents.filepath IS NULL
|
||||||
|
)
|
||||||
|
)
|
||||||
|
OR (documents.id IS NULL)
|
||||||
|
OR CAST($document_ids AS TEXT) != CAST($document_ids AS TEXT);
|
||||||
|
|
||||||
|
-- name: UpdateDocumentDeleted :one
|
||||||
|
UPDATE documents
|
||||||
|
SET
|
||||||
|
deleted = $deleted
|
||||||
|
WHERE id = $id
|
||||||
|
RETURNING *;
|
||||||
|
|
||||||
|
-- name: UpdateDocumentSync :one
|
||||||
|
UPDATE documents
|
||||||
|
SET
|
||||||
|
synced = $synced
|
||||||
|
WHERE id = $id
|
||||||
|
RETURNING *;
|
||||||
|
|
||||||
|
-- name: UpdateProgress :one
|
||||||
|
INSERT OR REPLACE INTO document_progress (
|
||||||
|
user_id,
|
||||||
|
document_id,
|
||||||
|
device_id,
|
||||||
|
percentage,
|
||||||
|
progress
|
||||||
|
)
|
||||||
|
VALUES (?, ?, ?, ?, ?)
|
||||||
|
RETURNING *;
|
||||||
|
|
||||||
-- name: UpdateUser :one
|
-- name: UpdateUser :one
|
||||||
UPDATE users
|
UPDATE users
|
||||||
SET
|
SET
|
||||||
@ -29,6 +375,15 @@ SET
|
|||||||
WHERE id = $user_id
|
WHERE id = $user_id
|
||||||
RETURNING *;
|
RETURNING *;
|
||||||
|
|
||||||
|
-- name: UpsertDevice :one
|
||||||
|
INSERT INTO devices (id, user_id, last_synced, device_name)
|
||||||
|
VALUES (?, ?, ?, ?)
|
||||||
|
ON CONFLICT DO UPDATE
|
||||||
|
SET
|
||||||
|
device_name = COALESCE(excluded.device_name, device_name),
|
||||||
|
last_synced = COALESCE(excluded.last_synced, last_synced)
|
||||||
|
RETURNING *;
|
||||||
|
|
||||||
-- name: UpsertDocument :one
|
-- name: UpsertDocument :one
|
||||||
INSERT INTO documents (
|
INSERT INTO documents (
|
||||||
id,
|
id,
|
||||||
@ -65,406 +420,3 @@ SET
|
|||||||
isbn10 = COALESCE(excluded.isbn10, isbn10),
|
isbn10 = COALESCE(excluded.isbn10, isbn10),
|
||||||
isbn13 = COALESCE(excluded.isbn13, isbn13)
|
isbn13 = COALESCE(excluded.isbn13, isbn13)
|
||||||
RETURNING *;
|
RETURNING *;
|
||||||
|
|
||||||
-- name: DeleteDocument :execrows
|
|
||||||
UPDATE documents
|
|
||||||
SET
|
|
||||||
deleted = 1
|
|
||||||
WHERE id = $id;
|
|
||||||
|
|
||||||
-- name: UpdateDocumentSync :one
|
|
||||||
UPDATE documents
|
|
||||||
SET
|
|
||||||
synced = $synced
|
|
||||||
WHERE id = $id
|
|
||||||
RETURNING *;
|
|
||||||
|
|
||||||
-- name: UpdateDocumentDeleted :one
|
|
||||||
UPDATE documents
|
|
||||||
SET
|
|
||||||
deleted = $deleted
|
|
||||||
WHERE id = $id
|
|
||||||
RETURNING *;
|
|
||||||
|
|
||||||
-- name: GetDocument :one
|
|
||||||
SELECT * FROM documents
|
|
||||||
WHERE id = $document_id LIMIT 1;
|
|
||||||
|
|
||||||
-- name: UpsertDevice :one
|
|
||||||
INSERT INTO devices (id, user_id, device_name)
|
|
||||||
VALUES (?, ?, ?)
|
|
||||||
ON CONFLICT DO UPDATE
|
|
||||||
SET
|
|
||||||
device_name = COALESCE(excluded.device_name, device_name)
|
|
||||||
RETURNING *;
|
|
||||||
|
|
||||||
-- name: GetDevice :one
|
|
||||||
SELECT * FROM devices
|
|
||||||
WHERE id = $device_id LIMIT 1;
|
|
||||||
|
|
||||||
-- name: UpdateProgress :one
|
|
||||||
INSERT OR REPLACE INTO document_progress (
|
|
||||||
user_id,
|
|
||||||
document_id,
|
|
||||||
device_id,
|
|
||||||
percentage,
|
|
||||||
progress
|
|
||||||
)
|
|
||||||
VALUES (?, ?, ?, ?, ?)
|
|
||||||
RETURNING *;
|
|
||||||
|
|
||||||
-- name: GetProgress :one
|
|
||||||
SELECT
|
|
||||||
document_progress.*,
|
|
||||||
devices.device_name
|
|
||||||
FROM document_progress
|
|
||||||
JOIN devices ON document_progress.device_id = devices.id
|
|
||||||
WHERE
|
|
||||||
document_progress.user_id = $user_id
|
|
||||||
AND document_progress.document_id = $document_id
|
|
||||||
ORDER BY
|
|
||||||
document_progress.created_at
|
|
||||||
DESC
|
|
||||||
LIMIT 1;
|
|
||||||
|
|
||||||
-- name: GetLastActivity :one
|
|
||||||
SELECT start_time
|
|
||||||
FROM activity
|
|
||||||
WHERE device_id = $device_id
|
|
||||||
AND user_id = $user_id
|
|
||||||
ORDER BY start_time DESC LIMIT 1;
|
|
||||||
|
|
||||||
-- name: AddActivity :one
|
|
||||||
INSERT INTO activity (
|
|
||||||
user_id,
|
|
||||||
document_id,
|
|
||||||
device_id,
|
|
||||||
start_time,
|
|
||||||
duration,
|
|
||||||
page,
|
|
||||||
pages
|
|
||||||
)
|
|
||||||
VALUES (?, ?, ?, ?, ?, ?, ?)
|
|
||||||
RETURNING *;
|
|
||||||
|
|
||||||
-- name: GetMissingDocuments :many
|
|
||||||
SELECT documents.* FROM documents
|
|
||||||
WHERE
|
|
||||||
documents.filepath IS NOT NULL
|
|
||||||
AND documents.deleted = false
|
|
||||||
AND documents.id NOT IN (sqlc.slice('document_ids'));
|
|
||||||
|
|
||||||
-- name: GetWantedDocuments :many
|
|
||||||
SELECT
|
|
||||||
CAST(value AS TEXT) AS id,
|
|
||||||
CAST((documents.filepath IS NULL) AS BOOLEAN) AS want_file,
|
|
||||||
CAST((IFNULL(documents.synced, false) != true) AS BOOLEAN) AS want_metadata
|
|
||||||
FROM json_each(?1)
|
|
||||||
LEFT JOIN documents
|
|
||||||
ON value = documents.id
|
|
||||||
WHERE (
|
|
||||||
documents.id IS NOT NULL
|
|
||||||
AND documents.deleted = false
|
|
||||||
AND (
|
|
||||||
documents.synced = false
|
|
||||||
OR documents.filepath IS NULL
|
|
||||||
)
|
|
||||||
)
|
|
||||||
OR (documents.id IS NULL)
|
|
||||||
OR CAST($document_ids AS TEXT) != CAST($document_ids AS TEXT);
|
|
||||||
|
|
||||||
-- name: GetDeletedDocuments :many
|
|
||||||
SELECT documents.id
|
|
||||||
FROM documents
|
|
||||||
WHERE
|
|
||||||
documents.deleted = true
|
|
||||||
AND documents.id IN (sqlc.slice('document_ids'));
|
|
||||||
|
|
||||||
-- name: GetDocuments :many
|
|
||||||
SELECT * FROM documents
|
|
||||||
ORDER BY created_at DESC
|
|
||||||
LIMIT $limit
|
|
||||||
OFFSET $offset;
|
|
||||||
|
|
||||||
-- name: GetDocumentWithStats :one
|
|
||||||
WITH true_progress AS (
|
|
||||||
SELECT
|
|
||||||
start_time AS last_read,
|
|
||||||
SUM(duration) AS total_time_seconds,
|
|
||||||
document_id,
|
|
||||||
page,
|
|
||||||
pages,
|
|
||||||
|
|
||||||
-- Determine Read Pages
|
|
||||||
COUNT(DISTINCT page) AS read_pages,
|
|
||||||
|
|
||||||
-- Derive Percentage of Book
|
|
||||||
ROUND(CAST(page AS REAL) / CAST(pages AS REAL) * 100, 2) AS percentage
|
|
||||||
FROM rescaled_activity
|
|
||||||
WHERE user_id = $user_id
|
|
||||||
AND document_id = $document_id
|
|
||||||
GROUP BY document_id
|
|
||||||
HAVING MAX(start_time)
|
|
||||||
LIMIT 1
|
|
||||||
)
|
|
||||||
SELECT
|
|
||||||
documents.*,
|
|
||||||
|
|
||||||
CAST(IFNULL(page, 0) AS INTEGER) AS page,
|
|
||||||
CAST(IFNULL(pages, 0) AS INTEGER) AS pages,
|
|
||||||
CAST(IFNULL(total_time_seconds, 0) AS INTEGER) AS total_time_seconds,
|
|
||||||
CAST(DATETIME(IFNULL(last_read, "1970-01-01"), time_offset) AS TEXT) AS last_read,
|
|
||||||
CAST(IFNULL(read_pages, 0) AS INTEGER) AS read_pages,
|
|
||||||
|
|
||||||
-- Calculate Seconds / Page
|
|
||||||
-- 1. Calculate Total Time in Seconds (Sum Duration in Activity)
|
|
||||||
-- 2. Divide by Read Pages (Distinct Pages in Activity)
|
|
||||||
CAST(CASE
|
|
||||||
WHEN total_time_seconds IS NULL THEN 0.0
|
|
||||||
ELSE ROUND(CAST(total_time_seconds AS REAL) / CAST(read_pages AS REAL))
|
|
||||||
END AS INTEGER) AS seconds_per_page,
|
|
||||||
|
|
||||||
-- Arbitrarily >97% is Complete
|
|
||||||
CAST(CASE
|
|
||||||
WHEN percentage > 97.0 THEN 100.0
|
|
||||||
WHEN percentage IS NULL THEN 0.0
|
|
||||||
ELSE percentage
|
|
||||||
END AS REAL) AS percentage
|
|
||||||
|
|
||||||
FROM documents
|
|
||||||
LEFT JOIN true_progress ON true_progress.document_id = documents.id
|
|
||||||
LEFT JOIN users ON users.id = $user_id
|
|
||||||
WHERE documents.id = $document_id
|
|
||||||
ORDER BY true_progress.last_read DESC, documents.created_at DESC
|
|
||||||
LIMIT 1;
|
|
||||||
|
|
||||||
-- name: GetDocumentsWithStats :many
|
|
||||||
WITH true_progress AS (
|
|
||||||
SELECT
|
|
||||||
start_time AS last_read,
|
|
||||||
SUM(duration) AS total_time_seconds,
|
|
||||||
document_id,
|
|
||||||
page,
|
|
||||||
pages,
|
|
||||||
ROUND(CAST(page AS REAL) / CAST(pages AS REAL) * 100, 2) AS percentage
|
|
||||||
FROM activity
|
|
||||||
WHERE user_id = $user_id
|
|
||||||
GROUP BY document_id
|
|
||||||
HAVING MAX(start_time)
|
|
||||||
)
|
|
||||||
SELECT
|
|
||||||
documents.*,
|
|
||||||
|
|
||||||
CAST(IFNULL(page, 0) AS INTEGER) AS page,
|
|
||||||
CAST(IFNULL(pages, 0) AS INTEGER) AS pages,
|
|
||||||
CAST(IFNULL(total_time_seconds, 0) AS INTEGER) AS total_time_seconds,
|
|
||||||
CAST(DATETIME(IFNULL(last_read, "1970-01-01"), time_offset) AS TEXT) AS last_read,
|
|
||||||
|
|
||||||
CAST(CASE
|
|
||||||
WHEN percentage > 97.0 THEN 100.0
|
|
||||||
WHEN percentage IS NULL THEN 0.0
|
|
||||||
ELSE percentage
|
|
||||||
END AS REAL) AS percentage
|
|
||||||
|
|
||||||
FROM documents
|
|
||||||
LEFT JOIN true_progress ON true_progress.document_id = documents.id
|
|
||||||
LEFT JOIN users ON users.id = $user_id
|
|
||||||
WHERE documents.deleted == false
|
|
||||||
ORDER BY true_progress.last_read DESC, documents.created_at DESC
|
|
||||||
LIMIT $limit
|
|
||||||
OFFSET $offset;
|
|
||||||
|
|
||||||
-- name: GetUsers :many
|
|
||||||
SELECT * FROM users
|
|
||||||
WHERE
|
|
||||||
users.id = $user
|
|
||||||
OR ?1 IN (
|
|
||||||
SELECT id
|
|
||||||
FROM users
|
|
||||||
WHERE id = $user
|
|
||||||
AND admin = 1
|
|
||||||
)
|
|
||||||
ORDER BY created_at DESC
|
|
||||||
LIMIT $limit
|
|
||||||
OFFSET $offset;
|
|
||||||
|
|
||||||
-- name: GetActivity :many
|
|
||||||
SELECT
|
|
||||||
document_id,
|
|
||||||
CAST(DATETIME(activity.start_time, time_offset) AS TEXT) AS start_time,
|
|
||||||
title,
|
|
||||||
author,
|
|
||||||
duration,
|
|
||||||
page,
|
|
||||||
pages
|
|
||||||
FROM activity
|
|
||||||
LEFT JOIN documents ON documents.id = activity.document_id
|
|
||||||
LEFT JOIN users ON users.id = activity.user_id
|
|
||||||
WHERE
|
|
||||||
activity.user_id = $user_id
|
|
||||||
AND (
|
|
||||||
CAST($doc_filter AS BOOLEAN) = TRUE
|
|
||||||
AND document_id = $document_id
|
|
||||||
)
|
|
||||||
OR $doc_filter = FALSE
|
|
||||||
ORDER BY start_time DESC
|
|
||||||
LIMIT $limit
|
|
||||||
OFFSET $offset;
|
|
||||||
|
|
||||||
-- name: GetDevices :many
|
|
||||||
SELECT
|
|
||||||
devices.device_name,
|
|
||||||
CAST(DATETIME(devices.created_at, users.time_offset) AS TEXT) AS created_at,
|
|
||||||
CAST(DATETIME(MAX(activity.created_at), users.time_offset) AS TEXT) AS last_sync
|
|
||||||
FROM activity
|
|
||||||
JOIN devices ON devices.id = activity.device_id
|
|
||||||
JOIN users ON users.id = $user_id
|
|
||||||
WHERE devices.user_id = $user_id
|
|
||||||
GROUP BY activity.device_id;
|
|
||||||
|
|
||||||
-- name: GetDocumentReadStats :one
|
|
||||||
SELECT
|
|
||||||
COUNT(DISTINCT page) AS pages_read,
|
|
||||||
SUM(duration) AS total_time
|
|
||||||
FROM rescaled_activity
|
|
||||||
WHERE document_id = $document_id
|
|
||||||
AND user_id = $user_id
|
|
||||||
AND start_time >= $start_time;
|
|
||||||
|
|
||||||
-- name: GetDocumentReadStatsCapped :one
|
|
||||||
WITH capped_stats AS (
|
|
||||||
SELECT MIN(SUM(duration), CAST($page_duration_cap AS INTEGER)) AS durations
|
|
||||||
FROM rescaled_activity
|
|
||||||
WHERE document_id = $document_id
|
|
||||||
AND user_id = $user_id
|
|
||||||
AND start_time >= $start_time
|
|
||||||
GROUP BY page
|
|
||||||
)
|
|
||||||
SELECT
|
|
||||||
CAST(COUNT(*) AS INTEGER) AS pages_read,
|
|
||||||
CAST(SUM(durations) AS INTEGER) AS total_time
|
|
||||||
FROM capped_stats;
|
|
||||||
|
|
||||||
-- name: GetDocumentDaysRead :one
|
|
||||||
WITH document_days AS (
|
|
||||||
SELECT DATE(start_time, time_offset) AS dates
|
|
||||||
FROM activity
|
|
||||||
JOIN users ON users.id = activity.user_id
|
|
||||||
WHERE document_id = $document_id
|
|
||||||
AND user_id = $user_id
|
|
||||||
GROUP BY dates
|
|
||||||
)
|
|
||||||
SELECT CAST(COUNT(*) AS INTEGER) AS days_read
|
|
||||||
FROM document_days;
|
|
||||||
|
|
||||||
-- name: GetUserWindowStreaks :one
|
|
||||||
WITH document_windows AS (
|
|
||||||
SELECT
|
|
||||||
CASE
|
|
||||||
WHEN ?2 = "WEEK" THEN DATE(start_time, time_offset, 'weekday 0', '-7 day')
|
|
||||||
WHEN ?2 = "DAY" THEN DATE(start_time, time_offset)
|
|
||||||
END AS read_window,
|
|
||||||
time_offset
|
|
||||||
FROM activity
|
|
||||||
JOIN users ON users.id = activity.user_id
|
|
||||||
WHERE user_id = $user_id
|
|
||||||
AND CAST($window AS TEXT) = CAST($window AS TEXT)
|
|
||||||
GROUP BY read_window
|
|
||||||
),
|
|
||||||
partitions AS (
|
|
||||||
SELECT
|
|
||||||
document_windows.*,
|
|
||||||
row_number() OVER (
|
|
||||||
PARTITION BY 1 ORDER BY read_window DESC
|
|
||||||
) AS seqnum
|
|
||||||
FROM document_windows
|
|
||||||
),
|
|
||||||
streaks AS (
|
|
||||||
SELECT
|
|
||||||
COUNT(*) AS streak,
|
|
||||||
MIN(read_window) AS start_date,
|
|
||||||
MAX(read_window) AS end_date,
|
|
||||||
time_offset
|
|
||||||
FROM partitions
|
|
||||||
GROUP BY
|
|
||||||
CASE
|
|
||||||
WHEN ?2 = "DAY" THEN DATE(read_window, '+' || seqnum || ' day')
|
|
||||||
WHEN ?2 = "WEEK" THEN DATE(read_window, '+' || (seqnum * 7) || ' day')
|
|
||||||
END,
|
|
||||||
time_offset
|
|
||||||
ORDER BY end_date DESC
|
|
||||||
),
|
|
||||||
max_streak AS (
|
|
||||||
SELECT
|
|
||||||
MAX(streak) AS max_streak,
|
|
||||||
start_date AS max_streak_start_date,
|
|
||||||
end_date AS max_streak_end_date
|
|
||||||
FROM streaks
|
|
||||||
LIMIT 1
|
|
||||||
),
|
|
||||||
current_streak AS (
|
|
||||||
SELECT
|
|
||||||
streak AS current_streak,
|
|
||||||
start_date AS current_streak_start_date,
|
|
||||||
end_date AS current_streak_end_date
|
|
||||||
FROM streaks
|
|
||||||
WHERE CASE
|
|
||||||
WHEN ?2 = "WEEK" THEN
|
|
||||||
DATE('now', time_offset, 'weekday 0', '-14 day') = current_streak_end_date
|
|
||||||
OR DATE('now', time_offset, 'weekday 0', '-7 day') = current_streak_end_date
|
|
||||||
WHEN ?2 = "DAY" THEN
|
|
||||||
DATE('now', time_offset, '-1 day') = current_streak_end_date
|
|
||||||
OR DATE('now', time_offset) = current_streak_end_date
|
|
||||||
END
|
|
||||||
LIMIT 1
|
|
||||||
)
|
|
||||||
SELECT
|
|
||||||
CAST(IFNULL(max_streak, 0) AS INTEGER) AS max_streak,
|
|
||||||
CAST(IFNULL(max_streak_start_date, "N/A") AS TEXT) AS max_streak_start_date,
|
|
||||||
CAST(IFNULL(max_streak_end_date, "N/A") AS TEXT) AS max_streak_end_date,
|
|
||||||
IFNULL(current_streak, 0) AS current_streak,
|
|
||||||
CAST(IFNULL(current_streak_start_date, "N/A") AS TEXT) AS current_streak_start_date,
|
|
||||||
CAST(IFNULL(current_streak_end_date, "N/A") AS TEXT) AS current_streak_end_date
|
|
||||||
FROM max_streak
|
|
||||||
LEFT JOIN current_streak ON 1 = 1
|
|
||||||
LIMIT 1;
|
|
||||||
|
|
||||||
-- name: GetDatabaseInfo :one
|
|
||||||
SELECT
|
|
||||||
(SELECT COUNT(rowid) FROM activity WHERE activity.user_id = $user_id) AS activity_size,
|
|
||||||
(SELECT COUNT(rowid) FROM documents) AS documents_size,
|
|
||||||
(SELECT COUNT(rowid) FROM document_progress WHERE document_progress.user_id = $user_id) AS progress_size,
|
|
||||||
(SELECT COUNT(rowid) FROM devices WHERE devices.user_id = $user_id) AS devices_size
|
|
||||||
LIMIT 1;
|
|
||||||
|
|
||||||
-- name: GetDailyReadStats :many
|
|
||||||
WITH RECURSIVE last_30_days AS (
|
|
||||||
SELECT DATE('now', time_offset) AS date
|
|
||||||
FROM users WHERE users.id = $user_id
|
|
||||||
UNION ALL
|
|
||||||
SELECT DATE(date, '-1 days')
|
|
||||||
FROM last_30_days
|
|
||||||
LIMIT 30
|
|
||||||
),
|
|
||||||
activity_records AS (
|
|
||||||
SELECT
|
|
||||||
SUM(duration) AS seconds_read,
|
|
||||||
DATE(start_time, time_offset) AS day
|
|
||||||
FROM activity
|
|
||||||
LEFT JOIN users ON users.id = activity.user_id
|
|
||||||
WHERE user_id = $user_id
|
|
||||||
AND start_time > DATE('now', '-31 days')
|
|
||||||
GROUP BY day
|
|
||||||
ORDER BY day DESC
|
|
||||||
LIMIT 30
|
|
||||||
)
|
|
||||||
SELECT
|
|
||||||
CAST(date AS TEXT),
|
|
||||||
CAST(CASE
|
|
||||||
WHEN seconds_read IS NULL THEN 0
|
|
||||||
ELSE seconds_read / 60
|
|
||||||
END AS INTEGER) AS minutes_read
|
|
||||||
FROM last_30_days
|
|
||||||
LEFT JOIN activity_records ON activity_records.day == last_30_days.date
|
|
||||||
ORDER BY date DESC
|
|
||||||
LIMIT 30;
|
|
||||||
|
@@ -13,7 +13,7 @@ import (
 )
 
 const addActivity = `-- name: AddActivity :one
-INSERT INTO activity (
+INSERT INTO raw_activity (
     user_id,
     document_id,
     device_id,
@@ -23,7 +23,7 @@ INSERT INTO activity (
     pages
 )
 VALUES (?, ?, ?, ?, ?, ?, ?)
-RETURNING id, user_id, document_id, device_id, start_time, duration, page, pages, created_at
+RETURNING id, user_id, document_id, device_id, start_time, page, pages, duration, created_at
 `
 
 type AddActivityParams struct {
@@ -36,7 +36,7 @@ type AddActivityParams struct {
     Pages int64 `json:"pages"`
 }
 
-func (q *Queries) AddActivity(ctx context.Context, arg AddActivityParams) (Activity, error) {
+func (q *Queries) AddActivity(ctx context.Context, arg AddActivityParams) (RawActivity, error) {
     row := q.db.QueryRowContext(ctx, addActivity,
         arg.UserID,
         arg.DocumentID,
@@ -46,16 +46,16 @@ func (q *Queries) AddActivity(ctx context.Context, arg AddActivityParams) (Activ
         arg.Page,
         arg.Pages,
     )
-    var i Activity
+    var i RawActivity
     err := row.Scan(
         &i.ID,
         &i.UserID,
         &i.DocumentID,
         &i.DeviceID,
         &i.StartTime,
-        &i.Duration,
         &i.Page,
         &i.Pages,
+        &i.Duration,
         &i.CreatedAt,
     )
     return i, err
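For context on how the regenerated wrapper is consumed, here is a minimal caller sketch. It assumes the sqlc-generated package above is imported under the name database and that a connected *database.Queries value named queries already exists; those names (and the exact field types) are illustrative, not part of this diff. The point it shows is that AddActivity now writes to raw_activity and returns a RawActivity row.

// Sketch only: "database" stands in for the generated package (import path omitted,
// it is not shown in this diff); field values are illustrative.
func recordSync(ctx context.Context, queries *database.Queries) error {
    row, err := queries.AddActivity(ctx, database.AddActivityParams{
        UserID:     "user-1",
        DocumentID: "doc-1",
        DeviceID:   "device-1",
        StartTime:  time.Now().UTC(), // assumed to be a time.Time in the generated struct
        Duration:   60,               // seconds spent reading
        Page:       10,
        Pages:      200,
    })
    if err != nil {
        return err
    }
    log.Printf("stored raw_activity row %d", row.ID)
    return nil
}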
@@ -149,27 +149,39 @@ func (q *Queries) DeleteDocument(ctx context.Context, id string) (int64, error)
 }
 
 const getActivity = `-- name: GetActivity :many
+WITH filtered_activity AS (
+    SELECT
+        document_id,
+        user_id,
+        start_time,
+        duration,
+        page,
+        pages
+    FROM activity
+    WHERE
+        activity.user_id = ?1
+        AND (
+            (
+                CAST(?2 AS BOOLEAN) = TRUE
+                AND document_id = ?3
+            ) OR ?2 = FALSE
+        )
+    ORDER BY start_time DESC
+    LIMIT ?5
+    OFFSET ?4
+)
 
 SELECT
     document_id,
-    CAST(DATETIME(activity.start_time, time_offset) AS TEXT) AS start_time,
+    CAST(DATETIME(activity.start_time, users.time_offset) AS TEXT) AS start_time,
     title,
     author,
     duration,
     page,
     pages
-FROM activity
+FROM filtered_activity AS activity
 LEFT JOIN documents ON documents.id = activity.document_id
 LEFT JOIN users ON users.id = activity.user_id
-WHERE
-    activity.user_id = ?1
-    AND (
-        CAST(?2 AS BOOLEAN) = TRUE
-        AND document_id = ?3
-    )
-    OR ?2 = FALSE
-ORDER BY start_time DESC
-LIMIT ?5
-OFFSET ?4
 `
 
 type GetActivityParams struct {
@@ -236,16 +248,22 @@ WITH RECURSIVE last_30_days AS (
     FROM last_30_days
     LIMIT 30
 ),
-activity_records AS (
+filtered_activity AS (
+    SELECT
+        user_id,
+        start_time,
+        duration
+    FROM activity
+    WHERE start_time > DATE('now', '-31 days')
+    AND activity.user_id = ?1
+),
+activity_days AS (
     SELECT
         SUM(duration) AS seconds_read,
         DATE(start_time, time_offset) AS day
-    FROM activity
+    FROM filtered_activity AS activity
     LEFT JOIN users ON users.id = activity.user_id
-    WHERE user_id = ?1
-    AND start_time > DATE('now', '-31 days')
     GROUP BY day
-    ORDER BY day DESC
     LIMIT 30
 )
 SELECT
@@ -255,7 +273,7 @@ SELECT
         ELSE seconds_read / 60
     END AS INTEGER) AS minutes_read
 FROM last_30_days
-LEFT JOIN activity_records ON activity_records.day == last_30_days.date
+LEFT JOIN activity_days ON activity_days.day == last_30_days.date
 ORDER BY date DESC
 LIMIT 30
 `
@@ -358,7 +376,7 @@ func (q *Queries) GetDeletedDocuments(ctx context.Context, documentIds []string)
 }
 
 const getDevice = `-- name: GetDevice :one
-SELECT id, user_id, device_name, created_at, sync FROM devices
+SELECT id, user_id, device_name, last_synced, created_at, sync FROM devices
 WHERE id = ?1 LIMIT 1
 `
 
@@ -369,6 +387,7 @@ func (q *Queries) GetDevice(ctx context.Context, deviceID string) (Device, error
         &i.ID,
         &i.UserID,
         &i.DeviceName,
+        &i.LastSynced,
         &i.CreatedAt,
         &i.Sync,
     )
@@ -379,18 +398,16 @@ const getDevices = `-- name: GetDevices :many
 SELECT
     devices.device_name,
     CAST(DATETIME(devices.created_at, users.time_offset) AS TEXT) AS created_at,
-    CAST(DATETIME(MAX(activity.created_at), users.time_offset) AS TEXT) AS last_sync
-FROM activity
-JOIN devices ON devices.id = activity.device_id
-JOIN users ON users.id = ?1
-WHERE devices.user_id = ?1
-GROUP BY activity.device_id
+    CAST(DATETIME(devices.last_synced, users.time_offset) AS TEXT) AS last_synced
+FROM devices
+JOIN users ON users.id = devices.user_id
+WHERE users.id = ?1
 `
 
 type GetDevicesRow struct {
     DeviceName string `json:"device_name"`
     CreatedAt  string `json:"created_at"`
-    LastSync   string `json:"last_sync"`
+    LastSynced string `json:"last_synced"`
 }
 
 func (q *Queries) GetDevices(ctx context.Context, userID string) ([]GetDevicesRow, error) {
@@ -402,7 +419,7 @@ func (q *Queries) GetDevices(ctx context.Context, userID string) ([]GetDevicesRo
     var items []GetDevicesRow
     for rows.Next() {
         var i GetDevicesRow
-        if err := rows.Scan(&i.DeviceName, &i.CreatedAt, &i.LastSync); err != nil {
+        if err := rows.Scan(&i.DeviceName, &i.CreatedAt, &i.LastSynced); err != nil {
             return nil, err
         }
         items = append(items, i)
@@ -477,7 +494,7 @@ const getDocumentReadStats = `-- name: GetDocumentReadStats :one
 SELECT
     COUNT(DISTINCT page) AS pages_read,
     SUM(duration) AS total_time
-FROM rescaled_activity
+FROM activity
 WHERE document_id = ?1
 AND user_id = ?2
 AND start_time >= ?3
@@ -504,7 +521,7 @@ func (q *Queries) GetDocumentReadStats(ctx context.Context, arg GetDocumentReadS
 const getDocumentReadStatsCapped = `-- name: GetDocumentReadStatsCapped :one
 WITH capped_stats AS (
     SELECT MIN(SUM(duration), CAST(?1 AS INTEGER)) AS durations
-    FROM rescaled_activity
+    FROM activity
     WHERE document_id = ?2
     AND user_id = ?3
     AND start_time >= ?4
@@ -541,55 +558,41 @@ func (q *Queries) GetDocumentReadStatsCapped(ctx context.Context, arg GetDocumen
 }
 
 const getDocumentWithStats = `-- name: GetDocumentWithStats :one
-WITH true_progress AS (
-    SELECT
-        start_time AS last_read,
-        SUM(duration) AS total_time_seconds,
-        document_id,
-        page,
-        pages,
-
-        -- Determine Read Pages
-        COUNT(DISTINCT page) AS read_pages,
-
-        -- Derive Percentage of Book
-        ROUND(CAST(page AS REAL) / CAST(pages AS REAL) * 100, 2) AS percentage
-    FROM rescaled_activity
-    WHERE user_id = ?1
-    AND document_id = ?2
-    GROUP BY document_id
-    HAVING MAX(start_time)
-    LIMIT 1
-)
 SELECT
-    documents.id, documents.md5, documents.filepath, documents.coverfile, documents.title, documents.author, documents.series, documents.series_index, documents.lang, documents.description, documents.words, documents.gbid, documents.olid, documents.isbn10, documents.isbn13, documents.synced, documents.deleted, documents.updated_at, documents.created_at,
-
-    CAST(IFNULL(page, 0) AS INTEGER) AS page,
-    CAST(IFNULL(pages, 0) AS INTEGER) AS pages,
-    CAST(IFNULL(total_time_seconds, 0) AS INTEGER) AS total_time_seconds,
-    CAST(DATETIME(IFNULL(last_read, "1970-01-01"), time_offset) AS TEXT) AS last_read,
-    CAST(IFNULL(read_pages, 0) AS INTEGER) AS read_pages,
-
-    -- Calculate Seconds / Page
-    -- 1. Calculate Total Time in Seconds (Sum Duration in Activity)
-    -- 2. Divide by Read Pages (Distinct Pages in Activity)
-    CAST(CASE
-        WHEN total_time_seconds IS NULL THEN 0.0
-        ELSE ROUND(CAST(total_time_seconds AS REAL) / CAST(read_pages AS REAL))
-    END AS INTEGER) AS seconds_per_page,
-
-    -- Arbitrarily >97% is Complete
-    CAST(CASE
-        WHEN percentage > 97.0 THEN 100.0
-        WHEN percentage IS NULL THEN 0.0
-        ELSE percentage
-    END AS REAL) AS percentage
-
-FROM documents
-LEFT JOIN true_progress ON true_progress.document_id = documents.id
+    docs.id,
+    docs.title,
+    docs.author,
+    docs.description,
+    docs.isbn10,
+    docs.isbn13,
+    docs.filepath,
+    docs.words,
+
+    CAST(COALESCE(dus.wpm, 0.0) AS INTEGER) AS wpm,
+    COALESCE(dus.page, 0) AS page,
+    COALESCE(dus.pages, 0) AS pages,
+    COALESCE(dus.read_pages, 0) AS read_pages,
+    COALESCE(dus.total_time_seconds, 0) AS total_time_seconds,
+    DATETIME(COALESCE(dus.last_read, "1970-01-01"), users.time_offset)
+        AS last_read,
+    CASE
+        WHEN dus.percentage > 97.0 THEN 100.0
+        WHEN dus.percentage IS NULL THEN 0.0
+        ELSE dus.percentage
+    END AS percentage,
+    CAST(CASE
+        WHEN dus.total_time_seconds IS NULL THEN 0.0
+        ELSE
+            CAST(dus.total_time_seconds AS REAL)
+            / CAST(dus.read_pages AS REAL)
+    END AS INTEGER) AS seconds_per_page
+FROM documents AS docs
 LEFT JOIN users ON users.id = ?1
-WHERE documents.id = ?2
-ORDER BY true_progress.last_read DESC, documents.created_at DESC
+LEFT JOIN
+    document_user_statistics AS dus
+    ON dus.document_id = docs.id AND dus.user_id = ?1
+WHERE users.id = ?1
+AND docs.id = ?2
 LIMIT 1
 `
@@ -600,31 +603,21 @@ type GetDocumentWithStatsParams struct {
 
 type GetDocumentWithStatsRow struct {
     ID               string      `json:"id"`
-    Md5              *string     `json:"md5"`
-    Filepath         *string     `json:"filepath"`
-    Coverfile        *string     `json:"coverfile"`
     Title            *string     `json:"title"`
     Author           *string     `json:"author"`
-    Series           *string     `json:"series"`
-    SeriesIndex      *int64      `json:"series_index"`
-    Lang             *string     `json:"lang"`
     Description      *string     `json:"description"`
-    Words            *int64      `json:"words"`
-    Gbid             *string     `json:"gbid"`
-    Olid             *string     `json:"-"`
     Isbn10           *string     `json:"isbn10"`
     Isbn13           *string     `json:"isbn13"`
-    Synced           bool        `json:"-"`
-    Deleted          bool        `json:"-"`
-    UpdatedAt        time.Time   `json:"updated_at"`
-    CreatedAt        time.Time   `json:"created_at"`
+    Filepath         *string     `json:"filepath"`
+    Words            *int64      `json:"words"`
+    Wpm              int64       `json:"wpm"`
     Page             int64       `json:"page"`
     Pages            int64       `json:"pages"`
-    TotalTimeSeconds int64       `json:"total_time_seconds"`
-    LastRead         string      `json:"last_read"`
     ReadPages        int64       `json:"read_pages"`
+    TotalTimeSeconds int64       `json:"total_time_seconds"`
+    LastRead         interface{} `json:"last_read"`
+    Percentage       interface{} `json:"percentage"`
     SecondsPerPage   int64       `json:"seconds_per_page"`
-    Percentage       float64     `json:"percentage"`
 }
 
 func (q *Queries) GetDocumentWithStats(ctx context.Context, arg GetDocumentWithStatsParams) (GetDocumentWithStatsRow, error) {
@@ -632,31 +625,21 @@ func (q *Queries) GetDocumentWithStats(ctx context.Context, arg GetDocumentWithS
     var i GetDocumentWithStatsRow
     err := row.Scan(
         &i.ID,
-        &i.Md5,
-        &i.Filepath,
-        &i.Coverfile,
         &i.Title,
         &i.Author,
-        &i.Series,
-        &i.SeriesIndex,
-        &i.Lang,
         &i.Description,
-        &i.Words,
-        &i.Gbid,
-        &i.Olid,
         &i.Isbn10,
         &i.Isbn13,
-        &i.Synced,
-        &i.Deleted,
-        &i.UpdatedAt,
-        &i.CreatedAt,
+        &i.Filepath,
+        &i.Words,
+        &i.Wpm,
         &i.Page,
         &i.Pages,
+        &i.ReadPages,
         &i.TotalTimeSeconds,
         &i.LastRead,
-        &i.ReadPages,
-        &i.SecondsPerPage,
         &i.Percentage,
+        &i.SecondsPerPage,
     )
     return i, err
 }
@@ -717,38 +700,43 @@ func (q *Queries) GetDocuments(ctx context.Context, arg GetDocumentsParams) ([]D
 }
 
 const getDocumentsWithStats = `-- name: GetDocumentsWithStats :many
-WITH true_progress AS (
-    SELECT
-        start_time AS last_read,
-        SUM(duration) AS total_time_seconds,
-        document_id,
-        page,
-        pages,
-        ROUND(CAST(page AS REAL) / CAST(pages AS REAL) * 100, 2) AS percentage
-    FROM activity
-    WHERE user_id = ?1
-    GROUP BY document_id
-    HAVING MAX(start_time)
-)
 SELECT
-    documents.id, documents.md5, documents.filepath, documents.coverfile, documents.title, documents.author, documents.series, documents.series_index, documents.lang, documents.description, documents.words, documents.gbid, documents.olid, documents.isbn10, documents.isbn13, documents.synced, documents.deleted, documents.updated_at, documents.created_at,
-
-    CAST(IFNULL(page, 0) AS INTEGER) AS page,
-    CAST(IFNULL(pages, 0) AS INTEGER) AS pages,
-    CAST(IFNULL(total_time_seconds, 0) AS INTEGER) AS total_time_seconds,
-    CAST(DATETIME(IFNULL(last_read, "1970-01-01"), time_offset) AS TEXT) AS last_read,
-
-    CAST(CASE
-        WHEN percentage > 97.0 THEN 100.0
-        WHEN percentage IS NULL THEN 0.0
-        ELSE percentage
-    END AS REAL) AS percentage
-
-FROM documents
-LEFT JOIN true_progress ON true_progress.document_id = documents.id
+    docs.id,
+    docs.title,
+    docs.author,
+    docs.description,
+    docs.isbn10,
+    docs.isbn13,
+    docs.filepath,
+    docs.words,
+
+    CAST(COALESCE(dus.wpm, 0.0) AS INTEGER) AS wpm,
+    COALESCE(dus.page, 0) AS page,
+    COALESCE(dus.pages, 0) AS pages,
+    COALESCE(dus.read_pages, 0) AS read_pages,
+    COALESCE(dus.total_time_seconds, 0) AS total_time_seconds,
+    DATETIME(COALESCE(dus.last_read, "1970-01-01"), users.time_offset)
+        AS last_read,
+    CASE
+        WHEN dus.percentage > 97.0 THEN 100.0
+        WHEN dus.percentage IS NULL THEN 0.0
+        ELSE dus.percentage
+    END AS percentage,
+    CASE
+        WHEN dus.total_time_seconds IS NULL THEN 0.0
+        ELSE
+            ROUND(
+                CAST(dus.total_time_seconds AS REAL)
+                / CAST(dus.read_pages AS REAL)
+            )
+    END AS seconds_per_page
+FROM documents AS docs
 LEFT JOIN users ON users.id = ?1
-WHERE documents.deleted == false
-ORDER BY true_progress.last_read DESC, documents.created_at DESC
+LEFT JOIN
+    document_user_statistics AS dus
+    ON dus.document_id = docs.id AND dus.user_id = ?1
+WHERE docs.deleted = false
+ORDER BY dus.last_read DESC, docs.created_at DESC
 LIMIT ?3
 OFFSET ?2
 `
@@ -761,29 +749,21 @@ type GetDocumentsWithStatsParams struct {
 
 type GetDocumentsWithStatsRow struct {
     ID               string      `json:"id"`
-    Md5              *string     `json:"md5"`
-    Filepath         *string     `json:"filepath"`
-    Coverfile        *string     `json:"coverfile"`
     Title            *string     `json:"title"`
     Author           *string     `json:"author"`
-    Series           *string     `json:"series"`
-    SeriesIndex      *int64      `json:"series_index"`
-    Lang             *string     `json:"lang"`
     Description      *string     `json:"description"`
-    Words            *int64      `json:"words"`
-    Gbid             *string     `json:"gbid"`
-    Olid             *string     `json:"-"`
     Isbn10           *string     `json:"isbn10"`
     Isbn13           *string     `json:"isbn13"`
-    Synced           bool        `json:"-"`
-    Deleted          bool        `json:"-"`
-    UpdatedAt        time.Time   `json:"updated_at"`
-    CreatedAt        time.Time   `json:"created_at"`
+    Filepath         *string     `json:"filepath"`
+    Words            *int64      `json:"words"`
+    Wpm              int64       `json:"wpm"`
     Page             int64       `json:"page"`
     Pages            int64       `json:"pages"`
+    ReadPages        int64       `json:"read_pages"`
     TotalTimeSeconds int64       `json:"total_time_seconds"`
-    LastRead         string      `json:"last_read"`
-    Percentage       float64     `json:"percentage"`
+    LastRead         interface{} `json:"last_read"`
+    Percentage       interface{} `json:"percentage"`
+    SecondsPerPage   interface{} `json:"seconds_per_page"`
 }
 
 func (q *Queries) GetDocumentsWithStats(ctx context.Context, arg GetDocumentsWithStatsParams) ([]GetDocumentsWithStatsRow, error) {
@@ -797,29 +777,21 @@ func (q *Queries) GetDocumentsWithStats(ctx context.Context, arg GetDocumentsWit
         var i GetDocumentsWithStatsRow
         if err := rows.Scan(
             &i.ID,
-            &i.Md5,
-            &i.Filepath,
-            &i.Coverfile,
             &i.Title,
             &i.Author,
-            &i.Series,
-            &i.SeriesIndex,
-            &i.Lang,
             &i.Description,
-            &i.Words,
-            &i.Gbid,
-            &i.Olid,
             &i.Isbn10,
             &i.Isbn13,
-            &i.Synced,
-            &i.Deleted,
-            &i.UpdatedAt,
-            &i.CreatedAt,
+            &i.Filepath,
+            &i.Words,
+            &i.Wpm,
             &i.Page,
             &i.Pages,
+            &i.ReadPages,
             &i.TotalTimeSeconds,
             &i.LastRead,
             &i.Percentage,
+            &i.SecondsPerPage,
         ); err != nil {
             return nil, err
         }
@@ -978,105 +950,41 @@ func (q *Queries) GetUser(ctx context.Context, userID string) (User, error) {
     return i, err
 }
 
-const getUserWindowStreaks = `-- name: GetUserWindowStreaks :one
-WITH document_windows AS (
-    SELECT
-        CASE
-            WHEN ?2 = "WEEK" THEN DATE(start_time, time_offset, 'weekday 0', '-7 day')
-            WHEN ?2 = "DAY" THEN DATE(start_time, time_offset)
-        END AS read_window,
-        time_offset
-    FROM activity
-    JOIN users ON users.id = activity.user_id
-    WHERE user_id = ?1
-    AND CAST(?2 AS TEXT) = CAST(?2 AS TEXT)
-    GROUP BY read_window
-),
-partitions AS (
-    SELECT
-        document_windows.read_window, document_windows.time_offset,
-        row_number() OVER (
-            PARTITION BY 1 ORDER BY read_window DESC
-        ) AS seqnum
-    FROM document_windows
-),
-streaks AS (
-    SELECT
-        COUNT(*) AS streak,
-        MIN(read_window) AS start_date,
-        MAX(read_window) AS end_date,
-        time_offset
-    FROM partitions
-    GROUP BY
-        CASE
-            WHEN ?2 = "DAY" THEN DATE(read_window, '+' || seqnum || ' day')
-            WHEN ?2 = "WEEK" THEN DATE(read_window, '+' || (seqnum * 7) || ' day')
-        END,
-        time_offset
-    ORDER BY end_date DESC
-),
-max_streak AS (
-    SELECT
-        MAX(streak) AS max_streak,
-        start_date AS max_streak_start_date,
-        end_date AS max_streak_end_date
-    FROM streaks
-    LIMIT 1
-),
-current_streak AS (
-    SELECT
-        streak AS current_streak,
-        start_date AS current_streak_start_date,
-        end_date AS current_streak_end_date
-    FROM streaks
-    WHERE CASE
-        WHEN ?2 = "WEEK" THEN
-            DATE('now', time_offset, 'weekday 0', '-14 day') = current_streak_end_date
-            OR DATE('now', time_offset, 'weekday 0', '-7 day') = current_streak_end_date
-        WHEN ?2 = "DAY" THEN
-            DATE('now', time_offset, '-1 day') = current_streak_end_date
-            OR DATE('now', time_offset) = current_streak_end_date
-    END
-    LIMIT 1
-)
-SELECT
-    CAST(IFNULL(max_streak, 0) AS INTEGER) AS max_streak,
-    CAST(IFNULL(max_streak_start_date, "N/A") AS TEXT) AS max_streak_start_date,
-    CAST(IFNULL(max_streak_end_date, "N/A") AS TEXT) AS max_streak_end_date,
-    IFNULL(current_streak, 0) AS current_streak,
-    CAST(IFNULL(current_streak_start_date, "N/A") AS TEXT) AS current_streak_start_date,
-    CAST(IFNULL(current_streak_end_date, "N/A") AS TEXT) AS current_streak_end_date
-FROM max_streak
-LEFT JOIN current_streak ON 1 = 1
-LIMIT 1
+const getUserStreaks = `-- name: GetUserStreaks :many
+SELECT user_id, "window", max_streak, max_streak_start_date, max_streak_end_date, current_streak, current_streak_start_date, current_streak_end_date FROM user_streaks
+WHERE user_id = ?1
 `
 
-type GetUserWindowStreaksParams struct {
-    UserID string `json:"user_id"`
-    Window string `json:"window"`
-}
-
-type GetUserWindowStreaksRow struct {
-    MaxStreak              int64       `json:"max_streak"`
-    MaxStreakStartDate     string      `json:"max_streak_start_date"`
-    MaxStreakEndDate       string      `json:"max_streak_end_date"`
-    CurrentStreak          interface{} `json:"current_streak"`
-    CurrentStreakStartDate string      `json:"current_streak_start_date"`
-    CurrentStreakEndDate   string      `json:"current_streak_end_date"`
-}
-
-func (q *Queries) GetUserWindowStreaks(ctx context.Context, arg GetUserWindowStreaksParams) (GetUserWindowStreaksRow, error) {
-    row := q.db.QueryRowContext(ctx, getUserWindowStreaks, arg.UserID, arg.Window)
-    var i GetUserWindowStreaksRow
-    err := row.Scan(
+func (q *Queries) GetUserStreaks(ctx context.Context, userID string) ([]UserStreak, error) {
+    rows, err := q.db.QueryContext(ctx, getUserStreaks, userID)
+    if err != nil {
+        return nil, err
+    }
+    defer rows.Close()
+    var items []UserStreak
+    for rows.Next() {
+        var i UserStreak
+        if err := rows.Scan(
+            &i.UserID,
+            &i.Window,
             &i.MaxStreak,
             &i.MaxStreakStartDate,
             &i.MaxStreakEndDate,
             &i.CurrentStreak,
             &i.CurrentStreakStartDate,
             &i.CurrentStreakEndDate,
-    )
-    return i, err
+        ); err != nil {
+            return nil, err
+        }
+        items = append(items, i)
+    }
+    if err := rows.Close(); err != nil {
+        return nil, err
+    }
+    if err := rows.Err(); err != nil {
+        return nil, err
+    }
+    return items, nil
 }
 
 const getUsers = `-- name: GetUsers :many
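Because the streak computation now lives in the cached user_streaks table, the regenerated GetUserStreaks is a cheap keyed lookup that returns one row per window ("DAY" and "WEEK") instead of recomputing windows per request. A minimal caller sketch, under the same assumptions as earlier (the generated package is referred to as database; names are illustrative):

// Sketch: fetch both streak windows for a user and index them by window.
func loadStreaks(ctx context.Context, queries *database.Queries, userID string) (map[string]database.UserStreak, error) {
    streaks, err := queries.GetUserStreaks(ctx, userID)
    if err != nil {
        return nil, err
    }
    byWindow := make(map[string]database.UserStreak, len(streaks))
    for _, s := range streaks {
        byWindow[s.Window] = s // "DAY" or "WEEK"
    }
    return byWindow, nil
}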
@@ -1129,6 +1037,54 @@ func (q *Queries) GetUsers(ctx context.Context, arg GetUsersParams) ([]User, err
     return items, nil
 }
 
+const getWPMLeaderboard = `-- name: GetWPMLeaderboard :many
+SELECT
+    user_id,
+    CAST(SUM(words_read) AS INTEGER) AS total_words_read,
+    CAST(SUM(total_time_seconds) AS INTEGER) AS total_seconds,
+    ROUND(CAST(SUM(words_read) AS REAL) / (SUM(total_time_seconds) / 60.0), 2)
+        AS wpm
+FROM document_user_statistics
+WHERE words_read > 0
+GROUP BY user_id
+ORDER BY wpm DESC
+`
+
+type GetWPMLeaderboardRow struct {
+    UserID         string  `json:"user_id"`
+    TotalWordsRead int64   `json:"total_words_read"`
+    TotalSeconds   int64   `json:"total_seconds"`
+    Wpm            float64 `json:"wpm"`
+}
+
+func (q *Queries) GetWPMLeaderboard(ctx context.Context) ([]GetWPMLeaderboardRow, error) {
+    rows, err := q.db.QueryContext(ctx, getWPMLeaderboard)
+    if err != nil {
+        return nil, err
+    }
+    defer rows.Close()
+    var items []GetWPMLeaderboardRow
+    for rows.Next() {
+        var i GetWPMLeaderboardRow
+        if err := rows.Scan(
+            &i.UserID,
+            &i.TotalWordsRead,
+            &i.TotalSeconds,
+            &i.Wpm,
+        ); err != nil {
+            return nil, err
+        }
+        items = append(items, i)
+    }
+    if err := rows.Close(); err != nil {
+        return nil, err
+    }
+    if err := rows.Err(); err != nil {
+        return nil, err
+    }
+    return items, nil
+}
+
 const getWantedDocuments = `-- name: GetWantedDocuments :many
 SELECT
     CAST(value AS TEXT) AS id,
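The leaderboard query aggregates the cached per-document statistics, so each user's rate is total words read divided by total minutes: wpm = SUM(words_read) / (SUM(total_time_seconds) / 60). For example, 90,000 words over 18,000 seconds (300 minutes) comes out to 300 WPM. A minimal sketch of rendering the result, under the same assumption that the generated package is available as database:

// Sketch: print the leaderboard rows returned by the generated query.
func printLeaderboard(ctx context.Context, queries *database.Queries) error {
    rows, err := queries.GetWPMLeaderboard(ctx)
    if err != nil {
        return err
    }
    for rank, row := range rows {
        fmt.Printf("%d. %s: %.2f WPM (%d words / %d s)\n",
            rank+1, row.UserID, row.Wpm, row.TotalWordsRead, row.TotalSeconds)
    }
    return nil
}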
@@ -1327,27 +1283,35 @@ func (q *Queries) UpdateUser(ctx context.Context, arg UpdateUserParams) (User, e
 }
 
 const upsertDevice = `-- name: UpsertDevice :one
-INSERT INTO devices (id, user_id, device_name)
-VALUES (?, ?, ?)
+INSERT INTO devices (id, user_id, last_synced, device_name)
+VALUES (?, ?, ?, ?)
 ON CONFLICT DO UPDATE
 SET
-    device_name = COALESCE(excluded.device_name, device_name)
-RETURNING id, user_id, device_name, created_at, sync
+    device_name = COALESCE(excluded.device_name, device_name),
+    last_synced = COALESCE(excluded.last_synced, last_synced)
+RETURNING id, user_id, device_name, last_synced, created_at, sync
 `
 
 type UpsertDeviceParams struct {
     ID         string    `json:"id"`
     UserID     string    `json:"user_id"`
+    LastSynced time.Time `json:"last_synced"`
     DeviceName string    `json:"device_name"`
 }
 
 func (q *Queries) UpsertDevice(ctx context.Context, arg UpsertDeviceParams) (Device, error) {
-    row := q.db.QueryRowContext(ctx, upsertDevice, arg.ID, arg.UserID, arg.DeviceName)
+    row := q.db.QueryRowContext(ctx, upsertDevice,
+        arg.ID,
+        arg.UserID,
+        arg.LastSynced,
+        arg.DeviceName,
+    )
     var i Device
     err := row.Scan(
         &i.ID,
         &i.UserID,
         &i.DeviceName,
+        &i.LastSynced,
         &i.CreatedAt,
         &i.Sync,
     )
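UpsertDevice now also carries the device's last_synced timestamp, COALESCEd on conflict so a later sync refreshes the timestamp without clobbering an existing device name. A caller sketch, same assumptions as above (generated package referred to as database, names illustrative):

// Sketch: register (or refresh) a device at sync time.
func touchDevice(ctx context.Context, queries *database.Queries, id, userID, name string) (database.Device, error) {
    return queries.UpsertDevice(ctx, database.UpsertDeviceParams{
        ID:         id,
        UserID:     userID,
        LastSynced: time.Now().UTC(),
        DeviceName: name,
    })
}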
@@ -1,6 +1,10 @@
 PRAGMA foreign_keys = ON;
 PRAGMA journal_mode = WAL;
 
+---------------------------------------------------------------
+------------------------ Normal Tables ------------------------
+---------------------------------------------------------------
+
 -- Authentication
 CREATE TABLE IF NOT EXISTS users (
     id TEXT NOT NULL PRIMARY KEY,
@@ -64,27 +68,13 @@ CREATE TABLE IF NOT EXISTS devices (
     user_id TEXT NOT NULL,
 
     device_name TEXT NOT NULL,
+    last_synced DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
     created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
     sync BOOLEAN NOT NULL DEFAULT 1 CHECK (sync IN (0, 1)),
 
     FOREIGN KEY (user_id) REFERENCES users (id)
 );
 
--- Document Device Sync
-CREATE TABLE IF NOT EXISTS document_device_sync (
-    user_id TEXT NOT NULL,
-    document_id TEXT NOT NULL,
-    device_id TEXT NOT NULL,
-
-    last_synced DATETIME NOT NULL,
-    sync BOOLEAN NOT NULL DEFAULT 1 CHECK (sync IN (0, 1)),
-
-    FOREIGN KEY (user_id) REFERENCES users (id),
-    FOREIGN KEY (document_id) REFERENCES documents (id),
-    FOREIGN KEY (device_id) REFERENCES devices (id),
-    PRIMARY KEY (user_id, document_id, device_id)
-);
-
 -- User Document Progress
 CREATE TABLE IF NOT EXISTS document_progress (
     user_id TEXT NOT NULL,
@@ -101,17 +91,17 @@ CREATE TABLE IF NOT EXISTS document_progress (
     PRIMARY KEY (user_id, document_id, device_id)
 );
 
--- Read Activity
-CREATE TABLE IF NOT EXISTS activity (
+-- Raw Read Activity
+CREATE TABLE IF NOT EXISTS raw_activity (
     id INTEGER PRIMARY KEY AUTOINCREMENT,
     user_id TEXT NOT NULL,
     document_id TEXT NOT NULL,
     device_id TEXT NOT NULL,
 
     start_time DATETIME NOT NULL,
-    duration INTEGER NOT NULL,
     page INTEGER NOT NULL,
     pages INTEGER NOT NULL,
+    duration INTEGER NOT NULL,
     created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
 
     FOREIGN KEY (user_id) REFERENCES users (id),
@@ -119,23 +109,71 @@ CREATE TABLE IF NOT EXISTS activity (
     FOREIGN KEY (device_id) REFERENCES devices (id)
 );
 
--- Indexes
-CREATE INDEX IF NOT EXISTS activity_start_time ON activity (start_time);
-CREATE INDEX IF NOT EXISTS activity_user_id_document_id ON activity (
+---------------------------------------------------------------
+----------------------- Temporary Tables ----------------------
+---------------------------------------------------------------
+
+-- Temporary Activity Table (Cached from View)
+CREATE TEMPORARY TABLE IF NOT EXISTS activity (
+    user_id TEXT NOT NULL,
+    document_id TEXT NOT NULL,
+    device_id TEXT NOT NULL,
+
+    created_at DATETIME NOT NULL,
+    start_time DATETIME NOT NULL,
+    page INTEGER NOT NULL,
+    pages INTEGER NOT NULL,
+    duration INTEGER NOT NULL
+);
+
+-- Temporary User Streaks Table (Cached from View)
+CREATE TEMPORARY TABLE IF NOT EXISTS user_streaks (
+    user_id TEXT NOT NULL,
+    window TEXT NOT NULL,
+
+    max_streak INTEGER NOT NULL,
+    max_streak_start_date TEXT NOT NULL,
+    max_streak_end_date TEXT NOT NULL,
+
+    current_streak INTEGER NOT NULL,
+    current_streak_start_date TEXT NOT NULL,
+    current_streak_end_date TEXT NOT NULL
+);
+
+CREATE TEMPORARY TABLE IF NOT EXISTS document_user_statistics (
+    document_id TEXT NOT NULL,
+    user_id TEXT NOT NULL,
+    last_read TEXT NOT NULL,
+    page INTEGER NOT NULL,
+    pages INTEGER NOT NULL,
+    total_time_seconds INTEGER NOT NULL,
+    read_pages INTEGER NOT NULL,
+    percentage REAL NOT NULL,
+    words_read INTEGER NOT NULL,
+    wpm REAL NOT NULL
+);
+
+---------------------------------------------------------------
+--------------------------- Indexes ---------------------------
+---------------------------------------------------------------
+
+CREATE INDEX IF NOT EXISTS temp.activity_start_time ON activity (start_time);
+CREATE INDEX IF NOT EXISTS temp.activity_user_id ON activity (user_id);
+CREATE INDEX IF NOT EXISTS temp.activity_user_id_document_id ON activity (
     user_id,
     document_id
 );
 
--- Update Trigger
-CREATE TRIGGER IF NOT EXISTS update_documents_updated_at
-BEFORE UPDATE ON documents BEGIN
-UPDATE documents
-SET updated_at = CURRENT_TIMESTAMP
-WHERE id = old.id;
-END;
-
--- Rescaled Activity View (Adapted from KOReader)
-CREATE VIEW IF NOT EXISTS rescaled_activity AS
+---------------------------------------------------------------
+---------------------------- Views ----------------------------
+---------------------------------------------------------------
+
+--------------------------------
+------- Rescaled Activity ------
+--------------------------------
+
+CREATE VIEW IF NOT EXISTS view_rescaled_activity AS
 
 WITH RECURSIVE nums (idx) AS (
     SELECT 1 AS idx
@@ -150,7 +188,7 @@ current_pages AS (
         document_id,
         user_id,
         pages
-    FROM activity
+    FROM raw_activity
     GROUP BY document_id, user_id
     HAVING MAX(start_time)
     ORDER BY start_time DESC
@@ -158,33 +196,33 @@ current_pages AS (
 
 intermediate AS (
     SELECT
-        activity.document_id,
-        activity.device_id,
-        activity.user_id,
-        activity.start_time,
-        activity.duration,
-        activity.page,
+        raw_activity.document_id,
+        raw_activity.device_id,
+        raw_activity.user_id,
+        raw_activity.created_at,
+        raw_activity.start_time,
+        raw_activity.duration,
+        raw_activity.page,
         current_pages.pages,
 
         -- Derive first page
-        ((activity.page - 1) * current_pages.pages) / activity.pages
+        ((raw_activity.page - 1) * current_pages.pages) / raw_activity.pages
         + 1 AS first_page,
 
         -- Derive last page
         MAX(
-            ((activity.page - 1) * current_pages.pages)
-            / activity.pages
+            ((raw_activity.page - 1) * current_pages.pages)
+            / raw_activity.pages
             + 1,
-            (activity.page * current_pages.pages) / activity.pages
+            (raw_activity.page * current_pages.pages) / raw_activity.pages
         ) AS last_page
 
-    FROM activity
+    FROM raw_activity
     INNER JOIN current_pages ON
-        current_pages.document_id = activity.document_id
-        AND current_pages.user_id = activity.user_id
+        current_pages.document_id = raw_activity.document_id
+        AND current_pages.user_id = raw_activity.user_id
 ),
 
--- Improves performance
 num_limit AS (
     SELECT * FROM nums
     LIMIT (SELECT MAX(last_page - first_page + 1) FROM intermediate)
@@ -192,28 +230,30 @@ num_limit AS (
 
 rescaled_raw AS (
     SELECT
-        document_id,
-        device_id,
-        user_id,
-        start_time,
-        last_page,
-        pages,
-        first_page + num_limit.idx - 1 AS page,
-        duration / (
-            last_page - first_page + 1.0
+        intermediate.document_id,
+        intermediate.device_id,
+        intermediate.user_id,
+        intermediate.created_at,
+        intermediate.start_time,
+        intermediate.last_page,
+        intermediate.pages,
+        intermediate.first_page + num_limit.idx - 1 AS page,
+        intermediate.duration / (
+            intermediate.last_page - intermediate.first_page + 1.0
         ) AS duration
     FROM intermediate
-    JOIN num_limit ON
-        num_limit.idx <= (last_page - first_page + 1)
+    LEFT JOIN num_limit ON
+        num_limit.idx <= (intermediate.last_page - intermediate.first_page + 1)
 )
 
 SELECT
+    user_id,
     document_id,
     device_id,
-    user_id,
+    created_at,
     start_time,
-    pages,
     page,
+    pages,
 
     -- Round up if last page (maintains total duration)
     CAST(CASE
@@ -222,3 +262,174 @@ SELECT
         ELSE duration
     END AS INTEGER) AS duration
 FROM rescaled_raw;
+
+--------------------------------
+--------- User Streaks ---------
+--------------------------------
+
+CREATE VIEW IF NOT EXISTS view_user_streaks AS
+
+WITH document_windows AS (
+    SELECT
+        activity.user_id,
+        users.time_offset,
+        DATE(
+            activity.start_time,
+            users.time_offset,
+            'weekday 0', '-7 day'
+        ) AS weekly_read,
+        DATE(activity.start_time, users.time_offset) AS daily_read
+    FROM raw_activity AS activity
+    LEFT JOIN users ON users.id = activity.user_id
+    GROUP BY activity.user_id, weekly_read, daily_read
+),
+
+weekly_partitions AS (
+    SELECT
+        user_id,
+        time_offset,
+        'WEEK' AS "window",
+        weekly_read AS read_window,
+        row_number() OVER (
+            PARTITION BY user_id ORDER BY weekly_read DESC
+        ) AS seqnum
+    FROM document_windows
+    GROUP BY user_id, weekly_read
+),
+
+daily_partitions AS (
+    SELECT
+        user_id,
+        time_offset,
+        'DAY' AS "window",
+        daily_read AS read_window,
+        row_number() OVER (
+            PARTITION BY user_id ORDER BY daily_read DESC
+        ) AS seqnum
+    FROM document_windows
+    GROUP BY user_id, daily_read
+),
+
+streaks AS (
+    SELECT
+        COUNT(*) AS streak,
+        MIN(read_window) AS start_date,
+        MAX(read_window) AS end_date,
+        window,
+        user_id,
+        time_offset
+    FROM daily_partitions
+    GROUP BY
+        time_offset,
+        user_id,
+        DATE(read_window, '+' || seqnum || ' day')
+
+    UNION ALL
+
+    SELECT
+        COUNT(*) AS streak,
+        MIN(read_window) AS start_date,
+        MAX(read_window) AS end_date,
+        window,
+        user_id,
+        time_offset
+    FROM weekly_partitions
+    GROUP BY
+        time_offset,
+        user_id,
+        DATE(read_window, '+' || (seqnum * 7) || ' day')
+),
+max_streak AS (
+    SELECT
+        MAX(streak) AS max_streak,
+        start_date AS max_streak_start_date,
+        end_date AS max_streak_end_date,
+        window,
+        user_id
+    FROM streaks
+    GROUP BY user_id, window
+),
+current_streak AS (
+    SELECT
+        streak AS current_streak,
+        start_date AS current_streak_start_date,
+        end_date AS current_streak_end_date,
+        window,
+        user_id
+    FROM streaks
+    WHERE CASE
+        WHEN window = "WEEK" THEN
+            DATE('now', time_offset, 'weekday 0', '-14 day') = current_streak_end_date
+            OR DATE('now', time_offset, 'weekday 0', '-7 day') = current_streak_end_date
+        WHEN window = "DAY" THEN
+            DATE('now', time_offset, '-1 day') = current_streak_end_date
+            OR DATE('now', time_offset) = current_streak_end_date
+    END
+    GROUP BY user_id, window
+)
+SELECT
+    max_streak.user_id,
+    max_streak.window,
+    IFNULL(max_streak, 0) AS max_streak,
+    IFNULL(max_streak_start_date, "N/A") AS max_streak_start_date,
+    IFNULL(max_streak_end_date, "N/A") AS max_streak_end_date,
+    IFNULL(current_streak, 0) AS current_streak,
+    IFNULL(current_streak_start_date, "N/A") AS current_streak_start_date,
+    IFNULL(current_streak_end_date, "N/A") AS current_streak_end_date
+FROM max_streak
+LEFT JOIN current_streak ON
+    current_streak.user_id = max_streak.user_id
+    AND current_streak.window = max_streak.window;
+
+--------------------------------
+------- Document Stats ---------
+--------------------------------
+
+CREATE VIEW IF NOT EXISTS view_document_user_statistics AS
+
+WITH true_progress AS (
+    SELECT
+        document_id,
+        user_id,
+        start_time AS last_read,
+        page,
+        pages,
+        SUM(duration) AS total_time_seconds,
+
+        -- Determine Read Pages
+        COUNT(DISTINCT page) AS read_pages,
+
+        -- Derive Percentage of Book
+        ROUND(CAST(page AS REAL) / CAST(pages AS REAL) * 100, 2) AS percentage
+    FROM view_rescaled_activity
+    GROUP BY document_id, user_id
+    HAVING MAX(start_time)
+)
+SELECT
+    true_progress.*,
+    (CAST(COALESCE(documents.words, 0.0) AS REAL) / pages * read_pages)
+        AS words_read,
+    (CAST(COALESCE(documents.words, 0.0) AS REAL) / pages * read_pages)
+        / (total_time_seconds / 60.0) AS wpm
+FROM true_progress
+INNER JOIN documents ON documents.id = true_progress.document_id
+ORDER BY wpm DESC;
+
+---------------------------------------------------------------
+------------------ Populate Temporary Tables ------------------
+---------------------------------------------------------------
+INSERT INTO activity SELECT * FROM view_rescaled_activity;
+INSERT INTO user_streaks SELECT * FROM view_user_streaks;
+INSERT INTO document_user_statistics SELECT * FROM view_document_user_statistics;
+
+---------------------------------------------------------------
+--------------------------- Triggers --------------------------
+---------------------------------------------------------------
+
+-- Update Trigger
+CREATE TRIGGER IF NOT EXISTS update_documents_updated_at
+BEFORE UPDATE ON documents BEGIN
+UPDATE documents
+SET updated_at = CURRENT_TIMESTAMP
+WHERE id = old.id;
+END;
database/update_temp_tables.sql (new file, +8)
@@ -0,0 +1,8 @@
+DELETE FROM activity;
+INSERT INTO activity SELECT * FROM view_rescaled_activity;
+DELETE FROM user_streaks;
+INSERT INTO user_streaks SELECT * FROM view_user_streaks;
+DELETE FROM document_user_statistics;
+INSERT INTO document_user_statistics
+SELECT *
+FROM view_document_user_statistics;
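This is the "heavy query caching" half of the commit: the expensive views (view_rescaled_activity, view_user_streaks, view_document_user_statistics) are materialized into temporary tables at startup, and this script rebuilds those caches. One plausible way to wire it up is sketched below; it is only a sketch under stated assumptions, not the commit's actual code. It assumes the script is embedded next to the schema, that the SQLite driver in use accepts multiple semicolon-separated statements in one Exec call, and that a caller re-runs it after new activity is synced. The package name, embed variable, and function name are all illustrative.

package database // assumed package name for the database/ directory

import (
    "context"
    "database/sql"
    _ "embed"
)

//go:embed update_temp_tables.sql
var updateTempTables string // hypothetical embed of the new script

// refreshCaches re-populates the cached activity, streak, and document
// statistics tables from their backing views (sketch only).
func refreshCaches(ctx context.Context, db *sql.DB) error {
    _, err := db.ExecContext(ctx, updateTempTables)
    return err
}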
go.mod (2 lines changed)
@@ -13,6 +13,7 @@ require (
     github.com/sirupsen/logrus v1.9.3
     github.com/urfave/cli/v2 v2.25.7
     golang.org/x/exp v0.0.0-20230905200255-921286631fa9
+    golang.org/x/net v0.14.0
 )
 
 require (
@@ -43,7 +44,6 @@ require (
     github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 // indirect
     golang.org/x/arch v0.4.0 // indirect
     golang.org/x/crypto v0.12.0 // indirect
-    golang.org/x/net v0.14.0 // indirect
     golang.org/x/sys v0.12.0 // indirect
     golang.org/x/text v0.12.0 // indirect
     google.golang.org/protobuf v1.31.0 // indirect
@@ -79,28 +79,6 @@
         </span>
         <span class="mx-4 text-sm font-normal"> Activity </span>
       </a>
-      <a
-        class="flex items-center justify-start w-full p-2 pl-6 my-2 transition-colors duration-200 border-l-4 {{if eq .RouteName "graphs"}}border-purple-500 dark:text-white{{else}}border-transparent text-gray-400 hover:text-gray-800 dark:hover:text-gray-100{{end}}"
-        href="/graphs"
-      >
-        <span class="text-left">
-          <svg
-            width="20"
-            height="20"
-            viewBox="0 0 24 24"
-            fill="currentColor"
-            xmlns="http://www.w3.org/2000/svg"
-          >
-            <path
-              fill-rule="evenodd"
-              clip-rule="evenodd"
-              d="M3.46447 3.46447C2 4.92893 2 7.28595 2 12C2 16.714 2 19.0711 3.46447 20.5355C4.92893 22 7.28595 22 12 22C16.714 22 19.0711 22 20.5355 20.5355C22 19.0711 22 16.714 22 12C22 7.28595 22 4.92893 20.5355 3.46447C19.0711 2 16.714 2 12 2C7.28595 2 4.92893 2 3.46447 3.46447ZM17.5762 10.4801C17.8413 10.1619 17.7983 9.68901 17.4801 9.42383C17.1619 9.15866 16.689 9.20165 16.4238 9.51986L14.6269 11.6761C14.2562 12.1211 14.0284 12.3915 13.8409 12.5609C13.7539 12.6394 13.7023 12.6708 13.6775 12.6827C13.6725 12.6852 13.6689 12.6866 13.6667 12.6875C13.6667 12.6875 13.6624 12.6858 13.659 12.6842L13.6558 12.6827C13.6311 12.6708 13.5795 12.6394 13.4925 12.5609C13.3049 12.3915 13.0772 12.1211 12.7064 11.6761L12.414 11.3252C12.0855 10.931 11.7894 10.5756 11.5128 10.3258C11.2119 10.0541 10.8328 9.81205 10.3333 9.81205C9.83384 9.81205 9.45478 10.0541 9.15384 10.3258C8.87725 10.5756 8.58113 10.931 8.25267 11.3253L6.42383 13.5199C6.15866 13.8381 6.20165 14.311 6.51986 14.5762C6.83807 14.8413 7.31099 14.7983 7.57617 14.4801L9.37306 12.3239C9.74385 11.8789 9.97155 11.6085 10.1591 11.4391C10.2461 11.3606 10.2977 11.3292 10.3225 11.3173C10.3251 11.316 10.3274 11.315 10.3292 11.3142L10.3333 11.3125C10.3356 11.3134 10.3392 11.3148 10.3442 11.3173C10.3689 11.3292 10.4205 11.3606 10.5075 11.4391C10.6951 11.6085 10.9228 11.8789 11.2936 12.3239L11.586 12.6748C11.9145 13.069 12.2106 13.4244 12.4872 13.6742C12.7881 13.9459 13.1672 14.188 13.6667 14.188C14.1662 14.188 14.5452 13.9459 14.8462 13.6742C15.1228 13.4244 15.4189 13.069 15.7473 12.6748L17.5762 10.4801Z"
-              fill="currentColor"
-            />
-          </svg>
-        </span>
-        <span class="mx-4 text-sm font-normal"> Graphs </span>
-      </a>
     </div>
   </div>
 </div>
@@ -326,13 +326,13 @@
   <div class="text-xs flex">
     <p class="text-gray-400 w-32">Words / Minute</p>
     <p class="font-medium dark:text-white">
-      {{ .Statistics.WordsPerMinute }}
+      {{ .Data.Wpm }}
     </p>
   </div>
   <div class="text-xs flex">
     <p class="text-gray-400 w-32">Est. Time Left</p>
     <p class="font-medium dark:text-white whitespace-nowrap">
-      {{ NiceSeconds .Statistics.TotalTimeLeftSeconds }}
+      {{ NiceSeconds .TotalTimeLeftSeconds }}
     </p>
   </div>
 </div>
@@ -1,11 +0,0 @@
-{{template "base.html" .}}
-
-{{define "title"}}Graphs{{end}}
-
-{{define "header"}}
-<a href="./graphs">Graphs</a>
-{{end}}
-
-{{define "content"}}
-<h1>Graphs</h1>
-{{end}}
@@ -151,6 +151,7 @@
 </div>
 
 <div class="grid grid-cols-1 gap-4 my-4 md:grid-cols-2 lg:grid-cols-3">
+  {{ range $item := .Data.Streaks }}
   <div class="w-full">
     <div
       class="relative w-full px-4 py-6 bg-white shadow-lg dark:bg-gray-700 rounded"
@@ -158,11 +159,12 @@
       <p
        class="text-sm font-semibold text-gray-700 border-b border-gray-200 w-max dark:text-white dark:border-gray-500"
      >
-        Daily Read Streak
+        {{ if eq $item.Window "WEEK" }} Weekly Read Streak {{ else }} Daily Read
+        Streak {{ end }}
      </p>
      <div class="flex items-end my-6 space-x-2">
        <p class="text-5xl font-bold text-black dark:text-white">
-          {{ .Data.DailyStreak.CurrentStreak }}
+          {{ $item.CurrentStreak }}
        </p>
      </div>
      <div class="dark:text-white">
@@ -170,76 +172,73 @@
          class="flex items-center justify-between pb-2 mb-2 text-sm border-b border-gray-200"
        >
          <div>
-            <p>Current Daily Streak</p>
+            <p>
+              {{ if eq $item.Window "WEEK" }} Current Weekly Streak {{ else }}
+              Current Daily Streak {{ end }}
+            </p>
            <div class="flex items-end text-sm text-gray-400">
-              {{ .Data.DailyStreak.CurrentStreakStartDate }} ➞ {{
-              .Data.DailyStreak.CurrentStreakEndDate }}
+              {{ $item.CurrentStreakStartDate }} ➞ {{ $item.CurrentStreakEndDate
+              }}
            </div>
          </div>
-          <div class="flex items-end font-bold">
-            {{ .Data.DailyStreak.CurrentStreak }}
-          </div>
+          <div class="flex items-end font-bold">{{ $item.CurrentStreak }}</div>
        </div>
        <div class="flex items-center justify-between pb-2 mb-2 text-sm">
          <div>
-            <p>Best Daily Streak</p>
+            <p>
+              {{ if eq $item.Window "WEEK" }} Best Weekly Streak {{ else }} Best
+              Daily Streak {{ end }}
+            </p>
            <div class="flex items-end text-sm text-gray-400">
-              {{ .Data.DailyStreak.MaxStreakStartDate }} ➞ {{
-              .Data.DailyStreak.MaxStreakEndDate }}
+              {{ $item.MaxStreakStartDate }} ➞ {{ $item.MaxStreakEndDate }}
            </div>
          </div>
-          <div class="flex items-end font-bold">
-            {{ .Data.DailyStreak.MaxStreak }}
-          </div>
+          <div class="flex items-end font-bold">{{ $item.MaxStreak }}</div>
        </div>
      </div>
    </div>
  </div>
+  {{ end }}
+
+  <div class="w-full">
+    <div
+      class="flex flex-col justify-between h-full w-full px-4 py-6 bg-white shadow-lg dark:bg-gray-700 rounded"
+    >
+      <div>
+        <p
+          class="text-sm font-semibold text-gray-700 border-b border-gray-200 w-max dark:text-white dark:border-gray-500"
+        >
+          WPM Leaderboard
+        </p>
+        <div class="flex items-end my-6 space-x-2">
+          {{ $length := len .Data.WPMLeaderboard }} {{ if eq $length 0 }}
+          <p class="text-5xl font-bold text-black dark:text-white">N/A</p>
+          {{ else }}
+          <p class="text-5xl font-bold text-black dark:text-white">
+            {{ (index .Data.WPMLeaderboard 0).UserID }}
+          </p>
+          {{ end }}
+        </div>
+      </div>
+      <div class="dark:text-white">
+        {{ range $index, $item := .Data.WPMLeaderboard }} {{ if lt $index 3 }}
+        {{ if eq $index 0 }}
+        <div class="flex items-center justify-between pt-2 pb-2 text-sm">
+          {{ else }}
+          <div
+            class="flex items-center justify-between pt-2 pb-2 text-sm border-t border-gray-200"
+          >
+            {{ end }}
+            <div>
+              <p>{{ $item.UserID }}</p>
+            </div>
+            <div class="flex items-end font-bold">{{ $item.Wpm }} WPM</div>
+          </div>
+          {{ end }} {{ end }}
+      </div>
+    </div>
+  </div>
 </div>
 
-<div class="w-full">
-  <div
-    class="relative w-full px-4 py-6 bg-white shadow-lg dark:bg-gray-700 rounded"
-  >
-    <p
-      class="text-sm font-semibold text-gray-700 border-b border-gray-200 w-max dark:text-white dark:border-gray-500"
-    >
-      Weekly Read Streak
-    </p>
-    <div class="flex items-end my-6 space-x-2">
-      <p class="text-5xl font-bold text-black dark:text-white">
-        {{ .Data.WeeklyStreak.CurrentStreak }}
-      </p>
-    </div>
-    <div class="dark:text-white">
-      <div
-        class="flex items-center justify-between pb-2 mb-2 text-sm border-b border-gray-200"
-      >
-        <div>
-          <p>Current Weekly Streak</p>
-          <div class="flex items-end text-sm text-gray-400">
-            {{ .Data.WeeklyStreak.CurrentStreakStartDate }} ➞ {{
-            .Data.WeeklyStreak.CurrentStreakEndDate }}
-          </div>
-        </div>
-        <div class="flex items-end font-bold">
-          {{ .Data.WeeklyStreak.CurrentStreak }}
-        </div>
-      </div>
-      <div class="flex items-center justify-between pb-2 mb-2 text-sm">
-        <div>
-          <p>Best Weekly Streak</p>
-          <div class="flex items-end text-sm text-gray-400">
-            {{ .Data.WeeklyStreak.MaxStreakStartDate }} ➞ {{
-            .Data.WeeklyStreak.MaxStreakEndDate }}
-          </div>
-        </div>
-        <div class="flex items-end font-bold">
-          {{ .Data.WeeklyStreak.MaxStreak }}
-        </div>
-      </div>
-    </div>
-  </div>
-</div>
-
 {{end}}
@@ -201,7 +201,7 @@
   <p>{{ $device.DeviceName }}</p>
 </td>
 <td class="p-3">
-  <p>{{ $device.LastSync }}</p>
+  <p>{{ $device.LastSynced }}</p>
 </td>
 <td class="p-3">
   <p>{{ $device.CreatedAt }}</p>
@@ -56,6 +56,10 @@ func GetUTCOffsets() []UTCOffset {
 }
 
 func NiceSeconds(input int64) (result string) {
+    if input == 0 {
+        return "N/A"
+    }
+
     days := math.Floor(float64(input) / 60 / 60 / 24)
     seconds := input % (60 * 60 * 24)
     hours := math.Floor(float64(seconds) / 60 / 60)
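With the added zero guard, a missing time estimate now renders as "N/A" instead of a formatted zero duration; non-zero inputs fall through to the existing formatting code, whose tail is not shown in this hunk. A sketch of a test covering just the new branch, assuming it lives in the same package as NiceSeconds (the test name is illustrative):

import "testing"

// Sketch: only the behaviour introduced by this hunk is asserted.
func TestNiceSecondsZero(t *testing.T) {
    if got := NiceSeconds(0); got != "N/A" {
        t.Fatalf("NiceSeconds(0) = %q, want %q", got, "N/A")
    }
}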