diff --git a/api/api.go b/api/api.go index 9d6f651..163e7fc 100644 --- a/api/api.go +++ b/api/api.go @@ -53,7 +53,7 @@ func NewApi(db *database.DBManager, c *config.Config) *API { // Configure Cookie Session Store store := cookie.NewStore(newToken) store.Options(sessions.Options{ - MaxAge: 60 * 60 * 24, + MaxAge: 60 * 60 * 24 * 7, Secure: true, HttpOnly: true, SameSite: http.SameSiteStrictMode, @@ -81,7 +81,6 @@ func (api *API) registerWebAppRoutes() { render.AddFromFilesFuncs("login", helperFuncs, "templates/login.html") render.AddFromFilesFuncs("home", helperFuncs, "templates/base.html", "templates/home.html") - render.AddFromFilesFuncs("graphs", helperFuncs, "templates/base.html", "templates/graphs.html") render.AddFromFilesFuncs("settings", helperFuncs, "templates/base.html", "templates/settings.html") render.AddFromFilesFuncs("activity", helperFuncs, "templates/base.html", "templates/activity.html") render.AddFromFilesFuncs("documents", helperFuncs, "templates/base.html", "templates/documents.html") @@ -107,9 +106,6 @@ func (api *API) registerWebAppRoutes() { api.Router.POST("/documents/:document/edit", api.authWebAppMiddleware, api.editDocument) api.Router.POST("/documents/:document/identify", api.authWebAppMiddleware, api.identifyDocument) api.Router.POST("/documents/:document/delete", api.authWebAppMiddleware, api.deleteDocument) - - // TODO - api.Router.GET("/graphs", api.authWebAppMiddleware, baseResourceRoute("graphs")) } func (api *API) registerKOAPIRoutes(apiGroup *gin.RouterGroup) { diff --git a/api/app-routes.go b/api/app-routes.go index d9ee62e..d202e72 100644 --- a/api/app-routes.go +++ b/api/app-routes.go @@ -48,19 +48,6 @@ type requestSettingsEdit struct { TimeOffset *string `form:"time_offset"` } -func baseResourceRoute(template string, args ...map[string]any) func(c *gin.Context) { - variables := gin.H{"RouteName": template} - if len(args) > 0 { - variables = args[0] - } - - return func(c *gin.Context) { - rUser, _ := c.Get("AuthorizedUser") - 
variables["User"] = rUser - c.HTML(http.StatusOK, template, variables) - } -} - func (api *API) webManifest(c *gin.Context) { c.Header("Content-Type", "application/manifest+json") c.File("./assets/manifest.json") @@ -125,18 +112,9 @@ func (api *API) createAppResourcesRoute(routeName string, args ...map[string]any return } - statistics := gin.H{ - "TotalTimeLeftSeconds": (document.Pages - document.Page) * document.SecondsPerPage, - "WordsPerMinute": "N/A", - } - - if document.Words != nil && *document.Words != 0 { - statistics["WordsPerMinute"] = (*document.Words / document.Pages * document.ReadPages) / (document.TotalTimeSeconds / 60.0) - } - templateVars["RelBase"] = "../" templateVars["Data"] = document - templateVars["Statistics"] = statistics + templateVars["TotalTimeLeftSeconds"] = (document.Pages - document.Page) * document.SecondsPerPage } else if routeName == "activity" { activityFilter := database.GetActivityParams{ UserID: userID, @@ -158,39 +136,22 @@ func (api *API) createAppResourcesRoute(routeName string, args ...map[string]any templateVars["Data"] = activity } else if routeName == "home" { - start_time := time.Now() - weekly_streak, err := api.DB.Queries.GetUserWindowStreaks(api.DB.Ctx, database.GetUserWindowStreaksParams{ - UserID: userID, - Window: "WEEK", - }) - if err != nil { - log.Warn("[createAppResourcesRoute] GetUserWindowStreaks DB Error:", err) - } - log.Debug("GetUserWindowStreaks - WEEK - ", time.Since(start_time)) - start_time = time.Now() - - daily_streak, err := api.DB.Queries.GetUserWindowStreaks(api.DB.Ctx, database.GetUserWindowStreaksParams{ - UserID: userID, - Window: "DAY", - }) - if err != nil { - log.Warn("[createAppResourcesRoute] GetUserWindowStreaks DB Error:", err) - } - log.Debug("GetUserWindowStreaks - DAY - ", time.Since(start_time)) - - start_time = time.Now() - database_info, _ := api.DB.Queries.GetDatabaseInfo(api.DB.Ctx, userID) - log.Debug("GetDatabaseInfo - ", time.Since(start_time)) - - start_time = time.Now() + 
start := time.Now() read_graph_data, _ := api.DB.Queries.GetDailyReadStats(api.DB.Ctx, userID) - log.Debug("GetDailyReadStats - ", time.Since(start_time)) + log.Info("GetDailyReadStats Performance: ", time.Since(start)) + + start = time.Now() + database_info, _ := api.DB.Queries.GetDatabaseInfo(api.DB.Ctx, userID) + log.Info("GetDatabaseInfo Performance: ", time.Since(start)) + + streaks, _ := api.DB.Queries.GetUserStreaks(api.DB.Ctx, userID) + wpn_leaderboard, _ := api.DB.Queries.GetWPMLeaderboard(api.DB.Ctx) templateVars["Data"] = gin.H{ - "DailyStreak": daily_streak, - "WeeklyStreak": weekly_streak, - "DatabaseInfo": database_info, - "GraphData": read_graph_data, + "Streaks": streaks, + "GraphData": read_graph_data, + "DatabaseInfo": database_info, + "WPMLeaderboard": wpn_leaderboard, } } else if routeName == "settings" { user, err := api.DB.Queries.GetUser(api.DB.Ctx, userID) @@ -512,17 +473,8 @@ func (api *API) identifyDocument(c *gin.Context) { return } - statistics := gin.H{ - "TotalTimeLeftSeconds": (document.Pages - document.Page) * document.SecondsPerPage, - "WordsPerMinute": "N/A", - } - - if document.Words != nil && *document.Words != 0 { - statistics["WordsPerMinute"] = (*document.Words / document.Pages * document.ReadPages) / (document.TotalTimeSeconds / 60.0) - } - templateVars["Data"] = document - templateVars["Statistics"] = statistics + templateVars["TotalTimeLeftSeconds"] = (document.Pages - document.Page) * document.SecondsPerPage c.HTML(http.StatusOK, "document", templateVars) } diff --git a/api/auth.go b/api/auth.go index 3e8a1a8..a44bc5f 100644 --- a/api/auth.go +++ b/api/auth.go @@ -5,10 +5,12 @@ import ( "fmt" "net/http" "strings" + "time" argon2 "github.com/alexedwards/argon2id" "github.com/gin-contrib/sessions" "github.com/gin-gonic/gin" + log "github.com/sirupsen/logrus" "reichard.io/bbank/database" ) @@ -34,14 +36,16 @@ func (api *API) authorizeCredentials(username string, password string) (authoriz func (api *API) authAPIMiddleware(c 
*gin.Context) { session := sessions.Default(c) - // Utilize Session Token - if authorizedUser := session.Get("authorizedUser"); authorizedUser != nil { - c.Set("AuthorizedUser", authorizedUser) + // Check Session First + if user, ok := getSession(session); ok == true { + c.Set("AuthorizedUser", user) c.Header("Cache-Control", "private") c.Next() return } + // Session Failed -> Check Headers (Allowed on API for KOSync Compatibility) + var rHeader authHeader if err := c.ShouldBindHeader(&rHeader); err != nil { c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Incorrect Headers"}) @@ -57,20 +61,22 @@ func (api *API) authAPIMiddleware(c *gin.Context) { return } - // Set Session Cookie - session.Set("authorizedUser", rHeader.AuthUser) - session.Save() + if err := setSession(session, rHeader.AuthUser); err != nil { + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"}) + return + } c.Set("AuthorizedUser", rHeader.AuthUser) + c.Header("Cache-Control", "private") c.Next() } func (api *API) authWebAppMiddleware(c *gin.Context) { session := sessions.Default(c) - // Utilize Session Token - if authorizedUser := session.Get("authorizedUser"); authorizedUser != nil { - c.Set("AuthorizedUser", authorizedUser) + // Check Session + if user, ok := getSession(session); ok == true { + c.Set("AuthorizedUser", user) c.Header("Cache-Control", "private") c.Next() return @@ -102,12 +108,17 @@ func (api *API) authFormLogin(c *gin.Context) { return } + // Set Session session := sessions.Default(c) + if err := setSession(session, username); err != nil { + c.HTML(http.StatusUnauthorized, "login", gin.H{ + "RegistrationEnabled": api.Config.RegistrationEnabled, + "Error": "Unknown Error", + }) + return + } - // Set Session Cookie - session.Set("authorizedUser", username) - session.Save() - + c.Header("Cache-Control", "private") c.Redirect(http.StatusFound, "/") } @@ -160,12 +171,14 @@ func (api *API) authFormRegister(c *gin.Context) { return } + // Set 
Session session := sessions.Default(c) + if err := setSession(session, username); err != nil { + c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"}) + return + } - // Set Session Cookie - session.Set("authorizedUser", username) - session.Save() - + c.Header("Cache-Control", "private") c.Redirect(http.StatusFound, "/") } @@ -175,3 +188,27 @@ func (api *API) authLogout(c *gin.Context) { session.Save() c.Redirect(http.StatusFound, "/login") } + +func getSession(session sessions.Session) (user string, ok bool) { + // Check Session + authorizedUser := session.Get("authorizedUser") + if authorizedUser == nil { + return "", false + } + + // Refresh + expiresAt := session.Get("expiresAt") + if expiresAt != nil && expiresAt.(int64)-time.Now().Unix() < 60*60*24 { + log.Info("[getSession] Refreshing Session") + setSession(session, authorizedUser.(string)) + } + + return authorizedUser.(string), true +} + +func setSession(session sessions.Session, user string) error { + // Set Session Cookie + session.Set("authorizedUser", user) + session.Set("expiresAt", time.Now().Unix()+(60*60*24*7)) + return session.Save() +} diff --git a/api/ko-routes.go b/api/ko-routes.go index 634be16..60f25d3 100644 --- a/api/ko-routes.go +++ b/api/ko-routes.go @@ -37,6 +37,7 @@ type requestActivity struct { type requestCheckActivitySync struct { DeviceID string `json:"device_id"` + Device string `json:"device"` } type requestDocument struct { @@ -277,6 +278,14 @@ func (api *API) addActivities(c *gin.Context) { return } + // Update Temp Tables + go func() { + log.Info("[addActivities] Caching Temp Tables") + if err := api.DB.CacheTempTables(); err != nil { + log.Warn("[addActivities] CacheTempTables Failure: ", err) + } + }() + c.JSON(http.StatusOK, gin.H{ "added": len(rActivity.Activity), }) @@ -292,6 +301,18 @@ func (api *API) checkActivitySync(c *gin.Context) { return } + // Upsert Device + if _, err := api.DB.Queries.UpsertDevice(api.DB.Ctx, database.UpsertDeviceParams{ + 
ID: rCheckActivity.DeviceID, + UserID: rUser.(string), + DeviceName: rCheckActivity.Device, + LastSynced: time.Now().UTC(), + }); err != nil { + log.Error("[checkActivitySync] UpsertDevice DB Error", err) + c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "Invalid Device"}) + return + } + // Get Last Device Activity lastActivity, err := api.DB.Queries.GetLastActivity(api.DB.Ctx, database.GetLastActivityParams{ UserID: rUser.(string), @@ -385,6 +406,7 @@ func (api *API) checkDocumentsSync(c *gin.Context) { ID: rCheckDocs.DeviceID, UserID: rUser.(string), DeviceName: rCheckDocs.Device, + LastSynced: time.Now().UTC(), }) if err != nil { log.Error("[checkDocumentsSync] UpsertDevice DB Error", err) diff --git a/client/syncninja.koplugin/SyncNinjaClient.lua b/client/syncninja.koplugin/SyncNinjaClient.lua index 4c5c635..059b318 100644 --- a/client/syncninja.koplugin/SyncNinjaClient.lua +++ b/client/syncninja.koplugin/SyncNinjaClient.lua @@ -72,7 +72,8 @@ end -------------- New Functions ------------- ------------------------------------------ -function SyncNinjaClient:check_activity(username, password, device_id, callback) +function SyncNinjaClient:check_activity(username, password, device_id, device, + callback) self.client:reset_middlewares() self.client:enable("Format.JSON") self.client:enable("GinClient") @@ -82,7 +83,10 @@ function SyncNinjaClient:check_activity(username, password, device_id, callback) socketutil:set_timeout(SYNC_TIMEOUTS[1], SYNC_TIMEOUTS[2]) local co = coroutine.create(function() local ok, res = pcall(function() - return self.client:check_activity({device_id = device_id}) + return self.client:check_activity({ + device_id = device_id, + device = device + }) end) if ok then callback(res.status == 200, res.body) diff --git a/client/syncninja.koplugin/api.json b/client/syncninja.koplugin/api.json index 0ef3cc6..3a0596b 100644 --- a/client/syncninja.koplugin/api.json +++ b/client/syncninja.koplugin/api.json @@ -26,8 +26,8 @@ "check_activity": 
{ "path": "/api/ko/syncs/activity", "method": "POST", - "required_params": ["device_id"], - "payload": ["device_id"], + "required_params": ["device_id", "device"], + "payload": ["device_id", "device"], "expected_status": [200, 401] }, "download_document": { diff --git a/client/syncninja.koplugin/main.lua b/client/syncninja.koplugin/main.lua index d91289c..6cccd57 100644 --- a/client/syncninja.koplugin/main.lua +++ b/client/syncninja.koplugin/main.lua @@ -75,7 +75,7 @@ local STATISTICS_ACTIVITY_SINCE_QUERY = [[ JOIN book AS b ON b.id = psd.id_book WHERE start_time > %d - ORDER BY start_time ASC LIMIT 1000; + ORDER BY start_time ASC LIMIT 5000; ]] local STATISTICS_BOOK_QUERY = [[ @@ -615,7 +615,8 @@ function SyncNinja:checkActivity(interactive) service_spec = self.path .. "/api.json" } local ok, err = pcall(client.check_activity, client, self.settings.username, - self.settings.password, self.device_id, callback_func) + self.settings.password, self.device_id, Device.model, + callback_func) end function SyncNinja:uploadActivity(activity_data, interactive) @@ -907,7 +908,7 @@ function SyncNinja:getStatisticsActivity(timestamp) local conn = SQ3.open(statistics_db) local stmt = conn:prepare(string.format(STATISTICS_ACTIVITY_SINCE_QUERY, timestamp)) - local rows = stmt:resultset("i", 1000) + local rows = stmt:resultset("i", 5000) conn:close() -- No Results diff --git a/database/manager.go b/database/manager.go index bceff4e..b8fc36c 100644 --- a/database/manager.go +++ b/database/manager.go @@ -21,6 +21,9 @@ type DBManager struct { //go:embed schema.sql var ddl string +//go:embed update_temp_tables.sql +var tsql string + func NewMgr(c *config.Config) *DBManager { // Create Manager dbm := &DBManager{ @@ -44,22 +47,23 @@ func NewMgr(c *config.Config) *DBManager { log.Fatal("Unsupported Database") } - // Create Tables - if _, err := dbm.DB.ExecContext(dbm.Ctx, ddl); err != nil { - log.Fatal(err) - } - dbm.Queries = New(dbm.DB) return dbm } -func connectHookSQLite(conn 
*sqlite.SQLiteConn) error { - if err := conn.RegisterFunc("test_func", func() string { - return "FOOBAR" - }, false); err != nil { - log.Info("Error Registering Function") +func (dbm *DBManager) CacheTempTables() error { + if _, err := dbm.DB.ExecContext(dbm.Ctx, tsql); err != nil { return err } return nil } + +func connectHookSQLite(conn *sqlite.SQLiteConn) error { + // Create Tables + log.Debug("Creating Schema") + if _, err := conn.Exec(ddl, nil); err != nil { + log.Warn("Create Schema Failure: ", err) + } + return nil +} diff --git a/database/models.go b/database/models.go index fe1f082..cd9a691 100644 --- a/database/models.go +++ b/database/models.go @@ -5,27 +5,28 @@ package database import ( + "database/sql" "time" ) type Activity struct { - ID int64 `json:"id"` UserID string `json:"user_id"` DocumentID string `json:"document_id"` DeviceID string `json:"device_id"` + CreatedAt time.Time `json:"created_at"` StartTime time.Time `json:"start_time"` - Duration int64 `json:"duration"` Page int64 `json:"page"` Pages int64 `json:"pages"` - CreatedAt time.Time `json:"created_at"` + Duration int64 `json:"duration"` } type Device struct { - ID string `json:"id"` - UserID string `json:"user_id"` - DeviceName string `json:"device_name"` - CreatedAt string `json:"created_at"` - Sync bool `json:"sync"` + ID string `json:"id"` + UserID string `json:"user_id"` + DeviceName string `json:"device_name"` + LastSynced time.Time `json:"last_synced"` + CreatedAt string `json:"created_at"` + Sync bool `json:"sync"` } type Document struct { @@ -50,14 +51,6 @@ type Document struct { CreatedAt time.Time `json:"created_at"` } -type DocumentDeviceSync struct { - UserID string `json:"user_id"` - DocumentID string `json:"document_id"` - DeviceID string `json:"device_id"` - LastSynced time.Time `json:"last_synced"` - Sync bool `json:"sync"` -} - type DocumentProgress struct { UserID string `json:"user_id"` DocumentID string `json:"document_id"` @@ -67,6 +60,19 @@ type DocumentProgress 
struct { CreatedAt time.Time `json:"created_at"` } +type DocumentUserStatistic struct { + DocumentID string `json:"document_id"` + UserID string `json:"user_id"` + LastRead string `json:"last_read"` + Page int64 `json:"page"` + Pages int64 `json:"pages"` + TotalTimeSeconds int64 `json:"total_time_seconds"` + ReadPages int64 `json:"read_pages"` + Percentage float64 `json:"percentage"` + WordsRead int64 `json:"words_read"` + Wpm float64 `json:"wpm"` +} + type Metadatum struct { ID int64 `json:"id"` DocumentID string `json:"document_id"` @@ -80,14 +86,16 @@ type Metadatum struct { CreatedAt time.Time `json:"created_at"` } -type RescaledActivity struct { +type RawActivity struct { + ID int64 `json:"id"` + UserID string `json:"user_id"` DocumentID string `json:"document_id"` DeviceID string `json:"device_id"` - UserID string `json:"user_id"` StartTime time.Time `json:"start_time"` - Pages int64 `json:"pages"` Page int64 `json:"page"` + Pages int64 `json:"pages"` Duration int64 `json:"duration"` + CreatedAt time.Time `json:"created_at"` } type User struct { @@ -97,3 +105,49 @@ type User struct { TimeOffset *string `json:"time_offset"` CreatedAt time.Time `json:"created_at"` } + +type UserStreak struct { + UserID string `json:"user_id"` + Window string `json:"window"` + MaxStreak int64 `json:"max_streak"` + MaxStreakStartDate string `json:"max_streak_start_date"` + MaxStreakEndDate string `json:"max_streak_end_date"` + CurrentStreak int64 `json:"current_streak"` + CurrentStreakStartDate string `json:"current_streak_start_date"` + CurrentStreakEndDate string `json:"current_streak_end_date"` +} + +type ViewDocumentUserStatistic struct { + DocumentID string `json:"document_id"` + UserID string `json:"user_id"` + LastRead time.Time `json:"last_read"` + Page int64 `json:"page"` + Pages int64 `json:"pages"` + TotalTimeSeconds sql.NullFloat64 `json:"total_time_seconds"` + ReadPages int64 `json:"read_pages"` + Percentage float64 `json:"percentage"` + WordsRead interface{} 
`json:"words_read"` + Wpm int64 `json:"wpm"` +} + +type ViewRescaledActivity struct { + UserID string `json:"user_id"` + DocumentID string `json:"document_id"` + DeviceID string `json:"device_id"` + CreatedAt time.Time `json:"created_at"` + StartTime time.Time `json:"start_time"` + Page int64 `json:"page"` + Pages int64 `json:"pages"` + Duration int64 `json:"duration"` +} + +type ViewUserStreak struct { + UserID string `json:"user_id"` + Window string `json:"window"` + MaxStreak interface{} `json:"max_streak"` + MaxStreakStartDate interface{} `json:"max_streak_start_date"` + MaxStreakEndDate interface{} `json:"max_streak_end_date"` + CurrentStreak interface{} `json:"current_streak"` + CurrentStreakStartDate interface{} `json:"current_streak_start_date"` + CurrentStreakEndDate interface{} `json:"current_streak_end_date"` +} diff --git a/database/query.sql b/database/query.sql index 2515935..9ed1ac2 100644 --- a/database/query.sql +++ b/database/query.sql @@ -1,3 +1,16 @@ +-- name: AddActivity :one +INSERT INTO raw_activity ( + user_id, + document_id, + device_id, + start_time, + duration, + page, + pages +) +VALUES (?, ?, ?, ?, ?, ?, ?) +RETURNING *; + -- name: AddMetadata :one INSERT INTO metadata ( document_id, @@ -17,10 +30,343 @@ INSERT INTO users (id, pass) VALUES (?, ?) 
ON CONFLICT DO NOTHING; +-- name: DeleteDocument :execrows +UPDATE documents +SET + deleted = 1 +WHERE id = $id; + +-- name: GetActivity :many +WITH filtered_activity AS ( + SELECT + document_id, + user_id, + start_time, + duration, + page, + pages + FROM activity + WHERE + activity.user_id = $user_id + AND ( + ( + CAST($doc_filter AS BOOLEAN) = TRUE + AND document_id = $document_id + ) OR $doc_filter = FALSE + ) + ORDER BY start_time DESC + LIMIT $limit + OFFSET $offset +) + +SELECT + document_id, + CAST(DATETIME(activity.start_time, users.time_offset) AS TEXT) AS start_time, + title, + author, + duration, + page, + pages +FROM filtered_activity AS activity +LEFT JOIN documents ON documents.id = activity.document_id +LEFT JOIN users ON users.id = activity.user_id; + +-- name: GetDailyReadStats :many +WITH RECURSIVE last_30_days AS ( + SELECT DATE('now', time_offset) AS date + FROM users WHERE users.id = $user_id + UNION ALL + SELECT DATE(date, '-1 days') + FROM last_30_days + LIMIT 30 +), +filtered_activity AS ( + SELECT + user_id, + start_time, + duration + FROM activity + WHERE start_time > DATE('now', '-31 days') + AND activity.user_id = $user_id +), +activity_days AS ( + SELECT + SUM(duration) AS seconds_read, + DATE(start_time, time_offset) AS day + FROM filtered_activity AS activity + LEFT JOIN users ON users.id = activity.user_id + GROUP BY day + LIMIT 30 +) +SELECT + CAST(date AS TEXT), + CAST(CASE + WHEN seconds_read IS NULL THEN 0 + ELSE seconds_read / 60 + END AS INTEGER) AS minutes_read +FROM last_30_days +LEFT JOIN activity_days ON activity_days.day == last_30_days.date +ORDER BY date DESC +LIMIT 30; + +-- name: GetDatabaseInfo :one +SELECT + (SELECT COUNT(rowid) FROM activity WHERE activity.user_id = $user_id) AS activity_size, + (SELECT COUNT(rowid) FROM documents) AS documents_size, + (SELECT COUNT(rowid) FROM document_progress WHERE document_progress.user_id = $user_id) AS progress_size, + (SELECT COUNT(rowid) FROM devices WHERE devices.user_id = 
$user_id) AS devices_size +LIMIT 1; + +-- name: GetDeletedDocuments :many +SELECT documents.id +FROM documents +WHERE + documents.deleted = true + AND documents.id IN (sqlc.slice('document_ids')); + +-- name: GetDevice :one +SELECT * FROM devices +WHERE id = $device_id LIMIT 1; + +-- name: GetDevices :many +SELECT + devices.device_name, + CAST(DATETIME(devices.created_at, users.time_offset) AS TEXT) AS created_at, + CAST(DATETIME(devices.last_synced, users.time_offset) AS TEXT) AS last_synced +FROM devices +JOIN users ON users.id = devices.user_id +WHERE users.id = $user_id; + +-- name: GetDocument :one +SELECT * FROM documents +WHERE id = $document_id LIMIT 1; + +-- name: GetDocumentDaysRead :one +WITH document_days AS ( + SELECT DATE(start_time, time_offset) AS dates + FROM activity + JOIN users ON users.id = activity.user_id + WHERE document_id = $document_id + AND user_id = $user_id + GROUP BY dates +) +SELECT CAST(COUNT(*) AS INTEGER) AS days_read +FROM document_days; + +-- name: GetDocumentReadStats :one +SELECT + COUNT(DISTINCT page) AS pages_read, + SUM(duration) AS total_time +FROM activity +WHERE document_id = $document_id +AND user_id = $user_id +AND start_time >= $start_time; + +-- name: GetDocumentReadStatsCapped :one +WITH capped_stats AS ( + SELECT MIN(SUM(duration), CAST($page_duration_cap AS INTEGER)) AS durations + FROM activity + WHERE document_id = $document_id + AND user_id = $user_id + AND start_time >= $start_time + GROUP BY page +) +SELECT + CAST(COUNT(*) AS INTEGER) AS pages_read, + CAST(SUM(durations) AS INTEGER) AS total_time +FROM capped_stats; + +-- name: GetDocumentWithStats :one +SELECT + docs.id, + docs.title, + docs.author, + docs.description, + docs.isbn10, + docs.isbn13, + docs.filepath, + docs.words, + + CAST(COALESCE(dus.wpm, 0.0) AS INTEGER) AS wpm, + COALESCE(dus.page, 0) AS page, + COALESCE(dus.pages, 0) AS pages, + COALESCE(dus.read_pages, 0) AS read_pages, + COALESCE(dus.total_time_seconds, 0) AS total_time_seconds, + 
DATETIME(COALESCE(dus.last_read, "1970-01-01"), users.time_offset) + AS last_read, + CASE + WHEN dus.percentage > 97.0 THEN 100.0 + WHEN dus.percentage IS NULL THEN 0.0 + ELSE dus.percentage + END AS percentage, + CAST(CASE + WHEN dus.total_time_seconds IS NULL THEN 0.0 + ELSE + CAST(dus.total_time_seconds AS REAL) + / CAST(dus.read_pages AS REAL) + END AS INTEGER) AS seconds_per_page +FROM documents AS docs +LEFT JOIN users ON users.id = $user_id +LEFT JOIN + document_user_statistics AS dus + ON dus.document_id = docs.id AND dus.user_id = $user_id +WHERE users.id = $user_id +AND docs.id = $document_id +LIMIT 1; + +-- name: GetDocuments :many +SELECT * FROM documents +ORDER BY created_at DESC +LIMIT $limit +OFFSET $offset; + +-- name: GetDocumentsWithStats :many +SELECT + docs.id, + docs.title, + docs.author, + docs.description, + docs.isbn10, + docs.isbn13, + docs.filepath, + docs.words, + + CAST(COALESCE(dus.wpm, 0.0) AS INTEGER) AS wpm, + COALESCE(dus.page, 0) AS page, + COALESCE(dus.pages, 0) AS pages, + COALESCE(dus.read_pages, 0) AS read_pages, + COALESCE(dus.total_time_seconds, 0) AS total_time_seconds, + DATETIME(COALESCE(dus.last_read, "1970-01-01"), users.time_offset) + AS last_read, + CASE + WHEN dus.percentage > 97.0 THEN 100.0 + WHEN dus.percentage IS NULL THEN 0.0 + ELSE dus.percentage + END AS percentage, + CASE + WHEN dus.total_time_seconds IS NULL THEN 0.0 + ELSE + ROUND( + CAST(dus.total_time_seconds AS REAL) + / CAST(dus.read_pages AS REAL) + ) + END AS seconds_per_page +FROM documents AS docs +LEFT JOIN users ON users.id = $user_id +LEFT JOIN + document_user_statistics AS dus + ON dus.document_id = docs.id AND dus.user_id = $user_id +WHERE docs.deleted = false +ORDER BY dus.last_read DESC, docs.created_at DESC +LIMIT $limit +OFFSET $offset; + +-- name: GetLastActivity :one +SELECT start_time +FROM activity +WHERE device_id = $device_id +AND user_id = $user_id +ORDER BY start_time DESC LIMIT 1; + +-- name: GetMissingDocuments :many +SELECT 
documents.* FROM documents +WHERE + documents.filepath IS NOT NULL + AND documents.deleted = false + AND documents.id NOT IN (sqlc.slice('document_ids')); + +-- name: GetProgress :one +SELECT + document_progress.*, + devices.device_name +FROM document_progress +JOIN devices ON document_progress.device_id = devices.id +WHERE + document_progress.user_id = $user_id + AND document_progress.document_id = $document_id +ORDER BY + document_progress.created_at + DESC +LIMIT 1; + -- name: GetUser :one SELECT * FROM users WHERE id = $user_id LIMIT 1; +-- name: GetUserStreaks :many +SELECT * FROM user_streaks +WHERE user_id = $user_id; + +-- name: GetUsers :many +SELECT * FROM users +WHERE + users.id = $user + OR ?1 IN ( + SELECT id + FROM users + WHERE id = $user + AND admin = 1 + ) +ORDER BY created_at DESC +LIMIT $limit +OFFSET $offset; + +-- name: GetWPMLeaderboard :many +SELECT + user_id, + CAST(SUM(words_read) AS INTEGER) AS total_words_read, + CAST(SUM(total_time_seconds) AS INTEGER) AS total_seconds, + ROUND(CAST(SUM(words_read) AS REAL) / (SUM(total_time_seconds) / 60.0), 2) + AS wpm +FROM document_user_statistics +WHERE words_read > 0 +GROUP BY user_id +ORDER BY wpm DESC; + +-- name: GetWantedDocuments :many +SELECT + CAST(value AS TEXT) AS id, + CAST((documents.filepath IS NULL) AS BOOLEAN) AS want_file, + CAST((IFNULL(documents.synced, false) != true) AS BOOLEAN) AS want_metadata +FROM json_each(?1) +LEFT JOIN documents +ON value = documents.id +WHERE ( + documents.id IS NOT NULL + AND documents.deleted = false + AND ( + documents.synced = false + OR documents.filepath IS NULL + ) +) +OR (documents.id IS NULL) +OR CAST($document_ids AS TEXT) != CAST($document_ids AS TEXT); + +-- name: UpdateDocumentDeleted :one +UPDATE documents +SET + deleted = $deleted +WHERE id = $id +RETURNING *; + +-- name: UpdateDocumentSync :one +UPDATE documents +SET + synced = $synced +WHERE id = $id +RETURNING *; + +-- name: UpdateProgress :one +INSERT OR REPLACE INTO document_progress ( 
+ user_id, + document_id, + device_id, + percentage, + progress +) +VALUES (?, ?, ?, ?, ?) +RETURNING *; + -- name: UpdateUser :one UPDATE users SET @@ -29,6 +375,15 @@ SET WHERE id = $user_id RETURNING *; +-- name: UpsertDevice :one +INSERT INTO devices (id, user_id, last_synced, device_name) +VALUES (?, ?, ?, ?) +ON CONFLICT DO UPDATE +SET + device_name = COALESCE(excluded.device_name, device_name), + last_synced = COALESCE(excluded.last_synced, last_synced) +RETURNING *; + -- name: UpsertDocument :one INSERT INTO documents ( id, @@ -65,406 +420,3 @@ SET isbn10 = COALESCE(excluded.isbn10, isbn10), isbn13 = COALESCE(excluded.isbn13, isbn13) RETURNING *; - --- name: DeleteDocument :execrows -UPDATE documents -SET - deleted = 1 -WHERE id = $id; - --- name: UpdateDocumentSync :one -UPDATE documents -SET - synced = $synced -WHERE id = $id -RETURNING *; - --- name: UpdateDocumentDeleted :one -UPDATE documents -SET - deleted = $deleted -WHERE id = $id -RETURNING *; - --- name: GetDocument :one -SELECT * FROM documents -WHERE id = $document_id LIMIT 1; - --- name: UpsertDevice :one -INSERT INTO devices (id, user_id, device_name) -VALUES (?, ?, ?) -ON CONFLICT DO UPDATE -SET - device_name = COALESCE(excluded.device_name, device_name) -RETURNING *; - --- name: GetDevice :one -SELECT * FROM devices -WHERE id = $device_id LIMIT 1; - --- name: UpdateProgress :one -INSERT OR REPLACE INTO document_progress ( - user_id, - document_id, - device_id, - percentage, - progress -) -VALUES (?, ?, ?, ?, ?) 
-RETURNING *; - --- name: GetProgress :one -SELECT - document_progress.*, - devices.device_name -FROM document_progress -JOIN devices ON document_progress.device_id = devices.id -WHERE - document_progress.user_id = $user_id - AND document_progress.document_id = $document_id -ORDER BY - document_progress.created_at - DESC -LIMIT 1; - --- name: GetLastActivity :one -SELECT start_time -FROM activity -WHERE device_id = $device_id -AND user_id = $user_id -ORDER BY start_time DESC LIMIT 1; - --- name: AddActivity :one -INSERT INTO activity ( - user_id, - document_id, - device_id, - start_time, - duration, - page, - pages -) -VALUES (?, ?, ?, ?, ?, ?, ?) -RETURNING *; - --- name: GetMissingDocuments :many -SELECT documents.* FROM documents -WHERE - documents.filepath IS NOT NULL - AND documents.deleted = false - AND documents.id NOT IN (sqlc.slice('document_ids')); - --- name: GetWantedDocuments :many -SELECT - CAST(value AS TEXT) AS id, - CAST((documents.filepath IS NULL) AS BOOLEAN) AS want_file, - CAST((IFNULL(documents.synced, false) != true) AS BOOLEAN) AS want_metadata -FROM json_each(?1) -LEFT JOIN documents -ON value = documents.id -WHERE ( - documents.id IS NOT NULL - AND documents.deleted = false - AND ( - documents.synced = false - OR documents.filepath IS NULL - ) -) -OR (documents.id IS NULL) -OR CAST($document_ids AS TEXT) != CAST($document_ids AS TEXT); - --- name: GetDeletedDocuments :many -SELECT documents.id -FROM documents -WHERE - documents.deleted = true - AND documents.id IN (sqlc.slice('document_ids')); - --- name: GetDocuments :many -SELECT * FROM documents -ORDER BY created_at DESC -LIMIT $limit -OFFSET $offset; - --- name: GetDocumentWithStats :one -WITH true_progress AS ( - SELECT - start_time AS last_read, - SUM(duration) AS total_time_seconds, - document_id, - page, - pages, - - -- Determine Read Pages - COUNT(DISTINCT page) AS read_pages, - - -- Derive Percentage of Book - ROUND(CAST(page AS REAL) / CAST(pages AS REAL) * 100, 2) AS percentage 
- FROM rescaled_activity - WHERE user_id = $user_id - AND document_id = $document_id - GROUP BY document_id - HAVING MAX(start_time) - LIMIT 1 -) -SELECT - documents.*, - - CAST(IFNULL(page, 0) AS INTEGER) AS page, - CAST(IFNULL(pages, 0) AS INTEGER) AS pages, - CAST(IFNULL(total_time_seconds, 0) AS INTEGER) AS total_time_seconds, - CAST(DATETIME(IFNULL(last_read, "1970-01-01"), time_offset) AS TEXT) AS last_read, - CAST(IFNULL(read_pages, 0) AS INTEGER) AS read_pages, - - -- Calculate Seconds / Page - -- 1. Calculate Total Time in Seconds (Sum Duration in Activity) - -- 2. Divide by Read Pages (Distinct Pages in Activity) - CAST(CASE - WHEN total_time_seconds IS NULL THEN 0.0 - ELSE ROUND(CAST(total_time_seconds AS REAL) / CAST(read_pages AS REAL)) - END AS INTEGER) AS seconds_per_page, - - -- Arbitrarily >97% is Complete - CAST(CASE - WHEN percentage > 97.0 THEN 100.0 - WHEN percentage IS NULL THEN 0.0 - ELSE percentage - END AS REAL) AS percentage - -FROM documents -LEFT JOIN true_progress ON true_progress.document_id = documents.id -LEFT JOIN users ON users.id = $user_id -WHERE documents.id = $document_id -ORDER BY true_progress.last_read DESC, documents.created_at DESC -LIMIT 1; - --- name: GetDocumentsWithStats :many -WITH true_progress AS ( - SELECT - start_time AS last_read, - SUM(duration) AS total_time_seconds, - document_id, - page, - pages, - ROUND(CAST(page AS REAL) / CAST(pages AS REAL) * 100, 2) AS percentage - FROM activity - WHERE user_id = $user_id - GROUP BY document_id - HAVING MAX(start_time) -) -SELECT - documents.*, - - CAST(IFNULL(page, 0) AS INTEGER) AS page, - CAST(IFNULL(pages, 0) AS INTEGER) AS pages, - CAST(IFNULL(total_time_seconds, 0) AS INTEGER) AS total_time_seconds, - CAST(DATETIME(IFNULL(last_read, "1970-01-01"), time_offset) AS TEXT) AS last_read, - - CAST(CASE - WHEN percentage > 97.0 THEN 100.0 - WHEN percentage IS NULL THEN 0.0 - ELSE percentage - END AS REAL) AS percentage - -FROM documents -LEFT JOIN true_progress ON 
true_progress.document_id = documents.id -LEFT JOIN users ON users.id = $user_id -WHERE documents.deleted == false -ORDER BY true_progress.last_read DESC, documents.created_at DESC -LIMIT $limit -OFFSET $offset; - --- name: GetUsers :many -SELECT * FROM users -WHERE - users.id = $user - OR ?1 IN ( - SELECT id - FROM users - WHERE id = $user - AND admin = 1 - ) -ORDER BY created_at DESC -LIMIT $limit -OFFSET $offset; - --- name: GetActivity :many -SELECT - document_id, - CAST(DATETIME(activity.start_time, time_offset) AS TEXT) AS start_time, - title, - author, - duration, - page, - pages -FROM activity -LEFT JOIN documents ON documents.id = activity.document_id -LEFT JOIN users ON users.id = activity.user_id -WHERE - activity.user_id = $user_id - AND ( - CAST($doc_filter AS BOOLEAN) = TRUE - AND document_id = $document_id - ) - OR $doc_filter = FALSE -ORDER BY start_time DESC -LIMIT $limit -OFFSET $offset; - --- name: GetDevices :many -SELECT - devices.device_name, - CAST(DATETIME(devices.created_at, users.time_offset) AS TEXT) AS created_at, - CAST(DATETIME(MAX(activity.created_at), users.time_offset) AS TEXT) AS last_sync -FROM activity -JOIN devices ON devices.id = activity.device_id -JOIN users ON users.id = $user_id -WHERE devices.user_id = $user_id -GROUP BY activity.device_id; - --- name: GetDocumentReadStats :one -SELECT - COUNT(DISTINCT page) AS pages_read, - SUM(duration) AS total_time -FROM rescaled_activity -WHERE document_id = $document_id -AND user_id = $user_id -AND start_time >= $start_time; - --- name: GetDocumentReadStatsCapped :one -WITH capped_stats AS ( - SELECT MIN(SUM(duration), CAST($page_duration_cap AS INTEGER)) AS durations - FROM rescaled_activity - WHERE document_id = $document_id - AND user_id = $user_id - AND start_time >= $start_time - GROUP BY page -) -SELECT - CAST(COUNT(*) AS INTEGER) AS pages_read, - CAST(SUM(durations) AS INTEGER) AS total_time -FROM capped_stats; - --- name: GetDocumentDaysRead :one -WITH document_days AS ( - 
SELECT DATE(start_time, time_offset) AS dates - FROM activity - JOIN users ON users.id = activity.user_id - WHERE document_id = $document_id - AND user_id = $user_id - GROUP BY dates -) -SELECT CAST(COUNT(*) AS INTEGER) AS days_read -FROM document_days; - --- name: GetUserWindowStreaks :one -WITH document_windows AS ( - SELECT - CASE - WHEN ?2 = "WEEK" THEN DATE(start_time, time_offset, 'weekday 0', '-7 day') - WHEN ?2 = "DAY" THEN DATE(start_time, time_offset) - END AS read_window, - time_offset - FROM activity - JOIN users ON users.id = activity.user_id - WHERE user_id = $user_id - AND CAST($window AS TEXT) = CAST($window AS TEXT) - GROUP BY read_window -), -partitions AS ( - SELECT - document_windows.*, - row_number() OVER ( - PARTITION BY 1 ORDER BY read_window DESC - ) AS seqnum - FROM document_windows -), -streaks AS ( - SELECT - COUNT(*) AS streak, - MIN(read_window) AS start_date, - MAX(read_window) AS end_date, - time_offset - FROM partitions - GROUP BY - CASE - WHEN ?2 = "DAY" THEN DATE(read_window, '+' || seqnum || ' day') - WHEN ?2 = "WEEK" THEN DATE(read_window, '+' || (seqnum * 7) || ' day') - END, - time_offset - ORDER BY end_date DESC -), -max_streak AS ( - SELECT - MAX(streak) AS max_streak, - start_date AS max_streak_start_date, - end_date AS max_streak_end_date - FROM streaks - LIMIT 1 -), -current_streak AS ( - SELECT - streak AS current_streak, - start_date AS current_streak_start_date, - end_date AS current_streak_end_date - FROM streaks - WHERE CASE - WHEN ?2 = "WEEK" THEN - DATE('now', time_offset, 'weekday 0', '-14 day') = current_streak_end_date - OR DATE('now', time_offset, 'weekday 0', '-7 day') = current_streak_end_date - WHEN ?2 = "DAY" THEN - DATE('now', time_offset, '-1 day') = current_streak_end_date - OR DATE('now', time_offset) = current_streak_end_date - END - LIMIT 1 -) -SELECT - CAST(IFNULL(max_streak, 0) AS INTEGER) AS max_streak, - CAST(IFNULL(max_streak_start_date, "N/A") AS TEXT) AS max_streak_start_date, - 
CAST(IFNULL(max_streak_end_date, "N/A") AS TEXT) AS max_streak_end_date, - IFNULL(current_streak, 0) AS current_streak, - CAST(IFNULL(current_streak_start_date, "N/A") AS TEXT) AS current_streak_start_date, - CAST(IFNULL(current_streak_end_date, "N/A") AS TEXT) AS current_streak_end_date -FROM max_streak -LEFT JOIN current_streak ON 1 = 1 -LIMIT 1; - --- name: GetDatabaseInfo :one -SELECT - (SELECT COUNT(rowid) FROM activity WHERE activity.user_id = $user_id) AS activity_size, - (SELECT COUNT(rowid) FROM documents) AS documents_size, - (SELECT COUNT(rowid) FROM document_progress WHERE document_progress.user_id = $user_id) AS progress_size, - (SELECT COUNT(rowid) FROM devices WHERE devices.user_id = $user_id) AS devices_size -LIMIT 1; - --- name: GetDailyReadStats :many -WITH RECURSIVE last_30_days AS ( - SELECT DATE('now', time_offset) AS date - FROM users WHERE users.id = $user_id - UNION ALL - SELECT DATE(date, '-1 days') - FROM last_30_days - LIMIT 30 -), -activity_records AS ( - SELECT - SUM(duration) AS seconds_read, - DATE(start_time, time_offset) AS day - FROM activity - LEFT JOIN users ON users.id = activity.user_id - WHERE user_id = $user_id - AND start_time > DATE('now', '-31 days') - GROUP BY day - ORDER BY day DESC - LIMIT 30 -) -SELECT - CAST(date AS TEXT), - CAST(CASE - WHEN seconds_read IS NULL THEN 0 - ELSE seconds_read / 60 - END AS INTEGER) AS minutes_read -FROM last_30_days -LEFT JOIN activity_records ON activity_records.day == last_30_days.date -ORDER BY date DESC -LIMIT 30; diff --git a/database/query.sql.go b/database/query.sql.go index 586725c..a8f10bb 100644 --- a/database/query.sql.go +++ b/database/query.sql.go @@ -13,7 +13,7 @@ import ( ) const addActivity = `-- name: AddActivity :one -INSERT INTO activity ( +INSERT INTO raw_activity ( user_id, document_id, device_id, @@ -23,7 +23,7 @@ INSERT INTO activity ( pages ) VALUES (?, ?, ?, ?, ?, ?, ?) 
-RETURNING id, user_id, document_id, device_id, start_time, duration, page, pages, created_at +RETURNING id, user_id, document_id, device_id, start_time, page, pages, duration, created_at ` type AddActivityParams struct { @@ -36,7 +36,7 @@ type AddActivityParams struct { Pages int64 `json:"pages"` } -func (q *Queries) AddActivity(ctx context.Context, arg AddActivityParams) (Activity, error) { +func (q *Queries) AddActivity(ctx context.Context, arg AddActivityParams) (RawActivity, error) { row := q.db.QueryRowContext(ctx, addActivity, arg.UserID, arg.DocumentID, @@ -46,16 +46,16 @@ func (q *Queries) AddActivity(ctx context.Context, arg AddActivityParams) (Activ arg.Page, arg.Pages, ) - var i Activity + var i RawActivity err := row.Scan( &i.ID, &i.UserID, &i.DocumentID, &i.DeviceID, &i.StartTime, - &i.Duration, &i.Page, &i.Pages, + &i.Duration, &i.CreatedAt, ) return i, err @@ -149,27 +149,39 @@ func (q *Queries) DeleteDocument(ctx context.Context, id string) (int64, error) } const getActivity = `-- name: GetActivity :many +WITH filtered_activity AS ( + SELECT + document_id, + user_id, + start_time, + duration, + page, + pages + FROM activity + WHERE + activity.user_id = ?1 + AND ( + ( + CAST(?2 AS BOOLEAN) = TRUE + AND document_id = ?3 + ) OR ?2 = FALSE + ) + ORDER BY start_time DESC + LIMIT ?5 + OFFSET ?4 +) + SELECT document_id, - CAST(DATETIME(activity.start_time, time_offset) AS TEXT) AS start_time, + CAST(DATETIME(activity.start_time, users.time_offset) AS TEXT) AS start_time, title, author, duration, page, pages -FROM activity +FROM filtered_activity AS activity LEFT JOIN documents ON documents.id = activity.document_id LEFT JOIN users ON users.id = activity.user_id -WHERE - activity.user_id = ?1 - AND ( - CAST(?2 AS BOOLEAN) = TRUE - AND document_id = ?3 - ) - OR ?2 = FALSE -ORDER BY start_time DESC -LIMIT ?5 -OFFSET ?4 ` type GetActivityParams struct { @@ -236,16 +248,22 @@ WITH RECURSIVE last_30_days AS ( FROM last_30_days LIMIT 30 ), -activity_records AS ( 
+filtered_activity AS ( + SELECT + user_id, + start_time, + duration + FROM activity + WHERE start_time > DATE('now', '-31 days') + AND activity.user_id = ?1 +), +activity_days AS ( SELECT SUM(duration) AS seconds_read, DATE(start_time, time_offset) AS day - FROM activity + FROM filtered_activity AS activity LEFT JOIN users ON users.id = activity.user_id - WHERE user_id = ?1 - AND start_time > DATE('now', '-31 days') GROUP BY day - ORDER BY day DESC LIMIT 30 ) SELECT @@ -255,7 +273,7 @@ SELECT ELSE seconds_read / 60 END AS INTEGER) AS minutes_read FROM last_30_days -LEFT JOIN activity_records ON activity_records.day == last_30_days.date +LEFT JOIN activity_days ON activity_days.day == last_30_days.date ORDER BY date DESC LIMIT 30 ` @@ -358,7 +376,7 @@ func (q *Queries) GetDeletedDocuments(ctx context.Context, documentIds []string) } const getDevice = `-- name: GetDevice :one -SELECT id, user_id, device_name, created_at, sync FROM devices +SELECT id, user_id, device_name, last_synced, created_at, sync FROM devices WHERE id = ?1 LIMIT 1 ` @@ -369,6 +387,7 @@ func (q *Queries) GetDevice(ctx context.Context, deviceID string) (Device, error &i.ID, &i.UserID, &i.DeviceName, + &i.LastSynced, &i.CreatedAt, &i.Sync, ) @@ -379,18 +398,16 @@ const getDevices = `-- name: GetDevices :many SELECT devices.device_name, CAST(DATETIME(devices.created_at, users.time_offset) AS TEXT) AS created_at, - CAST(DATETIME(MAX(activity.created_at), users.time_offset) AS TEXT) AS last_sync -FROM activity -JOIN devices ON devices.id = activity.device_id -JOIN users ON users.id = ?1 -WHERE devices.user_id = ?1 -GROUP BY activity.device_id + CAST(DATETIME(devices.last_synced, users.time_offset) AS TEXT) AS last_synced +FROM devices +JOIN users ON users.id = devices.user_id +WHERE users.id = ?1 ` type GetDevicesRow struct { DeviceName string `json:"device_name"` CreatedAt string `json:"created_at"` - LastSync string `json:"last_sync"` + LastSynced string `json:"last_synced"` } func (q *Queries) 
GetDevices(ctx context.Context, userID string) ([]GetDevicesRow, error) { @@ -402,7 +419,7 @@ func (q *Queries) GetDevices(ctx context.Context, userID string) ([]GetDevicesRo var items []GetDevicesRow for rows.Next() { var i GetDevicesRow - if err := rows.Scan(&i.DeviceName, &i.CreatedAt, &i.LastSync); err != nil { + if err := rows.Scan(&i.DeviceName, &i.CreatedAt, &i.LastSynced); err != nil { return nil, err } items = append(items, i) @@ -477,7 +494,7 @@ const getDocumentReadStats = `-- name: GetDocumentReadStats :one SELECT COUNT(DISTINCT page) AS pages_read, SUM(duration) AS total_time -FROM rescaled_activity +FROM activity WHERE document_id = ?1 AND user_id = ?2 AND start_time >= ?3 @@ -504,7 +521,7 @@ func (q *Queries) GetDocumentReadStats(ctx context.Context, arg GetDocumentReadS const getDocumentReadStatsCapped = `-- name: GetDocumentReadStatsCapped :one WITH capped_stats AS ( SELECT MIN(SUM(duration), CAST(?1 AS INTEGER)) AS durations - FROM rescaled_activity + FROM activity WHERE document_id = ?2 AND user_id = ?3 AND start_time >= ?4 @@ -541,55 +558,41 @@ func (q *Queries) GetDocumentReadStatsCapped(ctx context.Context, arg GetDocumen } const getDocumentWithStats = `-- name: GetDocumentWithStats :one -WITH true_progress AS ( - SELECT - start_time AS last_read, - SUM(duration) AS total_time_seconds, - document_id, - page, - pages, - - -- Determine Read Pages - COUNT(DISTINCT page) AS read_pages, - - -- Derive Percentage of Book - ROUND(CAST(page AS REAL) / CAST(pages AS REAL) * 100, 2) AS percentage - FROM rescaled_activity - WHERE user_id = ?1 - AND document_id = ?2 - GROUP BY document_id - HAVING MAX(start_time) - LIMIT 1 -) SELECT - documents.id, documents.md5, documents.filepath, documents.coverfile, documents.title, documents.author, documents.series, documents.series_index, documents.lang, documents.description, documents.words, documents.gbid, documents.olid, documents.isbn10, documents.isbn13, documents.synced, documents.deleted, 
documents.updated_at, documents.created_at, + docs.id, + docs.title, + docs.author, + docs.description, + docs.isbn10, + docs.isbn13, + docs.filepath, + docs.words, - CAST(IFNULL(page, 0) AS INTEGER) AS page, - CAST(IFNULL(pages, 0) AS INTEGER) AS pages, - CAST(IFNULL(total_time_seconds, 0) AS INTEGER) AS total_time_seconds, - CAST(DATETIME(IFNULL(last_read, "1970-01-01"), time_offset) AS TEXT) AS last_read, - CAST(IFNULL(read_pages, 0) AS INTEGER) AS read_pages, - - -- Calculate Seconds / Page - -- 1. Calculate Total Time in Seconds (Sum Duration in Activity) - -- 2. Divide by Read Pages (Distinct Pages in Activity) + CAST(COALESCE(dus.wpm, 0.0) AS INTEGER) AS wpm, + COALESCE(dus.page, 0) AS page, + COALESCE(dus.pages, 0) AS pages, + COALESCE(dus.read_pages, 0) AS read_pages, + COALESCE(dus.total_time_seconds, 0) AS total_time_seconds, + DATETIME(COALESCE(dus.last_read, "1970-01-01"), users.time_offset) + AS last_read, + CASE + WHEN dus.percentage > 97.0 THEN 100.0 + WHEN dus.percentage IS NULL THEN 0.0 + ELSE dus.percentage + END AS percentage, CAST(CASE - WHEN total_time_seconds IS NULL THEN 0.0 - ELSE ROUND(CAST(total_time_seconds AS REAL) / CAST(read_pages AS REAL)) - END AS INTEGER) AS seconds_per_page, - - -- Arbitrarily >97% is Complete - CAST(CASE - WHEN percentage > 97.0 THEN 100.0 - WHEN percentage IS NULL THEN 0.0 - ELSE percentage - END AS REAL) AS percentage - -FROM documents -LEFT JOIN true_progress ON true_progress.document_id = documents.id + WHEN dus.total_time_seconds IS NULL THEN 0.0 + ELSE + CAST(dus.total_time_seconds AS REAL) + / CAST(dus.read_pages AS REAL) + END AS INTEGER) AS seconds_per_page +FROM documents AS docs LEFT JOIN users ON users.id = ?1 -WHERE documents.id = ?2 -ORDER BY true_progress.last_read DESC, documents.created_at DESC +LEFT JOIN + document_user_statistics AS dus + ON dus.document_id = docs.id AND dus.user_id = ?1 +WHERE users.id = ?1 +AND docs.id = ?2 LIMIT 1 ` @@ -599,32 +602,22 @@ type GetDocumentWithStatsParams 
struct { } type GetDocumentWithStatsRow struct { - ID string `json:"id"` - Md5 *string `json:"md5"` - Filepath *string `json:"filepath"` - Coverfile *string `json:"coverfile"` - Title *string `json:"title"` - Author *string `json:"author"` - Series *string `json:"series"` - SeriesIndex *int64 `json:"series_index"` - Lang *string `json:"lang"` - Description *string `json:"description"` - Words *int64 `json:"words"` - Gbid *string `json:"gbid"` - Olid *string `json:"-"` - Isbn10 *string `json:"isbn10"` - Isbn13 *string `json:"isbn13"` - Synced bool `json:"-"` - Deleted bool `json:"-"` - UpdatedAt time.Time `json:"updated_at"` - CreatedAt time.Time `json:"created_at"` - Page int64 `json:"page"` - Pages int64 `json:"pages"` - TotalTimeSeconds int64 `json:"total_time_seconds"` - LastRead string `json:"last_read"` - ReadPages int64 `json:"read_pages"` - SecondsPerPage int64 `json:"seconds_per_page"` - Percentage float64 `json:"percentage"` + ID string `json:"id"` + Title *string `json:"title"` + Author *string `json:"author"` + Description *string `json:"description"` + Isbn10 *string `json:"isbn10"` + Isbn13 *string `json:"isbn13"` + Filepath *string `json:"filepath"` + Words *int64 `json:"words"` + Wpm int64 `json:"wpm"` + Page int64 `json:"page"` + Pages int64 `json:"pages"` + ReadPages int64 `json:"read_pages"` + TotalTimeSeconds int64 `json:"total_time_seconds"` + LastRead interface{} `json:"last_read"` + Percentage interface{} `json:"percentage"` + SecondsPerPage int64 `json:"seconds_per_page"` } func (q *Queries) GetDocumentWithStats(ctx context.Context, arg GetDocumentWithStatsParams) (GetDocumentWithStatsRow, error) { @@ -632,31 +625,21 @@ func (q *Queries) GetDocumentWithStats(ctx context.Context, arg GetDocumentWithS var i GetDocumentWithStatsRow err := row.Scan( &i.ID, - &i.Md5, - &i.Filepath, - &i.Coverfile, &i.Title, &i.Author, - &i.Series, - &i.SeriesIndex, - &i.Lang, &i.Description, - &i.Words, - &i.Gbid, - &i.Olid, &i.Isbn10, &i.Isbn13, - &i.Synced, - 
&i.Deleted, - &i.UpdatedAt, - &i.CreatedAt, + &i.Filepath, + &i.Words, + &i.Wpm, &i.Page, &i.Pages, + &i.ReadPages, &i.TotalTimeSeconds, &i.LastRead, - &i.ReadPages, - &i.SecondsPerPage, &i.Percentage, + &i.SecondsPerPage, ) return i, err } @@ -717,38 +700,43 @@ func (q *Queries) GetDocuments(ctx context.Context, arg GetDocumentsParams) ([]D } const getDocumentsWithStats = `-- name: GetDocumentsWithStats :many -WITH true_progress AS ( - SELECT - start_time AS last_read, - SUM(duration) AS total_time_seconds, - document_id, - page, - pages, - ROUND(CAST(page AS REAL) / CAST(pages AS REAL) * 100, 2) AS percentage - FROM activity - WHERE user_id = ?1 - GROUP BY document_id - HAVING MAX(start_time) -) SELECT - documents.id, documents.md5, documents.filepath, documents.coverfile, documents.title, documents.author, documents.series, documents.series_index, documents.lang, documents.description, documents.words, documents.gbid, documents.olid, documents.isbn10, documents.isbn13, documents.synced, documents.deleted, documents.updated_at, documents.created_at, + docs.id, + docs.title, + docs.author, + docs.description, + docs.isbn10, + docs.isbn13, + docs.filepath, + docs.words, - CAST(IFNULL(page, 0) AS INTEGER) AS page, - CAST(IFNULL(pages, 0) AS INTEGER) AS pages, - CAST(IFNULL(total_time_seconds, 0) AS INTEGER) AS total_time_seconds, - CAST(DATETIME(IFNULL(last_read, "1970-01-01"), time_offset) AS TEXT) AS last_read, - - CAST(CASE - WHEN percentage > 97.0 THEN 100.0 - WHEN percentage IS NULL THEN 0.0 - ELSE percentage - END AS REAL) AS percentage - -FROM documents -LEFT JOIN true_progress ON true_progress.document_id = documents.id + CAST(COALESCE(dus.wpm, 0.0) AS INTEGER) AS wpm, + COALESCE(dus.page, 0) AS page, + COALESCE(dus.pages, 0) AS pages, + COALESCE(dus.read_pages, 0) AS read_pages, + COALESCE(dus.total_time_seconds, 0) AS total_time_seconds, + DATETIME(COALESCE(dus.last_read, "1970-01-01"), users.time_offset) + AS last_read, + CASE + WHEN dus.percentage > 97.0 
THEN 100.0 + WHEN dus.percentage IS NULL THEN 0.0 + ELSE dus.percentage + END AS percentage, + CASE + WHEN dus.total_time_seconds IS NULL THEN 0.0 + ELSE + ROUND( + CAST(dus.total_time_seconds AS REAL) + / CAST(dus.read_pages AS REAL) + ) + END AS seconds_per_page +FROM documents AS docs LEFT JOIN users ON users.id = ?1 -WHERE documents.deleted == false -ORDER BY true_progress.last_read DESC, documents.created_at DESC +LEFT JOIN + document_user_statistics AS dus + ON dus.document_id = docs.id AND dus.user_id = ?1 +WHERE docs.deleted = false +ORDER BY dus.last_read DESC, docs.created_at DESC LIMIT ?3 OFFSET ?2 ` @@ -760,30 +748,22 @@ type GetDocumentsWithStatsParams struct { } type GetDocumentsWithStatsRow struct { - ID string `json:"id"` - Md5 *string `json:"md5"` - Filepath *string `json:"filepath"` - Coverfile *string `json:"coverfile"` - Title *string `json:"title"` - Author *string `json:"author"` - Series *string `json:"series"` - SeriesIndex *int64 `json:"series_index"` - Lang *string `json:"lang"` - Description *string `json:"description"` - Words *int64 `json:"words"` - Gbid *string `json:"gbid"` - Olid *string `json:"-"` - Isbn10 *string `json:"isbn10"` - Isbn13 *string `json:"isbn13"` - Synced bool `json:"-"` - Deleted bool `json:"-"` - UpdatedAt time.Time `json:"updated_at"` - CreatedAt time.Time `json:"created_at"` - Page int64 `json:"page"` - Pages int64 `json:"pages"` - TotalTimeSeconds int64 `json:"total_time_seconds"` - LastRead string `json:"last_read"` - Percentage float64 `json:"percentage"` + ID string `json:"id"` + Title *string `json:"title"` + Author *string `json:"author"` + Description *string `json:"description"` + Isbn10 *string `json:"isbn10"` + Isbn13 *string `json:"isbn13"` + Filepath *string `json:"filepath"` + Words *int64 `json:"words"` + Wpm int64 `json:"wpm"` + Page int64 `json:"page"` + Pages int64 `json:"pages"` + ReadPages int64 `json:"read_pages"` + TotalTimeSeconds int64 `json:"total_time_seconds"` + LastRead interface{} 
`json:"last_read"` + Percentage interface{} `json:"percentage"` + SecondsPerPage interface{} `json:"seconds_per_page"` } func (q *Queries) GetDocumentsWithStats(ctx context.Context, arg GetDocumentsWithStatsParams) ([]GetDocumentsWithStatsRow, error) { @@ -797,29 +777,21 @@ func (q *Queries) GetDocumentsWithStats(ctx context.Context, arg GetDocumentsWit var i GetDocumentsWithStatsRow if err := rows.Scan( &i.ID, - &i.Md5, - &i.Filepath, - &i.Coverfile, &i.Title, &i.Author, - &i.Series, - &i.SeriesIndex, - &i.Lang, &i.Description, - &i.Words, - &i.Gbid, - &i.Olid, &i.Isbn10, &i.Isbn13, - &i.Synced, - &i.Deleted, - &i.UpdatedAt, - &i.CreatedAt, + &i.Filepath, + &i.Words, + &i.Wpm, &i.Page, &i.Pages, + &i.ReadPages, &i.TotalTimeSeconds, &i.LastRead, &i.Percentage, + &i.SecondsPerPage, ); err != nil { return nil, err } @@ -978,105 +950,41 @@ func (q *Queries) GetUser(ctx context.Context, userID string) (User, error) { return i, err } -const getUserWindowStreaks = `-- name: GetUserWindowStreaks :one -WITH document_windows AS ( - SELECT - CASE - WHEN ?2 = "WEEK" THEN DATE(start_time, time_offset, 'weekday 0', '-7 day') - WHEN ?2 = "DAY" THEN DATE(start_time, time_offset) - END AS read_window, - time_offset - FROM activity - JOIN users ON users.id = activity.user_id - WHERE user_id = ?1 - AND CAST(?2 AS TEXT) = CAST(?2 AS TEXT) - GROUP BY read_window -), -partitions AS ( - SELECT - document_windows.read_window, document_windows.time_offset, - row_number() OVER ( - PARTITION BY 1 ORDER BY read_window DESC - ) AS seqnum - FROM document_windows -), -streaks AS ( - SELECT - COUNT(*) AS streak, - MIN(read_window) AS start_date, - MAX(read_window) AS end_date, - time_offset - FROM partitions - GROUP BY - CASE - WHEN ?2 = "DAY" THEN DATE(read_window, '+' || seqnum || ' day') - WHEN ?2 = "WEEK" THEN DATE(read_window, '+' || (seqnum * 7) || ' day') - END, - time_offset - ORDER BY end_date DESC -), -max_streak AS ( - SELECT - MAX(streak) AS max_streak, - start_date AS 
max_streak_start_date, - end_date AS max_streak_end_date - FROM streaks - LIMIT 1 -), -current_streak AS ( - SELECT - streak AS current_streak, - start_date AS current_streak_start_date, - end_date AS current_streak_end_date - FROM streaks - WHERE CASE - WHEN ?2 = "WEEK" THEN - DATE('now', time_offset, 'weekday 0', '-14 day') = current_streak_end_date - OR DATE('now', time_offset, 'weekday 0', '-7 day') = current_streak_end_date - WHEN ?2 = "DAY" THEN - DATE('now', time_offset, '-1 day') = current_streak_end_date - OR DATE('now', time_offset) = current_streak_end_date - END - LIMIT 1 -) -SELECT - CAST(IFNULL(max_streak, 0) AS INTEGER) AS max_streak, - CAST(IFNULL(max_streak_start_date, "N/A") AS TEXT) AS max_streak_start_date, - CAST(IFNULL(max_streak_end_date, "N/A") AS TEXT) AS max_streak_end_date, - IFNULL(current_streak, 0) AS current_streak, - CAST(IFNULL(current_streak_start_date, "N/A") AS TEXT) AS current_streak_start_date, - CAST(IFNULL(current_streak_end_date, "N/A") AS TEXT) AS current_streak_end_date -FROM max_streak -LEFT JOIN current_streak ON 1 = 1 -LIMIT 1 +const getUserStreaks = `-- name: GetUserStreaks :many +SELECT user_id, "window", max_streak, max_streak_start_date, max_streak_end_date, current_streak, current_streak_start_date, current_streak_end_date FROM user_streaks +WHERE user_id = ?1 ` -type GetUserWindowStreaksParams struct { - UserID string `json:"user_id"` - Window string `json:"window"` -} - -type GetUserWindowStreaksRow struct { - MaxStreak int64 `json:"max_streak"` - MaxStreakStartDate string `json:"max_streak_start_date"` - MaxStreakEndDate string `json:"max_streak_end_date"` - CurrentStreak interface{} `json:"current_streak"` - CurrentStreakStartDate string `json:"current_streak_start_date"` - CurrentStreakEndDate string `json:"current_streak_end_date"` -} - -func (q *Queries) GetUserWindowStreaks(ctx context.Context, arg GetUserWindowStreaksParams) (GetUserWindowStreaksRow, error) { - row := q.db.QueryRowContext(ctx, 
getUserWindowStreaks, arg.UserID, arg.Window) - var i GetUserWindowStreaksRow - err := row.Scan( - &i.MaxStreak, - &i.MaxStreakStartDate, - &i.MaxStreakEndDate, - &i.CurrentStreak, - &i.CurrentStreakStartDate, - &i.CurrentStreakEndDate, - ) - return i, err +func (q *Queries) GetUserStreaks(ctx context.Context, userID string) ([]UserStreak, error) { + rows, err := q.db.QueryContext(ctx, getUserStreaks, userID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []UserStreak + for rows.Next() { + var i UserStreak + if err := rows.Scan( + &i.UserID, + &i.Window, + &i.MaxStreak, + &i.MaxStreakStartDate, + &i.MaxStreakEndDate, + &i.CurrentStreak, + &i.CurrentStreakStartDate, + &i.CurrentStreakEndDate, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil } const getUsers = `-- name: GetUsers :many @@ -1129,6 +1037,54 @@ func (q *Queries) GetUsers(ctx context.Context, arg GetUsersParams) ([]User, err return items, nil } +const getWPMLeaderboard = `-- name: GetWPMLeaderboard :many +SELECT + user_id, + CAST(SUM(words_read) AS INTEGER) AS total_words_read, + CAST(SUM(total_time_seconds) AS INTEGER) AS total_seconds, + ROUND(CAST(SUM(words_read) AS REAL) / (SUM(total_time_seconds) / 60.0), 2) + AS wpm +FROM document_user_statistics +WHERE words_read > 0 +GROUP BY user_id +ORDER BY wpm DESC +` + +type GetWPMLeaderboardRow struct { + UserID string `json:"user_id"` + TotalWordsRead int64 `json:"total_words_read"` + TotalSeconds int64 `json:"total_seconds"` + Wpm float64 `json:"wpm"` +} + +func (q *Queries) GetWPMLeaderboard(ctx context.Context) ([]GetWPMLeaderboardRow, error) { + rows, err := q.db.QueryContext(ctx, getWPMLeaderboard) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetWPMLeaderboardRow + for rows.Next() { + var i GetWPMLeaderboardRow + if err := rows.Scan( + 
&i.UserID, + &i.TotalWordsRead, + &i.TotalSeconds, + &i.Wpm, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const getWantedDocuments = `-- name: GetWantedDocuments :many SELECT CAST(value AS TEXT) AS id, @@ -1327,27 +1283,35 @@ func (q *Queries) UpdateUser(ctx context.Context, arg UpdateUserParams) (User, e } const upsertDevice = `-- name: UpsertDevice :one -INSERT INTO devices (id, user_id, device_name) -VALUES (?, ?, ?) +INSERT INTO devices (id, user_id, last_synced, device_name) +VALUES (?, ?, ?, ?) ON CONFLICT DO UPDATE SET - device_name = COALESCE(excluded.device_name, device_name) -RETURNING id, user_id, device_name, created_at, sync + device_name = COALESCE(excluded.device_name, device_name), + last_synced = COALESCE(excluded.last_synced, last_synced) +RETURNING id, user_id, device_name, last_synced, created_at, sync ` type UpsertDeviceParams struct { - ID string `json:"id"` - UserID string `json:"user_id"` - DeviceName string `json:"device_name"` + ID string `json:"id"` + UserID string `json:"user_id"` + LastSynced time.Time `json:"last_synced"` + DeviceName string `json:"device_name"` } func (q *Queries) UpsertDevice(ctx context.Context, arg UpsertDeviceParams) (Device, error) { - row := q.db.QueryRowContext(ctx, upsertDevice, arg.ID, arg.UserID, arg.DeviceName) + row := q.db.QueryRowContext(ctx, upsertDevice, + arg.ID, + arg.UserID, + arg.LastSynced, + arg.DeviceName, + ) var i Device err := row.Scan( &i.ID, &i.UserID, &i.DeviceName, + &i.LastSynced, &i.CreatedAt, &i.Sync, ) diff --git a/database/schema.sql b/database/schema.sql index f55d319..2234167 100644 --- a/database/schema.sql +++ b/database/schema.sql @@ -1,6 +1,10 @@ PRAGMA foreign_keys = ON; PRAGMA journal_mode = WAL; +--------------------------------------------------------------- +------------------------ Normal Tables 
------------------------ +--------------------------------------------------------------- + -- Authentication CREATE TABLE IF NOT EXISTS users ( id TEXT NOT NULL PRIMARY KEY, @@ -64,27 +68,13 @@ CREATE TABLE IF NOT EXISTS devices ( user_id TEXT NOT NULL, device_name TEXT NOT NULL, + last_synced DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, sync BOOLEAN NOT NULL DEFAULT 1 CHECK (sync IN (0, 1)), FOREIGN KEY (user_id) REFERENCES users (id) ); --- Document Device Sync -CREATE TABLE IF NOT EXISTS document_device_sync ( - user_id TEXT NOT NULL, - document_id TEXT NOT NULL, - device_id TEXT NOT NULL, - - last_synced DATETIME NOT NULL, - sync BOOLEAN NOT NULL DEFAULT 1 CHECK (sync IN (0, 1)), - - FOREIGN KEY (user_id) REFERENCES users (id), - FOREIGN KEY (document_id) REFERENCES documents (id), - FOREIGN KEY (device_id) REFERENCES devices (id), - PRIMARY KEY (user_id, document_id, device_id) -); - -- User Document Progress CREATE TABLE IF NOT EXISTS document_progress ( user_id TEXT NOT NULL, @@ -101,17 +91,17 @@ CREATE TABLE IF NOT EXISTS document_progress ( PRIMARY KEY (user_id, document_id, device_id) ); --- Read Activity -CREATE TABLE IF NOT EXISTS activity ( +-- Raw Read Activity +CREATE TABLE IF NOT EXISTS raw_activity ( id INTEGER PRIMARY KEY AUTOINCREMENT, user_id TEXT NOT NULL, document_id TEXT NOT NULL, device_id TEXT NOT NULL, start_time DATETIME NOT NULL, - duration INTEGER NOT NULL, page INTEGER NOT NULL, pages INTEGER NOT NULL, + duration INTEGER NOT NULL, created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, FOREIGN KEY (user_id) REFERENCES users (id), @@ -119,23 +109,71 @@ CREATE TABLE IF NOT EXISTS activity ( FOREIGN KEY (device_id) REFERENCES devices (id) ); --- Indexes -CREATE INDEX IF NOT EXISTS activity_start_time ON activity (start_time); -CREATE INDEX IF NOT EXISTS activity_user_id_document_id ON activity ( +--------------------------------------------------------------- 
+----------------------- Temporary Tables ---------------------- +--------------------------------------------------------------- + +-- Temporary Activity Table (Cached from View) +CREATE TEMPORARY TABLE IF NOT EXISTS activity ( + user_id TEXT NOT NULL, + document_id TEXT NOT NULL, + device_id TEXT NOT NULL, + + created_at DATETIME NOT NULL, + start_time DATETIME NOT NULL, + page INTEGER NOT NULL, + pages INTEGER NOT NULL, + duration INTEGER NOT NULL +); + +-- Temporary User Streaks Table (Cached from View) +CREATE TEMPORARY TABLE IF NOT EXISTS user_streaks ( + user_id TEXT NOT NULL, + window TEXT NOT NULL, + + max_streak INTEGER NOT NULL, + max_streak_start_date TEXT NOT NULL, + max_streak_end_date TEXT NOT NULL, + + current_streak INTEGER NOT NULL, + current_streak_start_date TEXT NOT NULL, + current_streak_end_date TEXT NOT NULL +); + +CREATE TEMPORARY TABLE IF NOT EXISTS document_user_statistics ( + document_id TEXT NOT NULL, + user_id TEXT NOT NULL, + last_read TEXT NOT NULL, + page INTEGER NOT NULL, + pages INTEGER NOT NULL, + total_time_seconds INTEGER NOT NULL, + read_pages INTEGER NOT NULL, + percentage REAL NOT NULL, + words_read INTEGER NOT NULL, + wpm REAL NOT NULL +); + + +--------------------------------------------------------------- +--------------------------- Indexes --------------------------- +--------------------------------------------------------------- + +CREATE INDEX IF NOT EXISTS temp.activity_start_time ON activity (start_time); +CREATE INDEX IF NOT EXISTS temp.activity_user_id ON activity (user_id); +CREATE INDEX IF NOT EXISTS temp.activity_user_id_document_id ON activity ( user_id, document_id ); --- Update Trigger -CREATE TRIGGER IF NOT EXISTS update_documents_updated_at -BEFORE UPDATE ON documents BEGIN -UPDATE documents -SET updated_at = CURRENT_TIMESTAMP -WHERE id = old.id; -END; +--------------------------------------------------------------- +---------------------------- Views ---------------------------- 
+--------------------------------------------------------------- --- Rescaled Activity View (Adapted from KOReader) -CREATE VIEW IF NOT EXISTS rescaled_activity AS +-------------------------------- +------- Rescaled Activity ------ +-------------------------------- + +CREATE VIEW IF NOT EXISTS view_rescaled_activity AS WITH RECURSIVE nums (idx) AS ( SELECT 1 AS idx @@ -150,7 +188,7 @@ current_pages AS ( document_id, user_id, pages - FROM activity + FROM raw_activity GROUP BY document_id, user_id HAVING MAX(start_time) ORDER BY start_time DESC @@ -158,33 +196,33 @@ current_pages AS ( intermediate AS ( SELECT - activity.document_id, - activity.device_id, - activity.user_id, - activity.start_time, - activity.duration, - activity.page, + raw_activity.document_id, + raw_activity.device_id, + raw_activity.user_id, + raw_activity.created_at, + raw_activity.start_time, + raw_activity.duration, + raw_activity.page, current_pages.pages, -- Derive first page - ((activity.page - 1) * current_pages.pages) / activity.pages + ((raw_activity.page - 1) * current_pages.pages) / raw_activity.pages + 1 AS first_page, -- Derive last page MAX( - ((activity.page - 1) * current_pages.pages) - / activity.pages + ((raw_activity.page - 1) * current_pages.pages) + / raw_activity.pages + 1, - (activity.page * current_pages.pages) / activity.pages + (raw_activity.page * current_pages.pages) / raw_activity.pages ) AS last_page - FROM activity + FROM raw_activity INNER JOIN current_pages ON - current_pages.document_id = activity.document_id - AND current_pages.user_id = activity.user_id + current_pages.document_id = raw_activity.document_id + AND current_pages.user_id = raw_activity.user_id ), --- Improves performance num_limit AS ( SELECT * FROM nums LIMIT (SELECT MAX(last_page - first_page + 1) FROM intermediate) @@ -192,28 +230,30 @@ num_limit AS ( rescaled_raw AS ( SELECT - document_id, - device_id, - user_id, - start_time, - last_page, - pages, - first_page + num_limit.idx - 1 AS page, - 
duration / ( - last_page - first_page + 1.0 + intermediate.document_id, + intermediate.device_id, + intermediate.user_id, + intermediate.created_at, + intermediate.start_time, + intermediate.last_page, + intermediate.pages, + intermediate.first_page + num_limit.idx - 1 AS page, + intermediate.duration / ( + intermediate.last_page - intermediate.first_page + 1.0 ) AS duration FROM intermediate - JOIN num_limit ON - num_limit.idx <= (last_page - first_page + 1) + LEFT JOIN num_limit ON + num_limit.idx <= (intermediate.last_page - intermediate.first_page + 1) ) SELECT + user_id, document_id, device_id, - user_id, + created_at, start_time, - pages, page, + pages, -- Round up if last page (maintains total duration) CAST(CASE @@ -222,3 +262,174 @@ SELECT ELSE duration END AS INTEGER) AS duration FROM rescaled_raw; + +-------------------------------- +--------- User Streaks --------- +-------------------------------- + +CREATE VIEW IF NOT EXISTS view_user_streaks AS + +WITH document_windows AS ( + SELECT + activity.user_id, + users.time_offset, + DATE( + activity.start_time, + users.time_offset, + 'weekday 0', '-7 day' + ) AS weekly_read, + DATE(activity.start_time, users.time_offset) AS daily_read + FROM raw_activity AS activity + LEFT JOIN users ON users.id = activity.user_id + GROUP BY activity.user_id, weekly_read, daily_read +), + +weekly_partitions AS ( + SELECT + user_id, + time_offset, + 'WEEK' AS "window", + weekly_read AS read_window, + row_number() OVER ( + PARTITION BY user_id ORDER BY weekly_read DESC + ) AS seqnum + FROM document_windows + GROUP BY user_id, weekly_read +), + +daily_partitions AS ( + SELECT + user_id, + time_offset, + 'DAY' AS "window", + daily_read AS read_window, + row_number() OVER ( + PARTITION BY user_id ORDER BY daily_read DESC + ) AS seqnum + FROM document_windows + GROUP BY user_id, daily_read +), + +streaks AS ( + SELECT + COUNT(*) AS streak, + MIN(read_window) AS start_date, + MAX(read_window) AS end_date, + window, + user_id, + 
time_offset + FROM daily_partitions + GROUP BY + time_offset, + user_id, + DATE(read_window, '+' || seqnum || ' day') + + UNION ALL + + SELECT + COUNT(*) AS streak, + MIN(read_window) AS start_date, + MAX(read_window) AS end_date, + window, + user_id, + time_offset + FROM weekly_partitions + GROUP BY + time_offset, + user_id, + DATE(read_window, '+' || (seqnum * 7) || ' day') +), +max_streak AS ( + SELECT + MAX(streak) AS max_streak, + start_date AS max_streak_start_date, + end_date AS max_streak_end_date, + window, + user_id + FROM streaks + GROUP BY user_id, window +), +current_streak AS ( + SELECT + streak AS current_streak, + start_date AS current_streak_start_date, + end_date AS current_streak_end_date, + window, + user_id + FROM streaks + WHERE CASE + WHEN window = 'WEEK' THEN + DATE('now', time_offset, 'weekday 0', '-14 day') = current_streak_end_date + OR DATE('now', time_offset, 'weekday 0', '-7 day') = current_streak_end_date + WHEN window = 'DAY' THEN + DATE('now', time_offset, '-1 day') = current_streak_end_date + OR DATE('now', time_offset) = current_streak_end_date + END + GROUP BY user_id, window +) +SELECT + max_streak.user_id, + max_streak.window, + IFNULL(max_streak, 0) AS max_streak, + IFNULL(max_streak_start_date, 'N/A') AS max_streak_start_date, + IFNULL(max_streak_end_date, 'N/A') AS max_streak_end_date, + IFNULL(current_streak, 0) AS current_streak, + IFNULL(current_streak_start_date, 'N/A') AS current_streak_start_date, + IFNULL(current_streak_end_date, 'N/A') AS current_streak_end_date +FROM max_streak +LEFT JOIN current_streak ON + current_streak.user_id = max_streak.user_id + AND current_streak.window = max_streak.window; + +-------------------------------- +------- Document Stats --------- +-------------------------------- + +CREATE VIEW IF NOT EXISTS view_document_user_statistics AS + +WITH true_progress AS ( + SELECT + document_id, + user_id, + start_time AS last_read, + page, + pages, + SUM(duration) AS total_time_seconds, + + --
Determine Read Pages + COUNT(DISTINCT page) AS read_pages, + + -- Derive Percentage of Book + ROUND(CAST(page AS REAL) / CAST(pages AS REAL) * 100, 2) AS percentage + FROM view_rescaled_activity + GROUP BY document_id, user_id + HAVING MAX(start_time) +) +SELECT + true_progress.*, + (CAST(COALESCE(documents.words, 0.0) AS REAL) / pages * read_pages) + AS words_read, + (CAST(COALESCE(documents.words, 0.0) AS REAL) / pages * read_pages) + / (total_time_seconds / 60.0) AS wpm +FROM true_progress +INNER JOIN documents ON documents.id = true_progress.document_id +ORDER BY wpm DESC; + +--------------------------------------------------------------- +------------------ Populate Temporary Tables ------------------ +--------------------------------------------------------------- +INSERT INTO activity SELECT * FROM view_rescaled_activity; +INSERT INTO user_streaks SELECT * FROM view_user_streaks; +INSERT INTO document_user_statistics SELECT * FROM view_document_user_statistics; + +--------------------------------------------------------------- +--------------------------- Triggers -------------------------- +--------------------------------------------------------------- + +-- Update Trigger +CREATE TRIGGER IF NOT EXISTS update_documents_updated_at +BEFORE UPDATE ON documents BEGIN +UPDATE documents +SET updated_at = CURRENT_TIMESTAMP +WHERE id = old.id; +END; diff --git a/database/update_temp_tables.sql b/database/update_temp_tables.sql new file mode 100644 index 0000000..f63964e --- /dev/null +++ b/database/update_temp_tables.sql @@ -0,0 +1,8 @@ +DELETE FROM activity; +INSERT INTO activity SELECT * FROM view_rescaled_activity; +DELETE FROM user_streaks; +INSERT INTO user_streaks SELECT * FROM view_user_streaks; +DELETE FROM document_user_statistics; +INSERT INTO document_user_statistics +SELECT * +FROM view_document_user_statistics; diff --git a/go.mod b/go.mod index 34748da..fcf9cce 100644 --- a/go.mod +++ b/go.mod @@ -13,6 +13,7 @@ require ( github.com/sirupsen/logrus 
v1.9.3 github.com/urfave/cli/v2 v2.25.7 golang.org/x/exp v0.0.0-20230905200255-921286631fa9 + golang.org/x/net v0.14.0 ) require ( @@ -43,7 +44,6 @@ require ( github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 // indirect golang.org/x/arch v0.4.0 // indirect golang.org/x/crypto v0.12.0 // indirect - golang.org/x/net v0.14.0 // indirect golang.org/x/sys v0.12.0 // indirect golang.org/x/text v0.12.0 // indirect google.golang.org/protobuf v1.31.0 // indirect diff --git a/templates/base.html b/templates/base.html index b238c16..179122f 100644 --- a/templates/base.html +++ b/templates/base.html @@ -79,28 +79,6 @@ Activity - - - - - - - Graphs - diff --git a/templates/document.html b/templates/document.html index cf63497..e8221c2 100644 --- a/templates/document.html +++ b/templates/document.html @@ -326,13 +326,13 @@

Words / Minute

- {{ .Statistics.WordsPerMinute }} + {{ .Data.Wpm }}

Est. Time Left

- {{ NiceSeconds .Statistics.TotalTimeLeftSeconds }} + {{ NiceSeconds .TotalTimeLeftSeconds }}

diff --git a/templates/graphs.html b/templates/graphs.html deleted file mode 100644 index 7c737ce..0000000 --- a/templates/graphs.html +++ /dev/null @@ -1,11 +0,0 @@ -{{template "base.html" .}} - -{{define "title"}}Graphs{{end}} - -{{define "header"}} -Graphs -{{end}} - -{{define "content"}} -

Graphs

-{{end}} diff --git a/templates/home.html b/templates/home.html index a64ee58..41aa98d 100644 --- a/templates/home.html +++ b/templates/home.html @@ -151,6 +151,7 @@
+ {{ range $item := .Data.Streaks }}
- Daily Read Streak + {{ if eq $item.Window "WEEK" }} Weekly Read Streak {{ else }} Daily Read + Streak {{ end }}

- {{ .Data.DailyStreak.CurrentStreak }} + {{ $item.CurrentStreak }}

@@ -170,76 +172,73 @@ class="flex items-center justify-between pb-2 mb-2 text-sm border-b border-gray-200" >
-

Current Daily Streak

+

+ {{ if eq $item.Window "WEEK" }} Current Weekly Streak {{ else }} + Current Daily Streak {{ end }} +

- {{ .Data.DailyStreak.CurrentStreakStartDate }} ➞ {{ - .Data.DailyStreak.CurrentStreakEndDate }} + {{ $item.CurrentStreakStartDate }} ➞ {{ $item.CurrentStreakEndDate + }}
-
- {{ .Data.DailyStreak.CurrentStreak }} -
+
{{ $item.CurrentStreak }}
-

Best Daily Streak

+

+ {{ if eq $item.Window "WEEK" }} Best Weekly Streak {{ else }} Best + Daily Streak {{ end }} +

- {{ .Data.DailyStreak.MaxStreakStartDate }} ➞ {{ - .Data.DailyStreak.MaxStreakEndDate }} + {{ $item.MaxStreakStartDate }} ➞ {{ $item.MaxStreakEndDate }}
-
- {{ .Data.DailyStreak.MaxStreak }} +
{{ $item.MaxStreak }}
+
+
+
+
+ {{ end }} + +
+
+
+

+ WPM Leaderboard +

+
+ {{ $length := len .Data.WPMLeaderboard }} {{ if eq $length 0 }} +

N/A

+ {{ else }} +

+ {{ (index .Data.WPMLeaderboard 0).UserID }} +

+ {{ end }} +
+
+
+ {{ range $index, $item := .Data.WPMLeaderboard }} {{ if lt $index 3 }} + {{ if eq $index 0 }} +
+ {{ else }} +
+ {{ end }} +
+

{{ $item.UserID }}

+
+
{{ $item.Wpm }} WPM
+ {{ end }} {{ end }}
-
-
-

- Weekly Read Streak -

-
-

- {{ .Data.WeeklyStreak.CurrentStreak }} -

-
-
-
-
-

Current Weekly Streak

-
- {{ .Data.WeeklyStreak.CurrentStreakStartDate }} ➞ {{ - .Data.WeeklyStreak.CurrentStreakEndDate }} -
-
-
- {{ .Data.WeeklyStreak.CurrentStreak }} -
-
-
-
-

Best Weekly Streak

-
- {{ .Data.WeeklyStreak.MaxStreakStartDate }} ➞ {{ - .Data.WeeklyStreak.MaxStreakEndDate }} -
-
-
- {{ .Data.WeeklyStreak.MaxStreak }} -
-
-
-
-
+ {{end}}
- -{{end}} diff --git a/templates/settings.html b/templates/settings.html index 193e84a..0320383 100644 --- a/templates/settings.html +++ b/templates/settings.html @@ -201,7 +201,7 @@

{{ $device.DeviceName }}

-

{{ $device.LastSync }}

+

{{ $device.LastSynced }}

{{ $device.CreatedAt }}

diff --git a/utils/utils.go b/utils/utils.go index 3b07e08..e09bb60 100644 --- a/utils/utils.go +++ b/utils/utils.go @@ -56,6 +56,10 @@ func GetUTCOffsets() []UTCOffset { } func NiceSeconds(input int64) (result string) { + if input == 0 { + return "N/A" + } + days := math.Floor(float64(input) / 60 / 60 / 24) seconds := input % (60 * 60 * 24) hours := math.Floor(float64(seconds) / 60 / 60)