diff --git a/api/app-routes.go b/api/app-routes.go index d9ee62e..86408ec 100644 --- a/api/app-routes.go +++ b/api/app-routes.go @@ -8,7 +8,6 @@ import ( "os" "path/filepath" "strings" - "time" argon2 "github.com/alexedwards/argon2id" "github.com/gabriel-vasile/mimetype" @@ -130,7 +129,7 @@ func (api *API) createAppResourcesRoute(routeName string, args ...map[string]any "WordsPerMinute": "N/A", } - if document.Words != nil && *document.Words != 0 { + if document.Words != nil && *document.Words != 0 && document.Pages != 0 && document.TotalTimeSeconds != 0 { statistics["WordsPerMinute"] = (*document.Words / document.Pages * document.ReadPages) / (document.TotalTimeSeconds / 60.0) } @@ -158,39 +157,17 @@ func (api *API) createAppResourcesRoute(routeName string, args ...map[string]any templateVars["Data"] = activity } else if routeName == "home" { - start_time := time.Now() - weekly_streak, err := api.DB.Queries.GetUserWindowStreaks(api.DB.Ctx, database.GetUserWindowStreaksParams{ - UserID: userID, - Window: "WEEK", - }) - if err != nil { - log.Warn("[createAppResourcesRoute] GetUserWindowStreaks DB Error:", err) - } - log.Debug("GetUserWindowStreaks - WEEK - ", time.Since(start_time)) - start_time = time.Now() - daily_streak, err := api.DB.Queries.GetUserWindowStreaks(api.DB.Ctx, database.GetUserWindowStreaksParams{ - UserID: userID, - Window: "DAY", - }) - if err != nil { - log.Warn("[createAppResourcesRoute] GetUserWindowStreaks DB Error:", err) - } - log.Debug("GetUserWindowStreaks - DAY - ", time.Since(start_time)) - - start_time = time.Now() + streaks, _ := api.DB.Queries.GetUserStreaks(api.DB.Ctx, userID) database_info, _ := api.DB.Queries.GetDatabaseInfo(api.DB.Ctx, userID) - log.Debug("GetDatabaseInfo - ", time.Since(start_time)) - - start_time = time.Now() read_graph_data, _ := api.DB.Queries.GetDailyReadStats(api.DB.Ctx, userID) - log.Debug("GetDailyReadStats - ", time.Since(start_time)) + wpm_leaderboard, _ := api.DB.Queries.GetWPMLeaderboard(api.DB.Ctx) templateVars["Data"] = 
gin.H{ - "DailyStreak": daily_streak, - "WeeklyStreak": weekly_streak, - "DatabaseInfo": database_info, - "GraphData": read_graph_data, + "Streaks": streaks, + "GraphData": read_graph_data, + "DatabaseInfo": database_info, + "WPMLeaderboard": wpm_leaderboard, } } else if routeName == "settings" { user, err := api.DB.Queries.GetUser(api.DB.Ctx, userID) diff --git a/api/ko-routes.go b/api/ko-routes.go index 634be16..40cbfa9 100644 --- a/api/ko-routes.go +++ b/api/ko-routes.go @@ -277,6 +277,11 @@ func (api *API) addActivities(c *gin.Context) { return } + // Update Temp Tables + if err := api.DB.CacheTempTables(); err != nil { + log.Warn("[addActivities] CacheTempTables Failure: ", err) + } + c.JSON(http.StatusOK, gin.H{ "added": len(rActivity.Activity), }) diff --git a/client/syncninja.koplugin/main.lua b/client/syncninja.koplugin/main.lua index d91289c..ebd2a6d 100644 --- a/client/syncninja.koplugin/main.lua +++ b/client/syncninja.koplugin/main.lua @@ -75,7 +75,7 @@ local STATISTICS_ACTIVITY_SINCE_QUERY = [[ JOIN book AS b ON b.id = psd.id_book WHERE start_time > %d - ORDER BY start_time ASC LIMIT 1000; + ORDER BY start_time ASC LIMIT 5000; ]] local STATISTICS_BOOK_QUERY = [[ @@ -907,7 +907,7 @@ function SyncNinja:getStatisticsActivity(timestamp) local conn = SQ3.open(statistics_db) local stmt = conn:prepare(string.format(STATISTICS_ACTIVITY_SINCE_QUERY, timestamp)) - local rows = stmt:resultset("i", 1000) + local rows = stmt:resultset("i", 5000) conn:close() -- No Results diff --git a/database/manager.go b/database/manager.go index bceff4e..b8fc36c 100644 --- a/database/manager.go +++ b/database/manager.go @@ -21,6 +21,9 @@ type DBManager struct { //go:embed schema.sql var ddl string +//go:embed update_temp_tables.sql +var tsql string + func NewMgr(c *config.Config) *DBManager { // Create Manager dbm := &DBManager{ @@ -44,22 +47,23 @@ func NewMgr(c *config.Config) *DBManager { log.Fatal("Unsupported Database") } - // Create Tables - if _, err := 
dbm.DB.ExecContext(dbm.Ctx, ddl); err != nil { - log.Fatal(err) - } - dbm.Queries = New(dbm.DB) return dbm } -func connectHookSQLite(conn *sqlite.SQLiteConn) error { - if err := conn.RegisterFunc("test_func", func() string { - return "FOOBAR" - }, false); err != nil { - log.Info("Error Registering Function") +func (dbm *DBManager) CacheTempTables() error { + if _, err := dbm.DB.ExecContext(dbm.Ctx, tsql); err != nil { return err } return nil } + +func connectHookSQLite(conn *sqlite.SQLiteConn) error { + // Create Tables + log.Debug("Creating Schema") + if _, err := conn.Exec(ddl, nil); err != nil { + log.Warn("Create Schema Failure: ", err) + } + return nil +} diff --git a/database/models.go b/database/models.go index fe1f082..a1f830f 100644 --- a/database/models.go +++ b/database/models.go @@ -5,19 +5,19 @@ package database import ( + "database/sql" "time" ) type Activity struct { - ID int64 `json:"id"` UserID string `json:"user_id"` DocumentID string `json:"document_id"` DeviceID string `json:"device_id"` + CreatedAt time.Time `json:"created_at"` StartTime time.Time `json:"start_time"` - Duration int64 `json:"duration"` Page int64 `json:"page"` Pages int64 `json:"pages"` - CreatedAt time.Time `json:"created_at"` + Duration int64 `json:"duration"` } type Device struct { @@ -67,6 +67,20 @@ type DocumentProgress struct { CreatedAt time.Time `json:"created_at"` } +type DocumentUserStatistic struct { + DocumentID string `json:"document_id"` + UserID string `json:"user_id"` + LastRead string `json:"last_read"` + Page int64 `json:"page"` + Pages int64 `json:"pages"` + TotalTimeSeconds int64 `json:"total_time_seconds"` + ReadPages int64 `json:"read_pages"` + Percentage float64 `json:"percentage"` + Words int64 `json:"words"` + WordsRead int64 `json:"words_read"` + Wpm float64 `json:"wpm"` +} + type Metadatum struct { ID int64 `json:"id"` DocumentID string `json:"document_id"` @@ -80,14 +94,16 @@ type Metadatum struct { CreatedAt time.Time `json:"created_at"` } -type 
RescaledActivity struct { +type RawActivity struct { + ID int64 `json:"id"` + UserID string `json:"user_id"` DocumentID string `json:"document_id"` DeviceID string `json:"device_id"` - UserID string `json:"user_id"` StartTime time.Time `json:"start_time"` - Pages int64 `json:"pages"` Page int64 `json:"page"` + Pages int64 `json:"pages"` Duration int64 `json:"duration"` + CreatedAt time.Time `json:"created_at"` } type User struct { @@ -97,3 +113,50 @@ type User struct { TimeOffset *string `json:"time_offset"` CreatedAt time.Time `json:"created_at"` } + +type UserStreak struct { + UserID string `json:"user_id"` + Window string `json:"window"` + MaxStreak int64 `json:"max_streak"` + MaxStreakStartDate string `json:"max_streak_start_date"` + MaxStreakEndDate string `json:"max_streak_end_date"` + CurrentStreak int64 `json:"current_streak"` + CurrentStreakStartDate string `json:"current_streak_start_date"` + CurrentStreakEndDate string `json:"current_streak_end_date"` +} + +type ViewDocumentUserStatistic struct { + DocumentID string `json:"document_id"` + UserID string `json:"user_id"` + LastRead time.Time `json:"last_read"` + Page int64 `json:"page"` + Pages int64 `json:"pages"` + TotalTimeSeconds sql.NullFloat64 `json:"total_time_seconds"` + ReadPages int64 `json:"read_pages"` + Percentage float64 `json:"percentage"` + Words float64 `json:"words"` + WordsRead interface{} `json:"words_read"` + Wpm int64 `json:"wpm"` +} + +type ViewRescaledActivity struct { + UserID string `json:"user_id"` + DocumentID string `json:"document_id"` + DeviceID string `json:"device_id"` + CreatedAt time.Time `json:"created_at"` + StartTime time.Time `json:"start_time"` + Page int64 `json:"page"` + Pages int64 `json:"pages"` + Duration int64 `json:"duration"` +} + +type ViewUserStreak struct { + UserID string `json:"user_id"` + Window string `json:"window"` + MaxStreak interface{} `json:"max_streak"` + MaxStreakStartDate interface{} `json:"max_streak_start_date"` + MaxStreakEndDate 
interface{} `json:"max_streak_end_date"` + CurrentStreak interface{} `json:"current_streak"` + CurrentStreakStartDate interface{} `json:"current_streak_start_date"` + CurrentStreakEndDate interface{} `json:"current_streak_end_date"` +} diff --git a/database/query.sql b/database/query.sql index 2515935..990d59b 100644 --- a/database/query.sql +++ b/database/query.sql @@ -135,7 +135,7 @@ AND user_id = $user_id ORDER BY start_time DESC LIMIT 1; -- name: AddActivity :one -INSERT INTO activity ( +INSERT INTO raw_activity ( user_id, document_id, device_id, @@ -195,12 +195,12 @@ WITH true_progress AS ( page, pages, - -- Determine Read Pages - COUNT(DISTINCT page) AS read_pages, + -- Determine Read Pages + COUNT(DISTINCT page) AS read_pages, - -- Derive Percentage of Book + -- Derive Percentage of Book ROUND(CAST(page AS REAL) / CAST(pages AS REAL) * 100, 2) AS percentage - FROM rescaled_activity + FROM activity WHERE user_id = $user_id AND document_id = $document_id GROUP BY document_id @@ -220,15 +220,15 @@ SELECT -- 1. Calculate Total Time in Seconds (Sum Duration in Activity) -- 2. 
Divide by Read Pages (Distinct Pages in Activity) CAST(CASE - WHEN total_time_seconds IS NULL THEN 0.0 - ELSE ROUND(CAST(total_time_seconds AS REAL) / CAST(read_pages AS REAL)) + WHEN total_time_seconds IS NULL THEN 0.0 + ELSE ROUND(CAST(total_time_seconds AS REAL) / CAST(read_pages AS REAL)) END AS INTEGER) AS seconds_per_page, -- Arbitrarily >97% is Complete CAST(CASE - WHEN percentage > 97.0 THEN 100.0 - WHEN percentage IS NULL THEN 0.0 - ELSE percentage + WHEN percentage > 97.0 THEN 100.0 + WHEN percentage IS NULL THEN 0.0 + ELSE percentage END AS REAL) AS percentage FROM documents @@ -291,7 +291,7 @@ OFFSET $offset; -- name: GetActivity :many SELECT document_id, - CAST(DATETIME(activity.start_time, time_offset) AS TEXT) AS start_time, + CAST(DATETIME(activity.start_time, users.time_offset) AS TEXT) AS start_time, title, author, duration, @@ -303,11 +303,12 @@ LEFT JOIN users ON users.id = activity.user_id WHERE activity.user_id = $user_id AND ( - CAST($doc_filter AS BOOLEAN) = TRUE - AND document_id = $document_id + ( + CAST($doc_filter AS BOOLEAN) = TRUE + AND document_id = $document_id + ) OR $doc_filter = FALSE ) - OR $doc_filter = FALSE -ORDER BY start_time DESC +ORDER BY activity.start_time DESC LIMIT $limit OFFSET $offset; @@ -326,7 +327,7 @@ GROUP BY activity.device_id; SELECT COUNT(DISTINCT page) AS pages_read, SUM(duration) AS total_time -FROM rescaled_activity +FROM activity WHERE document_id = $document_id AND user_id = $user_id AND start_time >= $start_time; @@ -334,7 +335,7 @@ AND start_time >= $start_time; -- name: GetDocumentReadStatsCapped :one WITH capped_stats AS ( SELECT MIN(SUM(duration), CAST($page_duration_cap AS INTEGER)) AS durations - FROM rescaled_activity + FROM activity WHERE document_id = $document_id AND user_id = $user_id AND start_time >= $start_time @@ -357,77 +358,9 @@ WITH document_days AS ( SELECT CAST(COUNT(*) AS INTEGER) AS days_read FROM document_days; --- name: GetUserWindowStreaks :one -WITH document_windows AS ( - 
SELECT - CASE - WHEN ?2 = "WEEK" THEN DATE(start_time, time_offset, 'weekday 0', '-7 day') - WHEN ?2 = "DAY" THEN DATE(start_time, time_offset) - END AS read_window, - time_offset - FROM activity - JOIN users ON users.id = activity.user_id - WHERE user_id = $user_id - AND CAST($window AS TEXT) = CAST($window AS TEXT) - GROUP BY read_window -), -partitions AS ( - SELECT - document_windows.*, - row_number() OVER ( - PARTITION BY 1 ORDER BY read_window DESC - ) AS seqnum - FROM document_windows -), -streaks AS ( - SELECT - COUNT(*) AS streak, - MIN(read_window) AS start_date, - MAX(read_window) AS end_date, - time_offset - FROM partitions - GROUP BY - CASE - WHEN ?2 = "DAY" THEN DATE(read_window, '+' || seqnum || ' day') - WHEN ?2 = "WEEK" THEN DATE(read_window, '+' || (seqnum * 7) || ' day') - END, - time_offset - ORDER BY end_date DESC -), -max_streak AS ( - SELECT - MAX(streak) AS max_streak, - start_date AS max_streak_start_date, - end_date AS max_streak_end_date - FROM streaks - LIMIT 1 -), -current_streak AS ( - SELECT - streak AS current_streak, - start_date AS current_streak_start_date, - end_date AS current_streak_end_date - FROM streaks - WHERE CASE - WHEN ?2 = "WEEK" THEN - DATE('now', time_offset, 'weekday 0', '-14 day') = current_streak_end_date - OR DATE('now', time_offset, 'weekday 0', '-7 day') = current_streak_end_date - WHEN ?2 = "DAY" THEN - DATE('now', time_offset, '-1 day') = current_streak_end_date - OR DATE('now', time_offset) = current_streak_end_date - END - LIMIT 1 -) -SELECT - CAST(IFNULL(max_streak, 0) AS INTEGER) AS max_streak, - CAST(IFNULL(max_streak_start_date, "N/A") AS TEXT) AS max_streak_start_date, - CAST(IFNULL(max_streak_end_date, "N/A") AS TEXT) AS max_streak_end_date, - IFNULL(current_streak, 0) AS current_streak, - CAST(IFNULL(current_streak_start_date, "N/A") AS TEXT) AS current_streak_start_date, - CAST(IFNULL(current_streak_end_date, "N/A") AS TEXT) AS current_streak_end_date -FROM max_streak -LEFT JOIN current_streak ON 1 = 
1 -LIMIT 1; +-- name: GetUserStreaks :many +SELECT * FROM user_streaks +WHERE user_id = $user_id; -- name: GetDatabaseInfo :one SELECT @@ -468,3 +401,15 @@ FROM last_30_days LEFT JOIN activity_records ON activity_records.day == last_30_days.date ORDER BY date DESC LIMIT 30; + +-- name: GetWPMLeaderboard :many +SELECT + user_id, + CAST(SUM(words_read) AS INTEGER) AS total_words_read, + CAST(SUM(total_time_seconds) AS INTEGER) AS total_seconds, + ROUND(CAST(SUM(words_read) AS REAL) / (SUM(total_time_seconds) / 60.0), 2) + AS wpm +FROM document_user_statistics +WHERE words_read > 0 +GROUP BY user_id +ORDER BY wpm DESC; diff --git a/database/query.sql.go b/database/query.sql.go index 586725c..9458e38 100644 --- a/database/query.sql.go +++ b/database/query.sql.go @@ -13,7 +13,7 @@ import ( ) const addActivity = `-- name: AddActivity :one -INSERT INTO activity ( +INSERT INTO raw_activity ( user_id, document_id, device_id, @@ -23,7 +23,7 @@ INSERT INTO activity ( pages ) VALUES (?, ?, ?, ?, ?, ?, ?) 
-RETURNING id, user_id, document_id, device_id, start_time, duration, page, pages, created_at +RETURNING id, user_id, document_id, device_id, start_time, page, pages, duration, created_at ` type AddActivityParams struct { @@ -36,7 +36,7 @@ type AddActivityParams struct { Pages int64 `json:"pages"` } -func (q *Queries) AddActivity(ctx context.Context, arg AddActivityParams) (Activity, error) { +func (q *Queries) AddActivity(ctx context.Context, arg AddActivityParams) (RawActivity, error) { row := q.db.QueryRowContext(ctx, addActivity, arg.UserID, arg.DocumentID, @@ -46,16 +46,16 @@ func (q *Queries) AddActivity(ctx context.Context, arg AddActivityParams) (Activ arg.Page, arg.Pages, ) - var i Activity + var i RawActivity err := row.Scan( &i.ID, &i.UserID, &i.DocumentID, &i.DeviceID, &i.StartTime, - &i.Duration, &i.Page, &i.Pages, + &i.Duration, &i.CreatedAt, ) return i, err @@ -151,7 +151,7 @@ func (q *Queries) DeleteDocument(ctx context.Context, id string) (int64, error) const getActivity = `-- name: GetActivity :many SELECT document_id, - CAST(DATETIME(activity.start_time, time_offset) AS TEXT) AS start_time, + CAST(DATETIME(activity.start_time, users.time_offset) AS TEXT) AS start_time, title, author, duration, @@ -163,11 +163,12 @@ LEFT JOIN users ON users.id = activity.user_id WHERE activity.user_id = ?1 AND ( - CAST(?2 AS BOOLEAN) = TRUE - AND document_id = ?3 + ( + CAST(?2 AS BOOLEAN) = TRUE + AND document_id = ?3 + ) OR ?2 = FALSE ) - OR ?2 = FALSE -ORDER BY start_time DESC +ORDER BY activity.start_time DESC LIMIT ?5 OFFSET ?4 ` @@ -477,7 +478,7 @@ const getDocumentReadStats = `-- name: GetDocumentReadStats :one SELECT COUNT(DISTINCT page) AS pages_read, SUM(duration) AS total_time -FROM rescaled_activity +FROM activity WHERE document_id = ?1 AND user_id = ?2 AND start_time >= ?3 @@ -504,7 +505,7 @@ func (q *Queries) GetDocumentReadStats(ctx context.Context, arg GetDocumentReadS const getDocumentReadStatsCapped = `-- name: GetDocumentReadStatsCapped :one WITH 
capped_stats AS ( SELECT MIN(SUM(duration), CAST(?1 AS INTEGER)) AS durations - FROM rescaled_activity + FROM activity WHERE document_id = ?2 AND user_id = ?3 AND start_time >= ?4 @@ -549,12 +550,12 @@ WITH true_progress AS ( page, pages, - -- Determine Read Pages - COUNT(DISTINCT page) AS read_pages, + -- Determine Read Pages + COUNT(DISTINCT page) AS read_pages, - -- Derive Percentage of Book + -- Derive Percentage of Book ROUND(CAST(page AS REAL) / CAST(pages AS REAL) * 100, 2) AS percentage - FROM rescaled_activity + FROM activity WHERE user_id = ?1 AND document_id = ?2 GROUP BY document_id @@ -574,15 +575,15 @@ SELECT -- 1. Calculate Total Time in Seconds (Sum Duration in Activity) -- 2. Divide by Read Pages (Distinct Pages in Activity) CAST(CASE - WHEN total_time_seconds IS NULL THEN 0.0 - ELSE ROUND(CAST(total_time_seconds AS REAL) / CAST(read_pages AS REAL)) + WHEN total_time_seconds IS NULL THEN 0.0 + ELSE ROUND(CAST(total_time_seconds AS REAL) / CAST(read_pages AS REAL)) END AS INTEGER) AS seconds_per_page, -- Arbitrarily >97% is Complete CAST(CASE - WHEN percentage > 97.0 THEN 100.0 - WHEN percentage IS NULL THEN 0.0 - ELSE percentage + WHEN percentage > 97.0 THEN 100.0 + WHEN percentage IS NULL THEN 0.0 + ELSE percentage END AS REAL) AS percentage FROM documents @@ -978,105 +979,41 @@ func (q *Queries) GetUser(ctx context.Context, userID string) (User, error) { return i, err } -const getUserWindowStreaks = `-- name: GetUserWindowStreaks :one -WITH document_windows AS ( - SELECT - CASE - WHEN ?2 = "WEEK" THEN DATE(start_time, time_offset, 'weekday 0', '-7 day') - WHEN ?2 = "DAY" THEN DATE(start_time, time_offset) - END AS read_window, - time_offset - FROM activity - JOIN users ON users.id = activity.user_id - WHERE user_id = ?1 - AND CAST(?2 AS TEXT) = CAST(?2 AS TEXT) - GROUP BY read_window -), -partitions AS ( - SELECT - document_windows.read_window, document_windows.time_offset, - row_number() OVER ( - PARTITION BY 1 ORDER BY read_window DESC - ) AS 
seqnum - FROM document_windows -), -streaks AS ( - SELECT - COUNT(*) AS streak, - MIN(read_window) AS start_date, - MAX(read_window) AS end_date, - time_offset - FROM partitions - GROUP BY - CASE - WHEN ?2 = "DAY" THEN DATE(read_window, '+' || seqnum || ' day') - WHEN ?2 = "WEEK" THEN DATE(read_window, '+' || (seqnum * 7) || ' day') - END, - time_offset - ORDER BY end_date DESC -), -max_streak AS ( - SELECT - MAX(streak) AS max_streak, - start_date AS max_streak_start_date, - end_date AS max_streak_end_date - FROM streaks - LIMIT 1 -), -current_streak AS ( - SELECT - streak AS current_streak, - start_date AS current_streak_start_date, - end_date AS current_streak_end_date - FROM streaks - WHERE CASE - WHEN ?2 = "WEEK" THEN - DATE('now', time_offset, 'weekday 0', '-14 day') = current_streak_end_date - OR DATE('now', time_offset, 'weekday 0', '-7 day') = current_streak_end_date - WHEN ?2 = "DAY" THEN - DATE('now', time_offset, '-1 day') = current_streak_end_date - OR DATE('now', time_offset) = current_streak_end_date - END - LIMIT 1 -) -SELECT - CAST(IFNULL(max_streak, 0) AS INTEGER) AS max_streak, - CAST(IFNULL(max_streak_start_date, "N/A") AS TEXT) AS max_streak_start_date, - CAST(IFNULL(max_streak_end_date, "N/A") AS TEXT) AS max_streak_end_date, - IFNULL(current_streak, 0) AS current_streak, - CAST(IFNULL(current_streak_start_date, "N/A") AS TEXT) AS current_streak_start_date, - CAST(IFNULL(current_streak_end_date, "N/A") AS TEXT) AS current_streak_end_date -FROM max_streak -LEFT JOIN current_streak ON 1 = 1 -LIMIT 1 +const getUserStreaks = `-- name: GetUserStreaks :many +SELECT user_id, "window", max_streak, max_streak_start_date, max_streak_end_date, current_streak, current_streak_start_date, current_streak_end_date FROM user_streaks +WHERE user_id = ?1 ` -type GetUserWindowStreaksParams struct { - UserID string `json:"user_id"` - Window string `json:"window"` -} - -type GetUserWindowStreaksRow struct { - MaxStreak int64 `json:"max_streak"` - MaxStreakStartDate 
string `json:"max_streak_start_date"` - MaxStreakEndDate string `json:"max_streak_end_date"` - CurrentStreak interface{} `json:"current_streak"` - CurrentStreakStartDate string `json:"current_streak_start_date"` - CurrentStreakEndDate string `json:"current_streak_end_date"` -} - -func (q *Queries) GetUserWindowStreaks(ctx context.Context, arg GetUserWindowStreaksParams) (GetUserWindowStreaksRow, error) { - row := q.db.QueryRowContext(ctx, getUserWindowStreaks, arg.UserID, arg.Window) - var i GetUserWindowStreaksRow - err := row.Scan( - &i.MaxStreak, - &i.MaxStreakStartDate, - &i.MaxStreakEndDate, - &i.CurrentStreak, - &i.CurrentStreakStartDate, - &i.CurrentStreakEndDate, - ) - return i, err +func (q *Queries) GetUserStreaks(ctx context.Context, userID string) ([]UserStreak, error) { + rows, err := q.db.QueryContext(ctx, getUserStreaks, userID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []UserStreak + for rows.Next() { + var i UserStreak + if err := rows.Scan( + &i.UserID, + &i.Window, + &i.MaxStreak, + &i.MaxStreakStartDate, + &i.MaxStreakEndDate, + &i.CurrentStreak, + &i.CurrentStreakStartDate, + &i.CurrentStreakEndDate, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil } const getUsers = `-- name: GetUsers :many @@ -1129,6 +1066,54 @@ func (q *Queries) GetUsers(ctx context.Context, arg GetUsersParams) ([]User, err return items, nil } +const getWPMLeaderboard = `-- name: GetWPMLeaderboard :many +SELECT + user_id, + CAST(SUM(words_read) AS INTEGER) AS total_words_read, + CAST(SUM(total_time_seconds) AS INTEGER) AS total_seconds, + ROUND(CAST(SUM(words_read) AS REAL) / (SUM(total_time_seconds) / 60.0), 2) + AS wpm +FROM document_user_statistics +WHERE words_read > 0 +GROUP BY user_id +ORDER BY wpm DESC +` + +type GetWPMLeaderboardRow struct { + UserID string `json:"user_id"` + 
TotalWordsRead int64 `json:"total_words_read"` + TotalSeconds int64 `json:"total_seconds"` + Wpm float64 `json:"wpm"` +} + +func (q *Queries) GetWPMLeaderboard(ctx context.Context) ([]GetWPMLeaderboardRow, error) { + rows, err := q.db.QueryContext(ctx, getWPMLeaderboard) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetWPMLeaderboardRow + for rows.Next() { + var i GetWPMLeaderboardRow + if err := rows.Scan( + &i.UserID, + &i.TotalWordsRead, + &i.TotalSeconds, + &i.Wpm, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const getWantedDocuments = `-- name: GetWantedDocuments :many SELECT CAST(value AS TEXT) AS id, diff --git a/database/schema.sql b/database/schema.sql index f55d319..6fa1fc6 100644 --- a/database/schema.sql +++ b/database/schema.sql @@ -1,6 +1,10 @@ PRAGMA foreign_keys = ON; PRAGMA journal_mode = WAL; +--------------------------------------------------------------- +------------------------ Normal Tables ------------------------ +--------------------------------------------------------------- + -- Authentication CREATE TABLE IF NOT EXISTS users ( id TEXT NOT NULL PRIMARY KEY, @@ -101,17 +105,17 @@ CREATE TABLE IF NOT EXISTS document_progress ( PRIMARY KEY (user_id, document_id, device_id) ); --- Read Activity -CREATE TABLE IF NOT EXISTS activity ( +-- Raw Read Activity +CREATE TABLE IF NOT EXISTS raw_activity ( id INTEGER PRIMARY KEY AUTOINCREMENT, user_id TEXT NOT NULL, document_id TEXT NOT NULL, device_id TEXT NOT NULL, start_time DATETIME NOT NULL, - duration INTEGER NOT NULL, page INTEGER NOT NULL, pages INTEGER NOT NULL, + duration INTEGER NOT NULL, created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, FOREIGN KEY (user_id) REFERENCES users (id), @@ -119,23 +123,72 @@ CREATE TABLE IF NOT EXISTS activity ( FOREIGN KEY (device_id) REFERENCES devices (id) 
); --- Indexes -CREATE INDEX IF NOT EXISTS activity_start_time ON activity (start_time); -CREATE INDEX IF NOT EXISTS activity_user_id_document_id ON activity ( +--------------------------------------------------------------- +----------------------- Temporary Tables ---------------------- +--------------------------------------------------------------- + +-- Temporary Activity Table (Cached from View) +CREATE TEMPORARY TABLE IF NOT EXISTS activity ( + user_id TEXT NOT NULL, + document_id TEXT NOT NULL, + device_id TEXT NOT NULL, + + created_at DATETIME NOT NULL, + start_time DATETIME NOT NULL, + page INTEGER NOT NULL, + pages INTEGER NOT NULL, + duration INTEGER NOT NULL +); + +-- Temporary User Streaks Table (Cached from View) +CREATE TEMPORARY TABLE IF NOT EXISTS user_streaks ( + user_id TEXT NOT NULL, + window TEXT NOT NULL, + + max_streak INTEGER NOT NULL, + max_streak_start_date TEXT NOT NULL, + max_streak_end_date TEXT NOT NULL, + + current_streak INTEGER NOT NULL, + current_streak_start_date TEXT NOT NULL, + current_streak_end_date TEXT NOT NULL +); + +CREATE TEMPORARY TABLE IF NOT EXISTS document_user_statistics ( + document_id TEXT NOT NULL, + user_id TEXT NOT NULL, + last_read TEXT NOT NULL, + page INTEGER NOT NULL, + pages INTEGER NOT NULL, + total_time_seconds INTEGER NOT NULL, + read_pages INTEGER NOT NULL, + percentage REAL NOT NULL, + words INTEGER NOT NULL, + words_read INTEGER NOT NULL, + wpm REAL NOT NULL +); + + +--------------------------------------------------------------- +--------------------------- Indexes --------------------------- +--------------------------------------------------------------- + +CREATE INDEX IF NOT EXISTS temp.activity_start_time ON activity (start_time); +CREATE INDEX IF NOT EXISTS temp.activity_user_id ON activity (user_id); +CREATE INDEX IF NOT EXISTS temp.activity_user_id_document_id ON activity ( user_id, document_id ); --- Update Trigger -CREATE TRIGGER IF NOT EXISTS update_documents_updated_at -BEFORE UPDATE ON 
documents BEGIN -UPDATE documents -SET updated_at = CURRENT_TIMESTAMP -WHERE id = old.id; -END; +--------------------------------------------------------------- +---------------------------- Views ---------------------------- +--------------------------------------------------------------- --- Rescaled Activity View (Adapted from KOReader) -CREATE VIEW IF NOT EXISTS rescaled_activity AS +-------------------------------- +------- Rescaled Activity ------ +-------------------------------- + +CREATE VIEW IF NOT EXISTS view_rescaled_activity AS WITH RECURSIVE nums (idx) AS ( SELECT 1 AS idx @@ -150,7 +203,7 @@ current_pages AS ( document_id, user_id, pages - FROM activity + FROM raw_activity GROUP BY document_id, user_id HAVING MAX(start_time) ORDER BY start_time DESC @@ -158,33 +211,33 @@ current_pages AS ( intermediate AS ( SELECT - activity.document_id, - activity.device_id, - activity.user_id, - activity.start_time, - activity.duration, - activity.page, + raw_activity.document_id, + raw_activity.device_id, + raw_activity.user_id, + raw_activity.created_at, + raw_activity.start_time, + raw_activity.duration, + raw_activity.page, current_pages.pages, -- Derive first page - ((activity.page - 1) * current_pages.pages) / activity.pages + ((raw_activity.page - 1) * current_pages.pages) / raw_activity.pages + 1 AS first_page, -- Derive last page MAX( - ((activity.page - 1) * current_pages.pages) - / activity.pages + ((raw_activity.page - 1) * current_pages.pages) + / raw_activity.pages + 1, - (activity.page * current_pages.pages) / activity.pages + (raw_activity.page * current_pages.pages) / raw_activity.pages ) AS last_page - FROM activity + FROM raw_activity INNER JOIN current_pages ON - current_pages.document_id = activity.document_id - AND current_pages.user_id = activity.user_id + current_pages.document_id = raw_activity.document_id + AND current_pages.user_id = raw_activity.user_id ), --- Improves performance num_limit AS ( SELECT * FROM nums LIMIT (SELECT 
MAX(last_page - first_page + 1) FROM intermediate) @@ -192,28 +245,30 @@ num_limit AS ( rescaled_raw AS ( SELECT - document_id, - device_id, - user_id, - start_time, - last_page, - pages, - first_page + num_limit.idx - 1 AS page, - duration / ( - last_page - first_page + 1.0 + intermediate.document_id, + intermediate.device_id, + intermediate.user_id, + intermediate.created_at, + intermediate.start_time, + intermediate.last_page, + intermediate.pages, + intermediate.first_page + num_limit.idx - 1 AS page, + intermediate.duration / ( + intermediate.last_page - intermediate.first_page + 1.0 ) AS duration FROM intermediate - JOIN num_limit ON - num_limit.idx <= (last_page - first_page + 1) + LEFT JOIN num_limit ON + num_limit.idx <= (intermediate.last_page - intermediate.first_page + 1) ) SELECT + user_id, document_id, device_id, - user_id, + created_at, start_time, - pages, page, + pages, -- Round up if last page (maintains total duration) CAST(CASE @@ -222,3 +277,175 @@ SELECT ELSE duration END AS INTEGER) AS duration FROM rescaled_raw; + +-------------------------------- +--------- User Streaks --------- +-------------------------------- + +CREATE VIEW IF NOT EXISTS view_user_streaks AS + +WITH document_windows AS ( + SELECT + activity.user_id, + users.time_offset, + DATE( + activity.start_time, + users.time_offset, + 'weekday 0', '-7 day' + ) AS weekly_read, + DATE(activity.start_time, users.time_offset) AS daily_read + FROM raw_activity AS activity + LEFT JOIN users ON users.id = activity.user_id + GROUP BY activity.user_id, weekly_read, daily_read +), + +weekly_partitions AS ( + SELECT + user_id, + time_offset, + 'WEEK' AS "window", + weekly_read AS read_window, + row_number() OVER ( + PARTITION BY user_id ORDER BY weekly_read DESC + ) AS seqnum + FROM document_windows + GROUP BY user_id, weekly_read +), + +daily_partitions AS ( + SELECT + user_id, + time_offset, + 'DAY' AS "window", + daily_read AS read_window, + row_number() OVER ( + PARTITION BY user_id ORDER 
BY daily_read DESC + ) AS seqnum + FROM document_windows + GROUP BY user_id, daily_read +), + +streaks AS ( + SELECT + COUNT(*) AS streak, + MIN(read_window) AS start_date, + MAX(read_window) AS end_date, + window, + user_id, + time_offset + FROM daily_partitions + GROUP BY + time_offset, + user_id, + DATE(read_window, '+' || seqnum || ' day') + + UNION ALL + + SELECT + COUNT(*) AS streak, + MIN(read_window) AS start_date, + MAX(read_window) AS end_date, + window, + user_id, + time_offset + FROM weekly_partitions + GROUP BY + time_offset, + user_id, + DATE(read_window, '+' || (seqnum * 7) || ' day') +), +max_streak AS ( + SELECT + MAX(streak) AS max_streak, + start_date AS max_streak_start_date, + end_date AS max_streak_end_date, + window, + user_id + FROM streaks + GROUP BY user_id, window +), +current_streak AS ( + SELECT + streak AS current_streak, + start_date AS current_streak_start_date, + end_date AS current_streak_end_date, + window, + user_id + FROM streaks + WHERE CASE + WHEN window = "WEEK" THEN + DATE('now', time_offset, 'weekday 0', '-14 day') = current_streak_end_date + OR DATE('now', time_offset, 'weekday 0', '-7 day') = current_streak_end_date + WHEN window = "DAY" THEN + DATE('now', time_offset, '-1 day') = current_streak_end_date + OR DATE('now', time_offset) = current_streak_end_date + END + GROUP BY user_id, window +) +SELECT + max_streak.user_id, + max_streak.window, + IFNULL(max_streak, 0) AS max_streak, + IFNULL(max_streak_start_date, "N/A") AS max_streak_start_date, + IFNULL(max_streak_end_date, "N/A") AS max_streak_end_date, + IFNULL(current_streak, 0) AS current_streak, + IFNULL(current_streak_start_date, "N/A") AS current_streak_start_date, + IFNULL(current_streak_end_date, "N/A") AS current_streak_end_date +FROM max_streak +LEFT JOIN current_streak ON + current_streak.user_id = max_streak.user_id + AND current_streak.window = max_streak.window; + +-------------------------------- +------- Document Stats --------- 
+-------------------------------- + +CREATE VIEW IF NOT EXISTS view_document_user_statistics AS + +WITH true_progress AS ( + SELECT + document_id, + user_id, + start_time AS last_read, + page, + pages, + SUM(duration) AS total_time_seconds, + + -- Determine Read Pages + COUNT(DISTINCT page) AS read_pages, + + -- Derive Percentage of Book + ROUND(CAST(page AS REAL) / CAST(pages AS REAL) * 100, 2) AS percentage + FROM view_rescaled_activity + GROUP BY document_id, user_id + HAVING MAX(start_time) +) +SELECT + true_progress.*, + CAST(COALESCE(documents.words, 0.0) AS REAL) AS words, + (CAST(COALESCE(documents.words, 0.0) AS REAL) / pages * read_pages) + AS words_read, + (CAST(COALESCE(documents.words, 0.0) AS REAL) / pages * read_pages) + / (total_time_seconds / 60.0) AS wpm +FROM true_progress +INNER JOIN documents ON documents.id = true_progress.document_id +ORDER BY wpm DESC; + +--------------------------------------------------------------- +------------------ Populate Temporary Tables ------------------ +--------------------------------------------------------------- +INSERT INTO activity SELECT * FROM view_rescaled_activity; +INSERT INTO user_streaks SELECT * FROM view_user_streaks; +INSERT INTO document_user_statistics SELECT * FROM view_document_user_statistics; + +--------------------------------------------------------------- +--------------------------- Triggers -------------------------- +--------------------------------------------------------------- + +-- Update Trigger +CREATE TRIGGER IF NOT EXISTS update_documents_updated_at +BEFORE UPDATE ON documents BEGIN +UPDATE documents +SET updated_at = CURRENT_TIMESTAMP +WHERE id = old.id; +END; diff --git a/database/update_temp_tables.sql b/database/update_temp_tables.sql new file mode 100644 index 0000000..f63964e --- /dev/null +++ b/database/update_temp_tables.sql @@ -0,0 +1,8 @@ +DELETE FROM activity; +INSERT INTO activity SELECT * FROM view_rescaled_activity; +DELETE FROM user_streaks; +INSERT INTO 
user_streaks SELECT * FROM view_user_streaks; +DELETE FROM document_user_statistics; +INSERT INTO document_user_statistics +SELECT * +FROM view_document_user_statistics; diff --git a/templates/home.html b/templates/home.html index a64ee58..41aa98d 100644 --- a/templates/home.html +++ b/templates/home.html @@ -151,6 +151,7 @@
- {{ .Data.DailyStreak.CurrentStreak }} + {{ $item.CurrentStreak }}
Current Daily Streak
++ {{ if eq $item.Window "WEEK" }} Current Weekly Streak {{ else }} + Current Daily Streak {{ end }} +
Best Daily Streak
++ {{ if eq $item.Window "WEEK" }} Best Weekly Streak {{ else }} Best + Daily Streak {{ end }} +
+ WPM Leaderboard +
+N/A
+ {{ else }} ++ {{ (index .Data.WPMLeaderboard 0).UserID }} +
+ {{ end }} +{{ $item.UserID }}
+- Weekly Read Streak -
-- {{ .Data.WeeklyStreak.CurrentStreak }} -
-Current Weekly Streak
-Best Weekly Streak
-